{ "best_metric": null, "best_model_checkpoint": null, "epoch": 3.981973610853001, "eval_steps": 1346, "global_step": 21524, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.00018583906337112061, "grad_norm": 0.27501940727233887, "learning_rate": 1e-05, "loss": 1.4066, "step": 1 }, { "epoch": 0.00018583906337112061, "eval_loss": 1.6832497119903564, "eval_runtime": 22.6047, "eval_samples_per_second": 48.309, "eval_steps_per_second": 24.154, "step": 1 }, { "epoch": 0.00037167812674224123, "grad_norm": 0.6742346286773682, "learning_rate": 2e-05, "loss": 1.4238, "step": 2 }, { "epoch": 0.0005575171901133618, "grad_norm": 0.29993563890457153, "learning_rate": 3e-05, "loss": 1.2451, "step": 3 }, { "epoch": 0.0007433562534844825, "grad_norm": 0.28028905391693115, "learning_rate": 4e-05, "loss": 1.1953, "step": 4 }, { "epoch": 0.000929195316855603, "grad_norm": 0.2644942104816437, "learning_rate": 5e-05, "loss": 1.3122, "step": 5 }, { "epoch": 0.0011150343802267236, "grad_norm": 0.30770039558410645, "learning_rate": 6e-05, "loss": 1.5342, "step": 6 }, { "epoch": 0.0013008734435978444, "grad_norm": 0.2737680971622467, "learning_rate": 7e-05, "loss": 1.1543, "step": 7 }, { "epoch": 0.001486712506968965, "grad_norm": 0.3635600805282593, "learning_rate": 8e-05, "loss": 1.4659, "step": 8 }, { "epoch": 0.0016725515703400855, "grad_norm": 0.2415798455476761, "learning_rate": 9e-05, "loss": 1.0784, "step": 9 }, { "epoch": 0.001858390633711206, "grad_norm": 0.4179717004299164, "learning_rate": 0.0001, "loss": 1.4486, "step": 10 }, { "epoch": 0.0020442296970823266, "grad_norm": 1.9943130016326904, "learning_rate": 0.00011000000000000002, "loss": 4.5419, "step": 11 }, { "epoch": 0.002230068760453447, "grad_norm": 0.3632005453109741, "learning_rate": 0.00012, "loss": 1.4089, "step": 12 }, { "epoch": 0.0024159078238245677, "grad_norm": 0.30995360016822815, "learning_rate": 0.00013000000000000002, "loss": 1.047, 
"step": 13 }, { "epoch": 0.0026017468871956887, "grad_norm": 0.40917372703552246, "learning_rate": 0.00014, "loss": 1.239, "step": 14 }, { "epoch": 0.0027875859505668093, "grad_norm": 0.43013089895248413, "learning_rate": 0.00015000000000000001, "loss": 1.4529, "step": 15 }, { "epoch": 0.00297342501393793, "grad_norm": 0.4031105935573578, "learning_rate": 0.00016, "loss": 1.2565, "step": 16 }, { "epoch": 0.0031592640773090504, "grad_norm": 2.158144474029541, "learning_rate": 0.00017, "loss": 4.981, "step": 17 }, { "epoch": 0.003345103140680171, "grad_norm": 0.4983663558959961, "learning_rate": 0.00018, "loss": 1.2107, "step": 18 }, { "epoch": 0.0035309422040512915, "grad_norm": 0.7070329189300537, "learning_rate": 0.00019, "loss": 1.1058, "step": 19 }, { "epoch": 0.003716781267422412, "grad_norm": 0.36021122336387634, "learning_rate": 0.0002, "loss": 1.0866, "step": 20 }, { "epoch": 0.0039026203307935326, "grad_norm": 0.9074283242225647, "learning_rate": 0.00019999999572339194, "loss": 1.0985, "step": 21 }, { "epoch": 0.004088459394164653, "grad_norm": 0.4314013719558716, "learning_rate": 0.00019999998289356812, "loss": 0.9917, "step": 22 }, { "epoch": 0.004274298457535774, "grad_norm": 0.4853290319442749, "learning_rate": 0.0001999999615105296, "loss": 1.1093, "step": 23 }, { "epoch": 0.004460137520906894, "grad_norm": 0.4557550847530365, "learning_rate": 0.0001999999315742783, "loss": 1.3921, "step": 24 }, { "epoch": 0.004645976584278015, "grad_norm": 0.4223005771636963, "learning_rate": 0.00019999989308481668, "loss": 1.1424, "step": 25 }, { "epoch": 0.004831815647649135, "grad_norm": 0.6201490759849548, "learning_rate": 0.00019999984604214812, "loss": 1.4515, "step": 26 }, { "epoch": 0.005017654711020256, "grad_norm": 0.5055346488952637, "learning_rate": 0.00019999979044627657, "loss": 0.9974, "step": 27 }, { "epoch": 0.005203493774391377, "grad_norm": 0.4199519753456116, "learning_rate": 0.0001999997262972068, "loss": 1.3593, "step": 28 }, { "epoch": 
0.0053893328377624975, "grad_norm": 0.42003971338272095, "learning_rate": 0.00019999965359494435, "loss": 1.1522, "step": 29 }, { "epoch": 0.0055751719011336185, "grad_norm": 0.4767492413520813, "learning_rate": 0.00019999957233949534, "loss": 1.1272, "step": 30 }, { "epoch": 0.005761010964504739, "grad_norm": 0.5115839242935181, "learning_rate": 0.00019999948253086685, "loss": 1.1056, "step": 31 }, { "epoch": 0.00594685002787586, "grad_norm": 0.48334285616874695, "learning_rate": 0.00019999938416906645, "loss": 1.0248, "step": 32 }, { "epoch": 0.00613268909124698, "grad_norm": 0.3692367672920227, "learning_rate": 0.00019999927725410262, "loss": 1.0031, "step": 33 }, { "epoch": 0.006318528154618101, "grad_norm": 0.38361677527427673, "learning_rate": 0.00019999916178598445, "loss": 1.221, "step": 34 }, { "epoch": 0.006504367217989221, "grad_norm": 0.34514594078063965, "learning_rate": 0.00019999903776472187, "loss": 1.1274, "step": 35 }, { "epoch": 0.006690206281360342, "grad_norm": 0.4714662730693817, "learning_rate": 0.00019999890519032545, "loss": 1.0562, "step": 36 }, { "epoch": 0.006876045344731463, "grad_norm": 0.47009018063545227, "learning_rate": 0.00019999876406280655, "loss": 1.2228, "step": 37 }, { "epoch": 0.007061884408102583, "grad_norm": 0.4776786267757416, "learning_rate": 0.00019999861438217722, "loss": 1.2817, "step": 38 }, { "epoch": 0.007247723471473704, "grad_norm": 0.42615750432014465, "learning_rate": 0.0001999984561484503, "loss": 1.2395, "step": 39 }, { "epoch": 0.007433562534844824, "grad_norm": 0.39289531111717224, "learning_rate": 0.00019999828936163928, "loss": 1.245, "step": 40 }, { "epoch": 0.007619401598215945, "grad_norm": 0.4392164349555969, "learning_rate": 0.00019999811402175848, "loss": 1.1604, "step": 41 }, { "epoch": 0.007805240661587065, "grad_norm": 0.7305529713630676, "learning_rate": 0.00019999793012882283, "loss": 1.2129, "step": 42 }, { "epoch": 0.007991079724958185, "grad_norm": 0.4402279257774353, "learning_rate": 
0.0001999977376828481, "loss": 0.9151, "step": 43 }, { "epoch": 0.008176918788329306, "grad_norm": 0.5128088593482971, "learning_rate": 0.00019999753668385072, "loss": 1.3266, "step": 44 }, { "epoch": 0.008362757851700427, "grad_norm": 0.40549737215042114, "learning_rate": 0.00019999732713184797, "loss": 1.2442, "step": 45 }, { "epoch": 0.008548596915071548, "grad_norm": 0.5504630208015442, "learning_rate": 0.00019999710902685765, "loss": 1.36, "step": 46 }, { "epoch": 0.00873443597844267, "grad_norm": 0.4083457589149475, "learning_rate": 0.00019999688236889846, "loss": 1.1267, "step": 47 }, { "epoch": 0.008920275041813789, "grad_norm": 0.467245489358902, "learning_rate": 0.00019999664715798985, "loss": 1.0897, "step": 48 }, { "epoch": 0.00910611410518491, "grad_norm": 0.42215853929519653, "learning_rate": 0.00019999640339415188, "loss": 1.0611, "step": 49 }, { "epoch": 0.00929195316855603, "grad_norm": 0.48484039306640625, "learning_rate": 0.00019999615107740536, "loss": 1.015, "step": 50 }, { "epoch": 0.009477792231927152, "grad_norm": 0.4362781047821045, "learning_rate": 0.00019999589020777196, "loss": 1.0966, "step": 51 }, { "epoch": 0.00966363129529827, "grad_norm": 0.42648428678512573, "learning_rate": 0.00019999562078527395, "loss": 1.0121, "step": 52 }, { "epoch": 0.009849470358669392, "grad_norm": 0.4830460548400879, "learning_rate": 0.00019999534280993437, "loss": 1.0878, "step": 53 }, { "epoch": 0.010035309422040513, "grad_norm": 0.4462554156780243, "learning_rate": 0.000199995056281777, "loss": 1.324, "step": 54 }, { "epoch": 0.010221148485411634, "grad_norm": 0.3730710744857788, "learning_rate": 0.00019999476120082635, "loss": 0.9527, "step": 55 }, { "epoch": 0.010406987548782755, "grad_norm": 0.5048691034317017, "learning_rate": 0.00019999445756710765, "loss": 1.28, "step": 56 }, { "epoch": 0.010592826612153874, "grad_norm": 0.40299585461616516, "learning_rate": 0.00019999414538064686, "loss": 1.1119, "step": 57 }, { "epoch": 0.010778665675524995, 
"grad_norm": 0.4745088219642639, "learning_rate": 0.0001999938246414707, "loss": 1.029, "step": 58 }, { "epoch": 0.010964504738896116, "grad_norm": 0.4021121859550476, "learning_rate": 0.00019999349534960666, "loss": 0.9435, "step": 59 }, { "epoch": 0.011150343802267237, "grad_norm": 0.4799672067165375, "learning_rate": 0.0001999931575050828, "loss": 1.3012, "step": 60 }, { "epoch": 0.011336182865638356, "grad_norm": 0.4614189565181732, "learning_rate": 0.00019999281110792807, "loss": 1.1238, "step": 61 }, { "epoch": 0.011522021929009477, "grad_norm": 0.36898913979530334, "learning_rate": 0.00019999245615817213, "loss": 1.1332, "step": 62 }, { "epoch": 0.011707860992380598, "grad_norm": 0.4333168864250183, "learning_rate": 0.00019999209265584526, "loss": 1.1219, "step": 63 }, { "epoch": 0.01189370005575172, "grad_norm": 0.4542793333530426, "learning_rate": 0.0001999917206009786, "loss": 1.1224, "step": 64 }, { "epoch": 0.01207953911912284, "grad_norm": 0.41229432821273804, "learning_rate": 0.00019999133999360397, "loss": 1.2116, "step": 65 }, { "epoch": 0.01226537818249396, "grad_norm": 0.5130184888839722, "learning_rate": 0.00019999095083375395, "loss": 1.0225, "step": 66 }, { "epoch": 0.01245121724586508, "grad_norm": 0.45014601945877075, "learning_rate": 0.00019999055312146176, "loss": 1.2452, "step": 67 }, { "epoch": 0.012637056309236202, "grad_norm": 0.5515113472938538, "learning_rate": 0.00019999014685676144, "loss": 1.2329, "step": 68 }, { "epoch": 0.012822895372607323, "grad_norm": 0.43442612886428833, "learning_rate": 0.0001999897320396878, "loss": 0.951, "step": 69 }, { "epoch": 0.013008734435978442, "grad_norm": 0.46794602274894714, "learning_rate": 0.00019998930867027624, "loss": 1.155, "step": 70 }, { "epoch": 0.013194573499349563, "grad_norm": 0.45471006631851196, "learning_rate": 0.000199988876748563, "loss": 0.9123, "step": 71 }, { "epoch": 0.013380412562720684, "grad_norm": 0.3680677115917206, "learning_rate": 0.00019998843627458505, "loss": 
1.1987, "step": 72 }, { "epoch": 0.013566251626091805, "grad_norm": 0.5155819654464722, "learning_rate": 0.00019998798724838005, "loss": 1.1141, "step": 73 }, { "epoch": 0.013752090689462926, "grad_norm": 0.40624114871025085, "learning_rate": 0.0001999875296699864, "loss": 1.2618, "step": 74 }, { "epoch": 0.013937929752834045, "grad_norm": 0.437930166721344, "learning_rate": 0.00019998706353944318, "loss": 1.0078, "step": 75 }, { "epoch": 0.014123768816205166, "grad_norm": 0.710334837436676, "learning_rate": 0.00019998658885679035, "loss": 1.4127, "step": 76 }, { "epoch": 0.014309607879576287, "grad_norm": 0.5053734183311462, "learning_rate": 0.00019998610562206847, "loss": 1.0414, "step": 77 }, { "epoch": 0.014495446942947408, "grad_norm": 0.5894109606742859, "learning_rate": 0.00019998561383531888, "loss": 1.3005, "step": 78 }, { "epoch": 0.014681286006318527, "grad_norm": 0.39885780215263367, "learning_rate": 0.00019998511349658368, "loss": 1.0932, "step": 79 }, { "epoch": 0.014867125069689648, "grad_norm": 0.4869062006473541, "learning_rate": 0.00019998460460590554, "loss": 1.1018, "step": 80 }, { "epoch": 0.01505296413306077, "grad_norm": 0.3895723521709442, "learning_rate": 0.00019998408716332815, "loss": 1.1326, "step": 81 }, { "epoch": 0.01523880319643189, "grad_norm": 0.49565768241882324, "learning_rate": 0.00019998356116889568, "loss": 1.2525, "step": 82 }, { "epoch": 0.015424642259803011, "grad_norm": 0.46484237909317017, "learning_rate": 0.00019998302662265312, "loss": 1.1684, "step": 83 }, { "epoch": 0.01561048132317413, "grad_norm": 0.4285367727279663, "learning_rate": 0.00019998248352464618, "loss": 1.2158, "step": 84 }, { "epoch": 0.015796320386545253, "grad_norm": 0.4140075743198395, "learning_rate": 0.00019998193187492137, "loss": 1.1595, "step": 85 }, { "epoch": 0.01598215944991637, "grad_norm": 0.4073302447795868, "learning_rate": 0.0001999813716735258, "loss": 0.9106, "step": 86 }, { "epoch": 0.016167998513287492, "grad_norm": 
0.47974786162376404, "learning_rate": 0.00019998080292050744, "loss": 1.0553, "step": 87 }, { "epoch": 0.016353837576658613, "grad_norm": 0.47133541107177734, "learning_rate": 0.00019998022561591494, "loss": 1.1256, "step": 88 }, { "epoch": 0.016539676640029734, "grad_norm": 0.45809057354927063, "learning_rate": 0.00019997963975979763, "loss": 1.0548, "step": 89 }, { "epoch": 0.016725515703400855, "grad_norm": 0.358278751373291, "learning_rate": 0.00019997904535220564, "loss": 0.8885, "step": 90 }, { "epoch": 0.016911354766771976, "grad_norm": 0.4621461033821106, "learning_rate": 0.00019997844239318983, "loss": 0.9386, "step": 91 }, { "epoch": 0.017097193830143097, "grad_norm": 0.5164687037467957, "learning_rate": 0.00019997783088280177, "loss": 1.1284, "step": 92 }, { "epoch": 0.017283032893514218, "grad_norm": 0.4301346242427826, "learning_rate": 0.00019997721082109376, "loss": 1.1444, "step": 93 }, { "epoch": 0.01746887195688534, "grad_norm": 0.5308482646942139, "learning_rate": 0.0001999765822081188, "loss": 1.1924, "step": 94 }, { "epoch": 0.017654711020256456, "grad_norm": 0.45287665724754333, "learning_rate": 0.0001999759450439307, "loss": 1.1199, "step": 95 }, { "epoch": 0.017840550083627577, "grad_norm": 0.42649218440055847, "learning_rate": 0.00019997529932858394, "loss": 1.3226, "step": 96 }, { "epoch": 0.018026389146998698, "grad_norm": 0.4206051826477051, "learning_rate": 0.00019997464506213376, "loss": 0.9306, "step": 97 }, { "epoch": 0.01821222821036982, "grad_norm": 0.4551459848880768, "learning_rate": 0.00019997398224463612, "loss": 1.1994, "step": 98 }, { "epoch": 0.01839806727374094, "grad_norm": 0.44016626477241516, "learning_rate": 0.0001999733108761477, "loss": 1.0889, "step": 99 }, { "epoch": 0.01858390633711206, "grad_norm": 0.405900776386261, "learning_rate": 0.00019997263095672594, "loss": 0.8563, "step": 100 }, { "epoch": 0.018769745400483182, "grad_norm": 0.4155542254447937, "learning_rate": 0.00019997194248642898, "loss": 1.1767, 
"step": 101 }, { "epoch": 0.018955584463854303, "grad_norm": 0.4083574116230011, "learning_rate": 0.0001999712454653157, "loss": 1.1509, "step": 102 }, { "epoch": 0.019141423527225424, "grad_norm": 2.3378918170928955, "learning_rate": 0.00019997053989344573, "loss": 4.8463, "step": 103 }, { "epoch": 0.01932726259059654, "grad_norm": 0.421448290348053, "learning_rate": 0.00019996982577087945, "loss": 1.1293, "step": 104 }, { "epoch": 0.019513101653967663, "grad_norm": 0.40957626700401306, "learning_rate": 0.00019996910309767784, "loss": 1.2171, "step": 105 }, { "epoch": 0.019698940717338784, "grad_norm": 0.4505980312824249, "learning_rate": 0.00019996837187390284, "loss": 0.9156, "step": 106 }, { "epoch": 0.019884779780709905, "grad_norm": 0.39445918798446655, "learning_rate": 0.0001999676320996169, "loss": 1.1852, "step": 107 }, { "epoch": 0.020070618844081026, "grad_norm": 0.41280612349510193, "learning_rate": 0.00019996688377488334, "loss": 1.0868, "step": 108 }, { "epoch": 0.020256457907452147, "grad_norm": 0.3600105047225952, "learning_rate": 0.00019996612689976618, "loss": 1.0886, "step": 109 }, { "epoch": 0.020442296970823268, "grad_norm": 0.44562339782714844, "learning_rate": 0.00019996536147433012, "loss": 1.1261, "step": 110 }, { "epoch": 0.02062813603419439, "grad_norm": 0.5070035457611084, "learning_rate": 0.00019996458749864062, "loss": 1.3632, "step": 111 }, { "epoch": 0.02081397509756551, "grad_norm": 0.4161199927330017, "learning_rate": 0.0001999638049727639, "loss": 0.9583, "step": 112 }, { "epoch": 0.020999814160936627, "grad_norm": 0.32975199818611145, "learning_rate": 0.00019996301389676687, "loss": 0.907, "step": 113 }, { "epoch": 0.021185653224307748, "grad_norm": 0.4182499945163727, "learning_rate": 0.00019996221427071725, "loss": 1.3569, "step": 114 }, { "epoch": 0.02137149228767887, "grad_norm": 0.5010649561882019, "learning_rate": 0.00019996140609468343, "loss": 1.0246, "step": 115 }, { "epoch": 0.02155733135104999, "grad_norm": 
0.40202227234840393, "learning_rate": 0.00019996058936873443, "loss": 0.924, "step": 116 }, { "epoch": 0.02174317041442111, "grad_norm": 0.4302928149700165, "learning_rate": 0.00019995976409294022, "loss": 1.2835, "step": 117 }, { "epoch": 0.021929009477792232, "grad_norm": 0.4092571437358856, "learning_rate": 0.00019995893026737133, "loss": 1.2428, "step": 118 }, { "epoch": 0.022114848541163353, "grad_norm": 0.5921717882156372, "learning_rate": 0.0001999580878920991, "loss": 1.2466, "step": 119 }, { "epoch": 0.022300687604534474, "grad_norm": 0.4174133837223053, "learning_rate": 0.00019995723696719555, "loss": 1.0471, "step": 120 }, { "epoch": 0.022486526667905595, "grad_norm": 0.40855398774147034, "learning_rate": 0.00019995637749273352, "loss": 0.9747, "step": 121 }, { "epoch": 0.022672365731276713, "grad_norm": 0.43846818804740906, "learning_rate": 0.00019995550946878647, "loss": 1.3309, "step": 122 }, { "epoch": 0.022858204794647834, "grad_norm": 0.38275861740112305, "learning_rate": 0.00019995463289542868, "loss": 1.1033, "step": 123 }, { "epoch": 0.023044043858018955, "grad_norm": 0.4742163419723511, "learning_rate": 0.0001999537477727351, "loss": 1.145, "step": 124 }, { "epoch": 0.023229882921390076, "grad_norm": 0.4117315113544464, "learning_rate": 0.00019995285410078143, "loss": 1.021, "step": 125 }, { "epoch": 0.023415721984761197, "grad_norm": 0.4802894592285156, "learning_rate": 0.00019995195187964414, "loss": 1.2366, "step": 126 }, { "epoch": 0.023601561048132318, "grad_norm": 0.4284798502922058, "learning_rate": 0.00019995104110940036, "loss": 1.0542, "step": 127 }, { "epoch": 0.02378740011150344, "grad_norm": 0.48981085419654846, "learning_rate": 0.00019995012179012807, "loss": 1.2871, "step": 128 }, { "epoch": 0.02397323917487456, "grad_norm": 0.4514794647693634, "learning_rate": 0.00019994919392190576, "loss": 1.3027, "step": 129 }, { "epoch": 0.02415907823824568, "grad_norm": 0.4130529463291168, "learning_rate": 0.00019994825750481291, "loss": 
1.1686, "step": 130 }, { "epoch": 0.024344917301616798, "grad_norm": 0.5316373109817505, "learning_rate": 0.0001999473125389296, "loss": 1.3746, "step": 131 }, { "epoch": 0.02453075636498792, "grad_norm": 0.49608317017555237, "learning_rate": 0.00019994635902433663, "loss": 1.2536, "step": 132 }, { "epoch": 0.02471659542835904, "grad_norm": 0.38635924458503723, "learning_rate": 0.00019994539696111555, "loss": 1.2128, "step": 133 }, { "epoch": 0.02490243449173016, "grad_norm": 0.37796691060066223, "learning_rate": 0.00019994442634934867, "loss": 1.2026, "step": 134 }, { "epoch": 0.025088273555101282, "grad_norm": 0.4202409088611603, "learning_rate": 0.00019994344718911897, "loss": 1.2832, "step": 135 }, { "epoch": 0.025274112618472403, "grad_norm": 0.4410860538482666, "learning_rate": 0.00019994245948051024, "loss": 1.1883, "step": 136 }, { "epoch": 0.025459951681843524, "grad_norm": 1.8298826217651367, "learning_rate": 0.00019994146322360694, "loss": 4.4886, "step": 137 }, { "epoch": 0.025645790745214645, "grad_norm": 0.4328976273536682, "learning_rate": 0.00019994045841849429, "loss": 1.1451, "step": 138 }, { "epoch": 0.025831629808585766, "grad_norm": 0.3915058374404907, "learning_rate": 0.00019993944506525825, "loss": 0.9214, "step": 139 }, { "epoch": 0.026017468871956884, "grad_norm": 0.44726213812828064, "learning_rate": 0.00019993842316398544, "loss": 1.1946, "step": 140 }, { "epoch": 0.026203307935328005, "grad_norm": 0.47718754410743713, "learning_rate": 0.00019993739271476332, "loss": 1.1276, "step": 141 }, { "epoch": 0.026389146998699126, "grad_norm": 0.3767591118812561, "learning_rate": 0.00019993635371768002, "loss": 0.9029, "step": 142 }, { "epoch": 0.026574986062070247, "grad_norm": 0.3988798260688782, "learning_rate": 0.00019993530617282436, "loss": 1.0303, "step": 143 }, { "epoch": 0.026760825125441368, "grad_norm": 0.3820064067840576, "learning_rate": 0.00019993425008028602, "loss": 1.116, "step": 144 }, { "epoch": 0.02694666418881249, "grad_norm": 
0.4157227873802185, "learning_rate": 0.00019993318544015527, "loss": 1.0564, "step": 145 }, { "epoch": 0.02713250325218361, "grad_norm": 0.4232051372528076, "learning_rate": 0.00019993211225252316, "loss": 1.2417, "step": 146 }, { "epoch": 0.02731834231555473, "grad_norm": 0.47376081347465515, "learning_rate": 0.00019993103051748153, "loss": 1.0504, "step": 147 }, { "epoch": 0.02750418137892585, "grad_norm": 0.4919112026691437, "learning_rate": 0.00019992994023512292, "loss": 1.3752, "step": 148 }, { "epoch": 0.02769002044229697, "grad_norm": 0.5118665099143982, "learning_rate": 0.00019992884140554047, "loss": 1.0661, "step": 149 }, { "epoch": 0.02787585950566809, "grad_norm": 0.44284725189208984, "learning_rate": 0.00019992773402882828, "loss": 1.233, "step": 150 }, { "epoch": 0.02806169856903921, "grad_norm": 0.4117078483104706, "learning_rate": 0.000199926618105081, "loss": 1.169, "step": 151 }, { "epoch": 0.028247537632410332, "grad_norm": 0.549756646156311, "learning_rate": 0.00019992549363439412, "loss": 1.2191, "step": 152 }, { "epoch": 0.028433376695781453, "grad_norm": 0.39329949021339417, "learning_rate": 0.00019992436061686382, "loss": 1.2276, "step": 153 }, { "epoch": 0.028619215759152574, "grad_norm": 0.4098329246044159, "learning_rate": 0.00019992321905258699, "loss": 1.1029, "step": 154 }, { "epoch": 0.028805054822523695, "grad_norm": 0.5076633095741272, "learning_rate": 0.00019992206894166125, "loss": 1.1196, "step": 155 }, { "epoch": 0.028990893885894816, "grad_norm": 0.41998520493507385, "learning_rate": 0.000199920910284185, "loss": 1.1913, "step": 156 }, { "epoch": 0.029176732949265937, "grad_norm": 0.4319917857646942, "learning_rate": 0.00019991974308025733, "loss": 1.1782, "step": 157 }, { "epoch": 0.029362572012637055, "grad_norm": 0.6997054815292358, "learning_rate": 0.00019991856732997807, "loss": 1.0852, "step": 158 }, { "epoch": 0.029548411076008176, "grad_norm": 0.48971933126449585, "learning_rate": 0.00019991738303344782, "loss": 
1.1819, "step": 159 }, { "epoch": 0.029734250139379297, "grad_norm": 0.5128749012947083, "learning_rate": 0.00019991619019076783, "loss": 1.3655, "step": 160 }, { "epoch": 0.029920089202750418, "grad_norm": 0.41675087809562683, "learning_rate": 0.00019991498880204018, "loss": 1.0505, "step": 161 }, { "epoch": 0.03010592826612154, "grad_norm": 0.48276373744010925, "learning_rate": 0.00019991377886736754, "loss": 1.2697, "step": 162 }, { "epoch": 0.03029176732949266, "grad_norm": 0.4491844177246094, "learning_rate": 0.00019991256038685352, "loss": 1.2506, "step": 163 }, { "epoch": 0.03047760639286378, "grad_norm": 0.4582407772541046, "learning_rate": 0.0001999113333606022, "loss": 1.2433, "step": 164 }, { "epoch": 0.0306634454562349, "grad_norm": 0.4408119320869446, "learning_rate": 0.00019991009778871864, "loss": 1.1231, "step": 165 }, { "epoch": 0.030849284519606023, "grad_norm": 0.4083813726902008, "learning_rate": 0.00019990885367130847, "loss": 1.0707, "step": 166 }, { "epoch": 0.031035123582977144, "grad_norm": 0.5064361095428467, "learning_rate": 0.00019990760100847813, "loss": 1.0206, "step": 167 }, { "epoch": 0.03122096264634826, "grad_norm": 0.41701388359069824, "learning_rate": 0.00019990633980033473, "loss": 1.2338, "step": 168 }, { "epoch": 0.031406801709719386, "grad_norm": 0.457020103931427, "learning_rate": 0.00019990507004698615, "loss": 1.2124, "step": 169 }, { "epoch": 0.031592640773090507, "grad_norm": 0.45959389209747314, "learning_rate": 0.00019990379174854102, "loss": 1.1267, "step": 170 }, { "epoch": 0.03177847983646163, "grad_norm": 0.45844098925590515, "learning_rate": 0.00019990250490510867, "loss": 1.1652, "step": 171 }, { "epoch": 0.03196431889983274, "grad_norm": 0.5090590119361877, "learning_rate": 0.00019990120951679914, "loss": 1.0736, "step": 172 }, { "epoch": 0.03215015796320386, "grad_norm": 0.4741423726081848, "learning_rate": 0.00019989990558372326, "loss": 1.1206, "step": 173 }, { "epoch": 0.032335997026574984, "grad_norm": 
0.4410255253314972, "learning_rate": 0.00019989859310599254, "loss": 1.2636, "step": 174 }, { "epoch": 0.032521836089946105, "grad_norm": 0.4406038522720337, "learning_rate": 0.00019989727208371926, "loss": 0.987, "step": 175 }, { "epoch": 0.032707675153317226, "grad_norm": 0.4072116017341614, "learning_rate": 0.00019989594251701635, "loss": 1.1572, "step": 176 }, { "epoch": 0.032893514216688347, "grad_norm": 0.4296364486217499, "learning_rate": 0.00019989460440599758, "loss": 1.046, "step": 177 }, { "epoch": 0.03307935328005947, "grad_norm": 0.4294045865535736, "learning_rate": 0.00019989325775077743, "loss": 0.9476, "step": 178 }, { "epoch": 0.03326519234343059, "grad_norm": 0.5257508158683777, "learning_rate": 0.00019989190255147103, "loss": 1.0242, "step": 179 }, { "epoch": 0.03345103140680171, "grad_norm": 0.36463358998298645, "learning_rate": 0.00019989053880819428, "loss": 0.7214, "step": 180 }, { "epoch": 0.03363687047017283, "grad_norm": 0.49668338894844055, "learning_rate": 0.00019988916652106392, "loss": 1.2282, "step": 181 }, { "epoch": 0.03382270953354395, "grad_norm": 0.47722092270851135, "learning_rate": 0.0001998877856901972, "loss": 1.2502, "step": 182 }, { "epoch": 0.03400854859691507, "grad_norm": 0.40731075406074524, "learning_rate": 0.00019988639631571232, "loss": 1.063, "step": 183 }, { "epoch": 0.034194387660286193, "grad_norm": 0.45999324321746826, "learning_rate": 0.00019988499839772804, "loss": 0.9978, "step": 184 }, { "epoch": 0.034380226723657314, "grad_norm": 0.4087058901786804, "learning_rate": 0.00019988359193636398, "loss": 1.231, "step": 185 }, { "epoch": 0.034566065787028435, "grad_norm": 0.3663274645805359, "learning_rate": 0.00019988217693174044, "loss": 0.8827, "step": 186 }, { "epoch": 0.034751904850399556, "grad_norm": 0.6143786907196045, "learning_rate": 0.00019988075338397843, "loss": 1.1063, "step": 187 }, { "epoch": 0.03493774391377068, "grad_norm": 0.36634212732315063, "learning_rate": 0.0001998793212931997, "loss": 
1.1026, "step": 188 }, { "epoch": 0.0351235829771418, "grad_norm": 0.48718002438545227, "learning_rate": 0.00019987788065952677, "loss": 1.1274, "step": 189 }, { "epoch": 0.03530942204051291, "grad_norm": 0.4710601270198822, "learning_rate": 0.00019987643148308286, "loss": 1.2466, "step": 190 }, { "epoch": 0.035495261103884033, "grad_norm": 0.4751073718070984, "learning_rate": 0.00019987497376399186, "loss": 1.1662, "step": 191 }, { "epoch": 0.035681100167255154, "grad_norm": 0.35801300406455994, "learning_rate": 0.00019987350750237852, "loss": 0.926, "step": 192 }, { "epoch": 0.035866939230626275, "grad_norm": 0.40735355019569397, "learning_rate": 0.00019987203269836824, "loss": 1.2028, "step": 193 }, { "epoch": 0.036052778293997396, "grad_norm": 0.3875338137149811, "learning_rate": 0.00019987054935208712, "loss": 1.0547, "step": 194 }, { "epoch": 0.03623861735736852, "grad_norm": 0.3859501779079437, "learning_rate": 0.0001998690574636621, "loss": 0.9658, "step": 195 }, { "epoch": 0.03642445642073964, "grad_norm": 0.47865334153175354, "learning_rate": 0.0001998675570332207, "loss": 1.1934, "step": 196 }, { "epoch": 0.03661029548411076, "grad_norm": 0.5451048016548157, "learning_rate": 0.00019986604806089133, "loss": 1.1744, "step": 197 }, { "epoch": 0.03679613454748188, "grad_norm": 0.40316689014434814, "learning_rate": 0.00019986453054680306, "loss": 1.0095, "step": 198 }, { "epoch": 0.036981973610853, "grad_norm": 0.3943663537502289, "learning_rate": 0.00019986300449108564, "loss": 1.1106, "step": 199 }, { "epoch": 0.03716781267422412, "grad_norm": 0.4198141396045685, "learning_rate": 0.00019986146989386957, "loss": 1.041, "step": 200 }, { "epoch": 0.03735365173759524, "grad_norm": 0.3879221975803375, "learning_rate": 0.0001998599267552862, "loss": 0.948, "step": 201 }, { "epoch": 0.037539490800966364, "grad_norm": 0.39751577377319336, "learning_rate": 0.00019985837507546745, "loss": 1.0038, "step": 202 }, { "epoch": 0.037725329864337485, "grad_norm": 
0.37878310680389404, "learning_rate": 0.00019985681485454605, "loss": 1.1636, "step": 203 }, { "epoch": 0.037911168927708606, "grad_norm": 0.48006248474121094, "learning_rate": 0.00019985524609265548, "loss": 1.2288, "step": 204 }, { "epoch": 0.03809700799107973, "grad_norm": 0.40840205550193787, "learning_rate": 0.0001998536687899299, "loss": 1.1478, "step": 205 }, { "epoch": 0.03828284705445085, "grad_norm": 0.500511884689331, "learning_rate": 0.00019985208294650422, "loss": 1.0763, "step": 206 }, { "epoch": 0.03846868611782197, "grad_norm": 0.45608195662498474, "learning_rate": 0.00019985048856251405, "loss": 1.2623, "step": 207 }, { "epoch": 0.03865452518119308, "grad_norm": 0.4747142195701599, "learning_rate": 0.0001998488856380958, "loss": 1.2437, "step": 208 }, { "epoch": 0.038840364244564204, "grad_norm": 0.5317223072052002, "learning_rate": 0.00019984727417338656, "loss": 1.114, "step": 209 }, { "epoch": 0.039026203307935325, "grad_norm": 0.4821191728115082, "learning_rate": 0.00019984565416852415, "loss": 1.2248, "step": 210 }, { "epoch": 0.039212042371306446, "grad_norm": 0.42245104908943176, "learning_rate": 0.00019984402562364716, "loss": 1.2469, "step": 211 }, { "epoch": 0.03939788143467757, "grad_norm": 1.796623945236206, "learning_rate": 0.00019984238853889484, "loss": 4.2855, "step": 212 }, { "epoch": 0.03958372049804869, "grad_norm": 0.44016581773757935, "learning_rate": 0.00019984074291440728, "loss": 1.066, "step": 213 }, { "epoch": 0.03976955956141981, "grad_norm": 0.4538607597351074, "learning_rate": 0.00019983908875032515, "loss": 1.3343, "step": 214 }, { "epoch": 0.03995539862479093, "grad_norm": 0.4110296368598938, "learning_rate": 0.00019983742604679002, "loss": 1.2131, "step": 215 }, { "epoch": 0.04014123768816205, "grad_norm": 0.606137216091156, "learning_rate": 0.00019983575480394402, "loss": 0.9551, "step": 216 }, { "epoch": 0.04032707675153317, "grad_norm": 0.4060904085636139, "learning_rate": 0.00019983407502193015, "loss": 1.0849, 
"step": 217 }, { "epoch": 0.04051291581490429, "grad_norm": 0.41577842831611633, "learning_rate": 0.00019983238670089214, "loss": 1.1291, "step": 218 }, { "epoch": 0.040698754878275414, "grad_norm": 0.43058526515960693, "learning_rate": 0.00019983068984097426, "loss": 1.2347, "step": 219 }, { "epoch": 0.040884593941646535, "grad_norm": 0.3922870457172394, "learning_rate": 0.00019982898444232173, "loss": 1.0996, "step": 220 }, { "epoch": 0.041070433005017656, "grad_norm": 0.49691110849380493, "learning_rate": 0.00019982727050508038, "loss": 1.1778, "step": 221 }, { "epoch": 0.04125627206838878, "grad_norm": 0.45880937576293945, "learning_rate": 0.00019982554802939687, "loss": 1.1532, "step": 222 }, { "epoch": 0.0414421111317599, "grad_norm": 0.49630632996559143, "learning_rate": 0.00019982381701541847, "loss": 1.1742, "step": 223 }, { "epoch": 0.04162795019513102, "grad_norm": 0.3757645785808563, "learning_rate": 0.00019982207746329324, "loss": 0.8219, "step": 224 }, { "epoch": 0.04181378925850214, "grad_norm": 0.423665851354599, "learning_rate": 0.00019982032937316998, "loss": 0.9377, "step": 225 }, { "epoch": 0.041999628321873254, "grad_norm": 0.5444109439849854, "learning_rate": 0.0001998185727451982, "loss": 1.02, "step": 226 }, { "epoch": 0.042185467385244375, "grad_norm": 0.4525771141052246, "learning_rate": 0.00019981680757952822, "loss": 1.2185, "step": 227 }, { "epoch": 0.042371306448615496, "grad_norm": 0.4341532289981842, "learning_rate": 0.0001998150338763109, "loss": 1.0239, "step": 228 }, { "epoch": 0.04255714551198662, "grad_norm": 0.4187489151954651, "learning_rate": 0.00019981325163569803, "loss": 0.9313, "step": 229 }, { "epoch": 0.04274298457535774, "grad_norm": 0.38820531964302063, "learning_rate": 0.000199811460857842, "loss": 0.9375, "step": 230 }, { "epoch": 0.04292882363872886, "grad_norm": 0.36113470792770386, "learning_rate": 0.00019980966154289602, "loss": 1.0695, "step": 231 }, { "epoch": 0.04311466270209998, "grad_norm": 
4.252074241638184, "learning_rate": 0.00019980785369101396, "loss": 4.6367, "step": 232 }, { "epoch": 0.0433005017654711, "grad_norm": 0.43251553177833557, "learning_rate": 0.00019980603730235044, "loss": 1.0693, "step": 233 }, { "epoch": 0.04348634082884222, "grad_norm": 0.47350192070007324, "learning_rate": 0.00019980421237706088, "loss": 1.2545, "step": 234 }, { "epoch": 0.04367217989221334, "grad_norm": 0.3853001296520233, "learning_rate": 0.00019980237891530133, "loss": 0.8562, "step": 235 }, { "epoch": 0.043858018955584464, "grad_norm": 0.46240493655204773, "learning_rate": 0.00019980053691722856, "loss": 1.2941, "step": 236 }, { "epoch": 0.044043858018955585, "grad_norm": 0.4637151062488556, "learning_rate": 0.00019979868638300021, "loss": 1.223, "step": 237 }, { "epoch": 0.044229697082326706, "grad_norm": 0.40852031111717224, "learning_rate": 0.00019979682731277452, "loss": 0.8145, "step": 238 }, { "epoch": 0.04441553614569783, "grad_norm": 0.4328407943248749, "learning_rate": 0.0001997949597067105, "loss": 1.1434, "step": 239 }, { "epoch": 0.04460137520906895, "grad_norm": 0.42404770851135254, "learning_rate": 0.00019979308356496786, "loss": 0.9842, "step": 240 }, { "epoch": 0.04478721427244007, "grad_norm": 0.40816017985343933, "learning_rate": 0.00019979119888770714, "loss": 1.0351, "step": 241 }, { "epoch": 0.04497305333581119, "grad_norm": 0.44897130131721497, "learning_rate": 0.00019978930567508948, "loss": 1.0483, "step": 242 }, { "epoch": 0.04515889239918231, "grad_norm": 0.40088385343551636, "learning_rate": 0.00019978740392727687, "loss": 0.9936, "step": 243 }, { "epoch": 0.045344731462553425, "grad_norm": 0.4536771774291992, "learning_rate": 0.00019978549364443188, "loss": 1.2624, "step": 244 }, { "epoch": 0.045530570525924546, "grad_norm": 0.4574795067310333, "learning_rate": 0.00019978357482671797, "loss": 1.1316, "step": 245 }, { "epoch": 0.04571640958929567, "grad_norm": 0.41690966486930847, "learning_rate": 0.00019978164747429925, "loss": 
1.1572, "step": 246 }, { "epoch": 0.04590224865266679, "grad_norm": 0.4959973692893982, "learning_rate": 0.0001997797115873406, "loss": 1.2667, "step": 247 }, { "epoch": 0.04608808771603791, "grad_norm": 0.4050540626049042, "learning_rate": 0.0001997777671660075, "loss": 1.2102, "step": 248 }, { "epoch": 0.04627392677940903, "grad_norm": 0.3968220055103302, "learning_rate": 0.00019977581421046637, "loss": 1.3496, "step": 249 }, { "epoch": 0.04645976584278015, "grad_norm": 0.38303324580192566, "learning_rate": 0.0001997738527208842, "loss": 1.0241, "step": 250 }, { "epoch": 0.04664560490615127, "grad_norm": 0.36732155084609985, "learning_rate": 0.00019977188269742878, "loss": 0.9691, "step": 251 }, { "epoch": 0.04683144396952239, "grad_norm": 0.44628089666366577, "learning_rate": 0.00019976990414026857, "loss": 1.0435, "step": 252 }, { "epoch": 0.047017283032893514, "grad_norm": 0.42364564538002014, "learning_rate": 0.00019976791704957284, "loss": 1.1622, "step": 253 }, { "epoch": 0.047203122096264635, "grad_norm": 0.4465533196926117, "learning_rate": 0.00019976592142551152, "loss": 1.0775, "step": 254 }, { "epoch": 0.047388961159635756, "grad_norm": 0.41187724471092224, "learning_rate": 0.00019976391726825535, "loss": 0.907, "step": 255 }, { "epoch": 0.04757480022300688, "grad_norm": 0.41359013319015503, "learning_rate": 0.0001997619045779757, "loss": 1.2421, "step": 256 }, { "epoch": 0.047760639286378, "grad_norm": 0.5085680484771729, "learning_rate": 0.0001997598833548447, "loss": 1.2285, "step": 257 }, { "epoch": 0.04794647834974912, "grad_norm": 0.5501344799995422, "learning_rate": 0.00019975785359903535, "loss": 1.2846, "step": 258 }, { "epoch": 0.04813231741312024, "grad_norm": 0.3470396101474762, "learning_rate": 0.00019975581531072112, "loss": 0.9717, "step": 259 }, { "epoch": 0.04831815647649136, "grad_norm": 0.41229280829429626, "learning_rate": 0.00019975376849007642, "loss": 1.0211, "step": 260 }, { "epoch": 0.04850399553986248, "grad_norm": 
0.6339378356933594, "learning_rate": 0.0001997517131372763, "loss": 1.2711, "step": 261 }, { "epoch": 0.048689834603233596, "grad_norm": 0.4885227084159851, "learning_rate": 0.00019974964925249656, "loss": 1.1049, "step": 262 }, { "epoch": 0.04887567366660472, "grad_norm": 0.5992847681045532, "learning_rate": 0.00019974757683591373, "loss": 1.1126, "step": 263 }, { "epoch": 0.04906151272997584, "grad_norm": 0.4222111105918884, "learning_rate": 0.0001997454958877051, "loss": 1.2963, "step": 264 }, { "epoch": 0.04924735179334696, "grad_norm": 0.4678804576396942, "learning_rate": 0.0001997434064080486, "loss": 1.0687, "step": 265 }, { "epoch": 0.04943319085671808, "grad_norm": 0.6195970177650452, "learning_rate": 0.000199741308397123, "loss": 1.0591, "step": 266 }, { "epoch": 0.0496190299200892, "grad_norm": 2.341949224472046, "learning_rate": 0.00019973920185510768, "loss": 4.5625, "step": 267 }, { "epoch": 0.04980486898346032, "grad_norm": 0.42770177125930786, "learning_rate": 0.0001997370867821829, "loss": 0.8979, "step": 268 }, { "epoch": 0.04999070804683144, "grad_norm": 0.4548147916793823, "learning_rate": 0.00019973496317852952, "loss": 1.1994, "step": 269 }, { "epoch": 0.050176547110202564, "grad_norm": 0.521209180355072, "learning_rate": 0.00019973283104432918, "loss": 1.4071, "step": 270 }, { "epoch": 0.050362386173573685, "grad_norm": 0.49684473872184753, "learning_rate": 0.00019973069037976428, "loss": 1.2518, "step": 271 }, { "epoch": 0.050548225236944806, "grad_norm": 0.41964155435562134, "learning_rate": 0.00019972854118501782, "loss": 1.2989, "step": 272 }, { "epoch": 0.05073406430031593, "grad_norm": 0.41438028216362, "learning_rate": 0.0001997263834602737, "loss": 1.154, "step": 273 }, { "epoch": 0.05091990336368705, "grad_norm": 0.42565253376960754, "learning_rate": 0.0001997242172057165, "loss": 1.0732, "step": 274 }, { "epoch": 0.05110574242705817, "grad_norm": 0.43003037571907043, "learning_rate": 0.00019972204242153148, "loss": 1.1901, "step": 
275 }, { "epoch": 0.05129158149042929, "grad_norm": 0.38172245025634766, "learning_rate": 0.00019971985910790464, "loss": 1.0215, "step": 276 }, { "epoch": 0.05147742055380041, "grad_norm": 0.5161172151565552, "learning_rate": 0.00019971766726502267, "loss": 0.788, "step": 277 }, { "epoch": 0.05166325961717153, "grad_norm": 0.4897576570510864, "learning_rate": 0.00019971546689307316, "loss": 1.2448, "step": 278 }, { "epoch": 0.05184909868054265, "grad_norm": 0.5034477114677429, "learning_rate": 0.00019971325799224423, "loss": 1.2172, "step": 279 }, { "epoch": 0.05203493774391377, "grad_norm": 0.3733575642108917, "learning_rate": 0.00019971104056272485, "loss": 1.0159, "step": 280 }, { "epoch": 0.05222077680728489, "grad_norm": 0.5659055709838867, "learning_rate": 0.00019970881460470467, "loss": 1.0378, "step": 281 }, { "epoch": 0.05240661587065601, "grad_norm": 0.44712385535240173, "learning_rate": 0.00019970658011837404, "loss": 1.0341, "step": 282 }, { "epoch": 0.05259245493402713, "grad_norm": 0.40552452206611633, "learning_rate": 0.00019970433710392416, "loss": 1.309, "step": 283 }, { "epoch": 0.05277829399739825, "grad_norm": 2.332465410232544, "learning_rate": 0.00019970208556154684, "loss": 4.0356, "step": 284 }, { "epoch": 0.05296413306076937, "grad_norm": 0.5010231137275696, "learning_rate": 0.00019969982549143464, "loss": 1.1609, "step": 285 }, { "epoch": 0.05314997212414049, "grad_norm": 0.4107954502105713, "learning_rate": 0.00019969755689378085, "loss": 1.1437, "step": 286 }, { "epoch": 0.053335811187511614, "grad_norm": 0.40947437286376953, "learning_rate": 0.0001996952797687796, "loss": 1.319, "step": 287 }, { "epoch": 0.053521650250882735, "grad_norm": 0.41476190090179443, "learning_rate": 0.0001996929941166256, "loss": 1.0567, "step": 288 }, { "epoch": 0.053707489314253856, "grad_norm": 0.5421552062034607, "learning_rate": 0.00019969069993751431, "loss": 1.2611, "step": 289 }, { "epoch": 0.05389332837762498, "grad_norm": 0.3736020624637604, 
"learning_rate": 0.000199688397231642, "loss": 1.1123, "step": 290 }, { "epoch": 0.0540791674409961, "grad_norm": 0.4207514226436615, "learning_rate": 0.00019968608599920564, "loss": 1.1748, "step": 291 }, { "epoch": 0.05426500650436722, "grad_norm": 0.5403153896331787, "learning_rate": 0.0001996837662404029, "loss": 1.2441, "step": 292 }, { "epoch": 0.05445084556773834, "grad_norm": 0.3881579041481018, "learning_rate": 0.00019968143795543215, "loss": 1.1158, "step": 293 }, { "epoch": 0.05463668463110946, "grad_norm": 0.47338834404945374, "learning_rate": 0.0001996791011444926, "loss": 1.2112, "step": 294 }, { "epoch": 0.05482252369448058, "grad_norm": 0.46377336978912354, "learning_rate": 0.00019967675580778412, "loss": 1.0821, "step": 295 }, { "epoch": 0.0550083627578517, "grad_norm": 0.37327149510383606, "learning_rate": 0.00019967440194550723, "loss": 1.138, "step": 296 }, { "epoch": 0.055194201821222824, "grad_norm": 0.45194652676582336, "learning_rate": 0.00019967203955786335, "loss": 1.0792, "step": 297 }, { "epoch": 0.05538004088459394, "grad_norm": 0.442440927028656, "learning_rate": 0.0001996696686450545, "loss": 1.1935, "step": 298 }, { "epoch": 0.05556587994796506, "grad_norm": 0.5025745630264282, "learning_rate": 0.0001996672892072835, "loss": 1.1351, "step": 299 }, { "epoch": 0.05575171901133618, "grad_norm": 0.47533366084098816, "learning_rate": 0.0001996649012447538, "loss": 1.3472, "step": 300 }, { "epoch": 0.0559375580747073, "grad_norm": 0.45607975125312805, "learning_rate": 0.00019966250475766972, "loss": 1.1532, "step": 301 }, { "epoch": 0.05612339713807842, "grad_norm": 0.4451979994773865, "learning_rate": 0.0001996600997462362, "loss": 0.8268, "step": 302 }, { "epoch": 0.05630923620144954, "grad_norm": 0.4001930356025696, "learning_rate": 0.00019965768621065896, "loss": 0.9162, "step": 303 }, { "epoch": 0.056495075264820664, "grad_norm": 0.415159672498703, "learning_rate": 0.00019965526415114445, "loss": 1.1613, "step": 304 }, { "epoch": 
0.056680914328191785, "grad_norm": 0.4185488224029541, "learning_rate": 0.0001996528335678998, "loss": 1.1483, "step": 305 }, { "epoch": 0.056866753391562906, "grad_norm": 1.838326334953308, "learning_rate": 0.0001996503944611329, "loss": 4.0785, "step": 306 }, { "epoch": 0.05705259245493403, "grad_norm": 0.42079976201057434, "learning_rate": 0.0001996479468310524, "loss": 0.9382, "step": 307 }, { "epoch": 0.05723843151830515, "grad_norm": 0.3876133859157562, "learning_rate": 0.00019964549067786762, "loss": 0.9098, "step": 308 }, { "epoch": 0.05742427058167627, "grad_norm": 0.4491420388221741, "learning_rate": 0.0001996430260017887, "loss": 1.104, "step": 309 }, { "epoch": 0.05761010964504739, "grad_norm": 0.3816368281841278, "learning_rate": 0.00019964055280302637, "loss": 0.9202, "step": 310 }, { "epoch": 0.05779594870841851, "grad_norm": 0.3864871859550476, "learning_rate": 0.00019963807108179223, "loss": 0.8936, "step": 311 }, { "epoch": 0.05798178777178963, "grad_norm": 0.4503324329853058, "learning_rate": 0.00019963558083829853, "loss": 1.051, "step": 312 }, { "epoch": 0.05816762683516075, "grad_norm": 0.4208626449108124, "learning_rate": 0.00019963308207275823, "loss": 1.2242, "step": 313 }, { "epoch": 0.058353465898531874, "grad_norm": 0.5481116771697998, "learning_rate": 0.00019963057478538513, "loss": 1.5282, "step": 314 }, { "epoch": 0.058539304961902995, "grad_norm": 0.46902161836624146, "learning_rate": 0.0001996280589763936, "loss": 1.2003, "step": 315 }, { "epoch": 0.05872514402527411, "grad_norm": 0.37822434306144714, "learning_rate": 0.00019962553464599887, "loss": 0.7875, "step": 316 }, { "epoch": 0.05891098308864523, "grad_norm": 0.4753560423851013, "learning_rate": 0.00019962300179441687, "loss": 1.2323, "step": 317 }, { "epoch": 0.05909682215201635, "grad_norm": 0.5222552418708801, "learning_rate": 0.00019962046042186417, "loss": 1.3808, "step": 318 }, { "epoch": 0.05928266121538747, "grad_norm": 0.37479904294013977, "learning_rate": 
0.0001996179105285582, "loss": 1.1761, "step": 319 }, { "epoch": 0.05946850027875859, "grad_norm": 0.5287699103355408, "learning_rate": 0.00019961535211471704, "loss": 0.9187, "step": 320 }, { "epoch": 0.059654339342129714, "grad_norm": 0.5424025058746338, "learning_rate": 0.0001996127851805595, "loss": 1.3986, "step": 321 }, { "epoch": 0.059840178405500835, "grad_norm": 0.3869524896144867, "learning_rate": 0.0001996102097263052, "loss": 0.9046, "step": 322 }, { "epoch": 0.060026017468871956, "grad_norm": 0.42366644740104675, "learning_rate": 0.00019960762575217434, "loss": 1.064, "step": 323 }, { "epoch": 0.06021185653224308, "grad_norm": 0.4955057203769684, "learning_rate": 0.00019960503325838798, "loss": 1.0469, "step": 324 }, { "epoch": 0.0603976955956142, "grad_norm": 0.49584606289863586, "learning_rate": 0.00019960243224516783, "loss": 1.1859, "step": 325 }, { "epoch": 0.06058353465898532, "grad_norm": 0.3556949496269226, "learning_rate": 0.0001995998227127364, "loss": 1.1647, "step": 326 }, { "epoch": 0.06076937372235644, "grad_norm": 0.41173893213272095, "learning_rate": 0.00019959720466131687, "loss": 1.145, "step": 327 }, { "epoch": 0.06095521278572756, "grad_norm": 0.44232895970344543, "learning_rate": 0.00019959457809113316, "loss": 0.9857, "step": 328 }, { "epoch": 0.06114105184909868, "grad_norm": 0.43240708112716675, "learning_rate": 0.00019959194300240993, "loss": 1.1169, "step": 329 }, { "epoch": 0.0613268909124698, "grad_norm": 0.5672433972358704, "learning_rate": 0.00019958929939537258, "loss": 1.1568, "step": 330 }, { "epoch": 0.061512729975840924, "grad_norm": 0.30468809604644775, "learning_rate": 0.00019958664727024722, "loss": 0.8156, "step": 331 }, { "epoch": 0.061698569039212045, "grad_norm": 2.1194663047790527, "learning_rate": 0.0001995839866272607, "loss": 3.7851, "step": 332 }, { "epoch": 0.061884408102583166, "grad_norm": 0.4063419699668884, "learning_rate": 0.0001995813174666405, "loss": 1.0279, "step": 333 }, { "epoch": 
0.06207024716595429, "grad_norm": 0.4712132513523102, "learning_rate": 0.00019957863978861505, "loss": 1.1407, "step": 334 }, { "epoch": 0.0622560862293254, "grad_norm": 0.44568800926208496, "learning_rate": 0.0001995759535934133, "loss": 1.1316, "step": 335 }, { "epoch": 0.06244192529269652, "grad_norm": 0.4557202160358429, "learning_rate": 0.00019957325888126507, "loss": 1.0661, "step": 336 }, { "epoch": 0.06262776435606765, "grad_norm": 0.38041767477989197, "learning_rate": 0.00019957055565240076, "loss": 0.9131, "step": 337 }, { "epoch": 0.06281360341943877, "grad_norm": 0.4428039491176605, "learning_rate": 0.00019956784390705163, "loss": 1.2109, "step": 338 }, { "epoch": 0.06299944248280989, "grad_norm": 0.4144112467765808, "learning_rate": 0.0001995651236454496, "loss": 1.0889, "step": 339 }, { "epoch": 0.06318528154618101, "grad_norm": 0.3976646363735199, "learning_rate": 0.0001995623948678274, "loss": 1.0306, "step": 340 }, { "epoch": 0.06337112060955213, "grad_norm": 0.47308364510536194, "learning_rate": 0.00019955965757441834, "loss": 1.2456, "step": 341 }, { "epoch": 0.06355695967292325, "grad_norm": 0.3834746778011322, "learning_rate": 0.00019955691176545663, "loss": 1.0008, "step": 342 }, { "epoch": 0.06374279873629438, "grad_norm": 0.46271997690200806, "learning_rate": 0.00019955415744117707, "loss": 1.1123, "step": 343 }, { "epoch": 0.06392863779966548, "grad_norm": 0.4702741503715515, "learning_rate": 0.00019955139460181527, "loss": 1.2039, "step": 344 }, { "epoch": 0.0641144768630366, "grad_norm": 0.4005366861820221, "learning_rate": 0.00019954862324760752, "loss": 0.955, "step": 345 }, { "epoch": 0.06430031592640773, "grad_norm": 0.44210031628608704, "learning_rate": 0.00019954584337879085, "loss": 1.2393, "step": 346 }, { "epoch": 0.06448615498977885, "grad_norm": 0.4394225478172302, "learning_rate": 0.00019954305499560307, "loss": 1.2445, "step": 347 }, { "epoch": 0.06467199405314997, "grad_norm": 0.6319501399993896, "learning_rate": 
0.00019954025809828266, "loss": 1.2729, "step": 348 }, { "epoch": 0.06485783311652109, "grad_norm": 0.49698418378829956, "learning_rate": 0.00019953745268706883, "loss": 1.3173, "step": 349 }, { "epoch": 0.06504367217989221, "grad_norm": 2.311923027038574, "learning_rate": 0.00019953463876220154, "loss": 3.9918, "step": 350 }, { "epoch": 0.06522951124326333, "grad_norm": 0.4096338450908661, "learning_rate": 0.0001995318163239215, "loss": 1.0602, "step": 351 }, { "epoch": 0.06541535030663445, "grad_norm": 0.5735236406326294, "learning_rate": 0.00019952898537247004, "loss": 1.1047, "step": 352 }, { "epoch": 0.06560118937000557, "grad_norm": 0.43088850378990173, "learning_rate": 0.0001995261459080894, "loss": 1.1304, "step": 353 }, { "epoch": 0.06578702843337669, "grad_norm": 0.39601054787635803, "learning_rate": 0.0001995232979310224, "loss": 1.0685, "step": 354 }, { "epoch": 0.06597286749674781, "grad_norm": 0.5507276654243469, "learning_rate": 0.0001995204414415126, "loss": 1.1807, "step": 355 }, { "epoch": 0.06615870656011894, "grad_norm": 0.4136437773704529, "learning_rate": 0.00019951757643980433, "loss": 1.038, "step": 356 }, { "epoch": 0.06634454562349006, "grad_norm": 0.4400745928287506, "learning_rate": 0.00019951470292614272, "loss": 1.1044, "step": 357 }, { "epoch": 0.06653038468686118, "grad_norm": 0.3629709482192993, "learning_rate": 0.00019951182090077344, "loss": 1.0114, "step": 358 }, { "epoch": 0.0667162237502323, "grad_norm": 0.6199255585670471, "learning_rate": 0.00019950893036394307, "loss": 1.3218, "step": 359 }, { "epoch": 0.06690206281360342, "grad_norm": 0.4702179729938507, "learning_rate": 0.00019950603131589883, "loss": 1.1172, "step": 360 }, { "epoch": 0.06708790187697454, "grad_norm": 0.42372721433639526, "learning_rate": 0.00019950312375688865, "loss": 1.1621, "step": 361 }, { "epoch": 0.06727374094034566, "grad_norm": 0.4708528518676758, "learning_rate": 0.00019950020768716128, "loss": 1.222, "step": 362 }, { "epoch": 
0.06745958000371678, "grad_norm": 0.4498105049133301, "learning_rate": 0.00019949728310696604, "loss": 1.1895, "step": 363 }, { "epoch": 0.0676454190670879, "grad_norm": 0.4580400586128235, "learning_rate": 0.00019949435001655313, "loss": 1.1215, "step": 364 }, { "epoch": 0.06783125813045902, "grad_norm": 0.4212903678417206, "learning_rate": 0.00019949140841617348, "loss": 1.3536, "step": 365 }, { "epoch": 0.06801709719383014, "grad_norm": 0.4488227069377899, "learning_rate": 0.00019948845830607863, "loss": 1.1161, "step": 366 }, { "epoch": 0.06820293625720127, "grad_norm": 0.46963340044021606, "learning_rate": 0.00019948549968652087, "loss": 0.859, "step": 367 }, { "epoch": 0.06838877532057239, "grad_norm": 0.489118367433548, "learning_rate": 0.00019948253255775336, "loss": 1.221, "step": 368 }, { "epoch": 0.06857461438394351, "grad_norm": 0.4626411199569702, "learning_rate": 0.0001994795569200298, "loss": 1.0484, "step": 369 }, { "epoch": 0.06876045344731463, "grad_norm": 0.47765642404556274, "learning_rate": 0.00019947657277360478, "loss": 1.1136, "step": 370 }, { "epoch": 0.06894629251068575, "grad_norm": 0.45885488390922546, "learning_rate": 0.00019947358011873344, "loss": 1.2611, "step": 371 }, { "epoch": 0.06913213157405687, "grad_norm": 0.6871397495269775, "learning_rate": 0.0001994705789556718, "loss": 1.3397, "step": 372 }, { "epoch": 0.06931797063742799, "grad_norm": 0.3958967626094818, "learning_rate": 0.00019946756928467658, "loss": 1.0389, "step": 373 }, { "epoch": 0.06950380970079911, "grad_norm": 0.47687655687332153, "learning_rate": 0.0001994645511060052, "loss": 1.1862, "step": 374 }, { "epoch": 0.06968964876417023, "grad_norm": 0.45492956042289734, "learning_rate": 0.00019946152441991576, "loss": 1.1136, "step": 375 }, { "epoch": 0.06987548782754135, "grad_norm": 0.5021520256996155, "learning_rate": 0.00019945848922666717, "loss": 1.1335, "step": 376 }, { "epoch": 0.07006132689091248, "grad_norm": 0.5270656943321228, "learning_rate": 
0.00019945544552651905, "loss": 0.829, "step": 377 }, { "epoch": 0.0702471659542836, "grad_norm": 0.39411312341690063, "learning_rate": 0.00019945239331973174, "loss": 1.0567, "step": 378 }, { "epoch": 0.07043300501765472, "grad_norm": 0.4584178328514099, "learning_rate": 0.00019944933260656624, "loss": 1.158, "step": 379 }, { "epoch": 0.07061884408102582, "grad_norm": 0.4279657304286957, "learning_rate": 0.00019944626338728444, "loss": 1.0037, "step": 380 }, { "epoch": 0.07080468314439695, "grad_norm": 0.554434061050415, "learning_rate": 0.00019944318566214876, "loss": 1.2635, "step": 381 }, { "epoch": 0.07099052220776807, "grad_norm": 0.4390695095062256, "learning_rate": 0.0001994400994314225, "loss": 0.8383, "step": 382 }, { "epoch": 0.07117636127113919, "grad_norm": 0.49963220953941345, "learning_rate": 0.00019943700469536962, "loss": 1.2483, "step": 383 }, { "epoch": 0.07136220033451031, "grad_norm": 0.5585063099861145, "learning_rate": 0.0001994339014542548, "loss": 1.3737, "step": 384 }, { "epoch": 0.07154803939788143, "grad_norm": 0.4825543165206909, "learning_rate": 0.00019943078970834348, "loss": 0.9761, "step": 385 }, { "epoch": 0.07173387846125255, "grad_norm": 0.3667401969432831, "learning_rate": 0.00019942766945790186, "loss": 0.9599, "step": 386 }, { "epoch": 0.07191971752462367, "grad_norm": 0.5182320475578308, "learning_rate": 0.00019942454070319673, "loss": 1.1975, "step": 387 }, { "epoch": 0.07210555658799479, "grad_norm": 0.4698030352592468, "learning_rate": 0.00019942140344449577, "loss": 1.2344, "step": 388 }, { "epoch": 0.07229139565136591, "grad_norm": 0.42861101031303406, "learning_rate": 0.0001994182576820673, "loss": 1.2487, "step": 389 }, { "epoch": 0.07247723471473703, "grad_norm": 0.5619314908981323, "learning_rate": 0.00019941510341618036, "loss": 1.3243, "step": 390 }, { "epoch": 0.07266307377810816, "grad_norm": 0.4856956899166107, "learning_rate": 0.0001994119406471048, "loss": 1.1531, "step": 391 }, { "epoch": 0.07284891284147928, 
"grad_norm": 0.3697066903114319, "learning_rate": 0.00019940876937511108, "loss": 1.1304, "step": 392 }, { "epoch": 0.0730347519048504, "grad_norm": 0.417986124753952, "learning_rate": 0.00019940558960047046, "loss": 1.0578, "step": 393 }, { "epoch": 0.07322059096822152, "grad_norm": 0.4160363972187042, "learning_rate": 0.0001994024013234549, "loss": 1.3094, "step": 394 }, { "epoch": 0.07340643003159264, "grad_norm": 0.4469776153564453, "learning_rate": 0.00019939920454433715, "loss": 1.0983, "step": 395 }, { "epoch": 0.07359226909496376, "grad_norm": 0.5031948685646057, "learning_rate": 0.0001993959992633906, "loss": 1.536, "step": 396 }, { "epoch": 0.07377810815833488, "grad_norm": 0.45184874534606934, "learning_rate": 0.00019939278548088937, "loss": 0.8456, "step": 397 }, { "epoch": 0.073963947221706, "grad_norm": 0.45181095600128174, "learning_rate": 0.00019938956319710843, "loss": 1.2099, "step": 398 }, { "epoch": 0.07414978628507712, "grad_norm": 0.49429285526275635, "learning_rate": 0.00019938633241232335, "loss": 1.1858, "step": 399 }, { "epoch": 0.07433562534844824, "grad_norm": 0.41390714049339294, "learning_rate": 0.0001993830931268104, "loss": 0.9845, "step": 400 }, { "epoch": 0.07452146441181937, "grad_norm": 0.42566490173339844, "learning_rate": 0.00019937984534084674, "loss": 1.0536, "step": 401 }, { "epoch": 0.07470730347519049, "grad_norm": 0.4550681412220001, "learning_rate": 0.00019937658905471012, "loss": 1.1484, "step": 402 }, { "epoch": 0.07489314253856161, "grad_norm": 0.4505470097064972, "learning_rate": 0.00019937332426867905, "loss": 1.3151, "step": 403 }, { "epoch": 0.07507898160193273, "grad_norm": 0.3663678765296936, "learning_rate": 0.00019937005098303277, "loss": 1.1038, "step": 404 }, { "epoch": 0.07526482066530385, "grad_norm": 0.4451432228088379, "learning_rate": 0.00019936676919805126, "loss": 1.0466, "step": 405 }, { "epoch": 0.07545065972867497, "grad_norm": 0.4404486417770386, "learning_rate": 0.00019936347891401522, "loss": 
1.2363, "step": 406 }, { "epoch": 0.07563649879204609, "grad_norm": 0.42253008484840393, "learning_rate": 0.0001993601801312061, "loss": 0.8975, "step": 407 }, { "epoch": 0.07582233785541721, "grad_norm": 0.4789395034313202, "learning_rate": 0.000199356872849906, "loss": 1.0806, "step": 408 }, { "epoch": 0.07600817691878833, "grad_norm": 0.4530208706855774, "learning_rate": 0.00019935355707039785, "loss": 1.0968, "step": 409 }, { "epoch": 0.07619401598215945, "grad_norm": 0.47005361318588257, "learning_rate": 0.00019935023279296524, "loss": 1.2328, "step": 410 }, { "epoch": 0.07637985504553058, "grad_norm": 0.4853919446468353, "learning_rate": 0.00019934690001789247, "loss": 1.148, "step": 411 }, { "epoch": 0.0765656941089017, "grad_norm": 0.5219566226005554, "learning_rate": 0.00019934355874546466, "loss": 1.1759, "step": 412 }, { "epoch": 0.07675153317227282, "grad_norm": 0.3366680145263672, "learning_rate": 0.0001993402089759675, "loss": 0.8707, "step": 413 }, { "epoch": 0.07693737223564394, "grad_norm": 0.6554648876190186, "learning_rate": 0.0001993368507096876, "loss": 1.3711, "step": 414 }, { "epoch": 0.07712321129901506, "grad_norm": 0.44505739212036133, "learning_rate": 0.0001993334839469122, "loss": 1.0383, "step": 415 }, { "epoch": 0.07730905036238617, "grad_norm": 0.44871413707733154, "learning_rate": 0.00019933010868792916, "loss": 1.1774, "step": 416 }, { "epoch": 0.07749488942575729, "grad_norm": 0.39043372869491577, "learning_rate": 0.00019932672493302729, "loss": 1.1545, "step": 417 }, { "epoch": 0.07768072848912841, "grad_norm": 0.5223948955535889, "learning_rate": 0.00019932333268249595, "loss": 1.3809, "step": 418 }, { "epoch": 0.07786656755249953, "grad_norm": 0.4341309368610382, "learning_rate": 0.00019931993193662528, "loss": 0.9368, "step": 419 }, { "epoch": 0.07805240661587065, "grad_norm": 0.5227263569831848, "learning_rate": 0.0001993165226957062, "loss": 1.1342, "step": 420 }, { "epoch": 0.07823824567924177, "grad_norm": 
0.40590155124664307, "learning_rate": 0.00019931310496003029, "loss": 1.3317, "step": 421 }, { "epoch": 0.07842408474261289, "grad_norm": 0.4488816559314728, "learning_rate": 0.00019930967872988984, "loss": 0.9919, "step": 422 }, { "epoch": 0.07860992380598401, "grad_norm": 0.3798006772994995, "learning_rate": 0.00019930624400557796, "loss": 0.7698, "step": 423 }, { "epoch": 0.07879576286935513, "grad_norm": 0.41681668162345886, "learning_rate": 0.00019930280078738836, "loss": 1.096, "step": 424 }, { "epoch": 0.07898160193272626, "grad_norm": 0.40335264801979065, "learning_rate": 0.00019929934907561564, "loss": 1.113, "step": 425 }, { "epoch": 0.07916744099609738, "grad_norm": 0.4952360689640045, "learning_rate": 0.00019929588887055495, "loss": 1.2477, "step": 426 }, { "epoch": 0.0793532800594685, "grad_norm": 0.44737347960472107, "learning_rate": 0.00019929242017250226, "loss": 1.0625, "step": 427 }, { "epoch": 0.07953911912283962, "grad_norm": 0.4532301723957062, "learning_rate": 0.00019928894298175428, "loss": 1.1246, "step": 428 }, { "epoch": 0.07972495818621074, "grad_norm": 0.4013027250766754, "learning_rate": 0.00019928545729860846, "loss": 1.0591, "step": 429 }, { "epoch": 0.07991079724958186, "grad_norm": 0.4169690012931824, "learning_rate": 0.00019928196312336285, "loss": 1.2644, "step": 430 }, { "epoch": 0.08009663631295298, "grad_norm": 0.549383282661438, "learning_rate": 0.00019927846045631638, "loss": 1.093, "step": 431 }, { "epoch": 0.0802824753763241, "grad_norm": 1.4828532934188843, "learning_rate": 0.0001992749492977686, "loss": 3.8312, "step": 432 }, { "epoch": 0.08046831443969522, "grad_norm": 0.414670467376709, "learning_rate": 0.00019927142964801985, "loss": 1.0559, "step": 433 }, { "epoch": 0.08065415350306634, "grad_norm": 0.46947577595710754, "learning_rate": 0.00019926790150737118, "loss": 0.6598, "step": 434 }, { "epoch": 0.08083999256643747, "grad_norm": 0.37407925724983215, "learning_rate": 0.00019926436487612435, "loss": 1.0784, 
"step": 435 }, { "epoch": 0.08102583162980859, "grad_norm": 0.4735274910926819, "learning_rate": 0.00019926081975458183, "loss": 1.1369, "step": 436 }, { "epoch": 0.08121167069317971, "grad_norm": 0.3918091952800751, "learning_rate": 0.00019925726614304688, "loss": 1.0932, "step": 437 }, { "epoch": 0.08139750975655083, "grad_norm": 0.4890015125274658, "learning_rate": 0.00019925370404182341, "loss": 1.4067, "step": 438 }, { "epoch": 0.08158334881992195, "grad_norm": 0.5494944453239441, "learning_rate": 0.00019925013345121616, "loss": 1.0676, "step": 439 }, { "epoch": 0.08176918788329307, "grad_norm": 0.4678218364715576, "learning_rate": 0.00019924655437153046, "loss": 0.9595, "step": 440 }, { "epoch": 0.08195502694666419, "grad_norm": 0.42315244674682617, "learning_rate": 0.0001992429668030725, "loss": 1.1847, "step": 441 }, { "epoch": 0.08214086601003531, "grad_norm": 0.4802471399307251, "learning_rate": 0.00019923937074614907, "loss": 0.9514, "step": 442 }, { "epoch": 0.08232670507340643, "grad_norm": 0.38350144028663635, "learning_rate": 0.00019923576620106776, "loss": 0.9091, "step": 443 }, { "epoch": 0.08251254413677755, "grad_norm": 0.42246341705322266, "learning_rate": 0.0001992321531681369, "loss": 1.2286, "step": 444 }, { "epoch": 0.08269838320014868, "grad_norm": 0.48033586144447327, "learning_rate": 0.0001992285316476655, "loss": 1.2414, "step": 445 }, { "epoch": 0.0828842222635198, "grad_norm": 0.42830902338027954, "learning_rate": 0.00019922490163996336, "loss": 0.8, "step": 446 }, { "epoch": 0.08307006132689092, "grad_norm": 0.4135034382343292, "learning_rate": 0.0001992212631453409, "loss": 1.0903, "step": 447 }, { "epoch": 0.08325590039026204, "grad_norm": 0.4331395924091339, "learning_rate": 0.00019921761616410939, "loss": 1.1565, "step": 448 }, { "epoch": 0.08344173945363316, "grad_norm": 0.530741274356842, "learning_rate": 0.00019921396069658072, "loss": 1.2175, "step": 449 }, { "epoch": 0.08362757851700428, "grad_norm": 0.5240445137023926, 
"learning_rate": 0.00019921029674306755, "loss": 1.0252, "step": 450 }, { "epoch": 0.0838134175803754, "grad_norm": 0.47946205735206604, "learning_rate": 0.00019920662430388328, "loss": 0.9948, "step": 451 }, { "epoch": 0.08399925664374651, "grad_norm": 0.4071696698665619, "learning_rate": 0.00019920294337934203, "loss": 1.0508, "step": 452 }, { "epoch": 0.08418509570711763, "grad_norm": 0.528746485710144, "learning_rate": 0.0001991992539697586, "loss": 1.0941, "step": 453 }, { "epoch": 0.08437093477048875, "grad_norm": 0.44651922583580017, "learning_rate": 0.0001991955560754486, "loss": 1.0259, "step": 454 }, { "epoch": 0.08455677383385987, "grad_norm": 0.5037238001823425, "learning_rate": 0.0001991918496967283, "loss": 1.2394, "step": 455 }, { "epoch": 0.08474261289723099, "grad_norm": 0.4429932236671448, "learning_rate": 0.00019918813483391472, "loss": 1.0982, "step": 456 }, { "epoch": 0.08492845196060211, "grad_norm": 0.5680214166641235, "learning_rate": 0.0001991844114873256, "loss": 1.1919, "step": 457 }, { "epoch": 0.08511429102397323, "grad_norm": 0.43642061948776245, "learning_rate": 0.00019918067965727938, "loss": 1.1807, "step": 458 }, { "epoch": 0.08530013008734436, "grad_norm": 0.4607391655445099, "learning_rate": 0.00019917693934409528, "loss": 1.1641, "step": 459 }, { "epoch": 0.08548596915071548, "grad_norm": 0.48521819710731506, "learning_rate": 0.00019917319054809317, "loss": 0.9252, "step": 460 }, { "epoch": 0.0856718082140866, "grad_norm": 0.5308165550231934, "learning_rate": 0.00019916943326959378, "loss": 1.3597, "step": 461 }, { "epoch": 0.08585764727745772, "grad_norm": 0.5356461405754089, "learning_rate": 0.00019916566750891843, "loss": 1.2329, "step": 462 }, { "epoch": 0.08604348634082884, "grad_norm": 0.45560821890830994, "learning_rate": 0.0001991618932663892, "loss": 1.027, "step": 463 }, { "epoch": 0.08622932540419996, "grad_norm": 0.44379356503486633, "learning_rate": 0.0001991581105423289, "loss": 1.2826, "step": 464 }, { "epoch": 
0.08641516446757108, "grad_norm": 0.5654692649841309, "learning_rate": 0.00019915431933706113, "loss": 1.0153, "step": 465 }, { "epoch": 0.0866010035309422, "grad_norm": 0.4437659978866577, "learning_rate": 0.00019915051965091013, "loss": 1.0302, "step": 466 }, { "epoch": 0.08678684259431332, "grad_norm": 0.41596749424934387, "learning_rate": 0.00019914671148420085, "loss": 1.2477, "step": 467 }, { "epoch": 0.08697268165768444, "grad_norm": 0.40698888897895813, "learning_rate": 0.00019914289483725909, "loss": 1.1617, "step": 468 }, { "epoch": 0.08715852072105557, "grad_norm": 0.5427649021148682, "learning_rate": 0.00019913906971041122, "loss": 0.9785, "step": 469 }, { "epoch": 0.08734435978442669, "grad_norm": 0.4189436137676239, "learning_rate": 0.0001991352361039845, "loss": 1.025, "step": 470 }, { "epoch": 0.08753019884779781, "grad_norm": 0.5531293153762817, "learning_rate": 0.00019913139401830674, "loss": 1.4385, "step": 471 }, { "epoch": 0.08771603791116893, "grad_norm": 0.46829280257225037, "learning_rate": 0.00019912754345370663, "loss": 1.1963, "step": 472 }, { "epoch": 0.08790187697454005, "grad_norm": 0.5418177247047424, "learning_rate": 0.00019912368441051345, "loss": 1.412, "step": 473 }, { "epoch": 0.08808771603791117, "grad_norm": 0.4771590828895569, "learning_rate": 0.00019911981688905734, "loss": 1.1606, "step": 474 }, { "epoch": 0.08827355510128229, "grad_norm": 0.4091399013996124, "learning_rate": 0.00019911594088966905, "loss": 1.1281, "step": 475 }, { "epoch": 0.08845939416465341, "grad_norm": 0.47988924384117126, "learning_rate": 0.00019911205641268012, "loss": 0.8196, "step": 476 }, { "epoch": 0.08864523322802453, "grad_norm": 0.5251743793487549, "learning_rate": 0.00019910816345842277, "loss": 0.9623, "step": 477 }, { "epoch": 0.08883107229139565, "grad_norm": 0.44282639026641846, "learning_rate": 0.00019910426202723004, "loss": 1.1464, "step": 478 }, { "epoch": 0.08901691135476678, "grad_norm": 0.6290245056152344, "learning_rate": 
0.00019910035211943557, "loss": 1.3983, "step": 479 }, { "epoch": 0.0892027504181379, "grad_norm": 0.4757908880710602, "learning_rate": 0.0001990964337353738, "loss": 1.2815, "step": 480 }, { "epoch": 0.08938858948150902, "grad_norm": 0.43329253792762756, "learning_rate": 0.00019909250687537986, "loss": 1.28, "step": 481 }, { "epoch": 0.08957442854488014, "grad_norm": 0.4964252710342407, "learning_rate": 0.00019908857153978966, "loss": 1.2799, "step": 482 }, { "epoch": 0.08976026760825126, "grad_norm": 0.4180728793144226, "learning_rate": 0.00019908462772893977, "loss": 1.1443, "step": 483 }, { "epoch": 0.08994610667162238, "grad_norm": 0.45169374346733093, "learning_rate": 0.00019908067544316751, "loss": 0.8931, "step": 484 }, { "epoch": 0.0901319457349935, "grad_norm": 0.5057590007781982, "learning_rate": 0.00019907671468281097, "loss": 1.218, "step": 485 }, { "epoch": 0.09031778479836462, "grad_norm": 0.40992191433906555, "learning_rate": 0.00019907274544820887, "loss": 1.162, "step": 486 }, { "epoch": 0.09050362386173574, "grad_norm": 0.5198535323143005, "learning_rate": 0.00019906876773970072, "loss": 1.0092, "step": 487 }, { "epoch": 0.09068946292510685, "grad_norm": 0.44954633712768555, "learning_rate": 0.00019906478155762677, "loss": 1.2348, "step": 488 }, { "epoch": 0.09087530198847797, "grad_norm": 0.4218129813671112, "learning_rate": 0.00019906078690232792, "loss": 1.057, "step": 489 }, { "epoch": 0.09106114105184909, "grad_norm": 0.5415510535240173, "learning_rate": 0.0001990567837741459, "loss": 0.9229, "step": 490 }, { "epoch": 0.09124698011522021, "grad_norm": 0.41418707370758057, "learning_rate": 0.00019905277217342304, "loss": 1.3003, "step": 491 }, { "epoch": 0.09143281917859133, "grad_norm": 0.47480088472366333, "learning_rate": 0.00019904875210050252, "loss": 1.0083, "step": 492 }, { "epoch": 0.09161865824196246, "grad_norm": 0.4657514989376068, "learning_rate": 0.00019904472355572814, "loss": 1.1367, "step": 493 }, { "epoch": 
0.09180449730533358, "grad_norm": 0.6550506949424744, "learning_rate": 0.0001990406865394445, "loss": 1.364, "step": 494 }, { "epoch": 0.0919903363687047, "grad_norm": 0.44624167680740356, "learning_rate": 0.0001990366410519969, "loss": 1.0305, "step": 495 }, { "epoch": 0.09217617543207582, "grad_norm": 0.5374132990837097, "learning_rate": 0.00019903258709373132, "loss": 1.3057, "step": 496 }, { "epoch": 0.09236201449544694, "grad_norm": 0.5603333711624146, "learning_rate": 0.00019902852466499452, "loss": 1.2736, "step": 497 }, { "epoch": 0.09254785355881806, "grad_norm": 0.43850839138031006, "learning_rate": 0.000199024453766134, "loss": 1.207, "step": 498 }, { "epoch": 0.09273369262218918, "grad_norm": 0.47750213742256165, "learning_rate": 0.00019902037439749792, "loss": 1.158, "step": 499 }, { "epoch": 0.0929195316855603, "grad_norm": 0.46765515208244324, "learning_rate": 0.00019901628655943522, "loss": 1.0762, "step": 500 }, { "epoch": 0.09310537074893142, "grad_norm": 0.46739229559898376, "learning_rate": 0.00019901219025229554, "loss": 1.177, "step": 501 }, { "epoch": 0.09329120981230254, "grad_norm": 0.5190978050231934, "learning_rate": 0.0001990080854764292, "loss": 1.0188, "step": 502 }, { "epoch": 0.09347704887567367, "grad_norm": 0.49740681052207947, "learning_rate": 0.00019900397223218734, "loss": 1.2849, "step": 503 }, { "epoch": 0.09366288793904479, "grad_norm": 0.4239771068096161, "learning_rate": 0.00019899985051992177, "loss": 1.2079, "step": 504 }, { "epoch": 0.09384872700241591, "grad_norm": 0.5220719575881958, "learning_rate": 0.000198995720339985, "loss": 1.1537, "step": 505 }, { "epoch": 0.09403456606578703, "grad_norm": 0.4205484986305237, "learning_rate": 0.0001989915816927303, "loss": 1.057, "step": 506 }, { "epoch": 0.09422040512915815, "grad_norm": 0.4098726511001587, "learning_rate": 0.0001989874345785117, "loss": 1.0374, "step": 507 }, { "epoch": 0.09440624419252927, "grad_norm": 0.4050571024417877, "learning_rate": 
0.00019898327899768386, "loss": 0.9851, "step": 508 }, { "epoch": 0.09459208325590039, "grad_norm": 0.560594916343689, "learning_rate": 0.00019897911495060224, "loss": 1.032, "step": 509 }, { "epoch": 0.09477792231927151, "grad_norm": 0.5087330937385559, "learning_rate": 0.00019897494243762301, "loss": 1.0799, "step": 510 }, { "epoch": 0.09496376138264263, "grad_norm": 0.40081334114074707, "learning_rate": 0.00019897076145910302, "loss": 1.1713, "step": 511 }, { "epoch": 0.09514960044601375, "grad_norm": 0.4222041964530945, "learning_rate": 0.0001989665720153999, "loss": 1.0581, "step": 512 }, { "epoch": 0.09533543950938488, "grad_norm": 0.47098875045776367, "learning_rate": 0.00019896237410687198, "loss": 1.1315, "step": 513 }, { "epoch": 0.095521278572756, "grad_norm": 0.41529735922813416, "learning_rate": 0.00019895816773387833, "loss": 1.1249, "step": 514 }, { "epoch": 0.09570711763612712, "grad_norm": 0.5826699733734131, "learning_rate": 0.0001989539528967787, "loss": 1.3797, "step": 515 }, { "epoch": 0.09589295669949824, "grad_norm": 0.4980536997318268, "learning_rate": 0.00019894972959593363, "loss": 1.0622, "step": 516 }, { "epoch": 0.09607879576286936, "grad_norm": 0.4493926763534546, "learning_rate": 0.00019894549783170434, "loss": 1.0394, "step": 517 }, { "epoch": 0.09626463482624048, "grad_norm": 0.4388639032840729, "learning_rate": 0.00019894125760445275, "loss": 1.0333, "step": 518 }, { "epoch": 0.0964504738896116, "grad_norm": 0.5359880924224854, "learning_rate": 0.00019893700891454157, "loss": 1.4027, "step": 519 }, { "epoch": 0.09663631295298272, "grad_norm": 0.459553062915802, "learning_rate": 0.00019893275176233416, "loss": 1.2656, "step": 520 }, { "epoch": 0.09682215201635384, "grad_norm": 0.5185438990592957, "learning_rate": 0.0001989284861481947, "loss": 0.9231, "step": 521 }, { "epoch": 0.09700799107972496, "grad_norm": 0.3912256062030792, "learning_rate": 0.000198924212072488, "loss": 1.1295, "step": 522 }, { "epoch": 0.09719383014309609, 
"grad_norm": 0.41744834184646606, "learning_rate": 0.0001989199295355796, "loss": 1.0672, "step": 523 }, { "epoch": 0.09737966920646719, "grad_norm": 0.4016173779964447, "learning_rate": 0.0001989156385378359, "loss": 1.0234, "step": 524 }, { "epoch": 0.09756550826983831, "grad_norm": 0.422979474067688, "learning_rate": 0.0001989113390796238, "loss": 1.0978, "step": 525 }, { "epoch": 0.09775134733320943, "grad_norm": 0.529803991317749, "learning_rate": 0.00019890703116131112, "loss": 1.3601, "step": 526 }, { "epoch": 0.09793718639658056, "grad_norm": 0.3872774839401245, "learning_rate": 0.00019890271478326628, "loss": 1.0105, "step": 527 }, { "epoch": 0.09812302545995168, "grad_norm": 0.5113834738731384, "learning_rate": 0.0001988983899458585, "loss": 1.3075, "step": 528 }, { "epoch": 0.0983088645233228, "grad_norm": 0.4002736508846283, "learning_rate": 0.00019889405664945766, "loss": 1.294, "step": 529 }, { "epoch": 0.09849470358669392, "grad_norm": 0.49016720056533813, "learning_rate": 0.00019888971489443445, "loss": 0.9481, "step": 530 }, { "epoch": 0.09868054265006504, "grad_norm": 0.6148933172225952, "learning_rate": 0.00019888536468116018, "loss": 1.0486, "step": 531 }, { "epoch": 0.09886638171343616, "grad_norm": 0.4160175621509552, "learning_rate": 0.00019888100601000695, "loss": 0.9821, "step": 532 }, { "epoch": 0.09905222077680728, "grad_norm": 0.4974881410598755, "learning_rate": 0.00019887663888134755, "loss": 1.0666, "step": 533 }, { "epoch": 0.0992380598401784, "grad_norm": 0.5117452144622803, "learning_rate": 0.00019887226329555553, "loss": 1.2383, "step": 534 }, { "epoch": 0.09942389890354952, "grad_norm": 0.4444761276245117, "learning_rate": 0.00019886787925300515, "loss": 1.1308, "step": 535 }, { "epoch": 0.09960973796692064, "grad_norm": 0.5025230646133423, "learning_rate": 0.00019886348675407137, "loss": 0.9898, "step": 536 }, { "epoch": 0.09979557703029177, "grad_norm": 0.4477807283401489, "learning_rate": 0.00019885908579912994, "loss": 
1.1329, "step": 537 }, { "epoch": 0.09998141609366289, "grad_norm": 0.5180255174636841, "learning_rate": 0.00019885467638855716, "loss": 1.2191, "step": 538 }, { "epoch": 0.10016725515703401, "grad_norm": 0.48357728123664856, "learning_rate": 0.0001988502585227303, "loss": 0.9723, "step": 539 }, { "epoch": 0.10035309422040513, "grad_norm": 0.5262537598609924, "learning_rate": 0.0001988458322020272, "loss": 1.2641, "step": 540 }, { "epoch": 0.10053893328377625, "grad_norm": 0.39484769105911255, "learning_rate": 0.0001988413974268264, "loss": 1.0507, "step": 541 }, { "epoch": 0.10072477234714737, "grad_norm": 0.4654625654220581, "learning_rate": 0.00019883695419750728, "loss": 1.0045, "step": 542 }, { "epoch": 0.10091061141051849, "grad_norm": 0.4511539041996002, "learning_rate": 0.00019883250251444983, "loss": 1.0578, "step": 543 }, { "epoch": 0.10109645047388961, "grad_norm": 0.48877015709877014, "learning_rate": 0.00019882804237803488, "loss": 0.892, "step": 544 }, { "epoch": 0.10128228953726073, "grad_norm": 0.4010559916496277, "learning_rate": 0.0001988235737886438, "loss": 1.0008, "step": 545 }, { "epoch": 0.10146812860063185, "grad_norm": 0.4214247763156891, "learning_rate": 0.00019881909674665892, "loss": 1.2221, "step": 546 }, { "epoch": 0.10165396766400298, "grad_norm": 0.47292011976242065, "learning_rate": 0.0001988146112524631, "loss": 1.1664, "step": 547 }, { "epoch": 0.1018398067273741, "grad_norm": 0.6377806663513184, "learning_rate": 0.00019881011730644004, "loss": 1.1985, "step": 548 }, { "epoch": 0.10202564579074522, "grad_norm": 0.46788981556892395, "learning_rate": 0.00019880561490897406, "loss": 1.236, "step": 549 }, { "epoch": 0.10221148485411634, "grad_norm": 0.42801347374916077, "learning_rate": 0.0001988011040604503, "loss": 1.183, "step": 550 }, { "epoch": 0.10239732391748746, "grad_norm": 0.4006738066673279, "learning_rate": 0.00019879658476125458, "loss": 1.1887, "step": 551 }, { "epoch": 0.10258316298085858, "grad_norm": 
0.45762765407562256, "learning_rate": 0.00019879205701177344, "loss": 1.2906, "step": 552 }, { "epoch": 0.1027690020442297, "grad_norm": 0.4175111949443817, "learning_rate": 0.0001987875208123941, "loss": 1.1363, "step": 553 }, { "epoch": 0.10295484110760082, "grad_norm": 0.4322377145290375, "learning_rate": 0.00019878297616350465, "loss": 1.0614, "step": 554 }, { "epoch": 0.10314068017097194, "grad_norm": 0.5138376355171204, "learning_rate": 0.00019877842306549376, "loss": 1.0583, "step": 555 }, { "epoch": 0.10332651923434306, "grad_norm": 0.4600732624530792, "learning_rate": 0.0001987738615187508, "loss": 1.0225, "step": 556 }, { "epoch": 0.10351235829771419, "grad_norm": 0.549647867679596, "learning_rate": 0.00019876929152366603, "loss": 1.2555, "step": 557 }, { "epoch": 0.1036981973610853, "grad_norm": 0.47968605160713196, "learning_rate": 0.00019876471308063028, "loss": 1.1949, "step": 558 }, { "epoch": 0.10388403642445643, "grad_norm": 0.4664756655693054, "learning_rate": 0.00019876012619003515, "loss": 1.2603, "step": 559 }, { "epoch": 0.10406987548782753, "grad_norm": 0.45363089442253113, "learning_rate": 0.00019875553085227295, "loss": 1.2025, "step": 560 }, { "epoch": 0.10425571455119866, "grad_norm": 0.4678725302219391, "learning_rate": 0.00019875092706773682, "loss": 1.1634, "step": 561 }, { "epoch": 0.10444155361456978, "grad_norm": 0.4928404986858368, "learning_rate": 0.00019874631483682046, "loss": 1.2043, "step": 562 }, { "epoch": 0.1046273926779409, "grad_norm": 0.39862075448036194, "learning_rate": 0.00019874169415991832, "loss": 1.1535, "step": 563 }, { "epoch": 0.10481323174131202, "grad_norm": 0.4637607932090759, "learning_rate": 0.0001987370650374257, "loss": 1.1165, "step": 564 }, { "epoch": 0.10499907080468314, "grad_norm": 0.4636867046356201, "learning_rate": 0.0001987324274697385, "loss": 1.3045, "step": 565 }, { "epoch": 0.10518490986805426, "grad_norm": 0.48317477107048035, "learning_rate": 0.00019872778145725342, "loss": 1.2164, "step": 
566 }, { "epoch": 0.10537074893142538, "grad_norm": 0.5793139338493347, "learning_rate": 0.00019872312700036778, "loss": 0.9508, "step": 567 }, { "epoch": 0.1055565879947965, "grad_norm": 0.49341946840286255, "learning_rate": 0.00019871846409947973, "loss": 1.2695, "step": 568 }, { "epoch": 0.10574242705816762, "grad_norm": 0.4591580033302307, "learning_rate": 0.0001987137927549881, "loss": 1.2874, "step": 569 }, { "epoch": 0.10592826612153874, "grad_norm": 0.4602017402648926, "learning_rate": 0.00019870911296729243, "loss": 1.3729, "step": 570 }, { "epoch": 0.10611410518490987, "grad_norm": 0.34223201870918274, "learning_rate": 0.00019870442473679298, "loss": 0.6184, "step": 571 }, { "epoch": 0.10629994424828099, "grad_norm": 0.49571168422698975, "learning_rate": 0.00019869972806389074, "loss": 1.0509, "step": 572 }, { "epoch": 0.10648578331165211, "grad_norm": 0.4025332033634186, "learning_rate": 0.00019869502294898744, "loss": 1.0083, "step": 573 }, { "epoch": 0.10667162237502323, "grad_norm": 0.5795102119445801, "learning_rate": 0.00019869030939248555, "loss": 1.2375, "step": 574 }, { "epoch": 0.10685746143839435, "grad_norm": 0.49539127945899963, "learning_rate": 0.00019868558739478815, "loss": 1.1855, "step": 575 }, { "epoch": 0.10704330050176547, "grad_norm": 0.44701632857322693, "learning_rate": 0.00019868085695629923, "loss": 1.0034, "step": 576 }, { "epoch": 0.10722913956513659, "grad_norm": 0.4629371762275696, "learning_rate": 0.0001986761180774233, "loss": 1.0642, "step": 577 }, { "epoch": 0.10741497862850771, "grad_norm": 0.44119590520858765, "learning_rate": 0.00019867137075856573, "loss": 0.9856, "step": 578 }, { "epoch": 0.10760081769187883, "grad_norm": 0.5623904466629028, "learning_rate": 0.00019866661500013256, "loss": 1.2922, "step": 579 }, { "epoch": 0.10778665675524995, "grad_norm": 0.5019487142562866, "learning_rate": 0.00019866185080253057, "loss": 1.2963, "step": 580 }, { "epoch": 0.10797249581862108, "grad_norm": 0.46231526136398315, 
"learning_rate": 0.00019865707816616723, "loss": 1.1691, "step": 581 }, { "epoch": 0.1081583348819922, "grad_norm": 0.3911663591861725, "learning_rate": 0.0001986522970914508, "loss": 1.0292, "step": 582 }, { "epoch": 0.10834417394536332, "grad_norm": 0.48484066128730774, "learning_rate": 0.00019864750757879015, "loss": 1.3642, "step": 583 }, { "epoch": 0.10853001300873444, "grad_norm": 0.44198179244995117, "learning_rate": 0.000198642709628595, "loss": 0.8633, "step": 584 }, { "epoch": 0.10871585207210556, "grad_norm": 0.5480027198791504, "learning_rate": 0.00019863790324127568, "loss": 1.2279, "step": 585 }, { "epoch": 0.10890169113547668, "grad_norm": 0.5162123441696167, "learning_rate": 0.00019863308841724334, "loss": 1.3003, "step": 586 }, { "epoch": 0.1090875301988478, "grad_norm": 0.4140394628047943, "learning_rate": 0.00019862826515690973, "loss": 0.8132, "step": 587 }, { "epoch": 0.10927336926221892, "grad_norm": 0.4862360954284668, "learning_rate": 0.00019862343346068747, "loss": 0.9631, "step": 588 }, { "epoch": 0.10945920832559004, "grad_norm": 0.4251142144203186, "learning_rate": 0.00019861859332898977, "loss": 0.9675, "step": 589 }, { "epoch": 0.10964504738896116, "grad_norm": 0.44398531317710876, "learning_rate": 0.00019861374476223064, "loss": 1.1292, "step": 590 }, { "epoch": 0.10983088645233229, "grad_norm": 0.42411577701568604, "learning_rate": 0.0001986088877608248, "loss": 1.0539, "step": 591 }, { "epoch": 0.1100167255157034, "grad_norm": 0.3475162088871002, "learning_rate": 0.00019860402232518766, "loss": 0.5955, "step": 592 }, { "epoch": 0.11020256457907453, "grad_norm": 0.43855538964271545, "learning_rate": 0.00019859914845573543, "loss": 1.1307, "step": 593 }, { "epoch": 0.11038840364244565, "grad_norm": 0.5022986531257629, "learning_rate": 0.00019859426615288488, "loss": 0.9466, "step": 594 }, { "epoch": 0.11057424270581677, "grad_norm": 0.44647181034088135, "learning_rate": 0.00019858937541705364, "loss": 1.0578, "step": 595 }, { "epoch": 
0.11076008176918788, "grad_norm": 0.41280534863471985, "learning_rate": 0.00019858447624866008, "loss": 0.9337, "step": 596 }, { "epoch": 0.110945920832559, "grad_norm": 0.4094390273094177, "learning_rate": 0.00019857956864812316, "loss": 1.1393, "step": 597 }, { "epoch": 0.11113175989593012, "grad_norm": 0.4634653329849243, "learning_rate": 0.00019857465261586272, "loss": 1.1014, "step": 598 }, { "epoch": 0.11131759895930124, "grad_norm": 0.46472883224487305, "learning_rate": 0.00019856972815229917, "loss": 0.8368, "step": 599 }, { "epoch": 0.11150343802267236, "grad_norm": 0.4838216304779053, "learning_rate": 0.00019856479525785372, "loss": 1.0311, "step": 600 }, { "epoch": 0.11168927708604348, "grad_norm": 0.5469513535499573, "learning_rate": 0.00019855985393294834, "loss": 1.2409, "step": 601 }, { "epoch": 0.1118751161494146, "grad_norm": 0.5099655985832214, "learning_rate": 0.00019855490417800562, "loss": 1.2431, "step": 602 }, { "epoch": 0.11206095521278572, "grad_norm": 0.6649165749549866, "learning_rate": 0.00019854994599344893, "loss": 1.3869, "step": 603 }, { "epoch": 0.11224679427615684, "grad_norm": 0.46297532320022583, "learning_rate": 0.00019854497937970238, "loss": 1.2108, "step": 604 }, { "epoch": 0.11243263333952797, "grad_norm": 0.46323227882385254, "learning_rate": 0.00019854000433719074, "loss": 1.0765, "step": 605 }, { "epoch": 0.11261847240289909, "grad_norm": 0.44460347294807434, "learning_rate": 0.00019853502086633956, "loss": 1.1857, "step": 606 }, { "epoch": 0.11280431146627021, "grad_norm": 0.47829505801200867, "learning_rate": 0.00019853002896757512, "loss": 1.2685, "step": 607 }, { "epoch": 0.11299015052964133, "grad_norm": 0.5406739711761475, "learning_rate": 0.00019852502864132433, "loss": 1.1095, "step": 608 }, { "epoch": 0.11317598959301245, "grad_norm": 0.46139219403266907, "learning_rate": 0.00019852001988801486, "loss": 1.0732, "step": 609 }, { "epoch": 0.11336182865638357, "grad_norm": 0.47796913981437683, "learning_rate": 
0.00019851500270807518, "loss": 1.184, "step": 610 }, { "epoch": 0.11354766771975469, "grad_norm": 0.4437905251979828, "learning_rate": 0.00019850997710193444, "loss": 1.0998, "step": 611 }, { "epoch": 0.11373350678312581, "grad_norm": 0.48870012164115906, "learning_rate": 0.00019850494307002242, "loss": 1.133, "step": 612 }, { "epoch": 0.11391934584649693, "grad_norm": 0.4321851134300232, "learning_rate": 0.0001984999006127697, "loss": 1.125, "step": 613 }, { "epoch": 0.11410518490986805, "grad_norm": 0.5969387292861938, "learning_rate": 0.00019849484973060764, "loss": 1.3145, "step": 614 }, { "epoch": 0.11429102397323918, "grad_norm": 0.5733935832977295, "learning_rate": 0.00019848979042396816, "loss": 1.2648, "step": 615 }, { "epoch": 0.1144768630366103, "grad_norm": 0.39343923330307007, "learning_rate": 0.0001984847226932841, "loss": 1.0274, "step": 616 }, { "epoch": 0.11466270209998142, "grad_norm": 0.46703800559043884, "learning_rate": 0.0001984796465389888, "loss": 1.4018, "step": 617 }, { "epoch": 0.11484854116335254, "grad_norm": 0.4704144299030304, "learning_rate": 0.00019847456196151652, "loss": 1.087, "step": 618 }, { "epoch": 0.11503438022672366, "grad_norm": 0.5104250311851501, "learning_rate": 0.0001984694689613021, "loss": 1.0647, "step": 619 }, { "epoch": 0.11522021929009478, "grad_norm": 0.47510913014411926, "learning_rate": 0.0001984643675387812, "loss": 0.9258, "step": 620 }, { "epoch": 0.1154060583534659, "grad_norm": 0.5313277840614319, "learning_rate": 0.00019845925769439014, "loss": 1.3115, "step": 621 }, { "epoch": 0.11559189741683702, "grad_norm": 0.42490604519844055, "learning_rate": 0.00019845413942856595, "loss": 0.9596, "step": 622 }, { "epoch": 0.11577773648020814, "grad_norm": 0.40668928623199463, "learning_rate": 0.00019844901274174644, "loss": 0.9975, "step": 623 }, { "epoch": 0.11596357554357926, "grad_norm": 0.44957441091537476, "learning_rate": 0.0001984438776343701, "loss": 0.9959, "step": 624 }, { "epoch": 0.11614941460695039, 
"grad_norm": 0.427727609872818, "learning_rate": 0.0001984387341068761, "loss": 1.0412, "step": 625 }, { "epoch": 0.1163352536703215, "grad_norm": 0.4397459328174591, "learning_rate": 0.00019843358215970445, "loss": 1.2047, "step": 626 }, { "epoch": 0.11652109273369263, "grad_norm": 0.522113561630249, "learning_rate": 0.00019842842179329577, "loss": 0.9631, "step": 627 }, { "epoch": 0.11670693179706375, "grad_norm": 0.4980854094028473, "learning_rate": 0.00019842325300809143, "loss": 1.1708, "step": 628 }, { "epoch": 0.11689277086043487, "grad_norm": 0.4923211634159088, "learning_rate": 0.00019841807580453359, "loss": 1.0419, "step": 629 }, { "epoch": 0.11707860992380599, "grad_norm": 0.38797327876091003, "learning_rate": 0.00019841289018306498, "loss": 0.9371, "step": 630 }, { "epoch": 0.11726444898717711, "grad_norm": 0.37108826637268066, "learning_rate": 0.00019840769614412916, "loss": 0.7634, "step": 631 }, { "epoch": 0.11745028805054822, "grad_norm": 0.44877228140830994, "learning_rate": 0.0001984024936881704, "loss": 0.8744, "step": 632 }, { "epoch": 0.11763612711391934, "grad_norm": 0.5255552530288696, "learning_rate": 0.0001983972828156337, "loss": 1.2011, "step": 633 }, { "epoch": 0.11782196617729046, "grad_norm": 0.46886783838272095, "learning_rate": 0.00019839206352696476, "loss": 1.2743, "step": 634 }, { "epoch": 0.11800780524066158, "grad_norm": 0.6389307379722595, "learning_rate": 0.00019838683582260993, "loss": 1.2846, "step": 635 }, { "epoch": 0.1181936443040327, "grad_norm": 0.39743301272392273, "learning_rate": 0.0001983815997030164, "loss": 0.865, "step": 636 }, { "epoch": 0.11837948336740382, "grad_norm": 0.5184935331344604, "learning_rate": 0.00019837635516863205, "loss": 1.0444, "step": 637 }, { "epoch": 0.11856532243077494, "grad_norm": 0.4453527629375458, "learning_rate": 0.0001983711022199054, "loss": 0.9113, "step": 638 }, { "epoch": 0.11875116149414607, "grad_norm": null, "learning_rate": 0.0001983711022199054, "loss": 5.2668, "step": 639 
}, { "epoch": 0.11893700055751719, "grad_norm": 0.5004279017448425, "learning_rate": 0.00019836584085728575, "loss": 0.9127, "step": 640 }, { "epoch": 0.11912283962088831, "grad_norm": 0.5233067274093628, "learning_rate": 0.00019836057108122316, "loss": 1.1963, "step": 641 }, { "epoch": 0.11930867868425943, "grad_norm": 0.5132344961166382, "learning_rate": 0.00019835529289216833, "loss": 1.169, "step": 642 }, { "epoch": 0.11949451774763055, "grad_norm": 0.3598637282848358, "learning_rate": 0.00019835000629057271, "loss": 0.5946, "step": 643 }, { "epoch": 0.11968035681100167, "grad_norm": 0.5927154421806335, "learning_rate": 0.0001983447112768885, "loss": 1.1492, "step": 644 }, { "epoch": 0.11986619587437279, "grad_norm": 0.4621802568435669, "learning_rate": 0.00019833940785156858, "loss": 1.1075, "step": 645 }, { "epoch": 0.12005203493774391, "grad_norm": 0.5019433498382568, "learning_rate": 0.00019833409601506657, "loss": 1.1041, "step": 646 }, { "epoch": 0.12023787400111503, "grad_norm": 0.46227413415908813, "learning_rate": 0.00019832877576783683, "loss": 1.2795, "step": 647 }, { "epoch": 0.12042371306448615, "grad_norm": 0.6081055998802185, "learning_rate": 0.00019832344711033436, "loss": 1.297, "step": 648 }, { "epoch": 0.12060955212785728, "grad_norm": 0.457120418548584, "learning_rate": 0.00019831811004301494, "loss": 1.0885, "step": 649 }, { "epoch": 0.1207953911912284, "grad_norm": 0.33518850803375244, "learning_rate": 0.0001983127645663351, "loss": 0.8171, "step": 650 }, { "epoch": 0.12098123025459952, "grad_norm": 0.55155348777771, "learning_rate": 0.00019830741068075202, "loss": 1.4775, "step": 651 }, { "epoch": 0.12116706931797064, "grad_norm": 0.44404229521751404, "learning_rate": 0.0001983020483867236, "loss": 1.2144, "step": 652 }, { "epoch": 0.12135290838134176, "grad_norm": 0.4665181338787079, "learning_rate": 0.00019829667768470858, "loss": 1.1258, "step": 653 }, { "epoch": 0.12153874744471288, "grad_norm": 0.4867573380470276, "learning_rate": 
0.0001982912985751662, "loss": 1.1113, "step": 654 }, { "epoch": 0.121724586508084, "grad_norm": 0.4458369016647339, "learning_rate": 0.00019828591105855667, "loss": 1.1641, "step": 655 }, { "epoch": 0.12191042557145512, "grad_norm": 0.3621409833431244, "learning_rate": 0.00019828051513534075, "loss": 0.8111, "step": 656 }, { "epoch": 0.12209626463482624, "grad_norm": 0.4944764971733093, "learning_rate": 0.00019827511080597993, "loss": 1.1098, "step": 657 }, { "epoch": 0.12228210369819736, "grad_norm": 0.4163343608379364, "learning_rate": 0.00019826969807093647, "loss": 1.0126, "step": 658 }, { "epoch": 0.12246794276156849, "grad_norm": 0.43734899163246155, "learning_rate": 0.00019826427693067338, "loss": 1.0961, "step": 659 }, { "epoch": 0.1226537818249396, "grad_norm": 0.5198599100112915, "learning_rate": 0.00019825884738565428, "loss": 1.0699, "step": 660 }, { "epoch": 0.12283962088831073, "grad_norm": 0.548116147518158, "learning_rate": 0.0001982534094363436, "loss": 1.2081, "step": 661 }, { "epoch": 0.12302545995168185, "grad_norm": 0.526623010635376, "learning_rate": 0.0001982479630832065, "loss": 1.2044, "step": 662 }, { "epoch": 0.12321129901505297, "grad_norm": 0.4783700108528137, "learning_rate": 0.00019824250832670872, "loss": 1.3622, "step": 663 }, { "epoch": 0.12339713807842409, "grad_norm": 0.5150986313819885, "learning_rate": 0.00019823704516731691, "loss": 0.9491, "step": 664 }, { "epoch": 0.12358297714179521, "grad_norm": 0.3757990896701813, "learning_rate": 0.0001982315736054983, "loss": 0.8711, "step": 665 }, { "epoch": 0.12376881620516633, "grad_norm": 0.599030077457428, "learning_rate": 0.0001982260936417209, "loss": 0.9816, "step": 666 }, { "epoch": 0.12395465526853745, "grad_norm": 0.5053818821907043, "learning_rate": 0.0001982206052764534, "loss": 0.8597, "step": 667 }, { "epoch": 0.12414049433190857, "grad_norm": 0.46872928738594055, "learning_rate": 0.00019821510851016525, "loss": 1.1802, "step": 668 }, { "epoch": 0.12432633339527968, 
"grad_norm": 0.4599286913871765, "learning_rate": 0.00019820960334332658, "loss": 1.0911, "step": 669 }, { "epoch": 0.1245121724586508, "grad_norm": 0.564627468585968, "learning_rate": 0.00019820408977640831, "loss": 1.0628, "step": 670 }, { "epoch": 0.12469801152202192, "grad_norm": 0.5068032741546631, "learning_rate": 0.00019819856780988197, "loss": 1.3099, "step": 671 }, { "epoch": 0.12488385058539304, "grad_norm": 0.5990276336669922, "learning_rate": 0.00019819303744421992, "loss": 1.1733, "step": 672 }, { "epoch": 0.12506968964876417, "grad_norm": 0.4374839663505554, "learning_rate": 0.00019818749867989515, "loss": 1.1439, "step": 673 }, { "epoch": 0.1252555287121353, "grad_norm": 0.511102557182312, "learning_rate": 0.00019818195151738137, "loss": 1.1636, "step": 674 }, { "epoch": 0.1254413677755064, "grad_norm": 0.5559630990028381, "learning_rate": 0.00019817639595715313, "loss": 1.1273, "step": 675 }, { "epoch": 0.12562720683887754, "grad_norm": 0.4792366027832031, "learning_rate": 0.00019817083199968552, "loss": 1.1224, "step": 676 }, { "epoch": 0.12581304590224865, "grad_norm": 0.46028703451156616, "learning_rate": 0.00019816525964545448, "loss": 0.9323, "step": 677 }, { "epoch": 0.12599888496561978, "grad_norm": 0.5629606246948242, "learning_rate": 0.0001981596788949366, "loss": 1.2004, "step": 678 }, { "epoch": 0.1261847240289909, "grad_norm": 0.5521604418754578, "learning_rate": 0.00019815408974860926, "loss": 1.0562, "step": 679 }, { "epoch": 0.12637056309236203, "grad_norm": 0.41555026173591614, "learning_rate": 0.00019814849220695048, "loss": 0.9565, "step": 680 }, { "epoch": 0.12655640215573313, "grad_norm": 0.5460541248321533, "learning_rate": 0.00019814288627043904, "loss": 1.113, "step": 681 }, { "epoch": 0.12674224121910427, "grad_norm": 0.4217720031738281, "learning_rate": 0.0001981372719395544, "loss": 1.1451, "step": 682 }, { "epoch": 0.12692808028247538, "grad_norm": 0.4192173182964325, "learning_rate": 0.00019813164921477682, "loss": 
1.1911, "step": 683 }, { "epoch": 0.1271139193458465, "grad_norm": 0.5131157636642456, "learning_rate": 0.00019812601809658718, "loss": 1.2259, "step": 684 }, { "epoch": 0.12729975840921762, "grad_norm": 0.42514634132385254, "learning_rate": 0.00019812037858546713, "loss": 1.2219, "step": 685 }, { "epoch": 0.12748559747258875, "grad_norm": 0.4878459572792053, "learning_rate": 0.00019811473068189904, "loss": 1.2935, "step": 686 }, { "epoch": 0.12767143653595986, "grad_norm": 0.42948704957962036, "learning_rate": 0.00019810907438636596, "loss": 1.0748, "step": 687 }, { "epoch": 0.12785727559933097, "grad_norm": 0.44495806097984314, "learning_rate": 0.00019810340969935174, "loss": 1.2191, "step": 688 }, { "epoch": 0.1280431146627021, "grad_norm": 0.35490602254867554, "learning_rate": 0.00019809773662134083, "loss": 0.7925, "step": 689 }, { "epoch": 0.1282289537260732, "grad_norm": 0.5111147165298462, "learning_rate": 0.0001980920551528185, "loss": 1.352, "step": 690 }, { "epoch": 0.12841479278944434, "grad_norm": 0.5153658390045166, "learning_rate": 0.0001980863652942707, "loss": 1.2032, "step": 691 }, { "epoch": 0.12860063185281545, "grad_norm": 0.47706758975982666, "learning_rate": 0.00019808066704618408, "loss": 0.8932, "step": 692 }, { "epoch": 0.12878647091618659, "grad_norm": 0.5452196598052979, "learning_rate": 0.000198074960409046, "loss": 1.1013, "step": 693 }, { "epoch": 0.1289723099795577, "grad_norm": 0.43593546748161316, "learning_rate": 0.0001980692453833446, "loss": 0.9039, "step": 694 }, { "epoch": 0.12915814904292883, "grad_norm": 0.4466397166252136, "learning_rate": 0.0001980635219695687, "loss": 1.206, "step": 695 }, { "epoch": 0.12934398810629993, "grad_norm": 0.4870753884315491, "learning_rate": 0.00019805779016820782, "loss": 1.0871, "step": 696 }, { "epoch": 0.12952982716967107, "grad_norm": 0.43623775243759155, "learning_rate": 0.0001980520499797522, "loss": 1.1557, "step": 697 }, { "epoch": 0.12971566623304218, "grad_norm": 0.443454384803772, 
"learning_rate": 0.00019804630140469285, "loss": 1.2333, "step": 698 }, { "epoch": 0.1299015052964133, "grad_norm": 0.5000869631767273, "learning_rate": 0.00019804054444352143, "loss": 1.0621, "step": 699 }, { "epoch": 0.13008734435978442, "grad_norm": 0.5300038456916809, "learning_rate": 0.00019803477909673034, "loss": 1.2468, "step": 700 }, { "epoch": 0.13027318342315555, "grad_norm": 0.45927757024765015, "learning_rate": 0.00019802900536481272, "loss": 1.1653, "step": 701 }, { "epoch": 0.13045902248652666, "grad_norm": 0.5365282893180847, "learning_rate": 0.0001980232232482624, "loss": 1.1903, "step": 702 }, { "epoch": 0.1306448615498978, "grad_norm": 0.5043763518333435, "learning_rate": 0.00019801743274757395, "loss": 0.9898, "step": 703 }, { "epoch": 0.1308307006132689, "grad_norm": 0.4623168706893921, "learning_rate": 0.00019801163386324263, "loss": 1.0392, "step": 704 }, { "epoch": 0.13101653967664004, "grad_norm": 0.6235971450805664, "learning_rate": 0.0001980058265957644, "loss": 1.1318, "step": 705 }, { "epoch": 0.13120237874001114, "grad_norm": 0.47475048899650574, "learning_rate": 0.00019800001094563604, "loss": 0.9881, "step": 706 }, { "epoch": 0.13138821780338228, "grad_norm": 0.579794704914093, "learning_rate": 0.00019799418691335494, "loss": 1.0279, "step": 707 }, { "epoch": 0.13157405686675339, "grad_norm": 0.48420289158821106, "learning_rate": 0.00019798835449941923, "loss": 1.1624, "step": 708 }, { "epoch": 0.13175989593012452, "grad_norm": 0.49966883659362793, "learning_rate": 0.00019798251370432778, "loss": 1.2414, "step": 709 }, { "epoch": 0.13194573499349563, "grad_norm": 0.5556090474128723, "learning_rate": 0.00019797666452858016, "loss": 1.1312, "step": 710 }, { "epoch": 0.13213157405686676, "grad_norm": 0.474415123462677, "learning_rate": 0.00019797080697267667, "loss": 1.0425, "step": 711 }, { "epoch": 0.13231741312023787, "grad_norm": 0.4529750347137451, "learning_rate": 0.00019796494103711834, "loss": 1.1246, "step": 712 }, { "epoch": 
0.132503252183609, "grad_norm": 0.49492666125297546, "learning_rate": 0.00019795906672240686, "loss": 1.2322, "step": 713 }, { "epoch": 0.1326890912469801, "grad_norm": 0.6376146674156189, "learning_rate": 0.00019795318402904467, "loss": 1.0772, "step": 714 }, { "epoch": 0.13287493031035125, "grad_norm": 0.4275801479816437, "learning_rate": 0.00019794729295753496, "loss": 1.2186, "step": 715 }, { "epoch": 0.13306076937372235, "grad_norm": 0.4138648211956024, "learning_rate": 0.00019794139350838157, "loss": 1.0326, "step": 716 }, { "epoch": 0.1332466084370935, "grad_norm": 0.4423656761646271, "learning_rate": 0.0001979354856820891, "loss": 1.2348, "step": 717 }, { "epoch": 0.1334324475004646, "grad_norm": 0.4263252019882202, "learning_rate": 0.00019792956947916292, "loss": 1.2048, "step": 718 }, { "epoch": 0.13361828656383573, "grad_norm": 0.4504089951515198, "learning_rate": 0.000197923644900109, "loss": 0.9089, "step": 719 }, { "epoch": 0.13380412562720684, "grad_norm": 0.5284350514411926, "learning_rate": 0.00019791771194543407, "loss": 1.0991, "step": 720 }, { "epoch": 0.13398996469057797, "grad_norm": 0.5844881534576416, "learning_rate": 0.00019791177061564562, "loss": 1.2395, "step": 721 }, { "epoch": 0.13417580375394908, "grad_norm": 0.5521482825279236, "learning_rate": 0.00019790582091125183, "loss": 1.1193, "step": 722 }, { "epoch": 0.1343616428173202, "grad_norm": 0.6519663333892822, "learning_rate": 0.00019789986283276156, "loss": 1.0444, "step": 723 }, { "epoch": 0.13454748188069132, "grad_norm": 0.5339438915252686, "learning_rate": 0.0001978938963806844, "loss": 1.235, "step": 724 }, { "epoch": 0.13473332094406243, "grad_norm": 0.5313912630081177, "learning_rate": 0.00019788792155553075, "loss": 1.1505, "step": 725 }, { "epoch": 0.13491916000743356, "grad_norm": 0.49416816234588623, "learning_rate": 0.0001978819383578116, "loss": 0.8922, "step": 726 }, { "epoch": 0.13510499907080467, "grad_norm": 0.4863441586494446, "learning_rate": 
0.0001978759467880387, "loss": 1.0582, "step": 727 }, { "epoch": 0.1352908381341758, "grad_norm": 0.501708984375, "learning_rate": 0.00019786994684672452, "loss": 1.205, "step": 728 }, { "epoch": 0.1354766771975469, "grad_norm": 0.5261609554290771, "learning_rate": 0.0001978639385343823, "loss": 1.1186, "step": 729 }, { "epoch": 0.13566251626091805, "grad_norm": 0.4654827415943146, "learning_rate": 0.00019785792185152587, "loss": 1.1819, "step": 730 }, { "epoch": 0.13584835532428916, "grad_norm": 0.42379230260849, "learning_rate": 0.0001978518967986699, "loss": 1.1737, "step": 731 }, { "epoch": 0.1360341943876603, "grad_norm": 0.5935065746307373, "learning_rate": 0.00019784586337632973, "loss": 1.1976, "step": 732 }, { "epoch": 0.1362200334510314, "grad_norm": 0.4229237139225006, "learning_rate": 0.00019783982158502137, "loss": 1.149, "step": 733 }, { "epoch": 0.13640587251440253, "grad_norm": 0.4713400900363922, "learning_rate": 0.0001978337714252616, "loss": 1.2643, "step": 734 }, { "epoch": 0.13659171157777364, "grad_norm": 0.49922773241996765, "learning_rate": 0.00019782771289756794, "loss": 1.2761, "step": 735 }, { "epoch": 0.13677755064114477, "grad_norm": 0.4869403541088104, "learning_rate": 0.00019782164600245855, "loss": 0.9267, "step": 736 }, { "epoch": 0.13696338970451588, "grad_norm": 0.40278908610343933, "learning_rate": 0.00019781557074045235, "loss": 1.0307, "step": 737 }, { "epoch": 0.13714922876788702, "grad_norm": 0.4315333962440491, "learning_rate": 0.00019780948711206899, "loss": 1.1248, "step": 738 }, { "epoch": 0.13733506783125812, "grad_norm": 0.5250226259231567, "learning_rate": 0.0001978033951178288, "loss": 1.412, "step": 739 }, { "epoch": 0.13752090689462926, "grad_norm": 0.45913055539131165, "learning_rate": 0.00019779729475825284, "loss": 1.361, "step": 740 }, { "epoch": 0.13770674595800036, "grad_norm": 0.49358081817626953, "learning_rate": 0.0001977911860338629, "loss": 1.1859, "step": 741 }, { "epoch": 0.1378925850213715, 
"grad_norm": 0.4311683177947998, "learning_rate": 0.00019778506894518145, "loss": 1.3164, "step": 742 }, { "epoch": 0.1380784240847426, "grad_norm": 0.47601014375686646, "learning_rate": 0.0001977789434927317, "loss": 0.8034, "step": 743 }, { "epoch": 0.13826426314811374, "grad_norm": 0.5088006258010864, "learning_rate": 0.0001977728096770376, "loss": 1.1709, "step": 744 }, { "epoch": 0.13845010221148485, "grad_norm": 0.5519676804542542, "learning_rate": 0.0001977666674986238, "loss": 1.2942, "step": 745 }, { "epoch": 0.13863594127485598, "grad_norm": 0.440140038728714, "learning_rate": 0.0001977605169580156, "loss": 0.8967, "step": 746 }, { "epoch": 0.1388217803382271, "grad_norm": 0.3971608877182007, "learning_rate": 0.0001977543580557391, "loss": 0.708, "step": 747 }, { "epoch": 0.13900761940159823, "grad_norm": 0.42031824588775635, "learning_rate": 0.0001977481907923211, "loss": 0.92, "step": 748 }, { "epoch": 0.13919345846496933, "grad_norm": 0.49634236097335815, "learning_rate": 0.00019774201516828906, "loss": 0.9919, "step": 749 }, { "epoch": 0.13937929752834047, "grad_norm": 0.45243629813194275, "learning_rate": 0.0001977358311841712, "loss": 1.0909, "step": 750 }, { "epoch": 0.13956513659171157, "grad_norm": 0.4697221517562866, "learning_rate": 0.00019772963884049648, "loss": 1.0697, "step": 751 }, { "epoch": 0.1397509756550827, "grad_norm": 0.5083733797073364, "learning_rate": 0.00019772343813779454, "loss": 0.9514, "step": 752 }, { "epoch": 0.13993681471845382, "grad_norm": 0.43233105540275574, "learning_rate": 0.00019771722907659572, "loss": 1.0183, "step": 753 }, { "epoch": 0.14012265378182495, "grad_norm": 0.6939888596534729, "learning_rate": 0.0001977110116574311, "loss": 1.2529, "step": 754 }, { "epoch": 0.14030849284519606, "grad_norm": 0.4073755145072937, "learning_rate": 0.00019770478588083249, "loss": 0.9524, "step": 755 }, { "epoch": 0.1404943319085672, "grad_norm": 0.43617358803749084, "learning_rate": 0.00019769855174733234, "loss": 1.1203, 
"step": 756 }, { "epoch": 0.1406801709719383, "grad_norm": 0.3306081295013428, "learning_rate": 0.00019769230925746392, "loss": 0.438, "step": 757 }, { "epoch": 0.14086601003530944, "grad_norm": 0.4973095953464508, "learning_rate": 0.00019768605841176116, "loss": 1.0405, "step": 758 }, { "epoch": 0.14105184909868054, "grad_norm": 0.4343370497226715, "learning_rate": 0.00019767979921075866, "loss": 1.2194, "step": 759 }, { "epoch": 0.14123768816205165, "grad_norm": 0.5016842484474182, "learning_rate": 0.00019767353165499186, "loss": 1.2143, "step": 760 }, { "epoch": 0.14142352722542278, "grad_norm": 0.563551127910614, "learning_rate": 0.00019766725574499677, "loss": 1.4206, "step": 761 }, { "epoch": 0.1416093662887939, "grad_norm": 0.33113259077072144, "learning_rate": 0.00019766097148131025, "loss": 0.872, "step": 762 }, { "epoch": 0.14179520535216503, "grad_norm": 0.4594592750072479, "learning_rate": 0.0001976546788644697, "loss": 0.9665, "step": 763 }, { "epoch": 0.14198104441553613, "grad_norm": 0.37534263730049133, "learning_rate": 0.00019764837789501346, "loss": 0.8677, "step": 764 }, { "epoch": 0.14216688347890727, "grad_norm": 0.45439133048057556, "learning_rate": 0.0001976420685734804, "loss": 1.1933, "step": 765 }, { "epoch": 0.14235272254227838, "grad_norm": 0.5969803929328918, "learning_rate": 0.00019763575090041017, "loss": 1.356, "step": 766 }, { "epoch": 0.1425385616056495, "grad_norm": 0.49011388421058655, "learning_rate": 0.00019762942487634315, "loss": 1.1676, "step": 767 }, { "epoch": 0.14272440066902062, "grad_norm": 0.4911096692085266, "learning_rate": 0.00019762309050182044, "loss": 0.9579, "step": 768 }, { "epoch": 0.14291023973239175, "grad_norm": 0.5238345861434937, "learning_rate": 0.00019761674777738378, "loss": 1.2124, "step": 769 }, { "epoch": 0.14309607879576286, "grad_norm": 0.5594388842582703, "learning_rate": 0.00019761039670357572, "loss": 1.2953, "step": 770 }, { "epoch": 0.143281917859134, "grad_norm": 0.43093204498291016, 
"learning_rate": 0.00019760403728093947, "loss": 1.1205, "step": 771 }, { "epoch": 0.1434677569225051, "grad_norm": 0.537513256072998, "learning_rate": 0.00019759766951001897, "loss": 1.0663, "step": 772 }, { "epoch": 0.14365359598587624, "grad_norm": 0.39102646708488464, "learning_rate": 0.00019759129339135881, "loss": 1.0509, "step": 773 }, { "epoch": 0.14383943504924734, "grad_norm": 0.6008369326591492, "learning_rate": 0.00019758490892550447, "loss": 1.2021, "step": 774 }, { "epoch": 0.14402527411261848, "grad_norm": 0.4833708703517914, "learning_rate": 0.0001975785161130019, "loss": 1.1288, "step": 775 }, { "epoch": 0.14421111317598959, "grad_norm": 0.5020431280136108, "learning_rate": 0.000197572114954398, "loss": 1.2715, "step": 776 }, { "epoch": 0.14439695223936072, "grad_norm": 0.45390018820762634, "learning_rate": 0.0001975657054502402, "loss": 0.9566, "step": 777 }, { "epoch": 0.14458279130273183, "grad_norm": 0.4128166437149048, "learning_rate": 0.00019755928760107675, "loss": 1.0689, "step": 778 }, { "epoch": 0.14476863036610296, "grad_norm": 0.4302813410758972, "learning_rate": 0.0001975528614074566, "loss": 1.1927, "step": 779 }, { "epoch": 0.14495446942947407, "grad_norm": 0.45130646228790283, "learning_rate": 0.00019754642686992935, "loss": 1.1991, "step": 780 }, { "epoch": 0.1451403084928452, "grad_norm": 0.4731914699077606, "learning_rate": 0.00019753998398904538, "loss": 1.175, "step": 781 }, { "epoch": 0.1453261475562163, "grad_norm": 0.6347445249557495, "learning_rate": 0.00019753353276535577, "loss": 1.1229, "step": 782 }, { "epoch": 0.14551198661958745, "grad_norm": 0.495601624250412, "learning_rate": 0.00019752707319941232, "loss": 0.9881, "step": 783 }, { "epoch": 0.14569782568295855, "grad_norm": 0.5375710129737854, "learning_rate": 0.00019752060529176752, "loss": 1.1683, "step": 784 }, { "epoch": 0.1458836647463297, "grad_norm": 0.454044371843338, "learning_rate": 0.00019751412904297455, "loss": 0.8788, "step": 785 }, { "epoch": 
0.1460695038097008, "grad_norm": 0.4273867905139923, "learning_rate": 0.0001975076444535874, "loss": 1.1718, "step": 786 }, { "epoch": 0.14625534287307193, "grad_norm": 0.4461952745914459, "learning_rate": 0.00019750115152416067, "loss": 1.179, "step": 787 }, { "epoch": 0.14644118193644304, "grad_norm": 0.5559700727462769, "learning_rate": 0.0001974946502552497, "loss": 1.3204, "step": 788 }, { "epoch": 0.14662702099981417, "grad_norm": null, "learning_rate": 0.0001974946502552497, "loss": 5.4482, "step": 789 }, { "epoch": 0.14681286006318528, "grad_norm": 0.5265190601348877, "learning_rate": 0.00019748814064741058, "loss": 1.0285, "step": 790 }, { "epoch": 0.14699869912655641, "grad_norm": 0.524591326713562, "learning_rate": 0.00019748162270120012, "loss": 1.0945, "step": 791 }, { "epoch": 0.14718453818992752, "grad_norm": 0.4927036762237549, "learning_rate": 0.0001974750964171758, "loss": 1.171, "step": 792 }, { "epoch": 0.14737037725329866, "grad_norm": 0.5644413232803345, "learning_rate": 0.00019746856179589573, "loss": 1.2689, "step": 793 }, { "epoch": 0.14755621631666976, "grad_norm": 0.5673896670341492, "learning_rate": 0.000197462018837919, "loss": 1.3814, "step": 794 }, { "epoch": 0.14774205538004087, "grad_norm": 0.461016446352005, "learning_rate": 0.00019745546754380513, "loss": 1.3054, "step": 795 }, { "epoch": 0.147927894443412, "grad_norm": 0.49385157227516174, "learning_rate": 0.00019744890791411447, "loss": 1.0074, "step": 796 }, { "epoch": 0.1481137335067831, "grad_norm": 0.43251967430114746, "learning_rate": 0.0001974423399494081, "loss": 0.9248, "step": 797 }, { "epoch": 0.14829957257015425, "grad_norm": 0.5019454956054688, "learning_rate": 0.00019743576365024784, "loss": 1.089, "step": 798 }, { "epoch": 0.14848541163352535, "grad_norm": 0.49163103103637695, "learning_rate": 0.00019742917901719608, "loss": 1.0207, "step": 799 }, { "epoch": 0.1486712506968965, "grad_norm": 0.5061166882514954, "learning_rate": 0.00019742258605081608, "loss": 1.0134, 
"step": 800 }, { "epoch": 0.1488570897602676, "grad_norm": 0.5079135298728943, "learning_rate": 0.00019741598475167175, "loss": 1.2507, "step": 801 }, { "epoch": 0.14904292882363873, "grad_norm": 0.4781422019004822, "learning_rate": 0.00019740937512032768, "loss": 1.1281, "step": 802 }, { "epoch": 0.14922876788700984, "grad_norm": 0.45758572220802307, "learning_rate": 0.00019740275715734925, "loss": 1.044, "step": 803 }, { "epoch": 0.14941460695038097, "grad_norm": 0.4596036970615387, "learning_rate": 0.00019739613086330247, "loss": 0.9013, "step": 804 }, { "epoch": 0.14960044601375208, "grad_norm": 0.4866902232170105, "learning_rate": 0.00019738949623875413, "loss": 0.9927, "step": 805 }, { "epoch": 0.14978628507712322, "grad_norm": 0.41937676072120667, "learning_rate": 0.00019738285328427165, "loss": 0.9932, "step": 806 }, { "epoch": 0.14997212414049432, "grad_norm": 0.4618746340274811, "learning_rate": 0.00019737620200042328, "loss": 0.9386, "step": 807 }, { "epoch": 0.15015796320386546, "grad_norm": 0.44216638803482056, "learning_rate": 0.00019736954238777792, "loss": 0.9385, "step": 808 }, { "epoch": 0.15034380226723656, "grad_norm": 0.49641454219818115, "learning_rate": 0.00019736287444690513, "loss": 1.0367, "step": 809 }, { "epoch": 0.1505296413306077, "grad_norm": 0.5560820698738098, "learning_rate": 0.00019735619817837528, "loss": 1.3052, "step": 810 }, { "epoch": 0.1507154803939788, "grad_norm": 0.6007441282272339, "learning_rate": 0.00019734951358275934, "loss": 1.3654, "step": 811 }, { "epoch": 0.15090131945734994, "grad_norm": 0.47275739908218384, "learning_rate": 0.00019734282066062914, "loss": 1.3, "step": 812 }, { "epoch": 0.15108715852072105, "grad_norm": 0.43406975269317627, "learning_rate": 0.00019733611941255712, "loss": 1.2628, "step": 813 }, { "epoch": 0.15127299758409218, "grad_norm": 0.5152772665023804, "learning_rate": 0.00019732940983911641, "loss": 1.2358, "step": 814 }, { "epoch": 0.1514588366474633, "grad_norm": 0.4792425036430359, 
"learning_rate": 0.00019732269194088091, "loss": 1.1774, "step": 815 }, { "epoch": 0.15164467571083443, "grad_norm": 0.46853065490722656, "learning_rate": 0.00019731596571842525, "loss": 1.3217, "step": 816 }, { "epoch": 0.15183051477420553, "grad_norm": 0.394144207239151, "learning_rate": 0.00019730923117232472, "loss": 0.9721, "step": 817 }, { "epoch": 0.15201635383757667, "grad_norm": 0.5005031228065491, "learning_rate": 0.0001973024883031553, "loss": 1.0645, "step": 818 }, { "epoch": 0.15220219290094777, "grad_norm": 0.5808000564575195, "learning_rate": 0.0001972957371114938, "loss": 0.9737, "step": 819 }, { "epoch": 0.1523880319643189, "grad_norm": 0.5996263027191162, "learning_rate": 0.0001972889775979176, "loss": 1.011, "step": 820 }, { "epoch": 0.15257387102769002, "grad_norm": NaN, "learning_rate": 0.0001972889775979176, "loss": 5.2029, "step": 821 }, { "epoch": 0.15275971009106115, "grad_norm": 0.5036181211471558, "learning_rate": 0.00019728220976300486, "loss": 1.1093, "step": 822 }, { "epoch": 0.15294554915443226, "grad_norm": 0.5951871275901794, "learning_rate": 0.00019727543360733448, "loss": 1.23, "step": 823 }, { "epoch": 0.1531313882178034, "grad_norm": 0.5592852830886841, "learning_rate": 0.00019726864913148604, "loss": 1.2979, "step": 824 }, { "epoch": 0.1533172272811745, "grad_norm": 0.4100774824619293, "learning_rate": 0.00019726185633603976, "loss": 1.0589, "step": 825 }, { "epoch": 0.15350306634454564, "grad_norm": 0.5126859545707703, "learning_rate": 0.00019725505522157676, "loss": 0.9879, "step": 826 }, { "epoch": 0.15368890540791674, "grad_norm": 0.49354124069213867, "learning_rate": 0.00019724824578867865, "loss": 1.0719, "step": 827 }, { "epoch": 0.15387474447128788, "grad_norm": 0.39361080527305603, "learning_rate": 0.00019724142803792793, "loss": 1.0554, "step": 828 }, { "epoch": 0.15406058353465898, "grad_norm": 0.48714399337768555, "learning_rate": 0.0001972346019699077, "loss": 1.1068, "step": 829 }, { "epoch": 0.15424642259803012, 
"grad_norm": 0.6365624666213989, "learning_rate": 0.0001972277675852018, "loss": 0.7661, "step": 830 }, { "epoch": 0.15443226166140123, "grad_norm": 0.4671623706817627, "learning_rate": 0.00019722092488439478, "loss": 1.1978, "step": 831 }, { "epoch": 0.15461810072477233, "grad_norm": 0.4496230483055115, "learning_rate": 0.00019721407386807195, "loss": 0.978, "step": 832 }, { "epoch": 0.15480393978814347, "grad_norm": 0.568945050239563, "learning_rate": 0.0001972072145368193, "loss": 1.3419, "step": 833 }, { "epoch": 0.15498977885151458, "grad_norm": 0.425636351108551, "learning_rate": 0.00019720034689122345, "loss": 1.1048, "step": 834 }, { "epoch": 0.1551756179148857, "grad_norm": 0.6280021071434021, "learning_rate": 0.00019719347093187192, "loss": 1.289, "step": 835 }, { "epoch": 0.15536145697825682, "grad_norm": 0.49712350964546204, "learning_rate": 0.0001971865866593527, "loss": 1.1054, "step": 836 }, { "epoch": 0.15554729604162795, "grad_norm": 0.5043768882751465, "learning_rate": 0.00019717969407425468, "loss": 0.9071, "step": 837 }, { "epoch": 0.15573313510499906, "grad_norm": 0.5186277627944946, "learning_rate": 0.0001971727931771674, "loss": 1.2653, "step": 838 }, { "epoch": 0.1559189741683702, "grad_norm": 0.4321137070655823, "learning_rate": 0.0001971658839686811, "loss": 1.1057, "step": 839 }, { "epoch": 0.1561048132317413, "grad_norm": 64.90776824951172, "learning_rate": 0.00019715896644938676, "loss": 6.1631, "step": 840 }, { "epoch": 0.15629065229511244, "grad_norm": 0.4284045696258545, "learning_rate": 0.00019715204061987603, "loss": 0.9935, "step": 841 }, { "epoch": 0.15647649135848354, "grad_norm": 0.529630184173584, "learning_rate": 0.00019714510648074127, "loss": 0.8112, "step": 842 }, { "epoch": 0.15666233042185468, "grad_norm": 0.5717606544494629, "learning_rate": 0.0001971381640325756, "loss": 1.2969, "step": 843 }, { "epoch": 0.15684816948522579, "grad_norm": 0.4687287211418152, "learning_rate": 0.00019713121327597283, "loss": 0.9925, 
"step": 844 }, { "epoch": 0.15703400854859692, "grad_norm": 0.5366423726081848, "learning_rate": 0.00019712425421152747, "loss": 1.419, "step": 845 }, { "epoch": 0.15721984761196803, "grad_norm": 0.6493159532546997, "learning_rate": 0.0001971172868398347, "loss": 1.3085, "step": 846 }, { "epoch": 0.15740568667533916, "grad_norm": 0.5041580200195312, "learning_rate": 0.00019711031116149052, "loss": 1.0164, "step": 847 }, { "epoch": 0.15759152573871027, "grad_norm": 0.46504315733909607, "learning_rate": 0.00019710332717709153, "loss": 1.2274, "step": 848 }, { "epoch": 0.1577773648020814, "grad_norm": 0.5412839651107788, "learning_rate": 0.0001970963348872351, "loss": 0.9564, "step": 849 }, { "epoch": 0.1579632038654525, "grad_norm": 0.5719959735870361, "learning_rate": 0.00019708933429251927, "loss": 1.0234, "step": 850 }, { "epoch": 0.15814904292882365, "grad_norm": 0.42526480555534363, "learning_rate": 0.00019708232539354287, "loss": 1.0373, "step": 851 }, { "epoch": 0.15833488199219475, "grad_norm": 0.5555219650268555, "learning_rate": 0.00019707530819090537, "loss": 1.1596, "step": 852 }, { "epoch": 0.1585207210555659, "grad_norm": 0.4331360161304474, "learning_rate": 0.00019706828268520693, "loss": 1.0561, "step": 853 }, { "epoch": 0.158706560118937, "grad_norm": 0.44076040387153625, "learning_rate": 0.00019706124887704848, "loss": 0.9639, "step": 854 }, { "epoch": 0.15889239918230813, "grad_norm": 0.6320684552192688, "learning_rate": 0.0001970542067670316, "loss": 0.9481, "step": 855 }, { "epoch": 0.15907823824567924, "grad_norm": 0.43688324093818665, "learning_rate": 0.00019704715635575872, "loss": 0.9687, "step": 856 }, { "epoch": 0.15926407730905037, "grad_norm": 0.37447649240493774, "learning_rate": 0.00019704009764383277, "loss": 0.9623, "step": 857 }, { "epoch": 0.15944991637242148, "grad_norm": 0.5964031219482422, "learning_rate": 0.00019703303063185756, "loss": 0.918, "step": 858 }, { "epoch": 0.15963575543579261, "grad_norm": 0.502717137336731, 
"learning_rate": 0.00019702595532043754, "loss": 1.0252, "step": 859 }, { "epoch": 0.15982159449916372, "grad_norm": 0.4272380471229553, "learning_rate": 0.0001970188717101778, "loss": 0.9805, "step": 860 }, { "epoch": 0.16000743356253486, "grad_norm": 0.453313946723938, "learning_rate": 0.00019701177980168433, "loss": 1.033, "step": 861 }, { "epoch": 0.16019327262590596, "grad_norm": 0.5758691430091858, "learning_rate": 0.00019700467959556363, "loss": 1.182, "step": 862 }, { "epoch": 0.1603791116892771, "grad_norm": 0.48800668120384216, "learning_rate": 0.00019699757109242305, "loss": 0.9889, "step": 863 }, { "epoch": 0.1605649507526482, "grad_norm": 0.594584047794342, "learning_rate": 0.00019699045429287057, "loss": 1.3348, "step": 864 }, { "epoch": 0.16075078981601934, "grad_norm": 0.47926321625709534, "learning_rate": 0.0001969833291975149, "loss": 1.0722, "step": 865 }, { "epoch": 0.16093662887939045, "grad_norm": 0.4536885619163513, "learning_rate": 0.0001969761958069655, "loss": 1.0953, "step": 866 }, { "epoch": 0.16112246794276155, "grad_norm": 0.5602397322654724, "learning_rate": 0.00019696905412183247, "loss": 1.0182, "step": 867 }, { "epoch": 0.1613083070061327, "grad_norm": 0.5677642822265625, "learning_rate": 0.00019696190414272664, "loss": 1.201, "step": 868 }, { "epoch": 0.1614941460695038, "grad_norm": 0.5723028779029846, "learning_rate": 0.00019695474587025962, "loss": 1.4633, "step": 869 }, { "epoch": 0.16167998513287493, "grad_norm": 0.4471625089645386, "learning_rate": 0.00019694757930504363, "loss": 1.0097, "step": 870 }, { "epoch": 0.16186582419624604, "grad_norm": 0.4626026153564453, "learning_rate": 0.00019694040444769164, "loss": 0.94, "step": 871 }, { "epoch": 0.16205166325961717, "grad_norm": 0.4059935212135315, "learning_rate": 0.00019693322129881734, "loss": 0.8778, "step": 872 }, { "epoch": 0.16223750232298828, "grad_norm": 0.4863290786743164, "learning_rate": 0.00019692602985903513, "loss": 1.2015, "step": 873 }, { "epoch": 
0.16242334138635942, "grad_norm": 0.49636152386665344, "learning_rate": 0.00019691883012896012, "loss": 1.1677, "step": 874 }, { "epoch": 0.16260918044973052, "grad_norm": 0.46879225969314575, "learning_rate": 0.00019691162210920806, "loss": 1.1934, "step": 875 }, { "epoch": 0.16279501951310166, "grad_norm": 0.45269060134887695, "learning_rate": 0.00019690440580039554, "loss": 0.8255, "step": 876 }, { "epoch": 0.16298085857647276, "grad_norm": 0.49863696098327637, "learning_rate": 0.00019689718120313973, "loss": 1.0585, "step": 877 }, { "epoch": 0.1631666976398439, "grad_norm": 0.5355581641197205, "learning_rate": 0.0001968899483180586, "loss": 1.2448, "step": 878 }, { "epoch": 0.163352536703215, "grad_norm": 0.5691109895706177, "learning_rate": 0.0001968827071457708, "loss": 1.2223, "step": 879 }, { "epoch": 0.16353837576658614, "grad_norm": 0.5250231623649597, "learning_rate": 0.00019687545768689563, "loss": 1.344, "step": 880 }, { "epoch": 0.16372421482995725, "grad_norm": 0.47329142689704895, "learning_rate": 0.00019686819994205323, "loss": 1.115, "step": 881 }, { "epoch": 0.16391005389332838, "grad_norm": 0.5414997935295105, "learning_rate": 0.00019686093391186427, "loss": 1.107, "step": 882 }, { "epoch": 0.1640958929566995, "grad_norm": 0.48563218116760254, "learning_rate": 0.00019685365959695034, "loss": 1.1476, "step": 883 }, { "epoch": 0.16428173202007063, "grad_norm": 0.5817862749099731, "learning_rate": 0.00019684637699793358, "loss": 1.2211, "step": 884 }, { "epoch": 0.16446757108344173, "grad_norm": 0.5920995473861694, "learning_rate": 0.00019683908611543686, "loss": 1.1086, "step": 885 }, { "epoch": 0.16465341014681287, "grad_norm": 0.49546167254447937, "learning_rate": 0.0001968317869500838, "loss": 1.0569, "step": 886 }, { "epoch": 0.16483924921018397, "grad_norm": 0.5465673804283142, "learning_rate": 0.00019682447950249875, "loss": 1.1475, "step": 887 }, { "epoch": 0.1650250882735551, "grad_norm": 0.6050189137458801, "learning_rate": 
0.00019681716377330667, "loss": 1.2038, "step": 888 }, { "epoch": 0.16521092733692622, "grad_norm": 0.4171687364578247, "learning_rate": 0.00019680983976313337, "loss": 1.0506, "step": 889 }, { "epoch": 0.16539676640029735, "grad_norm": 0.4742315113544464, "learning_rate": 0.00019680250747260522, "loss": 1.133, "step": 890 }, { "epoch": 0.16558260546366846, "grad_norm": 0.4795233905315399, "learning_rate": 0.00019679516690234938, "loss": 1.1432, "step": 891 }, { "epoch": 0.1657684445270396, "grad_norm": 0.5627532601356506, "learning_rate": 0.00019678781805299372, "loss": 0.88, "step": 892 }, { "epoch": 0.1659542835904107, "grad_norm": 0.4793561100959778, "learning_rate": 0.00019678046092516676, "loss": 1.1651, "step": 893 }, { "epoch": 0.16614012265378184, "grad_norm": 0.42398396134376526, "learning_rate": 0.00019677309551949785, "loss": 1.0936, "step": 894 }, { "epoch": 0.16632596171715294, "grad_norm": 0.4386990964412689, "learning_rate": 0.00019676572183661693, "loss": 0.9839, "step": 895 }, { "epoch": 0.16651180078052408, "grad_norm": 0.41191449761390686, "learning_rate": 0.00019675833987715463, "loss": 0.9892, "step": 896 }, { "epoch": 0.16669763984389518, "grad_norm": 0.5318143367767334, "learning_rate": 0.00019675094964174245, "loss": 0.8916, "step": 897 }, { "epoch": 0.16688347890726632, "grad_norm": 0.43514615297317505, "learning_rate": 0.00019674355113101242, "loss": 1.1414, "step": 898 }, { "epoch": 0.16706931797063743, "grad_norm": 0.5335797071456909, "learning_rate": 0.00019673614434559739, "loss": 1.0711, "step": 899 }, { "epoch": 0.16725515703400856, "grad_norm": 0.6108746528625488, "learning_rate": 0.00019672872928613085, "loss": 1.0356, "step": 900 }, { "epoch": 0.16744099609737967, "grad_norm": 0.5601165890693665, "learning_rate": 0.00019672130595324702, "loss": 1.2728, "step": 901 }, { "epoch": 0.1676268351607508, "grad_norm": 0.6302157640457153, "learning_rate": 0.00019671387434758085, "loss": 1.3459, "step": 902 }, { "epoch": 
0.1678126742241219, "grad_norm": 0.4896230697631836, "learning_rate": 0.000196706434469768, "loss": 1.0278, "step": 903 }, { "epoch": 0.16799851328749302, "grad_norm": 0.5244489908218384, "learning_rate": 0.00019669898632044478, "loss": 0.9506, "step": 904 }, { "epoch": 0.16818435235086415, "grad_norm": 0.49130362272262573, "learning_rate": 0.0001966915299002483, "loss": 1.079, "step": 905 }, { "epoch": 0.16837019141423526, "grad_norm": 0.4698750972747803, "learning_rate": 0.00019668406520981626, "loss": 1.1445, "step": 906 }, { "epoch": 0.1685560304776064, "grad_norm": 0.5378125309944153, "learning_rate": 0.00019667659224978718, "loss": 1.0339, "step": 907 }, { "epoch": 0.1687418695409775, "grad_norm": 0.5768793225288391, "learning_rate": 0.0001966691110208002, "loss": 1.3645, "step": 908 }, { "epoch": 0.16892770860434864, "grad_norm": 0.49996206164360046, "learning_rate": 0.00019666162152349523, "loss": 1.2858, "step": 909 }, { "epoch": 0.16911354766771974, "grad_norm": 0.4342251420021057, "learning_rate": 0.00019665412375851287, "loss": 0.8591, "step": 910 }, { "epoch": 0.16929938673109088, "grad_norm": 0.4541759192943573, "learning_rate": 0.0001966466177264944, "loss": 1.2329, "step": 911 }, { "epoch": 0.16948522579446199, "grad_norm": 0.48625192046165466, "learning_rate": 0.00019663910342808183, "loss": 1.0584, "step": 912 }, { "epoch": 0.16967106485783312, "grad_norm": 0.4969070553779602, "learning_rate": 0.00019663158086391787, "loss": 1.1282, "step": 913 }, { "epoch": 0.16985690392120423, "grad_norm": 0.4897630214691162, "learning_rate": 0.00019662405003464598, "loss": 0.8879, "step": 914 }, { "epoch": 0.17004274298457536, "grad_norm": 0.5059701800346375, "learning_rate": 0.00019661651094091023, "loss": 1.141, "step": 915 }, { "epoch": 0.17022858204794647, "grad_norm": 0.578790545463562, "learning_rate": 0.0001966089635833555, "loss": 1.2052, "step": 916 }, { "epoch": 0.1704144211113176, "grad_norm": 0.4383024275302887, "learning_rate": 
0.00019660140796262733, "loss": 0.8611, "step": 917 }, { "epoch": 0.1706002601746887, "grad_norm": 0.5056631565093994, "learning_rate": 0.00019659384407937192, "loss": 1.1212, "step": 918 }, { "epoch": 0.17078609923805985, "grad_norm": 0.48702114820480347, "learning_rate": 0.00019658627193423626, "loss": 1.1543, "step": 919 }, { "epoch": 0.17097193830143095, "grad_norm": 0.47001418471336365, "learning_rate": 0.00019657869152786802, "loss": 1.1391, "step": 920 }, { "epoch": 0.1711577773648021, "grad_norm": 0.4468410313129425, "learning_rate": 0.00019657110286091557, "loss": 0.8197, "step": 921 }, { "epoch": 0.1713436164281732, "grad_norm": 0.413083553314209, "learning_rate": 0.00019656350593402796, "loss": 1.0223, "step": 922 }, { "epoch": 0.17152945549154433, "grad_norm": 0.533121645450592, "learning_rate": 0.00019655590074785502, "loss": 0.9284, "step": 923 }, { "epoch": 0.17171529455491544, "grad_norm": 0.45572182536125183, "learning_rate": 0.0001965482873030472, "loss": 1.1174, "step": 924 }, { "epoch": 0.17190113361828657, "grad_norm": 0.5130087733268738, "learning_rate": 0.00019654066560025567, "loss": 1.1895, "step": 925 }, { "epoch": 0.17208697268165768, "grad_norm": 0.5442116856575012, "learning_rate": 0.00019653303564013235, "loss": 1.1295, "step": 926 }, { "epoch": 0.17227281174502881, "grad_norm": 0.4951963424682617, "learning_rate": 0.00019652539742332986, "loss": 1.2979, "step": 927 }, { "epoch": 0.17245865080839992, "grad_norm": 0.6164078712463379, "learning_rate": 0.00019651775095050155, "loss": 1.2801, "step": 928 }, { "epoch": 0.17264448987177106, "grad_norm": 0.45666176080703735, "learning_rate": 0.00019651009622230134, "loss": 1.2134, "step": 929 }, { "epoch": 0.17283032893514216, "grad_norm": 0.5629132390022278, "learning_rate": 0.00019650243323938408, "loss": 1.4352, "step": 930 }, { "epoch": 0.1730161679985133, "grad_norm": 0.5350096225738525, "learning_rate": 0.0001964947620024051, "loss": 1.2231, "step": 931 }, { "epoch": 0.1732020070618844, 
"grad_norm": 0.5338471531867981, "learning_rate": 0.00019648708251202054, "loss": 1.2145, "step": 932 }, { "epoch": 0.17338784612525554, "grad_norm": 0.479168176651001, "learning_rate": 0.00019647939476888734, "loss": 0.9549, "step": 933 }, { "epoch": 0.17357368518862665, "grad_norm": 0.548399031162262, "learning_rate": 0.00019647169877366297, "loss": 0.9933, "step": 934 }, { "epoch": 0.17375952425199778, "grad_norm": 0.4960792362689972, "learning_rate": 0.00019646399452700566, "loss": 1.2743, "step": 935 }, { "epoch": 0.1739453633153689, "grad_norm": 0.4801279902458191, "learning_rate": 0.00019645628202957444, "loss": 1.1701, "step": 936 }, { "epoch": 0.17413120237874002, "grad_norm": 0.47602880001068115, "learning_rate": 0.00019644856128202897, "loss": 1.0865, "step": 937 }, { "epoch": 0.17431704144211113, "grad_norm": 0.5058044195175171, "learning_rate": 0.00019644083228502956, "loss": 1.2168, "step": 938 }, { "epoch": 0.17450288050548224, "grad_norm": 0.46569764614105225, "learning_rate": 0.00019643309503923737, "loss": 1.126, "step": 939 }, { "epoch": 0.17468871956885337, "grad_norm": 0.5684424042701721, "learning_rate": 0.0001964253495453141, "loss": 0.9557, "step": 940 }, { "epoch": 0.17487455863222448, "grad_norm": 0.5311669111251831, "learning_rate": 0.0001964175958039223, "loss": 0.9657, "step": 941 }, { "epoch": 0.17506039769559562, "grad_norm": 2.8396852016448975, "learning_rate": 0.00019640983381572516, "loss": 3.9658, "step": 942 }, { "epoch": 0.17524623675896672, "grad_norm": 0.55197674036026, "learning_rate": 0.00019640206358138656, "loss": 1.1804, "step": 943 }, { "epoch": 0.17543207582233786, "grad_norm": 0.48992422223091125, "learning_rate": 0.00019639428510157109, "loss": 1.1826, "step": 944 }, { "epoch": 0.17561791488570896, "grad_norm": 0.508070170879364, "learning_rate": 0.0001963864983769441, "loss": 1.0286, "step": 945 }, { "epoch": 0.1758037539490801, "grad_norm": 0.5306305289268494, "learning_rate": 0.00019637870340817156, "loss": 1.2903, 
"step": 946 }, { "epoch": 0.1759895930124512, "grad_norm": 0.5408816933631897, "learning_rate": 0.00019637090019592024, "loss": 1.1369, "step": 947 }, { "epoch": 0.17617543207582234, "grad_norm": 0.45540884137153625, "learning_rate": 0.00019636308874085753, "loss": 1.0051, "step": 948 }, { "epoch": 0.17636127113919345, "grad_norm": 0.4946479797363281, "learning_rate": 0.0001963552690436516, "loss": 1.306, "step": 949 }, { "epoch": 0.17654711020256458, "grad_norm": 0.5659436583518982, "learning_rate": 0.00019634744110497122, "loss": 1.0725, "step": 950 }, { "epoch": 0.1767329492659357, "grad_norm": 0.4694545269012451, "learning_rate": 0.00019633960492548597, "loss": 1.3804, "step": 951 }, { "epoch": 0.17691878832930683, "grad_norm": 0.5113719701766968, "learning_rate": 0.0001963317605058661, "loss": 1.0854, "step": 952 }, { "epoch": 0.17710462739267793, "grad_norm": 0.579812228679657, "learning_rate": 0.00019632390784678257, "loss": 1.1792, "step": 953 }, { "epoch": 0.17729046645604907, "grad_norm": 0.556024968624115, "learning_rate": 0.000196316046948907, "loss": 1.1594, "step": 954 }, { "epoch": 0.17747630551942017, "grad_norm": 0.5733163952827454, "learning_rate": 0.00019630817781291177, "loss": 1.2513, "step": 955 }, { "epoch": 0.1776621445827913, "grad_norm": 0.47930943965911865, "learning_rate": 0.00019630030043946993, "loss": 1.2074, "step": 956 }, { "epoch": 0.17784798364616242, "grad_norm": 0.5812712907791138, "learning_rate": 0.00019629241482925532, "loss": 0.9204, "step": 957 }, { "epoch": 0.17803382270953355, "grad_norm": 0.6513651013374329, "learning_rate": 0.0001962845209829423, "loss": 1.1605, "step": 958 }, { "epoch": 0.17821966177290466, "grad_norm": 0.5434703826904297, "learning_rate": 0.0001962766189012061, "loss": 1.3342, "step": 959 }, { "epoch": 0.1784055008362758, "grad_norm": 0.49916622042655945, "learning_rate": 0.00019626870858472262, "loss": 0.9762, "step": 960 }, { "epoch": 0.1785913398996469, "grad_norm": 0.48941847681999207, 
"learning_rate": 0.00019626079003416845, "loss": 0.6947, "step": 961 }, { "epoch": 0.17877717896301804, "grad_norm": 0.6125507950782776, "learning_rate": 0.00019625286325022082, "loss": 1.0488, "step": 962 }, { "epoch": 0.17896301802638914, "grad_norm": 0.5615450143814087, "learning_rate": 0.0001962449282335578, "loss": 1.269, "step": 963 }, { "epoch": 0.17914885708976028, "grad_norm": 0.5081421732902527, "learning_rate": 0.00019623698498485802, "loss": 0.9589, "step": 964 }, { "epoch": 0.17933469615313138, "grad_norm": 0.5921791195869446, "learning_rate": 0.00019622903350480098, "loss": 0.9497, "step": 965 }, { "epoch": 0.17952053521650252, "grad_norm": 0.5271924734115601, "learning_rate": 0.00019622107379406667, "loss": 1.248, "step": 966 }, { "epoch": 0.17970637427987363, "grad_norm": 0.5222536325454712, "learning_rate": 0.00019621310585333598, "loss": 1.1401, "step": 967 }, { "epoch": 0.17989221334324476, "grad_norm": 0.46469056606292725, "learning_rate": 0.0001962051296832904, "loss": 0.9384, "step": 968 }, { "epoch": 0.18007805240661587, "grad_norm": 0.495740681886673, "learning_rate": 0.00019619714528461212, "loss": 1.178, "step": 969 }, { "epoch": 0.180263891469987, "grad_norm": 0.4830992817878723, "learning_rate": 0.00019618915265798414, "loss": 1.3077, "step": 970 }, { "epoch": 0.1804497305333581, "grad_norm": 0.5313519835472107, "learning_rate": 0.00019618115180409002, "loss": 1.2829, "step": 971 }, { "epoch": 0.18063556959672925, "grad_norm": 0.45389869809150696, "learning_rate": 0.0001961731427236141, "loss": 1.064, "step": 972 }, { "epoch": 0.18082140866010035, "grad_norm": 0.4235461354255676, "learning_rate": 0.00019616512541724147, "loss": 0.8205, "step": 973 }, { "epoch": 0.1810072477234715, "grad_norm": 0.5951849222183228, "learning_rate": 0.0001961570998856578, "loss": 1.2295, "step": 974 }, { "epoch": 0.1811930867868426, "grad_norm": 0.4912169277667999, "learning_rate": 0.00019614906612954952, "loss": 1.1792, "step": 975 }, { "epoch": 
0.1813789258502137, "grad_norm": 0.6600517630577087, "learning_rate": 0.00019614102414960386, "loss": 1.1413, "step": 976 }, { "epoch": 0.18156476491358484, "grad_norm": 0.48177197575569153, "learning_rate": 0.00019613297394650858, "loss": 1.2551, "step": 977 }, { "epoch": 0.18175060397695594, "grad_norm": 0.4557987153530121, "learning_rate": 0.00019612491552095228, "loss": 1.1687, "step": 978 }, { "epoch": 0.18193644304032708, "grad_norm": 0.47915035486221313, "learning_rate": 0.0001961168488736242, "loss": 1.0173, "step": 979 }, { "epoch": 0.18212228210369819, "grad_norm": 0.7031456828117371, "learning_rate": 0.00019610877400521432, "loss": 1.1247, "step": 980 }, { "epoch": 0.18230812116706932, "grad_norm": 0.4912133812904358, "learning_rate": 0.00019610069091641324, "loss": 0.8791, "step": 981 }, { "epoch": 0.18249396023044043, "grad_norm": 0.5140329003334045, "learning_rate": 0.00019609259960791243, "loss": 0.8195, "step": 982 }, { "epoch": 0.18267979929381156, "grad_norm": 0.4770338237285614, "learning_rate": 0.00019608450008040383, "loss": 1.2822, "step": 983 }, { "epoch": 0.18286563835718267, "grad_norm": 0.4585348665714264, "learning_rate": 0.0001960763923345803, "loss": 1.0453, "step": 984 }, { "epoch": 0.1830514774205538, "grad_norm": 0.5334509611129761, "learning_rate": 0.00019606827637113527, "loss": 1.1493, "step": 985 }, { "epoch": 0.1832373164839249, "grad_norm": 0.5813632607460022, "learning_rate": 0.0001960601521907629, "loss": 1.2191, "step": 986 }, { "epoch": 0.18342315554729605, "grad_norm": 0.5605377554893494, "learning_rate": 0.00019605201979415817, "loss": 0.9412, "step": 987 }, { "epoch": 0.18360899461066715, "grad_norm": 0.5268937945365906, "learning_rate": 0.00019604387918201657, "loss": 1.0694, "step": 988 }, { "epoch": 0.1837948336740383, "grad_norm": 0.5224151611328125, "learning_rate": 0.00019603573035503437, "loss": 1.016, "step": 989 }, { "epoch": 0.1839806727374094, "grad_norm": 0.45612266659736633, "learning_rate": 
0.00019602757331390865, "loss": 1.1016, "step": 990 }, { "epoch": 0.18416651180078053, "grad_norm": 0.49727365374565125, "learning_rate": 0.000196019408059337, "loss": 0.9785, "step": 991 }, { "epoch": 0.18435235086415164, "grad_norm": 0.5259670615196228, "learning_rate": 0.00019601123459201788, "loss": 1.124, "step": 992 }, { "epoch": 0.18453818992752277, "grad_norm": 0.4743773937225342, "learning_rate": 0.00019600305291265032, "loss": 1.1237, "step": 993 }, { "epoch": 0.18472402899089388, "grad_norm": 0.5588434934616089, "learning_rate": 0.00019599486302193417, "loss": 1.2736, "step": 994 }, { "epoch": 0.18490986805426501, "grad_norm": 0.5143772959709167, "learning_rate": 0.00019598666492056993, "loss": 1.0648, "step": 995 }, { "epoch": 0.18509570711763612, "grad_norm": 0.5187076330184937, "learning_rate": 0.00019597845860925878, "loss": 0.8345, "step": 996 }, { "epoch": 0.18528154618100726, "grad_norm": 0.5211664438247681, "learning_rate": 0.0001959702440887026, "loss": 1.0942, "step": 997 }, { "epoch": 0.18546738524437836, "grad_norm": 0.4638168513774872, "learning_rate": 0.00019596202135960407, "loss": 1.1404, "step": 998 }, { "epoch": 0.1856532243077495, "grad_norm": 0.4244816303253174, "learning_rate": 0.0001959537904226664, "loss": 1.1829, "step": 999 }, { "epoch": 0.1858390633711206, "grad_norm": 0.4680684208869934, "learning_rate": 0.0001959455512785937, "loss": 1.1177, "step": 1000 }, { "epoch": 0.18602490243449174, "grad_norm": 0.5658469200134277, "learning_rate": 0.0001959373039280906, "loss": 1.4114, "step": 1001 }, { "epoch": 0.18621074149786285, "grad_norm": 0.5405275225639343, "learning_rate": 0.00019592904837186257, "loss": 1.0142, "step": 1002 }, { "epoch": 0.18639658056123398, "grad_norm": 0.5917633771896362, "learning_rate": 0.0001959207846106157, "loss": 1.3173, "step": 1003 }, { "epoch": 0.1865824196246051, "grad_norm": 0.5246212482452393, "learning_rate": 0.00019591251264505682, "loss": 1.0017, "step": 1004 }, { "epoch": 0.18676825868797622, 
"grad_norm": 0.5147131085395813, "learning_rate": 0.0001959042324758934, "loss": 1.2113, "step": 1005 }, { "epoch": 0.18695409775134733, "grad_norm": 0.4775300920009613, "learning_rate": 0.0001958959441038337, "loss": 1.072, "step": 1006 }, { "epoch": 0.18713993681471847, "grad_norm": 0.4175073504447937, "learning_rate": 0.00019588764752958668, "loss": 1.0136, "step": 1007 }, { "epoch": 0.18732577587808957, "grad_norm": 0.5542589426040649, "learning_rate": 0.00019587934275386193, "loss": 1.3874, "step": 1008 }, { "epoch": 0.1875116149414607, "grad_norm": 0.44940391182899475, "learning_rate": 0.00019587102977736972, "loss": 1.0096, "step": 1009 }, { "epoch": 0.18769745400483182, "grad_norm": 0.6043052077293396, "learning_rate": 0.00019586270860082118, "loss": 1.2021, "step": 1010 }, { "epoch": 0.18788329306820295, "grad_norm": 0.5352486371994019, "learning_rate": 0.000195854379224928, "loss": 1.1452, "step": 1011 }, { "epoch": 0.18806913213157406, "grad_norm": 0.4326801896095276, "learning_rate": 0.00019584604165040254, "loss": 0.8873, "step": 1012 }, { "epoch": 0.18825497119494516, "grad_norm": 0.5577175617218018, "learning_rate": 0.00019583769587795802, "loss": 1.2128, "step": 1013 }, { "epoch": 0.1884408102583163, "grad_norm": 0.4985814392566681, "learning_rate": 0.0001958293419083082, "loss": 1.3347, "step": 1014 }, { "epoch": 0.1886266493216874, "grad_norm": 0.5640441179275513, "learning_rate": 0.00019582097974216772, "loss": 1.0509, "step": 1015 }, { "epoch": 0.18881248838505854, "grad_norm": 0.48311781883239746, "learning_rate": 0.0001958126093802517, "loss": 1.1147, "step": 1016 }, { "epoch": 0.18899832744842965, "grad_norm": 0.5454368591308594, "learning_rate": 0.00019580423082327613, "loss": 1.1961, "step": 1017 }, { "epoch": 0.18918416651180078, "grad_norm": 0.49283385276794434, "learning_rate": 0.00019579584407195764, "loss": 1.1866, "step": 1018 }, { "epoch": 0.1893700055751719, "grad_norm": 0.6507675647735596, "learning_rate": 0.00019578744912701358, 
"loss": 1.2556, "step": 1019 }, { "epoch": 0.18955584463854303, "grad_norm": 0.5064658522605896, "learning_rate": 0.00019577904598916192, "loss": 1.0026, "step": 1020 }, { "epoch": 0.18974168370191413, "grad_norm": 0.76408451795578, "learning_rate": 0.00019577063465912149, "loss": 1.2837, "step": 1021 }, { "epoch": 0.18992752276528527, "grad_norm": 0.6197009086608887, "learning_rate": 0.00019576221513761168, "loss": 1.3769, "step": 1022 }, { "epoch": 0.19011336182865637, "grad_norm": 0.5020315051078796, "learning_rate": 0.00019575378742535266, "loss": 1.1076, "step": 1023 }, { "epoch": 0.1902992008920275, "grad_norm": 0.541795015335083, "learning_rate": 0.00019574535152306523, "loss": 1.1054, "step": 1024 }, { "epoch": 0.19048503995539862, "grad_norm": 0.49482569098472595, "learning_rate": 0.00019573690743147096, "loss": 1.2124, "step": 1025 }, { "epoch": 0.19067087901876975, "grad_norm": 0.5648143887519836, "learning_rate": 0.00019572845515129208, "loss": 1.2169, "step": 1026 }, { "epoch": 0.19085671808214086, "grad_norm": 0.4914950728416443, "learning_rate": 0.00019571999468325152, "loss": 1.0648, "step": 1027 }, { "epoch": 0.191042557145512, "grad_norm": 0.5238738656044006, "learning_rate": 0.00019571152602807296, "loss": 1.0846, "step": 1028 }, { "epoch": 0.1912283962088831, "grad_norm": 0.4733150005340576, "learning_rate": 0.00019570304918648071, "loss": 1.0583, "step": 1029 }, { "epoch": 0.19141423527225424, "grad_norm": 0.5372300148010254, "learning_rate": 0.00019569456415919985, "loss": 1.1714, "step": 1030 }, { "epoch": 0.19160007433562534, "grad_norm": 0.5832946300506592, "learning_rate": 0.00019568607094695603, "loss": 1.3292, "step": 1031 }, { "epoch": 0.19178591339899648, "grad_norm": 0.5379717350006104, "learning_rate": 0.00019567756955047582, "loss": 1.2471, "step": 1032 }, { "epoch": 0.19197175246236758, "grad_norm": 0.5493462681770325, "learning_rate": 0.00019566905997048627, "loss": 1.0719, "step": 1033 }, { "epoch": 0.19215759152573872, 
"grad_norm": 0.4469130337238312, "learning_rate": 0.00019566054220771526, "loss": 1.2158, "step": 1034 }, { "epoch": 0.19234343058910983, "grad_norm": 0.4941439628601074, "learning_rate": 0.0001956520162628913, "loss": 1.0047, "step": 1035 }, { "epoch": 0.19252926965248096, "grad_norm": 0.5384143590927124, "learning_rate": 0.0001956434821367437, "loss": 1.1704, "step": 1036 }, { "epoch": 0.19271510871585207, "grad_norm": 0.4670869708061218, "learning_rate": 0.00019563493983000232, "loss": 0.961, "step": 1037 }, { "epoch": 0.1929009477792232, "grad_norm": 0.43627220392227173, "learning_rate": 0.00019562638934339787, "loss": 1.0979, "step": 1038 }, { "epoch": 0.1930867868425943, "grad_norm": 0.5042278170585632, "learning_rate": 0.00019561783067766167, "loss": 0.9918, "step": 1039 }, { "epoch": 0.19327262590596545, "grad_norm": 0.46678847074508667, "learning_rate": 0.0001956092638335257, "loss": 1.0757, "step": 1040 }, { "epoch": 0.19345846496933655, "grad_norm": 0.9079703092575073, "learning_rate": 0.0001956006888117228, "loss": 0.994, "step": 1041 }, { "epoch": 0.1936443040327077, "grad_norm": 0.4500991404056549, "learning_rate": 0.00019559210561298637, "loss": 1.1286, "step": 1042 }, { "epoch": 0.1938301430960788, "grad_norm": 0.49819493293762207, "learning_rate": 0.00019558351423805054, "loss": 1.0163, "step": 1043 }, { "epoch": 0.19401598215944993, "grad_norm": 0.507444441318512, "learning_rate": 0.00019557491468765017, "loss": 1.027, "step": 1044 }, { "epoch": 0.19420182122282104, "grad_norm": 0.5294500589370728, "learning_rate": 0.00019556630696252074, "loss": 0.9572, "step": 1045 }, { "epoch": 0.19438766028619217, "grad_norm": 0.5559682250022888, "learning_rate": 0.00019555769106339857, "loss": 1.1363, "step": 1046 }, { "epoch": 0.19457349934956328, "grad_norm": 0.5073032379150391, "learning_rate": 0.00019554906699102053, "loss": 1.1022, "step": 1047 }, { "epoch": 0.19475933841293439, "grad_norm": 0.4607917368412018, "learning_rate": 0.0001955404347461243, 
"loss": 1.0064, "step": 1048 }, { "epoch": 0.19494517747630552, "grad_norm": 0.4846749007701874, "learning_rate": 0.00019553179432944817, "loss": 1.1871, "step": 1049 }, { "epoch": 0.19513101653967663, "grad_norm": 0.7353876829147339, "learning_rate": 0.0001955231457417312, "loss": 1.168, "step": 1050 }, { "epoch": 0.19531685560304776, "grad_norm": 0.5777313709259033, "learning_rate": 0.00019551448898371315, "loss": 1.0699, "step": 1051 }, { "epoch": 0.19550269466641887, "grad_norm": 0.5727697014808655, "learning_rate": 0.0001955058240561344, "loss": 1.0917, "step": 1052 }, { "epoch": 0.19568853372979, "grad_norm": 0.4747198522090912, "learning_rate": 0.00019549715095973613, "loss": 1.1084, "step": 1053 }, { "epoch": 0.1958743727931611, "grad_norm": 0.46111252903938293, "learning_rate": 0.00019548846969526013, "loss": 0.9441, "step": 1054 }, { "epoch": 0.19606021185653225, "grad_norm": 0.5396764874458313, "learning_rate": 0.00019547978026344893, "loss": 1.2142, "step": 1055 }, { "epoch": 0.19624605091990335, "grad_norm": 0.6169037222862244, "learning_rate": 0.00019547108266504577, "loss": 1.2052, "step": 1056 }, { "epoch": 0.1964318899832745, "grad_norm": 0.5420907139778137, "learning_rate": 0.00019546237690079457, "loss": 1.1335, "step": 1057 }, { "epoch": 0.1966177290466456, "grad_norm": 0.5372158288955688, "learning_rate": 0.00019545366297143995, "loss": 0.8468, "step": 1058 }, { "epoch": 0.19680356811001673, "grad_norm": 0.5079149603843689, "learning_rate": 0.00019544494087772723, "loss": 1.1215, "step": 1059 }, { "epoch": 0.19698940717338784, "grad_norm": 0.49826011061668396, "learning_rate": 0.00019543621062040244, "loss": 0.9791, "step": 1060 }, { "epoch": 0.19717524623675897, "grad_norm": 0.49958309531211853, "learning_rate": 0.00019542747220021228, "loss": 1.0473, "step": 1061 }, { "epoch": 0.19736108530013008, "grad_norm": 0.5966783165931702, "learning_rate": 0.0001954187256179042, "loss": 1.355, "step": 1062 }, { "epoch": 0.19754692436350121, 
"grad_norm": 0.5301747918128967, "learning_rate": 0.00019540997087422627, "loss": 1.1467, "step": 1063 }, { "epoch": 0.19773276342687232, "grad_norm": 0.4955202341079712, "learning_rate": 0.00019540120796992733, "loss": 1.2297, "step": 1064 }, { "epoch": 0.19791860249024346, "grad_norm": 0.4707731008529663, "learning_rate": 0.00019539243690575687, "loss": 0.9205, "step": 1065 }, { "epoch": 0.19810444155361456, "grad_norm": 0.6613312363624573, "learning_rate": 0.00019538365768246517, "loss": 0.9754, "step": 1066 }, { "epoch": 0.1982902806169857, "grad_norm": 0.47768664360046387, "learning_rate": 0.00019537487030080303, "loss": 1.0455, "step": 1067 }, { "epoch": 0.1984761196803568, "grad_norm": 0.6217352151870728, "learning_rate": 0.00019536607476152207, "loss": 1.4062, "step": 1068 }, { "epoch": 0.19866195874372794, "grad_norm": 0.6218780279159546, "learning_rate": 0.00019535727106537463, "loss": 1.2606, "step": 1069 }, { "epoch": 0.19884779780709905, "grad_norm": 0.6484935283660889, "learning_rate": 0.00019534845921311374, "loss": 1.319, "step": 1070 }, { "epoch": 0.19903363687047018, "grad_norm": 0.493118554353714, "learning_rate": 0.00019533963920549306, "loss": 1.218, "step": 1071 }, { "epoch": 0.1992194759338413, "grad_norm": 0.6731880903244019, "learning_rate": 0.00019533081104326696, "loss": 1.1587, "step": 1072 }, { "epoch": 0.19940531499721242, "grad_norm": 0.7866933941841125, "learning_rate": 0.00019532197472719057, "loss": 1.5491, "step": 1073 }, { "epoch": 0.19959115406058353, "grad_norm": 0.6097940802574158, "learning_rate": 0.00019531313025801964, "loss": 1.1939, "step": 1074 }, { "epoch": 0.19977699312395467, "grad_norm": 0.5222741365432739, "learning_rate": 0.00019530427763651072, "loss": 1.2106, "step": 1075 }, { "epoch": 0.19996283218732577, "grad_norm": 0.5517898797988892, "learning_rate": 0.00019529541686342093, "loss": 0.8951, "step": 1076 }, { "epoch": 0.2001486712506969, "grad_norm": 0.5374531745910645, "learning_rate": 0.00019528654793950817, 
"loss": 1.0006, "step": 1077 }, { "epoch": 0.20033451031406802, "grad_norm": 0.572433590888977, "learning_rate": 0.000195277670865531, "loss": 1.1501, "step": 1078 }, { "epoch": 0.20052034937743915, "grad_norm": 0.4874914586544037, "learning_rate": 0.00019526878564224878, "loss": 0.9626, "step": 1079 }, { "epoch": 0.20070618844081026, "grad_norm": 0.520273745059967, "learning_rate": 0.00019525989227042138, "loss": 0.9302, "step": 1080 }, { "epoch": 0.2008920275041814, "grad_norm": 0.45634725689888, "learning_rate": 0.00019525099075080952, "loss": 1.1159, "step": 1081 }, { "epoch": 0.2010778665675525, "grad_norm": 0.49319830536842346, "learning_rate": 0.00019524208108417454, "loss": 0.8992, "step": 1082 }, { "epoch": 0.20126370563092363, "grad_norm": 0.4766017496585846, "learning_rate": 0.00019523316327127853, "loss": 1.2904, "step": 1083 }, { "epoch": 0.20144954469429474, "grad_norm": 0.5087282061576843, "learning_rate": 0.00019522423731288427, "loss": 1.1372, "step": 1084 }, { "epoch": 0.20163538375766585, "grad_norm": 0.591387927532196, "learning_rate": 0.00019521530320975518, "loss": 1.5339, "step": 1085 }, { "epoch": 0.20182122282103698, "grad_norm": 0.6128246784210205, "learning_rate": 0.00019520636096265537, "loss": 1.1951, "step": 1086 }, { "epoch": 0.2020070618844081, "grad_norm": 0.46142295002937317, "learning_rate": 0.00019519741057234978, "loss": 1.2229, "step": 1087 }, { "epoch": 0.20219290094777922, "grad_norm": 0.537652313709259, "learning_rate": 0.00019518845203960388, "loss": 1.1164, "step": 1088 }, { "epoch": 0.20237874001115033, "grad_norm": 0.6652979850769043, "learning_rate": 0.000195179485365184, "loss": 1.2889, "step": 1089 }, { "epoch": 0.20256457907452147, "grad_norm": 0.6123540997505188, "learning_rate": 0.000195170510549857, "loss": 1.1483, "step": 1090 }, { "epoch": 0.20275041813789257, "grad_norm": 0.46437254548072815, "learning_rate": 0.00019516152759439055, "loss": 1.0709, "step": 1091 }, { "epoch": 0.2029362572012637, "grad_norm": 
0.5575506091117859, "learning_rate": 0.00019515253649955296, "loss": 1.1623, "step": 1092 }, { "epoch": 0.20312209626463482, "grad_norm": 0.4591430127620697, "learning_rate": 0.00019514353726611327, "loss": 1.0653, "step": 1093 }, { "epoch": 0.20330793532800595, "grad_norm": 0.5363798141479492, "learning_rate": 0.0001951345298948412, "loss": 1.1025, "step": 1094 }, { "epoch": 0.20349377439137706, "grad_norm": 0.5441762208938599, "learning_rate": 0.0001951255143865072, "loss": 1.3256, "step": 1095 }, { "epoch": 0.2036796134547482, "grad_norm": 0.5097872614860535, "learning_rate": 0.00019511649074188233, "loss": 1.1426, "step": 1096 }, { "epoch": 0.2038654525181193, "grad_norm": 0.4579317271709442, "learning_rate": 0.00019510745896173846, "loss": 0.9945, "step": 1097 }, { "epoch": 0.20405129158149043, "grad_norm": 0.49984660744667053, "learning_rate": 0.00019509841904684808, "loss": 0.9922, "step": 1098 }, { "epoch": 0.20423713064486154, "grad_norm": 0.5130319595336914, "learning_rate": 0.00019508937099798436, "loss": 1.0339, "step": 1099 }, { "epoch": 0.20442296970823268, "grad_norm": 0.6417672038078308, "learning_rate": 0.00019508031481592123, "loss": 0.9417, "step": 1100 }, { "epoch": 0.20460880877160378, "grad_norm": 0.4974082410335541, "learning_rate": 0.00019507125050143324, "loss": 1.0728, "step": 1101 }, { "epoch": 0.20479464783497492, "grad_norm": 0.5567024350166321, "learning_rate": 0.0001950621780552958, "loss": 1.2673, "step": 1102 }, { "epoch": 0.20498048689834603, "grad_norm": 0.5510198473930359, "learning_rate": 0.00019505309747828474, "loss": 1.1641, "step": 1103 }, { "epoch": 0.20516632596171716, "grad_norm": 0.5027584433555603, "learning_rate": 0.00019504400877117686, "loss": 0.9991, "step": 1104 }, { "epoch": 0.20535216502508827, "grad_norm": 0.5015250444412231, "learning_rate": 0.00019503491193474947, "loss": 1.1148, "step": 1105 }, { "epoch": 0.2055380040884594, "grad_norm": 0.5679439306259155, "learning_rate": 0.00019502580696978067, "loss": 
0.9935, "step": 1106 }, { "epoch": 0.2057238431518305, "grad_norm": 0.4760668873786926, "learning_rate": 0.00019501669387704925, "loss": 0.9816, "step": 1107 }, { "epoch": 0.20590968221520164, "grad_norm": 0.5136030912399292, "learning_rate": 0.0001950075726573346, "loss": 1.0082, "step": 1108 }, { "epoch": 0.20609552127857275, "grad_norm": 0.5538644790649414, "learning_rate": 0.00019499844331141695, "loss": 1.1304, "step": 1109 }, { "epoch": 0.2062813603419439, "grad_norm": 0.5204370021820068, "learning_rate": 0.0001949893058400771, "loss": 1.2725, "step": 1110 }, { "epoch": 0.206467199405315, "grad_norm": 0.7252811789512634, "learning_rate": 0.00019498016024409664, "loss": 1.1573, "step": 1111 }, { "epoch": 0.20665303846868613, "grad_norm": 0.5194401144981384, "learning_rate": 0.0001949710065242578, "loss": 1.2174, "step": 1112 }, { "epoch": 0.20683887753205724, "grad_norm": 0.5207048654556274, "learning_rate": 0.0001949618446813435, "loss": 1.2053, "step": 1113 }, { "epoch": 0.20702471659542837, "grad_norm": 0.5159600973129272, "learning_rate": 0.0001949526747161374, "loss": 1.1801, "step": 1114 }, { "epoch": 0.20721055565879948, "grad_norm": 0.5342578291893005, "learning_rate": 0.0001949434966294238, "loss": 1.0367, "step": 1115 }, { "epoch": 0.2073963947221706, "grad_norm": 0.5836411714553833, "learning_rate": 0.00019493431042198774, "loss": 1.1718, "step": 1116 }, { "epoch": 0.20758223378554172, "grad_norm": 0.6905984878540039, "learning_rate": 0.00019492511609461491, "loss": 1.1931, "step": 1117 }, { "epoch": 0.20776807284891285, "grad_norm": 0.5296867489814758, "learning_rate": 0.0001949159136480918, "loss": 1.1577, "step": 1118 }, { "epoch": 0.20795391191228396, "grad_norm": 0.5768712162971497, "learning_rate": 0.0001949067030832054, "loss": 1.1849, "step": 1119 }, { "epoch": 0.20813975097565507, "grad_norm": 0.59405118227005, "learning_rate": 0.00019489748440074358, "loss": 1.0622, "step": 1120 }, { "epoch": 0.2083255900390262, "grad_norm": 
0.4969901740550995, "learning_rate": 0.00019488825760149482, "loss": 1.0519, "step": 1121 }, { "epoch": 0.2085114291023973, "grad_norm": 0.4819211959838867, "learning_rate": 0.0001948790226862483, "loss": 0.7888, "step": 1122 }, { "epoch": 0.20869726816576845, "grad_norm": 0.5228898525238037, "learning_rate": 0.00019486977965579389, "loss": 0.9494, "step": 1123 }, { "epoch": 0.20888310722913955, "grad_norm": 0.4955415725708008, "learning_rate": 0.00019486052851092222, "loss": 0.9514, "step": 1124 }, { "epoch": 0.2090689462925107, "grad_norm": 0.5162959098815918, "learning_rate": 0.0001948512692524245, "loss": 1.1837, "step": 1125 }, { "epoch": 0.2092547853558818, "grad_norm": 0.4941028952598572, "learning_rate": 0.00019484200188109275, "loss": 1.1001, "step": 1126 }, { "epoch": 0.20944062441925293, "grad_norm": 0.5514204502105713, "learning_rate": 0.0001948327263977196, "loss": 1.3927, "step": 1127 }, { "epoch": 0.20962646348262404, "grad_norm": 0.5498420596122742, "learning_rate": 0.00019482344280309836, "loss": 1.3795, "step": 1128 }, { "epoch": 0.20981230254599517, "grad_norm": 0.5233075022697449, "learning_rate": 0.00019481415109802314, "loss": 1.2404, "step": 1129 }, { "epoch": 0.20999814160936628, "grad_norm": 0.5122292637825012, "learning_rate": 0.00019480485128328868, "loss": 1.1559, "step": 1130 }, { "epoch": 0.2101839806727374, "grad_norm": 0.5212186574935913, "learning_rate": 0.00019479554335969038, "loss": 1.1201, "step": 1131 }, { "epoch": 0.21036981973610852, "grad_norm": 0.5237042903900146, "learning_rate": 0.0001947862273280243, "loss": 0.995, "step": 1132 }, { "epoch": 0.21055565879947966, "grad_norm": 0.5185428857803345, "learning_rate": 0.00019477690318908742, "loss": 0.9331, "step": 1133 }, { "epoch": 0.21074149786285076, "grad_norm": 0.4496161937713623, "learning_rate": 0.00019476757094367714, "loss": 1.0886, "step": 1134 }, { "epoch": 0.2109273369262219, "grad_norm": 0.5477585792541504, "learning_rate": 0.00019475823059259174, "loss": 1.0802, 
"step": 1135 }, { "epoch": 0.211113175989593, "grad_norm": 0.6424799561500549, "learning_rate": 0.00019474888213663003, "loss": 1.0755, "step": 1136 }, { "epoch": 0.21129901505296414, "grad_norm": 0.47722670435905457, "learning_rate": 0.00019473952557659167, "loss": 1.1139, "step": 1137 }, { "epoch": 0.21148485411633525, "grad_norm": 0.5191829800605774, "learning_rate": 0.00019473016091327693, "loss": 0.978, "step": 1138 }, { "epoch": 0.21167069317970638, "grad_norm": 0.4725750684738159, "learning_rate": 0.0001947207881474868, "loss": 0.9026, "step": 1139 }, { "epoch": 0.2118565322430775, "grad_norm": 0.5386378169059753, "learning_rate": 0.0001947114072800229, "loss": 1.3181, "step": 1140 }, { "epoch": 0.21204237130644862, "grad_norm": 0.5343483686447144, "learning_rate": 0.00019470201831168766, "loss": 1.1293, "step": 1141 }, { "epoch": 0.21222821036981973, "grad_norm": 0.6510689854621887, "learning_rate": 0.0001946926212432841, "loss": 1.031, "step": 1142 }, { "epoch": 0.21241404943319087, "grad_norm": 0.5814812779426575, "learning_rate": 0.00019468321607561602, "loss": 1.0944, "step": 1143 }, { "epoch": 0.21259988849656197, "grad_norm": 0.4645356237888336, "learning_rate": 0.00019467380280948783, "loss": 1.0804, "step": 1144 }, { "epoch": 0.2127857275599331, "grad_norm": 2.570280075073242, "learning_rate": 0.00019466438144570466, "loss": 3.3341, "step": 1145 }, { "epoch": 0.21297156662330421, "grad_norm": 0.5189799070358276, "learning_rate": 0.00019465495198507234, "loss": 1.2346, "step": 1146 }, { "epoch": 0.21315740568667535, "grad_norm": 0.5512979626655579, "learning_rate": 0.0001946455144283974, "loss": 1.179, "step": 1147 }, { "epoch": 0.21334324475004646, "grad_norm": 0.555311381816864, "learning_rate": 0.00019463606877648704, "loss": 0.9932, "step": 1148 }, { "epoch": 0.2135290838134176, "grad_norm": 0.5090897679328918, "learning_rate": 0.0001946266150301492, "loss": 1.049, "step": 1149 }, { "epoch": 0.2137149228767887, "grad_norm": 0.537129819393158, 
"learning_rate": 0.00019461715319019248, "loss": 1.2181, "step": 1150 }, { "epoch": 0.21390076194015983, "grad_norm": 0.567992091178894, "learning_rate": 0.00019460768325742613, "loss": 1.0256, "step": 1151 }, { "epoch": 0.21408660100353094, "grad_norm": 0.5266989469528198, "learning_rate": 0.00019459820523266017, "loss": 1.0451, "step": 1152 }, { "epoch": 0.21427244006690208, "grad_norm": 0.5035572052001953, "learning_rate": 0.00019458871911670524, "loss": 1.2143, "step": 1153 }, { "epoch": 0.21445827913027318, "grad_norm": 0.4713079035282135, "learning_rate": 0.00019457922491037275, "loss": 1.093, "step": 1154 }, { "epoch": 0.21464411819364432, "grad_norm": 0.508577823638916, "learning_rate": 0.00019456972261447475, "loss": 1.3012, "step": 1155 }, { "epoch": 0.21482995725701542, "grad_norm": 0.9455997347831726, "learning_rate": 0.00019456021222982397, "loss": 1.1844, "step": 1156 }, { "epoch": 0.21501579632038653, "grad_norm": 0.5246649980545044, "learning_rate": 0.00019455069375723386, "loss": 1.1327, "step": 1157 }, { "epoch": 0.21520163538375767, "grad_norm": 0.5420750379562378, "learning_rate": 0.00019454116719751856, "loss": 1.1217, "step": 1158 }, { "epoch": 0.21538747444712877, "grad_norm": 0.49900150299072266, "learning_rate": 0.00019453163255149289, "loss": 0.8934, "step": 1159 }, { "epoch": 0.2155733135104999, "grad_norm": 0.5530839562416077, "learning_rate": 0.00019452208981997243, "loss": 1.0734, "step": 1160 }, { "epoch": 0.21575915257387102, "grad_norm": 0.5028192400932312, "learning_rate": 0.0001945125390037733, "loss": 1.0318, "step": 1161 }, { "epoch": 0.21594499163724215, "grad_norm": 0.4835618734359741, "learning_rate": 0.00019450298010371242, "loss": 1.1212, "step": 1162 }, { "epoch": 0.21613083070061326, "grad_norm": 0.7778669595718384, "learning_rate": 0.00019449341312060745, "loss": 1.5447, "step": 1163 }, { "epoch": 0.2163166697639844, "grad_norm": 0.5164691209793091, "learning_rate": 0.0001944838380552766, "loss": 1.261, "step": 1164 }, { 
"epoch": 0.2165025088273555, "grad_norm": 0.5152435302734375, "learning_rate": 0.00019447425490853887, "loss": 1.0973, "step": 1165 }, { "epoch": 0.21668834789072663, "grad_norm": 0.48500069975852966, "learning_rate": 0.00019446466368121398, "loss": 1.0991, "step": 1166 }, { "epoch": 0.21687418695409774, "grad_norm": 0.7115551829338074, "learning_rate": 0.00019445506437412217, "loss": 1.1862, "step": 1167 }, { "epoch": 0.21706002601746888, "grad_norm": 0.469462126493454, "learning_rate": 0.0001944454569880846, "loss": 1.0227, "step": 1168 }, { "epoch": 0.21724586508083998, "grad_norm": 0.46283483505249023, "learning_rate": 0.00019443584152392296, "loss": 1.0887, "step": 1169 }, { "epoch": 0.21743170414421112, "grad_norm": 0.594821572303772, "learning_rate": 0.0001944262179824597, "loss": 1.1053, "step": 1170 }, { "epoch": 0.21761754320758223, "grad_norm": 0.5451986193656921, "learning_rate": 0.00019441658636451794, "loss": 1.041, "step": 1171 }, { "epoch": 0.21780338227095336, "grad_norm": 3.8261632919311523, "learning_rate": 0.00019440694667092147, "loss": 4.0756, "step": 1172 }, { "epoch": 0.21798922133432447, "grad_norm": 0.5103700160980225, "learning_rate": 0.00019439729890249484, "loss": 1.1334, "step": 1173 }, { "epoch": 0.2181750603976956, "grad_norm": 0.5776328444480896, "learning_rate": 0.00019438764306006317, "loss": 1.2469, "step": 1174 }, { "epoch": 0.2183608994610667, "grad_norm": 0.5896931886672974, "learning_rate": 0.00019437797914445242, "loss": 0.9341, "step": 1175 }, { "epoch": 0.21854673852443784, "grad_norm": 0.5199896097183228, "learning_rate": 0.00019436830715648912, "loss": 1.1746, "step": 1176 }, { "epoch": 0.21873257758780895, "grad_norm": 0.5308344960212708, "learning_rate": 0.00019435862709700057, "loss": 1.1905, "step": 1177 }, { "epoch": 0.2189184166511801, "grad_norm": 0.6593711376190186, "learning_rate": 0.0001943489389668147, "loss": 1.3087, "step": 1178 }, { "epoch": 0.2191042557145512, "grad_norm": 0.4791925847530365, 
"learning_rate": 0.00019433924276676017, "loss": 1.2127, "step": 1179 }, { "epoch": 0.21929009477792233, "grad_norm": 0.48847538232803345, "learning_rate": 0.0001943295384976663, "loss": 0.7622, "step": 1180 }, { "epoch": 0.21947593384129344, "grad_norm": 0.614321768283844, "learning_rate": 0.00019431982616036314, "loss": 1.1416, "step": 1181 }, { "epoch": 0.21966177290466457, "grad_norm": 0.43737366795539856, "learning_rate": 0.0001943101057556814, "loss": 0.8295, "step": 1182 }, { "epoch": 0.21984761196803568, "grad_norm": 0.5130372047424316, "learning_rate": 0.00019430037728445247, "loss": 1.1073, "step": 1183 }, { "epoch": 0.2200334510314068, "grad_norm": 0.5877685546875, "learning_rate": 0.00019429064074750848, "loss": 1.2899, "step": 1184 }, { "epoch": 0.22021929009477792, "grad_norm": 0.543999969959259, "learning_rate": 0.00019428089614568217, "loss": 1.4243, "step": 1185 }, { "epoch": 0.22040512915814905, "grad_norm": 0.4639683961868286, "learning_rate": 0.00019427114347980707, "loss": 1.1822, "step": 1186 }, { "epoch": 0.22059096822152016, "grad_norm": 0.5469757914543152, "learning_rate": 0.0001942613827507173, "loss": 1.2563, "step": 1187 }, { "epoch": 0.2207768072848913, "grad_norm": 0.5121818780899048, "learning_rate": 0.00019425161395924775, "loss": 1.1868, "step": 1188 }, { "epoch": 0.2209626463482624, "grad_norm": 0.4748496115207672, "learning_rate": 0.00019424183710623397, "loss": 0.9397, "step": 1189 }, { "epoch": 0.22114848541163354, "grad_norm": 0.5774352550506592, "learning_rate": 0.00019423205219251216, "loss": 1.304, "step": 1190 }, { "epoch": 0.22133432447500465, "grad_norm": 0.4741893410682678, "learning_rate": 0.00019422225921891925, "loss": 0.905, "step": 1191 }, { "epoch": 0.22152016353837575, "grad_norm": 0.500920832157135, "learning_rate": 0.0001942124581862929, "loss": 1.043, "step": 1192 }, { "epoch": 0.2217060026017469, "grad_norm": 0.4882694184780121, "learning_rate": 0.00019420264909547136, "loss": 0.9522, "step": 1193 }, { 
"epoch": 0.221891841665118, "grad_norm": 0.4808083176612854, "learning_rate": 0.00019419283194729365, "loss": 1.125, "step": 1194 }, { "epoch": 0.22207768072848913, "grad_norm": 0.5128915905952454, "learning_rate": 0.00019418300674259946, "loss": 0.9932, "step": 1195 }, { "epoch": 0.22226351979186024, "grad_norm": 0.5226528644561768, "learning_rate": 0.0001941731734822291, "loss": 1.1762, "step": 1196 }, { "epoch": 0.22244935885523137, "grad_norm": 0.504802405834198, "learning_rate": 0.00019416333216702375, "loss": 1.0157, "step": 1197 }, { "epoch": 0.22263519791860248, "grad_norm": 0.5046318173408508, "learning_rate": 0.00019415348279782504, "loss": 0.9394, "step": 1198 }, { "epoch": 0.2228210369819736, "grad_norm": 0.524804949760437, "learning_rate": 0.00019414362537547545, "loss": 1.0875, "step": 1199 }, { "epoch": 0.22300687604534472, "grad_norm": 0.45535653829574585, "learning_rate": 0.0001941337599008181, "loss": 1.1935, "step": 1200 }, { "epoch": 0.22319271510871586, "grad_norm": 0.5665996670722961, "learning_rate": 0.00019412388637469685, "loss": 1.2213, "step": 1201 }, { "epoch": 0.22337855417208696, "grad_norm": 0.6267208456993103, "learning_rate": 0.00019411400479795617, "loss": 0.9289, "step": 1202 }, { "epoch": 0.2235643932354581, "grad_norm": 0.5627426505088806, "learning_rate": 0.00019410411517144124, "loss": 1.1286, "step": 1203 }, { "epoch": 0.2237502322988292, "grad_norm": 0.5556461215019226, "learning_rate": 0.00019409421749599795, "loss": 1.0649, "step": 1204 }, { "epoch": 0.22393607136220034, "grad_norm": 0.45988309383392334, "learning_rate": 0.00019408431177247288, "loss": 0.9952, "step": 1205 }, { "epoch": 0.22412191042557145, "grad_norm": 0.6533599495887756, "learning_rate": 0.00019407439800171326, "loss": 1.2076, "step": 1206 }, { "epoch": 0.22430774948894258, "grad_norm": 0.49994662404060364, "learning_rate": 0.0001940644761845671, "loss": 0.7867, "step": 1207 }, { "epoch": 0.2244935885523137, "grad_norm": 0.46270355582237244, 
"learning_rate": 0.00019405454632188295, "loss": 0.9882, "step": 1208 }, { "epoch": 0.22467942761568482, "grad_norm": 0.4713519215583801, "learning_rate": 0.0001940446084145102, "loss": 1.1609, "step": 1209 }, { "epoch": 0.22486526667905593, "grad_norm": 0.5355565547943115, "learning_rate": 0.0001940346624632988, "loss": 0.8165, "step": 1210 }, { "epoch": 0.22505110574242707, "grad_norm": 0.6384341716766357, "learning_rate": 0.00019402470846909952, "loss": 1.2009, "step": 1211 }, { "epoch": 0.22523694480579817, "grad_norm": 0.7215434908866882, "learning_rate": 0.0001940147464327637, "loss": 1.2914, "step": 1212 }, { "epoch": 0.2254227838691693, "grad_norm": 0.5999711751937866, "learning_rate": 0.00019400477635514346, "loss": 1.1607, "step": 1213 }, { "epoch": 0.22560862293254041, "grad_norm": 0.5356886386871338, "learning_rate": 0.00019399479823709146, "loss": 1.2823, "step": 1214 }, { "epoch": 0.22579446199591155, "grad_norm": 0.555874764919281, "learning_rate": 0.00019398481207946126, "loss": 1.3053, "step": 1215 }, { "epoch": 0.22598030105928266, "grad_norm": 0.5327717661857605, "learning_rate": 0.00019397481788310694, "loss": 1.0022, "step": 1216 }, { "epoch": 0.2261661401226538, "grad_norm": 0.530863344669342, "learning_rate": 0.00019396481564888337, "loss": 1.1806, "step": 1217 }, { "epoch": 0.2263519791860249, "grad_norm": 0.5350611805915833, "learning_rate": 0.00019395480537764598, "loss": 1.1462, "step": 1218 }, { "epoch": 0.22653781824939603, "grad_norm": 0.41025102138519287, "learning_rate": 0.00019394478707025105, "loss": 0.7575, "step": 1219 }, { "epoch": 0.22672365731276714, "grad_norm": 0.5089498162269592, "learning_rate": 0.00019393476072755546, "loss": 1.0725, "step": 1220 }, { "epoch": 0.22690949637613828, "grad_norm": 0.5464990735054016, "learning_rate": 0.00019392472635041674, "loss": 0.9933, "step": 1221 }, { "epoch": 0.22709533543950938, "grad_norm": 0.5634016990661621, "learning_rate": 0.00019391468393969316, "loss": 0.9743, "step": 1222 }, { 
"epoch": 0.22728117450288052, "grad_norm": 10.35197639465332, "learning_rate": 0.00019390463349624372, "loss": 3.9092, "step": 1223 }, { "epoch": 0.22746701356625162, "grad_norm": 0.462494820356369, "learning_rate": 0.000193894575020928, "loss": 0.7876, "step": 1224 }, { "epoch": 0.22765285262962276, "grad_norm": 0.465165913105011, "learning_rate": 0.0001938845085146064, "loss": 0.9355, "step": 1225 }, { "epoch": 0.22783869169299387, "grad_norm": 1.9577351808547974, "learning_rate": 0.0001938744339781398, "loss": 3.4486, "step": 1226 }, { "epoch": 0.228024530756365, "grad_norm": 0.6147559285163879, "learning_rate": 0.00019386435141239004, "loss": 1.0793, "step": 1227 }, { "epoch": 0.2282103698197361, "grad_norm": 0.5095495581626892, "learning_rate": 0.00019385426081821938, "loss": 0.9592, "step": 1228 }, { "epoch": 0.22839620888310722, "grad_norm": 0.5296030640602112, "learning_rate": 0.00019384416219649098, "loss": 1.3726, "step": 1229 }, { "epoch": 0.22858204794647835, "grad_norm": 0.7352867722511292, "learning_rate": 0.00019383405554806855, "loss": 1.1072, "step": 1230 }, { "epoch": 0.22876788700984946, "grad_norm": 0.40808743238449097, "learning_rate": 0.00019382394087381655, "loss": 0.701, "step": 1231 }, { "epoch": 0.2289537260732206, "grad_norm": 0.5722739696502686, "learning_rate": 0.0001938138181746001, "loss": 1.2682, "step": 1232 }, { "epoch": 0.2291395651365917, "grad_norm": 0.6272040009498596, "learning_rate": 0.00019380368745128504, "loss": 0.7458, "step": 1233 }, { "epoch": 0.22932540419996283, "grad_norm": 0.5764666199684143, "learning_rate": 0.00019379354870473786, "loss": 0.9817, "step": 1234 }, { "epoch": 0.22951124326333394, "grad_norm": 0.682600736618042, "learning_rate": 0.00019378340193582568, "loss": 1.1695, "step": 1235 }, { "epoch": 0.22969708232670508, "grad_norm": 0.5942116379737854, "learning_rate": 0.00019377324714541652, "loss": 1.0587, "step": 1236 }, { "epoch": 0.22988292139007618, "grad_norm": 0.5158277153968811, "learning_rate": 
0.00019376308433437882, "loss": 1.0289, "step": 1237 }, { "epoch": 0.23006876045344732, "grad_norm": 0.6414231657981873, "learning_rate": 0.00019375291350358186, "loss": 1.3772, "step": 1238 }, { "epoch": 0.23025459951681843, "grad_norm": 0.578842282295227, "learning_rate": 0.00019374273465389564, "loss": 1.1186, "step": 1239 }, { "epoch": 0.23044043858018956, "grad_norm": 0.6373087763786316, "learning_rate": 0.00019373254778619066, "loss": 0.9283, "step": 1240 }, { "epoch": 0.23062627764356067, "grad_norm": 0.5056608319282532, "learning_rate": 0.00019372235290133832, "loss": 0.9324, "step": 1241 }, { "epoch": 0.2308121167069318, "grad_norm": 0.5139119625091553, "learning_rate": 0.00019371215000021054, "loss": 1.1976, "step": 1242 }, { "epoch": 0.2309979557703029, "grad_norm": 0.4746711552143097, "learning_rate": 0.00019370193908368008, "loss": 0.8965, "step": 1243 }, { "epoch": 0.23118379483367404, "grad_norm": 0.580740213394165, "learning_rate": 0.00019369172015262022, "loss": 0.9536, "step": 1244 }, { "epoch": 0.23136963389704515, "grad_norm": 0.48911336064338684, "learning_rate": 0.00019368149320790506, "loss": 1.0331, "step": 1245 }, { "epoch": 0.2315554729604163, "grad_norm": 0.5302042365074158, "learning_rate": 0.00019367125825040928, "loss": 1.0308, "step": 1246 }, { "epoch": 0.2317413120237874, "grad_norm": 0.5719622373580933, "learning_rate": 0.00019366101528100834, "loss": 1.3409, "step": 1247 }, { "epoch": 0.23192715108715853, "grad_norm": 0.5955301523208618, "learning_rate": 0.00019365076430057836, "loss": 1.167, "step": 1248 }, { "epoch": 0.23211299015052964, "grad_norm": 0.5893456339836121, "learning_rate": 0.0001936405053099961, "loss": 1.211, "step": 1249 }, { "epoch": 0.23229882921390077, "grad_norm": 0.5408098697662354, "learning_rate": 0.000193630238310139, "loss": 0.9324, "step": 1250 }, { "epoch": 0.23248466827727188, "grad_norm": 0.5345112085342407, "learning_rate": 0.0001936199633018853, "loss": 1.1313, "step": 1251 }, { "epoch": 
0.232670507340643, "grad_norm": 0.6104554533958435, "learning_rate": 0.0001936096802861138, "loss": 1.1836, "step": 1252 }, { "epoch": 0.23285634640401412, "grad_norm": 0.4254988729953766, "learning_rate": 0.000193599389263704, "loss": 0.8247, "step": 1253 }, { "epoch": 0.23304218546738525, "grad_norm": 0.5082131624221802, "learning_rate": 0.00019358909023553614, "loss": 1.1895, "step": 1254 }, { "epoch": 0.23322802453075636, "grad_norm": 0.5012628436088562, "learning_rate": 0.00019357878320249114, "loss": 1.1176, "step": 1255 }, { "epoch": 0.2334138635941275, "grad_norm": 0.5261410474777222, "learning_rate": 0.00019356846816545055, "loss": 1.0318, "step": 1256 }, { "epoch": 0.2335997026574986, "grad_norm": 0.49757251143455505, "learning_rate": 0.00019355814512529666, "loss": 1.0032, "step": 1257 }, { "epoch": 0.23378554172086974, "grad_norm": 0.5406329035758972, "learning_rate": 0.00019354781408291243, "loss": 1.0992, "step": 1258 }, { "epoch": 0.23397138078424085, "grad_norm": 0.5017756819725037, "learning_rate": 0.00019353747503918144, "loss": 1.1414, "step": 1259 }, { "epoch": 0.23415721984761198, "grad_norm": 0.5275353193283081, "learning_rate": 0.00019352712799498804, "loss": 1.0786, "step": 1260 }, { "epoch": 0.2343430589109831, "grad_norm": 0.5688086152076721, "learning_rate": 0.0001935167729512173, "loss": 1.2523, "step": 1261 }, { "epoch": 0.23452889797435422, "grad_norm": 0.5454958081245422, "learning_rate": 0.0001935064099087548, "loss": 0.9357, "step": 1262 }, { "epoch": 0.23471473703772533, "grad_norm": 0.5572336316108704, "learning_rate": 0.000193496038868487, "loss": 1.2626, "step": 1263 }, { "epoch": 0.23490057610109644, "grad_norm": 0.6043594479560852, "learning_rate": 0.0001934856598313009, "loss": 1.1696, "step": 1264 }, { "epoch": 0.23508641516446757, "grad_norm": 0.502384603023529, "learning_rate": 0.00019347527279808428, "loss": 1.0992, "step": 1265 }, { "epoch": 0.23527225422783868, "grad_norm": 0.5702905654907227, "learning_rate": 
0.00019346487776972555, "loss": 1.0492, "step": 1266 }, { "epoch": 0.2354580932912098, "grad_norm": 0.6055151224136353, "learning_rate": 0.00019345447474711382, "loss": 1.0244, "step": 1267 }, { "epoch": 0.23564393235458092, "grad_norm": 0.5263853669166565, "learning_rate": 0.00019344406373113886, "loss": 1.1904, "step": 1268 }, { "epoch": 0.23582977141795206, "grad_norm": 0.5104175806045532, "learning_rate": 0.0001934336447226912, "loss": 0.9947, "step": 1269 }, { "epoch": 0.23601561048132316, "grad_norm": 0.6638094186782837, "learning_rate": 0.00019342321772266197, "loss": 1.1783, "step": 1270 }, { "epoch": 0.2362014495446943, "grad_norm": 0.4550234079360962, "learning_rate": 0.00019341278273194302, "loss": 1.0144, "step": 1271 }, { "epoch": 0.2363872886080654, "grad_norm": 0.5424718260765076, "learning_rate": 0.00019340233975142685, "loss": 1.1628, "step": 1272 }, { "epoch": 0.23657312767143654, "grad_norm": 0.512442409992218, "learning_rate": 0.00019339188878200666, "loss": 1.0532, "step": 1273 }, { "epoch": 0.23675896673480765, "grad_norm": 0.5735439658164978, "learning_rate": 0.00019338142982457644, "loss": 0.8838, "step": 1274 }, { "epoch": 0.23694480579817878, "grad_norm": 0.5866655707359314, "learning_rate": 0.00019337096288003067, "loss": 1.0592, "step": 1275 }, { "epoch": 0.2371306448615499, "grad_norm": 0.5844191908836365, "learning_rate": 0.00019336048794926466, "loss": 1.0053, "step": 1276 }, { "epoch": 0.23731648392492102, "grad_norm": 0.4920348525047302, "learning_rate": 0.00019335000503317433, "loss": 0.943, "step": 1277 }, { "epoch": 0.23750232298829213, "grad_norm": 0.47772717475891113, "learning_rate": 0.0001933395141326563, "loss": 1.2133, "step": 1278 }, { "epoch": 0.23768816205166327, "grad_norm": 0.5314902067184448, "learning_rate": 0.0001933290152486079, "loss": 1.1368, "step": 1279 }, { "epoch": 0.23787400111503437, "grad_norm": 0.5484104752540588, "learning_rate": 0.00019331850838192713, "loss": 1.0915, "step": 1280 }, { "epoch": 
0.2380598401784055, "grad_norm": 0.5783165097236633, "learning_rate": 0.00019330799353351266, "loss": 1.0065, "step": 1281 }, { "epoch": 0.23824567924177661, "grad_norm": 0.4743618965148926, "learning_rate": 0.0001932974707042638, "loss": 0.8908, "step": 1282 }, { "epoch": 0.23843151830514775, "grad_norm": 0.5681834816932678, "learning_rate": 0.00019328693989508063, "loss": 1.1216, "step": 1283 }, { "epoch": 0.23861735736851886, "grad_norm": 0.5138354301452637, "learning_rate": 0.0001932764011068639, "loss": 0.8675, "step": 1284 }, { "epoch": 0.23880319643189, "grad_norm": 0.6256782412528992, "learning_rate": 0.00019326585434051497, "loss": 1.0162, "step": 1285 }, { "epoch": 0.2389890354952611, "grad_norm": 0.5428333878517151, "learning_rate": 0.00019325529959693597, "loss": 1.1342, "step": 1286 }, { "epoch": 0.23917487455863223, "grad_norm": 0.5870194435119629, "learning_rate": 0.0001932447368770296, "loss": 1.2211, "step": 1287 }, { "epoch": 0.23936071362200334, "grad_norm": 0.6258403658866882, "learning_rate": 0.0001932341661816994, "loss": 1.1359, "step": 1288 }, { "epoch": 0.23954655268537448, "grad_norm": 0.5419567227363586, "learning_rate": 0.00019322358751184944, "loss": 1.038, "step": 1289 }, { "epoch": 0.23973239174874558, "grad_norm": 5.651763439178467, "learning_rate": 0.00019321300086838457, "loss": 3.7309, "step": 1290 }, { "epoch": 0.23991823081211672, "grad_norm": 0.5438091158866882, "learning_rate": 0.00019320240625221024, "loss": 1.2439, "step": 1291 }, { "epoch": 0.24010406987548782, "grad_norm": 0.5790950059890747, "learning_rate": 0.0001931918036642327, "loss": 0.7855, "step": 1292 }, { "epoch": 0.24028990893885896, "grad_norm": 0.6340652108192444, "learning_rate": 0.00019318119310535875, "loss": 1.3861, "step": 1293 }, { "epoch": 0.24047574800223007, "grad_norm": 0.5647812485694885, "learning_rate": 0.000193170574576496, "loss": 1.2188, "step": 1294 }, { "epoch": 0.2406615870656012, "grad_norm": 0.5352049469947815, "learning_rate": 
0.0001931599480785526, "loss": 1.1037, "step": 1295 }, { "epoch": 0.2408474261289723, "grad_norm": 0.6570692658424377, "learning_rate": 0.00019314931361243752, "loss": 1.0727, "step": 1296 }, { "epoch": 0.24103326519234344, "grad_norm": 0.5156503319740295, "learning_rate": 0.00019313867117906033, "loss": 1.1296, "step": 1297 }, { "epoch": 0.24121910425571455, "grad_norm": 0.4969639182090759, "learning_rate": 0.0001931280207793313, "loss": 1.0714, "step": 1298 }, { "epoch": 0.24140494331908569, "grad_norm": 0.49877431988716125, "learning_rate": 0.00019311736241416135, "loss": 0.9473, "step": 1299 }, { "epoch": 0.2415907823824568, "grad_norm": 0.5098116993904114, "learning_rate": 0.00019310669608446214, "loss": 1.0151, "step": 1300 }, { "epoch": 0.2417766214458279, "grad_norm": 0.47907474637031555, "learning_rate": 0.00019309602179114598, "loss": 1.0961, "step": 1301 }, { "epoch": 0.24196246050919903, "grad_norm": 0.4947602450847626, "learning_rate": 0.0001930853395351259, "loss": 1.1161, "step": 1302 }, { "epoch": 0.24214829957257014, "grad_norm": 0.5896280407905579, "learning_rate": 0.00019307464931731555, "loss": 1.1756, "step": 1303 }, { "epoch": 0.24233413863594128, "grad_norm": 0.5432973504066467, "learning_rate": 0.00019306395113862922, "loss": 1.8116, "step": 1304 }, { "epoch": 0.24251997769931238, "grad_norm": 0.6156009435653687, "learning_rate": 0.00019305324499998208, "loss": 1.2343, "step": 1305 }, { "epoch": 0.24270581676268352, "grad_norm": 0.47537344694137573, "learning_rate": 0.00019304253090228975, "loss": 1.155, "step": 1306 }, { "epoch": 0.24289165582605463, "grad_norm": 0.4657548666000366, "learning_rate": 0.00019303180884646867, "loss": 0.8811, "step": 1307 }, { "epoch": 0.24307749488942576, "grad_norm": 0.6590142846107483, "learning_rate": 0.0001930210788334359, "loss": 1.2152, "step": 1308 }, { "epoch": 0.24326333395279687, "grad_norm": 0.6140265464782715, "learning_rate": 0.00019301034086410923, "loss": 1.2579, "step": 1309 }, { "epoch": 
0.243449173016168, "grad_norm": 0.5011003613471985, "learning_rate": 0.00019299959493940708, "loss": 0.9862, "step": 1310 }, { "epoch": 0.2436350120795391, "grad_norm": 0.4542607069015503, "learning_rate": 0.00019298884106024855, "loss": 0.8064, "step": 1311 }, { "epoch": 0.24382085114291024, "grad_norm": 0.5873056054115295, "learning_rate": 0.0001929780792275535, "loss": 1.1433, "step": 1312 }, { "epoch": 0.24400669020628135, "grad_norm": 0.4630652666091919, "learning_rate": 0.00019296730944224236, "loss": 1.0874, "step": 1313 }, { "epoch": 0.2441925292696525, "grad_norm": 0.7520668506622314, "learning_rate": 0.00019295653170523633, "loss": 1.2873, "step": 1314 }, { "epoch": 0.2443783683330236, "grad_norm": 0.5418159365653992, "learning_rate": 0.00019294574601745723, "loss": 1.0383, "step": 1315 }, { "epoch": 0.24456420739639473, "grad_norm": 0.5760306715965271, "learning_rate": 0.00019293495237982758, "loss": 1.1608, "step": 1316 }, { "epoch": 0.24475004645976584, "grad_norm": 0.4945579469203949, "learning_rate": 0.0001929241507932706, "loss": 1.0581, "step": 1317 }, { "epoch": 0.24493588552313697, "grad_norm": 0.5649460554122925, "learning_rate": 0.00019291334125871022, "loss": 0.9331, "step": 1318 }, { "epoch": 0.24512172458650808, "grad_norm": 0.5532613396644592, "learning_rate": 0.00019290252377707087, "loss": 1.0461, "step": 1319 }, { "epoch": 0.2453075636498792, "grad_norm": 0.4638751745223999, "learning_rate": 0.00019289169834927792, "loss": 0.9448, "step": 1320 }, { "epoch": 0.24549340271325032, "grad_norm": 0.46956831216812134, "learning_rate": 0.00019288086497625724, "loss": 1.0126, "step": 1321 }, { "epoch": 0.24567924177662145, "grad_norm": 0.6272051930427551, "learning_rate": 0.0001928700236589354, "loss": 1.3591, "step": 1322 }, { "epoch": 0.24586508083999256, "grad_norm": 0.5483406782150269, "learning_rate": 0.00019285917439823975, "loss": 1.212, "step": 1323 }, { "epoch": 0.2460509199033637, "grad_norm": 0.5835994482040405, "learning_rate": 
0.00019284831719509822, "loss": 0.9838, "step": 1324 }, { "epoch": 0.2462367589667348, "grad_norm": 0.5645042657852173, "learning_rate": 0.00019283745205043946, "loss": 1.2848, "step": 1325 }, { "epoch": 0.24642259803010594, "grad_norm": 0.5618262887001038, "learning_rate": 0.00019282657896519276, "loss": 1.2191, "step": 1326 }, { "epoch": 0.24660843709347705, "grad_norm": 0.4727591872215271, "learning_rate": 0.00019281569794028813, "loss": 1.032, "step": 1327 }, { "epoch": 0.24679427615684818, "grad_norm": 1.797974705696106, "learning_rate": 0.00019280480897665626, "loss": 3.749, "step": 1328 }, { "epoch": 0.2469801152202193, "grad_norm": 0.5174382925033569, "learning_rate": 0.0001927939120752285, "loss": 0.9723, "step": 1329 }, { "epoch": 0.24716595428359042, "grad_norm": 0.5607216358184814, "learning_rate": 0.0001927830072369369, "loss": 1.2211, "step": 1330 }, { "epoch": 0.24735179334696153, "grad_norm": 0.5592674612998962, "learning_rate": 0.00019277209446271414, "loss": 0.9497, "step": 1331 }, { "epoch": 0.24753763241033266, "grad_norm": 0.592069685459137, "learning_rate": 0.00019276117375349364, "loss": 1.1111, "step": 1332 }, { "epoch": 0.24772347147370377, "grad_norm": 0.5631759166717529, "learning_rate": 0.0001927502451102095, "loss": 1.2154, "step": 1333 }, { "epoch": 0.2479093105370749, "grad_norm": 0.5535110235214233, "learning_rate": 0.00019273930853379637, "loss": 1.1496, "step": 1334 }, { "epoch": 0.248095149600446, "grad_norm": 0.5004221200942993, "learning_rate": 0.0001927283640251898, "loss": 1.2262, "step": 1335 }, { "epoch": 0.24828098866381715, "grad_norm": 0.526226282119751, "learning_rate": 0.00019271741158532582, "loss": 0.9087, "step": 1336 }, { "epoch": 0.24846682772718826, "grad_norm": 0.504088819026947, "learning_rate": 0.00019270645121514124, "loss": 1.0982, "step": 1337 }, { "epoch": 0.24865266679055936, "grad_norm": 0.650536060333252, "learning_rate": 0.00019269548291557352, "loss": 1.2494, "step": 1338 }, { "epoch": 
0.2488385058539305, "grad_norm": 0.588284969329834, "learning_rate": 0.0001926845066875608, "loss": 1.3854, "step": 1339 }, { "epoch": 0.2490243449173016, "grad_norm": 0.609093427658081, "learning_rate": 0.00019267352253204192, "loss": 1.0135, "step": 1340 }, { "epoch": 0.24921018398067274, "grad_norm": 0.5409224629402161, "learning_rate": 0.00019266253044995636, "loss": 0.918, "step": 1341 }, { "epoch": 0.24939602304404385, "grad_norm": 0.6197240352630615, "learning_rate": 0.0001926515304422443, "loss": 1.2752, "step": 1342 }, { "epoch": 0.24958186210741498, "grad_norm": 0.6102698445320129, "learning_rate": 0.0001926405225098466, "loss": 0.8095, "step": 1343 }, { "epoch": 0.2497677011707861, "grad_norm": 0.5725678205490112, "learning_rate": 0.00019262950665370476, "loss": 1.0331, "step": 1344 }, { "epoch": 0.24995354023415722, "grad_norm": 0.686580240726471, "learning_rate": 0.00019261848287476104, "loss": 1.2452, "step": 1345 }, { "epoch": 0.25013937929752833, "grad_norm": 0.6485561728477478, "learning_rate": 0.00019260745117395832, "loss": 0.9583, "step": 1346 }, { "epoch": 0.25013937929752833, "eval_loss": 1.1052125692367554, "eval_runtime": 22.8091, "eval_samples_per_second": 47.876, "eval_steps_per_second": 23.938, "step": 1346 }, { "epoch": 0.25032521836089944, "grad_norm": 0.5164663791656494, "learning_rate": 0.0001925964115522401, "loss": 0.6874, "step": 1347 }, { "epoch": 0.2505110574242706, "grad_norm": 0.4719383716583252, "learning_rate": 0.00019258536401055073, "loss": 0.9657, "step": 1348 }, { "epoch": 0.2506968964876417, "grad_norm": 0.6003237366676331, "learning_rate": 0.00019257430854983506, "loss": 0.9867, "step": 1349 }, { "epoch": 0.2508827355510128, "grad_norm": 0.5269563794136047, "learning_rate": 0.00019256324517103867, "loss": 0.9536, "step": 1350 }, { "epoch": 0.2510685746143839, "grad_norm": 0.502997636795044, "learning_rate": 0.00019255217387510788, "loss": 1.0991, "step": 1351 }, { "epoch": 0.2512544136777551, "grad_norm": 
0.5721033811569214, "learning_rate": 0.00019254109466298964, "loss": 1.1603, "step": 1352 }, { "epoch": 0.2514402527411262, "grad_norm": 0.5710723996162415, "learning_rate": 0.00019253000753563154, "loss": 1.1508, "step": 1353 }, { "epoch": 0.2516260918044973, "grad_norm": 0.5256808996200562, "learning_rate": 0.00019251891249398196, "loss": 0.7742, "step": 1354 }, { "epoch": 0.2518119308678684, "grad_norm": 0.6614579558372498, "learning_rate": 0.0001925078095389898, "loss": 1.297, "step": 1355 }, { "epoch": 0.25199776993123957, "grad_norm": 0.4824221432209015, "learning_rate": 0.00019249669867160476, "loss": 0.9859, "step": 1356 }, { "epoch": 0.2521836089946107, "grad_norm": 0.5019868612289429, "learning_rate": 0.00019248557989277716, "loss": 1.0879, "step": 1357 }, { "epoch": 0.2523694480579818, "grad_norm": 0.6684537529945374, "learning_rate": 0.00019247445320345804, "loss": 1.1135, "step": 1358 }, { "epoch": 0.2525552871213529, "grad_norm": 0.5733745098114014, "learning_rate": 0.00019246331860459906, "loss": 1.1445, "step": 1359 }, { "epoch": 0.25274112618472405, "grad_norm": 0.5823042392730713, "learning_rate": 0.0001924521760971526, "loss": 1.1241, "step": 1360 }, { "epoch": 0.25292696524809516, "grad_norm": 0.5691298246383667, "learning_rate": 0.0001924410256820717, "loss": 1.2497, "step": 1361 }, { "epoch": 0.25311280431146627, "grad_norm": 0.630459189414978, "learning_rate": 0.0001924298673603101, "loss": 1.2617, "step": 1362 }, { "epoch": 0.2532986433748374, "grad_norm": 0.4968092739582062, "learning_rate": 0.00019241870113282216, "loss": 1.0298, "step": 1363 }, { "epoch": 0.25348448243820854, "grad_norm": 0.5145949721336365, "learning_rate": 0.00019240752700056297, "loss": 1.0263, "step": 1364 }, { "epoch": 0.25367032150157964, "grad_norm": 0.5401347875595093, "learning_rate": 0.0001923963449644883, "loss": 1.1958, "step": 1365 }, { "epoch": 0.25385616056495075, "grad_norm": 0.4620814323425293, "learning_rate": 0.0001923851550255545, "loss": 0.9539, 
"step": 1366 }, { "epoch": 0.25404199962832186, "grad_norm": 0.7093937993049622, "learning_rate": 0.00019237395718471877, "loss": 1.0059, "step": 1367 }, { "epoch": 0.254227838691693, "grad_norm": 0.4676429331302643, "learning_rate": 0.00019236275144293884, "loss": 1.0978, "step": 1368 }, { "epoch": 0.2544136777550641, "grad_norm": 0.6647943258285522, "learning_rate": 0.00019235153780117314, "loss": 1.0992, "step": 1369 }, { "epoch": 0.25459951681843523, "grad_norm": 0.5866450667381287, "learning_rate": 0.00019234031626038083, "loss": 1.3438, "step": 1370 }, { "epoch": 0.25478535588180634, "grad_norm": 0.5575841665267944, "learning_rate": 0.00019232908682152163, "loss": 1.1333, "step": 1371 }, { "epoch": 0.2549711949451775, "grad_norm": 0.6370420455932617, "learning_rate": 0.00019231784948555615, "loss": 1.0891, "step": 1372 }, { "epoch": 0.2551570340085486, "grad_norm": 0.49752578139305115, "learning_rate": 0.00019230660425344546, "loss": 0.9855, "step": 1373 }, { "epoch": 0.2553428730719197, "grad_norm": 0.517709493637085, "learning_rate": 0.00019229535112615142, "loss": 1.1083, "step": 1374 }, { "epoch": 0.2555287121352908, "grad_norm": 0.5422150492668152, "learning_rate": 0.0001922840901046365, "loss": 1.2918, "step": 1375 }, { "epoch": 0.25571455119866193, "grad_norm": 0.5656682848930359, "learning_rate": 0.00019227282118986394, "loss": 1.0533, "step": 1376 }, { "epoch": 0.2559003902620331, "grad_norm": 0.4613041579723358, "learning_rate": 0.00019226154438279752, "loss": 1.0798, "step": 1377 }, { "epoch": 0.2560862293254042, "grad_norm": 0.4737628102302551, "learning_rate": 0.00019225025968440181, "loss": 1.0431, "step": 1378 }, { "epoch": 0.2562720683887753, "grad_norm": 0.5070370435714722, "learning_rate": 0.00019223896709564202, "loss": 1.1703, "step": 1379 }, { "epoch": 0.2564579074521464, "grad_norm": 0.49695461988449097, "learning_rate": 0.00019222766661748398, "loss": 1.2816, "step": 1380 }, { "epoch": 0.2566437465155176, "grad_norm": 
0.5315166115760803, "learning_rate": 0.00019221635825089433, "loss": 1.0998, "step": 1381 }, { "epoch": 0.2568295855788887, "grad_norm": 0.5896740555763245, "learning_rate": 0.00019220504199684023, "loss": 0.9867, "step": 1382 }, { "epoch": 0.2570154246422598, "grad_norm": 0.6068691611289978, "learning_rate": 0.00019219371785628964, "loss": 1.2522, "step": 1383 }, { "epoch": 0.2572012637056309, "grad_norm": 0.375446617603302, "learning_rate": 0.00019218238583021107, "loss": 0.7037, "step": 1384 }, { "epoch": 0.25738710276900206, "grad_norm": 0.5455111861228943, "learning_rate": 0.00019217104591957382, "loss": 0.9547, "step": 1385 }, { "epoch": 0.25757294183237317, "grad_norm": 0.5593006014823914, "learning_rate": 0.00019215969812534783, "loss": 1.2173, "step": 1386 }, { "epoch": 0.2577587808957443, "grad_norm": 0.5944668054580688, "learning_rate": 0.00019214834244850364, "loss": 1.2137, "step": 1387 }, { "epoch": 0.2579446199591154, "grad_norm": 0.6287859082221985, "learning_rate": 0.00019213697889001256, "loss": 1.3013, "step": 1388 }, { "epoch": 0.25813045902248655, "grad_norm": 0.4995611906051636, "learning_rate": 0.00019212560745084656, "loss": 1.0576, "step": 1389 }, { "epoch": 0.25831629808585765, "grad_norm": 0.494229793548584, "learning_rate": 0.00019211422813197824, "loss": 0.9894, "step": 1390 }, { "epoch": 0.25850213714922876, "grad_norm": 0.4791043698787689, "learning_rate": 0.0001921028409343809, "loss": 0.9988, "step": 1391 }, { "epoch": 0.25868797621259987, "grad_norm": 0.5609716773033142, "learning_rate": 0.00019209144585902852, "loss": 1.0351, "step": 1392 }, { "epoch": 0.25887381527597103, "grad_norm": 0.619724452495575, "learning_rate": 0.00019208004290689572, "loss": 1.1413, "step": 1393 }, { "epoch": 0.25905965433934214, "grad_norm": 0.5314267873764038, "learning_rate": 0.00019206863207895785, "loss": 1.5014, "step": 1394 }, { "epoch": 0.25924549340271325, "grad_norm": 0.49732571840286255, "learning_rate": 0.0001920572133761909, "loss": 1.0477, 
"step": 1395 }, { "epoch": 0.25943133246608435, "grad_norm": 0.45689892768859863, "learning_rate": 0.00019204578679957152, "loss": 1.0746, "step": 1396 }, { "epoch": 0.2596171715294555, "grad_norm": 0.6244208216667175, "learning_rate": 0.00019203435235007706, "loss": 1.3679, "step": 1397 }, { "epoch": 0.2598030105928266, "grad_norm": 0.5010833144187927, "learning_rate": 0.00019202291002868554, "loss": 1.0572, "step": 1398 }, { "epoch": 0.25998884965619773, "grad_norm": 0.6845056414604187, "learning_rate": 0.00019201145983637562, "loss": 1.0037, "step": 1399 }, { "epoch": 0.26017468871956884, "grad_norm": 0.5309386849403381, "learning_rate": 0.0001920000017741267, "loss": 1.0226, "step": 1400 }, { "epoch": 0.26036052778294, "grad_norm": 0.6004118323326111, "learning_rate": 0.00019198853584291876, "loss": 1.1238, "step": 1401 }, { "epoch": 0.2605463668463111, "grad_norm": 0.4703200161457062, "learning_rate": 0.0001919770620437326, "loss": 1.0052, "step": 1402 }, { "epoch": 0.2607322059096822, "grad_norm": 0.5243449807167053, "learning_rate": 0.00019196558037754946, "loss": 1.3672, "step": 1403 }, { "epoch": 0.2609180449730533, "grad_norm": 0.5253371596336365, "learning_rate": 0.00019195409084535146, "loss": 1.0317, "step": 1404 }, { "epoch": 0.2611038840364245, "grad_norm": 2.241456985473633, "learning_rate": 0.0001919425934481214, "loss": 3.0421, "step": 1405 }, { "epoch": 0.2612897230997956, "grad_norm": 0.5534107089042664, "learning_rate": 0.00019193108818684257, "loss": 1.0637, "step": 1406 }, { "epoch": 0.2614755621631667, "grad_norm": 0.5403827428817749, "learning_rate": 0.0001919195750624991, "loss": 1.1521, "step": 1407 }, { "epoch": 0.2616614012265378, "grad_norm": 0.6491701602935791, "learning_rate": 0.0001919080540760757, "loss": 1.2381, "step": 1408 }, { "epoch": 0.2618472402899089, "grad_norm": 0.5003953576087952, "learning_rate": 0.00019189652522855777, "loss": 1.1388, "step": 1409 }, { "epoch": 0.2620330793532801, "grad_norm": 0.5262916088104248, 
"learning_rate": 0.00019188498852093145, "loss": 1.1532, "step": 1410 }, { "epoch": 0.2622189184166512, "grad_norm": 0.4679466784000397, "learning_rate": 0.00019187344395418346, "loss": 0.9516, "step": 1411 }, { "epoch": 0.2624047574800223, "grad_norm": 0.5046184062957764, "learning_rate": 0.00019186189152930124, "loss": 1.2808, "step": 1412 }, { "epoch": 0.2625905965433934, "grad_norm": 0.595723032951355, "learning_rate": 0.00019185033124727288, "loss": 1.2959, "step": 1413 }, { "epoch": 0.26277643560676456, "grad_norm": 0.5154600739479065, "learning_rate": 0.0001918387631090872, "loss": 0.9881, "step": 1414 }, { "epoch": 0.26296227467013567, "grad_norm": 0.5345718264579773, "learning_rate": 0.00019182718711573365, "loss": 1.1117, "step": 1415 }, { "epoch": 0.26314811373350677, "grad_norm": 0.4770624041557312, "learning_rate": 0.00019181560326820226, "loss": 0.9044, "step": 1416 }, { "epoch": 0.2633339527968779, "grad_norm": 0.5534773468971252, "learning_rate": 0.00019180401156748396, "loss": 1.0823, "step": 1417 }, { "epoch": 0.26351979186024904, "grad_norm": 0.5540618300437927, "learning_rate": 0.00019179241201457007, "loss": 1.3371, "step": 1418 }, { "epoch": 0.26370563092362015, "grad_norm": 0.5049322247505188, "learning_rate": 0.00019178080461045282, "loss": 1.2151, "step": 1419 }, { "epoch": 0.26389146998699126, "grad_norm": 0.605796754360199, "learning_rate": 0.00019176918935612496, "loss": 1.0641, "step": 1420 }, { "epoch": 0.26407730905036236, "grad_norm": 0.4726361036300659, "learning_rate": 0.00019175756625258004, "loss": 0.9796, "step": 1421 }, { "epoch": 0.2642631481137335, "grad_norm": 0.6339431405067444, "learning_rate": 0.00019174593530081212, "loss": 1.3727, "step": 1422 }, { "epoch": 0.26444898717710463, "grad_norm": 0.6950168013572693, "learning_rate": 0.00019173429650181606, "loss": 0.8703, "step": 1423 }, { "epoch": 0.26463482624047574, "grad_norm": 0.5827997922897339, "learning_rate": 0.0001917226498565874, "loss": 1.2306, "step": 1424 }, { 
"epoch": 0.26482066530384685, "grad_norm": 0.5235970616340637, "learning_rate": 0.00019171099536612225, "loss": 1.25, "step": 1425 }, { "epoch": 0.265006504367218, "grad_norm": 0.5464771389961243, "learning_rate": 0.00019169933303141742, "loss": 0.8001, "step": 1426 }, { "epoch": 0.2651923434305891, "grad_norm": 0.5554638504981995, "learning_rate": 0.00019168766285347047, "loss": 1.3444, "step": 1427 }, { "epoch": 0.2653781824939602, "grad_norm": 0.5135937333106995, "learning_rate": 0.00019167598483327955, "loss": 1.0415, "step": 1428 }, { "epoch": 0.26556402155733133, "grad_norm": 0.5569391846656799, "learning_rate": 0.00019166429897184354, "loss": 1.0694, "step": 1429 }, { "epoch": 0.2657498606207025, "grad_norm": 0.5736503005027771, "learning_rate": 0.0001916526052701619, "loss": 0.9726, "step": 1430 }, { "epoch": 0.2659356996840736, "grad_norm": 0.47747477889060974, "learning_rate": 0.0001916409037292348, "loss": 1.0458, "step": 1431 }, { "epoch": 0.2661215387474447, "grad_norm": 0.6593626141548157, "learning_rate": 0.00019162919435006321, "loss": 1.0182, "step": 1432 }, { "epoch": 0.2663073778108158, "grad_norm": 0.5842790603637695, "learning_rate": 0.00019161747713364858, "loss": 1.1817, "step": 1433 }, { "epoch": 0.266493216874187, "grad_norm": 0.5009592175483704, "learning_rate": 0.00019160575208099307, "loss": 1.112, "step": 1434 }, { "epoch": 0.2666790559375581, "grad_norm": 0.6723164916038513, "learning_rate": 0.00019159401919309965, "loss": 1.1049, "step": 1435 }, { "epoch": 0.2668648950009292, "grad_norm": 0.530133068561554, "learning_rate": 0.00019158227847097177, "loss": 1.2624, "step": 1436 }, { "epoch": 0.2670507340643003, "grad_norm": 0.6356481313705444, "learning_rate": 0.0001915705299156137, "loss": 1.3338, "step": 1437 }, { "epoch": 0.26723657312767146, "grad_norm": 0.5509076714515686, "learning_rate": 0.0001915587735280303, "loss": 1.1009, "step": 1438 }, { "epoch": 0.26742241219104257, "grad_norm": 0.6118791699409485, "learning_rate": 
0.0001915470093092271, "loss": 1.2114, "step": 1439 }, { "epoch": 0.2676082512544137, "grad_norm": 0.5365028977394104, "learning_rate": 0.00019153523726021034, "loss": 1.0852, "step": 1440 }, { "epoch": 0.2677940903177848, "grad_norm": 0.4970124661922455, "learning_rate": 0.00019152345738198692, "loss": 1.2104, "step": 1441 }, { "epoch": 0.26797992938115595, "grad_norm": 0.7996759414672852, "learning_rate": 0.00019151166967556438, "loss": 1.3305, "step": 1442 }, { "epoch": 0.26816576844452705, "grad_norm": 0.4470215141773224, "learning_rate": 0.0001914998741419509, "loss": 1.0159, "step": 1443 }, { "epoch": 0.26835160750789816, "grad_norm": 0.4929320216178894, "learning_rate": 0.0001914880707821555, "loss": 0.8658, "step": 1444 }, { "epoch": 0.26853744657126927, "grad_norm": 0.5478307008743286, "learning_rate": 0.00019147625959718765, "loss": 1.3146, "step": 1445 }, { "epoch": 0.2687232856346404, "grad_norm": 0.6132805943489075, "learning_rate": 0.0001914644405880576, "loss": 1.1372, "step": 1446 }, { "epoch": 0.26890912469801154, "grad_norm": 0.7843652963638306, "learning_rate": 0.00019145261375577626, "loss": 1.1238, "step": 1447 }, { "epoch": 0.26909496376138264, "grad_norm": 0.4491705298423767, "learning_rate": 0.0001914407791013552, "loss": 0.91, "step": 1448 }, { "epoch": 0.26928080282475375, "grad_norm": 0.46373412013053894, "learning_rate": 0.00019142893662580667, "loss": 1.0154, "step": 1449 }, { "epoch": 0.26946664188812486, "grad_norm": 0.5411079525947571, "learning_rate": 0.00019141708633014362, "loss": 1.2359, "step": 1450 }, { "epoch": 0.269652480951496, "grad_norm": 0.5943930149078369, "learning_rate": 0.00019140522821537958, "loss": 1.208, "step": 1451 }, { "epoch": 0.26983832001486713, "grad_norm": 0.5719057321548462, "learning_rate": 0.00019139336228252883, "loss": 1.1051, "step": 1452 }, { "epoch": 0.27002415907823824, "grad_norm": 0.5495271682739258, "learning_rate": 0.00019138148853260626, "loss": 1.114, "step": 1453 }, { "epoch": 
0.27020999814160934, "grad_norm": 0.47945669293403625, "learning_rate": 0.0001913696069666275, "loss": 1.1378, "step": 1454 }, { "epoch": 0.2703958372049805, "grad_norm": 0.5663251876831055, "learning_rate": 0.00019135771758560876, "loss": 1.0401, "step": 1455 }, { "epoch": 0.2705816762683516, "grad_norm": 0.5720750093460083, "learning_rate": 0.000191345820390567, "loss": 0.988, "step": 1456 }, { "epoch": 0.2707675153317227, "grad_norm": 0.5430013537406921, "learning_rate": 0.00019133391538251978, "loss": 1.1438, "step": 1457 }, { "epoch": 0.2709533543950938, "grad_norm": 0.5141093730926514, "learning_rate": 0.0001913220025624854, "loss": 0.8791, "step": 1458 }, { "epoch": 0.271139193458465, "grad_norm": 0.5556092858314514, "learning_rate": 0.00019131008193148274, "loss": 1.318, "step": 1459 }, { "epoch": 0.2713250325218361, "grad_norm": 0.5598298907279968, "learning_rate": 0.00019129815349053144, "loss": 1.0279, "step": 1460 }, { "epoch": 0.2715108715852072, "grad_norm": 0.5438913106918335, "learning_rate": 0.00019128621724065175, "loss": 1.0816, "step": 1461 }, { "epoch": 0.2716967106485783, "grad_norm": 0.4408967196941376, "learning_rate": 0.0001912742731828646, "loss": 0.9494, "step": 1462 }, { "epoch": 0.2718825497119495, "grad_norm": 0.5961295962333679, "learning_rate": 0.0001912623213181916, "loss": 1.2759, "step": 1463 }, { "epoch": 0.2720683887753206, "grad_norm": 0.577762246131897, "learning_rate": 0.000191250361647655, "loss": 1.0705, "step": 1464 }, { "epoch": 0.2722542278386917, "grad_norm": 0.5204886794090271, "learning_rate": 0.00019123839417227779, "loss": 1.1823, "step": 1465 }, { "epoch": 0.2724400669020628, "grad_norm": 0.5674437880516052, "learning_rate": 0.0001912264188930835, "loss": 0.9889, "step": 1466 }, { "epoch": 0.27262590596543396, "grad_norm": 0.8880316615104675, "learning_rate": 0.00019121443581109643, "loss": 1.1098, "step": 1467 }, { "epoch": 0.27281174502880506, "grad_norm": 0.5054218769073486, "learning_rate": 
0.00019120244492734154, "loss": 1.1621, "step": 1468 }, { "epoch": 0.27299758409217617, "grad_norm": 0.5926326513290405, "learning_rate": 0.00019119044624284442, "loss": 1.1382, "step": 1469 }, { "epoch": 0.2731834231555473, "grad_norm": 2.5959131717681885, "learning_rate": 0.00019117843975863136, "loss": 3.5187, "step": 1470 }, { "epoch": 0.27336926221891844, "grad_norm": 0.5273279547691345, "learning_rate": 0.00019116642547572924, "loss": 1.1472, "step": 1471 }, { "epoch": 0.27355510128228955, "grad_norm": 0.6036831736564636, "learning_rate": 0.00019115440339516576, "loss": 1.1127, "step": 1472 }, { "epoch": 0.27374094034566065, "grad_norm": 0.5351670384407043, "learning_rate": 0.0001911423735179691, "loss": 1.1082, "step": 1473 }, { "epoch": 0.27392677940903176, "grad_norm": 0.7399309873580933, "learning_rate": 0.0001911303358451683, "loss": 1.1346, "step": 1474 }, { "epoch": 0.2741126184724029, "grad_norm": 0.6517348885536194, "learning_rate": 0.00019111829037779288, "loss": 1.0512, "step": 1475 }, { "epoch": 0.27429845753577403, "grad_norm": 0.5458797812461853, "learning_rate": 0.00019110623711687317, "loss": 1.0631, "step": 1476 }, { "epoch": 0.27448429659914514, "grad_norm": 0.5033796429634094, "learning_rate": 0.00019109417606344008, "loss": 0.9589, "step": 1477 }, { "epoch": 0.27467013566251625, "grad_norm": 0.6234574317932129, "learning_rate": 0.00019108210721852523, "loss": 1.2062, "step": 1478 }, { "epoch": 0.2748559747258874, "grad_norm": 0.5345573425292969, "learning_rate": 0.00019107003058316092, "loss": 1.0058, "step": 1479 }, { "epoch": 0.2750418137892585, "grad_norm": 0.589379072189331, "learning_rate": 0.00019105794615838003, "loss": 1.1664, "step": 1480 }, { "epoch": 0.2752276528526296, "grad_norm": 0.5771492123603821, "learning_rate": 0.00019104585394521623, "loss": 1.1288, "step": 1481 }, { "epoch": 0.27541349191600073, "grad_norm": 0.5232322812080383, "learning_rate": 0.00019103375394470375, "loss": 1.1587, "step": 1482 }, { "epoch": 
0.27559933097937184, "grad_norm": 0.5217462778091431, "learning_rate": 0.0001910216461578776, "loss": 1.068, "step": 1483 }, { "epoch": 0.275785170042743, "grad_norm": 0.5919577479362488, "learning_rate": 0.00019100953058577328, "loss": 0.9965, "step": 1484 }, { "epoch": 0.2759710091061141, "grad_norm": 0.5517082810401917, "learning_rate": 0.0001909974072294271, "loss": 1.3083, "step": 1485 }, { "epoch": 0.2761568481694852, "grad_norm": 0.7410999536514282, "learning_rate": 0.00019098527608987602, "loss": 1.1973, "step": 1486 }, { "epoch": 0.2763426872328563, "grad_norm": 0.5045968890190125, "learning_rate": 0.00019097313716815763, "loss": 1.1717, "step": 1487 }, { "epoch": 0.2765285262962275, "grad_norm": 0.4956260323524475, "learning_rate": 0.0001909609904653102, "loss": 1.0734, "step": 1488 }, { "epoch": 0.2767143653595986, "grad_norm": 0.486935555934906, "learning_rate": 0.00019094883598237266, "loss": 1.0529, "step": 1489 }, { "epoch": 0.2769002044229697, "grad_norm": 0.48205116391181946, "learning_rate": 0.00019093667372038463, "loss": 1.0731, "step": 1490 }, { "epoch": 0.2770860434863408, "grad_norm": 0.4888480305671692, "learning_rate": 0.00019092450368038636, "loss": 1.0925, "step": 1491 }, { "epoch": 0.27727188254971197, "grad_norm": 0.5380078554153442, "learning_rate": 0.00019091232586341875, "loss": 0.9717, "step": 1492 }, { "epoch": 0.2774577216130831, "grad_norm": 0.4658101797103882, "learning_rate": 0.00019090014027052342, "loss": 0.7607, "step": 1493 }, { "epoch": 0.2776435606764542, "grad_norm": 0.5170148015022278, "learning_rate": 0.00019088794690274265, "loss": 0.9027, "step": 1494 }, { "epoch": 0.2778293997398253, "grad_norm": 0.5996540784835815, "learning_rate": 0.00019087574576111935, "loss": 1.3506, "step": 1495 }, { "epoch": 0.27801523880319645, "grad_norm": 0.4204680621623993, "learning_rate": 0.0001908635368466971, "loss": 0.6852, "step": 1496 }, { "epoch": 0.27820107786656756, "grad_norm": 0.49904823303222656, "learning_rate": 
0.00019085132016052016, "loss": 0.9465, "step": 1497 }, { "epoch": 0.27838691692993867, "grad_norm": 0.4688251316547394, "learning_rate": 0.00019083909570363346, "loss": 1.0922, "step": 1498 }, { "epoch": 0.2785727559933098, "grad_norm": 0.5986531972885132, "learning_rate": 0.00019082686347708254, "loss": 1.2231, "step": 1499 }, { "epoch": 0.27875859505668094, "grad_norm": 0.4802838861942291, "learning_rate": 0.00019081462348191373, "loss": 1.057, "step": 1500 }, { "epoch": 0.27894443412005204, "grad_norm": 0.6052405834197998, "learning_rate": 0.00019080237571917388, "loss": 1.046, "step": 1501 }, { "epoch": 0.27913027318342315, "grad_norm": 0.5468049645423889, "learning_rate": 0.00019079012018991058, "loss": 1.0525, "step": 1502 }, { "epoch": 0.27931611224679426, "grad_norm": 0.4576660692691803, "learning_rate": 0.00019077785689517208, "loss": 0.9774, "step": 1503 }, { "epoch": 0.2795019513101654, "grad_norm": 0.5179756879806519, "learning_rate": 0.00019076558583600728, "loss": 1.1046, "step": 1504 }, { "epoch": 0.2796877903735365, "grad_norm": 0.7189592123031616, "learning_rate": 0.00019075330701346578, "loss": 1.2653, "step": 1505 }, { "epoch": 0.27987362943690763, "grad_norm": 0.48832884430885315, "learning_rate": 0.00019074102042859775, "loss": 1.1849, "step": 1506 }, { "epoch": 0.28005946850027874, "grad_norm": 0.5543190836906433, "learning_rate": 0.00019072872608245412, "loss": 0.8673, "step": 1507 }, { "epoch": 0.2802453075636499, "grad_norm": 0.5351570844650269, "learning_rate": 0.00019071642397608648, "loss": 1.0057, "step": 1508 }, { "epoch": 0.280431146627021, "grad_norm": 0.4867394268512726, "learning_rate": 0.00019070411411054706, "loss": 1.0946, "step": 1509 }, { "epoch": 0.2806169856903921, "grad_norm": 0.47239750623703003, "learning_rate": 0.00019069179648688867, "loss": 1.0842, "step": 1510 }, { "epoch": 0.2808028247537632, "grad_norm": 0.5905438661575317, "learning_rate": 0.00019067947110616492, "loss": 0.9838, "step": 1511 }, { "epoch": 
0.2809886638171344, "grad_norm": 0.464809387922287, "learning_rate": 0.00019066713796943007, "loss": 0.9241, "step": 1512 }, { "epoch": 0.2811745028805055, "grad_norm": 0.548126220703125, "learning_rate": 0.0001906547970777389, "loss": 0.9682, "step": 1513 }, { "epoch": 0.2813603419438766, "grad_norm": 0.5304195880889893, "learning_rate": 0.00019064244843214706, "loss": 1.1021, "step": 1514 }, { "epoch": 0.2815461810072477, "grad_norm": 0.5824160575866699, "learning_rate": 0.00019063009203371066, "loss": 1.1086, "step": 1515 }, { "epoch": 0.28173202007061887, "grad_norm": 0.5206829905509949, "learning_rate": 0.0001906177278834866, "loss": 1.1925, "step": 1516 }, { "epoch": 0.28191785913399, "grad_norm": 0.6318472623825073, "learning_rate": 0.00019060535598253243, "loss": 0.9323, "step": 1517 }, { "epoch": 0.2821036981973611, "grad_norm": 0.5612223744392395, "learning_rate": 0.0001905929763319064, "loss": 1.1283, "step": 1518 }, { "epoch": 0.2822895372607322, "grad_norm": 0.4812374711036682, "learning_rate": 0.00019058058893266724, "loss": 0.9585, "step": 1519 }, { "epoch": 0.2824753763241033, "grad_norm": 0.5273768305778503, "learning_rate": 0.00019056819378587454, "loss": 1.3129, "step": 1520 }, { "epoch": 0.28266121538747446, "grad_norm": 0.5103570818901062, "learning_rate": 0.00019055579089258853, "loss": 1.1644, "step": 1521 }, { "epoch": 0.28284705445084557, "grad_norm": 0.5185193419456482, "learning_rate": 0.00019054338025386994, "loss": 1.0037, "step": 1522 }, { "epoch": 0.2830328935142167, "grad_norm": 0.5031711459159851, "learning_rate": 0.0001905309618707804, "loss": 0.8256, "step": 1523 }, { "epoch": 0.2832187325775878, "grad_norm": 0.5190253257751465, "learning_rate": 0.00019051853574438203, "loss": 0.9677, "step": 1524 }, { "epoch": 0.28340457164095895, "grad_norm": 0.5510692000389099, "learning_rate": 0.00019050610187573762, "loss": 0.9814, "step": 1525 }, { "epoch": 0.28359041070433005, "grad_norm": 0.5186879634857178, "learning_rate": 
0.00019049366026591073, "loss": 1.1876, "step": 1526 }, { "epoch": 0.28377624976770116, "grad_norm": 0.5891383290290833, "learning_rate": 0.00019048121091596551, "loss": 1.0911, "step": 1527 }, { "epoch": 0.28396208883107227, "grad_norm": 0.5759264230728149, "learning_rate": 0.00019046875382696676, "loss": 0.9242, "step": 1528 }, { "epoch": 0.28414792789444343, "grad_norm": 0.6390319466590881, "learning_rate": 0.00019045628899997996, "loss": 1.1155, "step": 1529 }, { "epoch": 0.28433376695781454, "grad_norm": 0.5420219898223877, "learning_rate": 0.00019044381643607125, "loss": 1.2446, "step": 1530 }, { "epoch": 0.28451960602118564, "grad_norm": 0.6645781993865967, "learning_rate": 0.00019043133613630745, "loss": 1.1495, "step": 1531 }, { "epoch": 0.28470544508455675, "grad_norm": 0.6481037139892578, "learning_rate": 0.00019041884810175604, "loss": 0.9835, "step": 1532 }, { "epoch": 0.2848912841479279, "grad_norm": 0.7210860252380371, "learning_rate": 0.0001904063523334851, "loss": 1.0869, "step": 1533 }, { "epoch": 0.285077123211299, "grad_norm": 0.5078470706939697, "learning_rate": 0.00019039384883256347, "loss": 1.1299, "step": 1534 }, { "epoch": 0.28526296227467013, "grad_norm": 0.5624436736106873, "learning_rate": 0.00019038133760006058, "loss": 1.2529, "step": 1535 }, { "epoch": 0.28544880133804124, "grad_norm": 0.49437078833580017, "learning_rate": 0.00019036881863704656, "loss": 1.0552, "step": 1536 }, { "epoch": 0.2856346404014124, "grad_norm": 0.53767329454422, "learning_rate": 0.00019035629194459215, "loss": 1.0651, "step": 1537 }, { "epoch": 0.2858204794647835, "grad_norm": 0.5064103603363037, "learning_rate": 0.0001903437575237688, "loss": 0.9809, "step": 1538 }, { "epoch": 0.2860063185281546, "grad_norm": 0.5985023975372314, "learning_rate": 0.00019033121537564862, "loss": 0.9943, "step": 1539 }, { "epoch": 0.2861921575915257, "grad_norm": 0.6447902917861938, "learning_rate": 0.00019031866550130438, "loss": 1.1544, "step": 1540 }, { "epoch": 
0.2863779966548969, "grad_norm": 0.4907586872577667, "learning_rate": 0.00019030610790180943, "loss": 1.0605, "step": 1541 }, { "epoch": 0.286563835718268, "grad_norm": 0.5801265239715576, "learning_rate": 0.00019029354257823792, "loss": 1.2279, "step": 1542 }, { "epoch": 0.2867496747816391, "grad_norm": 0.5001637935638428, "learning_rate": 0.00019028096953166456, "loss": 0.9343, "step": 1543 }, { "epoch": 0.2869355138450102, "grad_norm": 0.5724277496337891, "learning_rate": 0.00019026838876316476, "loss": 1.0683, "step": 1544 }, { "epoch": 0.28712135290838137, "grad_norm": 0.5190825462341309, "learning_rate": 0.00019025580027381454, "loss": 1.2547, "step": 1545 }, { "epoch": 0.2873071919717525, "grad_norm": 0.6240419149398804, "learning_rate": 0.0001902432040646907, "loss": 0.9924, "step": 1546 }, { "epoch": 0.2874930310351236, "grad_norm": 0.48464152216911316, "learning_rate": 0.00019023060013687056, "loss": 0.9693, "step": 1547 }, { "epoch": 0.2876788700984947, "grad_norm": 0.503472089767456, "learning_rate": 0.00019021798849143217, "loss": 0.997, "step": 1548 }, { "epoch": 0.28786470916186585, "grad_norm": 0.580359697341919, "learning_rate": 0.00019020536912945423, "loss": 1.1117, "step": 1549 }, { "epoch": 0.28805054822523696, "grad_norm": 0.622320294380188, "learning_rate": 0.0001901927420520161, "loss": 1.2558, "step": 1550 }, { "epoch": 0.28823638728860806, "grad_norm": 0.5121637582778931, "learning_rate": 0.00019018010726019785, "loss": 1.0791, "step": 1551 }, { "epoch": 0.28842222635197917, "grad_norm": 0.567800760269165, "learning_rate": 0.00019016746475508008, "loss": 1.0396, "step": 1552 }, { "epoch": 0.2886080654153503, "grad_norm": 0.5097231268882751, "learning_rate": 0.00019015481453774422, "loss": 1.012, "step": 1553 }, { "epoch": 0.28879390447872144, "grad_norm": 0.556196928024292, "learning_rate": 0.00019014215660927218, "loss": 1.2133, "step": 1554 }, { "epoch": 0.28897974354209255, "grad_norm": 0.5323203206062317, "learning_rate": 
0.00019012949097074668, "loss": 0.9587, "step": 1555 }, { "epoch": 0.28916558260546366, "grad_norm": 0.5991493463516235, "learning_rate": 0.00019011681762325103, "loss": 1.049, "step": 1556 }, { "epoch": 0.28935142166883476, "grad_norm": 0.4759868085384369, "learning_rate": 0.00019010413656786918, "loss": 1.1119, "step": 1557 }, { "epoch": 0.2895372607322059, "grad_norm": 0.5300564765930176, "learning_rate": 0.0001900914478056858, "loss": 1.2332, "step": 1558 }, { "epoch": 0.28972309979557703, "grad_norm": 0.7315372824668884, "learning_rate": 0.0001900787513377862, "loss": 0.8838, "step": 1559 }, { "epoch": 0.28990893885894814, "grad_norm": 0.5923600196838379, "learning_rate": 0.00019006604716525626, "loss": 0.9924, "step": 1560 }, { "epoch": 0.29009477792231925, "grad_norm": 0.5271015763282776, "learning_rate": 0.00019005333528918268, "loss": 1.1689, "step": 1561 }, { "epoch": 0.2902806169856904, "grad_norm": 0.5299558043479919, "learning_rate": 0.00019004061571065267, "loss": 1.2, "step": 1562 }, { "epoch": 0.2904664560490615, "grad_norm": 0.5884658694267273, "learning_rate": 0.00019002788843075426, "loss": 1.0522, "step": 1563 }, { "epoch": 0.2906522951124326, "grad_norm": 0.40285414457321167, "learning_rate": 0.0001900151534505759, "loss": 0.8925, "step": 1564 }, { "epoch": 0.29083813417580373, "grad_norm": 0.46703270077705383, "learning_rate": 0.00019000241077120697, "loss": 1.0306, "step": 1565 }, { "epoch": 0.2910239732391749, "grad_norm": 0.49443286657333374, "learning_rate": 0.0001899896603937373, "loss": 1.2487, "step": 1566 }, { "epoch": 0.291209812302546, "grad_norm": 0.5805255174636841, "learning_rate": 0.00018997690231925752, "loss": 1.1078, "step": 1567 }, { "epoch": 0.2913956513659171, "grad_norm": 0.5543115735054016, "learning_rate": 0.0001899641365488588, "loss": 1.0998, "step": 1568 }, { "epoch": 0.2915814904292882, "grad_norm": 0.5571877956390381, "learning_rate": 0.000189951363083633, "loss": 1.1004, "step": 1569 }, { "epoch": 
0.2917673294926594, "grad_norm": 0.4987258315086365, "learning_rate": 0.00018993858192467277, "loss": 1.1713, "step": 1570 }, { "epoch": 0.2919531685560305, "grad_norm": 0.6147323250770569, "learning_rate": 0.00018992579307307119, "loss": 1.1392, "step": 1571 }, { "epoch": 0.2921390076194016, "grad_norm": 0.5259654521942139, "learning_rate": 0.00018991299652992217, "loss": 0.8903, "step": 1572 }, { "epoch": 0.2923248466827727, "grad_norm": 0.6485708355903625, "learning_rate": 0.00018990019229632031, "loss": 1.045, "step": 1573 }, { "epoch": 0.29251068574614386, "grad_norm": 0.49851977825164795, "learning_rate": 0.00018988738037336062, "loss": 0.7989, "step": 1574 }, { "epoch": 0.29269652480951497, "grad_norm": 0.6040017008781433, "learning_rate": 0.00018987456076213904, "loss": 1.0092, "step": 1575 }, { "epoch": 0.2928823638728861, "grad_norm": 0.5967439413070679, "learning_rate": 0.00018986173346375205, "loss": 1.0741, "step": 1576 }, { "epoch": 0.2930682029362572, "grad_norm": 0.6299907565116882, "learning_rate": 0.00018984889847929677, "loss": 1.2439, "step": 1577 }, { "epoch": 0.29325404199962835, "grad_norm": 0.635490894317627, "learning_rate": 0.00018983605580987102, "loss": 0.9893, "step": 1578 }, { "epoch": 0.29343988106299945, "grad_norm": 0.501427173614502, "learning_rate": 0.00018982320545657324, "loss": 0.9781, "step": 1579 }, { "epoch": 0.29362572012637056, "grad_norm": 0.6587309837341309, "learning_rate": 0.00018981034742050258, "loss": 1.2243, "step": 1580 }, { "epoch": 0.29381155918974167, "grad_norm": 0.524802565574646, "learning_rate": 0.0001897974817027588, "loss": 1.2386, "step": 1581 }, { "epoch": 0.29399739825311283, "grad_norm": 0.4880768656730652, "learning_rate": 0.0001897846083044423, "loss": 1.1358, "step": 1582 }, { "epoch": 0.29418323731648394, "grad_norm": 0.46930983662605286, "learning_rate": 0.00018977172722665423, "loss": 1.0273, "step": 1583 }, { "epoch": 0.29436907637985504, "grad_norm": 0.5368134379386902, "learning_rate": 
0.0001897588384704963, "loss": 1.1655, "step": 1584 }, { "epoch": 0.29455491544322615, "grad_norm": 0.7028413414955139, "learning_rate": 0.00018974594203707094, "loss": 1.0176, "step": 1585 }, { "epoch": 0.2947407545065973, "grad_norm": 0.5124669075012207, "learning_rate": 0.00018973303792748115, "loss": 1.1427, "step": 1586 }, { "epoch": 0.2949265935699684, "grad_norm": 0.6192018985748291, "learning_rate": 0.0001897201261428307, "loss": 1.0188, "step": 1587 }, { "epoch": 0.2951124326333395, "grad_norm": 0.5622596740722656, "learning_rate": 0.000189707206684224, "loss": 1.0261, "step": 1588 }, { "epoch": 0.29529827169671063, "grad_norm": 0.5319574475288391, "learning_rate": 0.000189694279552766, "loss": 1.0369, "step": 1589 }, { "epoch": 0.29548411076008174, "grad_norm": 0.5429655313491821, "learning_rate": 0.0001896813447495624, "loss": 0.822, "step": 1590 }, { "epoch": 0.2956699498234529, "grad_norm": 0.49856165051460266, "learning_rate": 0.00018966840227571953, "loss": 0.9698, "step": 1591 }, { "epoch": 0.295855788886824, "grad_norm": 0.5880312323570251, "learning_rate": 0.00018965545213234445, "loss": 1.2094, "step": 1592 }, { "epoch": 0.2960416279501951, "grad_norm": 0.6625062823295593, "learning_rate": 0.0001896424943205448, "loss": 1.2754, "step": 1593 }, { "epoch": 0.2962274670135662, "grad_norm": 0.542192816734314, "learning_rate": 0.00018962952884142884, "loss": 1.0705, "step": 1594 }, { "epoch": 0.2964133060769374, "grad_norm": 0.5409523844718933, "learning_rate": 0.00018961655569610557, "loss": 1.0441, "step": 1595 }, { "epoch": 0.2965991451403085, "grad_norm": 0.5490151047706604, "learning_rate": 0.0001896035748856846, "loss": 1.0385, "step": 1596 }, { "epoch": 0.2967849842036796, "grad_norm": 0.5456315279006958, "learning_rate": 0.00018959058641127623, "loss": 1.0773, "step": 1597 }, { "epoch": 0.2969708232670507, "grad_norm": 0.5822267532348633, "learning_rate": 0.00018957759027399135, "loss": 1.0326, "step": 1598 }, { "epoch": 0.2971566623304219, 
"grad_norm": 0.6549869775772095, "learning_rate": 0.00018956458647494158, "loss": 1.0455, "step": 1599 }, { "epoch": 0.297342501393793, "grad_norm": 0.6730489730834961, "learning_rate": 0.00018955157501523913, "loss": 0.9874, "step": 1600 }, { "epoch": 0.2975283404571641, "grad_norm": 0.5480064153671265, "learning_rate": 0.00018953855589599696, "loss": 0.9857, "step": 1601 }, { "epoch": 0.2977141795205352, "grad_norm": 0.5919156670570374, "learning_rate": 0.00018952552911832856, "loss": 1.2629, "step": 1602 }, { "epoch": 0.29790001858390636, "grad_norm": 0.5088599920272827, "learning_rate": 0.00018951249468334818, "loss": 1.073, "step": 1603 }, { "epoch": 0.29808585764727746, "grad_norm": 0.6555619835853577, "learning_rate": 0.00018949945259217067, "loss": 1.3914, "step": 1604 }, { "epoch": 0.29827169671064857, "grad_norm": 0.5444729924201965, "learning_rate": 0.00018948640284591152, "loss": 1.1564, "step": 1605 }, { "epoch": 0.2984575357740197, "grad_norm": 0.5014877915382385, "learning_rate": 0.00018947334544568696, "loss": 0.9872, "step": 1606 }, { "epoch": 0.29864337483739084, "grad_norm": 0.5414091348648071, "learning_rate": 0.0001894602803926138, "loss": 0.8565, "step": 1607 }, { "epoch": 0.29882921390076195, "grad_norm": 0.5945007801055908, "learning_rate": 0.00018944720768780945, "loss": 1.238, "step": 1608 }, { "epoch": 0.29901505296413305, "grad_norm": 0.6132047772407532, "learning_rate": 0.00018943412733239215, "loss": 0.9816, "step": 1609 }, { "epoch": 0.29920089202750416, "grad_norm": 0.5965632796287537, "learning_rate": 0.00018942103932748065, "loss": 1.2321, "step": 1610 }, { "epoch": 0.2993867310908753, "grad_norm": 0.5612810254096985, "learning_rate": 0.00018940794367419437, "loss": 1.1615, "step": 1611 }, { "epoch": 0.29957257015424643, "grad_norm": 0.5566758513450623, "learning_rate": 0.00018939484037365346, "loss": 1.099, "step": 1612 }, { "epoch": 0.29975840921761754, "grad_norm": 0.4823967218399048, "learning_rate": 0.0001893817294269786, 
"loss": 1.065, "step": 1613 }, { "epoch": 0.29994424828098865, "grad_norm": 0.505115270614624, "learning_rate": 0.00018936861083529126, "loss": 1.0358, "step": 1614 }, { "epoch": 0.3001300873443598, "grad_norm": 0.4831530749797821, "learning_rate": 0.0001893554845997135, "loss": 0.9381, "step": 1615 }, { "epoch": 0.3003159264077309, "grad_norm": 0.5076389312744141, "learning_rate": 0.00018934235072136802, "loss": 0.9582, "step": 1616 }, { "epoch": 0.300501765471102, "grad_norm": 0.5244806408882141, "learning_rate": 0.0001893292092013782, "loss": 1.0831, "step": 1617 }, { "epoch": 0.30068760453447313, "grad_norm": 0.4544525146484375, "learning_rate": 0.000189316060040868, "loss": 1.2185, "step": 1618 }, { "epoch": 0.3008734435978443, "grad_norm": 0.553446888923645, "learning_rate": 0.0001893029032409622, "loss": 0.9928, "step": 1619 }, { "epoch": 0.3010592826612154, "grad_norm": 0.5081769227981567, "learning_rate": 0.00018928973880278605, "loss": 1.0391, "step": 1620 }, { "epoch": 0.3012451217245865, "grad_norm": 0.5252265930175781, "learning_rate": 0.00018927656672746554, "loss": 1.0321, "step": 1621 }, { "epoch": 0.3014309607879576, "grad_norm": 0.5269213914871216, "learning_rate": 0.00018926338701612738, "loss": 1.2644, "step": 1622 }, { "epoch": 0.3016167998513288, "grad_norm": 0.5293154120445251, "learning_rate": 0.00018925019966989877, "loss": 1.0577, "step": 1623 }, { "epoch": 0.3018026389146999, "grad_norm": 0.593284010887146, "learning_rate": 0.00018923700468990767, "loss": 1.1221, "step": 1624 }, { "epoch": 0.301988477978071, "grad_norm": 0.5898813605308533, "learning_rate": 0.00018922380207728274, "loss": 1.0924, "step": 1625 }, { "epoch": 0.3021743170414421, "grad_norm": 0.5219821333885193, "learning_rate": 0.00018921059183315316, "loss": 1.0935, "step": 1626 }, { "epoch": 0.3023601561048132, "grad_norm": 0.5993270874023438, "learning_rate": 0.00018919737395864882, "loss": 1.0921, "step": 1627 }, { "epoch": 0.30254599516818437, "grad_norm": 
0.4896244704723358, "learning_rate": 0.00018918414845490035, "loss": 0.9751, "step": 1628 }, { "epoch": 0.3027318342315555, "grad_norm": 0.5135372281074524, "learning_rate": 0.0001891709153230389, "loss": 1.0925, "step": 1629 }, { "epoch": 0.3029176732949266, "grad_norm": 0.6543980240821838, "learning_rate": 0.0001891576745641963, "loss": 1.2144, "step": 1630 }, { "epoch": 0.3031035123582977, "grad_norm": 0.5897946953773499, "learning_rate": 0.00018914442617950513, "loss": 1.1757, "step": 1631 }, { "epoch": 0.30328935142166885, "grad_norm": 0.5730814933776855, "learning_rate": 0.00018913117017009854, "loss": 0.8796, "step": 1632 }, { "epoch": 0.30347519048503996, "grad_norm": 0.5452990531921387, "learning_rate": 0.0001891179065371103, "loss": 1.0472, "step": 1633 }, { "epoch": 0.30366102954841107, "grad_norm": 0.5573520064353943, "learning_rate": 0.00018910463528167492, "loss": 1.2547, "step": 1634 }, { "epoch": 0.3038468686117822, "grad_norm": 0.4705549478530884, "learning_rate": 0.0001890913564049275, "loss": 0.9675, "step": 1635 }, { "epoch": 0.30403270767515334, "grad_norm": 0.525519073009491, "learning_rate": 0.00018907806990800383, "loss": 0.9774, "step": 1636 }, { "epoch": 0.30421854673852444, "grad_norm": 0.5609502792358398, "learning_rate": 0.0001890647757920403, "loss": 0.9778, "step": 1637 }, { "epoch": 0.30440438580189555, "grad_norm": 0.5060504674911499, "learning_rate": 0.00018905147405817399, "loss": 1.1342, "step": 1638 }, { "epoch": 0.30459022486526666, "grad_norm": 0.7549655437469482, "learning_rate": 0.00018903816470754265, "loss": 0.8456, "step": 1639 }, { "epoch": 0.3047760639286378, "grad_norm": 0.5245809555053711, "learning_rate": 0.00018902484774128463, "loss": 1.2306, "step": 1640 }, { "epoch": 0.3049619029920089, "grad_norm": 0.5347371101379395, "learning_rate": 0.000189011523160539, "loss": 0.883, "step": 1641 }, { "epoch": 0.30514774205538003, "grad_norm": 0.5013769865036011, "learning_rate": 0.0001889981909664454, "loss": 1.1944, 
"step": 1642 }, { "epoch": 0.30533358111875114, "grad_norm": 0.5568968653678894, "learning_rate": 0.0001889848511601442, "loss": 1.2865, "step": 1643 }, { "epoch": 0.3055194201821223, "grad_norm": 0.5005005598068237, "learning_rate": 0.00018897150374277635, "loss": 1.0849, "step": 1644 }, { "epoch": 0.3057052592454934, "grad_norm": 0.5866604447364807, "learning_rate": 0.00018895814871548348, "loss": 1.2455, "step": 1645 }, { "epoch": 0.3058910983088645, "grad_norm": 0.5890522003173828, "learning_rate": 0.0001889447860794079, "loss": 0.9689, "step": 1646 }, { "epoch": 0.3060769373722356, "grad_norm": 0.5062227845191956, "learning_rate": 0.00018893141583569252, "loss": 1.1549, "step": 1647 }, { "epoch": 0.3062627764356068, "grad_norm": 0.5845811367034912, "learning_rate": 0.0001889180379854809, "loss": 1.1489, "step": 1648 }, { "epoch": 0.3064486154989779, "grad_norm": 0.641802191734314, "learning_rate": 0.00018890465252991738, "loss": 1.0747, "step": 1649 }, { "epoch": 0.306634454562349, "grad_norm": 0.5519110560417175, "learning_rate": 0.00018889125947014676, "loss": 1.1263, "step": 1650 }, { "epoch": 0.3068202936257201, "grad_norm": 0.594745934009552, "learning_rate": 0.0001888778588073146, "loss": 1.0389, "step": 1651 }, { "epoch": 0.30700613268909127, "grad_norm": 0.44811272621154785, "learning_rate": 0.00018886445054256705, "loss": 0.8878, "step": 1652 }, { "epoch": 0.3071919717524624, "grad_norm": 0.6732388734817505, "learning_rate": 0.00018885103467705102, "loss": 1.1783, "step": 1653 }, { "epoch": 0.3073778108158335, "grad_norm": 0.6434346437454224, "learning_rate": 0.00018883761121191395, "loss": 1.2321, "step": 1654 }, { "epoch": 0.3075636498792046, "grad_norm": 0.5560142993927002, "learning_rate": 0.000188824180148304, "loss": 1.0874, "step": 1655 }, { "epoch": 0.30774948894257576, "grad_norm": 0.6674293279647827, "learning_rate": 0.00018881074148736993, "loss": 1.165, "step": 1656 }, { "epoch": 0.30793532800594686, "grad_norm": 0.5190140604972839, 
"learning_rate": 0.00018879729523026123, "loss": 0.9781, "step": 1657 }, { "epoch": 0.30812116706931797, "grad_norm": 0.5726615786552429, "learning_rate": 0.00018878384137812793, "loss": 1.1098, "step": 1658 }, { "epoch": 0.3083070061326891, "grad_norm": 0.6239640712738037, "learning_rate": 0.00018877037993212075, "loss": 1.0968, "step": 1659 }, { "epoch": 0.30849284519606024, "grad_norm": 0.5197991132736206, "learning_rate": 0.00018875691089339117, "loss": 0.9939, "step": 1660 }, { "epoch": 0.30867868425943135, "grad_norm": 0.5677801966667175, "learning_rate": 0.00018874343426309118, "loss": 1.4178, "step": 1661 }, { "epoch": 0.30886452332280245, "grad_norm": 0.504206120967865, "learning_rate": 0.0001887299500423734, "loss": 1.0849, "step": 1662 }, { "epoch": 0.30905036238617356, "grad_norm": 0.7125438451766968, "learning_rate": 0.00018871645823239128, "loss": 0.922, "step": 1663 }, { "epoch": 0.30923620144954467, "grad_norm": 0.6472058892250061, "learning_rate": 0.00018870295883429873, "loss": 1.3155, "step": 1664 }, { "epoch": 0.30942204051291583, "grad_norm": 0.5385541915893555, "learning_rate": 0.0001886894518492504, "loss": 0.9653, "step": 1665 }, { "epoch": 0.30960787957628694, "grad_norm": 0.5295284986495972, "learning_rate": 0.00018867593727840153, "loss": 0.9995, "step": 1666 }, { "epoch": 0.30979371863965804, "grad_norm": 0.5476770997047424, "learning_rate": 0.00018866241512290814, "loss": 1.2066, "step": 1667 }, { "epoch": 0.30997955770302915, "grad_norm": 0.5373952984809875, "learning_rate": 0.00018864888538392676, "loss": 1.0628, "step": 1668 }, { "epoch": 0.3101653967664003, "grad_norm": 0.5620145797729492, "learning_rate": 0.00018863534806261458, "loss": 1.1074, "step": 1669 }, { "epoch": 0.3103512358297714, "grad_norm": 0.49720069766044617, "learning_rate": 0.00018862180316012953, "loss": 1.0306, "step": 1670 }, { "epoch": 0.31053707489314253, "grad_norm": 0.5404595136642456, "learning_rate": 0.00018860825067763015, "loss": 1.0686, "step": 1671 }, 
{ "epoch": 0.31072291395651364, "grad_norm": 0.4632123112678528, "learning_rate": 0.00018859469061627556, "loss": 0.9225, "step": 1672 }, { "epoch": 0.3109087530198848, "grad_norm": 0.6441605687141418, "learning_rate": 0.0001885811229772256, "loss": 1.3782, "step": 1673 }, { "epoch": 0.3110945920832559, "grad_norm": 0.5591496229171753, "learning_rate": 0.0001885675477616407, "loss": 0.9357, "step": 1674 }, { "epoch": 0.311280431146627, "grad_norm": 0.5491276979446411, "learning_rate": 0.0001885539649706821, "loss": 0.9397, "step": 1675 }, { "epoch": 0.3114662702099981, "grad_norm": 0.5409485697746277, "learning_rate": 0.00018854037460551144, "loss": 0.9025, "step": 1676 }, { "epoch": 0.3116521092733693, "grad_norm": 0.5407100319862366, "learning_rate": 0.00018852677666729117, "loss": 1.1127, "step": 1677 }, { "epoch": 0.3118379483367404, "grad_norm": 0.6072363257408142, "learning_rate": 0.0001885131711571844, "loss": 1.1601, "step": 1678 }, { "epoch": 0.3120237874001115, "grad_norm": 0.4891136586666107, "learning_rate": 0.00018849955807635475, "loss": 1.0007, "step": 1679 }, { "epoch": 0.3122096264634826, "grad_norm": 0.7494361400604248, "learning_rate": 0.00018848593742596664, "loss": 1.3113, "step": 1680 }, { "epoch": 0.31239546552685377, "grad_norm": 0.6173309087753296, "learning_rate": 0.00018847230920718508, "loss": 0.9148, "step": 1681 }, { "epoch": 0.3125813045902249, "grad_norm": 0.6358309388160706, "learning_rate": 0.0001884586734211757, "loss": 1.104, "step": 1682 }, { "epoch": 0.312767143653596, "grad_norm": 0.47288426756858826, "learning_rate": 0.0001884450300691048, "loss": 0.8665, "step": 1683 }, { "epoch": 0.3129529827169671, "grad_norm": 0.643383264541626, "learning_rate": 0.00018843137915213932, "loss": 1.2428, "step": 1684 }, { "epoch": 0.31313882178033825, "grad_norm": 0.42575201392173767, "learning_rate": 0.0001884177206714468, "loss": 0.865, "step": 1685 }, { "epoch": 0.31332466084370936, "grad_norm": 0.662293553352356, "learning_rate": 
0.00018840405462819558, "loss": 1.2715, "step": 1686 }, { "epoch": 0.31351049990708046, "grad_norm": 0.6092833280563354, "learning_rate": 0.0001883903810235545, "loss": 1.3039, "step": 1687 }, { "epoch": 0.31369633897045157, "grad_norm": 0.5167418718338013, "learning_rate": 0.0001883766998586931, "loss": 1.0148, "step": 1688 }, { "epoch": 0.31388217803382273, "grad_norm": 0.580491840839386, "learning_rate": 0.00018836301113478156, "loss": 1.1673, "step": 1689 }, { "epoch": 0.31406801709719384, "grad_norm": 0.6332157850265503, "learning_rate": 0.0001883493148529907, "loss": 1.3328, "step": 1690 }, { "epoch": 0.31425385616056495, "grad_norm": 0.4925262928009033, "learning_rate": 0.000188335611014492, "loss": 1.0126, "step": 1691 }, { "epoch": 0.31443969522393606, "grad_norm": 0.4704776704311371, "learning_rate": 0.00018832189962045753, "loss": 0.923, "step": 1692 }, { "epoch": 0.3146255342873072, "grad_norm": 0.6361016631126404, "learning_rate": 0.00018830818067206012, "loss": 0.9724, "step": 1693 }, { "epoch": 0.3148113733506783, "grad_norm": 0.5294798016548157, "learning_rate": 0.00018829445417047316, "loss": 1.1158, "step": 1694 }, { "epoch": 0.31499721241404943, "grad_norm": 0.4817502498626709, "learning_rate": 0.00018828072011687074, "loss": 1.0515, "step": 1695 }, { "epoch": 0.31518305147742054, "grad_norm": 0.5335044264793396, "learning_rate": 0.00018826697851242747, "loss": 1.0616, "step": 1696 }, { "epoch": 0.3153688905407917, "grad_norm": 0.4775748550891876, "learning_rate": 0.00018825322935831878, "loss": 0.881, "step": 1697 }, { "epoch": 0.3155547296041628, "grad_norm": 0.5682370066642761, "learning_rate": 0.00018823947265572065, "loss": 0.9613, "step": 1698 }, { "epoch": 0.3157405686675339, "grad_norm": 0.5231326818466187, "learning_rate": 0.00018822570840580972, "loss": 0.9672, "step": 1699 }, { "epoch": 0.315926407730905, "grad_norm": 0.6463721394538879, "learning_rate": 0.00018821193660976325, "loss": 1.0961, "step": 1700 }, { "epoch": 
0.31611224679427613, "grad_norm": 0.5318092703819275, "learning_rate": 0.00018819815726875918, "loss": 1.017, "step": 1701 }, { "epoch": 0.3162980858576473, "grad_norm": 0.5542148947715759, "learning_rate": 0.0001881843703839761, "loss": 0.9725, "step": 1702 }, { "epoch": 0.3164839249210184, "grad_norm": 0.606061577796936, "learning_rate": 0.00018817057595659327, "loss": 1.1379, "step": 1703 }, { "epoch": 0.3166697639843895, "grad_norm": 0.5355798006057739, "learning_rate": 0.00018815677398779048, "loss": 0.9458, "step": 1704 }, { "epoch": 0.3168556030477606, "grad_norm": 0.534319281578064, "learning_rate": 0.00018814296447874828, "loss": 0.8033, "step": 1705 }, { "epoch": 0.3170414421111318, "grad_norm": 0.5069194436073303, "learning_rate": 0.00018812914743064784, "loss": 1.087, "step": 1706 }, { "epoch": 0.3172272811745029, "grad_norm": 0.5822552442550659, "learning_rate": 0.00018811532284467096, "loss": 1.1817, "step": 1707 }, { "epoch": 0.317413120237874, "grad_norm": 0.46280261874198914, "learning_rate": 0.00018810149072200004, "loss": 0.9893, "step": 1708 }, { "epoch": 0.3175989593012451, "grad_norm": 0.6027824282646179, "learning_rate": 0.00018808765106381823, "loss": 1.0487, "step": 1709 }, { "epoch": 0.31778479836461626, "grad_norm": 0.5475772023200989, "learning_rate": 0.00018807380387130923, "loss": 1.216, "step": 1710 }, { "epoch": 0.31797063742798737, "grad_norm": 0.5941911935806274, "learning_rate": 0.00018805994914565744, "loss": 1.4315, "step": 1711 }, { "epoch": 0.3181564764913585, "grad_norm": 0.6065940856933594, "learning_rate": 0.0001880460868880479, "loss": 1.0772, "step": 1712 }, { "epoch": 0.3183423155547296, "grad_norm": 0.6566287875175476, "learning_rate": 0.00018803221709966624, "loss": 1.2141, "step": 1713 }, { "epoch": 0.31852815461810075, "grad_norm": 0.5035297870635986, "learning_rate": 0.00018801833978169876, "loss": 1.3613, "step": 1714 }, { "epoch": 0.31871399368147185, "grad_norm": 0.5885191559791565, "learning_rate": 
0.00018800445493533246, "loss": 1.1476, "step": 1715 }, { "epoch": 0.31889983274484296, "grad_norm": 0.6840378046035767, "learning_rate": 0.00018799056256175493, "loss": 1.1244, "step": 1716 }, { "epoch": 0.31908567180821407, "grad_norm": 0.5499553084373474, "learning_rate": 0.00018797666266215441, "loss": 1.0127, "step": 1717 }, { "epoch": 0.31927151087158523, "grad_norm": 0.5777943730354309, "learning_rate": 0.0001879627552377198, "loss": 1.15, "step": 1718 }, { "epoch": 0.31945734993495634, "grad_norm": 0.5416708588600159, "learning_rate": 0.00018794884028964064, "loss": 1.0631, "step": 1719 }, { "epoch": 0.31964318899832744, "grad_norm": 0.6631436944007874, "learning_rate": 0.00018793491781910705, "loss": 1.1563, "step": 1720 }, { "epoch": 0.31982902806169855, "grad_norm": 0.658031702041626, "learning_rate": 0.00018792098782730992, "loss": 1.1838, "step": 1721 }, { "epoch": 0.3200148671250697, "grad_norm": 0.5618680715560913, "learning_rate": 0.00018790705031544063, "loss": 0.9808, "step": 1722 }, { "epoch": 0.3202007061884408, "grad_norm": 0.5322409868240356, "learning_rate": 0.00018789310528469138, "loss": 0.8012, "step": 1723 }, { "epoch": 0.3203865452518119, "grad_norm": 0.5504143238067627, "learning_rate": 0.00018787915273625486, "loss": 1.1341, "step": 1724 }, { "epoch": 0.32057238431518303, "grad_norm": 0.5948871374130249, "learning_rate": 0.00018786519267132446, "loss": 0.9778, "step": 1725 }, { "epoch": 0.3207582233785542, "grad_norm": 0.5112269520759583, "learning_rate": 0.00018785122509109426, "loss": 1.0508, "step": 1726 }, { "epoch": 0.3209440624419253, "grad_norm": 0.4630531072616577, "learning_rate": 0.0001878372499967589, "loss": 0.9361, "step": 1727 }, { "epoch": 0.3211299015052964, "grad_norm": 0.6956179141998291, "learning_rate": 0.00018782326738951368, "loss": 1.3476, "step": 1728 }, { "epoch": 0.3213157405686675, "grad_norm": 0.6133003234863281, "learning_rate": 0.0001878092772705546, "loss": 1.2831, "step": 1729 }, { "epoch": 
0.3215015796320387, "grad_norm": 0.5532038807868958, "learning_rate": 0.00018779527964107824, "loss": 0.9402, "step": 1730 }, { "epoch": 0.3216874186954098, "grad_norm": 0.5056849122047424, "learning_rate": 0.0001877812745022819, "loss": 0.8827, "step": 1731 }, { "epoch": 0.3218732577587809, "grad_norm": 0.5501548051834106, "learning_rate": 0.0001877672618553634, "loss": 1.0141, "step": 1732 }, { "epoch": 0.322059096822152, "grad_norm": 0.5648378729820251, "learning_rate": 0.00018775324170152132, "loss": 0.812, "step": 1733 }, { "epoch": 0.3222449358855231, "grad_norm": 0.6281271576881409, "learning_rate": 0.00018773921404195481, "loss": 1.3273, "step": 1734 }, { "epoch": 0.32243077494889427, "grad_norm": 0.6104331016540527, "learning_rate": 0.0001877251788778637, "loss": 1.0296, "step": 1735 }, { "epoch": 0.3226166140122654, "grad_norm": 0.6585345268249512, "learning_rate": 0.00018771113621044843, "loss": 1.1113, "step": 1736 }, { "epoch": 0.3228024530756365, "grad_norm": 0.5706427693367004, "learning_rate": 0.00018769708604091012, "loss": 1.0446, "step": 1737 }, { "epoch": 0.3229882921390076, "grad_norm": 0.6643130779266357, "learning_rate": 0.00018768302837045048, "loss": 0.883, "step": 1738 }, { "epoch": 0.32317413120237876, "grad_norm": 0.4990970492362976, "learning_rate": 0.00018766896320027195, "loss": 0.9353, "step": 1739 }, { "epoch": 0.32335997026574986, "grad_norm": 0.48843303322792053, "learning_rate": 0.0001876548905315775, "loss": 0.9064, "step": 1740 }, { "epoch": 0.32354580932912097, "grad_norm": 0.6881991028785706, "learning_rate": 0.00018764081036557083, "loss": 1.3455, "step": 1741 }, { "epoch": 0.3237316483924921, "grad_norm": 0.5047183036804199, "learning_rate": 0.00018762672270345623, "loss": 0.874, "step": 1742 }, { "epoch": 0.32391748745586324, "grad_norm": 0.5843253135681152, "learning_rate": 0.00018761262754643862, "loss": 1.3657, "step": 1743 }, { "epoch": 0.32410332651923435, "grad_norm": 0.743747889995575, "learning_rate": 
0.00018759852489572368, "loss": 1.2149, "step": 1744 }, { "epoch": 0.32428916558260545, "grad_norm": 0.5197893381118774, "learning_rate": 0.00018758441475251754, "loss": 1.038, "step": 1745 }, { "epoch": 0.32447500464597656, "grad_norm": 0.5472385287284851, "learning_rate": 0.0001875702971180271, "loss": 0.9697, "step": 1746 }, { "epoch": 0.3246608437093477, "grad_norm": 0.5840916037559509, "learning_rate": 0.00018755617199345988, "loss": 1.0975, "step": 1747 }, { "epoch": 0.32484668277271883, "grad_norm": 0.5640180706977844, "learning_rate": 0.00018754203938002408, "loss": 0.9969, "step": 1748 }, { "epoch": 0.32503252183608994, "grad_norm": 0.5140122771263123, "learning_rate": 0.00018752789927892845, "loss": 1.0121, "step": 1749 }, { "epoch": 0.32521836089946105, "grad_norm": 0.595289409160614, "learning_rate": 0.0001875137516913824, "loss": 1.228, "step": 1750 }, { "epoch": 0.3254041999628322, "grad_norm": 0.519131600856781, "learning_rate": 0.000187499596618596, "loss": 1.2358, "step": 1751 }, { "epoch": 0.3255900390262033, "grad_norm": 0.617352306842804, "learning_rate": 0.00018748543406178002, "loss": 0.8571, "step": 1752 }, { "epoch": 0.3257758780895744, "grad_norm": 0.5419536828994751, "learning_rate": 0.00018747126402214576, "loss": 0.9787, "step": 1753 }, { "epoch": 0.32596171715294553, "grad_norm": 0.5365861654281616, "learning_rate": 0.00018745708650090527, "loss": 1.0241, "step": 1754 }, { "epoch": 0.3261475562163167, "grad_norm": 0.5210004448890686, "learning_rate": 0.00018744290149927113, "loss": 1.1246, "step": 1755 }, { "epoch": 0.3263333952796878, "grad_norm": 0.6406294107437134, "learning_rate": 0.00018742870901845664, "loss": 1.1412, "step": 1756 }, { "epoch": 0.3265192343430589, "grad_norm": 0.6498095393180847, "learning_rate": 0.0001874145090596757, "loss": 1.1282, "step": 1757 }, { "epoch": 0.32670507340643, "grad_norm": 0.5301967859268188, "learning_rate": 0.00018740030162414287, "loss": 1.0865, "step": 1758 }, { "epoch": 0.3268909124698012, 
"grad_norm": 0.6150500774383545, "learning_rate": 0.00018738608671307338, "loss": 1.1464, "step": 1759 }, { "epoch": 0.3270767515331723, "grad_norm": 0.5350655317306519, "learning_rate": 0.000187371864327683, "loss": 0.9322, "step": 1760 }, { "epoch": 0.3272625905965434, "grad_norm": 0.5245082378387451, "learning_rate": 0.00018735763446918826, "loss": 1.1232, "step": 1761 }, { "epoch": 0.3274484296599145, "grad_norm": 0.5193549394607544, "learning_rate": 0.0001873433971388062, "loss": 0.927, "step": 1762 }, { "epoch": 0.32763426872328566, "grad_norm": 0.5565290451049805, "learning_rate": 0.0001873291523377546, "loss": 1.1293, "step": 1763 }, { "epoch": 0.32782010778665677, "grad_norm": 0.5595194101333618, "learning_rate": 0.0001873149000672519, "loss": 1.1835, "step": 1764 }, { "epoch": 0.3280059468500279, "grad_norm": 0.5816195011138916, "learning_rate": 0.00018730064032851707, "loss": 1.0722, "step": 1765 }, { "epoch": 0.328191785913399, "grad_norm": 0.5451340675354004, "learning_rate": 0.00018728637312276976, "loss": 1.021, "step": 1766 }, { "epoch": 0.32837762497677014, "grad_norm": 0.3959139883518219, "learning_rate": 0.00018727209845123034, "loss": 0.7137, "step": 1767 }, { "epoch": 0.32856346404014125, "grad_norm": 0.6754137873649597, "learning_rate": 0.00018725781631511975, "loss": 1.3023, "step": 1768 }, { "epoch": 0.32874930310351236, "grad_norm": 0.6047104597091675, "learning_rate": 0.00018724352671565947, "loss": 1.1471, "step": 1769 }, { "epoch": 0.32893514216688347, "grad_norm": 0.60149085521698, "learning_rate": 0.00018722922965407182, "loss": 1.2296, "step": 1770 }, { "epoch": 0.3291209812302546, "grad_norm": 0.5902987718582153, "learning_rate": 0.00018721492513157966, "loss": 0.9665, "step": 1771 }, { "epoch": 0.32930682029362573, "grad_norm": 0.6506375074386597, "learning_rate": 0.00018720061314940643, "loss": 0.846, "step": 1772 }, { "epoch": 0.32949265935699684, "grad_norm": 0.5661017894744873, "learning_rate": 0.00018718629370877626, "loss": 
1.0438, "step": 1773 }, { "epoch": 0.32967849842036795, "grad_norm": 0.6380808353424072, "learning_rate": 0.00018717196681091402, "loss": 0.9613, "step": 1774 }, { "epoch": 0.32986433748373906, "grad_norm": 0.6345396637916565, "learning_rate": 0.00018715763245704504, "loss": 1.1511, "step": 1775 }, { "epoch": 0.3300501765471102, "grad_norm": 0.6821529865264893, "learning_rate": 0.00018714329064839532, "loss": 0.9459, "step": 1776 }, { "epoch": 0.3302360156104813, "grad_norm": 0.4994625449180603, "learning_rate": 0.00018712894138619168, "loss": 0.9316, "step": 1777 }, { "epoch": 0.33042185467385243, "grad_norm": 0.579918384552002, "learning_rate": 0.00018711458467166137, "loss": 0.7732, "step": 1778 }, { "epoch": 0.33060769373722354, "grad_norm": 0.602846622467041, "learning_rate": 0.00018710022050603232, "loss": 1.2335, "step": 1779 }, { "epoch": 0.3307935328005947, "grad_norm": 0.5414668917655945, "learning_rate": 0.00018708584889053317, "loss": 0.8378, "step": 1780 }, { "epoch": 0.3309793718639658, "grad_norm": 0.5418664813041687, "learning_rate": 0.00018707146982639317, "loss": 1.2916, "step": 1781 }, { "epoch": 0.3311652109273369, "grad_norm": 0.5478229522705078, "learning_rate": 0.00018705708331484213, "loss": 1.0765, "step": 1782 }, { "epoch": 0.331351049990708, "grad_norm": 0.6811211705207825, "learning_rate": 0.00018704268935711062, "loss": 1.1639, "step": 1783 }, { "epoch": 0.3315368890540792, "grad_norm": 0.676049530506134, "learning_rate": 0.0001870282879544298, "loss": 1.1275, "step": 1784 }, { "epoch": 0.3317227281174503, "grad_norm": 0.4629632234573364, "learning_rate": 0.00018701387910803136, "loss": 0.7645, "step": 1785 }, { "epoch": 0.3319085671808214, "grad_norm": 0.5445986390113831, "learning_rate": 0.0001869994628191478, "loss": 1.0727, "step": 1786 }, { "epoch": 0.3320944062441925, "grad_norm": 0.5960368514060974, "learning_rate": 0.00018698503908901216, "loss": 0.9489, "step": 1787 }, { "epoch": 0.33228024530756367, "grad_norm": 
0.536932647228241, "learning_rate": 0.00018697060791885814, "loss": 0.9583, "step": 1788 }, { "epoch": 0.3324660843709348, "grad_norm": 0.7085352540016174, "learning_rate": 0.00018695616930992004, "loss": 1.3772, "step": 1789 }, { "epoch": 0.3326519234343059, "grad_norm": 0.5107738971710205, "learning_rate": 0.00018694172326343285, "loss": 1.1724, "step": 1790 }, { "epoch": 0.332837762497677, "grad_norm": 0.6691588759422302, "learning_rate": 0.00018692726978063214, "loss": 1.1536, "step": 1791 }, { "epoch": 0.33302360156104815, "grad_norm": 0.5077880620956421, "learning_rate": 0.0001869128088627542, "loss": 0.8661, "step": 1792 }, { "epoch": 0.33320944062441926, "grad_norm": 0.655665934085846, "learning_rate": 0.00018689834051103588, "loss": 1.1782, "step": 1793 }, { "epoch": 0.33339527968779037, "grad_norm": 0.5770731568336487, "learning_rate": 0.00018688386472671465, "loss": 0.942, "step": 1794 }, { "epoch": 0.3335811187511615, "grad_norm": 0.531545877456665, "learning_rate": 0.00018686938151102868, "loss": 1.1079, "step": 1795 }, { "epoch": 0.33376695781453264, "grad_norm": 0.59685879945755, "learning_rate": 0.0001868548908652168, "loss": 0.9696, "step": 1796 }, { "epoch": 0.33395279687790375, "grad_norm": 0.597497284412384, "learning_rate": 0.00018684039279051837, "loss": 0.987, "step": 1797 }, { "epoch": 0.33413863594127485, "grad_norm": 0.5863851308822632, "learning_rate": 0.00018682588728817346, "loss": 0.9386, "step": 1798 }, { "epoch": 0.33432447500464596, "grad_norm": 0.557304859161377, "learning_rate": 0.00018681137435942277, "loss": 0.9928, "step": 1799 }, { "epoch": 0.3345103140680171, "grad_norm": 0.49487656354904175, "learning_rate": 0.00018679685400550757, "loss": 1.1482, "step": 1800 }, { "epoch": 0.33469615313138823, "grad_norm": 0.5904025435447693, "learning_rate": 0.0001867823262276699, "loss": 0.878, "step": 1801 }, { "epoch": 0.33488199219475934, "grad_norm": 0.49566707015037537, "learning_rate": 0.00018676779102715226, "loss": 1.2314, "step": 
1802 }, { "epoch": 0.33506783125813044, "grad_norm": 0.5034186244010925, "learning_rate": 0.00018675324840519797, "loss": 0.8979, "step": 1803 }, { "epoch": 0.3352536703215016, "grad_norm": 0.5835298299789429, "learning_rate": 0.0001867386983630508, "loss": 1.2786, "step": 1804 }, { "epoch": 0.3354395093848727, "grad_norm": 0.602591872215271, "learning_rate": 0.00018672414090195534, "loss": 1.2119, "step": 1805 }, { "epoch": 0.3356253484482438, "grad_norm": 0.5360019207000732, "learning_rate": 0.00018670957602315663, "loss": 1.2997, "step": 1806 }, { "epoch": 0.33581118751161493, "grad_norm": 0.7245522141456604, "learning_rate": 0.0001866950037279005, "loss": 1.1139, "step": 1807 }, { "epoch": 0.33599702657498604, "grad_norm": 0.5866603851318359, "learning_rate": 0.00018668042401743333, "loss": 1.4327, "step": 1808 }, { "epoch": 0.3361828656383572, "grad_norm": 0.5497808456420898, "learning_rate": 0.00018666583689300214, "loss": 1.2068, "step": 1809 }, { "epoch": 0.3363687047017283, "grad_norm": 0.49823257327079773, "learning_rate": 0.00018665124235585462, "loss": 0.9279, "step": 1810 }, { "epoch": 0.3365545437650994, "grad_norm": 0.5769509673118591, "learning_rate": 0.00018663664040723905, "loss": 1.0357, "step": 1811 }, { "epoch": 0.3367403828284705, "grad_norm": 0.3853382468223572, "learning_rate": 0.0001866220310484044, "loss": 0.6868, "step": 1812 }, { "epoch": 0.3369262218918417, "grad_norm": 0.5640473961830139, "learning_rate": 0.0001866074142806002, "loss": 1.27, "step": 1813 }, { "epoch": 0.3371120609552128, "grad_norm": 0.5786116719245911, "learning_rate": 0.0001865927901050767, "loss": 1.2678, "step": 1814 }, { "epoch": 0.3372979000185839, "grad_norm": 0.5291271209716797, "learning_rate": 0.0001865781585230847, "loss": 0.9834, "step": 1815 }, { "epoch": 0.337483739081955, "grad_norm": 0.6278622150421143, "learning_rate": 0.00018656351953587566, "loss": 1.0949, "step": 1816 }, { "epoch": 0.33766957814532617, "grad_norm": 0.5260774493217468, 
"learning_rate": 0.00018654887314470173, "loss": 1.1183, "step": 1817 }, { "epoch": 0.3378554172086973, "grad_norm": 0.5557003021240234, "learning_rate": 0.00018653421935081561, "loss": 0.7845, "step": 1818 }, { "epoch": 0.3380412562720684, "grad_norm": 0.6177703142166138, "learning_rate": 0.0001865195581554707, "loss": 1.0623, "step": 1819 }, { "epoch": 0.3382270953354395, "grad_norm": 0.6486766338348389, "learning_rate": 0.000186504889559921, "loss": 1.2507, "step": 1820 }, { "epoch": 0.33841293439881065, "grad_norm": 0.5549860596656799, "learning_rate": 0.00018649021356542108, "loss": 1.0893, "step": 1821 }, { "epoch": 0.33859877346218176, "grad_norm": 0.48546648025512695, "learning_rate": 0.0001864755301732263, "loss": 0.704, "step": 1822 }, { "epoch": 0.33878461252555286, "grad_norm": 2.3476552963256836, "learning_rate": 0.0001864608393845925, "loss": 2.9748, "step": 1823 }, { "epoch": 0.33897045158892397, "grad_norm": 0.5599921941757202, "learning_rate": 0.00018644614120077625, "loss": 1.0073, "step": 1824 }, { "epoch": 0.33915629065229513, "grad_norm": 0.5544316172599792, "learning_rate": 0.00018643143562303475, "loss": 0.9475, "step": 1825 }, { "epoch": 0.33934212971566624, "grad_norm": 0.5891472697257996, "learning_rate": 0.0001864167226526257, "loss": 1.0265, "step": 1826 }, { "epoch": 0.33952796877903735, "grad_norm": 0.6864466667175293, "learning_rate": 0.00018640200229080763, "loss": 1.0996, "step": 1827 }, { "epoch": 0.33971380784240846, "grad_norm": 0.5325470566749573, "learning_rate": 0.00018638727453883957, "loss": 1.1052, "step": 1828 }, { "epoch": 0.3398996469057796, "grad_norm": 0.5583032369613647, "learning_rate": 0.0001863725393979812, "loss": 1.1012, "step": 1829 }, { "epoch": 0.3400854859691507, "grad_norm": 0.6103065013885498, "learning_rate": 0.00018635779686949284, "loss": 1.0018, "step": 1830 }, { "epoch": 0.34027132503252183, "grad_norm": 0.7236659526824951, "learning_rate": 0.0001863430469546355, "loss": 1.2731, "step": 1831 }, { 
"epoch": 0.34045716409589294, "grad_norm": 0.5036203861236572, "learning_rate": 0.0001863282896546707, "loss": 0.9995, "step": 1832 }, { "epoch": 0.3406430031592641, "grad_norm": 0.585512638092041, "learning_rate": 0.00018631352497086074, "loss": 1.1056, "step": 1833 }, { "epoch": 0.3408288422226352, "grad_norm": 0.6121904253959656, "learning_rate": 0.00018629875290446845, "loss": 1.114, "step": 1834 }, { "epoch": 0.3410146812860063, "grad_norm": 0.5517188310623169, "learning_rate": 0.00018628397345675727, "loss": 1.3077, "step": 1835 }, { "epoch": 0.3412005203493774, "grad_norm": 0.5181845426559448, "learning_rate": 0.0001862691866289914, "loss": 1.193, "step": 1836 }, { "epoch": 0.3413863594127486, "grad_norm": 0.6143492460250854, "learning_rate": 0.00018625439242243548, "loss": 0.8677, "step": 1837 }, { "epoch": 0.3415721984761197, "grad_norm": 0.7705954313278198, "learning_rate": 0.00018623959083835504, "loss": 0.9832, "step": 1838 }, { "epoch": 0.3417580375394908, "grad_norm": 0.5456838011741638, "learning_rate": 0.00018622478187801594, "loss": 1.258, "step": 1839 }, { "epoch": 0.3419438766028619, "grad_norm": 0.4819016456604004, "learning_rate": 0.00018620996554268493, "loss": 0.9829, "step": 1840 }, { "epoch": 0.34212971566623307, "grad_norm": 0.5512806177139282, "learning_rate": 0.00018619514183362923, "loss": 1.0721, "step": 1841 }, { "epoch": 0.3423155547296042, "grad_norm": 0.6261305809020996, "learning_rate": 0.00018618031075211676, "loss": 0.9484, "step": 1842 }, { "epoch": 0.3425013937929753, "grad_norm": 0.5673807263374329, "learning_rate": 0.00018616547229941604, "loss": 1.1987, "step": 1843 }, { "epoch": 0.3426872328563464, "grad_norm": 0.4913477599620819, "learning_rate": 0.00018615062647679627, "loss": 0.8978, "step": 1844 }, { "epoch": 0.3428730719197175, "grad_norm": 0.566608190536499, "learning_rate": 0.0001861357732855272, "loss": 0.9981, "step": 1845 }, { "epoch": 0.34305891098308866, "grad_norm": 0.6863612532615662, "learning_rate": 
0.00018612091272687925, "loss": 1.07, "step": 1846 }, { "epoch": 0.34324475004645977, "grad_norm": 0.5646424293518066, "learning_rate": 0.00018610604480212356, "loss": 1.1925, "step": 1847 }, { "epoch": 0.3434305891098309, "grad_norm": 0.6170990467071533, "learning_rate": 0.00018609116951253171, "loss": 1.0215, "step": 1848 }, { "epoch": 0.343616428173202, "grad_norm": 0.6480187177658081, "learning_rate": 0.0001860762868593761, "loss": 1.1675, "step": 1849 }, { "epoch": 0.34380226723657314, "grad_norm": 0.5635117292404175, "learning_rate": 0.00018606139684392963, "loss": 1.1116, "step": 1850 }, { "epoch": 0.34398810629994425, "grad_norm": 0.7464002966880798, "learning_rate": 0.00018604649946746585, "loss": 1.1706, "step": 1851 }, { "epoch": 0.34417394536331536, "grad_norm": 0.5671253204345703, "learning_rate": 0.00018603159473125905, "loss": 1.0751, "step": 1852 }, { "epoch": 0.34435978442668647, "grad_norm": 0.5876680612564087, "learning_rate": 0.00018601668263658398, "loss": 1.3353, "step": 1853 }, { "epoch": 0.34454562349005763, "grad_norm": 0.5108693838119507, "learning_rate": 0.00018600176318471617, "loss": 1.1154, "step": 1854 }, { "epoch": 0.34473146255342874, "grad_norm": 0.5718084573745728, "learning_rate": 0.00018598683637693165, "loss": 1.111, "step": 1855 }, { "epoch": 0.34491730161679984, "grad_norm": 0.599010705947876, "learning_rate": 0.0001859719022145072, "loss": 0.9496, "step": 1856 }, { "epoch": 0.34510314068017095, "grad_norm": 0.5230289697647095, "learning_rate": 0.00018595696069872013, "loss": 1.0775, "step": 1857 }, { "epoch": 0.3452889797435421, "grad_norm": 0.5170990824699402, "learning_rate": 0.00018594201183084842, "loss": 1.092, "step": 1858 }, { "epoch": 0.3454748188069132, "grad_norm": 0.57718425989151, "learning_rate": 0.00018592705561217072, "loss": 0.899, "step": 1859 }, { "epoch": 0.3456606578702843, "grad_norm": 0.5379887223243713, "learning_rate": 0.00018591209204396622, "loss": 1.1148, "step": 1860 }, { "epoch": 
0.34584649693365543, "grad_norm": 0.604630172252655, "learning_rate": 0.0001858971211275148, "loss": 1.1184, "step": 1861 }, { "epoch": 0.3460323359970266, "grad_norm": 0.6093925833702087, "learning_rate": 0.00018588214286409697, "loss": 0.8449, "step": 1862 }, { "epoch": 0.3462181750603977, "grad_norm": 0.6406834721565247, "learning_rate": 0.00018586715725499387, "loss": 1.1169, "step": 1863 }, { "epoch": 0.3464040141237688, "grad_norm": 0.49252834916114807, "learning_rate": 0.00018585216430148723, "loss": 0.7095, "step": 1864 }, { "epoch": 0.3465898531871399, "grad_norm": 0.6328451037406921, "learning_rate": 0.00018583716400485937, "loss": 1.2131, "step": 1865 }, { "epoch": 0.3467756922505111, "grad_norm": 0.6539022326469421, "learning_rate": 0.0001858221563663934, "loss": 0.9528, "step": 1866 }, { "epoch": 0.3469615313138822, "grad_norm": 0.5577411651611328, "learning_rate": 0.00018580714138737288, "loss": 1.2018, "step": 1867 }, { "epoch": 0.3471473703772533, "grad_norm": 0.5997073650360107, "learning_rate": 0.00018579211906908215, "loss": 1.1088, "step": 1868 }, { "epoch": 0.3473332094406244, "grad_norm": 0.5570213198661804, "learning_rate": 0.000185777089412806, "loss": 0.9635, "step": 1869 }, { "epoch": 0.34751904850399556, "grad_norm": 0.5627360343933105, "learning_rate": 0.00018576205241983005, "loss": 1.1156, "step": 1870 }, { "epoch": 0.34770488756736667, "grad_norm": 0.6400758028030396, "learning_rate": 0.00018574700809144035, "loss": 1.3113, "step": 1871 }, { "epoch": 0.3478907266307378, "grad_norm": 0.6254924535751343, "learning_rate": 0.00018573195642892376, "loss": 0.9771, "step": 1872 }, { "epoch": 0.3480765656941089, "grad_norm": 0.5834730267524719, "learning_rate": 0.00018571689743356764, "loss": 0.985, "step": 1873 }, { "epoch": 0.34826240475748005, "grad_norm": 0.6734195351600647, "learning_rate": 0.00018570183110666003, "loss": 1.2304, "step": 1874 }, { "epoch": 0.34844824382085116, "grad_norm": 0.6961857080459595, "learning_rate": 
0.0001856867574494896, "loss": 1.1581, "step": 1875 }, { "epoch": 0.34863408288422226, "grad_norm": 0.6982626914978027, "learning_rate": 0.0001856716764633456, "loss": 0.9181, "step": 1876 }, { "epoch": 0.34881992194759337, "grad_norm": 0.7757996916770935, "learning_rate": 0.00018565658814951788, "loss": 1.0588, "step": 1877 }, { "epoch": 0.3490057610109645, "grad_norm": 0.5235817432403564, "learning_rate": 0.00018564149250929713, "loss": 0.9972, "step": 1878 }, { "epoch": 0.34919160007433564, "grad_norm": 0.5383930206298828, "learning_rate": 0.0001856263895439744, "loss": 0.9844, "step": 1879 }, { "epoch": 0.34937743913770675, "grad_norm": 0.5330451130867004, "learning_rate": 0.0001856112792548415, "loss": 0.7695, "step": 1880 }, { "epoch": 0.34956327820107785, "grad_norm": 0.5530948042869568, "learning_rate": 0.0001855961616431909, "loss": 1.0495, "step": 1881 }, { "epoch": 0.34974911726444896, "grad_norm": 0.5738743543624878, "learning_rate": 0.00018558103671031554, "loss": 0.9853, "step": 1882 }, { "epoch": 0.3499349563278201, "grad_norm": 0.5287640690803528, "learning_rate": 0.0001855659044575092, "loss": 1.1136, "step": 1883 }, { "epoch": 0.35012079539119123, "grad_norm": 0.5047253966331482, "learning_rate": 0.00018555076488606606, "loss": 0.9412, "step": 1884 }, { "epoch": 0.35030663445456234, "grad_norm": 0.5331687927246094, "learning_rate": 0.00018553561799728113, "loss": 1.174, "step": 1885 }, { "epoch": 0.35049247351793344, "grad_norm": 0.6587241888046265, "learning_rate": 0.00018552046379244992, "loss": 1.0395, "step": 1886 }, { "epoch": 0.3506783125813046, "grad_norm": 0.541508674621582, "learning_rate": 0.0001855053022728686, "loss": 1.1467, "step": 1887 }, { "epoch": 0.3508641516446757, "grad_norm": 0.6289540529251099, "learning_rate": 0.000185490133439834, "loss": 1.0423, "step": 1888 }, { "epoch": 0.3510499907080468, "grad_norm": 0.5184619426727295, "learning_rate": 0.00018547495729464352, "loss": 1.0425, "step": 1889 }, { "epoch": 
0.35123582977141793, "grad_norm": 0.541866660118103, "learning_rate": 0.0001854597738385952, "loss": 1.2454, "step": 1890 }, { "epoch": 0.3514216688347891, "grad_norm": 0.46014341711997986, "learning_rate": 0.0001854445830729877, "loss": 1.0373, "step": 1891 }, { "epoch": 0.3516075078981602, "grad_norm": 0.5548123717308044, "learning_rate": 0.00018542938499912034, "loss": 1.0437, "step": 1892 }, { "epoch": 0.3517933469615313, "grad_norm": 0.7150872945785522, "learning_rate": 0.0001854141796182931, "loss": 1.3672, "step": 1893 }, { "epoch": 0.3519791860249024, "grad_norm": 0.6109015345573425, "learning_rate": 0.0001853989669318064, "loss": 1.3214, "step": 1894 }, { "epoch": 0.3521650250882736, "grad_norm": 0.606502115726471, "learning_rate": 0.0001853837469409615, "loss": 1.0622, "step": 1895 }, { "epoch": 0.3523508641516447, "grad_norm": 0.6415699124336243, "learning_rate": 0.0001853685196470602, "loss": 1.1894, "step": 1896 }, { "epoch": 0.3525367032150158, "grad_norm": 0.5605230331420898, "learning_rate": 0.00018535328505140495, "loss": 0.9746, "step": 1897 }, { "epoch": 0.3527225422783869, "grad_norm": 0.5477644205093384, "learning_rate": 0.00018533804315529873, "loss": 0.8564, "step": 1898 }, { "epoch": 0.35290838134175806, "grad_norm": 0.5427902340888977, "learning_rate": 0.00018532279396004522, "loss": 1.1638, "step": 1899 }, { "epoch": 0.35309422040512917, "grad_norm": 0.5802868604660034, "learning_rate": 0.00018530753746694872, "loss": 1.2356, "step": 1900 }, { "epoch": 0.3532800594685003, "grad_norm": 0.5529875755310059, "learning_rate": 0.0001852922736773142, "loss": 1.0866, "step": 1901 }, { "epoch": 0.3534658985318714, "grad_norm": 0.5103433728218079, "learning_rate": 0.00018527700259244717, "loss": 0.9164, "step": 1902 }, { "epoch": 0.35365173759524254, "grad_norm": 0.5571877360343933, "learning_rate": 0.0001852617242136538, "loss": 1.2392, "step": 1903 }, { "epoch": 0.35383757665861365, "grad_norm": 0.5150656700134277, "learning_rate": 
0.00018524643854224087, "loss": 1.0823, "step": 1904 }, { "epoch": 0.35402341572198476, "grad_norm": 0.49546682834625244, "learning_rate": 0.00018523114557951578, "loss": 1.077, "step": 1905 }, { "epoch": 0.35420925478535586, "grad_norm": 0.5201963782310486, "learning_rate": 0.00018521584532678667, "loss": 1.1797, "step": 1906 }, { "epoch": 0.354395093848727, "grad_norm": 0.604653537273407, "learning_rate": 0.00018520053778536212, "loss": 1.1176, "step": 1907 }, { "epoch": 0.35458093291209813, "grad_norm": 0.44055116176605225, "learning_rate": 0.0001851852229565514, "loss": 0.7782, "step": 1908 }, { "epoch": 0.35476677197546924, "grad_norm": 0.5750049352645874, "learning_rate": 0.00018516990084166442, "loss": 1.1154, "step": 1909 }, { "epoch": 0.35495261103884035, "grad_norm": 0.5687276124954224, "learning_rate": 0.0001851545714420118, "loss": 1.0966, "step": 1910 }, { "epoch": 0.3551384501022115, "grad_norm": 0.5168128609657288, "learning_rate": 0.00018513923475890463, "loss": 1.1899, "step": 1911 }, { "epoch": 0.3553242891655826, "grad_norm": 0.4693703055381775, "learning_rate": 0.0001851238907936547, "loss": 0.8872, "step": 1912 }, { "epoch": 0.3555101282289537, "grad_norm": 0.5975525379180908, "learning_rate": 0.00018510853954757441, "loss": 1.1444, "step": 1913 }, { "epoch": 0.35569596729232483, "grad_norm": 0.5854206681251526, "learning_rate": 0.00018509318102197678, "loss": 1.2371, "step": 1914 }, { "epoch": 0.35588180635569594, "grad_norm": 0.5721655488014221, "learning_rate": 0.00018507781521817548, "loss": 1.1206, "step": 1915 }, { "epoch": 0.3560676454190671, "grad_norm": 0.659113883972168, "learning_rate": 0.00018506244213748477, "loss": 1.269, "step": 1916 }, { "epoch": 0.3562534844824382, "grad_norm": 0.5849035382270813, "learning_rate": 0.00018504706178121952, "loss": 1.1486, "step": 1917 }, { "epoch": 0.3564393235458093, "grad_norm": 0.6287510991096497, "learning_rate": 0.00018503167415069528, "loss": 1.1698, "step": 1918 }, { "epoch": 
0.3566251626091804, "grad_norm": 0.5501243472099304, "learning_rate": 0.00018501627924722818, "loss": 0.9855, "step": 1919 }, { "epoch": 0.3568110016725516, "grad_norm": 0.5094524025917053, "learning_rate": 0.00018500087707213497, "loss": 1.1109, "step": 1920 }, { "epoch": 0.3569968407359227, "grad_norm": 0.53468918800354, "learning_rate": 0.00018498546762673302, "loss": 0.6887, "step": 1921 }, { "epoch": 0.3571826797992938, "grad_norm": 0.5579971671104431, "learning_rate": 0.00018497005091234036, "loss": 1.0723, "step": 1922 }, { "epoch": 0.3573685188626649, "grad_norm": 0.5719289183616638, "learning_rate": 0.00018495462693027557, "loss": 1.1419, "step": 1923 }, { "epoch": 0.35755435792603607, "grad_norm": 0.6961529850959778, "learning_rate": 0.00018493919568185796, "loss": 1.1488, "step": 1924 }, { "epoch": 0.3577401969894072, "grad_norm": 0.5444719195365906, "learning_rate": 0.00018492375716840735, "loss": 1.1814, "step": 1925 }, { "epoch": 0.3579260360527783, "grad_norm": 0.5759077072143555, "learning_rate": 0.00018490831139124424, "loss": 1.1933, "step": 1926 }, { "epoch": 0.3581118751161494, "grad_norm": 0.6321165561676025, "learning_rate": 0.00018489285835168978, "loss": 1.1398, "step": 1927 }, { "epoch": 0.35829771417952055, "grad_norm": 0.6945244669914246, "learning_rate": 0.00018487739805106562, "loss": 1.1798, "step": 1928 }, { "epoch": 0.35848355324289166, "grad_norm": 0.5583364367485046, "learning_rate": 0.0001848619304906942, "loss": 0.9665, "step": 1929 }, { "epoch": 0.35866939230626277, "grad_norm": 0.511746883392334, "learning_rate": 0.00018484645567189845, "loss": 1.1767, "step": 1930 }, { "epoch": 0.3588552313696339, "grad_norm": 0.5670949816703796, "learning_rate": 0.00018483097359600195, "loss": 0.9885, "step": 1931 }, { "epoch": 0.35904107043300504, "grad_norm": 0.5351964235305786, "learning_rate": 0.00018481548426432895, "loss": 1.0039, "step": 1932 }, { "epoch": 0.35922690949637615, "grad_norm": 0.5773396492004395, "learning_rate": 
0.00018479998767820425, "loss": 1.0051, "step": 1933 }, { "epoch": 0.35941274855974725, "grad_norm": 0.5246624946594238, "learning_rate": 0.00018478448383895332, "loss": 0.88, "step": 1934 }, { "epoch": 0.35959858762311836, "grad_norm": 0.5461812615394592, "learning_rate": 0.00018476897274790228, "loss": 0.9274, "step": 1935 }, { "epoch": 0.3597844266864895, "grad_norm": 0.5961292386054993, "learning_rate": 0.00018475345440637777, "loss": 1.0634, "step": 1936 }, { "epoch": 0.35997026574986063, "grad_norm": 0.6102057695388794, "learning_rate": 0.00018473792881570713, "loss": 0.9261, "step": 1937 }, { "epoch": 0.36015610481323174, "grad_norm": 0.549595057964325, "learning_rate": 0.0001847223959772183, "loss": 0.9047, "step": 1938 }, { "epoch": 0.36034194387660284, "grad_norm": 0.6503207683563232, "learning_rate": 0.00018470685589223984, "loss": 1.164, "step": 1939 }, { "epoch": 0.360527782939974, "grad_norm": 0.5422487258911133, "learning_rate": 0.00018469130856210093, "loss": 1.0569, "step": 1940 }, { "epoch": 0.3607136220033451, "grad_norm": 0.6560564637184143, "learning_rate": 0.00018467575398813132, "loss": 1.0021, "step": 1941 }, { "epoch": 0.3608994610667162, "grad_norm": 0.5428600907325745, "learning_rate": 0.0001846601921716615, "loss": 1.1048, "step": 1942 }, { "epoch": 0.3610853001300873, "grad_norm": 0.5088539123535156, "learning_rate": 0.00018464462311402243, "loss": 0.9902, "step": 1943 }, { "epoch": 0.3612711391934585, "grad_norm": 0.5354966521263123, "learning_rate": 0.00018462904681654582, "loss": 1.1174, "step": 1944 }, { "epoch": 0.3614569782568296, "grad_norm": 0.581659734249115, "learning_rate": 0.00018461346328056392, "loss": 1.0791, "step": 1945 }, { "epoch": 0.3616428173202007, "grad_norm": 0.6271023750305176, "learning_rate": 0.00018459787250740966, "loss": 1.2299, "step": 1946 }, { "epoch": 0.3618286563835718, "grad_norm": 0.5690140724182129, "learning_rate": 0.0001845822744984165, "loss": 1.1757, "step": 1947 }, { "epoch": 0.362014495446943, 
"grad_norm": 0.6378099322319031, "learning_rate": 0.00018456666925491862, "loss": 0.9182, "step": 1948 }, { "epoch": 0.3622003345103141, "grad_norm": 0.5846841335296631, "learning_rate": 0.00018455105677825072, "loss": 0.7915, "step": 1949 }, { "epoch": 0.3623861735736852, "grad_norm": 0.5810773372650146, "learning_rate": 0.0001845354370697482, "loss": 1.0371, "step": 1950 }, { "epoch": 0.3625720126370563, "grad_norm": 0.5026060342788696, "learning_rate": 0.00018451981013074703, "loss": 1.2072, "step": 1951 }, { "epoch": 0.3627578517004274, "grad_norm": 0.5775339007377625, "learning_rate": 0.00018450417596258388, "loss": 1.2526, "step": 1952 }, { "epoch": 0.36294369076379857, "grad_norm": 0.5179687142372131, "learning_rate": 0.00018448853456659586, "loss": 1.0594, "step": 1953 }, { "epoch": 0.3631295298271697, "grad_norm": 0.5170058608055115, "learning_rate": 0.00018447288594412094, "loss": 1.0907, "step": 1954 }, { "epoch": 0.3633153688905408, "grad_norm": 0.5153071284294128, "learning_rate": 0.00018445723009649748, "loss": 1.19, "step": 1955 }, { "epoch": 0.3635012079539119, "grad_norm": 0.6060937643051147, "learning_rate": 0.00018444156702506457, "loss": 0.9486, "step": 1956 }, { "epoch": 0.36368704701728305, "grad_norm": 0.5591087341308594, "learning_rate": 0.00018442589673116195, "loss": 0.8211, "step": 1957 }, { "epoch": 0.36387288608065416, "grad_norm": 0.4953506588935852, "learning_rate": 0.0001844102192161299, "loss": 0.7849, "step": 1958 }, { "epoch": 0.36405872514402526, "grad_norm": 0.6325752139091492, "learning_rate": 0.0001843945344813094, "loss": 1.2812, "step": 1959 }, { "epoch": 0.36424456420739637, "grad_norm": 0.5928200483322144, "learning_rate": 0.00018437884252804195, "loss": 1.2031, "step": 1960 }, { "epoch": 0.36443040327076753, "grad_norm": 0.6781278848648071, "learning_rate": 0.0001843631433576697, "loss": 1.2147, "step": 1961 }, { "epoch": 0.36461624233413864, "grad_norm": 0.6580811142921448, "learning_rate": 0.00018434743697153552, 
"loss": 1.1414, "step": 1962 }, { "epoch": 0.36480208139750975, "grad_norm": 0.5909239053726196, "learning_rate": 0.00018433172337098272, "loss": 1.1057, "step": 1963 }, { "epoch": 0.36498792046088085, "grad_norm": 0.6667724847793579, "learning_rate": 0.00018431600255735538, "loss": 1.0801, "step": 1964 }, { "epoch": 0.365173759524252, "grad_norm": 0.6188902854919434, "learning_rate": 0.00018430027453199808, "loss": 1.0105, "step": 1965 }, { "epoch": 0.3653595985876231, "grad_norm": 0.5609699487686157, "learning_rate": 0.00018428453929625614, "loss": 1.0674, "step": 1966 }, { "epoch": 0.36554543765099423, "grad_norm": 0.6792697906494141, "learning_rate": 0.00018426879685147537, "loss": 1.193, "step": 1967 }, { "epoch": 0.36573127671436534, "grad_norm": 0.6148858666419983, "learning_rate": 0.00018425304719900227, "loss": 1.1392, "step": 1968 }, { "epoch": 0.3659171157777365, "grad_norm": 0.4912833869457245, "learning_rate": 0.00018423729034018396, "loss": 0.8163, "step": 1969 }, { "epoch": 0.3661029548411076, "grad_norm": 0.6004575490951538, "learning_rate": 0.00018422152627636814, "loss": 1.1595, "step": 1970 }, { "epoch": 0.3662887939044787, "grad_norm": 0.6387303471565247, "learning_rate": 0.00018420575500890314, "loss": 1.2459, "step": 1971 }, { "epoch": 0.3664746329678498, "grad_norm": 0.5127397179603577, "learning_rate": 0.00018418997653913793, "loss": 0.9239, "step": 1972 }, { "epoch": 0.366660472031221, "grad_norm": 0.5673999786376953, "learning_rate": 0.00018417419086842206, "loss": 1.0092, "step": 1973 }, { "epoch": 0.3668463110945921, "grad_norm": 0.8210087418556213, "learning_rate": 0.00018415839799810577, "loss": 1.0895, "step": 1974 }, { "epoch": 0.3670321501579632, "grad_norm": 0.5619094967842102, "learning_rate": 0.00018414259792953977, "loss": 0.9131, "step": 1975 }, { "epoch": 0.3672179892213343, "grad_norm": 0.5745676755905151, "learning_rate": 0.0001841267906640755, "loss": 0.7174, "step": 1976 }, { "epoch": 0.36740382828470547, "grad_norm": 
0.6106050610542297, "learning_rate": 0.00018411097620306502, "loss": 0.991, "step": 1977 }, { "epoch": 0.3675896673480766, "grad_norm": 0.5693312883377075, "learning_rate": 0.00018409515454786095, "loss": 0.9161, "step": 1978 }, { "epoch": 0.3677755064114477, "grad_norm": 0.623209536075592, "learning_rate": 0.00018407932569981656, "loss": 1.0186, "step": 1979 }, { "epoch": 0.3679613454748188, "grad_norm": 0.5974239110946655, "learning_rate": 0.00018406348966028575, "loss": 1.1224, "step": 1980 }, { "epoch": 0.36814718453818995, "grad_norm": 0.5626535415649414, "learning_rate": 0.00018404764643062296, "loss": 1.0819, "step": 1981 }, { "epoch": 0.36833302360156106, "grad_norm": 0.5896697044372559, "learning_rate": 0.0001840317960121833, "loss": 1.1705, "step": 1982 }, { "epoch": 0.36851886266493217, "grad_norm": 0.5947336554527283, "learning_rate": 0.00018401593840632253, "loss": 1.2353, "step": 1983 }, { "epoch": 0.3687047017283033, "grad_norm": 0.5700284242630005, "learning_rate": 0.00018400007361439697, "loss": 0.9938, "step": 1984 }, { "epoch": 0.36889054079167444, "grad_norm": 0.513639509677887, "learning_rate": 0.00018398420163776358, "loss": 1.1357, "step": 1985 }, { "epoch": 0.36907637985504554, "grad_norm": 0.6426970958709717, "learning_rate": 0.0001839683224777799, "loss": 1.1692, "step": 1986 }, { "epoch": 0.36926221891841665, "grad_norm": 0.5685890913009644, "learning_rate": 0.0001839524361358041, "loss": 1.1505, "step": 1987 }, { "epoch": 0.36944805798178776, "grad_norm": 0.5381999015808105, "learning_rate": 0.000183936542613195, "loss": 1.0664, "step": 1988 }, { "epoch": 0.36963389704515887, "grad_norm": 0.5211588740348816, "learning_rate": 0.00018392064191131202, "loss": 1.1045, "step": 1989 }, { "epoch": 0.36981973610853003, "grad_norm": 0.605248749256134, "learning_rate": 0.00018390473403151513, "loss": 1.0103, "step": 1990 }, { "epoch": 0.37000557517190114, "grad_norm": 0.6501337885856628, "learning_rate": 0.000183888818975165, "loss": 1.1475, 
"step": 1991 }, { "epoch": 0.37019141423527224, "grad_norm": 0.5290714502334595, "learning_rate": 0.00018387289674362292, "loss": 0.9015, "step": 1992 }, { "epoch": 0.37037725329864335, "grad_norm": 0.5104148983955383, "learning_rate": 0.00018385696733825066, "loss": 0.9868, "step": 1993 }, { "epoch": 0.3705630923620145, "grad_norm": 0.6122913956642151, "learning_rate": 0.00018384103076041078, "loss": 1.0051, "step": 1994 }, { "epoch": 0.3707489314253856, "grad_norm": 6.534750461578369, "learning_rate": 0.0001838250870114663, "loss": 3.3172, "step": 1995 }, { "epoch": 0.3709347704887567, "grad_norm": 0.4851309657096863, "learning_rate": 0.00018380913609278098, "loss": 0.9435, "step": 1996 }, { "epoch": 0.37112060955212783, "grad_norm": 0.6162280440330505, "learning_rate": 0.0001837931780057191, "loss": 1.0495, "step": 1997 }, { "epoch": 0.371306448615499, "grad_norm": 0.5363066792488098, "learning_rate": 0.00018377721275164558, "loss": 1.0672, "step": 1998 }, { "epoch": 0.3714922876788701, "grad_norm": 0.5488103032112122, "learning_rate": 0.00018376124033192602, "loss": 0.985, "step": 1999 }, { "epoch": 0.3716781267422412, "grad_norm": 0.5667101740837097, "learning_rate": 0.00018374526074792656, "loss": 1.0454, "step": 2000 }, { "epoch": 0.3718639658056123, "grad_norm": 0.6776452660560608, "learning_rate": 0.00018372927400101395, "loss": 1.1813, "step": 2001 }, { "epoch": 0.3720498048689835, "grad_norm": 0.6331282258033752, "learning_rate": 0.00018371328009255552, "loss": 1.2781, "step": 2002 }, { "epoch": 0.3722356439323546, "grad_norm": 0.5698820352554321, "learning_rate": 0.00018369727902391937, "loss": 1.2935, "step": 2003 }, { "epoch": 0.3724214829957257, "grad_norm": 0.6152218580245972, "learning_rate": 0.00018368127079647405, "loss": 1.1618, "step": 2004 }, { "epoch": 0.3726073220590968, "grad_norm": 0.6485865116119385, "learning_rate": 0.00018366525541158875, "loss": 1.2801, "step": 2005 }, { "epoch": 0.37279316112246796, "grad_norm": 0.5456370711326599, 
"learning_rate": 0.00018364923287063334, "loss": 1.0006, "step": 2006 }, { "epoch": 0.37297900018583907, "grad_norm": 0.5099388957023621, "learning_rate": 0.00018363320317497825, "loss": 0.9357, "step": 2007 }, { "epoch": 0.3731648392492102, "grad_norm": 0.5599960088729858, "learning_rate": 0.00018361716632599455, "loss": 1.1451, "step": 2008 }, { "epoch": 0.3733506783125813, "grad_norm": 0.6054771542549133, "learning_rate": 0.00018360112232505387, "loss": 1.1288, "step": 2009 }, { "epoch": 0.37353651737595245, "grad_norm": 0.7153000235557556, "learning_rate": 0.00018358507117352852, "loss": 1.2604, "step": 2010 }, { "epoch": 0.37372235643932356, "grad_norm": 0.6568436026573181, "learning_rate": 0.0001835690128727914, "loss": 1.1132, "step": 2011 }, { "epoch": 0.37390819550269466, "grad_norm": 0.6042101979255676, "learning_rate": 0.00018355294742421598, "loss": 1.3149, "step": 2012 }, { "epoch": 0.37409403456606577, "grad_norm": 0.6615718603134155, "learning_rate": 0.0001835368748291764, "loss": 1.3576, "step": 2013 }, { "epoch": 0.37427987362943693, "grad_norm": 0.5358205437660217, "learning_rate": 0.00018352079508904734, "loss": 1.0024, "step": 2014 }, { "epoch": 0.37446571269280804, "grad_norm": 0.44465547800064087, "learning_rate": 0.00018350470820520417, "loss": 0.8894, "step": 2015 }, { "epoch": 0.37465155175617915, "grad_norm": 0.4735083281993866, "learning_rate": 0.00018348861417902287, "loss": 0.8917, "step": 2016 }, { "epoch": 0.37483739081955025, "grad_norm": 0.6190890073776245, "learning_rate": 0.00018347251301187992, "loss": 1.2724, "step": 2017 }, { "epoch": 0.3750232298829214, "grad_norm": 0.5531015396118164, "learning_rate": 0.0001834564047051525, "loss": 1.1366, "step": 2018 }, { "epoch": 0.3752090689462925, "grad_norm": 0.6023117899894714, "learning_rate": 0.00018344028926021844, "loss": 0.9135, "step": 2019 }, { "epoch": 0.37539490800966363, "grad_norm": 0.5503703355789185, "learning_rate": 0.0001834241666784561, "loss": 1.0205, "step": 2020 }, { 
"epoch": 0.37558074707303474, "grad_norm": 0.554999828338623, "learning_rate": 0.00018340803696124448, "loss": 1.1605, "step": 2021 }, { "epoch": 0.3757665861364059, "grad_norm": 0.5042999386787415, "learning_rate": 0.00018339190010996321, "loss": 1.1332, "step": 2022 }, { "epoch": 0.375952425199777, "grad_norm": 0.64976567029953, "learning_rate": 0.00018337575612599246, "loss": 1.2517, "step": 2023 }, { "epoch": 0.3761382642631481, "grad_norm": 0.7032577991485596, "learning_rate": 0.00018335960501071312, "loss": 1.26, "step": 2024 }, { "epoch": 0.3763241033265192, "grad_norm": 0.5371274948120117, "learning_rate": 0.0001833434467655066, "loss": 1.0916, "step": 2025 }, { "epoch": 0.37650994238989033, "grad_norm": 0.6726012229919434, "learning_rate": 0.00018332728139175492, "loss": 1.1531, "step": 2026 }, { "epoch": 0.3766957814532615, "grad_norm": 0.5239533185958862, "learning_rate": 0.00018331110889084078, "loss": 0.9916, "step": 2027 }, { "epoch": 0.3768816205166326, "grad_norm": 0.542515754699707, "learning_rate": 0.00018329492926414745, "loss": 1.2967, "step": 2028 }, { "epoch": 0.3770674595800037, "grad_norm": 0.542534351348877, "learning_rate": 0.00018327874251305879, "loss": 1.0778, "step": 2029 }, { "epoch": 0.3772532986433748, "grad_norm": 0.7218822836875916, "learning_rate": 0.00018326254863895928, "loss": 1.3548, "step": 2030 }, { "epoch": 0.377439137706746, "grad_norm": 0.5446364283561707, "learning_rate": 0.00018324634764323402, "loss": 1.0741, "step": 2031 }, { "epoch": 0.3776249767701171, "grad_norm": 0.5216005444526672, "learning_rate": 0.00018323013952726875, "loss": 0.9263, "step": 2032 }, { "epoch": 0.3778108158334882, "grad_norm": 0.5345918536186218, "learning_rate": 0.00018321392429244976, "loss": 1.1902, "step": 2033 }, { "epoch": 0.3779966548968593, "grad_norm": 0.5378860831260681, "learning_rate": 0.00018319770194016397, "loss": 1.1874, "step": 2034 }, { "epoch": 0.37818249396023046, "grad_norm": 2.5068178176879883, "learning_rate": 
0.00018318147247179894, "loss": 3.1407, "step": 2035 }, { "epoch": 0.37836833302360157, "grad_norm": 0.5298924446105957, "learning_rate": 0.00018316523588874278, "loss": 1.1361, "step": 2036 }, { "epoch": 0.3785541720869727, "grad_norm": 0.6050952672958374, "learning_rate": 0.00018314899219238422, "loss": 0.869, "step": 2037 }, { "epoch": 0.3787400111503438, "grad_norm": 0.5876106023788452, "learning_rate": 0.00018313274138411268, "loss": 1.1049, "step": 2038 }, { "epoch": 0.37892585021371494, "grad_norm": 0.501067578792572, "learning_rate": 0.0001831164834653181, "loss": 1.049, "step": 2039 }, { "epoch": 0.37911168927708605, "grad_norm": 0.5056796073913574, "learning_rate": 0.00018310021843739104, "loss": 0.866, "step": 2040 }, { "epoch": 0.37929752834045716, "grad_norm": 0.574049174785614, "learning_rate": 0.0001830839463017227, "loss": 0.8973, "step": 2041 }, { "epoch": 0.37948336740382826, "grad_norm": 0.5803636312484741, "learning_rate": 0.00018306766705970486, "loss": 1.3646, "step": 2042 }, { "epoch": 0.3796692064671994, "grad_norm": 0.5793257355690002, "learning_rate": 0.00018305138071272992, "loss": 1.0705, "step": 2043 }, { "epoch": 0.37985504553057053, "grad_norm": 0.4978286921977997, "learning_rate": 0.0001830350872621909, "loss": 1.0592, "step": 2044 }, { "epoch": 0.38004088459394164, "grad_norm": 0.5665377378463745, "learning_rate": 0.00018301878670948137, "loss": 0.8908, "step": 2045 }, { "epoch": 0.38022672365731275, "grad_norm": 0.5234101414680481, "learning_rate": 0.00018300247905599564, "loss": 1.0787, "step": 2046 }, { "epoch": 0.3804125627206839, "grad_norm": 0.5455564856529236, "learning_rate": 0.00018298616430312845, "loss": 1.0485, "step": 2047 }, { "epoch": 0.380598401784055, "grad_norm": 0.6050750613212585, "learning_rate": 0.00018296984245227526, "loss": 1.1934, "step": 2048 }, { "epoch": 0.3807842408474261, "grad_norm": 0.6124750375747681, "learning_rate": 0.00018295351350483216, "loss": 1.2551, "step": 2049 }, { "epoch": 
0.38097007991079723, "grad_norm": 0.6014724373817444, "learning_rate": 0.00018293717746219575, "loss": 1.0813, "step": 2050 }, { "epoch": 0.3811559189741684, "grad_norm": 0.5594990253448486, "learning_rate": 0.0001829208343257633, "loss": 1.0199, "step": 2051 }, { "epoch": 0.3813417580375395, "grad_norm": 0.5794000029563904, "learning_rate": 0.00018290448409693266, "loss": 1.0601, "step": 2052 }, { "epoch": 0.3815275971009106, "grad_norm": 0.6697391867637634, "learning_rate": 0.00018288812677710233, "loss": 1.341, "step": 2053 }, { "epoch": 0.3817134361642817, "grad_norm": 0.6012246608734131, "learning_rate": 0.00018287176236767136, "loss": 1.2732, "step": 2054 }, { "epoch": 0.3818992752276529, "grad_norm": 0.5211855173110962, "learning_rate": 0.00018285539087003946, "loss": 1.0806, "step": 2055 }, { "epoch": 0.382085114291024, "grad_norm": 0.6030214428901672, "learning_rate": 0.0001828390122856069, "loss": 1.1769, "step": 2056 }, { "epoch": 0.3822709533543951, "grad_norm": 0.6156930923461914, "learning_rate": 0.00018282262661577456, "loss": 1.272, "step": 2057 }, { "epoch": 0.3824567924177662, "grad_norm": 0.5623907446861267, "learning_rate": 0.000182806233861944, "loss": 1.0137, "step": 2058 }, { "epoch": 0.3826426314811373, "grad_norm": 0.5875430703163147, "learning_rate": 0.00018278983402551728, "loss": 1.4113, "step": 2059 }, { "epoch": 0.38282847054450847, "grad_norm": 0.5854385495185852, "learning_rate": 0.0001827734271078971, "loss": 1.416, "step": 2060 }, { "epoch": 0.3830143096078796, "grad_norm": 0.6129041314125061, "learning_rate": 0.00018275701311048682, "loss": 1.571, "step": 2061 }, { "epoch": 0.3832001486712507, "grad_norm": 0.5709010362625122, "learning_rate": 0.00018274059203469034, "loss": 1.3855, "step": 2062 }, { "epoch": 0.3833859877346218, "grad_norm": 0.5533404350280762, "learning_rate": 0.0001827241638819122, "loss": 1.0585, "step": 2063 }, { "epoch": 0.38357182679799295, "grad_norm": 0.45654740929603577, "learning_rate": 
0.00018270772865355755, "loss": 0.9902, "step": 2064 }, { "epoch": 0.38375766586136406, "grad_norm": 0.5475004315376282, "learning_rate": 0.00018269128635103207, "loss": 1.1629, "step": 2065 }, { "epoch": 0.38394350492473517, "grad_norm": 0.6893115639686584, "learning_rate": 0.00018267483697574218, "loss": 1.1441, "step": 2066 }, { "epoch": 0.3841293439881063, "grad_norm": 0.4979449212551117, "learning_rate": 0.00018265838052909482, "loss": 0.9994, "step": 2067 }, { "epoch": 0.38431518305147744, "grad_norm": 0.6472446322441101, "learning_rate": 0.0001826419170124975, "loss": 1.1945, "step": 2068 }, { "epoch": 0.38450102211484855, "grad_norm": 0.5857232213020325, "learning_rate": 0.0001826254464273584, "loss": 1.2508, "step": 2069 }, { "epoch": 0.38468686117821965, "grad_norm": 0.6424605250358582, "learning_rate": 0.0001826089687750863, "loss": 0.9354, "step": 2070 }, { "epoch": 0.38487270024159076, "grad_norm": 0.5561656951904297, "learning_rate": 0.00018259248405709056, "loss": 1.1487, "step": 2071 }, { "epoch": 0.3850585393049619, "grad_norm": 0.6111963391304016, "learning_rate": 0.00018257599227478112, "loss": 1.0517, "step": 2072 }, { "epoch": 0.38524437836833303, "grad_norm": 0.6646915674209595, "learning_rate": 0.00018255949342956863, "loss": 0.7623, "step": 2073 }, { "epoch": 0.38543021743170414, "grad_norm": 0.4870254397392273, "learning_rate": 0.00018254298752286424, "loss": 0.842, "step": 2074 }, { "epoch": 0.38561605649507524, "grad_norm": 0.47323906421661377, "learning_rate": 0.0001825264745560797, "loss": 1.0857, "step": 2075 }, { "epoch": 0.3858018955584464, "grad_norm": 0.6508793234825134, "learning_rate": 0.00018250995453062742, "loss": 1.1488, "step": 2076 }, { "epoch": 0.3859877346218175, "grad_norm": 0.7815990447998047, "learning_rate": 0.00018249342744792045, "loss": 1.1072, "step": 2077 }, { "epoch": 0.3861735736851886, "grad_norm": 0.5861072540283203, "learning_rate": 0.0001824768933093723, "loss": 0.9545, "step": 2078 }, { "epoch": 
0.3863594127485597, "grad_norm": 0.5428565144538879, "learning_rate": 0.00018246035211639722, "loss": 1.0341, "step": 2079 }, { "epoch": 0.3865452518119309, "grad_norm": 0.6659338474273682, "learning_rate": 0.00018244380387040998, "loss": 1.1575, "step": 2080 }, { "epoch": 0.386731090875302, "grad_norm": 0.6334238052368164, "learning_rate": 0.00018242724857282604, "loss": 1.108, "step": 2081 }, { "epoch": 0.3869169299386731, "grad_norm": 0.6278782486915588, "learning_rate": 0.00018241068622506138, "loss": 1.1232, "step": 2082 }, { "epoch": 0.3871027690020442, "grad_norm": 0.6690854430198669, "learning_rate": 0.0001823941168285326, "loss": 1.1608, "step": 2083 }, { "epoch": 0.3872886080654154, "grad_norm": 0.6730903387069702, "learning_rate": 0.00018237754038465692, "loss": 1.2147, "step": 2084 }, { "epoch": 0.3874744471287865, "grad_norm": 0.5820775628089905, "learning_rate": 0.0001823609568948522, "loss": 0.8705, "step": 2085 }, { "epoch": 0.3876602861921576, "grad_norm": 0.630463182926178, "learning_rate": 0.00018234436636053683, "loss": 0.9977, "step": 2086 }, { "epoch": 0.3878461252555287, "grad_norm": 0.5832675099372864, "learning_rate": 0.0001823277687831298, "loss": 1.0188, "step": 2087 }, { "epoch": 0.38803196431889986, "grad_norm": 0.6170270442962646, "learning_rate": 0.0001823111641640508, "loss": 1.2257, "step": 2088 }, { "epoch": 0.38821780338227097, "grad_norm": 0.5397138595581055, "learning_rate": 0.00018229455250472003, "loss": 1.0039, "step": 2089 }, { "epoch": 0.38840364244564207, "grad_norm": 0.6032971739768982, "learning_rate": 0.00018227793380655834, "loss": 1.1537, "step": 2090 }, { "epoch": 0.3885894815090132, "grad_norm": 0.725053608417511, "learning_rate": 0.0001822613080709871, "loss": 1.1994, "step": 2091 }, { "epoch": 0.38877532057238434, "grad_norm": 0.5189559459686279, "learning_rate": 0.0001822446752994284, "loss": 0.9984, "step": 2092 }, { "epoch": 0.38896115963575545, "grad_norm": 0.5911458134651184, "learning_rate": 
0.0001822280354933049, "loss": 1.1677, "step": 2093 }, { "epoch": 0.38914699869912656, "grad_norm": 0.5729151368141174, "learning_rate": 0.0001822113886540398, "loss": 1.0192, "step": 2094 }, { "epoch": 0.38933283776249766, "grad_norm": 0.5103919506072998, "learning_rate": 0.0001821947347830569, "loss": 0.9939, "step": 2095 }, { "epoch": 0.38951867682586877, "grad_norm": 0.6922236084938049, "learning_rate": 0.00018217807388178076, "loss": 0.8513, "step": 2096 }, { "epoch": 0.38970451588923993, "grad_norm": 0.863508939743042, "learning_rate": 0.00018216140595163629, "loss": 0.7811, "step": 2097 }, { "epoch": 0.38989035495261104, "grad_norm": 0.5084664225578308, "learning_rate": 0.00018214473099404922, "loss": 0.9129, "step": 2098 }, { "epoch": 0.39007619401598215, "grad_norm": 2.070678949356079, "learning_rate": 0.00018212804901044578, "loss": 2.9204, "step": 2099 }, { "epoch": 0.39026203307935325, "grad_norm": 0.4668201506137848, "learning_rate": 0.00018211136000225277, "loss": 0.7666, "step": 2100 }, { "epoch": 0.3904478721427244, "grad_norm": 0.5377861261367798, "learning_rate": 0.0001820946639708977, "loss": 1.0295, "step": 2101 }, { "epoch": 0.3906337112060955, "grad_norm": 0.6034281849861145, "learning_rate": 0.0001820779609178086, "loss": 1.0837, "step": 2102 }, { "epoch": 0.39081955026946663, "grad_norm": 0.5661458373069763, "learning_rate": 0.0001820612508444141, "loss": 0.9965, "step": 2103 }, { "epoch": 0.39100538933283774, "grad_norm": 2.1413791179656982, "learning_rate": 0.00018204453375214344, "loss": 3.3321, "step": 2104 }, { "epoch": 0.3911912283962089, "grad_norm": 0.5809323191642761, "learning_rate": 0.0001820278096424265, "loss": 0.9947, "step": 2105 }, { "epoch": 0.39137706745958, "grad_norm": 0.47406235337257385, "learning_rate": 0.00018201107851669373, "loss": 0.9195, "step": 2106 }, { "epoch": 0.3915629065229511, "grad_norm": 0.686505913734436, "learning_rate": 0.00018199434037637614, "loss": 1.0664, "step": 2107 }, { "epoch": 
0.3917487455863222, "grad_norm": 0.5684332251548767, "learning_rate": 0.00018197759522290544, "loss": 1.0698, "step": 2108 }, { "epoch": 0.3919345846496934, "grad_norm": 0.6020855903625488, "learning_rate": 0.0001819608430577138, "loss": 1.0924, "step": 2109 }, { "epoch": 0.3921204237130645, "grad_norm": 0.6147074103355408, "learning_rate": 0.00018194408388223416, "loss": 1.1372, "step": 2110 }, { "epoch": 0.3923062627764356, "grad_norm": 0.662574291229248, "learning_rate": 0.00018192731769789988, "loss": 1.2889, "step": 2111 }, { "epoch": 0.3924921018398067, "grad_norm": 0.6683513522148132, "learning_rate": 0.00018191054450614503, "loss": 1.0685, "step": 2112 }, { "epoch": 0.39267794090317787, "grad_norm": 0.6317805051803589, "learning_rate": 0.00018189376430840436, "loss": 1.1599, "step": 2113 }, { "epoch": 0.392863779966549, "grad_norm": 0.5959762930870056, "learning_rate": 0.00018187697710611298, "loss": 1.1292, "step": 2114 }, { "epoch": 0.3930496190299201, "grad_norm": 0.5756658315658569, "learning_rate": 0.00018186018290070676, "loss": 1.1887, "step": 2115 }, { "epoch": 0.3932354580932912, "grad_norm": 0.5529783368110657, "learning_rate": 0.0001818433816936222, "loss": 1.0081, "step": 2116 }, { "epoch": 0.39342129715666235, "grad_norm": 0.6137245297431946, "learning_rate": 0.00018182657348629632, "loss": 0.8836, "step": 2117 }, { "epoch": 0.39360713622003346, "grad_norm": 0.5938009023666382, "learning_rate": 0.00018180975828016677, "loss": 1.2131, "step": 2118 }, { "epoch": 0.39379297528340457, "grad_norm": 0.6098518967628479, "learning_rate": 0.00018179293607667178, "loss": 0.9022, "step": 2119 }, { "epoch": 0.3939788143467757, "grad_norm": 0.5324256420135498, "learning_rate": 0.00018177610687725016, "loss": 1.1064, "step": 2120 }, { "epoch": 0.39416465341014684, "grad_norm": 0.5962520837783813, "learning_rate": 0.00018175927068334142, "loss": 1.1095, "step": 2121 }, { "epoch": 0.39435049247351794, "grad_norm": 0.6412689685821533, "learning_rate": 
0.00018174242749638552, "loss": 1.3036, "step": 2122 }, { "epoch": 0.39453633153688905, "grad_norm": 0.537754476070404, "learning_rate": 0.00018172557731782314, "loss": 1.0712, "step": 2123 }, { "epoch": 0.39472217060026016, "grad_norm": 0.6352578401565552, "learning_rate": 0.0001817087201490955, "loss": 1.19, "step": 2124 }, { "epoch": 0.3949080096636313, "grad_norm": 0.5872593522071838, "learning_rate": 0.00018169185599164442, "loss": 1.1057, "step": 2125 }, { "epoch": 0.39509384872700243, "grad_norm": 0.5860781669616699, "learning_rate": 0.00018167498484691238, "loss": 1.0346, "step": 2126 }, { "epoch": 0.39527968779037354, "grad_norm": 0.5678336024284363, "learning_rate": 0.00018165810671634232, "loss": 0.9507, "step": 2127 }, { "epoch": 0.39546552685374464, "grad_norm": 0.4740240275859833, "learning_rate": 0.00018164122160137792, "loss": 1.143, "step": 2128 }, { "epoch": 0.3956513659171158, "grad_norm": 0.6389288902282715, "learning_rate": 0.00018162432950346338, "loss": 1.1391, "step": 2129 }, { "epoch": 0.3958372049804869, "grad_norm": 0.6023207306861877, "learning_rate": 0.00018160743042404352, "loss": 0.9583, "step": 2130 }, { "epoch": 0.396023044043858, "grad_norm": 0.5941089987754822, "learning_rate": 0.00018159052436456373, "loss": 1.2258, "step": 2131 }, { "epoch": 0.3962088831072291, "grad_norm": 0.5679216384887695, "learning_rate": 0.0001815736113264701, "loss": 0.9775, "step": 2132 }, { "epoch": 0.39639472217060023, "grad_norm": 0.5501899719238281, "learning_rate": 0.00018155669131120919, "loss": 1.2804, "step": 2133 }, { "epoch": 0.3965805612339714, "grad_norm": 0.5905486941337585, "learning_rate": 0.00018153976432022816, "loss": 0.9472, "step": 2134 }, { "epoch": 0.3967664002973425, "grad_norm": 0.7558655142784119, "learning_rate": 0.00018152283035497492, "loss": 1.3491, "step": 2135 }, { "epoch": 0.3969522393607136, "grad_norm": 0.6427619457244873, "learning_rate": 0.00018150588941689776, "loss": 1.1921, "step": 2136 }, { "epoch": 
0.3971380784240847, "grad_norm": 0.6187041997909546, "learning_rate": 0.00018148894150744573, "loss": 1.1544, "step": 2137 }, { "epoch": 0.3973239174874559, "grad_norm": 0.5489941239356995, "learning_rate": 0.00018147198662806844, "loss": 1.1024, "step": 2138 }, { "epoch": 0.397509756550827, "grad_norm": 0.5993608832359314, "learning_rate": 0.000181455024780216, "loss": 0.9939, "step": 2139 }, { "epoch": 0.3976955956141981, "grad_norm": 0.5785315036773682, "learning_rate": 0.00018143805596533926, "loss": 1.1785, "step": 2140 }, { "epoch": 0.3978814346775692, "grad_norm": 0.5734258890151978, "learning_rate": 0.0001814210801848896, "loss": 1.1233, "step": 2141 }, { "epoch": 0.39806727374094036, "grad_norm": 0.6431028842926025, "learning_rate": 0.00018140409744031897, "loss": 1.0709, "step": 2142 }, { "epoch": 0.39825311280431147, "grad_norm": 0.553656816482544, "learning_rate": 0.00018138710773307995, "loss": 1.0857, "step": 2143 }, { "epoch": 0.3984389518676826, "grad_norm": 0.639657199382782, "learning_rate": 0.0001813701110646257, "loss": 1.2033, "step": 2144 }, { "epoch": 0.3986247909310537, "grad_norm": 0.5474963784217834, "learning_rate": 0.00018135310743641, "loss": 1.1524, "step": 2145 }, { "epoch": 0.39881062999442485, "grad_norm": 0.5809098482131958, "learning_rate": 0.00018133609684988717, "loss": 1.3077, "step": 2146 }, { "epoch": 0.39899646905779595, "grad_norm": 0.9639946222305298, "learning_rate": 0.00018131907930651222, "loss": 1.1468, "step": 2147 }, { "epoch": 0.39918230812116706, "grad_norm": 0.676054835319519, "learning_rate": 0.00018130205480774061, "loss": 1.0494, "step": 2148 }, { "epoch": 0.39936814718453817, "grad_norm": 0.5698658227920532, "learning_rate": 0.0001812850233550286, "loss": 1.0787, "step": 2149 }, { "epoch": 0.39955398624790933, "grad_norm": 0.5421280264854431, "learning_rate": 0.0001812679849498328, "loss": 0.9483, "step": 2150 }, { "epoch": 0.39973982531128044, "grad_norm": 0.6609428524971008, "learning_rate": 
0.00018125093959361067, "loss": 1.1642, "step": 2151 }, { "epoch": 0.39992566437465155, "grad_norm": 0.5440996289253235, "learning_rate": 0.00018123388728782, "loss": 1.2694, "step": 2152 }, { "epoch": 0.40011150343802265, "grad_norm": 0.5741338133811951, "learning_rate": 0.00018121682803391942, "loss": 1.236, "step": 2153 }, { "epoch": 0.4002973425013938, "grad_norm": 0.6801538467407227, "learning_rate": 0.000181199761833368, "loss": 0.6522, "step": 2154 }, { "epoch": 0.4004831815647649, "grad_norm": 0.6567083597183228, "learning_rate": 0.00018118268868762546, "loss": 1.3321, "step": 2155 }, { "epoch": 0.40066902062813603, "grad_norm": 0.6132495403289795, "learning_rate": 0.00018116560859815208, "loss": 0.9719, "step": 2156 }, { "epoch": 0.40085485969150714, "grad_norm": 0.582120954990387, "learning_rate": 0.00018114852156640878, "loss": 0.8756, "step": 2157 }, { "epoch": 0.4010406987548783, "grad_norm": 0.715114414691925, "learning_rate": 0.00018113142759385706, "loss": 1.1517, "step": 2158 }, { "epoch": 0.4012265378182494, "grad_norm": 0.5721078515052795, "learning_rate": 0.000181114326681959, "loss": 0.8977, "step": 2159 }, { "epoch": 0.4014123768816205, "grad_norm": 0.5104873776435852, "learning_rate": 0.00018109721883217725, "loss": 1.0237, "step": 2160 }, { "epoch": 0.4015982159449916, "grad_norm": 0.5614782571792603, "learning_rate": 0.0001810801040459751, "loss": 0.8917, "step": 2161 }, { "epoch": 0.4017840550083628, "grad_norm": 0.5537241101264954, "learning_rate": 0.0001810629823248164, "loss": 1.2027, "step": 2162 }, { "epoch": 0.4019698940717339, "grad_norm": 0.7075382471084595, "learning_rate": 0.00018104585367016563, "loss": 1.0896, "step": 2163 }, { "epoch": 0.402155733135105, "grad_norm": 0.503906786441803, "learning_rate": 0.00018102871808348784, "loss": 0.7954, "step": 2164 }, { "epoch": 0.4023415721984761, "grad_norm": 0.5689190626144409, "learning_rate": 0.00018101157556624865, "loss": 1.2011, "step": 2165 }, { "epoch": 0.40252741126184727, 
"grad_norm": 0.5936292409896851, "learning_rate": 0.00018099442611991436, "loss": 1.1824, "step": 2166 }, { "epoch": 0.4027132503252184, "grad_norm": 0.5619094371795654, "learning_rate": 0.0001809772697459517, "loss": 1.0462, "step": 2167 }, { "epoch": 0.4028990893885895, "grad_norm": 0.4826868772506714, "learning_rate": 0.00018096010644582817, "loss": 1.0125, "step": 2168 }, { "epoch": 0.4030849284519606, "grad_norm": 0.633266806602478, "learning_rate": 0.00018094293622101173, "loss": 1.1802, "step": 2169 }, { "epoch": 0.4032707675153317, "grad_norm": 0.6388394832611084, "learning_rate": 0.00018092575907297106, "loss": 1.0138, "step": 2170 }, { "epoch": 0.40345660657870286, "grad_norm": 0.5150460600852966, "learning_rate": 0.00018090857500317525, "loss": 0.9774, "step": 2171 }, { "epoch": 0.40364244564207397, "grad_norm": 0.6779277920722961, "learning_rate": 0.00018089138401309422, "loss": 1.0853, "step": 2172 }, { "epoch": 0.4038282847054451, "grad_norm": 0.5205432176589966, "learning_rate": 0.00018087418610419827, "loss": 0.7876, "step": 2173 }, { "epoch": 0.4040141237688162, "grad_norm": 0.4968721568584442, "learning_rate": 0.00018085698127795835, "loss": 0.7495, "step": 2174 }, { "epoch": 0.40419996283218734, "grad_norm": 0.5260478258132935, "learning_rate": 0.0001808397695358461, "loss": 0.8344, "step": 2175 }, { "epoch": 0.40438580189555845, "grad_norm": 0.6795833706855774, "learning_rate": 0.00018082255087933363, "loss": 1.0586, "step": 2176 }, { "epoch": 0.40457164095892956, "grad_norm": 0.5038895010948181, "learning_rate": 0.00018080532530989372, "loss": 0.8544, "step": 2177 }, { "epoch": 0.40475748002230066, "grad_norm": 0.6297782063484192, "learning_rate": 0.00018078809282899967, "loss": 1.3135, "step": 2178 }, { "epoch": 0.4049433190856718, "grad_norm": 0.5927119255065918, "learning_rate": 0.00018077085343812545, "loss": 1.0277, "step": 2179 }, { "epoch": 0.40512915814904293, "grad_norm": 0.5536563992500305, "learning_rate": 0.00018075360713874558, 
"loss": 1.0211, "step": 2180 }, { "epoch": 0.40531499721241404, "grad_norm": 0.5813071131706238, "learning_rate": 0.00018073635393233513, "loss": 0.9992, "step": 2181 }, { "epoch": 0.40550083627578515, "grad_norm": 0.5180925130844116, "learning_rate": 0.00018071909382036985, "loss": 1.1269, "step": 2182 }, { "epoch": 0.4056866753391563, "grad_norm": 0.5994383692741394, "learning_rate": 0.000180701826804326, "loss": 1.0403, "step": 2183 }, { "epoch": 0.4058725144025274, "grad_norm": 0.6202577948570251, "learning_rate": 0.0001806845528856805, "loss": 0.9314, "step": 2184 }, { "epoch": 0.4060583534658985, "grad_norm": 0.6377980709075928, "learning_rate": 0.00018066727206591084, "loss": 0.8948, "step": 2185 }, { "epoch": 0.40624419252926963, "grad_norm": 0.5450910925865173, "learning_rate": 0.000180649984346495, "loss": 1.2289, "step": 2186 }, { "epoch": 0.4064300315926408, "grad_norm": 0.6690120697021484, "learning_rate": 0.00018063268972891172, "loss": 1.1064, "step": 2187 }, { "epoch": 0.4066158706560119, "grad_norm": 0.9592549800872803, "learning_rate": 0.00018061538821464022, "loss": 1.4348, "step": 2188 }, { "epoch": 0.406801709719383, "grad_norm": 0.5875871181488037, "learning_rate": 0.0001805980798051603, "loss": 1.0296, "step": 2189 }, { "epoch": 0.4069875487827541, "grad_norm": 0.5648946166038513, "learning_rate": 0.00018058076450195248, "loss": 1.1743, "step": 2190 }, { "epoch": 0.4071733878461253, "grad_norm": 0.5465866923332214, "learning_rate": 0.0001805634423064977, "loss": 1.2059, "step": 2191 }, { "epoch": 0.4073592269094964, "grad_norm": 0.6858041882514954, "learning_rate": 0.00018054611322027752, "loss": 1.1359, "step": 2192 }, { "epoch": 0.4075450659728675, "grad_norm": 0.5024155974388123, "learning_rate": 0.00018052877724477424, "loss": 0.9183, "step": 2193 }, { "epoch": 0.4077309050362386, "grad_norm": 1.0227012634277344, "learning_rate": 0.00018051143438147058, "loss": 1.3601, "step": 2194 }, { "epoch": 0.40791674409960976, "grad_norm": 
0.6042284369468689, "learning_rate": 0.00018049408463184996, "loss": 1.1513, "step": 2195 }, { "epoch": 0.40810258316298087, "grad_norm": 0.6936082243919373, "learning_rate": 0.00018047672799739628, "loss": 1.0927, "step": 2196 }, { "epoch": 0.408288422226352, "grad_norm": 0.6657347083091736, "learning_rate": 0.00018045936447959413, "loss": 0.9907, "step": 2197 }, { "epoch": 0.4084742612897231, "grad_norm": 0.5620197057723999, "learning_rate": 0.00018044199407992863, "loss": 1.1356, "step": 2198 }, { "epoch": 0.40866010035309425, "grad_norm": 0.5103034973144531, "learning_rate": 0.00018042461679988555, "loss": 0.9557, "step": 2199 }, { "epoch": 0.40884593941646535, "grad_norm": 0.5765652656555176, "learning_rate": 0.00018040723264095112, "loss": 1.215, "step": 2200 }, { "epoch": 0.40903177847983646, "grad_norm": 0.568516194820404, "learning_rate": 0.00018038984160461237, "loss": 0.9904, "step": 2201 }, { "epoch": 0.40921761754320757, "grad_norm": 0.6059340834617615, "learning_rate": 0.00018037244369235666, "loss": 1.1956, "step": 2202 }, { "epoch": 0.4094034566065787, "grad_norm": 0.5740835070610046, "learning_rate": 0.00018035503890567216, "loss": 1.0662, "step": 2203 }, { "epoch": 0.40958929566994984, "grad_norm": 8.388848304748535, "learning_rate": 0.00018033762724604753, "loss": 3.5506, "step": 2204 }, { "epoch": 0.40977513473332094, "grad_norm": 0.6180132031440735, "learning_rate": 0.00018032020871497196, "loss": 0.9254, "step": 2205 }, { "epoch": 0.40996097379669205, "grad_norm": 0.7836258411407471, "learning_rate": 0.00018030278331393539, "loss": 1.1685, "step": 2206 }, { "epoch": 0.41014681286006316, "grad_norm": 0.6342977285385132, "learning_rate": 0.0001802853510444282, "loss": 1.1987, "step": 2207 }, { "epoch": 0.4103326519234343, "grad_norm": 0.4958232045173645, "learning_rate": 0.0001802679119079414, "loss": 0.7183, "step": 2208 }, { "epoch": 0.41051849098680543, "grad_norm": 0.48592817783355713, "learning_rate": 0.0001802504659059666, "loss": 0.8992, 
"step": 2209 }, { "epoch": 0.41070433005017654, "grad_norm": 0.5504721999168396, "learning_rate": 0.00018023301303999602, "loss": 0.8599, "step": 2210 }, { "epoch": 0.41089016911354764, "grad_norm": 0.5736424326896667, "learning_rate": 0.00018021555331152245, "loss": 1.0319, "step": 2211 }, { "epoch": 0.4110760081769188, "grad_norm": 0.751387357711792, "learning_rate": 0.00018019808672203923, "loss": 1.0879, "step": 2212 }, { "epoch": 0.4112618472402899, "grad_norm": 0.5974605083465576, "learning_rate": 0.0001801806132730403, "loss": 1.209, "step": 2213 }, { "epoch": 0.411447686303661, "grad_norm": 0.516734778881073, "learning_rate": 0.00018016313296602022, "loss": 1.0002, "step": 2214 }, { "epoch": 0.4116335253670321, "grad_norm": 0.5020787715911865, "learning_rate": 0.00018014564580247415, "loss": 0.9484, "step": 2215 }, { "epoch": 0.4118193644304033, "grad_norm": 0.5782486796379089, "learning_rate": 0.00018012815178389775, "loss": 1.1124, "step": 2216 }, { "epoch": 0.4120052034937744, "grad_norm": 0.6170997619628906, "learning_rate": 0.00018011065091178737, "loss": 1.0163, "step": 2217 }, { "epoch": 0.4121910425571455, "grad_norm": 0.5772647261619568, "learning_rate": 0.00018009314318763984, "loss": 1.1784, "step": 2218 }, { "epoch": 0.4123768816205166, "grad_norm": 0.5786311626434326, "learning_rate": 0.00018007562861295273, "loss": 1.0668, "step": 2219 }, { "epoch": 0.4125627206838878, "grad_norm": 0.602414071559906, "learning_rate": 0.000180058107189224, "loss": 1.2294, "step": 2220 }, { "epoch": 0.4127485597472589, "grad_norm": 0.5060234069824219, "learning_rate": 0.00018004057891795236, "loss": 0.9069, "step": 2221 }, { "epoch": 0.41293439881063, "grad_norm": 0.7384883761405945, "learning_rate": 0.000180023043800637, "loss": 1.0488, "step": 2222 }, { "epoch": 0.4131202378740011, "grad_norm": 0.5549197196960449, "learning_rate": 0.0001800055018387777, "loss": 1.0974, "step": 2223 }, { "epoch": 0.41330607693737226, "grad_norm": 0.6316516995429993, 
"learning_rate": 0.00017998795303387496, "loss": 1.2475, "step": 2224 }, { "epoch": 0.41349191600074336, "grad_norm": 0.6400758028030396, "learning_rate": 0.00017997039738742974, "loss": 1.186, "step": 2225 }, { "epoch": 0.41367775506411447, "grad_norm": 0.637119710445404, "learning_rate": 0.00017995283490094356, "loss": 1.2223, "step": 2226 }, { "epoch": 0.4138635941274856, "grad_norm": 0.5269392728805542, "learning_rate": 0.00017993526557591864, "loss": 1.0976, "step": 2227 }, { "epoch": 0.41404943319085674, "grad_norm": 0.6631988883018494, "learning_rate": 0.00017991768941385766, "loss": 1.0029, "step": 2228 }, { "epoch": 0.41423527225422785, "grad_norm": 0.6144427061080933, "learning_rate": 0.000179900106416264, "loss": 0.903, "step": 2229 }, { "epoch": 0.41442111131759896, "grad_norm": 2.708913564682007, "learning_rate": 0.00017988251658464154, "loss": 2.9746, "step": 2230 }, { "epoch": 0.41460695038097006, "grad_norm": 0.5651253461837769, "learning_rate": 0.0001798649199204948, "loss": 0.9768, "step": 2231 }, { "epoch": 0.4147927894443412, "grad_norm": 0.5956056118011475, "learning_rate": 0.00017984731642532882, "loss": 0.9158, "step": 2232 }, { "epoch": 0.41497862850771233, "grad_norm": 0.6976494193077087, "learning_rate": 0.00017982970610064933, "loss": 1.1362, "step": 2233 }, { "epoch": 0.41516446757108344, "grad_norm": 0.6097948551177979, "learning_rate": 0.00017981208894796253, "loss": 0.9367, "step": 2234 }, { "epoch": 0.41535030663445455, "grad_norm": 0.6046789288520813, "learning_rate": 0.00017979446496877528, "loss": 1.3803, "step": 2235 }, { "epoch": 0.4155361456978257, "grad_norm": 0.5067242383956909, "learning_rate": 0.00017977683416459494, "loss": 0.7473, "step": 2236 }, { "epoch": 0.4157219847611968, "grad_norm": 0.5810707807540894, "learning_rate": 0.0001797591965369296, "loss": 1.0738, "step": 2237 }, { "epoch": 0.4159078238245679, "grad_norm": 0.4900633990764618, "learning_rate": 0.00017974155208728778, "loss": 1.023, "step": 2238 }, { 
"epoch": 0.41609366288793903, "grad_norm": 2.034982204437256, "learning_rate": 0.00017972390081717865, "loss": 2.4168, "step": 2239 }, { "epoch": 0.41627950195131014, "grad_norm": 0.5499010682106018, "learning_rate": 0.000179706242728112, "loss": 1.1621, "step": 2240 }, { "epoch": 0.4164653410146813, "grad_norm": 0.6297591328620911, "learning_rate": 0.0001796885778215981, "loss": 1.1059, "step": 2241 }, { "epoch": 0.4166511800780524, "grad_norm": 0.601097822189331, "learning_rate": 0.00017967090609914796, "loss": 1.0177, "step": 2242 }, { "epoch": 0.4168370191414235, "grad_norm": 0.6372743844985962, "learning_rate": 0.000179653227562273, "loss": 1.1184, "step": 2243 }, { "epoch": 0.4170228582047946, "grad_norm": 0.6114941239356995, "learning_rate": 0.00017963554221248534, "loss": 1.0708, "step": 2244 }, { "epoch": 0.4172086972681658, "grad_norm": 0.5649812817573547, "learning_rate": 0.00017961785005129766, "loss": 0.8418, "step": 2245 }, { "epoch": 0.4173945363315369, "grad_norm": 0.632774829864502, "learning_rate": 0.00017960015108022318, "loss": 1.0592, "step": 2246 }, { "epoch": 0.417580375394908, "grad_norm": 0.6083325743675232, "learning_rate": 0.00017958244530077573, "loss": 1.0943, "step": 2247 }, { "epoch": 0.4177662144582791, "grad_norm": 0.6060488224029541, "learning_rate": 0.00017956473271446972, "loss": 1.0025, "step": 2248 }, { "epoch": 0.41795205352165027, "grad_norm": 0.6035066843032837, "learning_rate": 0.00017954701332282018, "loss": 1.2486, "step": 2249 }, { "epoch": 0.4181378925850214, "grad_norm": 0.536759614944458, "learning_rate": 0.00017952928712734268, "loss": 0.9172, "step": 2250 }, { "epoch": 0.4183237316483925, "grad_norm": 0.6202712059020996, "learning_rate": 0.00017951155412955331, "loss": 1.1223, "step": 2251 }, { "epoch": 0.4185095707117636, "grad_norm": 0.5908938646316528, "learning_rate": 0.00017949381433096892, "loss": 1.2079, "step": 2252 }, { "epoch": 0.41869540977513475, "grad_norm": 0.6541932225227356, "learning_rate": 
0.00017947606773310678, "loss": 1.211, "step": 2253 }, { "epoch": 0.41888124883850586, "grad_norm": 0.5047724843025208, "learning_rate": 0.00017945831433748477, "loss": 0.8668, "step": 2254 }, { "epoch": 0.41906708790187697, "grad_norm": 0.616878867149353, "learning_rate": 0.0001794405541456214, "loss": 1.1663, "step": 2255 }, { "epoch": 0.4192529269652481, "grad_norm": 0.5981701016426086, "learning_rate": 0.00017942278715903575, "loss": 1.1325, "step": 2256 }, { "epoch": 0.41943876602861924, "grad_norm": 0.5608638525009155, "learning_rate": 0.0001794050133792475, "loss": 0.9859, "step": 2257 }, { "epoch": 0.41962460509199034, "grad_norm": 0.7004113793373108, "learning_rate": 0.00017938723280777678, "loss": 1.0358, "step": 2258 }, { "epoch": 0.41981044415536145, "grad_norm": 0.5976504683494568, "learning_rate": 0.00017936944544614449, "loss": 1.0715, "step": 2259 }, { "epoch": 0.41999628321873256, "grad_norm": 0.49607938528060913, "learning_rate": 0.00017935165129587198, "loss": 1.0598, "step": 2260 }, { "epoch": 0.4201821222821037, "grad_norm": 0.6711708903312683, "learning_rate": 0.00017933385035848125, "loss": 1.3064, "step": 2261 }, { "epoch": 0.4203679613454748, "grad_norm": 0.6147506237030029, "learning_rate": 0.00017931604263549483, "loss": 1.1359, "step": 2262 }, { "epoch": 0.42055380040884593, "grad_norm": 0.5291450619697571, "learning_rate": 0.00017929822812843584, "loss": 1.0914, "step": 2263 }, { "epoch": 0.42073963947221704, "grad_norm": 0.5078874826431274, "learning_rate": 0.00017928040683882804, "loss": 1.0915, "step": 2264 }, { "epoch": 0.4209254785355882, "grad_norm": 0.5273508429527283, "learning_rate": 0.00017926257876819566, "loss": 1.1543, "step": 2265 }, { "epoch": 0.4211113175989593, "grad_norm": 0.6677791476249695, "learning_rate": 0.0001792447439180636, "loss": 0.9763, "step": 2266 }, { "epoch": 0.4212971566623304, "grad_norm": 0.5593759417533875, "learning_rate": 0.00017922690228995737, "loss": 1.1768, "step": 2267 }, { "epoch": 
0.4214829957257015, "grad_norm": 0.6982876062393188, "learning_rate": 0.00017920905388540295, "loss": 1.1277, "step": 2268 }, { "epoch": 0.4216688347890727, "grad_norm": 0.5563878417015076, "learning_rate": 0.00017919119870592694, "loss": 0.964, "step": 2269 }, { "epoch": 0.4218546738524438, "grad_norm": 0.7023888230323792, "learning_rate": 0.00017917333675305654, "loss": 1.2069, "step": 2270 }, { "epoch": 0.4220405129158149, "grad_norm": 0.8888300061225891, "learning_rate": 0.00017915546802831957, "loss": 1.1435, "step": 2271 }, { "epoch": 0.422226351979186, "grad_norm": 0.6172018647193909, "learning_rate": 0.00017913759253324432, "loss": 1.2328, "step": 2272 }, { "epoch": 0.4224121910425572, "grad_norm": 0.6272783875465393, "learning_rate": 0.00017911971026935973, "loss": 1.0586, "step": 2273 }, { "epoch": 0.4225980301059283, "grad_norm": 0.6125234961509705, "learning_rate": 0.00017910182123819532, "loss": 1.0063, "step": 2274 }, { "epoch": 0.4227838691692994, "grad_norm": 0.6082481741905212, "learning_rate": 0.0001790839254412812, "loss": 1.1442, "step": 2275 }, { "epoch": 0.4229697082326705, "grad_norm": 0.6373506188392639, "learning_rate": 0.000179066022880148, "loss": 1.0645, "step": 2276 }, { "epoch": 0.4231555472960416, "grad_norm": 0.5189694166183472, "learning_rate": 0.00017904811355632698, "loss": 1.2191, "step": 2277 }, { "epoch": 0.42334138635941276, "grad_norm": 0.5406830906867981, "learning_rate": 0.00017903019747134998, "loss": 1.0508, "step": 2278 }, { "epoch": 0.42352722542278387, "grad_norm": 0.5542166829109192, "learning_rate": 0.00017901227462674933, "loss": 1.0213, "step": 2279 }, { "epoch": 0.423713064486155, "grad_norm": 0.6017323136329651, "learning_rate": 0.0001789943450240581, "loss": 1.0446, "step": 2280 }, { "epoch": 0.4238989035495261, "grad_norm": 0.46722128987312317, "learning_rate": 0.00017897640866480983, "loss": 0.731, "step": 2281 }, { "epoch": 0.42408474261289725, "grad_norm": 0.6106616854667664, "learning_rate": 
0.0001789584655505386, "loss": 1.2458, "step": 2282 }, { "epoch": 0.42427058167626835, "grad_norm": 0.7112687230110168, "learning_rate": 0.00017894051568277918, "loss": 1.3797, "step": 2283 }, { "epoch": 0.42445642073963946, "grad_norm": 0.5603358149528503, "learning_rate": 0.00017892255906306684, "loss": 1.1287, "step": 2284 }, { "epoch": 0.42464225980301057, "grad_norm": 0.5825716853141785, "learning_rate": 0.00017890459569293746, "loss": 1.2238, "step": 2285 }, { "epoch": 0.42482809886638173, "grad_norm": 0.6225450038909912, "learning_rate": 0.00017888662557392744, "loss": 1.1294, "step": 2286 }, { "epoch": 0.42501393792975284, "grad_norm": 0.5144187808036804, "learning_rate": 0.00017886864870757387, "loss": 1.0482, "step": 2287 }, { "epoch": 0.42519977699312395, "grad_norm": 0.6005070209503174, "learning_rate": 0.0001788506650954143, "loss": 0.9388, "step": 2288 }, { "epoch": 0.42538561605649505, "grad_norm": 0.7564705014228821, "learning_rate": 0.00017883267473898693, "loss": 1.1563, "step": 2289 }, { "epoch": 0.4255714551198662, "grad_norm": 0.600664496421814, "learning_rate": 0.00017881467763983052, "loss": 1.2571, "step": 2290 }, { "epoch": 0.4257572941832373, "grad_norm": 0.5442487001419067, "learning_rate": 0.0001787966737994844, "loss": 1.1045, "step": 2291 }, { "epoch": 0.42594313324660843, "grad_norm": 0.3654262125492096, "learning_rate": 0.00017877866321948843, "loss": 0.6109, "step": 2292 }, { "epoch": 0.42612897230997954, "grad_norm": 0.605783224105835, "learning_rate": 0.00017876064590138316, "loss": 1.3888, "step": 2293 }, { "epoch": 0.4263148113733507, "grad_norm": 0.5427509546279907, "learning_rate": 0.00017874262184670961, "loss": 1.1819, "step": 2294 }, { "epoch": 0.4265006504367218, "grad_norm": 0.5091795921325684, "learning_rate": 0.00017872459105700944, "loss": 1.0144, "step": 2295 }, { "epoch": 0.4266864895000929, "grad_norm": 0.7157513499259949, "learning_rate": 0.00017870655353382486, "loss": 1.02, "step": 2296 }, { "epoch": 
0.426872328563464, "grad_norm": 0.6378082633018494, "learning_rate": 0.00017868850927869862, "loss": 1.0082, "step": 2297 }, { "epoch": 0.4270581676268352, "grad_norm": 0.5989370942115784, "learning_rate": 0.00017867045829317416, "loss": 1.0008, "step": 2298 }, { "epoch": 0.4272440066902063, "grad_norm": 0.5860329866409302, "learning_rate": 0.00017865240057879532, "loss": 1.1185, "step": 2299 }, { "epoch": 0.4274298457535774, "grad_norm": 0.6344106793403625, "learning_rate": 0.0001786343361371067, "loss": 1.0703, "step": 2300 }, { "epoch": 0.4276156848169485, "grad_norm": 0.6763262152671814, "learning_rate": 0.00017861626496965333, "loss": 1.2203, "step": 2301 }, { "epoch": 0.42780152388031967, "grad_norm": 0.6049442887306213, "learning_rate": 0.00017859818707798093, "loss": 1.0438, "step": 2302 }, { "epoch": 0.4279873629436908, "grad_norm": 0.5617985725402832, "learning_rate": 0.0001785801024636357, "loss": 1.1025, "step": 2303 }, { "epoch": 0.4281732020070619, "grad_norm": 0.7062197923660278, "learning_rate": 0.00017856201112816447, "loss": 1.3451, "step": 2304 }, { "epoch": 0.428359041070433, "grad_norm": 0.6399662494659424, "learning_rate": 0.00017854391307311468, "loss": 0.9795, "step": 2305 }, { "epoch": 0.42854488013380415, "grad_norm": 0.5584330558776855, "learning_rate": 0.0001785258083000342, "loss": 1.1063, "step": 2306 }, { "epoch": 0.42873071919717526, "grad_norm": 0.525244414806366, "learning_rate": 0.0001785076968104716, "loss": 1.1579, "step": 2307 }, { "epoch": 0.42891655826054637, "grad_norm": 0.6120824813842773, "learning_rate": 0.00017848957860597606, "loss": 1.0426, "step": 2308 }, { "epoch": 0.4291023973239175, "grad_norm": 0.5395716428756714, "learning_rate": 0.00017847145368809724, "loss": 0.7092, "step": 2309 }, { "epoch": 0.42928823638728864, "grad_norm": 0.520346999168396, "learning_rate": 0.00017845332205838533, "loss": 0.9699, "step": 2310 }, { "epoch": 0.42947407545065974, "grad_norm": 0.5615527629852295, "learning_rate": 
0.00017843518371839126, "loss": 1.228, "step": 2311 }, { "epoch": 0.42965991451403085, "grad_norm": 0.6460365056991577, "learning_rate": 0.00017841703866966642, "loss": 1.1718, "step": 2312 }, { "epoch": 0.42984575357740196, "grad_norm": 0.5339592099189758, "learning_rate": 0.00017839888691376275, "loss": 0.8472, "step": 2313 }, { "epoch": 0.43003159264077306, "grad_norm": 0.5476655960083008, "learning_rate": 0.00017838072845223284, "loss": 1.0667, "step": 2314 }, { "epoch": 0.4302174317041442, "grad_norm": 0.5243486166000366, "learning_rate": 0.00017836256328662985, "loss": 1.0041, "step": 2315 }, { "epoch": 0.43040327076751533, "grad_norm": 0.7483263611793518, "learning_rate": 0.0001783443914185074, "loss": 0.9907, "step": 2316 }, { "epoch": 0.43058910983088644, "grad_norm": 7.6981706619262695, "learning_rate": 0.0001783262128494199, "loss": 3.1886, "step": 2317 }, { "epoch": 0.43077494889425755, "grad_norm": 0.5558903813362122, "learning_rate": 0.0001783080275809221, "loss": 1.128, "step": 2318 }, { "epoch": 0.4309607879576287, "grad_norm": 0.5539029240608215, "learning_rate": 0.00017828983561456941, "loss": 1.0576, "step": 2319 }, { "epoch": 0.4311466270209998, "grad_norm": 0.6026525497436523, "learning_rate": 0.00017827163695191794, "loss": 1.1955, "step": 2320 }, { "epoch": 0.4313324660843709, "grad_norm": 0.4793394207954407, "learning_rate": 0.00017825343159452416, "loss": 0.9578, "step": 2321 }, { "epoch": 0.43151830514774203, "grad_norm": 0.6857821345329285, "learning_rate": 0.00017823521954394526, "loss": 1.3858, "step": 2322 }, { "epoch": 0.4317041442111132, "grad_norm": 0.6874255537986755, "learning_rate": 0.00017821700080173896, "loss": 0.9156, "step": 2323 }, { "epoch": 0.4318899832744843, "grad_norm": 0.5313177704811096, "learning_rate": 0.0001781987753694635, "loss": 1.0231, "step": 2324 }, { "epoch": 0.4320758223378554, "grad_norm": 0.7569800615310669, "learning_rate": 0.0001781805432486778, "loss": 1.2913, "step": 2325 }, { "epoch": 
0.4322616614012265, "grad_norm": 0.5684303641319275, "learning_rate": 0.00017816230444094125, "loss": 1.2554, "step": 2326 }, { "epoch": 0.4324475004645977, "grad_norm": 0.6719896793365479, "learning_rate": 0.00017814405894781387, "loss": 1.3118, "step": 2327 }, { "epoch": 0.4326333395279688, "grad_norm": 0.5125758051872253, "learning_rate": 0.0001781258067708562, "loss": 1.0935, "step": 2328 }, { "epoch": 0.4328191785913399, "grad_norm": 0.6126227974891663, "learning_rate": 0.0001781075479116295, "loss": 1.2605, "step": 2329 }, { "epoch": 0.433005017654711, "grad_norm": 0.44674035906791687, "learning_rate": 0.00017808928237169537, "loss": 0.7494, "step": 2330 }, { "epoch": 0.43319085671808216, "grad_norm": 0.5729489326477051, "learning_rate": 0.00017807101015261616, "loss": 0.8303, "step": 2331 }, { "epoch": 0.43337669578145327, "grad_norm": 0.6711477041244507, "learning_rate": 0.0001780527312559547, "loss": 1.0835, "step": 2332 }, { "epoch": 0.4335625348448244, "grad_norm": 0.5800749659538269, "learning_rate": 0.00017803444568327445, "loss": 1.2225, "step": 2333 }, { "epoch": 0.4337483739081955, "grad_norm": 0.5604079365730286, "learning_rate": 0.00017801615343613942, "loss": 1.0564, "step": 2334 }, { "epoch": 0.43393421297156665, "grad_norm": 0.5705206990242004, "learning_rate": 0.00017799785451611414, "loss": 1.1846, "step": 2335 }, { "epoch": 0.43412005203493775, "grad_norm": 2.446877956390381, "learning_rate": 0.00017797954892476384, "loss": 3.1014, "step": 2336 }, { "epoch": 0.43430589109830886, "grad_norm": 0.5746934413909912, "learning_rate": 0.00017796123666365414, "loss": 1.061, "step": 2337 }, { "epoch": 0.43449173016167997, "grad_norm": 0.5936819314956665, "learning_rate": 0.0001779429177343514, "loss": 1.1547, "step": 2338 }, { "epoch": 0.43467756922505113, "grad_norm": 0.5753687620162964, "learning_rate": 0.0001779245921384224, "loss": 0.9682, "step": 2339 }, { "epoch": 0.43486340828842224, "grad_norm": 0.5654261112213135, "learning_rate": 
0.00017790625987743468, "loss": 1.1469, "step": 2340 }, { "epoch": 0.43504924735179334, "grad_norm": 0.5429582595825195, "learning_rate": 0.00017788792095295615, "loss": 1.069, "step": 2341 }, { "epoch": 0.43523508641516445, "grad_norm": 0.5560867786407471, "learning_rate": 0.00017786957536655539, "loss": 0.9442, "step": 2342 }, { "epoch": 0.4354209254785356, "grad_norm": 0.5235719680786133, "learning_rate": 0.00017785122311980155, "loss": 1.1919, "step": 2343 }, { "epoch": 0.4356067645419067, "grad_norm": 0.5630365610122681, "learning_rate": 0.00017783286421426436, "loss": 1.2617, "step": 2344 }, { "epoch": 0.43579260360527783, "grad_norm": 0.5522767901420593, "learning_rate": 0.00017781449865151406, "loss": 1.0568, "step": 2345 }, { "epoch": 0.43597844266864894, "grad_norm": 0.8357774615287781, "learning_rate": 0.00017779612643312152, "loss": 1.1691, "step": 2346 }, { "epoch": 0.4361642817320201, "grad_norm": 0.6235195994377136, "learning_rate": 0.00017777774756065813, "loss": 0.9429, "step": 2347 }, { "epoch": 0.4363501207953912, "grad_norm": 0.5671910047531128, "learning_rate": 0.0001777593620356959, "loss": 1.0226, "step": 2348 }, { "epoch": 0.4365359598587623, "grad_norm": 0.5591475963592529, "learning_rate": 0.00017774096985980736, "loss": 0.9408, "step": 2349 }, { "epoch": 0.4367217989221334, "grad_norm": 0.5957860350608826, "learning_rate": 0.00017772257103456567, "loss": 1.162, "step": 2350 }, { "epoch": 0.4369076379855045, "grad_norm": 0.6221433281898499, "learning_rate": 0.00017770416556154453, "loss": 1.2008, "step": 2351 }, { "epoch": 0.4370934770488757, "grad_norm": 0.5632703304290771, "learning_rate": 0.0001776857534423181, "loss": 1.2612, "step": 2352 }, { "epoch": 0.4372793161122468, "grad_norm": 0.5558828711509705, "learning_rate": 0.00017766733467846133, "loss": 1.0839, "step": 2353 }, { "epoch": 0.4374651551756179, "grad_norm": 0.532751739025116, "learning_rate": 0.00017764890927154952, "loss": 1.129, "step": 2354 }, { "epoch": 
0.437650994238989, "grad_norm": 0.5031901597976685, "learning_rate": 0.00017763047722315871, "loss": 1.1353, "step": 2355 }, { "epoch": 0.4378368333023602, "grad_norm": 0.6349141597747803, "learning_rate": 0.00017761203853486538, "loss": 1.152, "step": 2356 }, { "epoch": 0.4380226723657313, "grad_norm": 0.6550739407539368, "learning_rate": 0.0001775935932082467, "loss": 1.0943, "step": 2357 }, { "epoch": 0.4382085114291024, "grad_norm": 0.5785501599311829, "learning_rate": 0.00017757514124488023, "loss": 1.1742, "step": 2358 }, { "epoch": 0.4383943504924735, "grad_norm": 0.5431879758834839, "learning_rate": 0.0001775566826463443, "loss": 1.0897, "step": 2359 }, { "epoch": 0.43858018955584466, "grad_norm": 0.5559978485107422, "learning_rate": 0.00017753821741421769, "loss": 1.1531, "step": 2360 }, { "epoch": 0.43876602861921576, "grad_norm": 0.5944271683692932, "learning_rate": 0.00017751974555007974, "loss": 1.0854, "step": 2361 }, { "epoch": 0.43895186768258687, "grad_norm": 0.545141875743866, "learning_rate": 0.0001775012670555104, "loss": 0.986, "step": 2362 }, { "epoch": 0.439137706745958, "grad_norm": 0.6592658758163452, "learning_rate": 0.0001774827819320902, "loss": 0.9982, "step": 2363 }, { "epoch": 0.43932354580932914, "grad_norm": 0.6145473122596741, "learning_rate": 0.00017746429018140024, "loss": 1.0719, "step": 2364 }, { "epoch": 0.43950938487270025, "grad_norm": 0.5475202798843384, "learning_rate": 0.00017744579180502205, "loss": 1.1697, "step": 2365 }, { "epoch": 0.43969522393607136, "grad_norm": 0.5210398435592651, "learning_rate": 0.00017742728680453796, "loss": 1.084, "step": 2366 }, { "epoch": 0.43988106299944246, "grad_norm": 0.6031140685081482, "learning_rate": 0.00017740877518153064, "loss": 1.298, "step": 2367 }, { "epoch": 0.4400669020628136, "grad_norm": 0.588117241859436, "learning_rate": 0.00017739025693758352, "loss": 0.9898, "step": 2368 }, { "epoch": 0.44025274112618473, "grad_norm": 0.5167676210403442, "learning_rate": 
0.00017737173207428043, "loss": 0.836, "step": 2369 }, { "epoch": 0.44043858018955584, "grad_norm": 0.5224547386169434, "learning_rate": 0.00017735320059320589, "loss": 1.0271, "step": 2370 }, { "epoch": 0.44062441925292695, "grad_norm": 0.5996924638748169, "learning_rate": 0.00017733466249594492, "loss": 1.2124, "step": 2371 }, { "epoch": 0.4408102583162981, "grad_norm": 0.6365364789962769, "learning_rate": 0.0001773161177840831, "loss": 1.3201, "step": 2372 }, { "epoch": 0.4409960973796692, "grad_norm": 0.5757020115852356, "learning_rate": 0.00017729756645920664, "loss": 1.0181, "step": 2373 }, { "epoch": 0.4411819364430403, "grad_norm": 0.5585982203483582, "learning_rate": 0.00017727900852290227, "loss": 1.1259, "step": 2374 }, { "epoch": 0.44136777550641143, "grad_norm": 0.7039685845375061, "learning_rate": 0.0001772604439767573, "loss": 1.4213, "step": 2375 }, { "epoch": 0.4415536145697826, "grad_norm": 0.5538169145584106, "learning_rate": 0.00017724187282235955, "loss": 0.9053, "step": 2376 }, { "epoch": 0.4417394536331537, "grad_norm": 0.5275604724884033, "learning_rate": 0.00017722329506129745, "loss": 0.9567, "step": 2377 }, { "epoch": 0.4419252926965248, "grad_norm": 0.5697493553161621, "learning_rate": 0.00017720471069516006, "loss": 1.0802, "step": 2378 }, { "epoch": 0.4421111317598959, "grad_norm": 0.5514183044433594, "learning_rate": 0.00017718611972553688, "loss": 0.9211, "step": 2379 }, { "epoch": 0.4422969708232671, "grad_norm": 0.5721895098686218, "learning_rate": 0.00017716752215401807, "loss": 1.1159, "step": 2380 }, { "epoch": 0.4424828098866382, "grad_norm": 0.5930714011192322, "learning_rate": 0.0001771489179821943, "loss": 1.1339, "step": 2381 }, { "epoch": 0.4426686489500093, "grad_norm": 0.5223318934440613, "learning_rate": 0.00017713030721165684, "loss": 0.889, "step": 2382 }, { "epoch": 0.4428544880133804, "grad_norm": 0.5683040022850037, "learning_rate": 0.0001771116898439975, "loss": 1.2511, "step": 2383 }, { "epoch": 
0.4430403270767515, "grad_norm": 0.5765540599822998, "learning_rate": 0.00017709306588080867, "loss": 0.9842, "step": 2384 }, { "epoch": 0.44322616614012267, "grad_norm": 0.568500280380249, "learning_rate": 0.00017707443532368332, "loss": 1.1352, "step": 2385 }, { "epoch": 0.4434120052034938, "grad_norm": 0.5285328030586243, "learning_rate": 0.00017705579817421492, "loss": 1.0351, "step": 2386 }, { "epoch": 0.4435978442668649, "grad_norm": 0.6016953587532043, "learning_rate": 0.00017703715443399752, "loss": 1.2857, "step": 2387 }, { "epoch": 0.443783683330236, "grad_norm": 0.6040376424789429, "learning_rate": 0.00017701850410462586, "loss": 1.1826, "step": 2388 }, { "epoch": 0.44396952239360715, "grad_norm": 0.600718080997467, "learning_rate": 0.00017699984718769505, "loss": 1.1051, "step": 2389 }, { "epoch": 0.44415536145697826, "grad_norm": 0.6064468026161194, "learning_rate": 0.0001769811836848009, "loss": 1.1616, "step": 2390 }, { "epoch": 0.44434120052034937, "grad_norm": 0.6028340458869934, "learning_rate": 0.00017696251359753976, "loss": 1.1355, "step": 2391 }, { "epoch": 0.4445270395837205, "grad_norm": 0.61343914270401, "learning_rate": 0.00017694383692750843, "loss": 1.2456, "step": 2392 }, { "epoch": 0.44471287864709164, "grad_norm": 0.6040683388710022, "learning_rate": 0.0001769251536763045, "loss": 1.1576, "step": 2393 }, { "epoch": 0.44489871771046274, "grad_norm": 0.5966310501098633, "learning_rate": 0.00017690646384552588, "loss": 1.4009, "step": 2394 }, { "epoch": 0.44508455677383385, "grad_norm": 0.5714512467384338, "learning_rate": 0.0001768877674367712, "loss": 1.0652, "step": 2395 }, { "epoch": 0.44527039583720496, "grad_norm": 0.6570279002189636, "learning_rate": 0.0001768690644516396, "loss": 1.2492, "step": 2396 }, { "epoch": 0.4454562349005761, "grad_norm": 0.5377715229988098, "learning_rate": 0.00017685035489173076, "loss": 1.0869, "step": 2397 }, { "epoch": 0.4456420739639472, "grad_norm": 0.6347850561141968, "learning_rate": 
0.000176831638758645, "loss": 1.1475, "step": 2398 }, { "epoch": 0.44582791302731833, "grad_norm": 0.5425981283187866, "learning_rate": 0.00017681291605398312, "loss": 1.0795, "step": 2399 }, { "epoch": 0.44601375209068944, "grad_norm": 0.6422131061553955, "learning_rate": 0.0001767941867793465, "loss": 1.2814, "step": 2400 }, { "epoch": 0.4461995911540606, "grad_norm": 0.6414985060691833, "learning_rate": 0.00017677545093633713, "loss": 1.0609, "step": 2401 }, { "epoch": 0.4463854302174317, "grad_norm": 0.5891706943511963, "learning_rate": 0.0001767567085265575, "loss": 1.3514, "step": 2402 }, { "epoch": 0.4465712692808028, "grad_norm": 0.5836945176124573, "learning_rate": 0.00017673795955161067, "loss": 1.0508, "step": 2403 }, { "epoch": 0.4467571083441739, "grad_norm": 0.5158883333206177, "learning_rate": 0.00017671920401310032, "loss": 1.0526, "step": 2404 }, { "epoch": 0.4469429474075451, "grad_norm": 0.603083610534668, "learning_rate": 0.00017670044191263063, "loss": 1.0876, "step": 2405 }, { "epoch": 0.4471287864709162, "grad_norm": 0.5200565457344055, "learning_rate": 0.0001766816732518064, "loss": 0.8484, "step": 2406 }, { "epoch": 0.4473146255342873, "grad_norm": 0.6235826015472412, "learning_rate": 0.0001766628980322329, "loss": 1.17, "step": 2407 }, { "epoch": 0.4475004645976584, "grad_norm": 0.6319602727890015, "learning_rate": 0.00017664411625551606, "loss": 0.9634, "step": 2408 }, { "epoch": 0.44768630366102957, "grad_norm": 0.4631809592247009, "learning_rate": 0.0001766253279232623, "loss": 0.8415, "step": 2409 }, { "epoch": 0.4478721427244007, "grad_norm": 0.5904662013053894, "learning_rate": 0.00017660653303707864, "loss": 0.7638, "step": 2410 }, { "epoch": 0.4480579817877718, "grad_norm": 0.5749669075012207, "learning_rate": 0.00017658773159857263, "loss": 0.9454, "step": 2411 }, { "epoch": 0.4482438208511429, "grad_norm": 0.6295116543769836, "learning_rate": 0.00017656892360935243, "loss": 1.256, "step": 2412 }, { "epoch": 0.44842965991451406, 
"grad_norm": 0.5321747660636902, "learning_rate": 0.00017655010907102666, "loss": 1.1843, "step": 2413 }, { "epoch": 0.44861549897788516, "grad_norm": 0.5966020822525024, "learning_rate": 0.00017653128798520467, "loss": 1.1913, "step": 2414 }, { "epoch": 0.44880133804125627, "grad_norm": 0.615644633769989, "learning_rate": 0.0001765124603534962, "loss": 1.0032, "step": 2415 }, { "epoch": 0.4489871771046274, "grad_norm": 0.5704545378684998, "learning_rate": 0.0001764936261775116, "loss": 0.9007, "step": 2416 }, { "epoch": 0.44917301616799854, "grad_norm": 0.5010755658149719, "learning_rate": 0.00017647478545886186, "loss": 1.116, "step": 2417 }, { "epoch": 0.44935885523136965, "grad_norm": 0.6472548246383667, "learning_rate": 0.0001764559381991584, "loss": 1.0135, "step": 2418 }, { "epoch": 0.44954469429474075, "grad_norm": 0.49504557251930237, "learning_rate": 0.00017643708440001334, "loss": 1.0561, "step": 2419 }, { "epoch": 0.44973053335811186, "grad_norm": 0.724803626537323, "learning_rate": 0.00017641822406303922, "loss": 0.989, "step": 2420 }, { "epoch": 0.44991637242148297, "grad_norm": 0.5256615877151489, "learning_rate": 0.00017639935718984925, "loss": 1.2047, "step": 2421 }, { "epoch": 0.45010221148485413, "grad_norm": 0.6002311706542969, "learning_rate": 0.00017638048378205713, "loss": 1.1932, "step": 2422 }, { "epoch": 0.45028805054822524, "grad_norm": 0.6114380955696106, "learning_rate": 0.00017636160384127715, "loss": 1.1895, "step": 2423 }, { "epoch": 0.45047388961159635, "grad_norm": 0.6021878123283386, "learning_rate": 0.0001763427173691242, "loss": 1.2897, "step": 2424 }, { "epoch": 0.45065972867496745, "grad_norm": 0.6550626754760742, "learning_rate": 0.00017632382436721358, "loss": 0.9298, "step": 2425 }, { "epoch": 0.4508455677383386, "grad_norm": 0.6041695475578308, "learning_rate": 0.00017630492483716132, "loss": 1.1527, "step": 2426 }, { "epoch": 0.4510314068017097, "grad_norm": 0.6602818369865417, "learning_rate": 0.00017628601878058394, 
"loss": 1.1716, "step": 2427 }, { "epoch": 0.45121724586508083, "grad_norm": 0.5621387958526611, "learning_rate": 0.00017626710619909848, "loss": 1.1978, "step": 2428 }, { "epoch": 0.45140308492845194, "grad_norm": 0.5593321919441223, "learning_rate": 0.0001762481870943226, "loss": 1.0507, "step": 2429 }, { "epoch": 0.4515889239918231, "grad_norm": 0.5860761404037476, "learning_rate": 0.00017622926146787447, "loss": 1.3819, "step": 2430 }, { "epoch": 0.4517747630551942, "grad_norm": 0.5836736559867859, "learning_rate": 0.00017621032932137287, "loss": 0.9615, "step": 2431 }, { "epoch": 0.4519606021185653, "grad_norm": 0.5087476372718811, "learning_rate": 0.00017619139065643707, "loss": 1.0476, "step": 2432 }, { "epoch": 0.4521464411819364, "grad_norm": 0.5831040740013123, "learning_rate": 0.00017617244547468697, "loss": 0.8807, "step": 2433 }, { "epoch": 0.4523322802453076, "grad_norm": 0.5577731132507324, "learning_rate": 0.00017615349377774297, "loss": 1.1035, "step": 2434 }, { "epoch": 0.4525181193086787, "grad_norm": 0.6123347878456116, "learning_rate": 0.00017613453556722608, "loss": 1.0745, "step": 2435 }, { "epoch": 0.4527039583720498, "grad_norm": 0.5789678692817688, "learning_rate": 0.0001761155708447578, "loss": 1.1167, "step": 2436 }, { "epoch": 0.4528897974354209, "grad_norm": 0.59694504737854, "learning_rate": 0.00017609659961196022, "loss": 1.0494, "step": 2437 }, { "epoch": 0.45307563649879207, "grad_norm": 0.709152340888977, "learning_rate": 0.00017607762187045605, "loss": 0.9394, "step": 2438 }, { "epoch": 0.4532614755621632, "grad_norm": 0.4953658878803253, "learning_rate": 0.00017605863762186843, "loss": 0.9409, "step": 2439 }, { "epoch": 0.4534473146255343, "grad_norm": 0.5981800556182861, "learning_rate": 0.00017603964686782115, "loss": 1.3633, "step": 2440 }, { "epoch": 0.4536331536889054, "grad_norm": 0.604300320148468, "learning_rate": 0.00017602064960993853, "loss": 0.8273, "step": 2441 }, { "epoch": 0.45381899275227655, "grad_norm": 
0.5925197601318359, "learning_rate": 0.00017600164584984546, "loss": 1.0395, "step": 2442 }, { "epoch": 0.45400483181564766, "grad_norm": 0.5409677028656006, "learning_rate": 0.00017598263558916735, "loss": 0.9716, "step": 2443 }, { "epoch": 0.45419067087901877, "grad_norm": 0.6586957573890686, "learning_rate": 0.0001759636188295302, "loss": 0.854, "step": 2444 }, { "epoch": 0.4543765099423899, "grad_norm": 0.6706559062004089, "learning_rate": 0.00017594459557256054, "loss": 1.3125, "step": 2445 }, { "epoch": 0.45456234900576103, "grad_norm": 0.6373844742774963, "learning_rate": 0.00017592556581988547, "loss": 1.1909, "step": 2446 }, { "epoch": 0.45474818806913214, "grad_norm": 0.5949018597602844, "learning_rate": 0.0001759065295731327, "loss": 1.0114, "step": 2447 }, { "epoch": 0.45493402713250325, "grad_norm": 0.6161789298057556, "learning_rate": 0.00017588748683393038, "loss": 1.2209, "step": 2448 }, { "epoch": 0.45511986619587436, "grad_norm": 0.6025707721710205, "learning_rate": 0.0001758684376039073, "loss": 1.1802, "step": 2449 }, { "epoch": 0.4553057052592455, "grad_norm": 2.4854700565338135, "learning_rate": 0.0001758493818846928, "loss": 2.2678, "step": 2450 }, { "epoch": 0.4554915443226166, "grad_norm": 0.6085222363471985, "learning_rate": 0.0001758303196779167, "loss": 1.1835, "step": 2451 }, { "epoch": 0.45567738338598773, "grad_norm": 0.6468694806098938, "learning_rate": 0.00017581125098520945, "loss": 1.1857, "step": 2452 }, { "epoch": 0.45586322244935884, "grad_norm": 0.6589061617851257, "learning_rate": 0.0001757921758082021, "loss": 0.9729, "step": 2453 }, { "epoch": 0.45604906151273, "grad_norm": 0.7410516142845154, "learning_rate": 0.0001757730941485261, "loss": 0.9728, "step": 2454 }, { "epoch": 0.4562349005761011, "grad_norm": 0.5850430130958557, "learning_rate": 0.00017575400600781363, "loss": 0.9746, "step": 2455 }, { "epoch": 0.4564207396394722, "grad_norm": 0.4909527599811554, "learning_rate": 0.00017573491138769728, "loss": 0.9898, 
"step": 2456 }, { "epoch": 0.4566065787028433, "grad_norm": 2.1568737030029297, "learning_rate": 0.0001757158102898103, "loss": 1.937, "step": 2457 }, { "epoch": 0.45679241776621443, "grad_norm": 0.5108540654182434, "learning_rate": 0.0001756967027157864, "loss": 0.7381, "step": 2458 }, { "epoch": 0.4569782568295856, "grad_norm": 0.5407970547676086, "learning_rate": 0.0001756775886672599, "loss": 0.8545, "step": 2459 }, { "epoch": 0.4571640958929567, "grad_norm": 0.6552363634109497, "learning_rate": 0.00017565846814586572, "loss": 1.2524, "step": 2460 }, { "epoch": 0.4573499349563278, "grad_norm": 0.5322883129119873, "learning_rate": 0.0001756393411532392, "loss": 1.1172, "step": 2461 }, { "epoch": 0.4575357740196989, "grad_norm": 0.7922711968421936, "learning_rate": 0.00017562020769101638, "loss": 1.1572, "step": 2462 }, { "epoch": 0.4577216130830701, "grad_norm": 0.5557255744934082, "learning_rate": 0.00017560106776083376, "loss": 1.0438, "step": 2463 }, { "epoch": 0.4579074521464412, "grad_norm": 0.5572389960289001, "learning_rate": 0.00017558192136432843, "loss": 1.0503, "step": 2464 }, { "epoch": 0.4580932912098123, "grad_norm": 0.5063178539276123, "learning_rate": 0.00017556276850313798, "loss": 1.2155, "step": 2465 }, { "epoch": 0.4582791302731834, "grad_norm": 0.6309200525283813, "learning_rate": 0.00017554360917890064, "loss": 1.0558, "step": 2466 }, { "epoch": 0.45846496933655456, "grad_norm": 0.6432199478149414, "learning_rate": 0.00017552444339325514, "loss": 1.1363, "step": 2467 }, { "epoch": 0.45865080839992567, "grad_norm": 0.583766758441925, "learning_rate": 0.00017550527114784073, "loss": 1.1827, "step": 2468 }, { "epoch": 0.4588366474632968, "grad_norm": 0.5922523736953735, "learning_rate": 0.00017548609244429734, "loss": 1.1427, "step": 2469 }, { "epoch": 0.4590224865266679, "grad_norm": 0.6233006715774536, "learning_rate": 0.0001754669072842653, "loss": 1.1008, "step": 2470 }, { "epoch": 0.45920832559003905, "grad_norm": 0.5502414703369141, 
"learning_rate": 0.00017544771566938559, "loss": 1.199, "step": 2471 }, { "epoch": 0.45939416465341015, "grad_norm": 0.7010074257850647, "learning_rate": 0.00017542851760129965, "loss": 1.1342, "step": 2472 }, { "epoch": 0.45958000371678126, "grad_norm": 0.5409691333770752, "learning_rate": 0.0001754093130816496, "loss": 0.8252, "step": 2473 }, { "epoch": 0.45976584278015237, "grad_norm": 0.6597109436988831, "learning_rate": 0.00017539010211207806, "loss": 1.0491, "step": 2474 }, { "epoch": 0.45995168184352353, "grad_norm": 0.5203375816345215, "learning_rate": 0.00017537088469422808, "loss": 1.0829, "step": 2475 }, { "epoch": 0.46013752090689464, "grad_norm": 0.6410231590270996, "learning_rate": 0.00017535166082974351, "loss": 1.0531, "step": 2476 }, { "epoch": 0.46032335997026574, "grad_norm": 0.6432588696479797, "learning_rate": 0.00017533243052026846, "loss": 1.0289, "step": 2477 }, { "epoch": 0.46050919903363685, "grad_norm": 0.6830189228057861, "learning_rate": 0.00017531319376744783, "loss": 0.9012, "step": 2478 }, { "epoch": 0.460695038097008, "grad_norm": 0.6746105551719666, "learning_rate": 0.000175293950572927, "loss": 1.2264, "step": 2479 }, { "epoch": 0.4608808771603791, "grad_norm": 0.6015942692756653, "learning_rate": 0.00017527470093835182, "loss": 1.0476, "step": 2480 }, { "epoch": 0.46106671622375023, "grad_norm": 0.6453880667686462, "learning_rate": 0.00017525544486536877, "loss": 1.0024, "step": 2481 }, { "epoch": 0.46125255528712134, "grad_norm": 0.5611870884895325, "learning_rate": 0.0001752361823556249, "loss": 1.1538, "step": 2482 }, { "epoch": 0.4614383943504925, "grad_norm": 0.5102313756942749, "learning_rate": 0.00017521691341076774, "loss": 0.8601, "step": 2483 }, { "epoch": 0.4616242334138636, "grad_norm": 0.5948174595832825, "learning_rate": 0.00017519763803244538, "loss": 1.2159, "step": 2484 }, { "epoch": 0.4618100724772347, "grad_norm": 0.523780345916748, "learning_rate": 0.00017517835622230654, "loss": 0.8715, "step": 2485 }, { 
"epoch": 0.4619959115406058, "grad_norm": 0.5988032221794128, "learning_rate": 0.0001751590679820004, "loss": 1.208, "step": 2486 }, { "epoch": 0.462181750603977, "grad_norm": 0.54287189245224, "learning_rate": 0.00017513977331317678, "loss": 1.1722, "step": 2487 }, { "epoch": 0.4623675896673481, "grad_norm": 0.5695911645889282, "learning_rate": 0.0001751204722174859, "loss": 1.0592, "step": 2488 }, { "epoch": 0.4625534287307192, "grad_norm": 0.606904149055481, "learning_rate": 0.00017510116469657868, "loss": 1.1955, "step": 2489 }, { "epoch": 0.4627392677940903, "grad_norm": 0.5442674160003662, "learning_rate": 0.0001750818507521065, "loss": 0.9511, "step": 2490 }, { "epoch": 0.46292510685746147, "grad_norm": 0.6489145755767822, "learning_rate": 0.0001750625303857214, "loss": 1.2446, "step": 2491 }, { "epoch": 0.4631109459208326, "grad_norm": 0.5138381123542786, "learning_rate": 0.00017504320359907586, "loss": 0.8772, "step": 2492 }, { "epoch": 0.4632967849842037, "grad_norm": 0.5864812135696411, "learning_rate": 0.0001750238703938229, "loss": 1.0736, "step": 2493 }, { "epoch": 0.4634826240475748, "grad_norm": 0.5524820685386658, "learning_rate": 0.00017500453077161617, "loss": 1.1108, "step": 2494 }, { "epoch": 0.4636684631109459, "grad_norm": 0.6688693761825562, "learning_rate": 0.0001749851847341098, "loss": 0.9646, "step": 2495 }, { "epoch": 0.46385430217431706, "grad_norm": 0.5650538206100464, "learning_rate": 0.0001749658322829585, "loss": 1.0911, "step": 2496 }, { "epoch": 0.46404014123768816, "grad_norm": 0.6584517955780029, "learning_rate": 0.00017494647341981755, "loss": 1.1071, "step": 2497 }, { "epoch": 0.46422598030105927, "grad_norm": 0.62405925989151, "learning_rate": 0.00017492710814634275, "loss": 1.0248, "step": 2498 }, { "epoch": 0.4644118193644304, "grad_norm": 0.5927742719650269, "learning_rate": 0.00017490773646419046, "loss": 0.956, "step": 2499 }, { "epoch": 0.46459765842780154, "grad_norm": 0.5736260414123535, "learning_rate": 
0.0001748883583750176, "loss": 1.1814, "step": 2500 }, { "epoch": 0.46478349749117265, "grad_norm": 0.6146401166915894, "learning_rate": 0.00017486897388048156, "loss": 0.9654, "step": 2501 }, { "epoch": 0.46496933655454376, "grad_norm": 0.5440464019775391, "learning_rate": 0.00017484958298224037, "loss": 0.9954, "step": 2502 }, { "epoch": 0.46515517561791486, "grad_norm": 0.6333348751068115, "learning_rate": 0.00017483018568195257, "loss": 1.3325, "step": 2503 }, { "epoch": 0.465341014681286, "grad_norm": 0.669081449508667, "learning_rate": 0.0001748107819812773, "loss": 1.1671, "step": 2504 }, { "epoch": 0.46552685374465713, "grad_norm": 0.5631642937660217, "learning_rate": 0.0001747913718818741, "loss": 1.1348, "step": 2505 }, { "epoch": 0.46571269280802824, "grad_norm": 0.565836489200592, "learning_rate": 0.00017477195538540325, "loss": 1.156, "step": 2506 }, { "epoch": 0.46589853187139935, "grad_norm": 0.4931422472000122, "learning_rate": 0.00017475253249352548, "loss": 0.9889, "step": 2507 }, { "epoch": 0.4660843709347705, "grad_norm": 0.6236067414283752, "learning_rate": 0.000174733103207902, "loss": 1.1636, "step": 2508 }, { "epoch": 0.4662702099981416, "grad_norm": 0.5879521369934082, "learning_rate": 0.0001747136675301947, "loss": 1.419, "step": 2509 }, { "epoch": 0.4664560490615127, "grad_norm": 0.5449656844139099, "learning_rate": 0.00017469422546206596, "loss": 1.059, "step": 2510 }, { "epoch": 0.46664188812488383, "grad_norm": 0.5169174671173096, "learning_rate": 0.00017467477700517864, "loss": 1.0173, "step": 2511 }, { "epoch": 0.466827727188255, "grad_norm": 0.5268780589103699, "learning_rate": 0.00017465532216119625, "loss": 1.1683, "step": 2512 }, { "epoch": 0.4670135662516261, "grad_norm": 0.5800816416740417, "learning_rate": 0.00017463586093178287, "loss": 0.8585, "step": 2513 }, { "epoch": 0.4671994053149972, "grad_norm": 0.5580684542655945, "learning_rate": 0.00017461639331860294, "loss": 1.0921, "step": 2514 }, { "epoch": 0.4673852443783683, 
"grad_norm": 0.6631280183792114, "learning_rate": 0.00017459691932332163, "loss": 0.923, "step": 2515 }, { "epoch": 0.4675710834417395, "grad_norm": 0.5217365026473999, "learning_rate": 0.00017457743894760456, "loss": 1.1524, "step": 2516 }, { "epoch": 0.4677569225051106, "grad_norm": 0.6715142726898193, "learning_rate": 0.000174557952193118, "loss": 1.0847, "step": 2517 }, { "epoch": 0.4679427615684817, "grad_norm": 0.6288979053497314, "learning_rate": 0.0001745384590615286, "loss": 1.2511, "step": 2518 }, { "epoch": 0.4681286006318528, "grad_norm": 0.6113332509994507, "learning_rate": 0.00017451895955450375, "loss": 1.1654, "step": 2519 }, { "epoch": 0.46831443969522396, "grad_norm": 0.5559727549552917, "learning_rate": 0.0001744994536737112, "loss": 1.2103, "step": 2520 }, { "epoch": 0.46850027875859507, "grad_norm": 0.6704648733139038, "learning_rate": 0.00017447994142081935, "loss": 1.2595, "step": 2521 }, { "epoch": 0.4686861178219662, "grad_norm": 0.9100911021232605, "learning_rate": 0.00017446042279749716, "loss": 1.1599, "step": 2522 }, { "epoch": 0.4688719568853373, "grad_norm": 0.5992748141288757, "learning_rate": 0.0001744408978054141, "loss": 0.8977, "step": 2523 }, { "epoch": 0.46905779594870844, "grad_norm": 0.576352596282959, "learning_rate": 0.00017442136644624015, "loss": 1.0218, "step": 2524 }, { "epoch": 0.46924363501207955, "grad_norm": 0.73007732629776, "learning_rate": 0.00017440182872164586, "loss": 1.2671, "step": 2525 }, { "epoch": 0.46942947407545066, "grad_norm": 0.5133440494537354, "learning_rate": 0.00017438228463330236, "loss": 0.8163, "step": 2526 }, { "epoch": 0.46961531313882177, "grad_norm": 0.636014461517334, "learning_rate": 0.0001743627341828813, "loss": 1.1354, "step": 2527 }, { "epoch": 0.4698011522021929, "grad_norm": 0.5063263177871704, "learning_rate": 0.00017434317737205488, "loss": 0.7853, "step": 2528 }, { "epoch": 0.46998699126556404, "grad_norm": 0.5486884713172913, "learning_rate": 0.00017432361420249582, "loss": 
0.9424, "step": 2529 }, { "epoch": 0.47017283032893514, "grad_norm": 0.5266200304031372, "learning_rate": 0.00017430404467587738, "loss": 1.146, "step": 2530 }, { "epoch": 0.47035866939230625, "grad_norm": 0.626629114151001, "learning_rate": 0.0001742844687938734, "loss": 1.0168, "step": 2531 }, { "epoch": 0.47054450845567736, "grad_norm": 0.6614012122154236, "learning_rate": 0.0001742648865581583, "loss": 1.0367, "step": 2532 }, { "epoch": 0.4707303475190485, "grad_norm": 0.685212254524231, "learning_rate": 0.00017424529797040693, "loss": 1.3456, "step": 2533 }, { "epoch": 0.4709161865824196, "grad_norm": 0.636879563331604, "learning_rate": 0.00017422570303229473, "loss": 1.2354, "step": 2534 }, { "epoch": 0.47110202564579073, "grad_norm": 0.4891170263290405, "learning_rate": 0.00017420610174549773, "loss": 1.1914, "step": 2535 }, { "epoch": 0.47128786470916184, "grad_norm": 0.5971581935882568, "learning_rate": 0.0001741864941116925, "loss": 1.1496, "step": 2536 }, { "epoch": 0.471473703772533, "grad_norm": 0.6186808347702026, "learning_rate": 0.00017416688013255607, "loss": 1.0804, "step": 2537 }, { "epoch": 0.4716595428359041, "grad_norm": 0.5329042673110962, "learning_rate": 0.00017414725980976608, "loss": 1.1438, "step": 2538 }, { "epoch": 0.4718453818992752, "grad_norm": 0.590497612953186, "learning_rate": 0.00017412763314500072, "loss": 1.1196, "step": 2539 }, { "epoch": 0.4720312209626463, "grad_norm": 0.6951502561569214, "learning_rate": 0.00017410800013993867, "loss": 1.061, "step": 2540 }, { "epoch": 0.4722170600260175, "grad_norm": 0.5251403450965881, "learning_rate": 0.00017408836079625922, "loss": 0.7278, "step": 2541 }, { "epoch": 0.4724028990893886, "grad_norm": 0.7289116978645325, "learning_rate": 0.00017406871511564215, "loss": 1.0795, "step": 2542 }, { "epoch": 0.4725887381527597, "grad_norm": 0.5959666967391968, "learning_rate": 0.00017404906309976777, "loss": 1.1282, "step": 2543 }, { "epoch": 0.4727745772161308, "grad_norm": 
0.6236711144447327, "learning_rate": 0.000174029404750317, "loss": 1.2404, "step": 2544 }, { "epoch": 0.47296041627950197, "grad_norm": 0.5691270232200623, "learning_rate": 0.00017400974006897123, "loss": 1.2028, "step": 2545 }, { "epoch": 0.4731462553428731, "grad_norm": 0.5911795496940613, "learning_rate": 0.0001739900690574124, "loss": 0.9487, "step": 2546 }, { "epoch": 0.4733320944062442, "grad_norm": 0.6401910185813904, "learning_rate": 0.00017397039171732312, "loss": 1.2378, "step": 2547 }, { "epoch": 0.4735179334696153, "grad_norm": 0.4885672330856323, "learning_rate": 0.00017395070805038633, "loss": 1.0692, "step": 2548 }, { "epoch": 0.47370377253298646, "grad_norm": 0.6205880641937256, "learning_rate": 0.00017393101805828563, "loss": 0.9946, "step": 2549 }, { "epoch": 0.47388961159635756, "grad_norm": 0.5263411402702332, "learning_rate": 0.00017391132174270521, "loss": 1.0809, "step": 2550 }, { "epoch": 0.47407545065972867, "grad_norm": 0.7040189504623413, "learning_rate": 0.00017389161910532968, "loss": 1.2357, "step": 2551 }, { "epoch": 0.4742612897230998, "grad_norm": 0.561153769493103, "learning_rate": 0.00017387191014784427, "loss": 0.8271, "step": 2552 }, { "epoch": 0.47444712878647094, "grad_norm": 0.611548900604248, "learning_rate": 0.00017385219487193473, "loss": 1.1188, "step": 2553 }, { "epoch": 0.47463296784984205, "grad_norm": 0.6051244735717773, "learning_rate": 0.00017383247327928735, "loss": 0.9883, "step": 2554 }, { "epoch": 0.47481880691321315, "grad_norm": 0.6681859493255615, "learning_rate": 0.00017381274537158896, "loss": 1.1764, "step": 2555 }, { "epoch": 0.47500464597658426, "grad_norm": 0.5694065093994141, "learning_rate": 0.00017379301115052692, "loss": 1.1938, "step": 2556 }, { "epoch": 0.4751904850399554, "grad_norm": 0.5876331329345703, "learning_rate": 0.00017377327061778914, "loss": 1.0628, "step": 2557 }, { "epoch": 0.47537632410332653, "grad_norm": 0.6016728281974792, "learning_rate": 0.00017375352377506408, "loss": 1.1583, 
"step": 2558 }, { "epoch": 0.47556216316669764, "grad_norm": 0.4448058307170868, "learning_rate": 0.00017373377062404076, "loss": 0.8662, "step": 2559 }, { "epoch": 0.47574800223006874, "grad_norm": 0.6803490519523621, "learning_rate": 0.00017371401116640867, "loss": 1.0937, "step": 2560 }, { "epoch": 0.4759338412934399, "grad_norm": 0.6000282168388367, "learning_rate": 0.00017369424540385783, "loss": 1.1161, "step": 2561 }, { "epoch": 0.476119680356811, "grad_norm": 0.5419662594795227, "learning_rate": 0.00017367447333807898, "loss": 1.0714, "step": 2562 }, { "epoch": 0.4763055194201821, "grad_norm": 0.543039083480835, "learning_rate": 0.00017365469497076318, "loss": 0.918, "step": 2563 }, { "epoch": 0.47649135848355323, "grad_norm": 0.46435242891311646, "learning_rate": 0.0001736349103036021, "loss": 0.9592, "step": 2564 }, { "epoch": 0.47667719754692434, "grad_norm": 0.49158206582069397, "learning_rate": 0.00017361511933828801, "loss": 1.0331, "step": 2565 }, { "epoch": 0.4768630366102955, "grad_norm": 0.5796807408332825, "learning_rate": 0.00017359532207651365, "loss": 1.1738, "step": 2566 }, { "epoch": 0.4770488756736666, "grad_norm": 0.576512336730957, "learning_rate": 0.00017357551851997232, "loss": 1.0411, "step": 2567 }, { "epoch": 0.4772347147370377, "grad_norm": 0.6543241739273071, "learning_rate": 0.0001735557086703579, "loss": 1.1288, "step": 2568 }, { "epoch": 0.4774205538004088, "grad_norm": 0.6577771306037903, "learning_rate": 0.00017353589252936476, "loss": 1.1625, "step": 2569 }, { "epoch": 0.47760639286378, "grad_norm": 0.5853607654571533, "learning_rate": 0.00017351607009868778, "loss": 0.9686, "step": 2570 }, { "epoch": 0.4777922319271511, "grad_norm": 0.6094310283660889, "learning_rate": 0.00017349624138002242, "loss": 1.0382, "step": 2571 }, { "epoch": 0.4779780709905222, "grad_norm": 0.5882353782653809, "learning_rate": 0.0001734764063750647, "loss": 1.0502, "step": 2572 }, { "epoch": 0.4781639100538933, "grad_norm": 0.6566441655158997, 
"learning_rate": 0.00017345656508551115, "loss": 1.3288, "step": 2573 }, { "epoch": 0.47834974911726447, "grad_norm": 0.4997236430644989, "learning_rate": 0.0001734367175130588, "loss": 1.0965, "step": 2574 }, { "epoch": 0.4785355881806356, "grad_norm": 0.5745366811752319, "learning_rate": 0.0001734168636594053, "loss": 0.9885, "step": 2575 }, { "epoch": 0.4787214272440067, "grad_norm": 0.6105988621711731, "learning_rate": 0.00017339700352624879, "loss": 1.1263, "step": 2576 }, { "epoch": 0.4789072663073778, "grad_norm": 0.5690290331840515, "learning_rate": 0.00017337713711528796, "loss": 1.2127, "step": 2577 }, { "epoch": 0.47909310537074895, "grad_norm": 0.6462541818618774, "learning_rate": 0.00017335726442822195, "loss": 1.216, "step": 2578 }, { "epoch": 0.47927894443412006, "grad_norm": 0.640511155128479, "learning_rate": 0.00017333738546675058, "loss": 0.8523, "step": 2579 }, { "epoch": 0.47946478349749116, "grad_norm": 0.5977886319160461, "learning_rate": 0.00017331750023257415, "loss": 1.0885, "step": 2580 }, { "epoch": 0.47965062256086227, "grad_norm": 0.6122512221336365, "learning_rate": 0.00017329760872739343, "loss": 0.9032, "step": 2581 }, { "epoch": 0.47983646162423343, "grad_norm": 0.6344446539878845, "learning_rate": 0.00017327771095290983, "loss": 1.1595, "step": 2582 }, { "epoch": 0.48002230068760454, "grad_norm": 0.7006237506866455, "learning_rate": 0.00017325780691082528, "loss": 1.2291, "step": 2583 }, { "epoch": 0.48020813975097565, "grad_norm": 0.66008061170578, "learning_rate": 0.00017323789660284212, "loss": 1.1691, "step": 2584 }, { "epoch": 0.48039397881434676, "grad_norm": 0.6128259301185608, "learning_rate": 0.0001732179800306634, "loss": 0.9029, "step": 2585 }, { "epoch": 0.4805798178777179, "grad_norm": 0.6071939468383789, "learning_rate": 0.0001731980571959926, "loss": 1.1245, "step": 2586 }, { "epoch": 0.480765656941089, "grad_norm": 0.5949906706809998, "learning_rate": 0.00017317812810053378, "loss": 0.8654, "step": 2587 }, { 
"epoch": 0.48095149600446013, "grad_norm": 0.6016258001327515, "learning_rate": 0.0001731581927459915, "loss": 0.8913, "step": 2588 }, { "epoch": 0.48113733506783124, "grad_norm": 0.6196433305740356, "learning_rate": 0.00017313825113407085, "loss": 1.1088, "step": 2589 }, { "epoch": 0.4813231741312024, "grad_norm": 0.7779860496520996, "learning_rate": 0.00017311830326647751, "loss": 0.9617, "step": 2590 }, { "epoch": 0.4815090131945735, "grad_norm": 0.5172240138053894, "learning_rate": 0.0001730983491449177, "loss": 0.9737, "step": 2591 }, { "epoch": 0.4816948522579446, "grad_norm": 0.6355507969856262, "learning_rate": 0.0001730783887710981, "loss": 1.0966, "step": 2592 }, { "epoch": 0.4818806913213157, "grad_norm": 0.5902290940284729, "learning_rate": 0.0001730584221467259, "loss": 1.1274, "step": 2593 }, { "epoch": 0.4820665303846869, "grad_norm": 0.6762295365333557, "learning_rate": 0.00017303844927350903, "loss": 1.3456, "step": 2594 }, { "epoch": 0.482252369448058, "grad_norm": 0.6468362808227539, "learning_rate": 0.0001730184701531557, "loss": 1.2443, "step": 2595 }, { "epoch": 0.4824382085114291, "grad_norm": 0.5669111609458923, "learning_rate": 0.00017299848478737482, "loss": 1.044, "step": 2596 }, { "epoch": 0.4826240475748002, "grad_norm": 0.6939884424209595, "learning_rate": 0.00017297849317787574, "loss": 1.2908, "step": 2597 }, { "epoch": 0.48280988663817137, "grad_norm": 0.6661247611045837, "learning_rate": 0.00017295849532636846, "loss": 1.1268, "step": 2598 }, { "epoch": 0.4829957257015425, "grad_norm": 0.6555077433586121, "learning_rate": 0.00017293849123456334, "loss": 1.1174, "step": 2599 }, { "epoch": 0.4831815647649136, "grad_norm": 0.5667238831520081, "learning_rate": 0.00017291848090417145, "loss": 1.0486, "step": 2600 }, { "epoch": 0.4833674038282847, "grad_norm": 0.6136776804924011, "learning_rate": 0.00017289846433690426, "loss": 1.1264, "step": 2601 }, { "epoch": 0.4835532428916558, "grad_norm": 0.5599073171615601, "learning_rate": 
0.0001728784415344739, "loss": 1.1205, "step": 2602 }, { "epoch": 0.48373908195502696, "grad_norm": 0.583046555519104, "learning_rate": 0.0001728584124985929, "loss": 1.1435, "step": 2603 }, { "epoch": 0.48392492101839807, "grad_norm": 0.5243253707885742, "learning_rate": 0.00017283837723097443, "loss": 1.0822, "step": 2604 }, { "epoch": 0.4841107600817692, "grad_norm": 0.6579024195671082, "learning_rate": 0.00017281833573333212, "loss": 1.1433, "step": 2605 }, { "epoch": 0.4842965991451403, "grad_norm": 0.593559205532074, "learning_rate": 0.00017279828800738017, "loss": 0.987, "step": 2606 }, { "epoch": 0.48448243820851145, "grad_norm": 0.5837587714195251, "learning_rate": 0.0001727782340548333, "loss": 1.3241, "step": 2607 }, { "epoch": 0.48466827727188255, "grad_norm": 0.5321952104568481, "learning_rate": 0.0001727581738774068, "loss": 0.935, "step": 2608 }, { "epoch": 0.48485411633525366, "grad_norm": 0.6163881421089172, "learning_rate": 0.00017273810747681645, "loss": 1.1309, "step": 2609 }, { "epoch": 0.48503995539862477, "grad_norm": 0.6759121417999268, "learning_rate": 0.00017271803485477852, "loss": 1.1814, "step": 2610 }, { "epoch": 0.48522579446199593, "grad_norm": 0.5714459419250488, "learning_rate": 0.00017269795601300994, "loss": 1.231, "step": 2611 }, { "epoch": 0.48541163352536704, "grad_norm": 0.5658890604972839, "learning_rate": 0.000172677870953228, "loss": 1.3298, "step": 2612 }, { "epoch": 0.48559747258873814, "grad_norm": 0.6630015969276428, "learning_rate": 0.00017265777967715072, "loss": 1.175, "step": 2613 }, { "epoch": 0.48578331165210925, "grad_norm": 0.6266465187072754, "learning_rate": 0.00017263768218649654, "loss": 1.2725, "step": 2614 }, { "epoch": 0.4859691507154804, "grad_norm": 0.5513420701026917, "learning_rate": 0.00017261757848298438, "loss": 0.9279, "step": 2615 }, { "epoch": 0.4861549897788515, "grad_norm": 3.998467445373535, "learning_rate": 0.00017259746856833382, "loss": 3.0076, "step": 2616 }, { "epoch": 
0.48634082884222263, "grad_norm": 0.5792141556739807, "learning_rate": 0.0001725773524442648, "loss": 1.0691, "step": 2617 }, { "epoch": 0.48652666790559373, "grad_norm": 0.5263779163360596, "learning_rate": 0.00017255723011249803, "loss": 1.0758, "step": 2618 }, { "epoch": 0.4867125069689649, "grad_norm": 0.5941771268844604, "learning_rate": 0.0001725371015747545, "loss": 1.0486, "step": 2619 }, { "epoch": 0.486898346032336, "grad_norm": 0.6138995885848999, "learning_rate": 0.00017251696683275594, "loss": 1.1304, "step": 2620 }, { "epoch": 0.4870841850957071, "grad_norm": 0.5196933746337891, "learning_rate": 0.00017249682588822446, "loss": 0.9873, "step": 2621 }, { "epoch": 0.4872700241590782, "grad_norm": 0.8865917921066284, "learning_rate": 0.00017247667874288277, "loss": 1.0958, "step": 2622 }, { "epoch": 0.4874558632224494, "grad_norm": 0.6708472371101379, "learning_rate": 0.0001724565253984541, "loss": 0.9633, "step": 2623 }, { "epoch": 0.4876417022858205, "grad_norm": 0.5149091482162476, "learning_rate": 0.00017243636585666225, "loss": 0.8691, "step": 2624 }, { "epoch": 0.4878275413491916, "grad_norm": 0.7160931825637817, "learning_rate": 0.00017241620011923146, "loss": 1.2219, "step": 2625 }, { "epoch": 0.4880133804125627, "grad_norm": 0.6450536251068115, "learning_rate": 0.00017239602818788652, "loss": 1.1747, "step": 2626 }, { "epoch": 0.48819921947593387, "grad_norm": 0.5064762830734253, "learning_rate": 0.00017237585006435287, "loss": 0.7801, "step": 2627 }, { "epoch": 0.488385058539305, "grad_norm": 0.5990808010101318, "learning_rate": 0.0001723556657503563, "loss": 1.1578, "step": 2628 }, { "epoch": 0.4885708976026761, "grad_norm": 0.665460467338562, "learning_rate": 0.00017233547524762331, "loss": 1.0568, "step": 2629 }, { "epoch": 0.4887567366660472, "grad_norm": 0.5016782283782959, "learning_rate": 0.00017231527855788073, "loss": 1.0718, "step": 2630 }, { "epoch": 0.48894257572941835, "grad_norm": 0.5329027771949768, "learning_rate": 
0.0001722950756828561, "loss": 0.9948, "step": 2631 }, { "epoch": 0.48912841479278946, "grad_norm": 0.639634370803833, "learning_rate": 0.0001722748666242774, "loss": 1.02, "step": 2632 }, { "epoch": 0.48931425385616056, "grad_norm": 0.6515493988990784, "learning_rate": 0.00017225465138387316, "loss": 1.0507, "step": 2633 }, { "epoch": 0.48950009291953167, "grad_norm": 0.6322322487831116, "learning_rate": 0.0001722344299633724, "loss": 1.0638, "step": 2634 }, { "epoch": 0.48968593198290283, "grad_norm": 0.6251794099807739, "learning_rate": 0.00017221420236450475, "loss": 1.1392, "step": 2635 }, { "epoch": 0.48987177104627394, "grad_norm": 0.6864203214645386, "learning_rate": 0.0001721939685890003, "loss": 1.17, "step": 2636 }, { "epoch": 0.49005761010964505, "grad_norm": 0.5467199683189392, "learning_rate": 0.00017217372863858964, "loss": 1.1392, "step": 2637 }, { "epoch": 0.49024344917301615, "grad_norm": 0.5941484570503235, "learning_rate": 0.00017215348251500403, "loss": 1.0009, "step": 2638 }, { "epoch": 0.49042928823638726, "grad_norm": 0.660819411277771, "learning_rate": 0.0001721332302199751, "loss": 0.9249, "step": 2639 }, { "epoch": 0.4906151272997584, "grad_norm": 0.5856802463531494, "learning_rate": 0.0001721129717552351, "loss": 1.0534, "step": 2640 }, { "epoch": 0.49080096636312953, "grad_norm": 0.6267640590667725, "learning_rate": 0.00017209270712251673, "loss": 1.1636, "step": 2641 }, { "epoch": 0.49098680542650064, "grad_norm": 0.6789271235466003, "learning_rate": 0.00017207243632355335, "loss": 1.3032, "step": 2642 }, { "epoch": 0.49117264448987175, "grad_norm": 0.5530579090118408, "learning_rate": 0.0001720521593600787, "loss": 1.3485, "step": 2643 }, { "epoch": 0.4913584835532429, "grad_norm": 0.6183351874351501, "learning_rate": 0.00017203187623382715, "loss": 0.9769, "step": 2644 }, { "epoch": 0.491544322616614, "grad_norm": 0.5712160468101501, "learning_rate": 0.00017201158694653352, "loss": 1.1888, "step": 2645 }, { "epoch": 
0.4917301616799851, "grad_norm": 0.6171578764915466, "learning_rate": 0.00017199129149993327, "loss": 1.0783, "step": 2646 }, { "epoch": 0.49191600074335623, "grad_norm": 0.5475873351097107, "learning_rate": 0.00017197098989576222, "loss": 1.0462, "step": 2647 }, { "epoch": 0.4921018398067274, "grad_norm": 0.6114869117736816, "learning_rate": 0.00017195068213575688, "loss": 1.1484, "step": 2648 }, { "epoch": 0.4922876788700985, "grad_norm": 0.5490365028381348, "learning_rate": 0.00017193036822165417, "loss": 1.1458, "step": 2649 }, { "epoch": 0.4924735179334696, "grad_norm": 0.5615825653076172, "learning_rate": 0.0001719100481551916, "loss": 0.9985, "step": 2650 }, { "epoch": 0.4926593569968407, "grad_norm": 0.6177067160606384, "learning_rate": 0.00017188972193810722, "loss": 0.9851, "step": 2651 }, { "epoch": 0.4928451960602119, "grad_norm": 0.550390362739563, "learning_rate": 0.00017186938957213953, "loss": 0.8936, "step": 2652 }, { "epoch": 0.493031035123583, "grad_norm": 0.5912401676177979, "learning_rate": 0.0001718490510590276, "loss": 1.1753, "step": 2653 }, { "epoch": 0.4932168741869541, "grad_norm": 0.7773668169975281, "learning_rate": 0.00017182870640051107, "loss": 1.1211, "step": 2654 }, { "epoch": 0.4934027132503252, "grad_norm": 0.6435762643814087, "learning_rate": 0.00017180835559833004, "loss": 1.2553, "step": 2655 }, { "epoch": 0.49358855231369636, "grad_norm": 0.570472240447998, "learning_rate": 0.00017178799865422514, "loss": 1.1645, "step": 2656 }, { "epoch": 0.49377439137706747, "grad_norm": 0.6846404075622559, "learning_rate": 0.0001717676355699376, "loss": 1.1736, "step": 2657 }, { "epoch": 0.4939602304404386, "grad_norm": 0.6840716004371643, "learning_rate": 0.00017174726634720905, "loss": 1.2924, "step": 2658 }, { "epoch": 0.4941460695038097, "grad_norm": 0.6342020034790039, "learning_rate": 0.00017172689098778177, "loss": 1.0777, "step": 2659 }, { "epoch": 0.49433190856718084, "grad_norm": 0.5968679785728455, "learning_rate": 
0.00017170650949339847, "loss": 0.7009, "step": 2660 }, { "epoch": 0.49451774763055195, "grad_norm": 0.6246665716171265, "learning_rate": 0.00017168612186580238, "loss": 1.0159, "step": 2661 }, { "epoch": 0.49470358669392306, "grad_norm": 0.5966857671737671, "learning_rate": 0.00017166572810673744, "loss": 1.1489, "step": 2662 }, { "epoch": 0.49488942575729417, "grad_norm": 0.5603086352348328, "learning_rate": 0.0001716453282179478, "loss": 1.1315, "step": 2663 }, { "epoch": 0.49507526482066533, "grad_norm": 0.5681596994400024, "learning_rate": 0.00017162492220117845, "loss": 1.2733, "step": 2664 }, { "epoch": 0.49526110388403644, "grad_norm": 0.6216878294944763, "learning_rate": 0.00017160451005817468, "loss": 1.0994, "step": 2665 }, { "epoch": 0.49544694294740754, "grad_norm": 0.6957216262817383, "learning_rate": 0.00017158409179068244, "loss": 0.8731, "step": 2666 }, { "epoch": 0.49563278201077865, "grad_norm": 0.7972751259803772, "learning_rate": 0.00017156366740044807, "loss": 1.0435, "step": 2667 }, { "epoch": 0.4958186210741498, "grad_norm": 0.5390364527702332, "learning_rate": 0.0001715432368892186, "loss": 1.1918, "step": 2668 }, { "epoch": 0.4960044601375209, "grad_norm": 0.5905940532684326, "learning_rate": 0.00017152280025874142, "loss": 1.1615, "step": 2669 }, { "epoch": 0.496190299200892, "grad_norm": 0.5240758657455444, "learning_rate": 0.00017150235751076456, "loss": 1.1469, "step": 2670 }, { "epoch": 0.49637613826426313, "grad_norm": 0.630538284778595, "learning_rate": 0.00017148190864703654, "loss": 0.9101, "step": 2671 }, { "epoch": 0.4965619773276343, "grad_norm": 0.5258409976959229, "learning_rate": 0.00017146145366930634, "loss": 1.2836, "step": 2672 }, { "epoch": 0.4967478163910054, "grad_norm": 0.5378925800323486, "learning_rate": 0.00017144099257932364, "loss": 0.9881, "step": 2673 }, { "epoch": 0.4969336554543765, "grad_norm": 0.6253618597984314, "learning_rate": 0.00017142052537883834, "loss": 0.6877, "step": 2674 }, { "epoch": 
0.4971194945177476, "grad_norm": 0.5238915681838989, "learning_rate": 0.00017140005206960123, "loss": 0.9998, "step": 2675 }, { "epoch": 0.4973053335811187, "grad_norm": 0.6569894552230835, "learning_rate": 0.0001713795726533633, "loss": 1.1629, "step": 2676 }, { "epoch": 0.4974911726444899, "grad_norm": 0.6058579683303833, "learning_rate": 0.00017135908713187628, "loss": 1.218, "step": 2677 }, { "epoch": 0.497677011707861, "grad_norm": 0.6192750930786133, "learning_rate": 0.0001713385955068923, "loss": 1.1703, "step": 2678 }, { "epoch": 0.4978628507712321, "grad_norm": 0.5908500552177429, "learning_rate": 0.00017131809778016404, "loss": 1.2753, "step": 2679 }, { "epoch": 0.4980486898346032, "grad_norm": 0.6565137505531311, "learning_rate": 0.00017129759395344475, "loss": 1.0112, "step": 2680 }, { "epoch": 0.49823452889797437, "grad_norm": 0.621427059173584, "learning_rate": 0.00017127708402848817, "loss": 1.0148, "step": 2681 }, { "epoch": 0.4984203679613455, "grad_norm": 0.5895846486091614, "learning_rate": 0.00017125656800704852, "loss": 1.039, "step": 2682 }, { "epoch": 0.4986062070247166, "grad_norm": 0.5865383148193359, "learning_rate": 0.00017123604589088063, "loss": 1.0531, "step": 2683 }, { "epoch": 0.4987920460880877, "grad_norm": 0.6337679624557495, "learning_rate": 0.00017121551768173977, "loss": 1.2365, "step": 2684 }, { "epoch": 0.49897788515145886, "grad_norm": 0.6088318824768066, "learning_rate": 0.00017119498338138172, "loss": 1.3512, "step": 2685 }, { "epoch": 0.49916372421482996, "grad_norm": 0.6825537085533142, "learning_rate": 0.00017117444299156293, "loss": 1.2751, "step": 2686 }, { "epoch": 0.49934956327820107, "grad_norm": 0.5798403024673462, "learning_rate": 0.00017115389651404018, "loss": 0.8005, "step": 2687 }, { "epoch": 0.4995354023415722, "grad_norm": 0.5042392611503601, "learning_rate": 0.00017113334395057087, "loss": 1.1635, "step": 2688 }, { "epoch": 0.49972124140494334, "grad_norm": 0.5946384072303772, "learning_rate": 
0.0001711127853029129, "loss": 1.1139, "step": 2689 }, { "epoch": 0.49990708046831445, "grad_norm": 0.5286500453948975, "learning_rate": 0.00017109222057282476, "loss": 0.7885, "step": 2690 }, { "epoch": 0.5000929195316856, "grad_norm": 0.6756643056869507, "learning_rate": 0.0001710716497620653, "loss": 1.3519, "step": 2691 }, { "epoch": 0.5002787585950567, "grad_norm": 0.6427111625671387, "learning_rate": 0.00017105107287239402, "loss": 1.0801, "step": 2692 }, { "epoch": 0.5002787585950567, "eval_loss": 1.073055386543274, "eval_runtime": 23.0111, "eval_samples_per_second": 47.455, "eval_steps_per_second": 23.728, "step": 2692 }, { "epoch": 0.5004645976584278, "grad_norm": 0.6261104345321655, "learning_rate": 0.00017103048990557094, "loss": 0.956, "step": 2693 }, { "epoch": 0.5006504367217989, "grad_norm": 0.5339560508728027, "learning_rate": 0.00017100990086335653, "loss": 1.0753, "step": 2694 }, { "epoch": 0.5008362757851701, "grad_norm": 0.6238815188407898, "learning_rate": 0.00017098930574751183, "loss": 1.1003, "step": 2695 }, { "epoch": 0.5010221148485412, "grad_norm": 0.6724877953529358, "learning_rate": 0.00017096870455979838, "loss": 1.2206, "step": 2696 }, { "epoch": 0.5012079539119123, "grad_norm": 0.49802348017692566, "learning_rate": 0.00017094809730197825, "loss": 0.9445, "step": 2697 }, { "epoch": 0.5013937929752834, "grad_norm": 0.6113379597663879, "learning_rate": 0.000170927483975814, "loss": 1.1136, "step": 2698 }, { "epoch": 0.5015796320386545, "grad_norm": 0.49268612265586853, "learning_rate": 0.0001709068645830688, "loss": 1.0709, "step": 2699 }, { "epoch": 0.5017654711020256, "grad_norm": 0.7463451623916626, "learning_rate": 0.00017088623912550616, "loss": 1.2494, "step": 2700 }, { "epoch": 0.5019513101653967, "grad_norm": 3.880950450897217, "learning_rate": 0.0001708656076048903, "loss": 2.6297, "step": 2701 }, { "epoch": 0.5021371492287678, "grad_norm": 0.679601788520813, "learning_rate": 0.00017084497002298592, "loss": 1.0374, "step": 2702 
}, { "epoch": 0.5023229882921391, "grad_norm": 0.5280315279960632, "learning_rate": 0.00017082432638155807, "loss": 1.0968, "step": 2703 }, { "epoch": 0.5025088273555102, "grad_norm": 0.676991879940033, "learning_rate": 0.00017080367668237254, "loss": 1.1099, "step": 2704 }, { "epoch": 0.5026946664188813, "grad_norm": 0.6209761500358582, "learning_rate": 0.0001707830209271955, "loss": 1.1345, "step": 2705 }, { "epoch": 0.5028805054822524, "grad_norm": 0.5609131455421448, "learning_rate": 0.00017076235911779372, "loss": 0.9599, "step": 2706 }, { "epoch": 0.5030663445456235, "grad_norm": 0.5717629790306091, "learning_rate": 0.00017074169125593443, "loss": 1.2308, "step": 2707 }, { "epoch": 0.5032521836089946, "grad_norm": 0.5387877225875854, "learning_rate": 0.00017072101734338536, "loss": 1.0986, "step": 2708 }, { "epoch": 0.5034380226723657, "grad_norm": 0.6486318111419678, "learning_rate": 0.00017070033738191483, "loss": 1.1697, "step": 2709 }, { "epoch": 0.5036238617357368, "grad_norm": 0.5692694783210754, "learning_rate": 0.0001706796513732917, "loss": 1.0235, "step": 2710 }, { "epoch": 0.5038097007991079, "grad_norm": 0.7457370162010193, "learning_rate": 0.00017065895931928516, "loss": 0.9989, "step": 2711 }, { "epoch": 0.5039955398624791, "grad_norm": 0.49672433733940125, "learning_rate": 0.00017063826122166518, "loss": 0.8731, "step": 2712 }, { "epoch": 0.5041813789258502, "grad_norm": 0.6872747540473938, "learning_rate": 0.000170617557082202, "loss": 1.2155, "step": 2713 }, { "epoch": 0.5043672179892214, "grad_norm": 0.6377733945846558, "learning_rate": 0.00017059684690266656, "loss": 0.9905, "step": 2714 }, { "epoch": 0.5045530570525925, "grad_norm": 0.6393523216247559, "learning_rate": 0.0001705761306848302, "loss": 1.2727, "step": 2715 }, { "epoch": 0.5047388961159636, "grad_norm": 0.5900831818580627, "learning_rate": 0.00017055540843046485, "loss": 1.0815, "step": 2716 }, { "epoch": 0.5049247351793347, "grad_norm": 0.5988309383392334, "learning_rate": 
0.00017053468014134295, "loss": 1.099, "step": 2717 }, { "epoch": 0.5051105742427058, "grad_norm": 0.6535857915878296, "learning_rate": 0.0001705139458192374, "loss": 1.0273, "step": 2718 }, { "epoch": 0.5052964133060769, "grad_norm": 0.5833021998405457, "learning_rate": 0.00017049320546592167, "loss": 0.9237, "step": 2719 }, { "epoch": 0.5054822523694481, "grad_norm": 0.6732074618339539, "learning_rate": 0.00017047245908316972, "loss": 0.962, "step": 2720 }, { "epoch": 0.5056680914328192, "grad_norm": 0.5957641005516052, "learning_rate": 0.000170451706672756, "loss": 1.1259, "step": 2721 }, { "epoch": 0.5058539304961903, "grad_norm": 0.640815794467926, "learning_rate": 0.0001704309482364556, "loss": 1.0514, "step": 2722 }, { "epoch": 0.5060397695595614, "grad_norm": 0.5656802654266357, "learning_rate": 0.00017041018377604393, "loss": 1.0022, "step": 2723 }, { "epoch": 0.5062256086229325, "grad_norm": 0.6468055844306946, "learning_rate": 0.00017038941329329709, "loss": 1.1897, "step": 2724 }, { "epoch": 0.5064114476863036, "grad_norm": 0.6397135853767395, "learning_rate": 0.00017036863678999157, "loss": 0.6603, "step": 2725 }, { "epoch": 0.5065972867496747, "grad_norm": 0.5529398918151855, "learning_rate": 0.0001703478542679045, "loss": 0.9799, "step": 2726 }, { "epoch": 0.5067831258130459, "grad_norm": 0.7279483079910278, "learning_rate": 0.0001703270657288134, "loss": 1.4637, "step": 2727 }, { "epoch": 0.5069689648764171, "grad_norm": 0.5699883699417114, "learning_rate": 0.00017030627117449636, "loss": 1.0844, "step": 2728 }, { "epoch": 0.5071548039397882, "grad_norm": 0.663766086101532, "learning_rate": 0.000170285470606732, "loss": 1.1391, "step": 2729 }, { "epoch": 0.5073406430031593, "grad_norm": 0.5575130581855774, "learning_rate": 0.00017026466402729942, "loss": 1.1047, "step": 2730 }, { "epoch": 0.5075264820665304, "grad_norm": 0.6974256634712219, "learning_rate": 0.00017024385143797827, "loss": 0.9739, "step": 2731 }, { "epoch": 0.5077123211299015, 
"grad_norm": 0.5451050996780396, "learning_rate": 0.0001702230328405487, "loss": 0.9685, "step": 2732 }, { "epoch": 0.5078981601932726, "grad_norm": 0.5307935476303101, "learning_rate": 0.00017020220823679133, "loss": 1.31, "step": 2733 }, { "epoch": 0.5080839992566437, "grad_norm": 0.5251340866088867, "learning_rate": 0.00017018137762848742, "loss": 1.0396, "step": 2734 }, { "epoch": 0.5082698383200148, "grad_norm": 0.5450385212898254, "learning_rate": 0.00017016054101741857, "loss": 1.0322, "step": 2735 }, { "epoch": 0.508455677383386, "grad_norm": 0.661077618598938, "learning_rate": 0.000170139698405367, "loss": 1.2001, "step": 2736 }, { "epoch": 0.5086415164467571, "grad_norm": 0.5541077852249146, "learning_rate": 0.00017011884979411542, "loss": 0.9658, "step": 2737 }, { "epoch": 0.5088273555101283, "grad_norm": 0.610466718673706, "learning_rate": 0.0001700979951854471, "loss": 1.3137, "step": 2738 }, { "epoch": 0.5090131945734994, "grad_norm": 0.6601592898368835, "learning_rate": 0.00017007713458114574, "loss": 1.1539, "step": 2739 }, { "epoch": 0.5091990336368705, "grad_norm": 0.6103355288505554, "learning_rate": 0.0001700562679829956, "loss": 0.6126, "step": 2740 }, { "epoch": 0.5093848727002416, "grad_norm": 0.6720930933952332, "learning_rate": 0.00017003539539278144, "loss": 1.3313, "step": 2741 }, { "epoch": 0.5095707117636127, "grad_norm": 0.5944845080375671, "learning_rate": 0.00017001451681228856, "loss": 0.7785, "step": 2742 }, { "epoch": 0.5097565508269838, "grad_norm": 0.5247288942337036, "learning_rate": 0.00016999363224330273, "loss": 1.1125, "step": 2743 }, { "epoch": 0.509942389890355, "grad_norm": 0.6319442987442017, "learning_rate": 0.00016997274168761026, "loss": 1.3289, "step": 2744 }, { "epoch": 0.5101282289537261, "grad_norm": 0.7262590527534485, "learning_rate": 0.00016995184514699796, "loss": 1.2919, "step": 2745 }, { "epoch": 0.5103140680170972, "grad_norm": 0.5297901630401611, "learning_rate": 0.00016993094262325316, "loss": 0.9359, 
"step": 2746 }, { "epoch": 0.5104999070804683, "grad_norm": 0.6656481623649597, "learning_rate": 0.0001699100341181637, "loss": 1.1935, "step": 2747 }, { "epoch": 0.5106857461438394, "grad_norm": 0.6488802433013916, "learning_rate": 0.00016988911963351794, "loss": 1.0575, "step": 2748 }, { "epoch": 0.5108715852072105, "grad_norm": 0.6008021235466003, "learning_rate": 0.0001698681991711047, "loss": 1.1663, "step": 2749 }, { "epoch": 0.5110574242705817, "grad_norm": 0.7873336672782898, "learning_rate": 0.00016984727273271338, "loss": 1.2606, "step": 2750 }, { "epoch": 0.5112432633339528, "grad_norm": 0.612949788570404, "learning_rate": 0.0001698263403201339, "loss": 1.0826, "step": 2751 }, { "epoch": 0.5114291023973239, "grad_norm": 0.5440847873687744, "learning_rate": 0.00016980540193515658, "loss": 0.9785, "step": 2752 }, { "epoch": 0.5116149414606951, "grad_norm": 0.6771647930145264, "learning_rate": 0.00016978445757957238, "loss": 0.9802, "step": 2753 }, { "epoch": 0.5118007805240662, "grad_norm": 0.5409290194511414, "learning_rate": 0.0001697635072551727, "loss": 1.0132, "step": 2754 }, { "epoch": 0.5119866195874373, "grad_norm": 0.5121529698371887, "learning_rate": 0.00016974255096374944, "loss": 1.1907, "step": 2755 }, { "epoch": 0.5121724586508084, "grad_norm": 0.9652408957481384, "learning_rate": 0.0001697215887070951, "loss": 1.1317, "step": 2756 }, { "epoch": 0.5123582977141795, "grad_norm": 0.5094935894012451, "learning_rate": 0.00016970062048700258, "loss": 0.9606, "step": 2757 }, { "epoch": 0.5125441367775506, "grad_norm": 0.6005422472953796, "learning_rate": 0.00016967964630526533, "loss": 0.9366, "step": 2758 }, { "epoch": 0.5127299758409217, "grad_norm": 0.647412121295929, "learning_rate": 0.00016965866616367733, "loss": 1.3646, "step": 2759 }, { "epoch": 0.5129158149042928, "grad_norm": 0.6332395672798157, "learning_rate": 0.0001696376800640331, "loss": 0.9569, "step": 2760 }, { "epoch": 0.513101653967664, "grad_norm": 0.5023225545883179, 
"learning_rate": 0.00016961668800812753, "loss": 0.8663, "step": 2761 }, { "epoch": 0.5132874930310352, "grad_norm": 0.554136335849762, "learning_rate": 0.0001695956899977562, "loss": 1.206, "step": 2762 }, { "epoch": 0.5134733320944063, "grad_norm": 0.5565813183784485, "learning_rate": 0.00016957468603471512, "loss": 1.0772, "step": 2763 }, { "epoch": 0.5136591711577774, "grad_norm": 3.6855132579803467, "learning_rate": 0.00016955367612080075, "loss": 2.6807, "step": 2764 }, { "epoch": 0.5138450102211485, "grad_norm": 0.8236385583877563, "learning_rate": 0.0001695326602578101, "loss": 1.0427, "step": 2765 }, { "epoch": 0.5140308492845196, "grad_norm": 0.5622274279594421, "learning_rate": 0.00016951163844754078, "loss": 0.9239, "step": 2766 }, { "epoch": 0.5142166883478907, "grad_norm": 0.580856442451477, "learning_rate": 0.0001694906106917908, "loss": 0.8981, "step": 2767 }, { "epoch": 0.5144025274112618, "grad_norm": 0.577639102935791, "learning_rate": 0.00016946957699235865, "loss": 1.1712, "step": 2768 }, { "epoch": 0.514588366474633, "grad_norm": 0.7837035655975342, "learning_rate": 0.00016944853735104348, "loss": 1.3395, "step": 2769 }, { "epoch": 0.5147742055380041, "grad_norm": 0.4840414524078369, "learning_rate": 0.0001694274917696448, "loss": 0.9164, "step": 2770 }, { "epoch": 0.5149600446013752, "grad_norm": 0.6124632954597473, "learning_rate": 0.00016940644024996272, "loss": 0.8041, "step": 2771 }, { "epoch": 0.5151458836647463, "grad_norm": 0.5656507015228271, "learning_rate": 0.00016938538279379772, "loss": 1.2659, "step": 2772 }, { "epoch": 0.5153317227281174, "grad_norm": 0.545505702495575, "learning_rate": 0.00016936431940295106, "loss": 1.0758, "step": 2773 }, { "epoch": 0.5155175617914886, "grad_norm": 0.5941258668899536, "learning_rate": 0.00016934325007922417, "loss": 0.9539, "step": 2774 }, { "epoch": 0.5157034008548597, "grad_norm": 0.5610512495040894, "learning_rate": 0.00016932217482441928, "loss": 1.2581, "step": 2775 }, { "epoch": 
0.5158892399182308, "grad_norm": 0.655250072479248, "learning_rate": 0.00016930109364033893, "loss": 1.1122, "step": 2776 }, { "epoch": 0.516075078981602, "grad_norm": 0.5420289635658264, "learning_rate": 0.00016928000652878628, "loss": 0.9652, "step": 2777 }, { "epoch": 0.5162609180449731, "grad_norm": 0.5724037289619446, "learning_rate": 0.00016925891349156493, "loss": 1.0721, "step": 2778 }, { "epoch": 0.5164467571083442, "grad_norm": 0.4908211827278137, "learning_rate": 0.000169237814530479, "loss": 1.1539, "step": 2779 }, { "epoch": 0.5166325961717153, "grad_norm": 0.690380334854126, "learning_rate": 0.0001692167096473332, "loss": 1.3287, "step": 2780 }, { "epoch": 0.5168184352350864, "grad_norm": 0.5678565502166748, "learning_rate": 0.00016919559884393258, "loss": 1.4168, "step": 2781 }, { "epoch": 0.5170042742984575, "grad_norm": 5.411264419555664, "learning_rate": 0.00016917448212208283, "loss": 3.2767, "step": 2782 }, { "epoch": 0.5171901133618286, "grad_norm": 0.5628544688224792, "learning_rate": 0.00016915335948359018, "loss": 1.141, "step": 2783 }, { "epoch": 0.5173759524251997, "grad_norm": 0.4003027081489563, "learning_rate": 0.0001691322309302612, "loss": 0.6452, "step": 2784 }, { "epoch": 0.5175617914885708, "grad_norm": 0.5444847345352173, "learning_rate": 0.00016911109646390306, "loss": 1.0142, "step": 2785 }, { "epoch": 0.5177476305519421, "grad_norm": 0.498196005821228, "learning_rate": 0.0001690899560863235, "loss": 0.918, "step": 2786 }, { "epoch": 0.5179334696153132, "grad_norm": 0.5275631546974182, "learning_rate": 0.00016906880979933068, "loss": 1.3385, "step": 2787 }, { "epoch": 0.5181193086786843, "grad_norm": 0.5868602991104126, "learning_rate": 0.00016904765760473328, "loss": 1.2404, "step": 2788 }, { "epoch": 0.5183051477420554, "grad_norm": 0.5333208441734314, "learning_rate": 0.0001690264995043405, "loss": 0.8179, "step": 2789 }, { "epoch": 0.5184909868054265, "grad_norm": 0.6185091137886047, "learning_rate": 0.00016900533549996203, 
"loss": 1.0924, "step": 2790 }, { "epoch": 0.5186768258687976, "grad_norm": 0.517379641532898, "learning_rate": 0.00016898416559340805, "loss": 0.8873, "step": 2791 }, { "epoch": 0.5188626649321687, "grad_norm": 0.6053199768066406, "learning_rate": 0.0001689629897864893, "loss": 0.9401, "step": 2792 }, { "epoch": 0.5190485039955398, "grad_norm": 0.6556801199913025, "learning_rate": 0.00016894180808101698, "loss": 1.0591, "step": 2793 }, { "epoch": 0.519234343058911, "grad_norm": 0.60248863697052, "learning_rate": 0.00016892062047880283, "loss": 1.1708, "step": 2794 }, { "epoch": 0.5194201821222821, "grad_norm": 0.5984275937080383, "learning_rate": 0.00016889942698165907, "loss": 0.9006, "step": 2795 }, { "epoch": 0.5196060211856532, "grad_norm": 0.5322675704956055, "learning_rate": 0.00016887822759139836, "loss": 1.0042, "step": 2796 }, { "epoch": 0.5197918602490244, "grad_norm": 0.5319322943687439, "learning_rate": 0.00016885702230983403, "loss": 1.0598, "step": 2797 }, { "epoch": 0.5199776993123955, "grad_norm": 0.6360406875610352, "learning_rate": 0.00016883581113877973, "loss": 0.9902, "step": 2798 }, { "epoch": 0.5201635383757666, "grad_norm": 0.5423364043235779, "learning_rate": 0.0001688145940800497, "loss": 1.0555, "step": 2799 }, { "epoch": 0.5203493774391377, "grad_norm": 0.5447130799293518, "learning_rate": 0.00016879337113545877, "loss": 1.0019, "step": 2800 }, { "epoch": 0.5205352165025088, "grad_norm": 0.6677650213241577, "learning_rate": 0.0001687721423068221, "loss": 1.3007, "step": 2801 }, { "epoch": 0.52072105556588, "grad_norm": 0.6144031882286072, "learning_rate": 0.00016875090759595546, "loss": 1.2085, "step": 2802 }, { "epoch": 0.5209068946292511, "grad_norm": 0.5660108327865601, "learning_rate": 0.0001687296670046751, "loss": 1.1709, "step": 2803 }, { "epoch": 0.5210927336926222, "grad_norm": 0.49554237723350525, "learning_rate": 0.0001687084205347978, "loss": 1.0253, "step": 2804 }, { "epoch": 0.5212785727559933, "grad_norm": 
0.7389078140258789, "learning_rate": 0.0001686871681881408, "loss": 1.0017, "step": 2805 }, { "epoch": 0.5214644118193644, "grad_norm": 0.598188042640686, "learning_rate": 0.0001686659099665218, "loss": 1.024, "step": 2806 }, { "epoch": 0.5216502508827355, "grad_norm": 0.5329128503799438, "learning_rate": 0.00016864464587175912, "loss": 1.3472, "step": 2807 }, { "epoch": 0.5218360899461066, "grad_norm": 0.6566879749298096, "learning_rate": 0.00016862337590567157, "loss": 1.0788, "step": 2808 }, { "epoch": 0.5220219290094777, "grad_norm": 0.6406189799308777, "learning_rate": 0.00016860210007007832, "loss": 1.0195, "step": 2809 }, { "epoch": 0.522207768072849, "grad_norm": 0.612294614315033, "learning_rate": 0.00016858081836679923, "loss": 1.1664, "step": 2810 }, { "epoch": 0.5223936071362201, "grad_norm": 0.5791414976119995, "learning_rate": 0.00016855953079765448, "loss": 1.0084, "step": 2811 }, { "epoch": 0.5225794461995912, "grad_norm": 0.552058219909668, "learning_rate": 0.00016853823736446487, "loss": 1.3248, "step": 2812 }, { "epoch": 0.5227652852629623, "grad_norm": 0.6082121729850769, "learning_rate": 0.0001685169380690517, "loss": 0.9751, "step": 2813 }, { "epoch": 0.5229511243263334, "grad_norm": 0.5541410446166992, "learning_rate": 0.00016849563291323675, "loss": 1.2256, "step": 2814 }, { "epoch": 0.5231369633897045, "grad_norm": 0.5580676794052124, "learning_rate": 0.0001684743218988423, "loss": 1.1095, "step": 2815 }, { "epoch": 0.5233228024530756, "grad_norm": 0.5919464230537415, "learning_rate": 0.00016845300502769107, "loss": 1.0043, "step": 2816 }, { "epoch": 0.5235086415164467, "grad_norm": 0.5584767460823059, "learning_rate": 0.00016843168230160638, "loss": 1.1576, "step": 2817 }, { "epoch": 0.5236944805798178, "grad_norm": 0.5251010060310364, "learning_rate": 0.000168410353722412, "loss": 1.1172, "step": 2818 }, { "epoch": 0.523880319643189, "grad_norm": 0.5411672592163086, "learning_rate": 0.00016838901929193222, "loss": 1.0392, "step": 2819 }, 
{ "epoch": 0.5240661587065601, "grad_norm": 0.5992206931114197, "learning_rate": 0.0001683676790119918, "loss": 1.056, "step": 2820 }, { "epoch": 0.5242519977699313, "grad_norm": 0.7021682858467102, "learning_rate": 0.00016834633288441607, "loss": 1.5045, "step": 2821 }, { "epoch": 0.5244378368333024, "grad_norm": 0.6393280625343323, "learning_rate": 0.00016832498091103073, "loss": 1.1608, "step": 2822 }, { "epoch": 0.5246236758966735, "grad_norm": 0.6091069579124451, "learning_rate": 0.0001683036230936621, "loss": 1.278, "step": 2823 }, { "epoch": 0.5248095149600446, "grad_norm": 0.6061083078384399, "learning_rate": 0.00016828225943413697, "loss": 1.0032, "step": 2824 }, { "epoch": 0.5249953540234157, "grad_norm": 0.5480764508247375, "learning_rate": 0.00016826088993428264, "loss": 1.1557, "step": 2825 }, { "epoch": 0.5251811930867868, "grad_norm": 0.5672341585159302, "learning_rate": 0.00016823951459592684, "loss": 1.1974, "step": 2826 }, { "epoch": 0.525367032150158, "grad_norm": 0.6575129628181458, "learning_rate": 0.00016821813342089787, "loss": 1.1193, "step": 2827 }, { "epoch": 0.5255528712135291, "grad_norm": 0.5681663751602173, "learning_rate": 0.00016819674641102453, "loss": 0.9461, "step": 2828 }, { "epoch": 0.5257387102769002, "grad_norm": 0.5635049343109131, "learning_rate": 0.00016817535356813604, "loss": 1.0586, "step": 2829 }, { "epoch": 0.5259245493402713, "grad_norm": 0.6108316779136658, "learning_rate": 0.00016815395489406227, "loss": 1.2576, "step": 2830 }, { "epoch": 0.5261103884036424, "grad_norm": 0.6750257611274719, "learning_rate": 0.0001681325503906334, "loss": 1.1878, "step": 2831 }, { "epoch": 0.5262962274670135, "grad_norm": 0.6346907019615173, "learning_rate": 0.00016811114005968025, "loss": 1.0485, "step": 2832 }, { "epoch": 0.5264820665303847, "grad_norm": 0.6290209889411926, "learning_rate": 0.00016808972390303407, "loss": 1.2745, "step": 2833 }, { "epoch": 0.5266679055937558, "grad_norm": 0.6105024218559265, "learning_rate": 
0.00016806830192252664, "loss": 1.0585, "step": 2834 }, { "epoch": 0.526853744657127, "grad_norm": 0.5764310359954834, "learning_rate": 0.00016804687411999024, "loss": 1.0018, "step": 2835 }, { "epoch": 0.5270395837204981, "grad_norm": 0.6331760287284851, "learning_rate": 0.00016802544049725763, "loss": 0.9499, "step": 2836 }, { "epoch": 0.5272254227838692, "grad_norm": 0.5340588092803955, "learning_rate": 0.0001680040010561621, "loss": 0.9208, "step": 2837 }, { "epoch": 0.5274112618472403, "grad_norm": 0.6449952721595764, "learning_rate": 0.00016798255579853735, "loss": 1.0791, "step": 2838 }, { "epoch": 0.5275971009106114, "grad_norm": 0.5729663372039795, "learning_rate": 0.00016796110472621766, "loss": 1.1602, "step": 2839 }, { "epoch": 0.5277829399739825, "grad_norm": 0.6265697479248047, "learning_rate": 0.00016793964784103782, "loss": 0.9851, "step": 2840 }, { "epoch": 0.5279687790373536, "grad_norm": 0.5786687731742859, "learning_rate": 0.00016791818514483306, "loss": 1.2305, "step": 2841 }, { "epoch": 0.5281546181007247, "grad_norm": 0.5565997362136841, "learning_rate": 0.00016789671663943912, "loss": 0.9734, "step": 2842 }, { "epoch": 0.528340457164096, "grad_norm": 0.5070616602897644, "learning_rate": 0.00016787524232669232, "loss": 0.9516, "step": 2843 }, { "epoch": 0.528526296227467, "grad_norm": 0.6512296199798584, "learning_rate": 0.0001678537622084293, "loss": 1.2005, "step": 2844 }, { "epoch": 0.5287121352908382, "grad_norm": 0.6731481552124023, "learning_rate": 0.00016783227628648734, "loss": 1.1391, "step": 2845 }, { "epoch": 0.5288979743542093, "grad_norm": 0.5603753328323364, "learning_rate": 0.0001678107845627042, "loss": 1.1172, "step": 2846 }, { "epoch": 0.5290838134175804, "grad_norm": 0.6080132126808167, "learning_rate": 0.0001677892870389181, "loss": 1.0396, "step": 2847 }, { "epoch": 0.5292696524809515, "grad_norm": 0.5874385237693787, "learning_rate": 0.00016776778371696774, "loss": 1.2197, "step": 2848 }, { "epoch": 0.5294554915443226, 
"grad_norm": 0.615352988243103, "learning_rate": 0.00016774627459869242, "loss": 1.2805, "step": 2849 }, { "epoch": 0.5296413306076937, "grad_norm": 0.6553687453269958, "learning_rate": 0.00016772475968593176, "loss": 1.0638, "step": 2850 }, { "epoch": 0.5298271696710649, "grad_norm": 0.6943587064743042, "learning_rate": 0.00016770323898052607, "loss": 1.0408, "step": 2851 }, { "epoch": 0.530013008734436, "grad_norm": 3.6750288009643555, "learning_rate": 0.00016768171248431602, "loss": 2.4315, "step": 2852 }, { "epoch": 0.5301988477978071, "grad_norm": 0.6327362060546875, "learning_rate": 0.00016766018019914283, "loss": 1.2868, "step": 2853 }, { "epoch": 0.5303846868611782, "grad_norm": 0.5573206543922424, "learning_rate": 0.00016763864212684818, "loss": 1.0675, "step": 2854 }, { "epoch": 0.5305705259245493, "grad_norm": 0.5509533286094666, "learning_rate": 0.0001676170982692743, "loss": 1.0416, "step": 2855 }, { "epoch": 0.5307563649879204, "grad_norm": 0.676953136920929, "learning_rate": 0.00016759554862826384, "loss": 1.4656, "step": 2856 }, { "epoch": 0.5309422040512916, "grad_norm": 0.6586757898330688, "learning_rate": 0.00016757399320566, "loss": 1.4603, "step": 2857 }, { "epoch": 0.5311280431146627, "grad_norm": 0.6420878171920776, "learning_rate": 0.00016755243200330648, "loss": 1.0874, "step": 2858 }, { "epoch": 0.5313138821780338, "grad_norm": 0.5888837575912476, "learning_rate": 0.00016753086502304749, "loss": 1.055, "step": 2859 }, { "epoch": 0.531499721241405, "grad_norm": 0.5788093209266663, "learning_rate": 0.00016750929226672762, "loss": 1.0413, "step": 2860 }, { "epoch": 0.5316855603047761, "grad_norm": 0.5567480325698853, "learning_rate": 0.0001674877137361921, "loss": 1.0751, "step": 2861 }, { "epoch": 0.5318713993681472, "grad_norm": 0.8277568817138672, "learning_rate": 0.00016746612943328656, "loss": 1.0339, "step": 2862 }, { "epoch": 0.5320572384315183, "grad_norm": 0.44506174325942993, "learning_rate": 0.00016744453935985714, "loss": 0.963, 
"step": 2863 }, { "epoch": 0.5322430774948894, "grad_norm": 0.62749844789505, "learning_rate": 0.00016742294351775053, "loss": 1.2259, "step": 2864 }, { "epoch": 0.5324289165582605, "grad_norm": 0.5781571865081787, "learning_rate": 0.00016740134190881383, "loss": 1.0872, "step": 2865 }, { "epoch": 0.5326147556216316, "grad_norm": 0.5317525863647461, "learning_rate": 0.00016737973453489468, "loss": 0.9075, "step": 2866 }, { "epoch": 0.5328005946850027, "grad_norm": 0.588940441608429, "learning_rate": 0.0001673581213978412, "loss": 1.0165, "step": 2867 }, { "epoch": 0.532986433748374, "grad_norm": 0.57525634765625, "learning_rate": 0.00016733650249950202, "loss": 1.0519, "step": 2868 }, { "epoch": 0.5331722728117451, "grad_norm": 0.6532301902770996, "learning_rate": 0.00016731487784172626, "loss": 1.1657, "step": 2869 }, { "epoch": 0.5333581118751162, "grad_norm": 0.5266667008399963, "learning_rate": 0.0001672932474263635, "loss": 1.1751, "step": 2870 }, { "epoch": 0.5335439509384873, "grad_norm": 0.6372961401939392, "learning_rate": 0.00016727161125526388, "loss": 0.9298, "step": 2871 }, { "epoch": 0.5337297900018584, "grad_norm": 0.5796231031417847, "learning_rate": 0.0001672499693302779, "loss": 1.0757, "step": 2872 }, { "epoch": 0.5339156290652295, "grad_norm": 0.6466894149780273, "learning_rate": 0.00016722832165325672, "loss": 1.1214, "step": 2873 }, { "epoch": 0.5341014681286006, "grad_norm": 0.5859202146530151, "learning_rate": 0.00016720666822605192, "loss": 1.1376, "step": 2874 }, { "epoch": 0.5342873071919717, "grad_norm": 1.9871423244476318, "learning_rate": 0.0001671850090505155, "loss": 1.9804, "step": 2875 }, { "epoch": 0.5344731462553429, "grad_norm": 0.5480605959892273, "learning_rate": 0.00016716334412850007, "loss": 0.9871, "step": 2876 }, { "epoch": 0.534658985318714, "grad_norm": 0.6779521107673645, "learning_rate": 0.00016714167346185862, "loss": 1.1147, "step": 2877 }, { "epoch": 0.5348448243820851, "grad_norm": 0.5643601417541504, 
"learning_rate": 0.00016711999705244475, "loss": 1.1982, "step": 2878 }, { "epoch": 0.5350306634454562, "grad_norm": 0.662219762802124, "learning_rate": 0.0001670983149021125, "loss": 0.9183, "step": 2879 }, { "epoch": 0.5352165025088274, "grad_norm": 0.6409224271774292, "learning_rate": 0.00016707662701271632, "loss": 1.1694, "step": 2880 }, { "epoch": 0.5354023415721985, "grad_norm": 0.5614042282104492, "learning_rate": 0.00016705493338611124, "loss": 0.9345, "step": 2881 }, { "epoch": 0.5355881806355696, "grad_norm": 0.6522765159606934, "learning_rate": 0.00016703323402415286, "loss": 0.7476, "step": 2882 }, { "epoch": 0.5357740196989407, "grad_norm": 0.6119543313980103, "learning_rate": 0.00016701152892869701, "loss": 0.9991, "step": 2883 }, { "epoch": 0.5359598587623119, "grad_norm": 0.611212968826294, "learning_rate": 0.0001669898181016003, "loss": 0.9248, "step": 2884 }, { "epoch": 0.536145697825683, "grad_norm": 0.5680248141288757, "learning_rate": 0.00016696810154471966, "loss": 0.9241, "step": 2885 }, { "epoch": 0.5363315368890541, "grad_norm": 0.7113536596298218, "learning_rate": 0.00016694637925991255, "loss": 1.2047, "step": 2886 }, { "epoch": 0.5365173759524252, "grad_norm": 0.5832658410072327, "learning_rate": 0.00016692465124903695, "loss": 1.1214, "step": 2887 }, { "epoch": 0.5367032150157963, "grad_norm": 0.6345791816711426, "learning_rate": 0.00016690291751395129, "loss": 1.2211, "step": 2888 }, { "epoch": 0.5368890540791674, "grad_norm": 0.7019723057746887, "learning_rate": 0.00016688117805651446, "loss": 1.2782, "step": 2889 }, { "epoch": 0.5370748931425385, "grad_norm": 0.5996314883232117, "learning_rate": 0.00016685943287858595, "loss": 1.1402, "step": 2890 }, { "epoch": 0.5372607322059096, "grad_norm": 0.6830651164054871, "learning_rate": 0.00016683768198202562, "loss": 1.2012, "step": 2891 }, { "epoch": 0.5374465712692807, "grad_norm": 0.5295338034629822, "learning_rate": 0.00016681592536869392, "loss": 1.2169, "step": 2892 }, { "epoch": 
0.537632410332652, "grad_norm": 0.676923394203186, "learning_rate": 0.0001667941630404517, "loss": 1.3105, "step": 2893 }, { "epoch": 0.5378182493960231, "grad_norm": 0.6796035170555115, "learning_rate": 0.00016677239499916037, "loss": 1.2213, "step": 2894 }, { "epoch": 0.5380040884593942, "grad_norm": 0.6050610542297363, "learning_rate": 0.0001667506212466818, "loss": 0.9687, "step": 2895 }, { "epoch": 0.5381899275227653, "grad_norm": 0.5061158537864685, "learning_rate": 0.0001667288417848783, "loss": 0.9394, "step": 2896 }, { "epoch": 0.5383757665861364, "grad_norm": 0.7453888058662415, "learning_rate": 0.0001667070566156127, "loss": 1.2559, "step": 2897 }, { "epoch": 0.5385616056495075, "grad_norm": 0.5495951771736145, "learning_rate": 0.00016668526574074846, "loss": 1.0272, "step": 2898 }, { "epoch": 0.5387474447128786, "grad_norm": 0.5371149182319641, "learning_rate": 0.00016666346916214926, "loss": 1.1627, "step": 2899 }, { "epoch": 0.5389332837762497, "grad_norm": 0.6507683396339417, "learning_rate": 0.00016664166688167947, "loss": 1.024, "step": 2900 }, { "epoch": 0.5391191228396209, "grad_norm": 0.6115374565124512, "learning_rate": 0.0001666198589012039, "loss": 1.0213, "step": 2901 }, { "epoch": 0.539304961902992, "grad_norm": 0.6327989101409912, "learning_rate": 0.0001665980452225878, "loss": 1.1811, "step": 2902 }, { "epoch": 0.5394908009663631, "grad_norm": 0.6254445910453796, "learning_rate": 0.00016657622584769697, "loss": 1.0541, "step": 2903 }, { "epoch": 0.5396766400297343, "grad_norm": 0.6406470537185669, "learning_rate": 0.0001665544007783976, "loss": 0.9014, "step": 2904 }, { "epoch": 0.5398624790931054, "grad_norm": 0.5992531776428223, "learning_rate": 0.00016653257001655652, "loss": 1.2726, "step": 2905 }, { "epoch": 0.5400483181564765, "grad_norm": 0.7481580972671509, "learning_rate": 0.00016651073356404098, "loss": 1.0581, "step": 2906 }, { "epoch": 0.5402341572198476, "grad_norm": 0.5833855271339417, "learning_rate": 0.0001664888914227186, 
"loss": 1.216, "step": 2907 }, { "epoch": 0.5404199962832187, "grad_norm": 0.5238146185874939, "learning_rate": 0.00016646704359445764, "loss": 1.0835, "step": 2908 }, { "epoch": 0.5406058353465899, "grad_norm": 0.5387495160102844, "learning_rate": 0.0001664451900811268, "loss": 0.9602, "step": 2909 }, { "epoch": 0.540791674409961, "grad_norm": 0.6715323328971863, "learning_rate": 0.00016642333088459524, "loss": 0.8797, "step": 2910 }, { "epoch": 0.5409775134733321, "grad_norm": 0.6089659333229065, "learning_rate": 0.0001664014660067326, "loss": 0.9706, "step": 2911 }, { "epoch": 0.5411633525367032, "grad_norm": 0.5779519081115723, "learning_rate": 0.0001663795954494091, "loss": 1.1606, "step": 2912 }, { "epoch": 0.5413491916000743, "grad_norm": 0.6019100546836853, "learning_rate": 0.0001663577192144953, "loss": 1.1748, "step": 2913 }, { "epoch": 0.5415350306634454, "grad_norm": 0.6508989334106445, "learning_rate": 0.0001663358373038624, "loss": 1.1944, "step": 2914 }, { "epoch": 0.5417208697268165, "grad_norm": 0.630411684513092, "learning_rate": 0.00016631394971938193, "loss": 1.1656, "step": 2915 }, { "epoch": 0.5419067087901877, "grad_norm": 0.7168655395507812, "learning_rate": 0.00016629205646292604, "loss": 1.2304, "step": 2916 }, { "epoch": 0.5420925478535589, "grad_norm": 0.6478018760681152, "learning_rate": 0.00016627015753636727, "loss": 1.1461, "step": 2917 }, { "epoch": 0.54227838691693, "grad_norm": 0.7113860845565796, "learning_rate": 0.0001662482529415787, "loss": 1.3356, "step": 2918 }, { "epoch": 0.5424642259803011, "grad_norm": 0.6111336350440979, "learning_rate": 0.0001662263426804339, "loss": 1.1173, "step": 2919 }, { "epoch": 0.5426500650436722, "grad_norm": 0.6446161866188049, "learning_rate": 0.00016620442675480685, "loss": 1.0263, "step": 2920 }, { "epoch": 0.5428359041070433, "grad_norm": 0.7231898307800293, "learning_rate": 0.0001661825051665721, "loss": 1.3269, "step": 2921 }, { "epoch": 0.5430217431704144, "grad_norm": 4.894625663757324, 
"learning_rate": 0.0001661605779176046, "loss": 2.7246, "step": 2922 }, { "epoch": 0.5432075822337855, "grad_norm": 0.5375463962554932, "learning_rate": 0.00016613864500977996, "loss": 1.192, "step": 2923 }, { "epoch": 0.5433934212971566, "grad_norm": 0.5629100203514099, "learning_rate": 0.000166116706444974, "loss": 0.7375, "step": 2924 }, { "epoch": 0.5435792603605278, "grad_norm": 0.5796539783477783, "learning_rate": 0.00016609476222506327, "loss": 0.9775, "step": 2925 }, { "epoch": 0.543765099423899, "grad_norm": 0.5823481678962708, "learning_rate": 0.00016607281235192468, "loss": 1.0259, "step": 2926 }, { "epoch": 0.54395093848727, "grad_norm": 0.7050068378448486, "learning_rate": 0.00016605085682743566, "loss": 1.2885, "step": 2927 }, { "epoch": 0.5441367775506412, "grad_norm": 0.5624426007270813, "learning_rate": 0.00016602889565347408, "loss": 1.0661, "step": 2928 }, { "epoch": 0.5443226166140123, "grad_norm": 0.6064251065254211, "learning_rate": 0.00016600692883191834, "loss": 1.0146, "step": 2929 }, { "epoch": 0.5445084556773834, "grad_norm": 0.5572468638420105, "learning_rate": 0.00016598495636464735, "loss": 0.8867, "step": 2930 }, { "epoch": 0.5446942947407545, "grad_norm": 0.7270289063453674, "learning_rate": 0.0001659629782535404, "loss": 1.1612, "step": 2931 }, { "epoch": 0.5448801338041256, "grad_norm": 0.5419548749923706, "learning_rate": 0.00016594099450047736, "loss": 0.9358, "step": 2932 }, { "epoch": 0.5450659728674967, "grad_norm": 0.6731230616569519, "learning_rate": 0.00016591900510733854, "loss": 0.9637, "step": 2933 }, { "epoch": 0.5452518119308679, "grad_norm": 0.6420273184776306, "learning_rate": 0.00016589701007600476, "loss": 1.2075, "step": 2934 }, { "epoch": 0.545437650994239, "grad_norm": 0.48401394486427307, "learning_rate": 0.00016587500940835727, "loss": 0.8759, "step": 2935 }, { "epoch": 0.5456234900576101, "grad_norm": 0.6045053601264954, "learning_rate": 0.00016585300310627786, "loss": 1.1209, "step": 2936 }, { "epoch": 
0.5458093291209812, "grad_norm": 0.5479567050933838, "learning_rate": 0.00016583099117164877, "loss": 1.0256, "step": 2937 }, { "epoch": 0.5459951681843523, "grad_norm": 0.5547533631324768, "learning_rate": 0.00016580897360635272, "loss": 0.9392, "step": 2938 }, { "epoch": 0.5461810072477234, "grad_norm": 0.7662529349327087, "learning_rate": 0.00016578695041227293, "loss": 1.0937, "step": 2939 }, { "epoch": 0.5463668463110946, "grad_norm": 0.5871464014053345, "learning_rate": 0.0001657649215912931, "loss": 0.9771, "step": 2940 }, { "epoch": 0.5465526853744657, "grad_norm": 0.5434662699699402, "learning_rate": 0.00016574288714529735, "loss": 0.9684, "step": 2941 }, { "epoch": 0.5467385244378369, "grad_norm": 0.577323853969574, "learning_rate": 0.00016572084707617042, "loss": 1.1616, "step": 2942 }, { "epoch": 0.546924363501208, "grad_norm": 0.7086620330810547, "learning_rate": 0.00016569880138579736, "loss": 1.2126, "step": 2943 }, { "epoch": 0.5471102025645791, "grad_norm": 0.6588655710220337, "learning_rate": 0.00016567675007606383, "loss": 1.1606, "step": 2944 }, { "epoch": 0.5472960416279502, "grad_norm": 0.6107214093208313, "learning_rate": 0.00016565469314885594, "loss": 0.9897, "step": 2945 }, { "epoch": 0.5474818806913213, "grad_norm": 0.6048929691314697, "learning_rate": 0.0001656326306060602, "loss": 0.946, "step": 2946 }, { "epoch": 0.5476677197546924, "grad_norm": 0.6101149916648865, "learning_rate": 0.00016561056244956377, "loss": 1.0612, "step": 2947 }, { "epoch": 0.5478535588180635, "grad_norm": 0.6024320125579834, "learning_rate": 0.00016558848868125411, "loss": 1.0689, "step": 2948 }, { "epoch": 0.5480393978814346, "grad_norm": 0.7112870216369629, "learning_rate": 0.00016556640930301926, "loss": 0.9647, "step": 2949 }, { "epoch": 0.5482252369448058, "grad_norm": 0.6747824549674988, "learning_rate": 0.0001655443243167477, "loss": 1.0305, "step": 2950 }, { "epoch": 0.548411076008177, "grad_norm": 0.5641652941703796, "learning_rate": 
0.00016552223372432844, "loss": 1.1324, "step": 2951 }, { "epoch": 0.5485969150715481, "grad_norm": 0.6192693114280701, "learning_rate": 0.0001655001375276509, "loss": 1.1574, "step": 2952 }, { "epoch": 0.5487827541349192, "grad_norm": 0.607811689376831, "learning_rate": 0.00016547803572860504, "loss": 1.0986, "step": 2953 }, { "epoch": 0.5489685931982903, "grad_norm": 0.627953827381134, "learning_rate": 0.00016545592832908126, "loss": 0.9785, "step": 2954 }, { "epoch": 0.5491544322616614, "grad_norm": 0.5804354548454285, "learning_rate": 0.00016543381533097048, "loss": 1.2391, "step": 2955 }, { "epoch": 0.5493402713250325, "grad_norm": 0.7040599584579468, "learning_rate": 0.00016541169673616406, "loss": 0.8315, "step": 2956 }, { "epoch": 0.5495261103884036, "grad_norm": 0.9636831879615784, "learning_rate": 0.00016538957254655384, "loss": 1.0614, "step": 2957 }, { "epoch": 0.5497119494517748, "grad_norm": 0.679150402545929, "learning_rate": 0.00016536744276403212, "loss": 1.0342, "step": 2958 }, { "epoch": 0.5498977885151459, "grad_norm": 0.5816363096237183, "learning_rate": 0.00016534530739049176, "loss": 1.2106, "step": 2959 }, { "epoch": 0.550083627578517, "grad_norm": 0.8184868693351746, "learning_rate": 0.00016532316642782605, "loss": 1.291, "step": 2960 }, { "epoch": 0.5502694666418881, "grad_norm": 0.565895676612854, "learning_rate": 0.0001653010198779287, "loss": 1.245, "step": 2961 }, { "epoch": 0.5504553057052592, "grad_norm": 0.6959781050682068, "learning_rate": 0.000165278867742694, "loss": 1.3576, "step": 2962 }, { "epoch": 0.5506411447686304, "grad_norm": 0.7933211922645569, "learning_rate": 0.00016525671002401668, "loss": 1.2011, "step": 2963 }, { "epoch": 0.5508269838320015, "grad_norm": 0.5321523547172546, "learning_rate": 0.00016523454672379191, "loss": 1.0696, "step": 2964 }, { "epoch": 0.5510128228953726, "grad_norm": 0.5358680486679077, "learning_rate": 0.00016521237784391534, "loss": 1.07, "step": 2965 }, { "epoch": 0.5511986619587437, 
"grad_norm": 0.5598793029785156, "learning_rate": 0.00016519020338628317, "loss": 1.2435, "step": 2966 }, { "epoch": 0.5513845010221149, "grad_norm": 0.5243983268737793, "learning_rate": 0.000165168023352792, "loss": 1.1711, "step": 2967 }, { "epoch": 0.551570340085486, "grad_norm": 0.5723912715911865, "learning_rate": 0.00016514583774533893, "loss": 1.0636, "step": 2968 }, { "epoch": 0.5517561791488571, "grad_norm": 2.9442546367645264, "learning_rate": 0.0001651236465658216, "loss": 2.5894, "step": 2969 }, { "epoch": 0.5519420182122282, "grad_norm": 0.7351272702217102, "learning_rate": 0.000165101449816138, "loss": 0.9057, "step": 2970 }, { "epoch": 0.5521278572755993, "grad_norm": 0.60108882188797, "learning_rate": 0.0001650792474981867, "loss": 0.9787, "step": 2971 }, { "epoch": 0.5523136963389704, "grad_norm": 0.6729848980903625, "learning_rate": 0.0001650570396138667, "loss": 1.1786, "step": 2972 }, { "epoch": 0.5524995354023415, "grad_norm": 0.6495280265808105, "learning_rate": 0.0001650348261650775, "loss": 1.1397, "step": 2973 }, { "epoch": 0.5526853744657126, "grad_norm": 0.7092133164405823, "learning_rate": 0.00016501260715371906, "loss": 1.346, "step": 2974 }, { "epoch": 0.5528712135290839, "grad_norm": 0.5815352201461792, "learning_rate": 0.0001649903825816918, "loss": 1.1502, "step": 2975 }, { "epoch": 0.553057052592455, "grad_norm": 0.49748045206069946, "learning_rate": 0.00016496815245089672, "loss": 1.0467, "step": 2976 }, { "epoch": 0.5532428916558261, "grad_norm": 0.6422550082206726, "learning_rate": 0.0001649459167632351, "loss": 1.1578, "step": 2977 }, { "epoch": 0.5534287307191972, "grad_norm": 0.6007086634635925, "learning_rate": 0.00016492367552060886, "loss": 1.0099, "step": 2978 }, { "epoch": 0.5536145697825683, "grad_norm": 0.5576412081718445, "learning_rate": 0.00016490142872492032, "loss": 1.0978, "step": 2979 }, { "epoch": 0.5538004088459394, "grad_norm": 0.8643796443939209, "learning_rate": 0.00016487917637807232, "loss": 1.209, 
"step": 2980 }, { "epoch": 0.5539862479093105, "grad_norm": 0.5304890871047974, "learning_rate": 0.00016485691848196812, "loss": 1.0879, "step": 2981 }, { "epoch": 0.5541720869726816, "grad_norm": 0.6655973196029663, "learning_rate": 0.00016483465503851154, "loss": 1.0499, "step": 2982 }, { "epoch": 0.5543579260360528, "grad_norm": 0.6618862748146057, "learning_rate": 0.00016481238604960677, "loss": 1.0499, "step": 2983 }, { "epoch": 0.5545437650994239, "grad_norm": 0.6254931092262268, "learning_rate": 0.00016479011151715857, "loss": 1.2534, "step": 2984 }, { "epoch": 0.554729604162795, "grad_norm": 0.5860945582389832, "learning_rate": 0.0001647678314430721, "loss": 1.1679, "step": 2985 }, { "epoch": 0.5549154432261661, "grad_norm": 0.6401891112327576, "learning_rate": 0.000164745545829253, "loss": 1.1576, "step": 2986 }, { "epoch": 0.5551012822895373, "grad_norm": 0.5567334890365601, "learning_rate": 0.00016472325467760743, "loss": 1.0597, "step": 2987 }, { "epoch": 0.5552871213529084, "grad_norm": 0.5591250061988831, "learning_rate": 0.000164700957990042, "loss": 0.9265, "step": 2988 }, { "epoch": 0.5554729604162795, "grad_norm": 0.6554935574531555, "learning_rate": 0.0001646786557684638, "loss": 0.8308, "step": 2989 }, { "epoch": 0.5556587994796506, "grad_norm": 0.7079557180404663, "learning_rate": 0.00016465634801478044, "loss": 1.1058, "step": 2990 }, { "epoch": 0.5558446385430218, "grad_norm": 0.613620936870575, "learning_rate": 0.0001646340347308998, "loss": 1.1594, "step": 2991 }, { "epoch": 0.5560304776063929, "grad_norm": 0.5244887471199036, "learning_rate": 0.00016461171591873055, "loss": 0.7287, "step": 2992 }, { "epoch": 0.556216316669764, "grad_norm": 0.832699179649353, "learning_rate": 0.00016458939158018156, "loss": 1.0544, "step": 2993 }, { "epoch": 0.5564021557331351, "grad_norm": 0.546475350856781, "learning_rate": 0.00016456706171716232, "loss": 0.9461, "step": 2994 }, { "epoch": 0.5565879947965062, "grad_norm": 0.6768338084220886, 
"learning_rate": 0.00016454472633158274, "loss": 1.0346, "step": 2995 }, { "epoch": 0.5567738338598773, "grad_norm": 0.7314834594726562, "learning_rate": 0.0001645223854253532, "loss": 1.1819, "step": 2996 }, { "epoch": 0.5569596729232484, "grad_norm": 0.600952684879303, "learning_rate": 0.00016450003900038461, "loss": 1.2066, "step": 2997 }, { "epoch": 0.5571455119866195, "grad_norm": 0.4913482367992401, "learning_rate": 0.00016447768705858828, "loss": 0.8089, "step": 2998 }, { "epoch": 0.5573313510499907, "grad_norm": 0.6177974343299866, "learning_rate": 0.00016445532960187602, "loss": 1.179, "step": 2999 }, { "epoch": 0.5575171901133619, "grad_norm": 0.5990849137306213, "learning_rate": 0.0001644329666321601, "loss": 1.0434, "step": 3000 }, { "epoch": 0.557703029176733, "grad_norm": 0.685162365436554, "learning_rate": 0.0001644105981513533, "loss": 0.5839, "step": 3001 }, { "epoch": 0.5578888682401041, "grad_norm": 0.5808719992637634, "learning_rate": 0.00016438822416136881, "loss": 1.2187, "step": 3002 }, { "epoch": 0.5580747073034752, "grad_norm": 0.5311502814292908, "learning_rate": 0.00016436584466412036, "loss": 1.1093, "step": 3003 }, { "epoch": 0.5582605463668463, "grad_norm": 0.525296688079834, "learning_rate": 0.0001643434596615221, "loss": 1.0029, "step": 3004 }, { "epoch": 0.5584463854302174, "grad_norm": 0.7411049604415894, "learning_rate": 0.0001643210691554887, "loss": 1.2051, "step": 3005 }, { "epoch": 0.5586322244935885, "grad_norm": 0.6270830631256104, "learning_rate": 0.00016429867314793518, "loss": 0.9569, "step": 3006 }, { "epoch": 0.5588180635569596, "grad_norm": 0.6153439283370972, "learning_rate": 0.00016427627164077724, "loss": 1.1596, "step": 3007 }, { "epoch": 0.5590039026203308, "grad_norm": 0.5672575831413269, "learning_rate": 0.00016425386463593083, "loss": 0.969, "step": 3008 }, { "epoch": 0.559189741683702, "grad_norm": 0.673835813999176, "learning_rate": 0.00016423145213531251, "loss": 1.428, "step": 3009 }, { "epoch": 
0.559375580747073, "grad_norm": 0.5811061859130859, "learning_rate": 0.00016420903414083926, "loss": 1.0928, "step": 3010 }, { "epoch": 0.5595614198104442, "grad_norm": 0.5861353874206543, "learning_rate": 0.00016418661065442858, "loss": 1.1264, "step": 3011 }, { "epoch": 0.5597472588738153, "grad_norm": 0.6847012042999268, "learning_rate": 0.0001641641816779983, "loss": 1.3918, "step": 3012 }, { "epoch": 0.5599330979371864, "grad_norm": 0.6058685779571533, "learning_rate": 0.00016414174721346696, "loss": 1.066, "step": 3013 }, { "epoch": 0.5601189370005575, "grad_norm": 0.6190561056137085, "learning_rate": 0.00016411930726275333, "loss": 1.1328, "step": 3014 }, { "epoch": 0.5603047760639286, "grad_norm": 0.6666110754013062, "learning_rate": 0.00016409686182777673, "loss": 1.258, "step": 3015 }, { "epoch": 0.5604906151272998, "grad_norm": 0.59378981590271, "learning_rate": 0.00016407441091045706, "loss": 1.2295, "step": 3016 }, { "epoch": 0.5606764541906709, "grad_norm": 0.6505338549613953, "learning_rate": 0.0001640519545127145, "loss": 1.1902, "step": 3017 }, { "epoch": 0.560862293254042, "grad_norm": 0.5768793225288391, "learning_rate": 0.00016402949263646984, "loss": 0.9078, "step": 3018 }, { "epoch": 0.5610481323174131, "grad_norm": 0.5398329496383667, "learning_rate": 0.00016400702528364432, "loss": 1.4141, "step": 3019 }, { "epoch": 0.5612339713807842, "grad_norm": 0.6005418300628662, "learning_rate": 0.0001639845524561596, "loss": 1.0095, "step": 3020 }, { "epoch": 0.5614198104441553, "grad_norm": 0.5389949679374695, "learning_rate": 0.00016396207415593777, "loss": 0.9768, "step": 3021 }, { "epoch": 0.5616056495075264, "grad_norm": 0.7075584530830383, "learning_rate": 0.0001639395903849015, "loss": 0.999, "step": 3022 }, { "epoch": 0.5617914885708976, "grad_norm": 0.5680679082870483, "learning_rate": 0.0001639171011449739, "loss": 1.2847, "step": 3023 }, { "epoch": 0.5619773276342688, "grad_norm": 0.8404898643493652, "learning_rate": 0.00016389460643807845, 
"loss": 1.3546, "step": 3024 }, { "epoch": 0.5621631666976399, "grad_norm": 0.5268566012382507, "learning_rate": 0.0001638721062661393, "loss": 1.1228, "step": 3025 }, { "epoch": 0.562349005761011, "grad_norm": 0.591393232345581, "learning_rate": 0.0001638496006310808, "loss": 0.9728, "step": 3026 }, { "epoch": 0.5625348448243821, "grad_norm": 0.5140171051025391, "learning_rate": 0.00016382708953482795, "loss": 1.0738, "step": 3027 }, { "epoch": 0.5627206838877532, "grad_norm": 0.5632948279380798, "learning_rate": 0.00016380457297930623, "loss": 1.1832, "step": 3028 }, { "epoch": 0.5629065229511243, "grad_norm": 0.6482004523277283, "learning_rate": 0.00016378205096644145, "loss": 1.2979, "step": 3029 }, { "epoch": 0.5630923620144954, "grad_norm": 0.6070230007171631, "learning_rate": 0.00016375952349816, "loss": 1.1065, "step": 3030 }, { "epoch": 0.5632782010778665, "grad_norm": 0.5631871223449707, "learning_rate": 0.0001637369905763887, "loss": 1.1694, "step": 3031 }, { "epoch": 0.5634640401412377, "grad_norm": 0.6535444259643555, "learning_rate": 0.00016371445220305487, "loss": 1.1037, "step": 3032 }, { "epoch": 0.5636498792046089, "grad_norm": 0.5825177431106567, "learning_rate": 0.0001636919083800862, "loss": 0.8396, "step": 3033 }, { "epoch": 0.56383571826798, "grad_norm": 0.49941158294677734, "learning_rate": 0.00016366935910941098, "loss": 0.8458, "step": 3034 }, { "epoch": 0.5640215573313511, "grad_norm": 0.5689782500267029, "learning_rate": 0.00016364680439295785, "loss": 1.1708, "step": 3035 }, { "epoch": 0.5642073963947222, "grad_norm": 8.309523582458496, "learning_rate": 0.00016362424423265598, "loss": 5.9499, "step": 3036 }, { "epoch": 0.5643932354580933, "grad_norm": 0.7303856015205383, "learning_rate": 0.000163601678630435, "loss": 0.9228, "step": 3037 }, { "epoch": 0.5645790745214644, "grad_norm": 0.6042539477348328, "learning_rate": 0.00016357910758822502, "loss": 1.0577, "step": 3038 }, { "epoch": 0.5647649135848355, "grad_norm": 
0.7997288703918457, "learning_rate": 0.0001635565311079565, "loss": 1.2253, "step": 3039 }, { "epoch": 0.5649507526482066, "grad_norm": 0.554591715335846, "learning_rate": 0.00016353394919156052, "loss": 1.1309, "step": 3040 }, { "epoch": 0.5651365917115778, "grad_norm": 0.6072766184806824, "learning_rate": 0.0001635113618409686, "loss": 1.0735, "step": 3041 }, { "epoch": 0.5653224307749489, "grad_norm": 0.693913996219635, "learning_rate": 0.00016348876905811257, "loss": 0.9651, "step": 3042 }, { "epoch": 0.56550826983832, "grad_norm": 0.5827034115791321, "learning_rate": 0.00016346617084492493, "loss": 0.9317, "step": 3043 }, { "epoch": 0.5656941089016911, "grad_norm": 0.6475703716278076, "learning_rate": 0.00016344356720333854, "loss": 1.1251, "step": 3044 }, { "epoch": 0.5658799479650622, "grad_norm": 0.5898675322532654, "learning_rate": 0.00016342095813528672, "loss": 1.0938, "step": 3045 }, { "epoch": 0.5660657870284334, "grad_norm": 0.5563342571258545, "learning_rate": 0.00016339834364270325, "loss": 1.0096, "step": 3046 }, { "epoch": 0.5662516260918045, "grad_norm": 0.6287338733673096, "learning_rate": 0.0001633757237275225, "loss": 1.2837, "step": 3047 }, { "epoch": 0.5664374651551756, "grad_norm": 0.71528160572052, "learning_rate": 0.00016335309839167906, "loss": 1.1279, "step": 3048 }, { "epoch": 0.5666233042185468, "grad_norm": 0.6881284117698669, "learning_rate": 0.00016333046763710823, "loss": 1.1145, "step": 3049 }, { "epoch": 0.5668091432819179, "grad_norm": 0.5819098353385925, "learning_rate": 0.00016330783146574558, "loss": 1.2411, "step": 3050 }, { "epoch": 0.566994982345289, "grad_norm": 0.5303094983100891, "learning_rate": 0.0001632851898795273, "loss": 1.1144, "step": 3051 }, { "epoch": 0.5671808214086601, "grad_norm": 0.7490119934082031, "learning_rate": 0.00016326254288038998, "loss": 1.1791, "step": 3052 }, { "epoch": 0.5673666604720312, "grad_norm": 0.5890512466430664, "learning_rate": 0.00016323989047027063, "loss": 0.752, "step": 3053 }, 
{ "epoch": 0.5675524995354023, "grad_norm": 0.5626150965690613, "learning_rate": 0.00016321723265110675, "loss": 1.1475, "step": 3054 }, { "epoch": 0.5677383385987734, "grad_norm": 0.6347929239273071, "learning_rate": 0.00016319456942483633, "loss": 1.4247, "step": 3055 }, { "epoch": 0.5679241776621445, "grad_norm": 0.6369238495826721, "learning_rate": 0.00016317190079339784, "loss": 1.1994, "step": 3056 }, { "epoch": 0.5681100167255158, "grad_norm": 0.5677321553230286, "learning_rate": 0.0001631492267587301, "loss": 1.1179, "step": 3057 }, { "epoch": 0.5682958557888869, "grad_norm": 0.6081541776657104, "learning_rate": 0.00016312654732277254, "loss": 1.1653, "step": 3058 }, { "epoch": 0.568481694852258, "grad_norm": 0.5795355439186096, "learning_rate": 0.0001631038624874649, "loss": 0.839, "step": 3059 }, { "epoch": 0.5686675339156291, "grad_norm": 0.7947715520858765, "learning_rate": 0.00016308117225474758, "loss": 1.2332, "step": 3060 }, { "epoch": 0.5688533729790002, "grad_norm": 0.5681910514831543, "learning_rate": 0.00016305847662656127, "loss": 1.0693, "step": 3061 }, { "epoch": 0.5690392120423713, "grad_norm": 0.6374632716178894, "learning_rate": 0.00016303577560484713, "loss": 1.0293, "step": 3062 }, { "epoch": 0.5692250511057424, "grad_norm": 0.5624377131462097, "learning_rate": 0.00016301306919154685, "loss": 1.0867, "step": 3063 }, { "epoch": 0.5694108901691135, "grad_norm": 0.5568638443946838, "learning_rate": 0.0001629903573886026, "loss": 1.2122, "step": 3064 }, { "epoch": 0.5695967292324847, "grad_norm": 0.7387270927429199, "learning_rate": 0.00016296764019795693, "loss": 1.1513, "step": 3065 }, { "epoch": 0.5697825682958558, "grad_norm": 0.6199186444282532, "learning_rate": 0.0001629449176215529, "loss": 1.0889, "step": 3066 }, { "epoch": 0.5699684073592269, "grad_norm": 0.5959807634353638, "learning_rate": 0.00016292218966133405, "loss": 0.9602, "step": 3067 }, { "epoch": 0.570154246422598, "grad_norm": 0.555349588394165, "learning_rate": 
0.0001628994563192443, "loss": 1.3097, "step": 3068 }, { "epoch": 0.5703400854859692, "grad_norm": 3.8160369396209717, "learning_rate": 0.00016287671759722814, "loss": 2.6136, "step": 3069 }, { "epoch": 0.5705259245493403, "grad_norm": 0.6192765831947327, "learning_rate": 0.00016285397349723039, "loss": 1.293, "step": 3070 }, { "epoch": 0.5707117636127114, "grad_norm": 0.5929650068283081, "learning_rate": 0.00016283122402119645, "loss": 0.9145, "step": 3071 }, { "epoch": 0.5708976026760825, "grad_norm": 0.6493182182312012, "learning_rate": 0.00016280846917107215, "loss": 1.1481, "step": 3072 }, { "epoch": 0.5710834417394536, "grad_norm": 0.5538601875305176, "learning_rate": 0.00016278570894880372, "loss": 1.0587, "step": 3073 }, { "epoch": 0.5712692808028248, "grad_norm": 0.5534785985946655, "learning_rate": 0.0001627629433563379, "loss": 0.985, "step": 3074 }, { "epoch": 0.5714551198661959, "grad_norm": 0.6992794275283813, "learning_rate": 0.0001627401723956219, "loss": 1.1699, "step": 3075 }, { "epoch": 0.571640958929567, "grad_norm": 0.545943558216095, "learning_rate": 0.0001627173960686033, "loss": 1.0472, "step": 3076 }, { "epoch": 0.5718267979929381, "grad_norm": 0.5662815570831299, "learning_rate": 0.00016269461437723034, "loss": 0.7866, "step": 3077 }, { "epoch": 0.5720126370563092, "grad_norm": 0.6630524396896362, "learning_rate": 0.00016267182732345145, "loss": 1.1837, "step": 3078 }, { "epoch": 0.5721984761196803, "grad_norm": 0.527268648147583, "learning_rate": 0.00016264903490921574, "loss": 1.0633, "step": 3079 }, { "epoch": 0.5723843151830514, "grad_norm": 0.6216735243797302, "learning_rate": 0.0001626262371364727, "loss": 1.0545, "step": 3080 }, { "epoch": 0.5725701542464225, "grad_norm": 0.5457433462142944, "learning_rate": 0.00016260343400717223, "loss": 1.121, "step": 3081 }, { "epoch": 0.5727559933097938, "grad_norm": 0.6372676491737366, "learning_rate": 0.00016258062552326473, "loss": 1.1908, "step": 3082 }, { "epoch": 0.5729418323731649, 
"grad_norm": 0.5404830574989319, "learning_rate": 0.00016255781168670108, "loss": 0.9754, "step": 3083 }, { "epoch": 0.573127671436536, "grad_norm": 0.6268422603607178, "learning_rate": 0.00016253499249943261, "loss": 1.1317, "step": 3084 }, { "epoch": 0.5733135104999071, "grad_norm": 0.5977557301521301, "learning_rate": 0.00016251216796341105, "loss": 1.3657, "step": 3085 }, { "epoch": 0.5734993495632782, "grad_norm": 0.5620158314704895, "learning_rate": 0.00016248933808058865, "loss": 1.1449, "step": 3086 }, { "epoch": 0.5736851886266493, "grad_norm": 0.6281202435493469, "learning_rate": 0.00016246650285291813, "loss": 1.044, "step": 3087 }, { "epoch": 0.5738710276900204, "grad_norm": 0.5784666538238525, "learning_rate": 0.00016244366228235262, "loss": 1.0241, "step": 3088 }, { "epoch": 0.5740568667533915, "grad_norm": 0.5703860521316528, "learning_rate": 0.0001624208163708457, "loss": 1.0912, "step": 3089 }, { "epoch": 0.5742427058167627, "grad_norm": 0.6101926565170288, "learning_rate": 0.00016239796512035145, "loss": 1.2968, "step": 3090 }, { "epoch": 0.5744285448801338, "grad_norm": 0.5332390666007996, "learning_rate": 0.0001623751085328244, "loss": 0.9362, "step": 3091 }, { "epoch": 0.574614383943505, "grad_norm": 2.209155321121216, "learning_rate": 0.00016235224661021946, "loss": 2.555, "step": 3092 }, { "epoch": 0.574800223006876, "grad_norm": 0.5431200265884399, "learning_rate": 0.00016232937935449214, "loss": 0.9771, "step": 3093 }, { "epoch": 0.5749860620702472, "grad_norm": 0.7131924629211426, "learning_rate": 0.0001623065067675983, "loss": 0.9736, "step": 3094 }, { "epoch": 0.5751719011336183, "grad_norm": 0.5774680376052856, "learning_rate": 0.00016228362885149425, "loss": 0.8636, "step": 3095 }, { "epoch": 0.5753577401969894, "grad_norm": 0.5699325203895569, "learning_rate": 0.00016226074560813682, "loss": 1.2025, "step": 3096 }, { "epoch": 0.5755435792603605, "grad_norm": 0.5468953847885132, "learning_rate": 0.00016223785703948327, "loss": 1.0436, 
"step": 3097 }, { "epoch": 0.5757294183237317, "grad_norm": 0.5742730498313904, "learning_rate": 0.0001622149631474913, "loss": 1.25, "step": 3098 }, { "epoch": 0.5759152573871028, "grad_norm": 0.8017017841339111, "learning_rate": 0.00016219206393411903, "loss": 1.2828, "step": 3099 }, { "epoch": 0.5761010964504739, "grad_norm": 0.6201735138893127, "learning_rate": 0.00016216915940132513, "loss": 1.2306, "step": 3100 }, { "epoch": 0.576286935513845, "grad_norm": 0.5407164096832275, "learning_rate": 0.00016214624955106867, "loss": 1.1117, "step": 3101 }, { "epoch": 0.5764727745772161, "grad_norm": 0.6462402939796448, "learning_rate": 0.00016212333438530918, "loss": 1.3912, "step": 3102 }, { "epoch": 0.5766586136405872, "grad_norm": 0.6825852990150452, "learning_rate": 0.00016210041390600662, "loss": 1.0587, "step": 3103 }, { "epoch": 0.5768444527039583, "grad_norm": 0.5548142790794373, "learning_rate": 0.00016207748811512146, "loss": 0.8915, "step": 3104 }, { "epoch": 0.5770302917673295, "grad_norm": 0.599079966545105, "learning_rate": 0.00016205455701461454, "loss": 0.8589, "step": 3105 }, { "epoch": 0.5772161308307006, "grad_norm": 0.5806437134742737, "learning_rate": 0.00016203162060644726, "loss": 1.1802, "step": 3106 }, { "epoch": 0.5774019698940718, "grad_norm": 0.7707300186157227, "learning_rate": 0.00016200867889258142, "loss": 1.0741, "step": 3107 }, { "epoch": 0.5775878089574429, "grad_norm": 0.5511063933372498, "learning_rate": 0.0001619857318749792, "loss": 1.0173, "step": 3108 }, { "epoch": 0.577773648020814, "grad_norm": 0.5827605128288269, "learning_rate": 0.00016196277955560345, "loss": 1.0117, "step": 3109 }, { "epoch": 0.5779594870841851, "grad_norm": 0.5755023956298828, "learning_rate": 0.00016193982193641717, "loss": 1.0535, "step": 3110 }, { "epoch": 0.5781453261475562, "grad_norm": 0.5576872229576111, "learning_rate": 0.00016191685901938407, "loss": 0.9355, "step": 3111 }, { "epoch": 0.5783311652109273, "grad_norm": 0.6232908964157104, 
"learning_rate": 0.00016189389080646823, "loss": 1.0984, "step": 3112 }, { "epoch": 0.5785170042742984, "grad_norm": 0.558789074420929, "learning_rate": 0.00016187091729963409, "loss": 1.1206, "step": 3113 }, { "epoch": 0.5787028433376695, "grad_norm": 0.5674159526824951, "learning_rate": 0.0001618479385008467, "loss": 1.0263, "step": 3114 }, { "epoch": 0.5788886824010407, "grad_norm": 0.6567408442497253, "learning_rate": 0.00016182495441207148, "loss": 1.2807, "step": 3115 }, { "epoch": 0.5790745214644119, "grad_norm": 0.6544413566589355, "learning_rate": 0.00016180196503527426, "loss": 1.0089, "step": 3116 }, { "epoch": 0.579260360527783, "grad_norm": 0.6177968382835388, "learning_rate": 0.0001617789703724214, "loss": 1.2623, "step": 3117 }, { "epoch": 0.5794461995911541, "grad_norm": 0.6059399843215942, "learning_rate": 0.0001617559704254797, "loss": 1.1049, "step": 3118 }, { "epoch": 0.5796320386545252, "grad_norm": 0.5528789162635803, "learning_rate": 0.00016173296519641635, "loss": 1.039, "step": 3119 }, { "epoch": 0.5798178777178963, "grad_norm": 0.5349646210670471, "learning_rate": 0.00016170995468719904, "loss": 0.9782, "step": 3120 }, { "epoch": 0.5800037167812674, "grad_norm": 0.5830336809158325, "learning_rate": 0.00016168693889979601, "loss": 1.3013, "step": 3121 }, { "epoch": 0.5801895558446385, "grad_norm": 0.5634336471557617, "learning_rate": 0.00016166391783617573, "loss": 1.022, "step": 3122 }, { "epoch": 0.5803753949080097, "grad_norm": 0.57351154088974, "learning_rate": 0.00016164089149830728, "loss": 1.0899, "step": 3123 }, { "epoch": 0.5805612339713808, "grad_norm": 0.5886953473091125, "learning_rate": 0.00016161785988816018, "loss": 1.2963, "step": 3124 }, { "epoch": 0.5807470730347519, "grad_norm": 0.6066156029701233, "learning_rate": 0.00016159482300770432, "loss": 1.1767, "step": 3125 }, { "epoch": 0.580932912098123, "grad_norm": 0.6183861494064331, "learning_rate": 0.00016157178085891014, "loss": 1.1532, "step": 3126 }, { "epoch": 
0.5811187511614941, "grad_norm": 0.5010476112365723, "learning_rate": 0.00016154873344374846, "loss": 1.0039, "step": 3127 }, { "epoch": 0.5813045902248652, "grad_norm": 0.5397800803184509, "learning_rate": 0.00016152568076419058, "loss": 0.9084, "step": 3128 }, { "epoch": 0.5814904292882364, "grad_norm": 0.5681976675987244, "learning_rate": 0.00016150262282220828, "loss": 0.9105, "step": 3129 }, { "epoch": 0.5816762683516075, "grad_norm": 0.686458170413971, "learning_rate": 0.0001614795596197737, "loss": 1.3427, "step": 3130 }, { "epoch": 0.5818621074149787, "grad_norm": 0.6114237904548645, "learning_rate": 0.00016145649115885952, "loss": 1.0371, "step": 3131 }, { "epoch": 0.5820479464783498, "grad_norm": 0.6143605709075928, "learning_rate": 0.00016143341744143884, "loss": 0.8177, "step": 3132 }, { "epoch": 0.5822337855417209, "grad_norm": 0.569019079208374, "learning_rate": 0.00016141033846948517, "loss": 0.9501, "step": 3133 }, { "epoch": 0.582419624605092, "grad_norm": 0.5694284439086914, "learning_rate": 0.00016138725424497252, "loss": 1.0626, "step": 3134 }, { "epoch": 0.5826054636684631, "grad_norm": 0.5643896460533142, "learning_rate": 0.00016136416476987534, "loss": 1.111, "step": 3135 }, { "epoch": 0.5827913027318342, "grad_norm": 0.7149565815925598, "learning_rate": 0.00016134107004616853, "loss": 1.3123, "step": 3136 }, { "epoch": 0.5829771417952053, "grad_norm": 0.6421211361885071, "learning_rate": 0.00016131797007582743, "loss": 0.9872, "step": 3137 }, { "epoch": 0.5831629808585764, "grad_norm": 0.5691509246826172, "learning_rate": 0.00016129486486082782, "loss": 1.2416, "step": 3138 }, { "epoch": 0.5833488199219476, "grad_norm": 0.6846590042114258, "learning_rate": 0.00016127175440314596, "loss": 1.0807, "step": 3139 }, { "epoch": 0.5835346589853188, "grad_norm": 0.6126776933670044, "learning_rate": 0.00016124863870475847, "loss": 1.0882, "step": 3140 }, { "epoch": 0.5837204980486899, "grad_norm": 0.8353591561317444, "learning_rate": 
0.0001612255177676426, "loss": 1.3001, "step": 3141 }, { "epoch": 0.583906337112061, "grad_norm": 0.6794141530990601, "learning_rate": 0.00016120239159377582, "loss": 1.1654, "step": 3142 }, { "epoch": 0.5840921761754321, "grad_norm": 0.6769196391105652, "learning_rate": 0.00016117926018513622, "loss": 1.2275, "step": 3143 }, { "epoch": 0.5842780152388032, "grad_norm": 0.6092231273651123, "learning_rate": 0.0001611561235437023, "loss": 0.9933, "step": 3144 }, { "epoch": 0.5844638543021743, "grad_norm": 0.5706498622894287, "learning_rate": 0.00016113298167145292, "loss": 1.0099, "step": 3145 }, { "epoch": 0.5846496933655454, "grad_norm": 0.5918547511100769, "learning_rate": 0.00016110983457036753, "loss": 1.1331, "step": 3146 }, { "epoch": 0.5848355324289165, "grad_norm": 0.6145870089530945, "learning_rate": 0.00016108668224242587, "loss": 0.9591, "step": 3147 }, { "epoch": 0.5850213714922877, "grad_norm": 0.5824666023254395, "learning_rate": 0.00016106352468960825, "loss": 1.2682, "step": 3148 }, { "epoch": 0.5852072105556588, "grad_norm": 0.6346530318260193, "learning_rate": 0.0001610403619138954, "loss": 1.0673, "step": 3149 }, { "epoch": 0.5853930496190299, "grad_norm": 0.5495458841323853, "learning_rate": 0.00016101719391726853, "loss": 1.0197, "step": 3150 }, { "epoch": 0.585578888682401, "grad_norm": 0.6226973533630371, "learning_rate": 0.00016099402070170912, "loss": 1.1676, "step": 3151 }, { "epoch": 0.5857647277457722, "grad_norm": 0.687059760093689, "learning_rate": 0.00016097084226919932, "loss": 0.8824, "step": 3152 }, { "epoch": 0.5859505668091433, "grad_norm": 0.5375981330871582, "learning_rate": 0.0001609476586217216, "loss": 1.0266, "step": 3153 }, { "epoch": 0.5861364058725144, "grad_norm": 0.5993423461914062, "learning_rate": 0.00016092446976125892, "loss": 0.9489, "step": 3154 }, { "epoch": 0.5863222449358855, "grad_norm": 0.670810878276825, "learning_rate": 0.00016090127568979464, "loss": 1.406, "step": 3155 }, { "epoch": 0.5865080839992567, 
"grad_norm": 0.5641027688980103, "learning_rate": 0.00016087807640931265, "loss": 1.0512, "step": 3156 }, { "epoch": 0.5866939230626278, "grad_norm": 0.6967179775238037, "learning_rate": 0.0001608548719217972, "loss": 1.0222, "step": 3157 }, { "epoch": 0.5868797621259989, "grad_norm": 0.9438024163246155, "learning_rate": 0.00016083166222923304, "loss": 1.0211, "step": 3158 }, { "epoch": 0.58706560118937, "grad_norm": 0.595064640045166, "learning_rate": 0.00016080844733360535, "loss": 0.9823, "step": 3159 }, { "epoch": 0.5872514402527411, "grad_norm": 0.5441033244132996, "learning_rate": 0.0001607852272368997, "loss": 1.1615, "step": 3160 }, { "epoch": 0.5874372793161122, "grad_norm": 0.5815873146057129, "learning_rate": 0.0001607620019411022, "loss": 0.9964, "step": 3161 }, { "epoch": 0.5876231183794833, "grad_norm": 0.5449917912483215, "learning_rate": 0.00016073877144819933, "loss": 1.1755, "step": 3162 }, { "epoch": 0.5878089574428544, "grad_norm": 0.6668300628662109, "learning_rate": 0.0001607155357601781, "loss": 1.2527, "step": 3163 }, { "epoch": 0.5879947965062257, "grad_norm": 0.5341411828994751, "learning_rate": 0.00016069229487902586, "loss": 1.0948, "step": 3164 }, { "epoch": 0.5881806355695968, "grad_norm": 0.6133508086204529, "learning_rate": 0.00016066904880673047, "loss": 1.0737, "step": 3165 }, { "epoch": 0.5883664746329679, "grad_norm": 0.5782324075698853, "learning_rate": 0.00016064579754528023, "loss": 0.9634, "step": 3166 }, { "epoch": 0.588552313696339, "grad_norm": 0.6144787073135376, "learning_rate": 0.0001606225410966638, "loss": 0.9183, "step": 3167 }, { "epoch": 0.5887381527597101, "grad_norm": 0.6035318970680237, "learning_rate": 0.00016059927946287045, "loss": 1.1995, "step": 3168 }, { "epoch": 0.5889239918230812, "grad_norm": 0.822420597076416, "learning_rate": 0.00016057601264588974, "loss": 1.1864, "step": 3169 }, { "epoch": 0.5891098308864523, "grad_norm": 0.5931529402732849, "learning_rate": 0.00016055274064771176, "loss": 0.8851, 
"step": 3170 }, { "epoch": 0.5892956699498234, "grad_norm": 0.6320171356201172, "learning_rate": 0.00016052946347032697, "loss": 0.8221, "step": 3171 }, { "epoch": 0.5894815090131946, "grad_norm": 0.5823320746421814, "learning_rate": 0.0001605061811157264, "loss": 1.2303, "step": 3172 }, { "epoch": 0.5896673480765657, "grad_norm": 0.6609289050102234, "learning_rate": 0.00016048289358590133, "loss": 0.8762, "step": 3173 }, { "epoch": 0.5898531871399368, "grad_norm": 0.5508584976196289, "learning_rate": 0.0001604596008828437, "loss": 1.3369, "step": 3174 }, { "epoch": 0.590039026203308, "grad_norm": 0.5863260626792908, "learning_rate": 0.0001604363030085457, "loss": 0.9728, "step": 3175 }, { "epoch": 0.590224865266679, "grad_norm": 0.5487418174743652, "learning_rate": 0.0001604129999650001, "loss": 1.2077, "step": 3176 }, { "epoch": 0.5904107043300502, "grad_norm": 0.6902508735656738, "learning_rate": 0.00016038969175420005, "loss": 1.0947, "step": 3177 }, { "epoch": 0.5905965433934213, "grad_norm": 0.5682591199874878, "learning_rate": 0.00016036637837813915, "loss": 1.2425, "step": 3178 }, { "epoch": 0.5907823824567924, "grad_norm": 0.7730516791343689, "learning_rate": 0.00016034305983881142, "loss": 1.1954, "step": 3179 }, { "epoch": 0.5909682215201635, "grad_norm": 0.6822097897529602, "learning_rate": 0.0001603197361382114, "loss": 1.175, "step": 3180 }, { "epoch": 0.5911540605835347, "grad_norm": 0.5383156538009644, "learning_rate": 0.0001602964072783339, "loss": 0.9459, "step": 3181 }, { "epoch": 0.5913398996469058, "grad_norm": 0.6112632155418396, "learning_rate": 0.00016027307326117445, "loss": 1.1796, "step": 3182 }, { "epoch": 0.5915257387102769, "grad_norm": 0.592560350894928, "learning_rate": 0.00016024973408872872, "loss": 0.9495, "step": 3183 }, { "epoch": 0.591711577773648, "grad_norm": 0.6019397377967834, "learning_rate": 0.00016022638976299303, "loss": 1.0865, "step": 3184 }, { "epoch": 0.5918974168370191, "grad_norm": 0.6122562885284424, 
"learning_rate": 0.00016020304028596408, "loss": 1.373, "step": 3185 }, { "epoch": 0.5920832559003902, "grad_norm": 0.5218804478645325, "learning_rate": 0.00016017968565963897, "loss": 0.78, "step": 3186 }, { "epoch": 0.5922690949637613, "grad_norm": 0.6451453566551208, "learning_rate": 0.00016015632588601528, "loss": 1.087, "step": 3187 }, { "epoch": 0.5924549340271325, "grad_norm": 0.5636645555496216, "learning_rate": 0.000160132960967091, "loss": 1.0118, "step": 3188 }, { "epoch": 0.5926407730905037, "grad_norm": 0.5245563983917236, "learning_rate": 0.00016010959090486464, "loss": 1.0635, "step": 3189 }, { "epoch": 0.5928266121538748, "grad_norm": 0.5599002838134766, "learning_rate": 0.000160086215701335, "loss": 1.0792, "step": 3190 }, { "epoch": 0.5930124512172459, "grad_norm": 0.5101682543754578, "learning_rate": 0.00016006283535850152, "loss": 0.9534, "step": 3191 }, { "epoch": 0.593198290280617, "grad_norm": 0.6640803813934326, "learning_rate": 0.00016003944987836392, "loss": 1.0931, "step": 3192 }, { "epoch": 0.5933841293439881, "grad_norm": 0.5303892493247986, "learning_rate": 0.0001600160592629224, "loss": 0.9207, "step": 3193 }, { "epoch": 0.5935699684073592, "grad_norm": 0.6699718236923218, "learning_rate": 0.0001599926635141776, "loss": 1.1941, "step": 3194 }, { "epoch": 0.5937558074707303, "grad_norm": 0.5338372588157654, "learning_rate": 0.00015996926263413066, "loss": 1.188, "step": 3195 }, { "epoch": 0.5939416465341014, "grad_norm": 0.5052550435066223, "learning_rate": 0.00015994585662478306, "loss": 0.9666, "step": 3196 }, { "epoch": 0.5941274855974726, "grad_norm": 0.6414618492126465, "learning_rate": 0.00015992244548813678, "loss": 1.191, "step": 3197 }, { "epoch": 0.5943133246608437, "grad_norm": 0.5840882658958435, "learning_rate": 0.00015989902922619421, "loss": 1.0446, "step": 3198 }, { "epoch": 0.5944991637242149, "grad_norm": 0.5422523617744446, "learning_rate": 0.00015987560784095823, "loss": 1.0008, "step": 3199 }, { "epoch": 
0.594685002787586, "grad_norm": 0.619379460811615, "learning_rate": 0.0001598521813344321, "loss": 1.2946, "step": 3200 }, { "epoch": 0.5948708418509571, "grad_norm": 0.5059217810630798, "learning_rate": 0.00015982874970861955, "loss": 0.9146, "step": 3201 }, { "epoch": 0.5950566809143282, "grad_norm": 0.6319354176521301, "learning_rate": 0.00015980531296552476, "loss": 1.1363, "step": 3202 }, { "epoch": 0.5952425199776993, "grad_norm": 0.6196834444999695, "learning_rate": 0.00015978187110715224, "loss": 1.1737, "step": 3203 }, { "epoch": 0.5954283590410704, "grad_norm": 0.5323222279548645, "learning_rate": 0.0001597584241355071, "loss": 1.0392, "step": 3204 }, { "epoch": 0.5956141981044416, "grad_norm": 0.6693430542945862, "learning_rate": 0.00015973497205259477, "loss": 1.1555, "step": 3205 }, { "epoch": 0.5958000371678127, "grad_norm": 0.5676935911178589, "learning_rate": 0.0001597115148604212, "loss": 1.31, "step": 3206 }, { "epoch": 0.5959858762311838, "grad_norm": 0.5511360764503479, "learning_rate": 0.00015968805256099273, "loss": 1.1491, "step": 3207 }, { "epoch": 0.5961717152945549, "grad_norm": 0.5314706563949585, "learning_rate": 0.0001596645851563161, "loss": 1.0161, "step": 3208 }, { "epoch": 0.596357554357926, "grad_norm": 0.5497950911521912, "learning_rate": 0.00015964111264839853, "loss": 1.1909, "step": 3209 }, { "epoch": 0.5965433934212971, "grad_norm": 0.564395546913147, "learning_rate": 0.00015961763503924772, "loss": 1.0012, "step": 3210 }, { "epoch": 0.5967292324846682, "grad_norm": 0.5982910394668579, "learning_rate": 0.0001595941523308717, "loss": 1.0582, "step": 3211 }, { "epoch": 0.5969150715480394, "grad_norm": 0.5818560123443604, "learning_rate": 0.00015957066452527906, "loss": 0.9864, "step": 3212 }, { "epoch": 0.5971009106114106, "grad_norm": 0.4957444369792938, "learning_rate": 0.00015954717162447874, "loss": 0.9415, "step": 3213 }, { "epoch": 0.5972867496747817, "grad_norm": 0.6664856672286987, "learning_rate": 
0.00015952367363048016, "loss": 1.2168, "step": 3214 }, { "epoch": 0.5974725887381528, "grad_norm": 0.5591182708740234, "learning_rate": 0.00015950017054529306, "loss": 1.3039, "step": 3215 }, { "epoch": 0.5976584278015239, "grad_norm": 0.6910569667816162, "learning_rate": 0.00015947666237092783, "loss": 1.228, "step": 3216 }, { "epoch": 0.597844266864895, "grad_norm": 0.533061683177948, "learning_rate": 0.00015945314910939507, "loss": 1.1549, "step": 3217 }, { "epoch": 0.5980301059282661, "grad_norm": 0.6971861720085144, "learning_rate": 0.00015942963076270603, "loss": 1.0395, "step": 3218 }, { "epoch": 0.5982159449916372, "grad_norm": 0.648344874382019, "learning_rate": 0.00015940610733287218, "loss": 1.4975, "step": 3219 }, { "epoch": 0.5984017840550083, "grad_norm": 0.5853761434555054, "learning_rate": 0.0001593825788219056, "loss": 1.0498, "step": 3220 }, { "epoch": 0.5985876231183794, "grad_norm": 0.5414695143699646, "learning_rate": 0.0001593590452318187, "loss": 1.1884, "step": 3221 }, { "epoch": 0.5987734621817506, "grad_norm": 0.6624699234962463, "learning_rate": 0.00015933550656462437, "loss": 1.0484, "step": 3222 }, { "epoch": 0.5989593012451218, "grad_norm": 0.6307032704353333, "learning_rate": 0.00015931196282233594, "loss": 1.0314, "step": 3223 }, { "epoch": 0.5991451403084929, "grad_norm": 0.5564342141151428, "learning_rate": 0.00015928841400696713, "loss": 0.9162, "step": 3224 }, { "epoch": 0.599330979371864, "grad_norm": 0.6190139055252075, "learning_rate": 0.0001592648601205321, "loss": 0.9379, "step": 3225 }, { "epoch": 0.5995168184352351, "grad_norm": 0.5595014691352844, "learning_rate": 0.00015924130116504553, "loss": 1.1747, "step": 3226 }, { "epoch": 0.5997026574986062, "grad_norm": 0.6404126882553101, "learning_rate": 0.00015921773714252244, "loss": 1.0451, "step": 3227 }, { "epoch": 0.5998884965619773, "grad_norm": 0.5678627490997314, "learning_rate": 0.00015919416805497825, "loss": 0.9317, "step": 3228 }, { "epoch": 0.6000743356253484, 
"grad_norm": 0.5714607238769531, "learning_rate": 0.000159170593904429, "loss": 1.0092, "step": 3229 }, { "epoch": 0.6002601746887196, "grad_norm": 0.5801486372947693, "learning_rate": 0.00015914701469289095, "loss": 1.2109, "step": 3230 }, { "epoch": 0.6004460137520907, "grad_norm": 0.5355969071388245, "learning_rate": 0.00015912343042238087, "loss": 1.0206, "step": 3231 }, { "epoch": 0.6006318528154618, "grad_norm": 0.5958425402641296, "learning_rate": 0.00015909984109491605, "loss": 0.9693, "step": 3232 }, { "epoch": 0.6008176918788329, "grad_norm": 0.644477367401123, "learning_rate": 0.00015907624671251407, "loss": 1.1284, "step": 3233 }, { "epoch": 0.601003530942204, "grad_norm": 0.7528777718544006, "learning_rate": 0.000159052647277193, "loss": 1.0145, "step": 3234 }, { "epoch": 0.6011893700055752, "grad_norm": 0.6112080812454224, "learning_rate": 0.0001590290427909714, "loss": 0.9781, "step": 3235 }, { "epoch": 0.6013752090689463, "grad_norm": 0.6840320229530334, "learning_rate": 0.0001590054332558682, "loss": 1.2317, "step": 3236 }, { "epoch": 0.6015610481323174, "grad_norm": 0.6607563495635986, "learning_rate": 0.00015898181867390277, "loss": 1.0902, "step": 3237 }, { "epoch": 0.6017468871956886, "grad_norm": 0.7982823848724365, "learning_rate": 0.0001589581990470949, "loss": 1.4083, "step": 3238 }, { "epoch": 0.6019327262590597, "grad_norm": 0.5766059160232544, "learning_rate": 0.00015893457437746484, "loss": 1.1384, "step": 3239 }, { "epoch": 0.6021185653224308, "grad_norm": 0.6066285967826843, "learning_rate": 0.00015891094466703325, "loss": 1.0282, "step": 3240 }, { "epoch": 0.6023044043858019, "grad_norm": 0.6731379628181458, "learning_rate": 0.00015888730991782125, "loss": 1.0858, "step": 3241 }, { "epoch": 0.602490243449173, "grad_norm": 0.5809962749481201, "learning_rate": 0.00015886367013185035, "loss": 1.2507, "step": 3242 }, { "epoch": 0.6026760825125441, "grad_norm": 0.5508452653884888, "learning_rate": 0.00015884002531114255, "loss": 0.8706, 
"step": 3243 }, { "epoch": 0.6028619215759152, "grad_norm": 0.5738691091537476, "learning_rate": 0.0001588163754577202, "loss": 0.9869, "step": 3244 }, { "epoch": 0.6030477606392863, "grad_norm": 0.6129263639450073, "learning_rate": 0.00015879272057360612, "loss": 1.0251, "step": 3245 }, { "epoch": 0.6032335997026576, "grad_norm": 0.5459702014923096, "learning_rate": 0.0001587690606608236, "loss": 1.304, "step": 3246 }, { "epoch": 0.6034194387660287, "grad_norm": 0.6796538233757019, "learning_rate": 0.0001587453957213963, "loss": 0.9258, "step": 3247 }, { "epoch": 0.6036052778293998, "grad_norm": 0.6355002522468567, "learning_rate": 0.00015872172575734833, "loss": 0.7917, "step": 3248 }, { "epoch": 0.6037911168927709, "grad_norm": 0.782307505607605, "learning_rate": 0.00015869805077070427, "loss": 1.2612, "step": 3249 }, { "epoch": 0.603976955956142, "grad_norm": 0.48565757274627686, "learning_rate": 0.00015867437076348906, "loss": 0.878, "step": 3250 }, { "epoch": 0.6041627950195131, "grad_norm": 0.6093220114707947, "learning_rate": 0.00015865068573772808, "loss": 1.12, "step": 3251 }, { "epoch": 0.6043486340828842, "grad_norm": 0.5323510766029358, "learning_rate": 0.0001586269956954472, "loss": 0.9756, "step": 3252 }, { "epoch": 0.6045344731462553, "grad_norm": 0.6668819785118103, "learning_rate": 0.00015860330063867267, "loss": 0.8406, "step": 3253 }, { "epoch": 0.6047203122096264, "grad_norm": 0.6621644496917725, "learning_rate": 0.0001585796005694312, "loss": 1.0424, "step": 3254 }, { "epoch": 0.6049061512729976, "grad_norm": 0.5795639157295227, "learning_rate": 0.00015855589548974982, "loss": 1.0071, "step": 3255 }, { "epoch": 0.6050919903363687, "grad_norm": 0.6052048802375793, "learning_rate": 0.00015853218540165623, "loss": 0.9134, "step": 3256 }, { "epoch": 0.6052778293997398, "grad_norm": 0.6029704809188843, "learning_rate": 0.0001585084703071783, "loss": 1.1407, "step": 3257 }, { "epoch": 0.605463668463111, "grad_norm": 0.5548236966133118, 
"learning_rate": 0.0001584847502083444, "loss": 1.0075, "step": 3258 }, { "epoch": 0.6056495075264821, "grad_norm": 0.679028332233429, "learning_rate": 0.0001584610251071834, "loss": 1.3603, "step": 3259 }, { "epoch": 0.6058353465898532, "grad_norm": 0.7591668963432312, "learning_rate": 0.00015843729500572464, "loss": 1.1176, "step": 3260 }, { "epoch": 0.6060211856532243, "grad_norm": 0.5672114491462708, "learning_rate": 0.00015841355990599772, "loss": 1.2445, "step": 3261 }, { "epoch": 0.6062070247165954, "grad_norm": 0.5262810587882996, "learning_rate": 0.00015838981981003273, "loss": 1.2205, "step": 3262 }, { "epoch": 0.6063928637799666, "grad_norm": 0.5443841814994812, "learning_rate": 0.00015836607471986033, "loss": 0.7721, "step": 3263 }, { "epoch": 0.6065787028433377, "grad_norm": 0.568134605884552, "learning_rate": 0.00015834232463751135, "loss": 1.2441, "step": 3264 }, { "epoch": 0.6067645419067088, "grad_norm": 0.5961521863937378, "learning_rate": 0.00015831856956501726, "loss": 0.9117, "step": 3265 }, { "epoch": 0.6069503809700799, "grad_norm": 0.6283470988273621, "learning_rate": 0.0001582948095044099, "loss": 0.9078, "step": 3266 }, { "epoch": 0.607136220033451, "grad_norm": 0.6529102921485901, "learning_rate": 0.00015827104445772144, "loss": 1.2285, "step": 3267 }, { "epoch": 0.6073220590968221, "grad_norm": 0.55194491147995, "learning_rate": 0.00015824727442698467, "loss": 0.6904, "step": 3268 }, { "epoch": 0.6075078981601932, "grad_norm": 0.6459614634513855, "learning_rate": 0.0001582234994142326, "loss": 1.027, "step": 3269 }, { "epoch": 0.6076937372235643, "grad_norm": 0.491305410861969, "learning_rate": 0.00015819971942149875, "loss": 0.7555, "step": 3270 }, { "epoch": 0.6078795762869356, "grad_norm": 0.5508794188499451, "learning_rate": 0.00015817593445081716, "loss": 1.0445, "step": 3271 }, { "epoch": 0.6080654153503067, "grad_norm": 0.4993383288383484, "learning_rate": 0.00015815214450422217, "loss": 0.9549, "step": 3272 }, { "epoch": 
0.6082512544136778, "grad_norm": 0.6566349864006042, "learning_rate": 0.00015812834958374853, "loss": 1.1194, "step": 3273 }, { "epoch": 0.6084370934770489, "grad_norm": 0.6416013836860657, "learning_rate": 0.00015810454969143156, "loss": 1.0529, "step": 3274 }, { "epoch": 0.60862293254042, "grad_norm": 0.6909444332122803, "learning_rate": 0.00015808074482930685, "loss": 0.8588, "step": 3275 }, { "epoch": 0.6088087716037911, "grad_norm": 0.6137370467185974, "learning_rate": 0.00015805693499941056, "loss": 1.1165, "step": 3276 }, { "epoch": 0.6089946106671622, "grad_norm": 0.6334401369094849, "learning_rate": 0.00015803312020377908, "loss": 1.3178, "step": 3277 }, { "epoch": 0.6091804497305333, "grad_norm": 0.5722105503082275, "learning_rate": 0.00015800930044444943, "loss": 1.1094, "step": 3278 }, { "epoch": 0.6093662887939045, "grad_norm": 0.651226282119751, "learning_rate": 0.00015798547572345892, "loss": 0.8808, "step": 3279 }, { "epoch": 0.6095521278572756, "grad_norm": 0.6072734594345093, "learning_rate": 0.00015796164604284533, "loss": 1.0669, "step": 3280 }, { "epoch": 0.6097379669206467, "grad_norm": 0.5889689326286316, "learning_rate": 0.00015793781140464695, "loss": 1.2274, "step": 3281 }, { "epoch": 0.6099238059840179, "grad_norm": 0.573512852191925, "learning_rate": 0.0001579139718109023, "loss": 1.1719, "step": 3282 }, { "epoch": 0.610109645047389, "grad_norm": 0.55435711145401, "learning_rate": 0.00015789012726365046, "loss": 1.1348, "step": 3283 }, { "epoch": 0.6102954841107601, "grad_norm": 0.7388445734977722, "learning_rate": 0.00015786627776493095, "loss": 1.3228, "step": 3284 }, { "epoch": 0.6104813231741312, "grad_norm": 0.6711801886558533, "learning_rate": 0.0001578424233167836, "loss": 0.9831, "step": 3285 }, { "epoch": 0.6106671622375023, "grad_norm": 0.5652539730072021, "learning_rate": 0.00015781856392124878, "loss": 1.3369, "step": 3286 }, { "epoch": 0.6108530013008734, "grad_norm": 0.6947058439254761, "learning_rate": 
0.00015779469958036724, "loss": 1.3382, "step": 3287 }, { "epoch": 0.6110388403642446, "grad_norm": 0.5150282979011536, "learning_rate": 0.00015777083029618015, "loss": 1.0123, "step": 3288 }, { "epoch": 0.6112246794276157, "grad_norm": 0.603293776512146, "learning_rate": 0.00015774695607072902, "loss": 1.0484, "step": 3289 }, { "epoch": 0.6114105184909868, "grad_norm": 0.5617212057113647, "learning_rate": 0.00015772307690605598, "loss": 1.0026, "step": 3290 }, { "epoch": 0.6115963575543579, "grad_norm": 0.5847578644752502, "learning_rate": 0.00015769919280420345, "loss": 0.9991, "step": 3291 }, { "epoch": 0.611782196617729, "grad_norm": 0.4738200604915619, "learning_rate": 0.0001576753037672142, "loss": 0.8151, "step": 3292 }, { "epoch": 0.6119680356811001, "grad_norm": 0.6319113373756409, "learning_rate": 0.00015765140979713158, "loss": 1.1612, "step": 3293 }, { "epoch": 0.6121538747444712, "grad_norm": 2.3821051120758057, "learning_rate": 0.00015762751089599925, "loss": 2.1318, "step": 3294 }, { "epoch": 0.6123397138078424, "grad_norm": 0.6564528942108154, "learning_rate": 0.00015760360706586142, "loss": 1.1375, "step": 3295 }, { "epoch": 0.6125255528712136, "grad_norm": 0.6328415870666504, "learning_rate": 0.00015757969830876254, "loss": 1.149, "step": 3296 }, { "epoch": 0.6127113919345847, "grad_norm": 0.6952791810035706, "learning_rate": 0.00015755578462674764, "loss": 1.1789, "step": 3297 }, { "epoch": 0.6128972309979558, "grad_norm": 0.6253474354743958, "learning_rate": 0.00015753186602186209, "loss": 1.1331, "step": 3298 }, { "epoch": 0.6130830700613269, "grad_norm": 0.6182775497436523, "learning_rate": 0.00015750794249615166, "loss": 0.9761, "step": 3299 }, { "epoch": 0.613268909124698, "grad_norm": 0.3781263530254364, "learning_rate": 0.00015748401405166264, "loss": 0.5632, "step": 3300 }, { "epoch": 0.6134547481880691, "grad_norm": 0.6519707441329956, "learning_rate": 0.00015746008069044166, "loss": 1.0675, "step": 3301 }, { "epoch": 0.6136405872514402, 
"grad_norm": 0.5394522547721863, "learning_rate": 0.0001574361424145358, "loss": 0.8303, "step": 3302 }, { "epoch": 0.6138264263148113, "grad_norm": 0.6013384461402893, "learning_rate": 0.00015741219922599253, "loss": 1.0751, "step": 3303 }, { "epoch": 0.6140122653781825, "grad_norm": 2.4101686477661133, "learning_rate": 0.00015738825112685976, "loss": 2.4912, "step": 3304 }, { "epoch": 0.6141981044415536, "grad_norm": 0.5980067253112793, "learning_rate": 0.0001573642981191859, "loss": 1.1398, "step": 3305 }, { "epoch": 0.6143839435049248, "grad_norm": 0.6038574576377869, "learning_rate": 0.00015734034020501957, "loss": 1.2101, "step": 3306 }, { "epoch": 0.6145697825682959, "grad_norm": 0.5979435443878174, "learning_rate": 0.00015731637738641005, "loss": 1.2162, "step": 3307 }, { "epoch": 0.614755621631667, "grad_norm": 0.5230640769004822, "learning_rate": 0.0001572924096654069, "loss": 1.1457, "step": 3308 }, { "epoch": 0.6149414606950381, "grad_norm": 0.5864366888999939, "learning_rate": 0.0001572684370440601, "loss": 0.9144, "step": 3309 }, { "epoch": 0.6151272997584092, "grad_norm": 0.5331762433052063, "learning_rate": 0.00015724445952442013, "loss": 1.0474, "step": 3310 }, { "epoch": 0.6153131388217803, "grad_norm": 0.582480788230896, "learning_rate": 0.0001572204771085378, "loss": 1.2336, "step": 3311 }, { "epoch": 0.6154989778851515, "grad_norm": 0.638606607913971, "learning_rate": 0.0001571964897984644, "loss": 1.2695, "step": 3312 }, { "epoch": 0.6156848169485226, "grad_norm": 0.6707527041435242, "learning_rate": 0.00015717249759625163, "loss": 1.1273, "step": 3313 }, { "epoch": 0.6158706560118937, "grad_norm": 0.6105638742446899, "learning_rate": 0.00015714850050395154, "loss": 1.0156, "step": 3314 }, { "epoch": 0.6160564950752648, "grad_norm": 0.5666206479072571, "learning_rate": 0.0001571244985236167, "loss": 1.011, "step": 3315 }, { "epoch": 0.6162423341386359, "grad_norm": 0.5360713005065918, "learning_rate": 0.00015710049165730002, "loss": 0.822, 
"step": 3316 }, { "epoch": 0.616428173202007, "grad_norm": 0.5776330828666687, "learning_rate": 0.00015707647990705493, "loss": 1.1726, "step": 3317 }, { "epoch": 0.6166140122653782, "grad_norm": 0.5878480672836304, "learning_rate": 0.0001570524632749351, "loss": 1.0647, "step": 3318 }, { "epoch": 0.6167998513287493, "grad_norm": 0.7072561383247375, "learning_rate": 0.0001570284417629948, "loss": 1.3886, "step": 3319 }, { "epoch": 0.6169856903921205, "grad_norm": 0.6697565913200378, "learning_rate": 0.00015700441537328864, "loss": 0.8962, "step": 3320 }, { "epoch": 0.6171715294554916, "grad_norm": 0.6354570388793945, "learning_rate": 0.00015698038410787163, "loss": 1.0034, "step": 3321 }, { "epoch": 0.6173573685188627, "grad_norm": 0.6120606660842896, "learning_rate": 0.00015695634796879915, "loss": 1.1362, "step": 3322 }, { "epoch": 0.6175432075822338, "grad_norm": 0.7120777368545532, "learning_rate": 0.00015693230695812722, "loss": 1.0888, "step": 3323 }, { "epoch": 0.6177290466456049, "grad_norm": 0.5528488159179688, "learning_rate": 0.00015690826107791198, "loss": 1.0157, "step": 3324 }, { "epoch": 0.617914885708976, "grad_norm": 0.6340495347976685, "learning_rate": 0.00015688421033021018, "loss": 0.9895, "step": 3325 }, { "epoch": 0.6181007247723471, "grad_norm": 0.5119288563728333, "learning_rate": 0.00015686015471707892, "loss": 1.185, "step": 3326 }, { "epoch": 0.6182865638357182, "grad_norm": 0.6284135580062866, "learning_rate": 0.00015683609424057578, "loss": 0.9463, "step": 3327 }, { "epoch": 0.6184724028990893, "grad_norm": 0.7082629203796387, "learning_rate": 0.00015681202890275865, "loss": 1.1147, "step": 3328 }, { "epoch": 0.6186582419624606, "grad_norm": 0.5838453769683838, "learning_rate": 0.00015678795870568583, "loss": 1.0441, "step": 3329 }, { "epoch": 0.6188440810258317, "grad_norm": 0.7504107356071472, "learning_rate": 0.00015676388365141623, "loss": 1.102, "step": 3330 }, { "epoch": 0.6190299200892028, "grad_norm": 0.5921317338943481, 
"learning_rate": 0.00015673980374200896, "loss": 0.9912, "step": 3331 }, { "epoch": 0.6192157591525739, "grad_norm": 0.6119334697723389, "learning_rate": 0.00015671571897952368, "loss": 1.0653, "step": 3332 }, { "epoch": 0.619401598215945, "grad_norm": 0.5734863877296448, "learning_rate": 0.00015669162936602033, "loss": 0.9782, "step": 3333 }, { "epoch": 0.6195874372793161, "grad_norm": 0.6472792029380798, "learning_rate": 0.00015666753490355944, "loss": 1.1381, "step": 3334 }, { "epoch": 0.6197732763426872, "grad_norm": 0.6566014289855957, "learning_rate": 0.00015664343559420178, "loss": 1.1101, "step": 3335 }, { "epoch": 0.6199591154060583, "grad_norm": 0.5882189273834229, "learning_rate": 0.00015661933144000862, "loss": 1.2242, "step": 3336 }, { "epoch": 0.6201449544694295, "grad_norm": 0.6419895887374878, "learning_rate": 0.00015659522244304173, "loss": 1.0805, "step": 3337 }, { "epoch": 0.6203307935328006, "grad_norm": 0.6279326677322388, "learning_rate": 0.00015657110860536314, "loss": 1.0568, "step": 3338 }, { "epoch": 0.6205166325961717, "grad_norm": 0.7833722829818726, "learning_rate": 0.00015654698992903534, "loss": 1.213, "step": 3339 }, { "epoch": 0.6207024716595428, "grad_norm": 0.6131447553634644, "learning_rate": 0.00015652286641612125, "loss": 1.1272, "step": 3340 }, { "epoch": 0.620888310722914, "grad_norm": 0.5262309908866882, "learning_rate": 0.0001564987380686843, "loss": 0.9924, "step": 3341 }, { "epoch": 0.6210741497862851, "grad_norm": 0.6729386448860168, "learning_rate": 0.00015647460488878813, "loss": 1.2097, "step": 3342 }, { "epoch": 0.6212599888496562, "grad_norm": 0.5907789468765259, "learning_rate": 0.00015645046687849697, "loss": 1.1616, "step": 3343 }, { "epoch": 0.6214458279130273, "grad_norm": 0.7607451677322388, "learning_rate": 0.00015642632403987535, "loss": 1.1074, "step": 3344 }, { "epoch": 0.6216316669763985, "grad_norm": 0.6834059357643127, "learning_rate": 0.00015640217637498827, "loss": 1.0589, "step": 3345 }, { "epoch": 
0.6218175060397696, "grad_norm": 0.6914005279541016, "learning_rate": 0.00015637802388590122, "loss": 1.167, "step": 3346 }, { "epoch": 0.6220033451031407, "grad_norm": 0.48968812823295593, "learning_rate": 0.00015635386657467985, "loss": 1.0032, "step": 3347 }, { "epoch": 0.6221891841665118, "grad_norm": 0.5632472634315491, "learning_rate": 0.00015632970444339053, "loss": 1.0351, "step": 3348 }, { "epoch": 0.6223750232298829, "grad_norm": 0.5951436758041382, "learning_rate": 0.00015630553749409982, "loss": 1.2193, "step": 3349 }, { "epoch": 0.622560862293254, "grad_norm": 0.5487954616546631, "learning_rate": 0.0001562813657288748, "loss": 0.9555, "step": 3350 }, { "epoch": 0.6227467013566251, "grad_norm": 0.6174929141998291, "learning_rate": 0.00015625718914978292, "loss": 1.1938, "step": 3351 }, { "epoch": 0.6229325404199962, "grad_norm": 0.6154797673225403, "learning_rate": 0.0001562330077588921, "loss": 1.067, "step": 3352 }, { "epoch": 0.6231183794833675, "grad_norm": 0.6975616812705994, "learning_rate": 0.00015620882155827054, "loss": 1.145, "step": 3353 }, { "epoch": 0.6233042185467386, "grad_norm": 0.6247215867042542, "learning_rate": 0.00015618463054998703, "loss": 0.9011, "step": 3354 }, { "epoch": 0.6234900576101097, "grad_norm": 2.737778425216675, "learning_rate": 0.0001561604347361106, "loss": 2.8023, "step": 3355 }, { "epoch": 0.6236758966734808, "grad_norm": 0.6489484906196594, "learning_rate": 0.00015613623411871085, "loss": 1.1984, "step": 3356 }, { "epoch": 0.6238617357368519, "grad_norm": 0.7646564841270447, "learning_rate": 0.00015611202869985765, "loss": 0.9968, "step": 3357 }, { "epoch": 0.624047574800223, "grad_norm": 0.6500785946846008, "learning_rate": 0.00015608781848162136, "loss": 0.9897, "step": 3358 }, { "epoch": 0.6242334138635941, "grad_norm": 0.5417594909667969, "learning_rate": 0.00015606360346607273, "loss": 1.1919, "step": 3359 }, { "epoch": 0.6244192529269652, "grad_norm": 0.68771892786026, "learning_rate": 
0.00015603938365528293, "loss": 1.2118, "step": 3360 }, { "epoch": 0.6246050919903363, "grad_norm": 0.5929567217826843, "learning_rate": 0.00015601515905132353, "loss": 1.078, "step": 3361 }, { "epoch": 0.6247909310537075, "grad_norm": 0.7144112586975098, "learning_rate": 0.0001559909296562665, "loss": 1.2353, "step": 3362 }, { "epoch": 0.6249767701170786, "grad_norm": 0.6896948218345642, "learning_rate": 0.00015596669547218427, "loss": 1.0764, "step": 3363 }, { "epoch": 0.6251626091804497, "grad_norm": 0.6907390356063843, "learning_rate": 0.00015594245650114958, "loss": 1.3201, "step": 3364 }, { "epoch": 0.6253484482438209, "grad_norm": 0.5990051031112671, "learning_rate": 0.00015591821274523571, "loss": 1.1825, "step": 3365 }, { "epoch": 0.625534287307192, "grad_norm": 0.6666750907897949, "learning_rate": 0.00015589396420651624, "loss": 1.1073, "step": 3366 }, { "epoch": 0.6257201263705631, "grad_norm": 0.6805914044380188, "learning_rate": 0.00015586971088706522, "loss": 1.3364, "step": 3367 }, { "epoch": 0.6259059654339342, "grad_norm": 0.6045242547988892, "learning_rate": 0.00015584545278895707, "loss": 1.094, "step": 3368 }, { "epoch": 0.6260918044973053, "grad_norm": 0.5706300139427185, "learning_rate": 0.00015582118991426663, "loss": 0.9863, "step": 3369 }, { "epoch": 0.6262776435606765, "grad_norm": 0.6760212779045105, "learning_rate": 0.00015579692226506918, "loss": 1.2693, "step": 3370 }, { "epoch": 0.6264634826240476, "grad_norm": 0.5949139595031738, "learning_rate": 0.00015577264984344038, "loss": 1.1793, "step": 3371 }, { "epoch": 0.6266493216874187, "grad_norm": 0.5809699296951294, "learning_rate": 0.00015574837265145628, "loss": 0.9453, "step": 3372 }, { "epoch": 0.6268351607507898, "grad_norm": 0.6867097020149231, "learning_rate": 0.00015572409069119337, "loss": 1.0772, "step": 3373 }, { "epoch": 0.6270209998141609, "grad_norm": 0.7444250583648682, "learning_rate": 0.0001556998039647286, "loss": 1.0622, "step": 3374 }, { "epoch": 0.627206838877532, 
"grad_norm": 0.5732178092002869, "learning_rate": 0.0001556755124741392, "loss": 0.9385, "step": 3375 }, { "epoch": 0.6273926779409031, "grad_norm": 0.6235290169715881, "learning_rate": 0.00015565121622150286, "loss": 0.8565, "step": 3376 }, { "epoch": 0.6275785170042743, "grad_norm": 0.6951614618301392, "learning_rate": 0.00015562691520889775, "loss": 1.1083, "step": 3377 }, { "epoch": 0.6277643560676455, "grad_norm": 0.5099678039550781, "learning_rate": 0.0001556026094384023, "loss": 1.1066, "step": 3378 }, { "epoch": 0.6279501951310166, "grad_norm": 0.575692892074585, "learning_rate": 0.00015557829891209554, "loss": 0.9044, "step": 3379 }, { "epoch": 0.6281360341943877, "grad_norm": 0.6911464929580688, "learning_rate": 0.00015555398363205675, "loss": 1.015, "step": 3380 }, { "epoch": 0.6283218732577588, "grad_norm": 0.7588498592376709, "learning_rate": 0.00015552966360036567, "loss": 1.2013, "step": 3381 }, { "epoch": 0.6285077123211299, "grad_norm": 0.6335672736167908, "learning_rate": 0.00015550533881910242, "loss": 0.927, "step": 3382 }, { "epoch": 0.628693551384501, "grad_norm": 0.6448066830635071, "learning_rate": 0.00015548100929034764, "loss": 0.9687, "step": 3383 }, { "epoch": 0.6288793904478721, "grad_norm": 0.7308876514434814, "learning_rate": 0.00015545667501618216, "loss": 0.9884, "step": 3384 }, { "epoch": 0.6290652295112432, "grad_norm": 0.5620437860488892, "learning_rate": 0.00015543233599868742, "loss": 1.04, "step": 3385 }, { "epoch": 0.6292510685746144, "grad_norm": 0.7370177507400513, "learning_rate": 0.0001554079922399452, "loss": 1.0136, "step": 3386 }, { "epoch": 0.6294369076379855, "grad_norm": 0.5509874224662781, "learning_rate": 0.00015538364374203764, "loss": 0.9867, "step": 3387 }, { "epoch": 0.6296227467013567, "grad_norm": 0.7003196477890015, "learning_rate": 0.00015535929050704733, "loss": 1.1006, "step": 3388 }, { "epoch": 0.6298085857647278, "grad_norm": null, "learning_rate": 0.00015535929050704733, "loss": 4.1587, "step": 3389 }, 
{ "epoch": 0.6299944248280989, "grad_norm": 0.5861777067184448, "learning_rate": 0.00015533493253705726, "loss": 0.9348, "step": 3390 }, { "epoch": 0.63018026389147, "grad_norm": 0.6192556619644165, "learning_rate": 0.00015531056983415081, "loss": 1.0898, "step": 3391 }, { "epoch": 0.6303661029548411, "grad_norm": 0.6822803616523743, "learning_rate": 0.00015528620240041178, "loss": 1.2059, "step": 3392 }, { "epoch": 0.6305519420182122, "grad_norm": 0.5751301646232605, "learning_rate": 0.00015526183023792438, "loss": 1.2787, "step": 3393 }, { "epoch": 0.6307377810815834, "grad_norm": 0.6449471712112427, "learning_rate": 0.00015523745334877317, "loss": 1.1165, "step": 3394 }, { "epoch": 0.6309236201449545, "grad_norm": 0.612648069858551, "learning_rate": 0.00015521307173504325, "loss": 0.7657, "step": 3395 }, { "epoch": 0.6311094592083256, "grad_norm": 0.5513817667961121, "learning_rate": 0.00015518868539881995, "loss": 0.9294, "step": 3396 }, { "epoch": 0.6312952982716967, "grad_norm": 0.6446556448936462, "learning_rate": 0.00015516429434218908, "loss": 1.0127, "step": 3397 }, { "epoch": 0.6314811373350678, "grad_norm": 0.4763330817222595, "learning_rate": 0.00015513989856723691, "loss": 0.916, "step": 3398 }, { "epoch": 0.6316669763984389, "grad_norm": 14.481205940246582, "learning_rate": 0.00015511549807605004, "loss": 3.4295, "step": 3399 }, { "epoch": 0.63185281546181, "grad_norm": 0.5876346826553345, "learning_rate": 0.00015509109287071548, "loss": 0.9924, "step": 3400 }, { "epoch": 0.6320386545251812, "grad_norm": 0.6718258857727051, "learning_rate": 0.00015506668295332072, "loss": 1.103, "step": 3401 }, { "epoch": 0.6322244935885523, "grad_norm": 0.6530651450157166, "learning_rate": 0.00015504226832595354, "loss": 1.2541, "step": 3402 }, { "epoch": 0.6324103326519235, "grad_norm": 0.5370796918869019, "learning_rate": 0.00015501784899070217, "loss": 1.2188, "step": 3403 }, { "epoch": 0.6325961717152946, "grad_norm": 0.635278582572937, "learning_rate": 
0.00015499342494965527, "loss": 1.2426, "step": 3404 }, { "epoch": 0.6327820107786657, "grad_norm": 0.49484050273895264, "learning_rate": 0.0001549689962049019, "loss": 0.8188, "step": 3405 }, { "epoch": 0.6329678498420368, "grad_norm": 0.5422400832176208, "learning_rate": 0.0001549445627585315, "loss": 1.0764, "step": 3406 }, { "epoch": 0.6331536889054079, "grad_norm": 0.637448251247406, "learning_rate": 0.00015492012461263384, "loss": 0.7457, "step": 3407 }, { "epoch": 0.633339527968779, "grad_norm": 0.5702096223831177, "learning_rate": 0.00015489568176929922, "loss": 1.1017, "step": 3408 }, { "epoch": 0.6335253670321501, "grad_norm": 0.5875257849693298, "learning_rate": 0.00015487123423061833, "loss": 0.9366, "step": 3409 }, { "epoch": 0.6337112060955212, "grad_norm": 0.6866803765296936, "learning_rate": 0.00015484678199868219, "loss": 1.1363, "step": 3410 }, { "epoch": 0.6338970451588924, "grad_norm": 0.5591610074043274, "learning_rate": 0.00015482232507558222, "loss": 1.244, "step": 3411 }, { "epoch": 0.6340828842222636, "grad_norm": 0.6071485280990601, "learning_rate": 0.0001547978634634103, "loss": 1.0588, "step": 3412 }, { "epoch": 0.6342687232856347, "grad_norm": 0.6361015439033508, "learning_rate": 0.0001547733971642587, "loss": 0.9244, "step": 3413 }, { "epoch": 0.6344545623490058, "grad_norm": 0.560198187828064, "learning_rate": 0.00015474892618022005, "loss": 1.1407, "step": 3414 }, { "epoch": 0.6346404014123769, "grad_norm": 0.6156582832336426, "learning_rate": 0.0001547244505133874, "loss": 1.0394, "step": 3415 }, { "epoch": 0.634826240475748, "grad_norm": 0.6680965423583984, "learning_rate": 0.00015469997016585426, "loss": 1.0968, "step": 3416 }, { "epoch": 0.6350120795391191, "grad_norm": 0.54937744140625, "learning_rate": 0.00015467548513971443, "loss": 0.7901, "step": 3417 }, { "epoch": 0.6351979186024902, "grad_norm": 0.6295645833015442, "learning_rate": 0.0001546509954370622, "loss": 1.1601, "step": 3418 }, { "epoch": 0.6353837576658614, 
"grad_norm": 0.6593343615531921, "learning_rate": 0.00015462650105999218, "loss": 1.0454, "step": 3419 }, { "epoch": 0.6355695967292325, "grad_norm": 0.6861504316329956, "learning_rate": 0.0001546020020105995, "loss": 1.2475, "step": 3420 }, { "epoch": 0.6357554357926036, "grad_norm": 0.5935508608818054, "learning_rate": 0.00015457749829097956, "loss": 1.1566, "step": 3421 }, { "epoch": 0.6359412748559747, "grad_norm": 0.6246775984764099, "learning_rate": 0.00015455298990322823, "loss": 0.9687, "step": 3422 }, { "epoch": 0.6361271139193458, "grad_norm": 0.6714119911193848, "learning_rate": 0.00015452847684944176, "loss": 0.9973, "step": 3423 }, { "epoch": 0.636312952982717, "grad_norm": 0.6952091455459595, "learning_rate": 0.00015450395913171685, "loss": 1.3226, "step": 3424 }, { "epoch": 0.6364987920460881, "grad_norm": 0.7437610030174255, "learning_rate": 0.0001544794367521505, "loss": 1.1179, "step": 3425 }, { "epoch": 0.6366846311094592, "grad_norm": 0.5943450331687927, "learning_rate": 0.00015445490971284018, "loss": 1.0157, "step": 3426 }, { "epoch": 0.6368704701728304, "grad_norm": 0.5073950886726379, "learning_rate": 0.0001544303780158837, "loss": 0.9862, "step": 3427 }, { "epoch": 0.6370563092362015, "grad_norm": 0.6207197904586792, "learning_rate": 0.00015440584166337938, "loss": 1.0514, "step": 3428 }, { "epoch": 0.6372421482995726, "grad_norm": 0.6031699776649475, "learning_rate": 0.00015438130065742586, "loss": 1.1663, "step": 3429 }, { "epoch": 0.6374279873629437, "grad_norm": 0.6535128355026245, "learning_rate": 0.00015435675500012212, "loss": 1.2384, "step": 3430 }, { "epoch": 0.6376138264263148, "grad_norm": 0.5577135682106018, "learning_rate": 0.00015433220469356765, "loss": 0.9835, "step": 3431 }, { "epoch": 0.6377996654896859, "grad_norm": 0.5418036580085754, "learning_rate": 0.00015430764973986235, "loss": 0.8347, "step": 3432 }, { "epoch": 0.637985504553057, "grad_norm": 0.7411396503448486, "learning_rate": 0.00015428309014110632, "loss": 
1.1138, "step": 3433 }, { "epoch": 0.6381713436164281, "grad_norm": 0.6470693349838257, "learning_rate": 0.00015425852589940025, "loss": 1.1362, "step": 3434 }, { "epoch": 0.6383571826797992, "grad_norm": 0.6360995769500732, "learning_rate": 0.00015423395701684526, "loss": 1.0167, "step": 3435 }, { "epoch": 0.6385430217431705, "grad_norm": 0.5424529910087585, "learning_rate": 0.00015420938349554268, "loss": 0.8663, "step": 3436 }, { "epoch": 0.6387288608065416, "grad_norm": 0.598699688911438, "learning_rate": 0.00015418480533759435, "loss": 1.1082, "step": 3437 }, { "epoch": 0.6389146998699127, "grad_norm": 0.5573627948760986, "learning_rate": 0.00015416022254510257, "loss": 1.006, "step": 3438 }, { "epoch": 0.6391005389332838, "grad_norm": 0.5559731721878052, "learning_rate": 0.00015413563512016985, "loss": 1.0956, "step": 3439 }, { "epoch": 0.6392863779966549, "grad_norm": 0.6015757322311401, "learning_rate": 0.00015411104306489926, "loss": 1.191, "step": 3440 }, { "epoch": 0.639472217060026, "grad_norm": 0.6004772782325745, "learning_rate": 0.0001540864463813942, "loss": 1.2299, "step": 3441 }, { "epoch": 0.6396580561233971, "grad_norm": 0.6631706953048706, "learning_rate": 0.00015406184507175852, "loss": 1.2987, "step": 3442 }, { "epoch": 0.6398438951867682, "grad_norm": 0.6589309573173523, "learning_rate": 0.00015403723913809639, "loss": 1.1748, "step": 3443 }, { "epoch": 0.6400297342501394, "grad_norm": 0.574073851108551, "learning_rate": 0.00015401262858251235, "loss": 1.1021, "step": 3444 }, { "epoch": 0.6402155733135105, "grad_norm": 0.7661458849906921, "learning_rate": 0.0001539880134071115, "loss": 1.2319, "step": 3445 }, { "epoch": 0.6404014123768816, "grad_norm": 0.6638999581336975, "learning_rate": 0.00015396339361399916, "loss": 0.9982, "step": 3446 }, { "epoch": 0.6405872514402527, "grad_norm": 0.5810231566429138, "learning_rate": 0.00015393876920528115, "loss": 1.1258, "step": 3447 }, { "epoch": 0.6407730905036239, "grad_norm": 0.5583173036575317, 
"learning_rate": 0.00015391414018306362, "loss": 1.1781, "step": 3448 }, { "epoch": 0.640958929566995, "grad_norm": 0.6540656685829163, "learning_rate": 0.00015388950654945314, "loss": 1.1244, "step": 3449 }, { "epoch": 0.6411447686303661, "grad_norm": 0.5573239922523499, "learning_rate": 0.0001538648683065567, "loss": 1.1899, "step": 3450 }, { "epoch": 0.6413306076937372, "grad_norm": 0.6116712093353271, "learning_rate": 0.0001538402254564817, "loss": 1.1629, "step": 3451 }, { "epoch": 0.6415164467571084, "grad_norm": 0.5380821228027344, "learning_rate": 0.0001538155780013358, "loss": 1.199, "step": 3452 }, { "epoch": 0.6417022858204795, "grad_norm": 0.4550449550151825, "learning_rate": 0.00015379092594322723, "loss": 1.0023, "step": 3453 }, { "epoch": 0.6418881248838506, "grad_norm": 0.5812940001487732, "learning_rate": 0.00015376626928426452, "loss": 1.008, "step": 3454 }, { "epoch": 0.6420739639472217, "grad_norm": 0.5325769782066345, "learning_rate": 0.00015374160802655658, "loss": 0.868, "step": 3455 }, { "epoch": 0.6422598030105928, "grad_norm": 0.650091826915741, "learning_rate": 0.00015371694217221273, "loss": 1.03, "step": 3456 }, { "epoch": 0.6424456420739639, "grad_norm": 0.7132396101951599, "learning_rate": 0.00015369227172334272, "loss": 1.4007, "step": 3457 }, { "epoch": 0.642631481137335, "grad_norm": 0.5265616774559021, "learning_rate": 0.0001536675966820567, "loss": 1.074, "step": 3458 }, { "epoch": 0.6428173202007061, "grad_norm": 0.6761186718940735, "learning_rate": 0.00015364291705046514, "loss": 1.0798, "step": 3459 }, { "epoch": 0.6430031592640774, "grad_norm": 0.5889752507209778, "learning_rate": 0.00015361823283067892, "loss": 1.1018, "step": 3460 }, { "epoch": 0.6431889983274485, "grad_norm": 0.6676234006881714, "learning_rate": 0.00015359354402480938, "loss": 1.1207, "step": 3461 }, { "epoch": 0.6433748373908196, "grad_norm": 0.6662126779556274, "learning_rate": 0.00015356885063496818, "loss": 0.8809, "step": 3462 }, { "epoch": 
0.6435606764541907, "grad_norm": 0.6595667600631714, "learning_rate": 0.00015354415266326742, "loss": 0.9899, "step": 3463 }, { "epoch": 0.6437465155175618, "grad_norm": 0.7342430949211121, "learning_rate": 0.00015351945011181955, "loss": 0.725, "step": 3464 }, { "epoch": 0.6439323545809329, "grad_norm": 0.5512349009513855, "learning_rate": 0.00015349474298273746, "loss": 1.1645, "step": 3465 }, { "epoch": 0.644118193644304, "grad_norm": 0.6810212135314941, "learning_rate": 0.00015347003127813436, "loss": 1.5384, "step": 3466 }, { "epoch": 0.6443040327076751, "grad_norm": 0.38854822516441345, "learning_rate": 0.0001534453150001239, "loss": 0.498, "step": 3467 }, { "epoch": 0.6444898717710462, "grad_norm": 0.6276911497116089, "learning_rate": 0.0001534205941508202, "loss": 1.0133, "step": 3468 }, { "epoch": 0.6446757108344174, "grad_norm": 0.7050503492355347, "learning_rate": 0.00015339586873233758, "loss": 1.2785, "step": 3469 }, { "epoch": 0.6448615498977885, "grad_norm": 0.6047936677932739, "learning_rate": 0.00015337113874679093, "loss": 1.0657, "step": 3470 }, { "epoch": 0.6450473889611597, "grad_norm": 0.7037943005561829, "learning_rate": 0.0001533464041962954, "loss": 1.1488, "step": 3471 }, { "epoch": 0.6452332280245308, "grad_norm": 0.5448530912399292, "learning_rate": 0.00015332166508296664, "loss": 0.981, "step": 3472 }, { "epoch": 0.6454190670879019, "grad_norm": 0.5085611343383789, "learning_rate": 0.00015329692140892063, "loss": 0.933, "step": 3473 }, { "epoch": 0.645604906151273, "grad_norm": 0.637241780757904, "learning_rate": 0.00015327217317627373, "loss": 1.2294, "step": 3474 }, { "epoch": 0.6457907452146441, "grad_norm": 0.5922691822052002, "learning_rate": 0.00015324742038714276, "loss": 1.1787, "step": 3475 }, { "epoch": 0.6459765842780152, "grad_norm": 0.5249952673912048, "learning_rate": 0.00015322266304364476, "loss": 1.0432, "step": 3476 }, { "epoch": 0.6461624233413864, "grad_norm": 0.591828465461731, "learning_rate": 
0.00015319790114789742, "loss": 0.8827, "step": 3477 }, { "epoch": 0.6463482624047575, "grad_norm": 0.646541953086853, "learning_rate": 0.00015317313470201863, "loss": 1.3075, "step": 3478 }, { "epoch": 0.6465341014681286, "grad_norm": 0.660753607749939, "learning_rate": 0.00015314836370812666, "loss": 1.0812, "step": 3479 }, { "epoch": 0.6467199405314997, "grad_norm": 0.5507911443710327, "learning_rate": 0.0001531235881683403, "loss": 0.9117, "step": 3480 }, { "epoch": 0.6469057795948708, "grad_norm": 0.6520745158195496, "learning_rate": 0.00015309880808477862, "loss": 1.0439, "step": 3481 }, { "epoch": 0.6470916186582419, "grad_norm": 0.6552932262420654, "learning_rate": 0.00015307402345956115, "loss": 1.1353, "step": 3482 }, { "epoch": 0.647277457721613, "grad_norm": 0.7491993308067322, "learning_rate": 0.00015304923429480772, "loss": 1.3769, "step": 3483 }, { "epoch": 0.6474632967849842, "grad_norm": 0.4848712980747223, "learning_rate": 0.0001530244405926386, "loss": 0.8271, "step": 3484 }, { "epoch": 0.6476491358483554, "grad_norm": 0.5544674396514893, "learning_rate": 0.00015299964235517455, "loss": 1.1254, "step": 3485 }, { "epoch": 0.6478349749117265, "grad_norm": 0.5810096859931946, "learning_rate": 0.00015297483958453647, "loss": 1.2283, "step": 3486 }, { "epoch": 0.6480208139750976, "grad_norm": 0.5955312252044678, "learning_rate": 0.0001529500322828459, "loss": 1.3159, "step": 3487 }, { "epoch": 0.6482066530384687, "grad_norm": 0.6579847931861877, "learning_rate": 0.00015292522045222463, "loss": 0.9444, "step": 3488 }, { "epoch": 0.6483924921018398, "grad_norm": 0.5204761028289795, "learning_rate": 0.00015290040409479487, "loss": 0.8009, "step": 3489 }, { "epoch": 0.6485783311652109, "grad_norm": 0.6247485876083374, "learning_rate": 0.00015287558321267918, "loss": 1.1415, "step": 3490 }, { "epoch": 0.648764170228582, "grad_norm": 0.6254748106002808, "learning_rate": 0.00015285075780800062, "loss": 1.142, "step": 3491 }, { "epoch": 0.6489500092919531, 
"grad_norm": 0.7298083305358887, "learning_rate": 0.00015282592788288252, "loss": 1.0529, "step": 3492 }, { "epoch": 0.6491358483553243, "grad_norm": 0.6158937215805054, "learning_rate": 0.0001528010934394486, "loss": 0.9719, "step": 3493 }, { "epoch": 0.6493216874186954, "grad_norm": 0.5640282034873962, "learning_rate": 0.00015277625447982307, "loss": 1.1164, "step": 3494 }, { "epoch": 0.6495075264820666, "grad_norm": 0.5224336981773376, "learning_rate": 0.00015275141100613043, "loss": 0.8594, "step": 3495 }, { "epoch": 0.6496933655454377, "grad_norm": 0.5831015706062317, "learning_rate": 0.00015272656302049557, "loss": 0.9334, "step": 3496 }, { "epoch": 0.6498792046088088, "grad_norm": 0.6398945450782776, "learning_rate": 0.00015270171052504382, "loss": 1.2502, "step": 3497 }, { "epoch": 0.6500650436721799, "grad_norm": 0.5970938205718994, "learning_rate": 0.0001526768535219009, "loss": 1.2101, "step": 3498 }, { "epoch": 0.650250882735551, "grad_norm": 0.6261694431304932, "learning_rate": 0.00015265199201319285, "loss": 0.5574, "step": 3499 }, { "epoch": 0.6504367217989221, "grad_norm": 0.5698041915893555, "learning_rate": 0.0001526271260010461, "loss": 1.0594, "step": 3500 }, { "epoch": 0.6506225608622933, "grad_norm": 0.5439754724502563, "learning_rate": 0.00015260225548758752, "loss": 0.8891, "step": 3501 }, { "epoch": 0.6508083999256644, "grad_norm": 0.7733392119407654, "learning_rate": 0.00015257738047494432, "loss": 1.0131, "step": 3502 }, { "epoch": 0.6509942389890355, "grad_norm": 0.6281485557556152, "learning_rate": 0.0001525525009652442, "loss": 1.1565, "step": 3503 }, { "epoch": 0.6511800780524066, "grad_norm": 0.633788526058197, "learning_rate": 0.00015252761696061501, "loss": 1.2533, "step": 3504 }, { "epoch": 0.6513659171157777, "grad_norm": 0.554947018623352, "learning_rate": 0.00015250272846318528, "loss": 1.1795, "step": 3505 }, { "epoch": 0.6515517561791488, "grad_norm": 0.6328675746917725, "learning_rate": 0.00015247783547508368, "loss": 
0.8386, "step": 3506 }, { "epoch": 0.65173759524252, "grad_norm": 0.6410210132598877, "learning_rate": 0.00015245293799843942, "loss": 1.1097, "step": 3507 }, { "epoch": 0.6519234343058911, "grad_norm": 0.6602080464363098, "learning_rate": 0.00015242803603538195, "loss": 1.3585, "step": 3508 }, { "epoch": 0.6521092733692622, "grad_norm": 0.5675439834594727, "learning_rate": 0.00015240312958804132, "loss": 1.135, "step": 3509 }, { "epoch": 0.6522951124326334, "grad_norm": 0.6612635850906372, "learning_rate": 0.0001523782186585477, "loss": 1.3385, "step": 3510 }, { "epoch": 0.6524809514960045, "grad_norm": 0.6298087239265442, "learning_rate": 0.00015235330324903185, "loss": 1.2031, "step": 3511 }, { "epoch": 0.6526667905593756, "grad_norm": 0.6352697610855103, "learning_rate": 0.00015232838336162483, "loss": 0.9642, "step": 3512 }, { "epoch": 0.6528526296227467, "grad_norm": 0.5610973834991455, "learning_rate": 0.0001523034589984581, "loss": 1.1033, "step": 3513 }, { "epoch": 0.6530384686861178, "grad_norm": 0.598053514957428, "learning_rate": 0.00015227853016166345, "loss": 1.0022, "step": 3514 }, { "epoch": 0.6532243077494889, "grad_norm": 0.6255698204040527, "learning_rate": 0.00015225359685337313, "loss": 1.1182, "step": 3515 }, { "epoch": 0.65341014681286, "grad_norm": 0.5280336737632751, "learning_rate": 0.00015222865907571972, "loss": 1.1277, "step": 3516 }, { "epoch": 0.6535959858762311, "grad_norm": 0.6419905424118042, "learning_rate": 0.00015220371683083629, "loss": 1.0459, "step": 3517 }, { "epoch": 0.6537818249396024, "grad_norm": 0.6171340942382812, "learning_rate": 0.00015217877012085605, "loss": 1.1594, "step": 3518 }, { "epoch": 0.6539676640029735, "grad_norm": 0.6222787499427795, "learning_rate": 0.00015215381894791285, "loss": 1.1122, "step": 3519 }, { "epoch": 0.6541535030663446, "grad_norm": 0.6655737161636353, "learning_rate": 0.00015212886331414084, "loss": 1.0083, "step": 3520 }, { "epoch": 0.6543393421297157, "grad_norm": 0.6613302230834961, 
"learning_rate": 0.00015210390322167446, "loss": 1.0373, "step": 3521 }, { "epoch": 0.6545251811930868, "grad_norm": 0.6962905526161194, "learning_rate": 0.00015207893867264863, "loss": 1.0902, "step": 3522 }, { "epoch": 0.6547110202564579, "grad_norm": 0.581489622592926, "learning_rate": 0.0001520539696691986, "loss": 1.1598, "step": 3523 }, { "epoch": 0.654896859319829, "grad_norm": 0.660275399684906, "learning_rate": 0.00015202899621346005, "loss": 1.0715, "step": 3524 }, { "epoch": 0.6550826983832001, "grad_norm": 0.5727256536483765, "learning_rate": 0.00015200401830756897, "loss": 1.151, "step": 3525 }, { "epoch": 0.6552685374465713, "grad_norm": 0.8974356651306152, "learning_rate": 0.00015197903595366184, "loss": 1.3013, "step": 3526 }, { "epoch": 0.6554543765099424, "grad_norm": 0.6946527361869812, "learning_rate": 0.00015195404915387543, "loss": 1.1257, "step": 3527 }, { "epoch": 0.6556402155733135, "grad_norm": 0.5919240713119507, "learning_rate": 0.00015192905791034693, "loss": 1.2153, "step": 3528 }, { "epoch": 0.6558260546366846, "grad_norm": 0.5989977121353149, "learning_rate": 0.00015190406222521385, "loss": 1.2052, "step": 3529 }, { "epoch": 0.6560118937000557, "grad_norm": 0.5734437108039856, "learning_rate": 0.00015187906210061412, "loss": 0.9683, "step": 3530 }, { "epoch": 0.6561977327634269, "grad_norm": 0.6741088032722473, "learning_rate": 0.00015185405753868612, "loss": 0.9762, "step": 3531 }, { "epoch": 0.656383571826798, "grad_norm": 0.5712679624557495, "learning_rate": 0.0001518290485415685, "loss": 1.2456, "step": 3532 }, { "epoch": 0.6565694108901691, "grad_norm": 0.5627811551094055, "learning_rate": 0.00015180403511140034, "loss": 1.3626, "step": 3533 }, { "epoch": 0.6567552499535403, "grad_norm": 0.5959043502807617, "learning_rate": 0.0001517790172503211, "loss": 1.1376, "step": 3534 }, { "epoch": 0.6569410890169114, "grad_norm": 0.5870547294616699, "learning_rate": 0.0001517539949604706, "loss": 1.0926, "step": 3535 }, { "epoch": 
0.6571269280802825, "grad_norm": 0.5044032335281372, "learning_rate": 0.00015172896824398904, "loss": 0.989, "step": 3536 }, { "epoch": 0.6573127671436536, "grad_norm": 0.5552332401275635, "learning_rate": 0.00015170393710301703, "loss": 1.1129, "step": 3537 }, { "epoch": 0.6574986062070247, "grad_norm": 0.6285333633422852, "learning_rate": 0.00015167890153969555, "loss": 1.3035, "step": 3538 }, { "epoch": 0.6576844452703958, "grad_norm": 0.6018518805503845, "learning_rate": 0.0001516538615561659, "loss": 1.1757, "step": 3539 }, { "epoch": 0.6578702843337669, "grad_norm": 0.6804785132408142, "learning_rate": 0.00015162881715456985, "loss": 1.1304, "step": 3540 }, { "epoch": 0.658056123397138, "grad_norm": 0.5028323531150818, "learning_rate": 0.00015160376833704947, "loss": 0.8834, "step": 3541 }, { "epoch": 0.6582419624605091, "grad_norm": 0.6550874710083008, "learning_rate": 0.00015157871510574727, "loss": 1.1537, "step": 3542 }, { "epoch": 0.6584278015238804, "grad_norm": 0.6116010546684265, "learning_rate": 0.0001515536574628061, "loss": 1.1274, "step": 3543 }, { "epoch": 0.6586136405872515, "grad_norm": 0.649966299533844, "learning_rate": 0.0001515285954103692, "loss": 0.7512, "step": 3544 }, { "epoch": 0.6587994796506226, "grad_norm": 0.7527809739112854, "learning_rate": 0.00015150352895058013, "loss": 1.164, "step": 3545 }, { "epoch": 0.6589853187139937, "grad_norm": 0.6256502866744995, "learning_rate": 0.0001514784580855829, "loss": 1.2635, "step": 3546 }, { "epoch": 0.6591711577773648, "grad_norm": 0.5845198631286621, "learning_rate": 0.00015145338281752192, "loss": 1.026, "step": 3547 }, { "epoch": 0.6593569968407359, "grad_norm": 0.6609519720077515, "learning_rate": 0.00015142830314854191, "loss": 1.1224, "step": 3548 }, { "epoch": 0.659542835904107, "grad_norm": 0.6253378987312317, "learning_rate": 0.00015140321908078798, "loss": 1.1397, "step": 3549 }, { "epoch": 0.6597286749674781, "grad_norm": 0.6805525422096252, "learning_rate": 
0.00015137813061640563, "loss": 1.1877, "step": 3550 }, { "epoch": 0.6599145140308493, "grad_norm": 0.5571733713150024, "learning_rate": 0.0001513530377575407, "loss": 0.9482, "step": 3551 }, { "epoch": 0.6601003530942204, "grad_norm": 0.6012757420539856, "learning_rate": 0.00015132794050633949, "loss": 1.062, "step": 3552 }, { "epoch": 0.6602861921575915, "grad_norm": 0.6188012361526489, "learning_rate": 0.00015130283886494853, "loss": 0.9913, "step": 3553 }, { "epoch": 0.6604720312209627, "grad_norm": 0.7151011228561401, "learning_rate": 0.00015127773283551494, "loss": 1.2644, "step": 3554 }, { "epoch": 0.6606578702843338, "grad_norm": 0.5433036684989929, "learning_rate": 0.00015125262242018602, "loss": 1.1957, "step": 3555 }, { "epoch": 0.6608437093477049, "grad_norm": 0.6361022591590881, "learning_rate": 0.00015122750762110954, "loss": 1.1375, "step": 3556 }, { "epoch": 0.661029548411076, "grad_norm": 0.6053920984268188, "learning_rate": 0.00015120238844043362, "loss": 1.096, "step": 3557 }, { "epoch": 0.6612153874744471, "grad_norm": 0.5946423411369324, "learning_rate": 0.00015117726488030677, "loss": 0.9535, "step": 3558 }, { "epoch": 0.6614012265378183, "grad_norm": 0.6494511961936951, "learning_rate": 0.0001511521369428778, "loss": 0.9476, "step": 3559 }, { "epoch": 0.6615870656011894, "grad_norm": 0.7746506929397583, "learning_rate": 0.00015112700463029602, "loss": 0.7522, "step": 3560 }, { "epoch": 0.6617729046645605, "grad_norm": 0.6443954110145569, "learning_rate": 0.00015110186794471103, "loss": 1.2498, "step": 3561 }, { "epoch": 0.6619587437279316, "grad_norm": 0.6012477278709412, "learning_rate": 0.00015107672688827287, "loss": 0.8012, "step": 3562 }, { "epoch": 0.6621445827913027, "grad_norm": 0.6210688948631287, "learning_rate": 0.00015105158146313182, "loss": 1.0039, "step": 3563 }, { "epoch": 0.6623304218546738, "grad_norm": 0.621110737323761, "learning_rate": 0.0001510264316714387, "loss": 1.063, "step": 3564 }, { "epoch": 0.6625162609180449, 
"grad_norm": 0.5509716272354126, "learning_rate": 0.00015100127751534458, "loss": 1.0168, "step": 3565 }, { "epoch": 0.662702099981416, "grad_norm": 0.7248783707618713, "learning_rate": 0.00015097611899700096, "loss": 1.0515, "step": 3566 }, { "epoch": 0.6628879390447873, "grad_norm": 0.5176343321800232, "learning_rate": 0.00015095095611855971, "loss": 0.8917, "step": 3567 }, { "epoch": 0.6630737781081584, "grad_norm": 0.6484975814819336, "learning_rate": 0.00015092578888217307, "loss": 1.0518, "step": 3568 }, { "epoch": 0.6632596171715295, "grad_norm": 0.608376145362854, "learning_rate": 0.00015090061728999365, "loss": 1.1361, "step": 3569 }, { "epoch": 0.6634454562349006, "grad_norm": 0.5764702558517456, "learning_rate": 0.00015087544134417442, "loss": 0.8332, "step": 3570 }, { "epoch": 0.6636312952982717, "grad_norm": 0.5989474654197693, "learning_rate": 0.00015085026104686873, "loss": 0.9523, "step": 3571 }, { "epoch": 0.6638171343616428, "grad_norm": 0.5990676879882812, "learning_rate": 0.00015082507640023032, "loss": 0.941, "step": 3572 }, { "epoch": 0.6640029734250139, "grad_norm": 0.504645824432373, "learning_rate": 0.00015079988740641326, "loss": 0.8054, "step": 3573 }, { "epoch": 0.664188812488385, "grad_norm": 0.6226556897163391, "learning_rate": 0.00015077469406757206, "loss": 1.1862, "step": 3574 }, { "epoch": 0.6643746515517562, "grad_norm": 0.6347982287406921, "learning_rate": 0.0001507494963858615, "loss": 1.4854, "step": 3575 }, { "epoch": 0.6645604906151273, "grad_norm": 0.570537805557251, "learning_rate": 0.00015072429436343687, "loss": 0.9307, "step": 3576 }, { "epoch": 0.6647463296784984, "grad_norm": 0.6247116923332214, "learning_rate": 0.0001506990880024537, "loss": 1.2029, "step": 3577 }, { "epoch": 0.6649321687418696, "grad_norm": 0.7195250988006592, "learning_rate": 0.00015067387730506793, "loss": 1.0774, "step": 3578 }, { "epoch": 0.6651180078052407, "grad_norm": 0.5821033120155334, "learning_rate": 0.00015064866227343596, "loss": 1.1147, 
"step": 3579 }, { "epoch": 0.6653038468686118, "grad_norm": 0.6598196625709534, "learning_rate": 0.00015062344290971442, "loss": 1.2274, "step": 3580 }, { "epoch": 0.6654896859319829, "grad_norm": 0.5154291987419128, "learning_rate": 0.0001505982192160604, "loss": 1.0469, "step": 3581 }, { "epoch": 0.665675524995354, "grad_norm": 0.5266774892807007, "learning_rate": 0.00015057299119463128, "loss": 0.9102, "step": 3582 }, { "epoch": 0.6658613640587251, "grad_norm": 0.6214563250541687, "learning_rate": 0.00015054775884758498, "loss": 1.0344, "step": 3583 }, { "epoch": 0.6660472031220963, "grad_norm": 0.6299375891685486, "learning_rate": 0.00015052252217707962, "loss": 1.1272, "step": 3584 }, { "epoch": 0.6662330421854674, "grad_norm": 3.086122989654541, "learning_rate": 0.00015049728118527375, "loss": 2.7928, "step": 3585 }, { "epoch": 0.6664188812488385, "grad_norm": 0.556920051574707, "learning_rate": 0.00015047203587432626, "loss": 1.1483, "step": 3586 }, { "epoch": 0.6666047203122096, "grad_norm": 0.5242355465888977, "learning_rate": 0.00015044678624639648, "loss": 0.9973, "step": 3587 }, { "epoch": 0.6667905593755807, "grad_norm": 0.6420267820358276, "learning_rate": 0.000150421532303644, "loss": 1.1282, "step": 3588 }, { "epoch": 0.6669763984389518, "grad_norm": 0.6128627061843872, "learning_rate": 0.00015039627404822892, "loss": 1.0652, "step": 3589 }, { "epoch": 0.667162237502323, "grad_norm": 0.5869479775428772, "learning_rate": 0.00015037101148231158, "loss": 0.9901, "step": 3590 }, { "epoch": 0.6673480765656941, "grad_norm": 0.5982796549797058, "learning_rate": 0.00015034574460805279, "loss": 0.9696, "step": 3591 }, { "epoch": 0.6675339156290653, "grad_norm": 0.608452320098877, "learning_rate": 0.0001503204734276136, "loss": 1.2884, "step": 3592 }, { "epoch": 0.6677197546924364, "grad_norm": 0.6519098281860352, "learning_rate": 0.0001502951979431556, "loss": 1.132, "step": 3593 }, { "epoch": 0.6679055937558075, "grad_norm": 0.606254518032074, 
"learning_rate": 0.00015026991815684064, "loss": 1.1658, "step": 3594 }, { "epoch": 0.6680914328191786, "grad_norm": 0.6207895278930664, "learning_rate": 0.00015024463407083087, "loss": 1.0944, "step": 3595 }, { "epoch": 0.6682772718825497, "grad_norm": 0.6666253209114075, "learning_rate": 0.00015021934568728898, "loss": 1.2198, "step": 3596 }, { "epoch": 0.6684631109459208, "grad_norm": 0.5867999792098999, "learning_rate": 0.00015019405300837794, "loss": 1.0747, "step": 3597 }, { "epoch": 0.6686489500092919, "grad_norm": 0.6101366281509399, "learning_rate": 0.00015016875603626105, "loss": 1.2563, "step": 3598 }, { "epoch": 0.668834789072663, "grad_norm": 0.6357227563858032, "learning_rate": 0.00015014345477310203, "loss": 1.0178, "step": 3599 }, { "epoch": 0.6690206281360342, "grad_norm": 0.6158996224403381, "learning_rate": 0.00015011814922106495, "loss": 1.2432, "step": 3600 }, { "epoch": 0.6692064671994054, "grad_norm": 0.5483404994010925, "learning_rate": 0.00015009283938231422, "loss": 1.1968, "step": 3601 }, { "epoch": 0.6693923062627765, "grad_norm": 0.5520810484886169, "learning_rate": 0.00015006752525901468, "loss": 1.1107, "step": 3602 }, { "epoch": 0.6695781453261476, "grad_norm": 0.5998095870018005, "learning_rate": 0.00015004220685333153, "loss": 0.9836, "step": 3603 }, { "epoch": 0.6697639843895187, "grad_norm": 0.48578593134880066, "learning_rate": 0.00015001688416743024, "loss": 0.7812, "step": 3604 }, { "epoch": 0.6699498234528898, "grad_norm": 0.5815861821174622, "learning_rate": 0.00014999155720347675, "loss": 1.2315, "step": 3605 }, { "epoch": 0.6701356625162609, "grad_norm": 0.6169844269752502, "learning_rate": 0.00014996622596363733, "loss": 0.8476, "step": 3606 }, { "epoch": 0.670321501579632, "grad_norm": 0.5464169979095459, "learning_rate": 0.00014994089045007865, "loss": 0.9376, "step": 3607 }, { "epoch": 0.6705073406430032, "grad_norm": 0.6786235570907593, "learning_rate": 0.00014991555066496762, "loss": 1.0984, "step": 3608 }, { 
"epoch": 0.6706931797063743, "grad_norm": 0.7276574969291687, "learning_rate": 0.00014989020661047167, "loss": 1.0315, "step": 3609 }, { "epoch": 0.6708790187697454, "grad_norm": 0.60429847240448, "learning_rate": 0.00014986485828875855, "loss": 1.0041, "step": 3610 }, { "epoch": 0.6710648578331165, "grad_norm": 0.548711895942688, "learning_rate": 0.0001498395057019963, "loss": 0.9941, "step": 3611 }, { "epoch": 0.6712506968964876, "grad_norm": 0.610514760017395, "learning_rate": 0.00014981414885235342, "loss": 1.2205, "step": 3612 }, { "epoch": 0.6714365359598587, "grad_norm": 0.5238599181175232, "learning_rate": 0.0001497887877419987, "loss": 1.0296, "step": 3613 }, { "epoch": 0.6716223750232299, "grad_norm": 0.5102425217628479, "learning_rate": 0.0001497634223731014, "loss": 0.9913, "step": 3614 }, { "epoch": 0.671808214086601, "grad_norm": 0.6130759119987488, "learning_rate": 0.00014973805274783101, "loss": 1.0176, "step": 3615 }, { "epoch": 0.6719940531499721, "grad_norm": 0.5231581926345825, "learning_rate": 0.00014971267886835748, "loss": 0.969, "step": 3616 }, { "epoch": 0.6721798922133433, "grad_norm": 0.5673276782035828, "learning_rate": 0.00014968730073685108, "loss": 1.0535, "step": 3617 }, { "epoch": 0.6723657312767144, "grad_norm": 0.6071367263793945, "learning_rate": 0.00014966191835548247, "loss": 1.0199, "step": 3618 }, { "epoch": 0.6725515703400855, "grad_norm": 0.6172305345535278, "learning_rate": 0.0001496365317264226, "loss": 1.099, "step": 3619 }, { "epoch": 0.6727374094034566, "grad_norm": 0.5747281312942505, "learning_rate": 0.00014961114085184293, "loss": 1.2114, "step": 3620 }, { "epoch": 0.6729232484668277, "grad_norm": 0.5879877805709839, "learning_rate": 0.00014958574573391517, "loss": 1.2082, "step": 3621 }, { "epoch": 0.6731090875301988, "grad_norm": 0.5075231194496155, "learning_rate": 0.0001495603463748114, "loss": 0.8392, "step": 3622 }, { "epoch": 0.6732949265935699, "grad_norm": 0.6247887015342712, "learning_rate": 
0.00014953494277670408, "loss": 1.0629, "step": 3623 }, { "epoch": 0.673480765656941, "grad_norm": 0.5654303431510925, "learning_rate": 0.00014950953494176605, "loss": 1.0257, "step": 3624 }, { "epoch": 0.6736666047203123, "grad_norm": 0.5614563226699829, "learning_rate": 0.0001494841228721705, "loss": 1.0373, "step": 3625 }, { "epoch": 0.6738524437836834, "grad_norm": 0.47630125284194946, "learning_rate": 0.00014945870657009095, "loss": 0.9697, "step": 3626 }, { "epoch": 0.6740382828470545, "grad_norm": 0.6390812397003174, "learning_rate": 0.00014943328603770137, "loss": 1.0709, "step": 3627 }, { "epoch": 0.6742241219104256, "grad_norm": 0.6576070189476013, "learning_rate": 0.00014940786127717596, "loss": 1.1442, "step": 3628 }, { "epoch": 0.6744099609737967, "grad_norm": 0.6565425395965576, "learning_rate": 0.00014938243229068942, "loss": 1.2, "step": 3629 }, { "epoch": 0.6745958000371678, "grad_norm": 0.6493493318557739, "learning_rate": 0.00014935699908041666, "loss": 1.0959, "step": 3630 }, { "epoch": 0.6747816391005389, "grad_norm": 0.5119420886039734, "learning_rate": 0.00014933156164853318, "loss": 1.0431, "step": 3631 }, { "epoch": 0.67496747816391, "grad_norm": 0.7568409442901611, "learning_rate": 0.00014930611999721457, "loss": 1.3266, "step": 3632 }, { "epoch": 0.6751533172272812, "grad_norm": 0.596426248550415, "learning_rate": 0.0001492806741286369, "loss": 1.1316, "step": 3633 }, { "epoch": 0.6753391562906523, "grad_norm": 0.6918880939483643, "learning_rate": 0.00014925522404497674, "loss": 1.286, "step": 3634 }, { "epoch": 0.6755249953540234, "grad_norm": 0.5288131237030029, "learning_rate": 0.00014922976974841078, "loss": 0.965, "step": 3635 }, { "epoch": 0.6757108344173945, "grad_norm": 0.6257370114326477, "learning_rate": 0.00014920431124111624, "loss": 1.0397, "step": 3636 }, { "epoch": 0.6758966734807657, "grad_norm": 0.581321656703949, "learning_rate": 0.00014917884852527058, "loss": 1.1978, "step": 3637 }, { "epoch": 0.6760825125441368, 
"grad_norm": 0.602165162563324, "learning_rate": 0.00014915338160305174, "loss": 1.2811, "step": 3638 }, { "epoch": 0.6762683516075079, "grad_norm": 0.4515889286994934, "learning_rate": 0.00014912791047663791, "loss": 0.9584, "step": 3639 }, { "epoch": 0.676454190670879, "grad_norm": 0.6313900351524353, "learning_rate": 0.00014910243514820777, "loss": 1.1116, "step": 3640 }, { "epoch": 0.6766400297342502, "grad_norm": 0.6517963409423828, "learning_rate": 0.00014907695561994016, "loss": 1.1079, "step": 3641 }, { "epoch": 0.6768258687976213, "grad_norm": 0.5934468507766724, "learning_rate": 0.00014905147189401449, "loss": 1.0876, "step": 3642 }, { "epoch": 0.6770117078609924, "grad_norm": 0.5406286120414734, "learning_rate": 0.00014902598397261043, "loss": 0.9613, "step": 3643 }, { "epoch": 0.6771975469243635, "grad_norm": 0.6092156767845154, "learning_rate": 0.000149000491857908, "loss": 0.8997, "step": 3644 }, { "epoch": 0.6773833859877346, "grad_norm": 0.5583072304725647, "learning_rate": 0.00014897499555208761, "loss": 1.2435, "step": 3645 }, { "epoch": 0.6775692250511057, "grad_norm": 0.6321995854377747, "learning_rate": 0.00014894949505732998, "loss": 1.0082, "step": 3646 }, { "epoch": 0.6777550641144768, "grad_norm": 0.5312910079956055, "learning_rate": 0.00014892399037581627, "loss": 1.0634, "step": 3647 }, { "epoch": 0.6779409031778479, "grad_norm": 0.6095939874649048, "learning_rate": 0.00014889848150972788, "loss": 0.9725, "step": 3648 }, { "epoch": 0.678126742241219, "grad_norm": 0.7284290790557861, "learning_rate": 0.0001488729684612467, "loss": 1.1883, "step": 3649 }, { "epoch": 0.6783125813045903, "grad_norm": 0.5818365216255188, "learning_rate": 0.00014884745123255487, "loss": 1.0793, "step": 3650 }, { "epoch": 0.6784984203679614, "grad_norm": 0.6459710001945496, "learning_rate": 0.00014882192982583502, "loss": 1.0533, "step": 3651 }, { "epoch": 0.6786842594313325, "grad_norm": 0.5360872745513916, "learning_rate": 0.00014879640424327, "loss": 0.9852, 
"step": 3652 }, { "epoch": 0.6788700984947036, "grad_norm": 0.5609398484230042, "learning_rate": 0.00014877087448704303, "loss": 1.0946, "step": 3653 }, { "epoch": 0.6790559375580747, "grad_norm": 0.7220416069030762, "learning_rate": 0.00014874534055933774, "loss": 0.8899, "step": 3654 }, { "epoch": 0.6792417766214458, "grad_norm": 0.5624337196350098, "learning_rate": 0.00014871980246233815, "loss": 1.2113, "step": 3655 }, { "epoch": 0.6794276156848169, "grad_norm": 0.5820707082748413, "learning_rate": 0.00014869426019822855, "loss": 1.1378, "step": 3656 }, { "epoch": 0.679613454748188, "grad_norm": 0.6045162677764893, "learning_rate": 0.0001486687137691936, "loss": 1.2984, "step": 3657 }, { "epoch": 0.6797992938115592, "grad_norm": 0.6777940392494202, "learning_rate": 0.00014864316317741842, "loss": 1.2248, "step": 3658 }, { "epoch": 0.6799851328749303, "grad_norm": 0.6275390386581421, "learning_rate": 0.00014861760842508836, "loss": 1.2587, "step": 3659 }, { "epoch": 0.6801709719383014, "grad_norm": 0.5438305139541626, "learning_rate": 0.00014859204951438916, "loss": 1.104, "step": 3660 }, { "epoch": 0.6803568110016726, "grad_norm": 0.7176220417022705, "learning_rate": 0.00014856648644750696, "loss": 1.2366, "step": 3661 }, { "epoch": 0.6805426500650437, "grad_norm": 0.5285633206367493, "learning_rate": 0.00014854091922662818, "loss": 0.9695, "step": 3662 }, { "epoch": 0.6807284891284148, "grad_norm": 0.5242528915405273, "learning_rate": 0.0001485153478539397, "loss": 1.0871, "step": 3663 }, { "epoch": 0.6809143281917859, "grad_norm": 0.6045339107513428, "learning_rate": 0.00014848977233162863, "loss": 1.1065, "step": 3664 }, { "epoch": 0.681100167255157, "grad_norm": 0.6746090650558472, "learning_rate": 0.00014846419266188255, "loss": 1.1891, "step": 3665 }, { "epoch": 0.6812860063185282, "grad_norm": 0.6344028115272522, "learning_rate": 0.0001484386088468893, "loss": 1.2639, "step": 3666 }, { "epoch": 0.6814718453818993, "grad_norm": 0.7116642594337463, 
"learning_rate": 0.0001484130208888372, "loss": 1.0406, "step": 3667 }, { "epoch": 0.6816576844452704, "grad_norm": 0.5471962094306946, "learning_rate": 0.00014838742878991475, "loss": 1.1569, "step": 3668 }, { "epoch": 0.6818435235086415, "grad_norm": 0.6500441431999207, "learning_rate": 0.00014836183255231096, "loss": 0.8941, "step": 3669 }, { "epoch": 0.6820293625720126, "grad_norm": 0.7653695940971375, "learning_rate": 0.00014833623217821507, "loss": 1.2929, "step": 3670 }, { "epoch": 0.6822152016353837, "grad_norm": 0.7275819182395935, "learning_rate": 0.00014831062766981678, "loss": 1.0589, "step": 3671 }, { "epoch": 0.6824010406987548, "grad_norm": 0.4899131655693054, "learning_rate": 0.00014828501902930613, "loss": 0.6845, "step": 3672 }, { "epoch": 0.682586879762126, "grad_norm": 0.5876761674880981, "learning_rate": 0.00014825940625887342, "loss": 1.0859, "step": 3673 }, { "epoch": 0.6827727188254972, "grad_norm": 0.5021051168441772, "learning_rate": 0.0001482337893607094, "loss": 0.9498, "step": 3674 }, { "epoch": 0.6829585578888683, "grad_norm": 0.8564853072166443, "learning_rate": 0.00014820816833700513, "loss": 1.3126, "step": 3675 }, { "epoch": 0.6831443969522394, "grad_norm": 0.5066491365432739, "learning_rate": 0.00014818254318995204, "loss": 0.9441, "step": 3676 }, { "epoch": 0.6833302360156105, "grad_norm": 0.6260133981704712, "learning_rate": 0.00014815691392174187, "loss": 1.203, "step": 3677 }, { "epoch": 0.6835160750789816, "grad_norm": 0.67519611120224, "learning_rate": 0.00014813128053456676, "loss": 1.2433, "step": 3678 }, { "epoch": 0.6837019141423527, "grad_norm": 0.5620138049125671, "learning_rate": 0.00014810564303061922, "loss": 1.0496, "step": 3679 }, { "epoch": 0.6838877532057238, "grad_norm": 0.6018763780593872, "learning_rate": 0.00014808000141209207, "loss": 1.0698, "step": 3680 }, { "epoch": 0.6840735922690949, "grad_norm": 0.6768195629119873, "learning_rate": 0.0001480543556811785, "loss": 1.1439, "step": 3681 }, { "epoch": 
0.6842594313324661, "grad_norm": 0.4748595654964447, "learning_rate": 0.000148028705840072, "loss": 1.0552, "step": 3682 }, { "epoch": 0.6844452703958372, "grad_norm": 0.5992226004600525, "learning_rate": 0.00014800305189096647, "loss": 1.1218, "step": 3683 }, { "epoch": 0.6846311094592084, "grad_norm": 0.5371170043945312, "learning_rate": 0.0001479773938360562, "loss": 0.6813, "step": 3684 }, { "epoch": 0.6848169485225795, "grad_norm": 0.6463387608528137, "learning_rate": 0.00014795173167753573, "loss": 0.9385, "step": 3685 }, { "epoch": 0.6850027875859506, "grad_norm": 0.6344730257987976, "learning_rate": 0.0001479260654176, "loss": 0.9097, "step": 3686 }, { "epoch": 0.6851886266493217, "grad_norm": 0.7050276398658752, "learning_rate": 0.00014790039505844434, "loss": 1.0801, "step": 3687 }, { "epoch": 0.6853744657126928, "grad_norm": 0.6179599761962891, "learning_rate": 0.00014787472060226432, "loss": 1.1076, "step": 3688 }, { "epoch": 0.6855603047760639, "grad_norm": 0.587262749671936, "learning_rate": 0.00014784904205125603, "loss": 1.0408, "step": 3689 }, { "epoch": 0.685746143839435, "grad_norm": 0.6305316686630249, "learning_rate": 0.00014782335940761573, "loss": 1.2521, "step": 3690 }, { "epoch": 0.6859319829028062, "grad_norm": 0.6304357647895813, "learning_rate": 0.00014779767267354016, "loss": 1.1457, "step": 3691 }, { "epoch": 0.6861178219661773, "grad_norm": 0.5930676460266113, "learning_rate": 0.0001477719818512263, "loss": 0.9276, "step": 3692 }, { "epoch": 0.6863036610295484, "grad_norm": 0.6224808096885681, "learning_rate": 0.0001477462869428716, "loss": 1.0416, "step": 3693 }, { "epoch": 0.6864895000929195, "grad_norm": 0.7327170372009277, "learning_rate": 0.00014772058795067377, "loss": 1.255, "step": 3694 }, { "epoch": 0.6866753391562906, "grad_norm": 3.124260187149048, "learning_rate": 0.00014769488487683096, "loss": 1.9742, "step": 3695 }, { "epoch": 0.6868611782196617, "grad_norm": 0.6538242101669312, "learning_rate": 0.00014766917772354154, 
"loss": 1.1323, "step": 3696 }, { "epoch": 0.6870470172830329, "grad_norm": 0.5959080457687378, "learning_rate": 0.00014764346649300434, "loss": 1.0653, "step": 3697 }, { "epoch": 0.687232856346404, "grad_norm": 0.6007588505744934, "learning_rate": 0.00014761775118741848, "loss": 1.1666, "step": 3698 }, { "epoch": 0.6874186954097752, "grad_norm": 0.6197354793548584, "learning_rate": 0.0001475920318089834, "loss": 1.0461, "step": 3699 }, { "epoch": 0.6876045344731463, "grad_norm": 0.6801838874816895, "learning_rate": 0.000147566308359899, "loss": 1.061, "step": 3700 }, { "epoch": 0.6877903735365174, "grad_norm": 0.662079393863678, "learning_rate": 0.00014754058084236546, "loss": 1.1442, "step": 3701 }, { "epoch": 0.6879762125998885, "grad_norm": 0.6772270202636719, "learning_rate": 0.00014751484925858328, "loss": 1.1368, "step": 3702 }, { "epoch": 0.6881620516632596, "grad_norm": 0.9411020874977112, "learning_rate": 0.00014748911361075337, "loss": 1.209, "step": 3703 }, { "epoch": 0.6883478907266307, "grad_norm": 0.5544502139091492, "learning_rate": 0.00014746337390107692, "loss": 0.9643, "step": 3704 }, { "epoch": 0.6885337297900018, "grad_norm": 0.6655852198600769, "learning_rate": 0.00014743763013175552, "loss": 1.1832, "step": 3705 }, { "epoch": 0.6887195688533729, "grad_norm": 0.5229987502098083, "learning_rate": 0.00014741188230499105, "loss": 0.9048, "step": 3706 }, { "epoch": 0.6889054079167442, "grad_norm": 0.5792685747146606, "learning_rate": 0.00014738613042298586, "loss": 1.1681, "step": 3707 }, { "epoch": 0.6890912469801153, "grad_norm": 0.6217467784881592, "learning_rate": 0.0001473603744879425, "loss": 0.9995, "step": 3708 }, { "epoch": 0.6892770860434864, "grad_norm": 0.5169404745101929, "learning_rate": 0.00014733461450206395, "loss": 0.9465, "step": 3709 }, { "epoch": 0.6894629251068575, "grad_norm": 0.5788629055023193, "learning_rate": 0.00014730885046755355, "loss": 1.092, "step": 3710 }, { "epoch": 0.6896487641702286, "grad_norm": 
0.5894346833229065, "learning_rate": 0.0001472830823866149, "loss": 1.16, "step": 3711 }, { "epoch": 0.6898346032335997, "grad_norm": 0.597175121307373, "learning_rate": 0.00014725731026145203, "loss": 1.2234, "step": 3712 }, { "epoch": 0.6900204422969708, "grad_norm": 0.6156522035598755, "learning_rate": 0.00014723153409426927, "loss": 1.1736, "step": 3713 }, { "epoch": 0.6902062813603419, "grad_norm": 0.5588281154632568, "learning_rate": 0.00014720575388727132, "loss": 1.1153, "step": 3714 }, { "epoch": 0.6903921204237131, "grad_norm": 0.5758021473884583, "learning_rate": 0.0001471799696426632, "loss": 1.0781, "step": 3715 }, { "epoch": 0.6905779594870842, "grad_norm": 0.6536004543304443, "learning_rate": 0.0001471541813626503, "loss": 1.0397, "step": 3716 }, { "epoch": 0.6907637985504553, "grad_norm": 0.6770461797714233, "learning_rate": 0.0001471283890494384, "loss": 0.9885, "step": 3717 }, { "epoch": 0.6909496376138264, "grad_norm": 0.5764928460121155, "learning_rate": 0.0001471025927052335, "loss": 1.0299, "step": 3718 }, { "epoch": 0.6911354766771975, "grad_norm": 0.6257376074790955, "learning_rate": 0.00014707679233224205, "loss": 1.0675, "step": 3719 }, { "epoch": 0.6913213157405687, "grad_norm": 0.606505811214447, "learning_rate": 0.00014705098793267078, "loss": 1.0614, "step": 3720 }, { "epoch": 0.6915071548039398, "grad_norm": 0.6218563914299011, "learning_rate": 0.00014702517950872685, "loss": 1.1444, "step": 3721 }, { "epoch": 0.6916929938673109, "grad_norm": 0.591198205947876, "learning_rate": 0.00014699936706261767, "loss": 1.1453, "step": 3722 }, { "epoch": 0.691878832930682, "grad_norm": 0.657878041267395, "learning_rate": 0.00014697355059655105, "loss": 1.1282, "step": 3723 }, { "epoch": 0.6920646719940532, "grad_norm": 0.5806160569190979, "learning_rate": 0.00014694773011273514, "loss": 1.1001, "step": 3724 }, { "epoch": 0.6922505110574243, "grad_norm": 0.5884582996368408, "learning_rate": 0.00014692190561337837, "loss": 1.0167, "step": 3725 }, 
{ "epoch": 0.6924363501207954, "grad_norm": 0.6860553622245789, "learning_rate": 0.0001468960771006896, "loss": 1.3315, "step": 3726 }, { "epoch": 0.6926221891841665, "grad_norm": 0.5938162207603455, "learning_rate": 0.00014687024457687805, "loss": 0.9759, "step": 3727 }, { "epoch": 0.6928080282475376, "grad_norm": 0.5152567028999329, "learning_rate": 0.00014684440804415313, "loss": 1.0112, "step": 3728 }, { "epoch": 0.6929938673109087, "grad_norm": 0.5616927742958069, "learning_rate": 0.00014681856750472475, "loss": 1.1259, "step": 3729 }, { "epoch": 0.6931797063742798, "grad_norm": 0.6486595273017883, "learning_rate": 0.00014679272296080312, "loss": 1.0842, "step": 3730 }, { "epoch": 0.6933655454376509, "grad_norm": 0.7352533340454102, "learning_rate": 0.0001467668744145988, "loss": 1.1292, "step": 3731 }, { "epoch": 0.6935513845010222, "grad_norm": 0.6770464181900024, "learning_rate": 0.0001467410218683226, "loss": 0.9304, "step": 3732 }, { "epoch": 0.6937372235643933, "grad_norm": 0.9361321330070496, "learning_rate": 0.0001467151653241858, "loss": 1.1675, "step": 3733 }, { "epoch": 0.6939230626277644, "grad_norm": 0.6761358976364136, "learning_rate": 0.00014668930478439987, "loss": 0.8866, "step": 3734 }, { "epoch": 0.6941089016911355, "grad_norm": 0.5960943698883057, "learning_rate": 0.00014666344025117685, "loss": 0.9362, "step": 3735 }, { "epoch": 0.6942947407545066, "grad_norm": 0.4672621488571167, "learning_rate": 0.00014663757172672895, "loss": 0.923, "step": 3736 }, { "epoch": 0.6944805798178777, "grad_norm": 0.9298095107078552, "learning_rate": 0.00014661169921326872, "loss": 1.1617, "step": 3737 }, { "epoch": 0.6946664188812488, "grad_norm": 0.47797614336013794, "learning_rate": 0.00014658582271300912, "loss": 0.973, "step": 3738 }, { "epoch": 0.6948522579446199, "grad_norm": 0.6770762801170349, "learning_rate": 0.0001465599422281634, "loss": 1.0214, "step": 3739 }, { "epoch": 0.6950380970079911, "grad_norm": 0.6708875894546509, "learning_rate": 
0.00014653405776094523, "loss": 1.0464, "step": 3740 }, { "epoch": 0.6952239360713622, "grad_norm": 0.7865105271339417, "learning_rate": 0.00014650816931356848, "loss": 1.3261, "step": 3741 }, { "epoch": 0.6954097751347333, "grad_norm": 0.5648350119590759, "learning_rate": 0.00014648227688824752, "loss": 0.882, "step": 3742 }, { "epoch": 0.6955956141981045, "grad_norm": 0.6764323711395264, "learning_rate": 0.00014645638048719695, "loss": 1.0985, "step": 3743 }, { "epoch": 0.6957814532614756, "grad_norm": 0.5773393511772156, "learning_rate": 0.00014643048011263175, "loss": 1.0442, "step": 3744 }, { "epoch": 0.6959672923248467, "grad_norm": 0.6438664793968201, "learning_rate": 0.00014640457576676723, "loss": 1.2884, "step": 3745 }, { "epoch": 0.6961531313882178, "grad_norm": 0.5628366470336914, "learning_rate": 0.00014637866745181906, "loss": 1.0877, "step": 3746 }, { "epoch": 0.6963389704515889, "grad_norm": 0.6594421863555908, "learning_rate": 0.00014635275517000323, "loss": 1.0294, "step": 3747 }, { "epoch": 0.6965248095149601, "grad_norm": 0.5636132955551147, "learning_rate": 0.00014632683892353604, "loss": 1.1675, "step": 3748 }, { "epoch": 0.6967106485783312, "grad_norm": 0.551912784576416, "learning_rate": 0.00014630091871463424, "loss": 0.9442, "step": 3749 }, { "epoch": 0.6968964876417023, "grad_norm": 0.6579510569572449, "learning_rate": 0.00014627499454551477, "loss": 1.1139, "step": 3750 }, { "epoch": 0.6970823267050734, "grad_norm": 0.7005996108055115, "learning_rate": 0.00014624906641839497, "loss": 1.1257, "step": 3751 }, { "epoch": 0.6972681657684445, "grad_norm": 0.6416681408882141, "learning_rate": 0.0001462231343354926, "loss": 0.9766, "step": 3752 }, { "epoch": 0.6974540048318156, "grad_norm": 0.6373915672302246, "learning_rate": 0.00014619719829902565, "loss": 1.3631, "step": 3753 }, { "epoch": 0.6976398438951867, "grad_norm": 0.6103551387786865, "learning_rate": 0.0001461712583112125, "loss": 1.0276, "step": 3754 }, { "epoch": 
0.6978256829585578, "grad_norm": 0.5814418196678162, "learning_rate": 0.0001461453143742718, "loss": 1.0218, "step": 3755 }, { "epoch": 0.698011522021929, "grad_norm": 0.6234370470046997, "learning_rate": 0.00014611936649042265, "loss": 1.1922, "step": 3756 }, { "epoch": 0.6981973610853002, "grad_norm": 0.6188172698020935, "learning_rate": 0.00014609341466188436, "loss": 1.1766, "step": 3757 }, { "epoch": 0.6983832001486713, "grad_norm": 0.7053810954093933, "learning_rate": 0.00014606745889087674, "loss": 1.2396, "step": 3758 }, { "epoch": 0.6985690392120424, "grad_norm": 0.553032636642456, "learning_rate": 0.0001460414991796198, "loss": 1.1424, "step": 3759 }, { "epoch": 0.6987548782754135, "grad_norm": 0.6534532308578491, "learning_rate": 0.0001460155355303339, "loss": 1.2171, "step": 3760 }, { "epoch": 0.6989407173387846, "grad_norm": 0.595773458480835, "learning_rate": 0.00014598956794523984, "loss": 1.35, "step": 3761 }, { "epoch": 0.6991265564021557, "grad_norm": 0.5927870869636536, "learning_rate": 0.00014596359642655857, "loss": 0.8717, "step": 3762 }, { "epoch": 0.6993123954655268, "grad_norm": 0.5296638607978821, "learning_rate": 0.0001459376209765116, "loss": 1.2105, "step": 3763 }, { "epoch": 0.6994982345288979, "grad_norm": 0.6530826687812805, "learning_rate": 0.00014591164159732063, "loss": 1.1223, "step": 3764 }, { "epoch": 0.6996840735922691, "grad_norm": 0.725963294506073, "learning_rate": 0.00014588565829120774, "loss": 0.9904, "step": 3765 }, { "epoch": 0.6998699126556402, "grad_norm": 0.6792590022087097, "learning_rate": 0.00014585967106039527, "loss": 1.0106, "step": 3766 }, { "epoch": 0.7000557517190114, "grad_norm": 0.657814621925354, "learning_rate": 0.00014583367990710604, "loss": 0.9676, "step": 3767 }, { "epoch": 0.7002415907823825, "grad_norm": 0.7891011834144592, "learning_rate": 0.00014580768483356312, "loss": 0.7333, "step": 3768 }, { "epoch": 0.7004274298457536, "grad_norm": 0.6137871742248535, "learning_rate": 
0.00014578168584198988, "loss": 1.0334, "step": 3769 }, { "epoch": 0.7006132689091247, "grad_norm": 0.6025509238243103, "learning_rate": 0.00014575568293461014, "loss": 0.7372, "step": 3770 }, { "epoch": 0.7007991079724958, "grad_norm": 0.5863919854164124, "learning_rate": 0.00014572967611364794, "loss": 1.1867, "step": 3771 }, { "epoch": 0.7009849470358669, "grad_norm": 0.6096488833427429, "learning_rate": 0.0001457036653813277, "loss": 1.1034, "step": 3772 }, { "epoch": 0.7011707860992381, "grad_norm": 0.6400436162948608, "learning_rate": 0.00014567765073987419, "loss": 1.0334, "step": 3773 }, { "epoch": 0.7013566251626092, "grad_norm": 0.6100161075592041, "learning_rate": 0.00014565163219151247, "loss": 1.186, "step": 3774 }, { "epoch": 0.7015424642259803, "grad_norm": 0.5866125822067261, "learning_rate": 0.00014562560973846802, "loss": 1.0076, "step": 3775 }, { "epoch": 0.7017283032893514, "grad_norm": 0.5028868913650513, "learning_rate": 0.0001455995833829665, "loss": 0.9723, "step": 3776 }, { "epoch": 0.7019141423527225, "grad_norm": 0.5891440510749817, "learning_rate": 0.00014557355312723412, "loss": 0.8673, "step": 3777 }, { "epoch": 0.7020999814160936, "grad_norm": 0.7121338248252869, "learning_rate": 0.0001455475189734972, "loss": 1.2381, "step": 3778 }, { "epoch": 0.7022858204794648, "grad_norm": 0.5803287625312805, "learning_rate": 0.00014552148092398257, "loss": 0.9524, "step": 3779 }, { "epoch": 0.7024716595428359, "grad_norm": 0.6246668696403503, "learning_rate": 0.00014549543898091728, "loss": 1.0676, "step": 3780 }, { "epoch": 0.7026574986062071, "grad_norm": 0.638159453868866, "learning_rate": 0.00014546939314652877, "loss": 1.1794, "step": 3781 }, { "epoch": 0.7028433376695782, "grad_norm": 0.7397798299789429, "learning_rate": 0.00014544334342304479, "loss": 1.1995, "step": 3782 }, { "epoch": 0.7030291767329493, "grad_norm": 0.6656100749969482, "learning_rate": 0.00014541728981269344, "loss": 1.2414, "step": 3783 }, { "epoch": 0.7032150157963204, 
"grad_norm": 0.6307308673858643, "learning_rate": 0.0001453912323177031, "loss": 1.2007, "step": 3784 }, { "epoch": 0.7034008548596915, "grad_norm": 0.6740713715553284, "learning_rate": 0.0001453651709403026, "loss": 1.1527, "step": 3785 }, { "epoch": 0.7035866939230626, "grad_norm": 0.5341428518295288, "learning_rate": 0.00014533910568272096, "loss": 1.0795, "step": 3786 }, { "epoch": 0.7037725329864337, "grad_norm": 0.5600630640983582, "learning_rate": 0.0001453130365471876, "loss": 1.1013, "step": 3787 }, { "epoch": 0.7039583720498048, "grad_norm": 0.5706908106803894, "learning_rate": 0.0001452869635359323, "loss": 1.0242, "step": 3788 }, { "epoch": 0.704144211113176, "grad_norm": 0.7494182586669922, "learning_rate": 0.00014526088665118517, "loss": 1.24, "step": 3789 }, { "epoch": 0.7043300501765472, "grad_norm": 0.860882580280304, "learning_rate": 0.00014523480589517652, "loss": 1.2362, "step": 3790 }, { "epoch": 0.7045158892399183, "grad_norm": 0.573000431060791, "learning_rate": 0.00014520872127013723, "loss": 1.3139, "step": 3791 }, { "epoch": 0.7047017283032894, "grad_norm": 0.621658980846405, "learning_rate": 0.00014518263277829823, "loss": 1.0566, "step": 3792 }, { "epoch": 0.7048875673666605, "grad_norm": 0.6506567001342773, "learning_rate": 0.00014515654042189107, "loss": 1.1673, "step": 3793 }, { "epoch": 0.7050734064300316, "grad_norm": 0.5923455357551575, "learning_rate": 0.00014513044420314737, "loss": 0.8684, "step": 3794 }, { "epoch": 0.7052592454934027, "grad_norm": 0.6548746228218079, "learning_rate": 0.00014510434412429922, "loss": 1.0784, "step": 3795 }, { "epoch": 0.7054450845567738, "grad_norm": 0.5360590815544128, "learning_rate": 0.00014507824018757906, "loss": 1.0521, "step": 3796 }, { "epoch": 0.7056309236201449, "grad_norm": 0.5973278284072876, "learning_rate": 0.00014505213239521955, "loss": 0.9411, "step": 3797 }, { "epoch": 0.7058167626835161, "grad_norm": 0.6145676970481873, "learning_rate": 0.0001450260207494538, "loss": 1.0747, 
"step": 3798 }, { "epoch": 0.7060026017468872, "grad_norm": 0.5279918313026428, "learning_rate": 0.00014499990525251524, "loss": 1.1325, "step": 3799 }, { "epoch": 0.7061884408102583, "grad_norm": 0.7617853283882141, "learning_rate": 0.0001449737859066375, "loss": 1.2461, "step": 3800 }, { "epoch": 0.7063742798736294, "grad_norm": 0.535610020160675, "learning_rate": 0.00014494766271405462, "loss": 0.873, "step": 3801 }, { "epoch": 0.7065601189370005, "grad_norm": 0.7088138461112976, "learning_rate": 0.00014492153567700102, "loss": 1.329, "step": 3802 }, { "epoch": 0.7067459580003717, "grad_norm": 0.5399606227874756, "learning_rate": 0.00014489540479771138, "loss": 0.8983, "step": 3803 }, { "epoch": 0.7069317970637428, "grad_norm": 0.6456025838851929, "learning_rate": 0.00014486927007842073, "loss": 1.3894, "step": 3804 }, { "epoch": 0.7071176361271139, "grad_norm": 0.5173892378807068, "learning_rate": 0.00014484313152136443, "loss": 1.0707, "step": 3805 }, { "epoch": 0.7073034751904851, "grad_norm": 0.6266672015190125, "learning_rate": 0.00014481698912877818, "loss": 1.0166, "step": 3806 }, { "epoch": 0.7074893142538562, "grad_norm": 0.6050211787223816, "learning_rate": 0.000144790842902898, "loss": 1.2041, "step": 3807 }, { "epoch": 0.7076751533172273, "grad_norm": 0.6445580124855042, "learning_rate": 0.00014476469284596024, "loss": 1.2802, "step": 3808 }, { "epoch": 0.7078609923805984, "grad_norm": 0.6184760332107544, "learning_rate": 0.0001447385389602015, "loss": 1.1176, "step": 3809 }, { "epoch": 0.7080468314439695, "grad_norm": 0.6994800567626953, "learning_rate": 0.00014471238124785888, "loss": 0.9874, "step": 3810 }, { "epoch": 0.7082326705073406, "grad_norm": 0.6110354065895081, "learning_rate": 0.00014468621971116963, "loss": 0.9592, "step": 3811 }, { "epoch": 0.7084185095707117, "grad_norm": 0.5898203253746033, "learning_rate": 0.0001446600543523714, "loss": 1.1581, "step": 3812 }, { "epoch": 0.7086043486340828, "grad_norm": 0.6678897142410278, 
"learning_rate": 0.00014463388517370224, "loss": 1.1353, "step": 3813 }, { "epoch": 0.708790187697454, "grad_norm": 0.601813018321991, "learning_rate": 0.0001446077121774004, "loss": 1.1991, "step": 3814 }, { "epoch": 0.7089760267608252, "grad_norm": 0.48424074053764343, "learning_rate": 0.00014458153536570456, "loss": 0.985, "step": 3815 }, { "epoch": 0.7091618658241963, "grad_norm": 11.190242767333984, "learning_rate": 0.00014455535474085362, "loss": 2.6148, "step": 3816 }, { "epoch": 0.7093477048875674, "grad_norm": 0.5649991035461426, "learning_rate": 0.00014452917030508688, "loss": 1.0995, "step": 3817 }, { "epoch": 0.7095335439509385, "grad_norm": 0.5765203237533569, "learning_rate": 0.00014450298206064395, "loss": 0.879, "step": 3818 }, { "epoch": 0.7097193830143096, "grad_norm": 0.6534028649330139, "learning_rate": 0.00014447679000976482, "loss": 1.0186, "step": 3819 }, { "epoch": 0.7099052220776807, "grad_norm": 0.6540850400924683, "learning_rate": 0.00014445059415468968, "loss": 1.3179, "step": 3820 }, { "epoch": 0.7100910611410518, "grad_norm": 0.5826019644737244, "learning_rate": 0.0001444243944976592, "loss": 1.0053, "step": 3821 }, { "epoch": 0.710276900204423, "grad_norm": 0.5010527968406677, "learning_rate": 0.00014439819104091422, "loss": 0.8363, "step": 3822 }, { "epoch": 0.7104627392677941, "grad_norm": 0.470480740070343, "learning_rate": 0.00014437198378669598, "loss": 0.8458, "step": 3823 }, { "epoch": 0.7106485783311652, "grad_norm": 0.5956453680992126, "learning_rate": 0.00014434577273724609, "loss": 1.2399, "step": 3824 }, { "epoch": 0.7108344173945363, "grad_norm": 0.7176646590232849, "learning_rate": 0.00014431955789480637, "loss": 0.9899, "step": 3825 }, { "epoch": 0.7110202564579075, "grad_norm": 0.4969988167285919, "learning_rate": 0.00014429333926161913, "loss": 0.7034, "step": 3826 }, { "epoch": 0.7112060955212786, "grad_norm": 0.4980323314666748, "learning_rate": 0.00014426711683992685, "loss": 1.0628, "step": 3827 }, { "epoch": 
0.7113919345846497, "grad_norm": 0.6527779698371887, "learning_rate": 0.00014424089063197238, "loss": 1.0731, "step": 3828 }, { "epoch": 0.7115777736480208, "grad_norm": 0.6407890319824219, "learning_rate": 0.0001442146606399989, "loss": 1.126, "step": 3829 }, { "epoch": 0.7117636127113919, "grad_norm": 0.6298221349716187, "learning_rate": 0.00014418842686624998, "loss": 0.9776, "step": 3830 }, { "epoch": 0.7119494517747631, "grad_norm": 0.6135900616645813, "learning_rate": 0.00014416218931296936, "loss": 1.2445, "step": 3831 }, { "epoch": 0.7121352908381342, "grad_norm": 0.5972301959991455, "learning_rate": 0.00014413594798240125, "loss": 1.3075, "step": 3832 }, { "epoch": 0.7123211299015053, "grad_norm": 0.47998619079589844, "learning_rate": 0.00014410970287679012, "loss": 0.9496, "step": 3833 }, { "epoch": 0.7125069689648764, "grad_norm": 0.6807659268379211, "learning_rate": 0.00014408345399838078, "loss": 1.0152, "step": 3834 }, { "epoch": 0.7126928080282475, "grad_norm": 0.5685128569602966, "learning_rate": 0.00014405720134941835, "loss": 0.911, "step": 3835 }, { "epoch": 0.7128786470916186, "grad_norm": 0.636781632900238, "learning_rate": 0.00014403094493214825, "loss": 0.9629, "step": 3836 }, { "epoch": 0.7130644861549897, "grad_norm": 0.570308268070221, "learning_rate": 0.0001440046847488163, "loss": 1.2221, "step": 3837 }, { "epoch": 0.7132503252183608, "grad_norm": 0.5886622071266174, "learning_rate": 0.00014397842080166853, "loss": 1.202, "step": 3838 }, { "epoch": 0.7134361642817321, "grad_norm": 0.5327900648117065, "learning_rate": 0.00014395215309295138, "loss": 1.1774, "step": 3839 }, { "epoch": 0.7136220033451032, "grad_norm": 0.5286857485771179, "learning_rate": 0.00014392588162491157, "loss": 1.0491, "step": 3840 }, { "epoch": 0.7138078424084743, "grad_norm": 0.5948933362960815, "learning_rate": 0.00014389960639979617, "loss": 1.0008, "step": 3841 }, { "epoch": 0.7139936814718454, "grad_norm": 0.5479618906974792, "learning_rate": 
0.00014387332741985258, "loss": 0.9369, "step": 3842 }, { "epoch": 0.7141795205352165, "grad_norm": 0.6125556826591492, "learning_rate": 0.00014384704468732847, "loss": 1.299, "step": 3843 }, { "epoch": 0.7143653595985876, "grad_norm": 0.615397036075592, "learning_rate": 0.00014382075820447185, "loss": 0.8941, "step": 3844 }, { "epoch": 0.7145511986619587, "grad_norm": 0.5345596671104431, "learning_rate": 0.00014379446797353106, "loss": 1.0561, "step": 3845 }, { "epoch": 0.7147370377253298, "grad_norm": 0.600506603717804, "learning_rate": 0.00014376817399675479, "loss": 1.0501, "step": 3846 }, { "epoch": 0.714922876788701, "grad_norm": 0.6428175568580627, "learning_rate": 0.00014374187627639196, "loss": 1.2755, "step": 3847 }, { "epoch": 0.7151087158520721, "grad_norm": 0.6733381152153015, "learning_rate": 0.00014371557481469198, "loss": 1.2535, "step": 3848 }, { "epoch": 0.7152945549154432, "grad_norm": 0.7081629633903503, "learning_rate": 0.00014368926961390438, "loss": 1.1677, "step": 3849 }, { "epoch": 0.7154803939788144, "grad_norm": 0.5885323286056519, "learning_rate": 0.0001436629606762791, "loss": 1.1147, "step": 3850 }, { "epoch": 0.7156662330421855, "grad_norm": 0.6808738112449646, "learning_rate": 0.00014363664800406647, "loss": 1.1371, "step": 3851 }, { "epoch": 0.7158520721055566, "grad_norm": 0.6646922826766968, "learning_rate": 0.00014361033159951699, "loss": 1.1832, "step": 3852 }, { "epoch": 0.7160379111689277, "grad_norm": 0.6628801226615906, "learning_rate": 0.0001435840114648816, "loss": 1.1926, "step": 3853 }, { "epoch": 0.7162237502322988, "grad_norm": 0.7184207439422607, "learning_rate": 0.0001435576876024115, "loss": 1.304, "step": 3854 }, { "epoch": 0.71640958929567, "grad_norm": 0.623140275478363, "learning_rate": 0.00014353136001435827, "loss": 1.1043, "step": 3855 }, { "epoch": 0.7165954283590411, "grad_norm": 0.639744222164154, "learning_rate": 0.00014350502870297374, "loss": 1.1718, "step": 3856 }, { "epoch": 0.7167812674224122, 
"grad_norm": 0.7367560267448425, "learning_rate": 0.00014347869367051005, "loss": 1.2005, "step": 3857 }, { "epoch": 0.7169671064857833, "grad_norm": 0.6177274584770203, "learning_rate": 0.00014345235491921974, "loss": 1.1259, "step": 3858 }, { "epoch": 0.7171529455491544, "grad_norm": 0.5447367429733276, "learning_rate": 0.00014342601245135556, "loss": 1.0504, "step": 3859 }, { "epoch": 0.7173387846125255, "grad_norm": 0.596401572227478, "learning_rate": 0.00014339966626917072, "loss": 1.0481, "step": 3860 }, { "epoch": 0.7175246236758966, "grad_norm": 0.5914924144744873, "learning_rate": 0.0001433733163749186, "loss": 1.2006, "step": 3861 }, { "epoch": 0.7177104627392678, "grad_norm": 0.6990320682525635, "learning_rate": 0.000143346962770853, "loss": 1.1984, "step": 3862 }, { "epoch": 0.717896301802639, "grad_norm": 0.5579658150672913, "learning_rate": 0.00014332060545922798, "loss": 0.996, "step": 3863 }, { "epoch": 0.7180821408660101, "grad_norm": 0.6074368357658386, "learning_rate": 0.00014329424444229797, "loss": 1.1579, "step": 3864 }, { "epoch": 0.7182679799293812, "grad_norm": 0.5289203524589539, "learning_rate": 0.00014326787972231765, "loss": 0.8867, "step": 3865 }, { "epoch": 0.7184538189927523, "grad_norm": 0.6656838059425354, "learning_rate": 0.00014324151130154207, "loss": 1.2723, "step": 3866 }, { "epoch": 0.7186396580561234, "grad_norm": 1.7440506219863892, "learning_rate": 0.00014321513918222657, "loss": 1.8616, "step": 3867 }, { "epoch": 0.7188254971194945, "grad_norm": 0.5146296620368958, "learning_rate": 0.0001431887633666268, "loss": 0.958, "step": 3868 }, { "epoch": 0.7190113361828656, "grad_norm": 0.6164444088935852, "learning_rate": 0.00014316238385699877, "loss": 1.2226, "step": 3869 }, { "epoch": 0.7191971752462367, "grad_norm": 0.6518514752388, "learning_rate": 0.00014313600065559876, "loss": 1.117, "step": 3870 }, { "epoch": 0.7193830143096078, "grad_norm": 0.5010316967964172, "learning_rate": 0.00014310961376468338, "loss": 0.9683, 
"step": 3871 }, { "epoch": 0.719568853372979, "grad_norm": 0.6240313649177551, "learning_rate": 0.00014308322318650957, "loss": 1.129, "step": 3872 }, { "epoch": 0.7197546924363502, "grad_norm": 0.5419994592666626, "learning_rate": 0.00014305682892333453, "loss": 1.1179, "step": 3873 }, { "epoch": 0.7199405314997213, "grad_norm": 0.6009571552276611, "learning_rate": 0.0001430304309774159, "loss": 1.2359, "step": 3874 }, { "epoch": 0.7201263705630924, "grad_norm": 0.7590610384941101, "learning_rate": 0.0001430040293510115, "loss": 1.1678, "step": 3875 }, { "epoch": 0.7203122096264635, "grad_norm": 0.6211830973625183, "learning_rate": 0.0001429776240463795, "loss": 1.2744, "step": 3876 }, { "epoch": 0.7204980486898346, "grad_norm": 0.7066710591316223, "learning_rate": 0.00014295121506577844, "loss": 1.1257, "step": 3877 }, { "epoch": 0.7206838877532057, "grad_norm": 0.716054379940033, "learning_rate": 0.00014292480241146716, "loss": 0.941, "step": 3878 }, { "epoch": 0.7208697268165768, "grad_norm": 0.6112728714942932, "learning_rate": 0.00014289838608570472, "loss": 1.3149, "step": 3879 }, { "epoch": 0.721055565879948, "grad_norm": 0.5588074326515198, "learning_rate": 0.00014287196609075062, "loss": 0.8184, "step": 3880 }, { "epoch": 0.7212414049433191, "grad_norm": 0.646625280380249, "learning_rate": 0.00014284554242886458, "loss": 0.9852, "step": 3881 }, { "epoch": 0.7214272440066902, "grad_norm": 0.5708473324775696, "learning_rate": 0.00014281911510230673, "loss": 0.8922, "step": 3882 }, { "epoch": 0.7216130830700613, "grad_norm": 0.5914095044136047, "learning_rate": 0.0001427926841133374, "loss": 1.1814, "step": 3883 }, { "epoch": 0.7217989221334324, "grad_norm": 0.7340452671051025, "learning_rate": 0.00014276624946421728, "loss": 1.09, "step": 3884 }, { "epoch": 0.7219847611968035, "grad_norm": 0.6473243832588196, "learning_rate": 0.00014273981115720745, "loss": 1.1023, "step": 3885 }, { "epoch": 0.7221706002601747, "grad_norm": 0.6255431175231934, 
"learning_rate": 0.0001427133691945692, "loss": 0.8885, "step": 3886 }, { "epoch": 0.7223564393235458, "grad_norm": 0.5867379307746887, "learning_rate": 0.00014268692357856415, "loss": 0.9642, "step": 3887 }, { "epoch": 0.722542278386917, "grad_norm": 0.5964205861091614, "learning_rate": 0.00014266047431145428, "loss": 1.1225, "step": 3888 }, { "epoch": 0.7227281174502881, "grad_norm": 0.6257895827293396, "learning_rate": 0.00014263402139550184, "loss": 1.0778, "step": 3889 }, { "epoch": 0.7229139565136592, "grad_norm": 0.7534006237983704, "learning_rate": 0.0001426075648329694, "loss": 1.1555, "step": 3890 }, { "epoch": 0.7230997955770303, "grad_norm": 0.7284505367279053, "learning_rate": 0.00014258110462611986, "loss": 0.8882, "step": 3891 }, { "epoch": 0.7232856346404014, "grad_norm": 0.54058837890625, "learning_rate": 0.00014255464077721642, "loss": 0.6677, "step": 3892 }, { "epoch": 0.7234714737037725, "grad_norm": 0.5655109882354736, "learning_rate": 0.00014252817328852256, "loss": 0.9088, "step": 3893 }, { "epoch": 0.7236573127671436, "grad_norm": 0.6212193369865417, "learning_rate": 0.0001425017021623021, "loss": 1.1465, "step": 3894 }, { "epoch": 0.7238431518305147, "grad_norm": 0.6020740866661072, "learning_rate": 0.00014247522740081924, "loss": 0.9097, "step": 3895 }, { "epoch": 0.724028990893886, "grad_norm": 0.5451498627662659, "learning_rate": 0.00014244874900633839, "loss": 0.8835, "step": 3896 }, { "epoch": 0.7242148299572571, "grad_norm": 0.6161134243011475, "learning_rate": 0.00014242226698112426, "loss": 1.1958, "step": 3897 }, { "epoch": 0.7244006690206282, "grad_norm": 0.6352266669273376, "learning_rate": 0.00014239578132744195, "loss": 1.1156, "step": 3898 }, { "epoch": 0.7245865080839993, "grad_norm": 0.6710106134414673, "learning_rate": 0.00014236929204755684, "loss": 0.9735, "step": 3899 }, { "epoch": 0.7247723471473704, "grad_norm": 0.6463992595672607, "learning_rate": 0.00014234279914373464, "loss": 1.0622, "step": 3900 }, { "epoch": 
0.7249581862107415, "grad_norm": 0.6164476275444031, "learning_rate": 0.00014231630261824128, "loss": 1.0894, "step": 3901 }, { "epoch": 0.7251440252741126, "grad_norm": 0.5363966226577759, "learning_rate": 0.0001422898024733431, "loss": 1.2831, "step": 3902 }, { "epoch": 0.7253298643374837, "grad_norm": 0.533882200717926, "learning_rate": 0.00014226329871130672, "loss": 1.0128, "step": 3903 }, { "epoch": 0.7255157034008548, "grad_norm": 0.5951628088951111, "learning_rate": 0.00014223679133439906, "loss": 1.0399, "step": 3904 }, { "epoch": 0.725701542464226, "grad_norm": 0.5895074605941772, "learning_rate": 0.00014221028034488734, "loss": 1.0381, "step": 3905 }, { "epoch": 0.7258873815275971, "grad_norm": 0.740982174873352, "learning_rate": 0.00014218376574503915, "loss": 1.1248, "step": 3906 }, { "epoch": 0.7260732205909682, "grad_norm": 0.6732906103134155, "learning_rate": 0.00014215724753712227, "loss": 1.0312, "step": 3907 }, { "epoch": 0.7262590596543393, "grad_norm": 0.5585139393806458, "learning_rate": 0.0001421307257234049, "loss": 0.9479, "step": 3908 }, { "epoch": 0.7264448987177105, "grad_norm": 0.5866591334342957, "learning_rate": 0.0001421042003061555, "loss": 0.929, "step": 3909 }, { "epoch": 0.7266307377810816, "grad_norm": 0.6339684128761292, "learning_rate": 0.00014207767128764288, "loss": 1.0042, "step": 3910 }, { "epoch": 0.7268165768444527, "grad_norm": 0.5119479894638062, "learning_rate": 0.00014205113867013603, "loss": 1.0338, "step": 3911 }, { "epoch": 0.7270024159078238, "grad_norm": 0.7797014713287354, "learning_rate": 0.00014202460245590445, "loss": 1.1913, "step": 3912 }, { "epoch": 0.727188254971195, "grad_norm": 0.5340668559074402, "learning_rate": 0.00014199806264721777, "loss": 1.1246, "step": 3913 }, { "epoch": 0.7273740940345661, "grad_norm": 0.5588401556015015, "learning_rate": 0.00014197151924634607, "loss": 1.1095, "step": 3914 }, { "epoch": 0.7275599330979372, "grad_norm": 0.5594502687454224, "learning_rate": 
0.00014194497225555956, "loss": 1.0379, "step": 3915 }, { "epoch": 0.7277457721613083, "grad_norm": 0.6729309558868408, "learning_rate": 0.00014191842167712894, "loss": 1.0472, "step": 3916 }, { "epoch": 0.7279316112246794, "grad_norm": 0.5249831676483154, "learning_rate": 0.00014189186751332512, "loss": 0.7952, "step": 3917 }, { "epoch": 0.7281174502880505, "grad_norm": 0.9038906693458557, "learning_rate": 0.00014186530976641935, "loss": 1.0048, "step": 3918 }, { "epoch": 0.7283032893514216, "grad_norm": 0.8185818791389465, "learning_rate": 0.00014183874843868313, "loss": 1.2085, "step": 3919 }, { "epoch": 0.7284891284147927, "grad_norm": 0.6493542194366455, "learning_rate": 0.00014181218353238832, "loss": 1.3127, "step": 3920 }, { "epoch": 0.728674967478164, "grad_norm": 0.5967090129852295, "learning_rate": 0.00014178561504980713, "loss": 1.0868, "step": 3921 }, { "epoch": 0.7288608065415351, "grad_norm": 0.6878150701522827, "learning_rate": 0.0001417590429932119, "loss": 1.2544, "step": 3922 }, { "epoch": 0.7290466456049062, "grad_norm": 0.598172128200531, "learning_rate": 0.00014173246736487552, "loss": 1.0597, "step": 3923 }, { "epoch": 0.7292324846682773, "grad_norm": 0.5575773119926453, "learning_rate": 0.00014170588816707103, "loss": 0.9363, "step": 3924 }, { "epoch": 0.7294183237316484, "grad_norm": 0.6191855072975159, "learning_rate": 0.00014167930540207174, "loss": 1.1353, "step": 3925 }, { "epoch": 0.7296041627950195, "grad_norm": 0.635525643825531, "learning_rate": 0.0001416527190721514, "loss": 1.23, "step": 3926 }, { "epoch": 0.7297900018583906, "grad_norm": 0.5543552041053772, "learning_rate": 0.00014162612917958397, "loss": 1.1228, "step": 3927 }, { "epoch": 0.7299758409217617, "grad_norm": 0.4938865602016449, "learning_rate": 0.00014159953572664375, "loss": 1.0233, "step": 3928 }, { "epoch": 0.7301616799851329, "grad_norm": 0.523423433303833, "learning_rate": 0.00014157293871560532, "loss": 0.8668, "step": 3929 }, { "epoch": 0.730347519048504, 
"grad_norm": 0.6982086300849915, "learning_rate": 0.00014154633814874361, "loss": 1.3362, "step": 3930 }, { "epoch": 0.7305333581118751, "grad_norm": 0.5337725281715393, "learning_rate": 0.0001415197340283338, "loss": 0.9793, "step": 3931 }, { "epoch": 0.7307191971752462, "grad_norm": 0.5501512885093689, "learning_rate": 0.0001414931263566514, "loss": 1.0592, "step": 3932 }, { "epoch": 0.7309050362386174, "grad_norm": 0.5574057698249817, "learning_rate": 0.0001414665151359722, "loss": 1.1248, "step": 3933 }, { "epoch": 0.7310908753019885, "grad_norm": 0.5722219347953796, "learning_rate": 0.0001414399003685724, "loss": 1.0139, "step": 3934 }, { "epoch": 0.7312767143653596, "grad_norm": 0.599181056022644, "learning_rate": 0.0001414132820567283, "loss": 1.0779, "step": 3935 }, { "epoch": 0.7314625534287307, "grad_norm": 0.6454370021820068, "learning_rate": 0.00014138666020271668, "loss": 0.847, "step": 3936 }, { "epoch": 0.7316483924921018, "grad_norm": 0.5219981074333191, "learning_rate": 0.0001413600348088146, "loss": 0.9294, "step": 3937 }, { "epoch": 0.731834231555473, "grad_norm": 0.5672059059143066, "learning_rate": 0.00014133340587729932, "loss": 1.0441, "step": 3938 }, { "epoch": 0.7320200706188441, "grad_norm": 0.6195349097251892, "learning_rate": 0.00014130677341044854, "loss": 0.8654, "step": 3939 }, { "epoch": 0.7322059096822152, "grad_norm": 0.6063309907913208, "learning_rate": 0.00014128013741054014, "loss": 1.2963, "step": 3940 }, { "epoch": 0.7323917487455863, "grad_norm": 0.5881887078285217, "learning_rate": 0.00014125349787985235, "loss": 1.1092, "step": 3941 }, { "epoch": 0.7325775878089574, "grad_norm": 0.6515393257141113, "learning_rate": 0.0001412268548206637, "loss": 1.2598, "step": 3942 }, { "epoch": 0.7327634268723285, "grad_norm": 0.6397451162338257, "learning_rate": 0.00014120020823525311, "loss": 1.4133, "step": 3943 }, { "epoch": 0.7329492659356996, "grad_norm": 0.6065839529037476, "learning_rate": 0.00014117355812589963, "loss": 1.0174, 
"step": 3944 }, { "epoch": 0.7331351049990708, "grad_norm": 0.6057631373405457, "learning_rate": 0.00014114690449488278, "loss": 1.2133, "step": 3945 }, { "epoch": 0.733320944062442, "grad_norm": 0.6335853934288025, "learning_rate": 0.00014112024734448222, "loss": 1.3003, "step": 3946 }, { "epoch": 0.7335067831258131, "grad_norm": 0.763322651386261, "learning_rate": 0.00014109358667697806, "loss": 1.2127, "step": 3947 }, { "epoch": 0.7336926221891842, "grad_norm": 0.7434218525886536, "learning_rate": 0.0001410669224946506, "loss": 1.3435, "step": 3948 }, { "epoch": 0.7338784612525553, "grad_norm": 0.843048095703125, "learning_rate": 0.00014104025479978047, "loss": 1.0375, "step": 3949 }, { "epoch": 0.7340643003159264, "grad_norm": 0.5597147941589355, "learning_rate": 0.00014101358359464868, "loss": 1.191, "step": 3950 }, { "epoch": 0.7342501393792975, "grad_norm": 0.6737159490585327, "learning_rate": 0.00014098690888153642, "loss": 1.1248, "step": 3951 }, { "epoch": 0.7344359784426686, "grad_norm": 0.6314411759376526, "learning_rate": 0.0001409602306627253, "loss": 1.0922, "step": 3952 }, { "epoch": 0.7346218175060397, "grad_norm": 0.6439374089241028, "learning_rate": 0.0001409335489404971, "loss": 1.0697, "step": 3953 }, { "epoch": 0.7348076565694109, "grad_norm": 0.6825480461120605, "learning_rate": 0.00014090686371713402, "loss": 1.2774, "step": 3954 }, { "epoch": 0.734993495632782, "grad_norm": 0.5277664661407471, "learning_rate": 0.00014088017499491842, "loss": 1.0449, "step": 3955 }, { "epoch": 0.7351793346961532, "grad_norm": 0.6415640115737915, "learning_rate": 0.00014085348277613312, "loss": 1.1013, "step": 3956 }, { "epoch": 0.7353651737595243, "grad_norm": 0.8472128510475159, "learning_rate": 0.00014082678706306117, "loss": 1.1158, "step": 3957 }, { "epoch": 0.7355510128228954, "grad_norm": 0.5654361248016357, "learning_rate": 0.00014080008785798584, "loss": 1.1237, "step": 3958 }, { "epoch": 0.7357368518862665, "grad_norm": 0.5454433560371399, 
"learning_rate": 0.00014077338516319085, "loss": 1.1398, "step": 3959 }, { "epoch": 0.7359226909496376, "grad_norm": 0.5983814597129822, "learning_rate": 0.0001407466789809601, "loss": 1.3361, "step": 3960 }, { "epoch": 0.7361085300130087, "grad_norm": 0.6201050281524658, "learning_rate": 0.00014071996931357781, "loss": 1.0744, "step": 3961 }, { "epoch": 0.7362943690763799, "grad_norm": 0.5997368693351746, "learning_rate": 0.0001406932561633286, "loss": 1.0716, "step": 3962 }, { "epoch": 0.736480208139751, "grad_norm": 0.6071730852127075, "learning_rate": 0.00014066653953249717, "loss": 1.2947, "step": 3963 }, { "epoch": 0.7366660472031221, "grad_norm": 0.5586613416671753, "learning_rate": 0.00014063981942336872, "loss": 1.0362, "step": 3964 }, { "epoch": 0.7368518862664932, "grad_norm": 0.5747679471969604, "learning_rate": 0.0001406130958382287, "loss": 1.1044, "step": 3965 }, { "epoch": 0.7370377253298643, "grad_norm": 0.6741050481796265, "learning_rate": 0.00014058636877936284, "loss": 1.5444, "step": 3966 }, { "epoch": 0.7372235643932354, "grad_norm": 0.7054770588874817, "learning_rate": 0.0001405596382490571, "loss": 1.1852, "step": 3967 }, { "epoch": 0.7374094034566065, "grad_norm": 0.6902971863746643, "learning_rate": 0.00014053290424959788, "loss": 1.1828, "step": 3968 }, { "epoch": 0.7375952425199777, "grad_norm": 0.6020306944847107, "learning_rate": 0.00014050616678327175, "loss": 1.2144, "step": 3969 }, { "epoch": 0.7377810815833489, "grad_norm": 0.5189749598503113, "learning_rate": 0.00014047942585236563, "loss": 1.1501, "step": 3970 }, { "epoch": 0.73796692064672, "grad_norm": 0.5975673198699951, "learning_rate": 0.0001404526814591667, "loss": 1.0416, "step": 3971 }, { "epoch": 0.7381527597100911, "grad_norm": 0.571418046951294, "learning_rate": 0.0001404259336059625, "loss": 0.9414, "step": 3972 }, { "epoch": 0.7383385987734622, "grad_norm": 0.7188184261322021, "learning_rate": 0.00014039918229504083, "loss": 1.2489, "step": 3973 }, { "epoch": 
0.7385244378368333, "grad_norm": 0.7322155833244324, "learning_rate": 0.0001403724275286898, "loss": 1.0685, "step": 3974 }, { "epoch": 0.7387102769002044, "grad_norm": 0.5024031400680542, "learning_rate": 0.0001403456693091978, "loss": 0.8796, "step": 3975 }, { "epoch": 0.7388961159635755, "grad_norm": 0.5071271061897278, "learning_rate": 0.00014031890763885346, "loss": 0.9038, "step": 3976 }, { "epoch": 0.7390819550269466, "grad_norm": 0.5381253957748413, "learning_rate": 0.00014029214251994585, "loss": 0.8357, "step": 3977 }, { "epoch": 0.7392677940903177, "grad_norm": 0.7139803767204285, "learning_rate": 0.00014026537395476417, "loss": 1.0646, "step": 3978 }, { "epoch": 0.739453633153689, "grad_norm": 0.5862656831741333, "learning_rate": 0.00014023860194559807, "loss": 1.3679, "step": 3979 }, { "epoch": 0.7396394722170601, "grad_norm": 0.5597640872001648, "learning_rate": 0.00014021182649473734, "loss": 1.0942, "step": 3980 }, { "epoch": 0.7398253112804312, "grad_norm": 0.7823113203048706, "learning_rate": 0.0001401850476044722, "loss": 1.0347, "step": 3981 }, { "epoch": 0.7400111503438023, "grad_norm": 0.5648866295814514, "learning_rate": 0.0001401582652770931, "loss": 0.9956, "step": 3982 }, { "epoch": 0.7401969894071734, "grad_norm": 0.5973963141441345, "learning_rate": 0.00014013147951489076, "loss": 0.7815, "step": 3983 }, { "epoch": 0.7403828284705445, "grad_norm": 0.7022433876991272, "learning_rate": 0.00014010469032015626, "loss": 1.0487, "step": 3984 }, { "epoch": 0.7405686675339156, "grad_norm": 0.7050195336341858, "learning_rate": 0.0001400778976951809, "loss": 1.0073, "step": 3985 }, { "epoch": 0.7407545065972867, "grad_norm": 0.6723904013633728, "learning_rate": 0.00014005110164225636, "loss": 0.962, "step": 3986 }, { "epoch": 0.7409403456606579, "grad_norm": 0.5963743329048157, "learning_rate": 0.0001400243021636745, "loss": 1.1638, "step": 3987 }, { "epoch": 0.741126184724029, "grad_norm": 0.6054471731185913, "learning_rate": 
0.00013999749926172755, "loss": 1.1227, "step": 3988 }, { "epoch": 0.7413120237874001, "grad_norm": 0.578852117061615, "learning_rate": 0.00013997069293870807, "loss": 1.087, "step": 3989 }, { "epoch": 0.7414978628507712, "grad_norm": 0.7209727764129639, "learning_rate": 0.00013994388319690888, "loss": 1.1167, "step": 3990 }, { "epoch": 0.7416837019141423, "grad_norm": 0.7866323590278625, "learning_rate": 0.00013991707003862297, "loss": 1.1326, "step": 3991 }, { "epoch": 0.7418695409775135, "grad_norm": 0.6872072815895081, "learning_rate": 0.00013989025346614382, "loss": 0.8651, "step": 3992 }, { "epoch": 0.7420553800408846, "grad_norm": 0.568540096282959, "learning_rate": 0.00013986343348176505, "loss": 1.1095, "step": 3993 }, { "epoch": 0.7422412191042557, "grad_norm": 0.6199185252189636, "learning_rate": 0.00013983661008778065, "loss": 1.0038, "step": 3994 }, { "epoch": 0.7424270581676269, "grad_norm": 0.6343973875045776, "learning_rate": 0.00013980978328648492, "loss": 1.2641, "step": 3995 }, { "epoch": 0.742612897230998, "grad_norm": 0.723204493522644, "learning_rate": 0.0001397829530801724, "loss": 1.283, "step": 3996 }, { "epoch": 0.7427987362943691, "grad_norm": 0.5674862861633301, "learning_rate": 0.00013975611947113792, "loss": 1.1703, "step": 3997 }, { "epoch": 0.7429845753577402, "grad_norm": 0.5449051856994629, "learning_rate": 0.00013972928246167658, "loss": 0.8722, "step": 3998 }, { "epoch": 0.7431704144211113, "grad_norm": 0.6609678864479065, "learning_rate": 0.00013970244205408386, "loss": 1.2703, "step": 3999 }, { "epoch": 0.7433562534844824, "grad_norm": 0.6188348531723022, "learning_rate": 0.00013967559825065547, "loss": 1.3705, "step": 4000 }, { "epoch": 0.7435420925478535, "grad_norm": 0.5565700531005859, "learning_rate": 0.0001396487510536874, "loss": 0.821, "step": 4001 }, { "epoch": 0.7437279316112246, "grad_norm": 0.5666722655296326, "learning_rate": 0.00013962190046547596, "loss": 0.9848, "step": 4002 }, { "epoch": 0.7439137706745959, 
"grad_norm": 0.5390679836273193, "learning_rate": 0.00013959504648831776, "loss": 1.1785, "step": 4003 }, { "epoch": 0.744099609737967, "grad_norm": 0.5793556571006775, "learning_rate": 0.00013956818912450963, "loss": 1.204, "step": 4004 }, { "epoch": 0.7442854488013381, "grad_norm": 0.689157247543335, "learning_rate": 0.0001395413283763488, "loss": 1.4102, "step": 4005 }, { "epoch": 0.7444712878647092, "grad_norm": 0.6170220375061035, "learning_rate": 0.00013951446424613267, "loss": 1.0307, "step": 4006 }, { "epoch": 0.7446571269280803, "grad_norm": 0.7064149379730225, "learning_rate": 0.000139487596736159, "loss": 1.1184, "step": 4007 }, { "epoch": 0.7448429659914514, "grad_norm": 0.5319786667823792, "learning_rate": 0.00013946072584872587, "loss": 0.9286, "step": 4008 }, { "epoch": 0.7450288050548225, "grad_norm": 0.6800196766853333, "learning_rate": 0.00013943385158613153, "loss": 1.0468, "step": 4009 }, { "epoch": 0.7452146441181936, "grad_norm": 0.5505868196487427, "learning_rate": 0.00013940697395067465, "loss": 0.8411, "step": 4010 }, { "epoch": 0.7454004831815647, "grad_norm": 0.5862759351730347, "learning_rate": 0.00013938009294465415, "loss": 1.2071, "step": 4011 }, { "epoch": 0.7455863222449359, "grad_norm": 0.5666061639785767, "learning_rate": 0.00013935320857036917, "loss": 1.0593, "step": 4012 }, { "epoch": 0.745772161308307, "grad_norm": 0.6835244297981262, "learning_rate": 0.0001393263208301192, "loss": 1.0247, "step": 4013 }, { "epoch": 0.7459580003716781, "grad_norm": 0.5994834899902344, "learning_rate": 0.000139299429726204, "loss": 0.9067, "step": 4014 }, { "epoch": 0.7461438394350492, "grad_norm": 0.6289177536964417, "learning_rate": 0.00013927253526092368, "loss": 1.2112, "step": 4015 }, { "epoch": 0.7463296784984204, "grad_norm": 0.655208170413971, "learning_rate": 0.0001392456374365785, "loss": 1.1013, "step": 4016 }, { "epoch": 0.7465155175617915, "grad_norm": 0.5383544564247131, "learning_rate": 0.00013921873625546914, "loss": 1.1126, 
"step": 4017 }, { "epoch": 0.7467013566251626, "grad_norm": 0.4992688000202179, "learning_rate": 0.00013919183171989653, "loss": 0.6822, "step": 4018 }, { "epoch": 0.7468871956885337, "grad_norm": 0.6615790128707886, "learning_rate": 0.00013916492383216184, "loss": 0.9034, "step": 4019 }, { "epoch": 0.7470730347519049, "grad_norm": 0.6461586356163025, "learning_rate": 0.00013913801259456653, "loss": 1.0132, "step": 4020 }, { "epoch": 0.747258873815276, "grad_norm": 0.5311331152915955, "learning_rate": 0.00013911109800941244, "loss": 0.907, "step": 4021 }, { "epoch": 0.7474447128786471, "grad_norm": 0.6406325101852417, "learning_rate": 0.00013908418007900164, "loss": 0.9742, "step": 4022 }, { "epoch": 0.7476305519420182, "grad_norm": 0.6342041492462158, "learning_rate": 0.0001390572588056364, "loss": 1.2472, "step": 4023 }, { "epoch": 0.7478163910053893, "grad_norm": 0.6052384972572327, "learning_rate": 0.00013903033419161942, "loss": 1.1535, "step": 4024 }, { "epoch": 0.7480022300687604, "grad_norm": 0.7497447729110718, "learning_rate": 0.0001390034062392536, "loss": 1.0163, "step": 4025 }, { "epoch": 0.7481880691321315, "grad_norm": 0.5682515501976013, "learning_rate": 0.00013897647495084213, "loss": 0.935, "step": 4026 }, { "epoch": 0.7483739081955026, "grad_norm": 0.6002129912376404, "learning_rate": 0.00013894954032868853, "loss": 0.9931, "step": 4027 }, { "epoch": 0.7485597472588739, "grad_norm": 0.5856931209564209, "learning_rate": 0.0001389226023750966, "loss": 0.9945, "step": 4028 }, { "epoch": 0.748745586322245, "grad_norm": 0.6775208115577698, "learning_rate": 0.00013889566109237032, "loss": 1.2281, "step": 4029 }, { "epoch": 0.7489314253856161, "grad_norm": 0.5849632024765015, "learning_rate": 0.0001388687164828141, "loss": 1.3146, "step": 4030 }, { "epoch": 0.7491172644489872, "grad_norm": 0.6817806363105774, "learning_rate": 0.00013884176854873253, "loss": 1.1723, "step": 4031 }, { "epoch": 0.7493031035123583, "grad_norm": 0.6314253211021423, 
"learning_rate": 0.00013881481729243055, "loss": 0.9815, "step": 4032 }, { "epoch": 0.7494889425757294, "grad_norm": 0.5545886158943176, "learning_rate": 0.0001387878627162134, "loss": 1.1997, "step": 4033 }, { "epoch": 0.7496747816391005, "grad_norm": 0.627219557762146, "learning_rate": 0.00013876090482238646, "loss": 0.8301, "step": 4034 }, { "epoch": 0.7498606207024716, "grad_norm": 0.5215731859207153, "learning_rate": 0.0001387339436132556, "loss": 0.9639, "step": 4035 }, { "epoch": 0.7500464597658428, "grad_norm": 0.5535773634910583, "learning_rate": 0.00013870697909112683, "loss": 0.9157, "step": 4036 }, { "epoch": 0.7502322988292139, "grad_norm": 0.6636160612106323, "learning_rate": 0.00013868001125830648, "loss": 0.9123, "step": 4037 }, { "epoch": 0.750418137892585, "grad_norm": 0.5647329688072205, "learning_rate": 0.00013865304011710113, "loss": 0.8311, "step": 4038 }, { "epoch": 0.750418137892585, "eval_loss": 1.0377047061920166, "eval_runtime": 23.2105, "eval_samples_per_second": 47.048, "eval_steps_per_second": 23.524, "step": 4038 }, { "epoch": 0.7506039769559562, "grad_norm": 0.6448737382888794, "learning_rate": 0.00013862606566981774, "loss": 1.054, "step": 4039 }, { "epoch": 0.7507898160193273, "grad_norm": 0.6743420958518982, "learning_rate": 0.00013859908791876352, "loss": 1.2603, "step": 4040 }, { "epoch": 0.7509756550826984, "grad_norm": 0.5818408131599426, "learning_rate": 0.0001385721068662458, "loss": 1.0092, "step": 4041 }, { "epoch": 0.7511614941460695, "grad_norm": 0.6513164043426514, "learning_rate": 0.00013854512251457247, "loss": 1.0878, "step": 4042 }, { "epoch": 0.7513473332094406, "grad_norm": 0.6026800274848938, "learning_rate": 0.0001385181348660515, "loss": 0.8527, "step": 4043 }, { "epoch": 0.7515331722728118, "grad_norm": 0.6424375176429749, "learning_rate": 0.0001384911439229912, "loss": 1.0664, "step": 4044 }, { "epoch": 0.7517190113361829, "grad_norm": 0.7439508438110352, "learning_rate": 0.00013846414968770015, "loss": 
1.2037, "step": 4045 }, { "epoch": 0.751904850399554, "grad_norm": 0.6531807780265808, "learning_rate": 0.00013843715216248727, "loss": 1.1257, "step": 4046 }, { "epoch": 0.7520906894629251, "grad_norm": 0.6059077978134155, "learning_rate": 0.0001384101513496617, "loss": 0.9629, "step": 4047 }, { "epoch": 0.7522765285262962, "grad_norm": 0.5011289119720459, "learning_rate": 0.00013838314725153286, "loss": 0.6582, "step": 4048 }, { "epoch": 0.7524623675896673, "grad_norm": 0.894359827041626, "learning_rate": 0.00013835613987041046, "loss": 1.2763, "step": 4049 }, { "epoch": 0.7526482066530384, "grad_norm": 0.5608694553375244, "learning_rate": 0.00013832912920860456, "loss": 1.0542, "step": 4050 }, { "epoch": 0.7528340457164096, "grad_norm": 0.6905021667480469, "learning_rate": 0.00013830211526842537, "loss": 1.0222, "step": 4051 }, { "epoch": 0.7530198847797807, "grad_norm": 0.5437237620353699, "learning_rate": 0.00013827509805218347, "loss": 1.0084, "step": 4052 }, { "epoch": 0.7532057238431519, "grad_norm": 0.665327787399292, "learning_rate": 0.0001382480775621897, "loss": 1.1029, "step": 4053 }, { "epoch": 0.753391562906523, "grad_norm": 0.6304662823677063, "learning_rate": 0.0001382210538007552, "loss": 1.3091, "step": 4054 }, { "epoch": 0.7535774019698941, "grad_norm": 0.736091136932373, "learning_rate": 0.00013819402677019138, "loss": 1.0107, "step": 4055 }, { "epoch": 0.7537632410332652, "grad_norm": 0.5813354849815369, "learning_rate": 0.0001381669964728099, "loss": 1.12, "step": 4056 }, { "epoch": 0.7539490800966363, "grad_norm": 0.629216194152832, "learning_rate": 0.00013813996291092275, "loss": 0.9777, "step": 4057 }, { "epoch": 0.7541349191600074, "grad_norm": 0.7185510993003845, "learning_rate": 0.0001381129260868421, "loss": 1.0516, "step": 4058 }, { "epoch": 0.7543207582233785, "grad_norm": 0.5125520825386047, "learning_rate": 0.00013808588600288052, "loss": 1.0353, "step": 4059 }, { "epoch": 0.7545065972867496, "grad_norm": 0.5914701819419861, 
"learning_rate": 0.00013805884266135078, "loss": 1.0866, "step": 4060 }, { "epoch": 0.7546924363501208, "grad_norm": 0.5655017495155334, "learning_rate": 0.00013803179606456599, "loss": 1.0189, "step": 4061 }, { "epoch": 0.754878275413492, "grad_norm": 0.5555375814437866, "learning_rate": 0.0001380047462148395, "loss": 1.0671, "step": 4062 }, { "epoch": 0.7550641144768631, "grad_norm": 0.5526087284088135, "learning_rate": 0.0001379776931144849, "loss": 0.9678, "step": 4063 }, { "epoch": 0.7552499535402342, "grad_norm": 0.6122016906738281, "learning_rate": 0.00013795063676581612, "loss": 1.1348, "step": 4064 }, { "epoch": 0.7554357926036053, "grad_norm": 0.5869781970977783, "learning_rate": 0.00013792357717114738, "loss": 1.0977, "step": 4065 }, { "epoch": 0.7556216316669764, "grad_norm": 0.619445264339447, "learning_rate": 0.0001378965143327931, "loss": 1.1654, "step": 4066 }, { "epoch": 0.7558074707303475, "grad_norm": 0.7364014387130737, "learning_rate": 0.00013786944825306806, "loss": 1.3876, "step": 4067 }, { "epoch": 0.7559933097937186, "grad_norm": 0.5614713430404663, "learning_rate": 0.00013784237893428724, "loss": 1.0382, "step": 4068 }, { "epoch": 0.7561791488570898, "grad_norm": 0.7402245402336121, "learning_rate": 0.00013781530637876598, "loss": 1.1889, "step": 4069 }, { "epoch": 0.7563649879204609, "grad_norm": 0.5417977571487427, "learning_rate": 0.00013778823058881984, "loss": 0.9613, "step": 4070 }, { "epoch": 0.756550826983832, "grad_norm": 0.6280809640884399, "learning_rate": 0.00013776115156676467, "loss": 0.9137, "step": 4071 }, { "epoch": 0.7567366660472031, "grad_norm": 0.5001559257507324, "learning_rate": 0.00013773406931491658, "loss": 0.9083, "step": 4072 }, { "epoch": 0.7569225051105742, "grad_norm": 0.7508971691131592, "learning_rate": 0.00013770698383559201, "loss": 1.0583, "step": 4073 }, { "epoch": 0.7571083441739453, "grad_norm": 0.642376720905304, "learning_rate": 0.0001376798951311076, "loss": 1.1538, "step": 4074 }, { "epoch": 
0.7572941832373165, "grad_norm": 0.5976815223693848, "learning_rate": 0.00013765280320378033, "loss": 0.8483, "step": 4075 }, { "epoch": 0.7574800223006876, "grad_norm": 0.6116734743118286, "learning_rate": 0.00013762570805592742, "loss": 1.204, "step": 4076 }, { "epoch": 0.7576658613640588, "grad_norm": 0.7391072511672974, "learning_rate": 0.00013759860968986636, "loss": 1.0961, "step": 4077 }, { "epoch": 0.7578517004274299, "grad_norm": 0.6154507398605347, "learning_rate": 0.00013757150810791497, "loss": 1.2466, "step": 4078 }, { "epoch": 0.758037539490801, "grad_norm": 0.5805959105491638, "learning_rate": 0.00013754440331239132, "loss": 1.2261, "step": 4079 }, { "epoch": 0.7582233785541721, "grad_norm": 0.6683850288391113, "learning_rate": 0.00013751729530561368, "loss": 1.1683, "step": 4080 }, { "epoch": 0.7584092176175432, "grad_norm": 0.5736557245254517, "learning_rate": 0.0001374901840899007, "loss": 0.6839, "step": 4081 }, { "epoch": 0.7585950566809143, "grad_norm": 0.599035918712616, "learning_rate": 0.0001374630696675712, "loss": 0.8846, "step": 4082 }, { "epoch": 0.7587808957442854, "grad_norm": 0.6209955215454102, "learning_rate": 0.0001374359520409444, "loss": 1.1019, "step": 4083 }, { "epoch": 0.7589667348076565, "grad_norm": 0.5841337442398071, "learning_rate": 0.00013740883121233973, "loss": 1.1662, "step": 4084 }, { "epoch": 0.7591525738710276, "grad_norm": 0.6548193693161011, "learning_rate": 0.00013738170718407687, "loss": 1.0279, "step": 4085 }, { "epoch": 0.7593384129343989, "grad_norm": 0.5952484011650085, "learning_rate": 0.00013735457995847582, "loss": 1.1303, "step": 4086 }, { "epoch": 0.75952425199777, "grad_norm": 0.6196330189704895, "learning_rate": 0.0001373274495378568, "loss": 1.3022, "step": 4087 }, { "epoch": 0.7597100910611411, "grad_norm": 0.5573399066925049, "learning_rate": 0.00013730031592454036, "loss": 0.8631, "step": 4088 }, { "epoch": 0.7598959301245122, "grad_norm": 0.48187312483787537, "learning_rate": 
0.00013727317912084726, "loss": 0.7207, "step": 4089 }, { "epoch": 0.7600817691878833, "grad_norm": 0.5117285847663879, "learning_rate": 0.00013724603912909858, "loss": 0.9147, "step": 4090 }, { "epoch": 0.7602676082512544, "grad_norm": 0.6108545660972595, "learning_rate": 0.0001372188959516157, "loss": 1.0902, "step": 4091 }, { "epoch": 0.7604534473146255, "grad_norm": 0.5700968503952026, "learning_rate": 0.00013719174959072022, "loss": 1.1427, "step": 4092 }, { "epoch": 0.7606392863779966, "grad_norm": 0.6507374048233032, "learning_rate": 0.00013716460004873398, "loss": 0.9902, "step": 4093 }, { "epoch": 0.7608251254413678, "grad_norm": 0.5942047834396362, "learning_rate": 0.00013713744732797922, "loss": 0.7129, "step": 4094 }, { "epoch": 0.7610109645047389, "grad_norm": 0.7793191075325012, "learning_rate": 0.0001371102914307783, "loss": 0.9558, "step": 4095 }, { "epoch": 0.76119680356811, "grad_norm": 0.5532881021499634, "learning_rate": 0.0001370831323594539, "loss": 0.9851, "step": 4096 }, { "epoch": 0.7613826426314811, "grad_norm": 0.6629320979118347, "learning_rate": 0.0001370559701163291, "loss": 0.9608, "step": 4097 }, { "epoch": 0.7615684816948523, "grad_norm": 0.6066349744796753, "learning_rate": 0.00013702880470372708, "loss": 1.1944, "step": 4098 }, { "epoch": 0.7617543207582234, "grad_norm": 0.5362082719802856, "learning_rate": 0.00013700163612397135, "loss": 0.9813, "step": 4099 }, { "epoch": 0.7619401598215945, "grad_norm": 0.6526452302932739, "learning_rate": 0.00013697446437938571, "loss": 1.29, "step": 4100 }, { "epoch": 0.7621259988849656, "grad_norm": 0.553182065486908, "learning_rate": 0.00013694728947229425, "loss": 1.1332, "step": 4101 }, { "epoch": 0.7623118379483368, "grad_norm": 0.5968033671379089, "learning_rate": 0.00013692011140502126, "loss": 1.0957, "step": 4102 }, { "epoch": 0.7624976770117079, "grad_norm": 0.7517340779304504, "learning_rate": 0.0001368929301798913, "loss": 1.2294, "step": 4103 }, { "epoch": 0.762683516075079, 
"grad_norm": 0.5200783014297485, "learning_rate": 0.00013686574579922935, "loss": 1.1681, "step": 4104 }, { "epoch": 0.7628693551384501, "grad_norm": 0.6289031505584717, "learning_rate": 0.00013683855826536042, "loss": 1.1485, "step": 4105 }, { "epoch": 0.7630551942018212, "grad_norm": 0.6423609256744385, "learning_rate": 0.00013681136758061005, "loss": 1.1549, "step": 4106 }, { "epoch": 0.7632410332651923, "grad_norm": 0.7103132605552673, "learning_rate": 0.0001367841737473038, "loss": 0.9711, "step": 4107 }, { "epoch": 0.7634268723285634, "grad_norm": 0.553540050983429, "learning_rate": 0.00013675697676776772, "loss": 1.1464, "step": 4108 }, { "epoch": 0.7636127113919345, "grad_norm": 0.5573582053184509, "learning_rate": 0.00013672977664432794, "loss": 1.0289, "step": 4109 }, { "epoch": 0.7637985504553058, "grad_norm": 0.6105955243110657, "learning_rate": 0.00013670257337931093, "loss": 1.1219, "step": 4110 }, { "epoch": 0.7639843895186769, "grad_norm": 0.5916879177093506, "learning_rate": 0.00013667536697504356, "loss": 1.1944, "step": 4111 }, { "epoch": 0.764170228582048, "grad_norm": 0.5850638151168823, "learning_rate": 0.00013664815743385278, "loss": 1.1136, "step": 4112 }, { "epoch": 0.7643560676454191, "grad_norm": 0.6730114817619324, "learning_rate": 0.0001366209447580659, "loss": 1.3724, "step": 4113 }, { "epoch": 0.7645419067087902, "grad_norm": 0.5630990266799927, "learning_rate": 0.00013659372895001042, "loss": 0.9657, "step": 4114 }, { "epoch": 0.7647277457721613, "grad_norm": 3.9506986141204834, "learning_rate": 0.00013656651001201425, "loss": 2.3019, "step": 4115 }, { "epoch": 0.7649135848355324, "grad_norm": 0.5540075898170471, "learning_rate": 0.00013653928794640544, "loss": 0.9883, "step": 4116 }, { "epoch": 0.7650994238989035, "grad_norm": 0.6030392050743103, "learning_rate": 0.00013651206275551233, "loss": 1.0255, "step": 4117 }, { "epoch": 0.7652852629622746, "grad_norm": 0.6220700144767761, "learning_rate": 0.0001364848344416636, "loss": 
1.1406, "step": 4118 }, { "epoch": 0.7654711020256458, "grad_norm": 0.5235512256622314, "learning_rate": 0.00013645760300718814, "loss": 1.1161, "step": 4119 }, { "epoch": 0.7656569410890169, "grad_norm": 0.5732916593551636, "learning_rate": 0.0001364303684544151, "loss": 1.074, "step": 4120 }, { "epoch": 0.765842780152388, "grad_norm": 0.5863698124885559, "learning_rate": 0.0001364031307856739, "loss": 1.1758, "step": 4121 }, { "epoch": 0.7660286192157592, "grad_norm": 0.500783383846283, "learning_rate": 0.00013637589000329428, "loss": 1.0494, "step": 4122 }, { "epoch": 0.7662144582791303, "grad_norm": 0.5018875002861023, "learning_rate": 0.00013634864610960614, "loss": 1.0457, "step": 4123 }, { "epoch": 0.7664002973425014, "grad_norm": 0.6175665855407715, "learning_rate": 0.0001363213991069397, "loss": 1.0481, "step": 4124 }, { "epoch": 0.7665861364058725, "grad_norm": 0.5728722214698792, "learning_rate": 0.00013629414899762554, "loss": 1.0837, "step": 4125 }, { "epoch": 0.7667719754692436, "grad_norm": 0.5949525237083435, "learning_rate": 0.00013626689578399438, "loss": 0.9858, "step": 4126 }, { "epoch": 0.7669578145326148, "grad_norm": 0.6486298441886902, "learning_rate": 0.00013623963946837722, "loss": 0.849, "step": 4127 }, { "epoch": 0.7671436535959859, "grad_norm": 0.6250497102737427, "learning_rate": 0.00013621238005310536, "loss": 1.2374, "step": 4128 }, { "epoch": 0.767329492659357, "grad_norm": 0.6734606027603149, "learning_rate": 0.0001361851175405104, "loss": 1.2558, "step": 4129 }, { "epoch": 0.7675153317227281, "grad_norm": 0.811181902885437, "learning_rate": 0.0001361578519329241, "loss": 1.2633, "step": 4130 }, { "epoch": 0.7677011707860992, "grad_norm": 0.6883793473243713, "learning_rate": 0.0001361305832326786, "loss": 1.2465, "step": 4131 }, { "epoch": 0.7678870098494703, "grad_norm": 0.5060800313949585, "learning_rate": 0.00013610331144210623, "loss": 0.9639, "step": 4132 }, { "epoch": 0.7680728489128414, "grad_norm": 0.49882808327674866, 
"learning_rate": 0.0001360760365635396, "loss": 0.9654, "step": 4133 }, { "epoch": 0.7682586879762126, "grad_norm": 0.6372555494308472, "learning_rate": 0.00013604875859931156, "loss": 1.1524, "step": 4134 }, { "epoch": 0.7684445270395838, "grad_norm": 0.48308369517326355, "learning_rate": 0.00013602147755175533, "loss": 0.5496, "step": 4135 }, { "epoch": 0.7686303661029549, "grad_norm": 0.6130794286727905, "learning_rate": 0.00013599419342320425, "loss": 0.7401, "step": 4136 }, { "epoch": 0.768816205166326, "grad_norm": 0.6539893746376038, "learning_rate": 0.000135966906215992, "loss": 1.0096, "step": 4137 }, { "epoch": 0.7690020442296971, "grad_norm": 0.537489652633667, "learning_rate": 0.00013593961593245254, "loss": 1.0363, "step": 4138 }, { "epoch": 0.7691878832930682, "grad_norm": 0.6301590204238892, "learning_rate": 0.00013591232257492009, "loss": 0.9674, "step": 4139 }, { "epoch": 0.7693737223564393, "grad_norm": 0.5909221768379211, "learning_rate": 0.00013588502614572903, "loss": 1.0572, "step": 4140 }, { "epoch": 0.7695595614198104, "grad_norm": 0.7259015440940857, "learning_rate": 0.00013585772664721413, "loss": 1.2155, "step": 4141 }, { "epoch": 0.7697454004831815, "grad_norm": 0.59591144323349, "learning_rate": 0.00013583042408171037, "loss": 0.8733, "step": 4142 }, { "epoch": 0.7699312395465527, "grad_norm": 1.9292752742767334, "learning_rate": 0.00013580311845155302, "loss": 2.0443, "step": 4143 }, { "epoch": 0.7701170786099238, "grad_norm": 0.5641506910324097, "learning_rate": 0.00013577580975907756, "loss": 1.0628, "step": 4144 }, { "epoch": 0.770302917673295, "grad_norm": 0.6879433989524841, "learning_rate": 0.00013574849800661976, "loss": 1.5132, "step": 4145 }, { "epoch": 0.7704887567366661, "grad_norm": 0.5869988799095154, "learning_rate": 0.00013572118319651565, "loss": 1.0678, "step": 4146 }, { "epoch": 0.7706745958000372, "grad_norm": 0.5482993721961975, "learning_rate": 0.00013569386533110155, "loss": 0.9921, "step": 4147 }, { "epoch": 
0.7708604348634083, "grad_norm": 0.6312058568000793, "learning_rate": 0.000135666544412714, "loss": 1.0242, "step": 4148 }, { "epoch": 0.7710462739267794, "grad_norm": 0.6670210361480713, "learning_rate": 0.00013563922044368984, "loss": 1.0649, "step": 4149 }, { "epoch": 0.7712321129901505, "grad_norm": 0.6085960865020752, "learning_rate": 0.00013561189342636615, "loss": 0.9379, "step": 4150 }, { "epoch": 0.7714179520535217, "grad_norm": 0.5336554050445557, "learning_rate": 0.0001355845633630802, "loss": 0.869, "step": 4151 }, { "epoch": 0.7716037911168928, "grad_norm": 0.5766181945800781, "learning_rate": 0.00013555723025616964, "loss": 0.9011, "step": 4152 }, { "epoch": 0.7717896301802639, "grad_norm": 0.589514434337616, "learning_rate": 0.00013552989410797235, "loss": 1.0555, "step": 4153 }, { "epoch": 0.771975469243635, "grad_norm": 0.6114048361778259, "learning_rate": 0.00013550255492082643, "loss": 1.1759, "step": 4154 }, { "epoch": 0.7721613083070061, "grad_norm": 0.5874496698379517, "learning_rate": 0.00013547521269707024, "loss": 1.1696, "step": 4155 }, { "epoch": 0.7723471473703772, "grad_norm": 0.6765058040618896, "learning_rate": 0.0001354478674390424, "loss": 1.0971, "step": 4156 }, { "epoch": 0.7725329864337483, "grad_norm": 0.5729826092720032, "learning_rate": 0.0001354205191490819, "loss": 1.1221, "step": 4157 }, { "epoch": 0.7727188254971195, "grad_norm": 0.6108171343803406, "learning_rate": 0.00013539316782952785, "loss": 0.9023, "step": 4158 }, { "epoch": 0.7729046645604906, "grad_norm": 0.4902413487434387, "learning_rate": 0.00013536581348271958, "loss": 0.7808, "step": 4159 }, { "epoch": 0.7730905036238618, "grad_norm": 0.5325641632080078, "learning_rate": 0.00013533845611099694, "loss": 0.8381, "step": 4160 }, { "epoch": 0.7732763426872329, "grad_norm": 0.8668013215065002, "learning_rate": 0.00013531109571669972, "loss": 1.2558, "step": 4161 }, { "epoch": 0.773462181750604, "grad_norm": 0.6640474796295166, "learning_rate": 
0.0001352837323021682, "loss": 1.246, "step": 4162 }, { "epoch": 0.7736480208139751, "grad_norm": 0.6278790235519409, "learning_rate": 0.00013525636586974278, "loss": 0.9943, "step": 4163 }, { "epoch": 0.7738338598773462, "grad_norm": 0.5972362756729126, "learning_rate": 0.00013522899642176418, "loss": 1.1249, "step": 4164 }, { "epoch": 0.7740196989407173, "grad_norm": 0.4734021723270416, "learning_rate": 0.00013520162396057342, "loss": 0.8517, "step": 4165 }, { "epoch": 0.7742055380040884, "grad_norm": 0.6646032333374023, "learning_rate": 0.00013517424848851162, "loss": 1.387, "step": 4166 }, { "epoch": 0.7743913770674595, "grad_norm": 0.6340314745903015, "learning_rate": 0.00013514687000792036, "loss": 1.1976, "step": 4167 }, { "epoch": 0.7745772161308307, "grad_norm": 0.4301804304122925, "learning_rate": 0.00013511948852114132, "loss": 0.5702, "step": 4168 }, { "epoch": 0.7747630551942019, "grad_norm": 0.5795063972473145, "learning_rate": 0.00013509210403051656, "loss": 1.0796, "step": 4169 }, { "epoch": 0.774948894257573, "grad_norm": 0.6031147241592407, "learning_rate": 0.00013506471653838825, "loss": 1.2061, "step": 4170 }, { "epoch": 0.7751347333209441, "grad_norm": 0.6553006172180176, "learning_rate": 0.000135037326047099, "loss": 0.9015, "step": 4171 }, { "epoch": 0.7753205723843152, "grad_norm": 0.7210426330566406, "learning_rate": 0.00013500993255899152, "loss": 1.1031, "step": 4172 }, { "epoch": 0.7755064114476863, "grad_norm": 0.5744313597679138, "learning_rate": 0.0001349825360764088, "loss": 0.9968, "step": 4173 }, { "epoch": 0.7756922505110574, "grad_norm": 0.7675306797027588, "learning_rate": 0.0001349551366016942, "loss": 1.0349, "step": 4174 }, { "epoch": 0.7758780895744285, "grad_norm": 0.6847807765007019, "learning_rate": 0.00013492773413719121, "loss": 0.9701, "step": 4175 }, { "epoch": 0.7760639286377997, "grad_norm": 0.6042717695236206, "learning_rate": 0.00013490032868524365, "loss": 1.1616, "step": 4176 }, { "epoch": 0.7762497677011708, 
"grad_norm": 0.6150924563407898, "learning_rate": 0.00013487292024819554, "loss": 0.8984, "step": 4177 }, { "epoch": 0.7764356067645419, "grad_norm": 0.5927572250366211, "learning_rate": 0.00013484550882839115, "loss": 1.2649, "step": 4178 }, { "epoch": 0.776621445827913, "grad_norm": 0.5676189064979553, "learning_rate": 0.00013481809442817511, "loss": 0.979, "step": 4179 }, { "epoch": 0.7768072848912841, "grad_norm": 0.48436301946640015, "learning_rate": 0.0001347906770498922, "loss": 0.8431, "step": 4180 }, { "epoch": 0.7769931239546553, "grad_norm": 0.6204550862312317, "learning_rate": 0.00013476325669588748, "loss": 1.1561, "step": 4181 }, { "epoch": 0.7771789630180264, "grad_norm": 0.512209415435791, "learning_rate": 0.00013473583336850631, "loss": 1.0287, "step": 4182 }, { "epoch": 0.7773648020813975, "grad_norm": 0.6418192386627197, "learning_rate": 0.00013470840707009422, "loss": 1.1377, "step": 4183 }, { "epoch": 0.7775506411447687, "grad_norm": 0.6207935214042664, "learning_rate": 0.00013468097780299708, "loss": 0.9253, "step": 4184 }, { "epoch": 0.7777364802081398, "grad_norm": 0.6537068486213684, "learning_rate": 0.00013465354556956092, "loss": 1.3023, "step": 4185 }, { "epoch": 0.7779223192715109, "grad_norm": 0.5210212469100952, "learning_rate": 0.0001346261103721321, "loss": 0.9315, "step": 4186 }, { "epoch": 0.778108158334882, "grad_norm": 0.4942516088485718, "learning_rate": 0.00013459867221305723, "loss": 0.8469, "step": 4187 }, { "epoch": 0.7782939973982531, "grad_norm": 0.6087897419929504, "learning_rate": 0.00013457123109468318, "loss": 0.954, "step": 4188 }, { "epoch": 0.7784798364616242, "grad_norm": 0.5794795155525208, "learning_rate": 0.000134543787019357, "loss": 1.1739, "step": 4189 }, { "epoch": 0.7786656755249953, "grad_norm": 0.6037955284118652, "learning_rate": 0.00013451633998942607, "loss": 1.064, "step": 4190 }, { "epoch": 0.7788515145883664, "grad_norm": 0.8131178021430969, "learning_rate": 0.00013448889000723796, "loss": 1.0455, 
"step": 4191 }, { "epoch": 0.7790373536517375, "grad_norm": 0.6027346253395081, "learning_rate": 0.00013446143707514056, "loss": 1.0113, "step": 4192 }, { "epoch": 0.7792231927151088, "grad_norm": 0.6223542094230652, "learning_rate": 0.00013443398119548195, "loss": 0.9856, "step": 4193 }, { "epoch": 0.7794090317784799, "grad_norm": 0.5715983510017395, "learning_rate": 0.0001344065223706105, "loss": 1.0353, "step": 4194 }, { "epoch": 0.779594870841851, "grad_norm": 0.5402487516403198, "learning_rate": 0.00013437906060287486, "loss": 1.0995, "step": 4195 }, { "epoch": 0.7797807099052221, "grad_norm": 0.6275144815444946, "learning_rate": 0.00013435159589462385, "loss": 1.2073, "step": 4196 }, { "epoch": 0.7799665489685932, "grad_norm": 0.6028453707695007, "learning_rate": 0.0001343241282482066, "loss": 1.2034, "step": 4197 }, { "epoch": 0.7801523880319643, "grad_norm": 0.6037322878837585, "learning_rate": 0.0001342966576659725, "loss": 0.9155, "step": 4198 }, { "epoch": 0.7803382270953354, "grad_norm": 0.5991843938827515, "learning_rate": 0.0001342691841502711, "loss": 0.9529, "step": 4199 }, { "epoch": 0.7805240661587065, "grad_norm": 0.6946239471435547, "learning_rate": 0.00013424170770345235, "loss": 1.349, "step": 4200 }, { "epoch": 0.7807099052220777, "grad_norm": 0.5432325601577759, "learning_rate": 0.0001342142283278663, "loss": 0.9882, "step": 4201 }, { "epoch": 0.7808957442854488, "grad_norm": 0.9075375199317932, "learning_rate": 0.00013418674602586338, "loss": 1.2515, "step": 4202 }, { "epoch": 0.7810815833488199, "grad_norm": 0.599430501461029, "learning_rate": 0.00013415926079979418, "loss": 0.9525, "step": 4203 }, { "epoch": 0.781267422412191, "grad_norm": 0.6083348989486694, "learning_rate": 0.00013413177265200957, "loss": 1.0576, "step": 4204 }, { "epoch": 0.7814532614755622, "grad_norm": 0.5876964926719666, "learning_rate": 0.0001341042815848607, "loss": 1.0482, "step": 4205 }, { "epoch": 0.7816391005389333, "grad_norm": 0.8127071857452393, 
"learning_rate": 0.00013407678760069891, "loss": 0.9777, "step": 4206 }, { "epoch": 0.7818249396023044, "grad_norm": 0.6084738969802856, "learning_rate": 0.00013404929070187578, "loss": 0.9079, "step": 4207 }, { "epoch": 0.7820107786656755, "grad_norm": 0.5878585577011108, "learning_rate": 0.00013402179089074326, "loss": 1.2447, "step": 4208 }, { "epoch": 0.7821966177290467, "grad_norm": 0.6077066659927368, "learning_rate": 0.00013399428816965344, "loss": 1.2663, "step": 4209 }, { "epoch": 0.7823824567924178, "grad_norm": 0.7702139616012573, "learning_rate": 0.0001339667825409587, "loss": 1.3534, "step": 4210 }, { "epoch": 0.7825682958557889, "grad_norm": 0.6338496208190918, "learning_rate": 0.0001339392740070116, "loss": 1.0933, "step": 4211 }, { "epoch": 0.78275413491916, "grad_norm": 0.6644789576530457, "learning_rate": 0.0001339117625701651, "loss": 1.2102, "step": 4212 }, { "epoch": 0.7829399739825311, "grad_norm": 0.6646575927734375, "learning_rate": 0.0001338842482327722, "loss": 1.1216, "step": 4213 }, { "epoch": 0.7831258130459022, "grad_norm": 0.6456634998321533, "learning_rate": 0.0001338567309971863, "loss": 0.988, "step": 4214 }, { "epoch": 0.7833116521092733, "grad_norm": 0.625597357749939, "learning_rate": 0.00013382921086576105, "loss": 1.2121, "step": 4215 }, { "epoch": 0.7834974911726444, "grad_norm": 0.5920417308807373, "learning_rate": 0.00013380168784085027, "loss": 1.1116, "step": 4216 }, { "epoch": 0.7836833302360157, "grad_norm": 0.7123020887374878, "learning_rate": 0.00013377416192480809, "loss": 1.0922, "step": 4217 }, { "epoch": 0.7838691692993868, "grad_norm": 0.5441176295280457, "learning_rate": 0.00013374663311998885, "loss": 1.0325, "step": 4218 }, { "epoch": 0.7840550083627579, "grad_norm": 0.5781022906303406, "learning_rate": 0.00013371910142874715, "loss": 1.0484, "step": 4219 }, { "epoch": 0.784240847426129, "grad_norm": 0.6541292667388916, "learning_rate": 0.0001336915668534378, "loss": 1.1349, "step": 4220 }, { "epoch": 
0.7844266864895001, "grad_norm": 0.6895448565483093, "learning_rate": 0.00013366402939641594, "loss": 1.1096, "step": 4221 }, { "epoch": 0.7846125255528712, "grad_norm": 0.5254290699958801, "learning_rate": 0.00013363648906003687, "loss": 0.9549, "step": 4222 }, { "epoch": 0.7847983646162423, "grad_norm": 0.5432946681976318, "learning_rate": 0.0001336089458466562, "loss": 1.0367, "step": 4223 }, { "epoch": 0.7849842036796134, "grad_norm": 0.5595942735671997, "learning_rate": 0.00013358139975862976, "loss": 1.0784, "step": 4224 }, { "epoch": 0.7851700427429846, "grad_norm": 0.5201842784881592, "learning_rate": 0.00013355385079831362, "loss": 0.9406, "step": 4225 }, { "epoch": 0.7853558818063557, "grad_norm": 0.5573832988739014, "learning_rate": 0.00013352629896806409, "loss": 1.1733, "step": 4226 }, { "epoch": 0.7855417208697268, "grad_norm": 0.6541811227798462, "learning_rate": 0.00013349874427023777, "loss": 1.1657, "step": 4227 }, { "epoch": 0.785727559933098, "grad_norm": 0.5593380928039551, "learning_rate": 0.00013347118670719142, "loss": 0.8143, "step": 4228 }, { "epoch": 0.7859133989964691, "grad_norm": 0.6524107456207275, "learning_rate": 0.00013344362628128214, "loss": 0.9966, "step": 4229 }, { "epoch": 0.7860992380598402, "grad_norm": 0.5087578296661377, "learning_rate": 0.00013341606299486721, "loss": 0.9258, "step": 4230 }, { "epoch": 0.7862850771232113, "grad_norm": 0.5728389620780945, "learning_rate": 0.00013338849685030424, "loss": 1.0884, "step": 4231 }, { "epoch": 0.7864709161865824, "grad_norm": 0.7049938440322876, "learning_rate": 0.00013336092784995093, "loss": 1.2024, "step": 4232 }, { "epoch": 0.7866567552499535, "grad_norm": 0.5433733463287354, "learning_rate": 0.0001333333559961654, "loss": 0.9988, "step": 4233 }, { "epoch": 0.7868425943133247, "grad_norm": 0.5832632184028625, "learning_rate": 0.00013330578129130583, "loss": 1.0733, "step": 4234 }, { "epoch": 0.7870284333766958, "grad_norm": 0.48426976799964905, "learning_rate": 
0.0001332782037377309, "loss": 0.778, "step": 4235 }, { "epoch": 0.7872142724400669, "grad_norm": 0.6360462307929993, "learning_rate": 0.00013325062333779917, "loss": 1.2133, "step": 4236 }, { "epoch": 0.787400111503438, "grad_norm": 0.6086122393608093, "learning_rate": 0.0001332230400938698, "loss": 1.0383, "step": 4237 }, { "epoch": 0.7875859505668091, "grad_norm": 0.5904986262321472, "learning_rate": 0.000133195454008302, "loss": 1.2318, "step": 4238 }, { "epoch": 0.7877717896301802, "grad_norm": 0.9779626131057739, "learning_rate": 0.00013316786508345532, "loss": 1.4215, "step": 4239 }, { "epoch": 0.7879576286935513, "grad_norm": 0.5626901388168335, "learning_rate": 0.0001331402733216894, "loss": 1.0571, "step": 4240 }, { "epoch": 0.7881434677569225, "grad_norm": 0.5035316348075867, "learning_rate": 0.0001331126787253643, "loss": 0.9109, "step": 4241 }, { "epoch": 0.7883293068202937, "grad_norm": 0.6667996644973755, "learning_rate": 0.00013308508129684024, "loss": 1.2582, "step": 4242 }, { "epoch": 0.7885151458836648, "grad_norm": 0.5744646191596985, "learning_rate": 0.00013305748103847764, "loss": 1.1911, "step": 4243 }, { "epoch": 0.7887009849470359, "grad_norm": 0.6686093211174011, "learning_rate": 0.00013302987795263724, "loss": 1.2849, "step": 4244 }, { "epoch": 0.788886824010407, "grad_norm": 0.5513818860054016, "learning_rate": 0.00013300227204168002, "loss": 0.836, "step": 4245 }, { "epoch": 0.7890726630737781, "grad_norm": 0.4881809949874878, "learning_rate": 0.0001329746633079671, "loss": 0.7554, "step": 4246 }, { "epoch": 0.7892585021371492, "grad_norm": 0.5863307118415833, "learning_rate": 0.00013294705175386003, "loss": 1.2168, "step": 4247 }, { "epoch": 0.7894443412005203, "grad_norm": 0.5727426409721375, "learning_rate": 0.00013291943738172037, "loss": 1.2246, "step": 4248 }, { "epoch": 0.7896301802638914, "grad_norm": 0.562233567237854, "learning_rate": 0.00013289182019391008, "loss": 0.9127, "step": 4249 }, { "epoch": 0.7898160193272626, 
"grad_norm": 0.7686207890510559, "learning_rate": 0.0001328642001927913, "loss": 1.1063, "step": 4250 }, { "epoch": 0.7900018583906337, "grad_norm": 0.5708333849906921, "learning_rate": 0.00013283657738072648, "loss": 1.1547, "step": 4251 }, { "epoch": 0.7901876974540049, "grad_norm": 0.6480849981307983, "learning_rate": 0.0001328089517600782, "loss": 1.0502, "step": 4252 }, { "epoch": 0.790373536517376, "grad_norm": 0.5901347398757935, "learning_rate": 0.0001327813233332094, "loss": 1.0596, "step": 4253 }, { "epoch": 0.7905593755807471, "grad_norm": 0.597624659538269, "learning_rate": 0.00013275369210248317, "loss": 0.9652, "step": 4254 }, { "epoch": 0.7907452146441182, "grad_norm": 0.5878126621246338, "learning_rate": 0.00013272605807026285, "loss": 1.0158, "step": 4255 }, { "epoch": 0.7909310537074893, "grad_norm": 0.5607028007507324, "learning_rate": 0.00013269842123891205, "loss": 1.1119, "step": 4256 }, { "epoch": 0.7911168927708604, "grad_norm": 0.6098755598068237, "learning_rate": 0.0001326707816107946, "loss": 1.2164, "step": 4257 }, { "epoch": 0.7913027318342316, "grad_norm": 0.640153706073761, "learning_rate": 0.00013264313918827462, "loss": 0.9233, "step": 4258 }, { "epoch": 0.7914885708976027, "grad_norm": 0.6072023510932922, "learning_rate": 0.00013261549397371636, "loss": 0.9476, "step": 4259 }, { "epoch": 0.7916744099609738, "grad_norm": 0.6095691919326782, "learning_rate": 0.00013258784596948444, "loss": 1.1407, "step": 4260 }, { "epoch": 0.7918602490243449, "grad_norm": 0.8312792181968689, "learning_rate": 0.0001325601951779436, "loss": 0.9949, "step": 4261 }, { "epoch": 0.792046088087716, "grad_norm": 0.7518773674964905, "learning_rate": 0.00013253254160145892, "loss": 1.1936, "step": 4262 }, { "epoch": 0.7922319271510871, "grad_norm": 0.5493016242980957, "learning_rate": 0.00013250488524239559, "loss": 1.0946, "step": 4263 }, { "epoch": 0.7924177662144583, "grad_norm": 0.5355292558670044, "learning_rate": 0.0001324772261031192, "loss": 0.9688, 
"step": 4264 }, { "epoch": 0.7926036052778294, "grad_norm": 0.7269943356513977, "learning_rate": 0.0001324495641859955, "loss": 0.8984, "step": 4265 }, { "epoch": 0.7927894443412005, "grad_norm": 0.5987350940704346, "learning_rate": 0.00013242189949339041, "loss": 0.967, "step": 4266 }, { "epoch": 0.7929752834045717, "grad_norm": 0.7158120274543762, "learning_rate": 0.0001323942320276702, "loss": 1.1093, "step": 4267 }, { "epoch": 0.7931611224679428, "grad_norm": 0.6944933533668518, "learning_rate": 0.0001323665617912013, "loss": 1.2342, "step": 4268 }, { "epoch": 0.7933469615313139, "grad_norm": 0.5208589434623718, "learning_rate": 0.00013233888878635044, "loss": 1.0631, "step": 4269 }, { "epoch": 0.793532800594685, "grad_norm": 0.588266134262085, "learning_rate": 0.00013231121301548448, "loss": 1.0709, "step": 4270 }, { "epoch": 0.7937186396580561, "grad_norm": 0.7578076124191284, "learning_rate": 0.00013228353448097068, "loss": 1.0585, "step": 4271 }, { "epoch": 0.7939044787214272, "grad_norm": 0.573829710483551, "learning_rate": 0.00013225585318517638, "loss": 1.0457, "step": 4272 }, { "epoch": 0.7940903177847983, "grad_norm": 0.6806961297988892, "learning_rate": 0.00013222816913046928, "loss": 1.0456, "step": 4273 }, { "epoch": 0.7942761568481694, "grad_norm": 0.57918381690979, "learning_rate": 0.0001322004823192172, "loss": 1.066, "step": 4274 }, { "epoch": 0.7944619959115407, "grad_norm": 0.6446574926376343, "learning_rate": 0.00013217279275378824, "loss": 1.0582, "step": 4275 }, { "epoch": 0.7946478349749118, "grad_norm": 0.6489240527153015, "learning_rate": 0.00013214510043655083, "loss": 1.0116, "step": 4276 }, { "epoch": 0.7948336740382829, "grad_norm": 0.5904452800750732, "learning_rate": 0.00013211740536987345, "loss": 1.1452, "step": 4277 }, { "epoch": 0.795019513101654, "grad_norm": 0.7266445159912109, "learning_rate": 0.00013208970755612502, "loss": 1.0409, "step": 4278 }, { "epoch": 0.7952053521650251, "grad_norm": 0.6511578559875488, 
"learning_rate": 0.00013206200699767453, "loss": 1.0528, "step": 4279 }, { "epoch": 0.7953911912283962, "grad_norm": 0.6173616051673889, "learning_rate": 0.0001320343036968913, "loss": 1.0912, "step": 4280 }, { "epoch": 0.7955770302917673, "grad_norm": 0.5258021354675293, "learning_rate": 0.00013200659765614484, "loss": 0.8425, "step": 4281 }, { "epoch": 0.7957628693551384, "grad_norm": 0.6236848831176758, "learning_rate": 0.00013197888887780487, "loss": 1.1233, "step": 4282 }, { "epoch": 0.7959487084185096, "grad_norm": 0.7987383008003235, "learning_rate": 0.00013195117736424146, "loss": 1.2049, "step": 4283 }, { "epoch": 0.7961345474818807, "grad_norm": 0.6573415994644165, "learning_rate": 0.00013192346311782476, "loss": 1.2696, "step": 4284 }, { "epoch": 0.7963203865452518, "grad_norm": 0.5358014106750488, "learning_rate": 0.0001318957461409253, "loss": 1.1063, "step": 4285 }, { "epoch": 0.7965062256086229, "grad_norm": 0.6531534194946289, "learning_rate": 0.00013186802643591373, "loss": 1.3224, "step": 4286 }, { "epoch": 0.796692064671994, "grad_norm": 0.5929547548294067, "learning_rate": 0.00013184030400516095, "loss": 1.0583, "step": 4287 }, { "epoch": 0.7968779037353652, "grad_norm": 0.5735682249069214, "learning_rate": 0.00013181257885103818, "loss": 1.0209, "step": 4288 }, { "epoch": 0.7970637427987363, "grad_norm": 0.5685105323791504, "learning_rate": 0.00013178485097591676, "loss": 0.8461, "step": 4289 }, { "epoch": 0.7972495818621074, "grad_norm": 2.1148364543914795, "learning_rate": 0.00013175712038216838, "loss": 2.0309, "step": 4290 }, { "epoch": 0.7974354209254786, "grad_norm": 0.6161953210830688, "learning_rate": 0.00013172938707216482, "loss": 1.0573, "step": 4291 }, { "epoch": 0.7976212599888497, "grad_norm": 0.559108316898346, "learning_rate": 0.00013170165104827824, "loss": 0.9276, "step": 4292 }, { "epoch": 0.7978070990522208, "grad_norm": 0.6372076869010925, "learning_rate": 0.0001316739123128809, "loss": 1.0523, "step": 4293 }, { "epoch": 
0.7979929381155919, "grad_norm": 0.5434962511062622, "learning_rate": 0.00013164617086834543, "loss": 0.9008, "step": 4294 }, { "epoch": 0.798178777178963, "grad_norm": 0.5048145651817322, "learning_rate": 0.00013161842671704452, "loss": 0.9488, "step": 4295 }, { "epoch": 0.7983646162423341, "grad_norm": 0.56045001745224, "learning_rate": 0.00013159067986135125, "loss": 1.1348, "step": 4296 }, { "epoch": 0.7985504553057052, "grad_norm": 0.6737070083618164, "learning_rate": 0.00013156293030363886, "loss": 1.0575, "step": 4297 }, { "epoch": 0.7987362943690763, "grad_norm": 0.6539118885993958, "learning_rate": 0.00013153517804628082, "loss": 1.0982, "step": 4298 }, { "epoch": 0.7989221334324474, "grad_norm": 0.5994445085525513, "learning_rate": 0.00013150742309165085, "loss": 1.3265, "step": 4299 }, { "epoch": 0.7991079724958187, "grad_norm": 0.7722712755203247, "learning_rate": 0.0001314796654421229, "loss": 1.0636, "step": 4300 }, { "epoch": 0.7992938115591898, "grad_norm": 0.6560238003730774, "learning_rate": 0.0001314519051000711, "loss": 1.244, "step": 4301 }, { "epoch": 0.7994796506225609, "grad_norm": 0.5150291919708252, "learning_rate": 0.0001314241420678699, "loss": 1.0826, "step": 4302 }, { "epoch": 0.799665489685932, "grad_norm": 0.6745650768280029, "learning_rate": 0.0001313963763478939, "loss": 1.1411, "step": 4303 }, { "epoch": 0.7998513287493031, "grad_norm": 0.5777543187141418, "learning_rate": 0.000131368607942518, "loss": 0.9485, "step": 4304 }, { "epoch": 0.8000371678126742, "grad_norm": 0.6721982955932617, "learning_rate": 0.00013134083685411723, "loss": 1.2325, "step": 4305 }, { "epoch": 0.8002230068760453, "grad_norm": 0.5857947468757629, "learning_rate": 0.00013131306308506696, "loss": 0.8454, "step": 4306 }, { "epoch": 0.8004088459394164, "grad_norm": 0.6334595680236816, "learning_rate": 0.00013128528663774276, "loss": 1.1562, "step": 4307 }, { "epoch": 0.8005946850027876, "grad_norm": 0.6741576194763184, "learning_rate": 
0.00013125750751452034, "loss": 0.9842, "step": 4308 }, { "epoch": 0.8007805240661587, "grad_norm": 0.5686050057411194, "learning_rate": 0.00013122972571777576, "loss": 1.0821, "step": 4309 }, { "epoch": 0.8009663631295298, "grad_norm": 0.6299645304679871, "learning_rate": 0.00013120194124988522, "loss": 1.1056, "step": 4310 }, { "epoch": 0.801152202192901, "grad_norm": 3.2690067291259766, "learning_rate": 0.00013117415411322523, "loss": 1.9315, "step": 4311 }, { "epoch": 0.8013380412562721, "grad_norm": 0.7266578078269958, "learning_rate": 0.00013114636431017248, "loss": 1.2857, "step": 4312 }, { "epoch": 0.8015238803196432, "grad_norm": 0.5830214619636536, "learning_rate": 0.00013111857184310382, "loss": 0.8759, "step": 4313 }, { "epoch": 0.8017097193830143, "grad_norm": 0.5984452962875366, "learning_rate": 0.00013109077671439652, "loss": 1.2159, "step": 4314 }, { "epoch": 0.8018955584463854, "grad_norm": 0.6634993553161621, "learning_rate": 0.00013106297892642786, "loss": 1.0644, "step": 4315 }, { "epoch": 0.8020813975097566, "grad_norm": 0.5921594500541687, "learning_rate": 0.00013103517848157546, "loss": 0.9128, "step": 4316 }, { "epoch": 0.8022672365731277, "grad_norm": 0.501189112663269, "learning_rate": 0.0001310073753822172, "loss": 0.8117, "step": 4317 }, { "epoch": 0.8024530756364988, "grad_norm": 0.5862453579902649, "learning_rate": 0.00013097956963073107, "loss": 1.0291, "step": 4318 }, { "epoch": 0.8026389146998699, "grad_norm": 0.5692201852798462, "learning_rate": 0.00013095176122949543, "loss": 0.9335, "step": 4319 }, { "epoch": 0.802824753763241, "grad_norm": 0.5870519280433655, "learning_rate": 0.00013092395018088872, "loss": 1.1602, "step": 4320 }, { "epoch": 0.8030105928266121, "grad_norm": 0.532107412815094, "learning_rate": 0.00013089613648728976, "loss": 1.0165, "step": 4321 }, { "epoch": 0.8031964318899832, "grad_norm": 0.7711302042007446, "learning_rate": 0.00013086832015107746, "loss": 1.2256, "step": 4322 }, { "epoch": 0.8033822709533543, 
"grad_norm": 0.5713910460472107, "learning_rate": 0.000130840501174631, "loss": 1.064, "step": 4323 }, { "epoch": 0.8035681100167256, "grad_norm": 0.5301079154014587, "learning_rate": 0.00013081267956032986, "loss": 0.9982, "step": 4324 }, { "epoch": 0.8037539490800967, "grad_norm": 0.47264859080314636, "learning_rate": 0.0001307848553105536, "loss": 0.6809, "step": 4325 }, { "epoch": 0.8039397881434678, "grad_norm": 0.7126772403717041, "learning_rate": 0.00013075702842768212, "loss": 1.3468, "step": 4326 }, { "epoch": 0.8041256272068389, "grad_norm": 0.5951479077339172, "learning_rate": 0.00013072919891409556, "loss": 0.8848, "step": 4327 }, { "epoch": 0.80431146627021, "grad_norm": 0.8132142424583435, "learning_rate": 0.0001307013667721742, "loss": 1.1889, "step": 4328 }, { "epoch": 0.8044973053335811, "grad_norm": 0.6405729055404663, "learning_rate": 0.00013067353200429857, "loss": 1.2084, "step": 4329 }, { "epoch": 0.8046831443969522, "grad_norm": 0.5916075110435486, "learning_rate": 0.00013064569461284945, "loss": 0.9809, "step": 4330 }, { "epoch": 0.8048689834603233, "grad_norm": 0.5530146956443787, "learning_rate": 0.00013061785460020785, "loss": 1.0646, "step": 4331 }, { "epoch": 0.8050548225236945, "grad_norm": 0.5294923186302185, "learning_rate": 0.00013059001196875496, "loss": 0.8103, "step": 4332 }, { "epoch": 0.8052406615870656, "grad_norm": 0.6035967469215393, "learning_rate": 0.00013056216672087222, "loss": 1.0413, "step": 4333 }, { "epoch": 0.8054265006504367, "grad_norm": 0.720556378364563, "learning_rate": 0.00013053431885894133, "loss": 1.2473, "step": 4334 }, { "epoch": 0.8056123397138079, "grad_norm": 0.6400815844535828, "learning_rate": 0.00013050646838534417, "loss": 0.9757, "step": 4335 }, { "epoch": 0.805798178777179, "grad_norm": 0.5894456505775452, "learning_rate": 0.00013047861530246282, "loss": 1.1899, "step": 4336 }, { "epoch": 0.8059840178405501, "grad_norm": 0.6233096122741699, "learning_rate": 0.00013045075961267964, "loss": 1.0853, 
"step": 4337 }, { "epoch": 0.8061698569039212, "grad_norm": 0.5251320004463196, "learning_rate": 0.00013042290131837718, "loss": 1.0457, "step": 4338 }, { "epoch": 0.8063556959672923, "grad_norm": 0.6254708170890808, "learning_rate": 0.00013039504042193818, "loss": 0.9582, "step": 4339 }, { "epoch": 0.8065415350306634, "grad_norm": 0.5413371920585632, "learning_rate": 0.0001303671769257457, "loss": 0.9513, "step": 4340 }, { "epoch": 0.8067273740940346, "grad_norm": 0.5372425317764282, "learning_rate": 0.00013033931083218295, "loss": 1.0867, "step": 4341 }, { "epoch": 0.8069132131574057, "grad_norm": 0.6267648935317993, "learning_rate": 0.00013031144214363337, "loss": 1.256, "step": 4342 }, { "epoch": 0.8070990522207768, "grad_norm": 0.9133261442184448, "learning_rate": 0.00013028357086248063, "loss": 0.8639, "step": 4343 }, { "epoch": 0.8072848912841479, "grad_norm": 0.4667738378047943, "learning_rate": 0.00013025569699110862, "loss": 0.9185, "step": 4344 }, { "epoch": 0.807470730347519, "grad_norm": 0.5194012522697449, "learning_rate": 0.00013022782053190147, "loss": 0.8947, "step": 4345 }, { "epoch": 0.8076565694108901, "grad_norm": 1.2933731079101562, "learning_rate": 0.00013019994148724348, "loss": 1.4954, "step": 4346 }, { "epoch": 0.8078424084742613, "grad_norm": 0.686096727848053, "learning_rate": 0.00013017205985951926, "loss": 1.2909, "step": 4347 }, { "epoch": 0.8080282475376324, "grad_norm": 0.6402808427810669, "learning_rate": 0.0001301441756511135, "loss": 1.1911, "step": 4348 }, { "epoch": 0.8082140866010036, "grad_norm": 0.5459903478622437, "learning_rate": 0.00013011628886441129, "loss": 0.9079, "step": 4349 }, { "epoch": 0.8083999256643747, "grad_norm": 0.6057734489440918, "learning_rate": 0.00013008839950179776, "loss": 1.1206, "step": 4350 }, { "epoch": 0.8085857647277458, "grad_norm": 0.6145203113555908, "learning_rate": 0.0001300605075656584, "loss": 1.2271, "step": 4351 }, { "epoch": 0.8087716037911169, "grad_norm": 0.5688883662223816, 
"learning_rate": 0.00013003261305837891, "loss": 0.6687, "step": 4352 }, { "epoch": 0.808957442854488, "grad_norm": 0.5707416534423828, "learning_rate": 0.00013000471598234508, "loss": 1.0749, "step": 4353 }, { "epoch": 0.8091432819178591, "grad_norm": 0.5867960453033447, "learning_rate": 0.00012997681633994304, "loss": 1.0557, "step": 4354 }, { "epoch": 0.8093291209812302, "grad_norm": 0.575100302696228, "learning_rate": 0.0001299489141335591, "loss": 1.0123, "step": 4355 }, { "epoch": 0.8095149600446013, "grad_norm": 0.5517423152923584, "learning_rate": 0.00012992100936557984, "loss": 1.0448, "step": 4356 }, { "epoch": 0.8097007991079725, "grad_norm": 0.552761971950531, "learning_rate": 0.00012989310203839193, "loss": 0.6827, "step": 4357 }, { "epoch": 0.8098866381713437, "grad_norm": 0.531324565410614, "learning_rate": 0.00012986519215438244, "loss": 1.0561, "step": 4358 }, { "epoch": 0.8100724772347148, "grad_norm": 0.6074504852294922, "learning_rate": 0.0001298372797159385, "loss": 0.9044, "step": 4359 }, { "epoch": 0.8102583162980859, "grad_norm": 0.6233542561531067, "learning_rate": 0.00012980936472544756, "loss": 1.2859, "step": 4360 }, { "epoch": 0.810444155361457, "grad_norm": 0.6285319924354553, "learning_rate": 0.00012978144718529718, "loss": 1.2288, "step": 4361 }, { "epoch": 0.8106299944248281, "grad_norm": 0.5883091688156128, "learning_rate": 0.00012975352709787528, "loss": 0.8492, "step": 4362 }, { "epoch": 0.8108158334881992, "grad_norm": 0.6085577011108398, "learning_rate": 0.0001297256044655699, "loss": 1.0563, "step": 4363 }, { "epoch": 0.8110016725515703, "grad_norm": 0.552869439125061, "learning_rate": 0.00012969767929076935, "loss": 0.8394, "step": 4364 }, { "epoch": 0.8111875116149415, "grad_norm": 0.6783784627914429, "learning_rate": 0.0001296697515758621, "loss": 1.2056, "step": 4365 }, { "epoch": 0.8113733506783126, "grad_norm": 0.6433395743370056, "learning_rate": 0.00012964182132323688, "loss": 1.1556, "step": 4366 }, { "epoch": 
0.8115591897416837, "grad_norm": 0.7210237979888916, "learning_rate": 0.0001296138885352826, "loss": 1.1526, "step": 4367 }, { "epoch": 0.8117450288050548, "grad_norm": 0.5519304275512695, "learning_rate": 0.0001295859532143884, "loss": 0.6599, "step": 4368 }, { "epoch": 0.8119308678684259, "grad_norm": 0.6399482488632202, "learning_rate": 0.00012955801536294371, "loss": 1.2954, "step": 4369 }, { "epoch": 0.812116706931797, "grad_norm": 0.6463430523872375, "learning_rate": 0.00012953007498333808, "loss": 0.9671, "step": 4370 }, { "epoch": 0.8123025459951682, "grad_norm": 0.6632814407348633, "learning_rate": 0.0001295021320779613, "loss": 1.2656, "step": 4371 }, { "epoch": 0.8124883850585393, "grad_norm": 0.585106611251831, "learning_rate": 0.00012947418664920345, "loss": 1.0554, "step": 4372 }, { "epoch": 0.8126742241219104, "grad_norm": 0.6526263356208801, "learning_rate": 0.00012944623869945465, "loss": 1.1726, "step": 4373 }, { "epoch": 0.8128600631852816, "grad_norm": 0.6378617286682129, "learning_rate": 0.00012941828823110545, "loss": 1.031, "step": 4374 }, { "epoch": 0.8130459022486527, "grad_norm": 2.6388754844665527, "learning_rate": 0.00012939033524654646, "loss": 2.0343, "step": 4375 }, { "epoch": 0.8132317413120238, "grad_norm": 0.5578187704086304, "learning_rate": 0.0001293623797481686, "loss": 1.0333, "step": 4376 }, { "epoch": 0.8134175803753949, "grad_norm": 1.881386637687683, "learning_rate": 0.00012933442173836287, "loss": 1.8941, "step": 4377 }, { "epoch": 0.813603419438766, "grad_norm": 0.6024147272109985, "learning_rate": 0.00012930646121952073, "loss": 1.0478, "step": 4378 }, { "epoch": 0.8137892585021371, "grad_norm": 0.6307503581047058, "learning_rate": 0.00012927849819403358, "loss": 1.0855, "step": 4379 }, { "epoch": 0.8139750975655082, "grad_norm": 0.6026185154914856, "learning_rate": 0.00012925053266429323, "loss": 1.0046, "step": 4380 }, { "epoch": 0.8141609366288793, "grad_norm": 0.7271862626075745, "learning_rate": 
0.00012922256463269159, "loss": 0.9055, "step": 4381 }, { "epoch": 0.8143467756922506, "grad_norm": 0.5896093249320984, "learning_rate": 0.00012919459410162083, "loss": 1.064, "step": 4382 }, { "epoch": 0.8145326147556217, "grad_norm": 0.5362374186515808, "learning_rate": 0.00012916662107347333, "loss": 0.9173, "step": 4383 }, { "epoch": 0.8147184538189928, "grad_norm": 0.5842663645744324, "learning_rate": 0.0001291386455506417, "loss": 0.9661, "step": 4384 }, { "epoch": 0.8149042928823639, "grad_norm": 0.7104371190071106, "learning_rate": 0.00012911066753551874, "loss": 1.162, "step": 4385 }, { "epoch": 0.815090131945735, "grad_norm": 0.6280097365379333, "learning_rate": 0.0001290826870304975, "loss": 1.0367, "step": 4386 }, { "epoch": 0.8152759710091061, "grad_norm": 0.5821822285652161, "learning_rate": 0.00012905470403797113, "loss": 1.2102, "step": 4387 }, { "epoch": 0.8154618100724772, "grad_norm": 0.658132791519165, "learning_rate": 0.00012902671856033316, "loss": 1.1359, "step": 4388 }, { "epoch": 0.8156476491358483, "grad_norm": 0.563515841960907, "learning_rate": 0.0001289987305999772, "loss": 1.3327, "step": 4389 }, { "epoch": 0.8158334881992195, "grad_norm": 0.6515486240386963, "learning_rate": 0.00012897074015929717, "loss": 1.3089, "step": 4390 }, { "epoch": 0.8160193272625906, "grad_norm": 0.6316953897476196, "learning_rate": 0.00012894274724068708, "loss": 1.0272, "step": 4391 }, { "epoch": 0.8162051663259617, "grad_norm": 0.5534893274307251, "learning_rate": 0.00012891475184654126, "loss": 1.1062, "step": 4392 }, { "epoch": 0.8163910053893328, "grad_norm": 0.5467495918273926, "learning_rate": 0.00012888675397925426, "loss": 1.2019, "step": 4393 }, { "epoch": 0.816576844452704, "grad_norm": 0.6885834336280823, "learning_rate": 0.00012885875364122075, "loss": 1.1425, "step": 4394 }, { "epoch": 0.8167626835160751, "grad_norm": 0.508720874786377, "learning_rate": 0.00012883075083483566, "loss": 0.9065, "step": 4395 }, { "epoch": 0.8169485225794462, 
"grad_norm": 0.6153755187988281, "learning_rate": 0.00012880274556249414, "loss": 1.1685, "step": 4396 }, { "epoch": 0.8171343616428173, "grad_norm": 0.6323347091674805, "learning_rate": 0.00012877473782659156, "loss": 1.2422, "step": 4397 }, { "epoch": 0.8173202007061885, "grad_norm": 0.6620142459869385, "learning_rate": 0.00012874672762952345, "loss": 0.8893, "step": 4398 }, { "epoch": 0.8175060397695596, "grad_norm": 0.5536710023880005, "learning_rate": 0.00012871871497368559, "loss": 1.0329, "step": 4399 }, { "epoch": 0.8176918788329307, "grad_norm": 0.6210238933563232, "learning_rate": 0.00012869069986147394, "loss": 1.1554, "step": 4400 }, { "epoch": 0.8178777178963018, "grad_norm": 0.5708968043327332, "learning_rate": 0.0001286626822952848, "loss": 1.0671, "step": 4401 }, { "epoch": 0.8180635569596729, "grad_norm": 0.5646460652351379, "learning_rate": 0.00012863466227751443, "loss": 1.1507, "step": 4402 }, { "epoch": 0.818249396023044, "grad_norm": 0.7381437420845032, "learning_rate": 0.0001286066398105595, "loss": 1.179, "step": 4403 }, { "epoch": 0.8184352350864151, "grad_norm": 0.5559968948364258, "learning_rate": 0.00012857861489681688, "loss": 0.9457, "step": 4404 }, { "epoch": 0.8186210741497862, "grad_norm": 0.6272557377815247, "learning_rate": 0.00012855058753868357, "loss": 1.0855, "step": 4405 }, { "epoch": 0.8188069132131574, "grad_norm": 0.5753204822540283, "learning_rate": 0.00012852255773855676, "loss": 0.8327, "step": 4406 }, { "epoch": 0.8189927522765286, "grad_norm": 0.6473789811134338, "learning_rate": 0.00012849452549883398, "loss": 1.1054, "step": 4407 }, { "epoch": 0.8191785913398997, "grad_norm": 0.6232612729072571, "learning_rate": 0.00012846649082191283, "loss": 1.2546, "step": 4408 }, { "epoch": 0.8193644304032708, "grad_norm": 0.4950093626976013, "learning_rate": 0.0001284384537101912, "loss": 1.1192, "step": 4409 }, { "epoch": 0.8195502694666419, "grad_norm": 0.6465705633163452, "learning_rate": 0.00012841041416606714, "loss": 
1.0576, "step": 4410 }, { "epoch": 0.819736108530013, "grad_norm": 0.7886804342269897, "learning_rate": 0.00012838237219193896, "loss": 1.2127, "step": 4411 }, { "epoch": 0.8199219475933841, "grad_norm": 0.5513986349105835, "learning_rate": 0.00012835432779020515, "loss": 0.897, "step": 4412 }, { "epoch": 0.8201077866567552, "grad_norm": 0.5260704755783081, "learning_rate": 0.0001283262809632644, "loss": 0.8837, "step": 4413 }, { "epoch": 0.8202936257201263, "grad_norm": 0.701880693435669, "learning_rate": 0.00012829823171351564, "loss": 1.0562, "step": 4414 }, { "epoch": 0.8204794647834975, "grad_norm": 0.5578733086585999, "learning_rate": 0.00012827018004335793, "loss": 1.0216, "step": 4415 }, { "epoch": 0.8206653038468686, "grad_norm": 0.5386905670166016, "learning_rate": 0.00012824212595519065, "loss": 0.7909, "step": 4416 }, { "epoch": 0.8208511429102398, "grad_norm": 0.7121390700340271, "learning_rate": 0.00012821406945141326, "loss": 1.1079, "step": 4417 }, { "epoch": 0.8210369819736109, "grad_norm": 0.5872541666030884, "learning_rate": 0.0001281860105344256, "loss": 1.0154, "step": 4418 }, { "epoch": 0.821222821036982, "grad_norm": 0.8177608847618103, "learning_rate": 0.00012815794920662747, "loss": 1.0999, "step": 4419 }, { "epoch": 0.8214086601003531, "grad_norm": 0.5959897637367249, "learning_rate": 0.00012812988547041912, "loss": 1.1842, "step": 4420 }, { "epoch": 0.8215944991637242, "grad_norm": 0.5651900768280029, "learning_rate": 0.00012810181932820086, "loss": 1.2421, "step": 4421 }, { "epoch": 0.8217803382270953, "grad_norm": 0.5698773264884949, "learning_rate": 0.00012807375078237326, "loss": 1.1008, "step": 4422 }, { "epoch": 0.8219661772904665, "grad_norm": 0.5098702907562256, "learning_rate": 0.00012804567983533708, "loss": 0.9125, "step": 4423 }, { "epoch": 0.8221520163538376, "grad_norm": 0.6269254088401794, "learning_rate": 0.00012801760648949327, "loss": 0.7964, "step": 4424 }, { "epoch": 0.8223378554172087, "grad_norm": 6.160284996032715, 
"learning_rate": 0.00012798953074724307, "loss": 1.8079, "step": 4425 }, { "epoch": 0.8225236944805798, "grad_norm": 0.5508794188499451, "learning_rate": 0.00012796145261098778, "loss": 1.0323, "step": 4426 }, { "epoch": 0.8227095335439509, "grad_norm": 0.8076217174530029, "learning_rate": 0.00012793337208312903, "loss": 1.226, "step": 4427 }, { "epoch": 0.822895372607322, "grad_norm": 0.6926615834236145, "learning_rate": 0.00012790528916606857, "loss": 0.9492, "step": 4428 }, { "epoch": 0.8230812116706931, "grad_norm": 0.5568311214447021, "learning_rate": 0.00012787720386220843, "loss": 0.8471, "step": 4429 }, { "epoch": 0.8232670507340643, "grad_norm": 0.5556146502494812, "learning_rate": 0.00012784911617395078, "loss": 1.1586, "step": 4430 }, { "epoch": 0.8234528897974355, "grad_norm": 0.6666334867477417, "learning_rate": 0.00012782102610369803, "loss": 1.175, "step": 4431 }, { "epoch": 0.8236387288608066, "grad_norm": 0.6975376009941101, "learning_rate": 0.00012779293365385282, "loss": 1.1044, "step": 4432 }, { "epoch": 0.8238245679241777, "grad_norm": 0.5512378215789795, "learning_rate": 0.00012776483882681792, "loss": 1.0993, "step": 4433 }, { "epoch": 0.8240104069875488, "grad_norm": 0.5872717499732971, "learning_rate": 0.00012773674162499634, "loss": 0.885, "step": 4434 }, { "epoch": 0.8241962460509199, "grad_norm": 0.6425865292549133, "learning_rate": 0.00012770864205079128, "loss": 1.1616, "step": 4435 }, { "epoch": 0.824382085114291, "grad_norm": 0.5967339873313904, "learning_rate": 0.00012768054010660622, "loss": 1.1622, "step": 4436 }, { "epoch": 0.8245679241776621, "grad_norm": 0.5792629718780518, "learning_rate": 0.00012765243579484473, "loss": 0.9509, "step": 4437 }, { "epoch": 0.8247537632410332, "grad_norm": 0.5237547159194946, "learning_rate": 0.00012762432911791061, "loss": 1.0155, "step": 4438 }, { "epoch": 0.8249396023044044, "grad_norm": 0.6082087755203247, "learning_rate": 0.00012759622007820794, "loss": 1.1278, "step": 4439 }, { "epoch": 
0.8251254413677755, "grad_norm": 0.5648608803749084, "learning_rate": 0.00012756810867814092, "loss": 1.2205, "step": 4440 }, { "epoch": 0.8253112804311467, "grad_norm": 0.6407959461212158, "learning_rate": 0.000127539994920114, "loss": 0.8724, "step": 4441 }, { "epoch": 0.8254971194945178, "grad_norm": 0.5947341322898865, "learning_rate": 0.00012751187880653174, "loss": 1.0559, "step": 4442 }, { "epoch": 0.8256829585578889, "grad_norm": 0.5259736180305481, "learning_rate": 0.00012748376033979908, "loss": 1.1746, "step": 4443 }, { "epoch": 0.82586879762126, "grad_norm": 0.5010737180709839, "learning_rate": 0.00012745563952232098, "loss": 0.8902, "step": 4444 }, { "epoch": 0.8260546366846311, "grad_norm": 0.5889286398887634, "learning_rate": 0.00012742751635650267, "loss": 1.0432, "step": 4445 }, { "epoch": 0.8262404757480022, "grad_norm": 2.0364248752593994, "learning_rate": 0.00012739939084474962, "loss": 1.4645, "step": 4446 }, { "epoch": 0.8264263148113733, "grad_norm": 0.7073319554328918, "learning_rate": 0.00012737126298946748, "loss": 0.9539, "step": 4447 }, { "epoch": 0.8266121538747445, "grad_norm": 0.5749005079269409, "learning_rate": 0.00012734313279306206, "loss": 1.0574, "step": 4448 }, { "epoch": 0.8267979929381156, "grad_norm": 0.6676498651504517, "learning_rate": 0.00012731500025793936, "loss": 1.2505, "step": 4449 }, { "epoch": 0.8269838320014867, "grad_norm": 0.6630710959434509, "learning_rate": 0.00012728686538650565, "loss": 1.2037, "step": 4450 }, { "epoch": 0.8271696710648578, "grad_norm": 0.8507979512214661, "learning_rate": 0.0001272587281811674, "loss": 1.3893, "step": 4451 }, { "epoch": 0.8273555101282289, "grad_norm": 0.4904545247554779, "learning_rate": 0.00012723058864433118, "loss": 1.1238, "step": 4452 }, { "epoch": 0.8275413491916, "grad_norm": 0.5739341378211975, "learning_rate": 0.00012720244677840388, "loss": 0.9467, "step": 4453 }, { "epoch": 0.8277271882549712, "grad_norm": 0.49247053265571594, "learning_rate": 
0.00012717430258579252, "loss": 0.781, "step": 4454 }, { "epoch": 0.8279130273183423, "grad_norm": 0.6822273135185242, "learning_rate": 0.00012714615606890435, "loss": 1.0274, "step": 4455 }, { "epoch": 0.8280988663817135, "grad_norm": 0.5292199850082397, "learning_rate": 0.00012711800723014675, "loss": 1.2045, "step": 4456 }, { "epoch": 0.8282847054450846, "grad_norm": 0.5441524982452393, "learning_rate": 0.0001270898560719274, "loss": 1.0811, "step": 4457 }, { "epoch": 0.8284705445084557, "grad_norm": 0.6144747138023376, "learning_rate": 0.0001270617025966541, "loss": 1.1179, "step": 4458 }, { "epoch": 0.8286563835718268, "grad_norm": 0.6470378041267395, "learning_rate": 0.00012703354680673485, "loss": 1.0405, "step": 4459 }, { "epoch": 0.8288422226351979, "grad_norm": 0.8334536552429199, "learning_rate": 0.00012700538870457796, "loss": 1.1096, "step": 4460 }, { "epoch": 0.829028061698569, "grad_norm": 0.6929413080215454, "learning_rate": 0.0001269772282925918, "loss": 0.9405, "step": 4461 }, { "epoch": 0.8292139007619401, "grad_norm": 0.7080973982810974, "learning_rate": 0.000126949065573185, "loss": 1.0241, "step": 4462 }, { "epoch": 0.8293997398253112, "grad_norm": 0.5871732234954834, "learning_rate": 0.0001269209005487664, "loss": 0.9457, "step": 4463 }, { "epoch": 0.8295855788886825, "grad_norm": 3.319326639175415, "learning_rate": 0.00012689273322174493, "loss": 2.2102, "step": 4464 }, { "epoch": 0.8297714179520536, "grad_norm": 0.7650028467178345, "learning_rate": 0.00012686456359452993, "loss": 0.8271, "step": 4465 }, { "epoch": 0.8299572570154247, "grad_norm": 0.7382581830024719, "learning_rate": 0.0001268363916695307, "loss": 1.135, "step": 4466 }, { "epoch": 0.8301430960787958, "grad_norm": 0.5419159531593323, "learning_rate": 0.0001268082174491569, "loss": 0.6879, "step": 4467 }, { "epoch": 0.8303289351421669, "grad_norm": 0.5916446447372437, "learning_rate": 0.00012678004093581835, "loss": 1.0661, "step": 4468 }, { "epoch": 0.830514774205538, 
"grad_norm": 0.6891621351242065, "learning_rate": 0.000126751862131925, "loss": 1.1768, "step": 4469 }, { "epoch": 0.8307006132689091, "grad_norm": 0.5724967122077942, "learning_rate": 0.00012672368103988705, "loss": 1.1755, "step": 4470 }, { "epoch": 0.8308864523322802, "grad_norm": 0.6919474601745605, "learning_rate": 0.00012669549766211493, "loss": 1.6258, "step": 4471 }, { "epoch": 0.8310722913956514, "grad_norm": 0.5278167724609375, "learning_rate": 0.00012666731200101916, "loss": 0.9755, "step": 4472 }, { "epoch": 0.8312581304590225, "grad_norm": 0.6204642653465271, "learning_rate": 0.00012663912405901057, "loss": 1.1226, "step": 4473 }, { "epoch": 0.8314439695223936, "grad_norm": 0.5770162343978882, "learning_rate": 0.00012661093383850014, "loss": 1.0097, "step": 4474 }, { "epoch": 0.8316298085857647, "grad_norm": 0.6649433970451355, "learning_rate": 0.00012658274134189903, "loss": 1.1369, "step": 4475 }, { "epoch": 0.8318156476491358, "grad_norm": 0.70816570520401, "learning_rate": 0.00012655454657161858, "loss": 1.0995, "step": 4476 }, { "epoch": 0.832001486712507, "grad_norm": 0.5342292785644531, "learning_rate": 0.00012652634953007037, "loss": 1.1424, "step": 4477 }, { "epoch": 0.8321873257758781, "grad_norm": 0.47927406430244446, "learning_rate": 0.0001264981502196662, "loss": 0.9822, "step": 4478 }, { "epoch": 0.8323731648392492, "grad_norm": 0.5756170153617859, "learning_rate": 0.0001264699486428179, "loss": 1.1123, "step": 4479 }, { "epoch": 0.8325590039026203, "grad_norm": 0.5887073874473572, "learning_rate": 0.0001264417448019377, "loss": 1.3087, "step": 4480 }, { "epoch": 0.8327448429659915, "grad_norm": 0.6383137702941895, "learning_rate": 0.00012641353869943797, "loss": 1.0229, "step": 4481 }, { "epoch": 0.8329306820293626, "grad_norm": 0.4909496605396271, "learning_rate": 0.00012638533033773114, "loss": 1.0681, "step": 4482 }, { "epoch": 0.8331165210927337, "grad_norm": 0.6577247977256775, "learning_rate": 0.00012635711971923, "loss": 1.0942, 
"step": 4483 }, { "epoch": 0.8333023601561048, "grad_norm": 0.5949928760528564, "learning_rate": 0.00012632890684634746, "loss": 1.0045, "step": 4484 }, { "epoch": 0.8334881992194759, "grad_norm": 0.6063514351844788, "learning_rate": 0.0001263006917214966, "loss": 0.9075, "step": 4485 }, { "epoch": 0.833674038282847, "grad_norm": 0.774755597114563, "learning_rate": 0.0001262724743470907, "loss": 1.1189, "step": 4486 }, { "epoch": 0.8338598773462181, "grad_norm": 0.5430833697319031, "learning_rate": 0.0001262442547255433, "loss": 1.0773, "step": 4487 }, { "epoch": 0.8340457164095892, "grad_norm": 0.5545639395713806, "learning_rate": 0.0001262160328592681, "loss": 0.8883, "step": 4488 }, { "epoch": 0.8342315554729605, "grad_norm": 0.5910719633102417, "learning_rate": 0.00012618780875067894, "loss": 0.9673, "step": 4489 }, { "epoch": 0.8344173945363316, "grad_norm": 0.5268061757087708, "learning_rate": 0.0001261595824021899, "loss": 0.9255, "step": 4490 }, { "epoch": 0.8346032335997027, "grad_norm": 0.7021967768669128, "learning_rate": 0.00012613135381621526, "loss": 1.0996, "step": 4491 }, { "epoch": 0.8347890726630738, "grad_norm": 0.4780430793762207, "learning_rate": 0.0001261031229951694, "loss": 0.8551, "step": 4492 }, { "epoch": 0.8349749117264449, "grad_norm": 0.5368556976318359, "learning_rate": 0.00012607488994146704, "loss": 0.9341, "step": 4493 }, { "epoch": 0.835160750789816, "grad_norm": 0.6084364056587219, "learning_rate": 0.00012604665465752298, "loss": 1.0467, "step": 4494 }, { "epoch": 0.8353465898531871, "grad_norm": 0.5446452498435974, "learning_rate": 0.00012601841714575228, "loss": 1.1537, "step": 4495 }, { "epoch": 0.8355324289165582, "grad_norm": 0.5627488493919373, "learning_rate": 0.0001259901774085701, "loss": 0.9802, "step": 4496 }, { "epoch": 0.8357182679799294, "grad_norm": 0.6170056462287903, "learning_rate": 0.00012596193544839188, "loss": 1.1555, "step": 4497 }, { "epoch": 0.8359041070433005, "grad_norm": 0.6171839237213135, 
"learning_rate": 0.00012593369126763322, "loss": 1.1423, "step": 4498 }, { "epoch": 0.8360899461066716, "grad_norm": 0.6114042401313782, "learning_rate": 0.00012590544486870987, "loss": 0.9804, "step": 4499 }, { "epoch": 0.8362757851700428, "grad_norm": 0.6916267275810242, "learning_rate": 0.00012587719625403786, "loss": 1.1785, "step": 4500 }, { "epoch": 0.8364616242334139, "grad_norm": 0.650570273399353, "learning_rate": 0.0001258489454260333, "loss": 0.9031, "step": 4501 }, { "epoch": 0.836647463296785, "grad_norm": 0.6784061789512634, "learning_rate": 0.00012582069238711258, "loss": 0.9206, "step": 4502 }, { "epoch": 0.8368333023601561, "grad_norm": 0.66778564453125, "learning_rate": 0.00012579243713969223, "loss": 0.9456, "step": 4503 }, { "epoch": 0.8370191414235272, "grad_norm": 0.6047230362892151, "learning_rate": 0.00012576417968618902, "loss": 1.2266, "step": 4504 }, { "epoch": 0.8372049804868984, "grad_norm": 0.5893747806549072, "learning_rate": 0.00012573592002901982, "loss": 1.2147, "step": 4505 }, { "epoch": 0.8373908195502695, "grad_norm": 0.5346536636352539, "learning_rate": 0.00012570765817060175, "loss": 1.0172, "step": 4506 }, { "epoch": 0.8375766586136406, "grad_norm": 0.6562202572822571, "learning_rate": 0.0001256793941133521, "loss": 0.8101, "step": 4507 }, { "epoch": 0.8377624976770117, "grad_norm": 0.59455806016922, "learning_rate": 0.0001256511278596884, "loss": 1.0672, "step": 4508 }, { "epoch": 0.8379483367403828, "grad_norm": 0.6395320892333984, "learning_rate": 0.00012562285941202826, "loss": 1.0752, "step": 4509 }, { "epoch": 0.8381341758037539, "grad_norm": 0.594071626663208, "learning_rate": 0.00012559458877278961, "loss": 1.1381, "step": 4510 }, { "epoch": 0.838320014867125, "grad_norm": 0.7788839936256409, "learning_rate": 0.00012556631594439047, "loss": 1.0234, "step": 4511 }, { "epoch": 0.8385058539304961, "grad_norm": 0.6716021299362183, "learning_rate": 0.00012553804092924905, "loss": 1.3516, "step": 4512 }, { "epoch": 
0.8386916929938674, "grad_norm": 0.5156703591346741, "learning_rate": 0.0001255097637297838, "loss": 0.8481, "step": 4513 }, { "epoch": 0.8388775320572385, "grad_norm": 0.5573685169219971, "learning_rate": 0.0001254814843484133, "loss": 0.9987, "step": 4514 }, { "epoch": 0.8390633711206096, "grad_norm": 0.5675413608551025, "learning_rate": 0.00012545320278755642, "loss": 1.0343, "step": 4515 }, { "epoch": 0.8392492101839807, "grad_norm": 0.6181820631027222, "learning_rate": 0.00012542491904963206, "loss": 1.3209, "step": 4516 }, { "epoch": 0.8394350492473518, "grad_norm": 0.6297406554222107, "learning_rate": 0.00012539663313705944, "loss": 1.0885, "step": 4517 }, { "epoch": 0.8396208883107229, "grad_norm": 0.5502396821975708, "learning_rate": 0.00012536834505225787, "loss": 0.9801, "step": 4518 }, { "epoch": 0.839806727374094, "grad_norm": 0.6094892024993896, "learning_rate": 0.00012534005479764696, "loss": 0.9609, "step": 4519 }, { "epoch": 0.8399925664374651, "grad_norm": 0.48954108357429504, "learning_rate": 0.00012531176237564636, "loss": 0.8465, "step": 4520 }, { "epoch": 0.8401784055008362, "grad_norm": 0.5575180649757385, "learning_rate": 0.00012528346778867603, "loss": 1.0366, "step": 4521 }, { "epoch": 0.8403642445642074, "grad_norm": 0.6245620250701904, "learning_rate": 0.00012525517103915607, "loss": 1.0974, "step": 4522 }, { "epoch": 0.8405500836275785, "grad_norm": 0.7433310747146606, "learning_rate": 0.00012522687212950675, "loss": 1.2692, "step": 4523 }, { "epoch": 0.8407359226909497, "grad_norm": 0.5833187699317932, "learning_rate": 0.0001251985710621485, "loss": 1.2388, "step": 4524 }, { "epoch": 0.8409217617543208, "grad_norm": 0.6372852921485901, "learning_rate": 0.00012517026783950204, "loss": 1.0083, "step": 4525 }, { "epoch": 0.8411076008176919, "grad_norm": 0.5090671181678772, "learning_rate": 0.00012514196246398815, "loss": 0.8843, "step": 4526 }, { "epoch": 0.841293439881063, "grad_norm": 0.6583060026168823, "learning_rate": 
0.0001251136549380279, "loss": 1.0564, "step": 4527 }, { "epoch": 0.8414792789444341, "grad_norm": 0.6386400461196899, "learning_rate": 0.0001250853452640424, "loss": 1.0424, "step": 4528 }, { "epoch": 0.8416651180078052, "grad_norm": 0.576964795589447, "learning_rate": 0.00012505703344445316, "loss": 1.1201, "step": 4529 }, { "epoch": 0.8418509570711764, "grad_norm": 0.6124757528305054, "learning_rate": 0.0001250287194816817, "loss": 1.1534, "step": 4530 }, { "epoch": 0.8420367961345475, "grad_norm": 0.5833020210266113, "learning_rate": 0.00012500040337814973, "loss": 0.9472, "step": 4531 }, { "epoch": 0.8422226351979186, "grad_norm": 0.556035041809082, "learning_rate": 0.00012497208513627926, "loss": 1.0616, "step": 4532 }, { "epoch": 0.8424084742612897, "grad_norm": 0.5945399403572083, "learning_rate": 0.00012494376475849237, "loss": 1.2011, "step": 4533 }, { "epoch": 0.8425943133246608, "grad_norm": 0.5541200041770935, "learning_rate": 0.00012491544224721136, "loss": 1.0238, "step": 4534 }, { "epoch": 0.842780152388032, "grad_norm": 0.5658981204032898, "learning_rate": 0.00012488711760485873, "loss": 1.1086, "step": 4535 }, { "epoch": 0.842965991451403, "grad_norm": 0.5613319277763367, "learning_rate": 0.00012485879083385713, "loss": 1.2952, "step": 4536 }, { "epoch": 0.8431518305147742, "grad_norm": 0.5648307204246521, "learning_rate": 0.00012483046193662945, "loss": 1.1365, "step": 4537 }, { "epoch": 0.8433376695781454, "grad_norm": 0.5618223547935486, "learning_rate": 0.00012480213091559868, "loss": 1.2456, "step": 4538 }, { "epoch": 0.8435235086415165, "grad_norm": 0.4825553297996521, "learning_rate": 0.00012477379777318804, "loss": 0.8803, "step": 4539 }, { "epoch": 0.8437093477048876, "grad_norm": 0.5788162350654602, "learning_rate": 0.00012474546251182094, "loss": 0.9082, "step": 4540 }, { "epoch": 0.8438951867682587, "grad_norm": 0.5924403667449951, "learning_rate": 0.00012471712513392096, "loss": 0.9885, "step": 4541 }, { "epoch": 0.8440810258316298, 
"grad_norm": 0.5519680976867676, "learning_rate": 0.00012468878564191182, "loss": 0.9727, "step": 4542 }, { "epoch": 0.8442668648950009, "grad_norm": 0.594264030456543, "learning_rate": 0.0001246604440382175, "loss": 0.9876, "step": 4543 }, { "epoch": 0.844452703958372, "grad_norm": 0.5373499989509583, "learning_rate": 0.0001246321003252621, "loss": 1.0743, "step": 4544 }, { "epoch": 0.8446385430217431, "grad_norm": 0.5786815881729126, "learning_rate": 0.00012460375450546994, "loss": 1.1302, "step": 4545 }, { "epoch": 0.8448243820851143, "grad_norm": 0.7555760145187378, "learning_rate": 0.00012457540658126546, "loss": 1.288, "step": 4546 }, { "epoch": 0.8450102211484855, "grad_norm": 0.7258514165878296, "learning_rate": 0.00012454705655507334, "loss": 1.1075, "step": 4547 }, { "epoch": 0.8451960602118566, "grad_norm": 0.49702054262161255, "learning_rate": 0.00012451870442931845, "loss": 0.9364, "step": 4548 }, { "epoch": 0.8453818992752277, "grad_norm": 0.5207595825195312, "learning_rate": 0.00012449035020642573, "loss": 0.957, "step": 4549 }, { "epoch": 0.8455677383385988, "grad_norm": 0.5674048662185669, "learning_rate": 0.00012446199388882047, "loss": 1.0289, "step": 4550 }, { "epoch": 0.8457535774019699, "grad_norm": 0.5538954734802246, "learning_rate": 0.000124433635478928, "loss": 1.0743, "step": 4551 }, { "epoch": 0.845939416465341, "grad_norm": 0.6047095060348511, "learning_rate": 0.00012440527497917386, "loss": 1.2751, "step": 4552 }, { "epoch": 0.8461252555287121, "grad_norm": 0.5031073689460754, "learning_rate": 0.00012437691239198379, "loss": 0.9235, "step": 4553 }, { "epoch": 0.8463110945920832, "grad_norm": 0.7293369770050049, "learning_rate": 0.00012434854771978374, "loss": 1.1129, "step": 4554 }, { "epoch": 0.8464969336554544, "grad_norm": 0.7080215215682983, "learning_rate": 0.0001243201809649998, "loss": 1.1998, "step": 4555 }, { "epoch": 0.8466827727188255, "grad_norm": 0.6878696084022522, "learning_rate": 0.00012429181213005817, "loss": 1.146, 
"step": 4556 }, { "epoch": 0.8468686117821966, "grad_norm": 0.6264361143112183, "learning_rate": 0.00012426344121738538, "loss": 1.1465, "step": 4557 }, { "epoch": 0.8470544508455677, "grad_norm": 0.8152109384536743, "learning_rate": 0.00012423506822940804, "loss": 1.2251, "step": 4558 }, { "epoch": 0.8472402899089388, "grad_norm": 0.5891522765159607, "learning_rate": 0.0001242066931685529, "loss": 0.9574, "step": 4559 }, { "epoch": 0.84742612897231, "grad_norm": 0.524802029132843, "learning_rate": 0.00012417831603724698, "loss": 0.9342, "step": 4560 }, { "epoch": 0.8476119680356811, "grad_norm": 0.56324702501297, "learning_rate": 0.00012414993683791746, "loss": 1.0783, "step": 4561 }, { "epoch": 0.8477978070990522, "grad_norm": 0.5322441458702087, "learning_rate": 0.00012412155557299162, "loss": 0.8267, "step": 4562 }, { "epoch": 0.8479836461624234, "grad_norm": 0.6256717443466187, "learning_rate": 0.00012409317224489698, "loss": 1.0611, "step": 4563 }, { "epoch": 0.8481694852257945, "grad_norm": 0.5989834666252136, "learning_rate": 0.00012406478685606128, "loss": 1.1289, "step": 4564 }, { "epoch": 0.8483553242891656, "grad_norm": 0.5230849385261536, "learning_rate": 0.00012403639940891233, "loss": 1.0077, "step": 4565 }, { "epoch": 0.8485411633525367, "grad_norm": 0.6140810251235962, "learning_rate": 0.00012400800990587822, "loss": 0.97, "step": 4566 }, { "epoch": 0.8487270024159078, "grad_norm": 0.5292965769767761, "learning_rate": 0.00012397961834938713, "loss": 0.9602, "step": 4567 }, { "epoch": 0.8489128414792789, "grad_norm": 0.7063047289848328, "learning_rate": 0.00012395122474186742, "loss": 1.0562, "step": 4568 }, { "epoch": 0.84909868054265, "grad_norm": 0.6304416060447693, "learning_rate": 0.0001239228290857477, "loss": 0.9928, "step": 4569 }, { "epoch": 0.8492845196060211, "grad_norm": 0.6376837491989136, "learning_rate": 0.00012389443138345672, "loss": 1.3453, "step": 4570 }, { "epoch": 0.8494703586693924, "grad_norm": 0.5010297298431396, 
"learning_rate": 0.00012386603163742338, "loss": 0.7436, "step": 4571 }, { "epoch": 0.8496561977327635, "grad_norm": 0.5087937116622925, "learning_rate": 0.00012383762985007678, "loss": 1.0635, "step": 4572 }, { "epoch": 0.8498420367961346, "grad_norm": 0.49811092019081116, "learning_rate": 0.00012380922602384616, "loss": 1.0109, "step": 4573 }, { "epoch": 0.8500278758595057, "grad_norm": 0.6603782176971436, "learning_rate": 0.000123780820161161, "loss": 1.0893, "step": 4574 }, { "epoch": 0.8502137149228768, "grad_norm": 0.6798657178878784, "learning_rate": 0.00012375241226445088, "loss": 1.2919, "step": 4575 }, { "epoch": 0.8503995539862479, "grad_norm": 0.6117194294929504, "learning_rate": 0.0001237240023361456, "loss": 1.1965, "step": 4576 }, { "epoch": 0.850585393049619, "grad_norm": 0.621590793132782, "learning_rate": 0.00012369559037867512, "loss": 1.0154, "step": 4577 }, { "epoch": 0.8507712321129901, "grad_norm": 0.6007041335105896, "learning_rate": 0.0001236671763944696, "loss": 1.2543, "step": 4578 }, { "epoch": 0.8509570711763613, "grad_norm": 0.5366314649581909, "learning_rate": 0.00012363876038595933, "loss": 1.0525, "step": 4579 }, { "epoch": 0.8511429102397324, "grad_norm": 0.5456633567810059, "learning_rate": 0.00012361034235557482, "loss": 1.0457, "step": 4580 }, { "epoch": 0.8513287493031035, "grad_norm": 0.5707237720489502, "learning_rate": 0.00012358192230574666, "loss": 1.1579, "step": 4581 }, { "epoch": 0.8515145883664746, "grad_norm": 2.736707925796509, "learning_rate": 0.00012355350023890573, "loss": 2.0639, "step": 4582 }, { "epoch": 0.8517004274298458, "grad_norm": 0.7945477962493896, "learning_rate": 0.00012352507615748302, "loss": 0.9384, "step": 4583 }, { "epoch": 0.8518862664932169, "grad_norm": 0.5095336437225342, "learning_rate": 0.00012349665006390967, "loss": 1.0477, "step": 4584 }, { "epoch": 0.852072105556588, "grad_norm": 0.6436726450920105, "learning_rate": 0.0001234682219606171, "loss": 1.2062, "step": 4585 }, { "epoch": 
0.8522579446199591, "grad_norm": 0.625449001789093, "learning_rate": 0.00012343979185003678, "loss": 1.1338, "step": 4586 }, { "epoch": 0.8524437836833302, "grad_norm": 0.6556345224380493, "learning_rate": 0.0001234113597346004, "loss": 1.0076, "step": 4587 }, { "epoch": 0.8526296227467014, "grad_norm": 0.6119154095649719, "learning_rate": 0.0001233829256167398, "loss": 1.1119, "step": 4588 }, { "epoch": 0.8528154618100725, "grad_norm": 0.7122162580490112, "learning_rate": 0.00012335448949888706, "loss": 1.0975, "step": 4589 }, { "epoch": 0.8530013008734436, "grad_norm": 0.472249835729599, "learning_rate": 0.00012332605138347434, "loss": 0.7964, "step": 4590 }, { "epoch": 0.8531871399368147, "grad_norm": 0.5455360412597656, "learning_rate": 0.00012329761127293404, "loss": 1.0847, "step": 4591 }, { "epoch": 0.8533729790001858, "grad_norm": 0.5112693309783936, "learning_rate": 0.0001232691691696987, "loss": 0.9491, "step": 4592 }, { "epoch": 0.8535588180635569, "grad_norm": 0.5319565534591675, "learning_rate": 0.000123240725076201, "loss": 1.2175, "step": 4593 }, { "epoch": 0.853744657126928, "grad_norm": 0.5167549252510071, "learning_rate": 0.0001232122789948739, "loss": 0.7037, "step": 4594 }, { "epoch": 0.8539304961902991, "grad_norm": 0.6013134121894836, "learning_rate": 0.0001231838309281504, "loss": 1.1435, "step": 4595 }, { "epoch": 0.8541163352536704, "grad_norm": 0.5791240334510803, "learning_rate": 0.00012315538087846373, "loss": 1.2345, "step": 4596 }, { "epoch": 0.8543021743170415, "grad_norm": 0.5475298166275024, "learning_rate": 0.00012312692884824725, "loss": 1.1015, "step": 4597 }, { "epoch": 0.8544880133804126, "grad_norm": 0.6367332935333252, "learning_rate": 0.0001230984748399346, "loss": 0.9356, "step": 4598 }, { "epoch": 0.8546738524437837, "grad_norm": 0.6773303151130676, "learning_rate": 0.00012307001885595946, "loss": 1.1413, "step": 4599 }, { "epoch": 0.8548596915071548, "grad_norm": 0.4587186276912689, "learning_rate": 
0.00012304156089875575, "loss": 0.8376, "step": 4600 }, { "epoch": 0.8550455305705259, "grad_norm": 0.6566428542137146, "learning_rate": 0.00012301310097075755, "loss": 1.0847, "step": 4601 }, { "epoch": 0.855231369633897, "grad_norm": 0.5402763485908508, "learning_rate": 0.00012298463907439909, "loss": 0.841, "step": 4602 }, { "epoch": 0.8554172086972681, "grad_norm": 0.6253241896629333, "learning_rate": 0.00012295617521211478, "loss": 0.9708, "step": 4603 }, { "epoch": 0.8556030477606393, "grad_norm": 0.5683613419532776, "learning_rate": 0.00012292770938633916, "loss": 0.928, "step": 4604 }, { "epoch": 0.8557888868240104, "grad_norm": 0.5675630569458008, "learning_rate": 0.000122899241599507, "loss": 1.0236, "step": 4605 }, { "epoch": 0.8559747258873815, "grad_norm": 0.5932213068008423, "learning_rate": 0.00012287077185405321, "loss": 1.2021, "step": 4606 }, { "epoch": 0.8561605649507527, "grad_norm": 0.7319698929786682, "learning_rate": 0.0001228423001524129, "loss": 0.9715, "step": 4607 }, { "epoch": 0.8563464040141238, "grad_norm": 0.6660357713699341, "learning_rate": 0.00012281382649702129, "loss": 1.1203, "step": 4608 }, { "epoch": 0.8565322430774949, "grad_norm": 0.6667255163192749, "learning_rate": 0.00012278535089031378, "loss": 1.2861, "step": 4609 }, { "epoch": 0.856718082140866, "grad_norm": 0.707119345664978, "learning_rate": 0.00012275687333472598, "loss": 1.2471, "step": 4610 }, { "epoch": 0.8569039212042371, "grad_norm": 0.6665619015693665, "learning_rate": 0.00012272839383269357, "loss": 1.1468, "step": 4611 }, { "epoch": 0.8570897602676083, "grad_norm": 0.6399442553520203, "learning_rate": 0.00012269991238665254, "loss": 1.0455, "step": 4612 }, { "epoch": 0.8572755993309794, "grad_norm": 0.607695996761322, "learning_rate": 0.00012267142899903893, "loss": 1.1208, "step": 4613 }, { "epoch": 0.8574614383943505, "grad_norm": 0.6243822574615479, "learning_rate": 0.00012264294367228903, "loss": 1.0788, "step": 4614 }, { "epoch": 0.8576472774577216, 
"grad_norm": 0.5382381677627563, "learning_rate": 0.00012261445640883922, "loss": 0.8672, "step": 4615 }, { "epoch": 0.8578331165210927, "grad_norm": 0.5765889883041382, "learning_rate": 0.00012258596721112608, "loss": 0.9701, "step": 4616 }, { "epoch": 0.8580189555844638, "grad_norm": 0.577860951423645, "learning_rate": 0.00012255747608158634, "loss": 1.1989, "step": 4617 }, { "epoch": 0.858204794647835, "grad_norm": 0.7148776054382324, "learning_rate": 0.00012252898302265693, "loss": 1.0507, "step": 4618 }, { "epoch": 0.858390633711206, "grad_norm": 0.6425639390945435, "learning_rate": 0.00012250048803677486, "loss": 0.9726, "step": 4619 }, { "epoch": 0.8585764727745773, "grad_norm": 0.5930686593055725, "learning_rate": 0.00012247199112637745, "loss": 1.0762, "step": 4620 }, { "epoch": 0.8587623118379484, "grad_norm": 0.5590258240699768, "learning_rate": 0.00012244349229390206, "loss": 1.0442, "step": 4621 }, { "epoch": 0.8589481509013195, "grad_norm": 0.5646161437034607, "learning_rate": 0.00012241499154178628, "loss": 1.048, "step": 4622 }, { "epoch": 0.8591339899646906, "grad_norm": 0.674748957157135, "learning_rate": 0.00012238648887246783, "loss": 1.1122, "step": 4623 }, { "epoch": 0.8593198290280617, "grad_norm": 0.811205267906189, "learning_rate": 0.0001223579842883846, "loss": 1.111, "step": 4624 }, { "epoch": 0.8595056680914328, "grad_norm": 0.6467451453208923, "learning_rate": 0.00012232947779197467, "loss": 1.0201, "step": 4625 }, { "epoch": 0.8596915071548039, "grad_norm": 0.5541864633560181, "learning_rate": 0.00012230096938567621, "loss": 1.0174, "step": 4626 }, { "epoch": 0.859877346218175, "grad_norm": 0.5226763486862183, "learning_rate": 0.00012227245907192765, "loss": 0.7749, "step": 4627 }, { "epoch": 0.8600631852815461, "grad_norm": 0.5585047602653503, "learning_rate": 0.00012224394685316753, "loss": 1.1309, "step": 4628 }, { "epoch": 0.8602490243449173, "grad_norm": 0.5343335866928101, "learning_rate": 0.00012221543273183459, "loss": 0.9149, 
"step": 4629 }, { "epoch": 0.8604348634082885, "grad_norm": 0.6110146045684814, "learning_rate": 0.00012218691671036765, "loss": 1.2287, "step": 4630 }, { "epoch": 0.8606207024716596, "grad_norm": 0.6890124678611755, "learning_rate": 0.00012215839879120577, "loss": 1.1387, "step": 4631 }, { "epoch": 0.8608065415350307, "grad_norm": 0.511184573173523, "learning_rate": 0.00012212987897678815, "loss": 0.9056, "step": 4632 }, { "epoch": 0.8609923805984018, "grad_norm": 0.6041712760925293, "learning_rate": 0.00012210135726955414, "loss": 1.0268, "step": 4633 }, { "epoch": 0.8611782196617729, "grad_norm": 0.7157412171363831, "learning_rate": 0.00012207283367194325, "loss": 1.3321, "step": 4634 }, { "epoch": 0.861364058725144, "grad_norm": 0.6423292756080627, "learning_rate": 0.00012204430818639525, "loss": 1.1673, "step": 4635 }, { "epoch": 0.8615498977885151, "grad_norm": 0.6783196926116943, "learning_rate": 0.00012201578081534992, "loss": 0.8651, "step": 4636 }, { "epoch": 0.8617357368518863, "grad_norm": 0.6569127440452576, "learning_rate": 0.00012198725156124727, "loss": 1.0608, "step": 4637 }, { "epoch": 0.8619215759152574, "grad_norm": 0.5302185416221619, "learning_rate": 0.00012195872042652745, "loss": 0.7633, "step": 4638 }, { "epoch": 0.8621074149786285, "grad_norm": 0.5998936295509338, "learning_rate": 0.00012193018741363084, "loss": 1.1865, "step": 4639 }, { "epoch": 0.8622932540419996, "grad_norm": 0.5633741617202759, "learning_rate": 0.00012190165252499787, "loss": 1.2283, "step": 4640 }, { "epoch": 0.8624790931053707, "grad_norm": 0.5973196625709534, "learning_rate": 0.00012187311576306926, "loss": 1.1307, "step": 4641 }, { "epoch": 0.8626649321687418, "grad_norm": 0.6969146132469177, "learning_rate": 0.00012184457713028575, "loss": 1.0946, "step": 4642 }, { "epoch": 0.862850771232113, "grad_norm": 0.5954970717430115, "learning_rate": 0.00012181603662908835, "loss": 1.2309, "step": 4643 }, { "epoch": 0.8630366102954841, "grad_norm": 0.5562779307365417, 
"learning_rate": 0.00012178749426191822, "loss": 1.0062, "step": 4644 }, { "epoch": 0.8632224493588553, "grad_norm": 0.5769143104553223, "learning_rate": 0.0001217589500312166, "loss": 1.0062, "step": 4645 }, { "epoch": 0.8634082884222264, "grad_norm": 0.5674699544906616, "learning_rate": 0.00012173040393942496, "loss": 1.0515, "step": 4646 }, { "epoch": 0.8635941274855975, "grad_norm": 0.5838675498962402, "learning_rate": 0.00012170185598898489, "loss": 1.2049, "step": 4647 }, { "epoch": 0.8637799665489686, "grad_norm": 0.6601033210754395, "learning_rate": 0.00012167330618233819, "loss": 0.896, "step": 4648 }, { "epoch": 0.8639658056123397, "grad_norm": 0.6087871789932251, "learning_rate": 0.00012164475452192674, "loss": 1.0509, "step": 4649 }, { "epoch": 0.8641516446757108, "grad_norm": 0.6676885485649109, "learning_rate": 0.00012161620101019269, "loss": 0.9619, "step": 4650 }, { "epoch": 0.8643374837390819, "grad_norm": 0.7278332710266113, "learning_rate": 0.00012158764564957823, "loss": 1.2801, "step": 4651 }, { "epoch": 0.864523322802453, "grad_norm": 0.6235469579696655, "learning_rate": 0.0001215590884425258, "loss": 1.1447, "step": 4652 }, { "epoch": 0.8647091618658242, "grad_norm": 0.6712271571159363, "learning_rate": 0.00012153052939147793, "loss": 0.9625, "step": 4653 }, { "epoch": 0.8648950009291954, "grad_norm": 0.6502339839935303, "learning_rate": 0.00012150196849887734, "loss": 1.2098, "step": 4654 }, { "epoch": 0.8650808399925665, "grad_norm": 0.6269952058792114, "learning_rate": 0.00012147340576716693, "loss": 1.0694, "step": 4655 }, { "epoch": 0.8652666790559376, "grad_norm": 0.6714789271354675, "learning_rate": 0.00012144484119878967, "loss": 1.0536, "step": 4656 }, { "epoch": 0.8654525181193087, "grad_norm": 0.602345883846283, "learning_rate": 0.00012141627479618885, "loss": 0.8036, "step": 4657 }, { "epoch": 0.8656383571826798, "grad_norm": 0.5539709329605103, "learning_rate": 0.00012138770656180774, "loss": 1.0422, "step": 4658 }, { "epoch": 
0.8658241962460509, "grad_norm": 0.5680656433105469, "learning_rate": 0.00012135913649808985, "loss": 1.0184, "step": 4659 }, { "epoch": 0.866010035309422, "grad_norm": 0.6097427606582642, "learning_rate": 0.00012133056460747887, "loss": 0.7894, "step": 4660 }, { "epoch": 0.8661958743727931, "grad_norm": 0.4813399612903595, "learning_rate": 0.00012130199089241856, "loss": 0.8799, "step": 4661 }, { "epoch": 0.8663817134361643, "grad_norm": 0.6142000555992126, "learning_rate": 0.00012127341535535297, "loss": 1.0662, "step": 4662 }, { "epoch": 0.8665675524995354, "grad_norm": 0.6486777663230896, "learning_rate": 0.00012124483799872619, "loss": 0.9835, "step": 4663 }, { "epoch": 0.8667533915629065, "grad_norm": 0.6400505900382996, "learning_rate": 0.00012121625882498247, "loss": 1.1977, "step": 4664 }, { "epoch": 0.8669392306262776, "grad_norm": 0.6632579565048218, "learning_rate": 0.00012118767783656629, "loss": 1.1879, "step": 4665 }, { "epoch": 0.8671250696896488, "grad_norm": 0.4944020211696625, "learning_rate": 0.00012115909503592224, "loss": 0.8731, "step": 4666 }, { "epoch": 0.8673109087530199, "grad_norm": 0.5768524408340454, "learning_rate": 0.00012113051042549506, "loss": 1.1558, "step": 4667 }, { "epoch": 0.867496747816391, "grad_norm": 0.647330105304718, "learning_rate": 0.00012110192400772964, "loss": 1.038, "step": 4668 }, { "epoch": 0.8676825868797621, "grad_norm": 0.6742308139801025, "learning_rate": 0.00012107333578507108, "loss": 1.2654, "step": 4669 }, { "epoch": 0.8678684259431333, "grad_norm": 0.5209482312202454, "learning_rate": 0.00012104474575996454, "loss": 1.0273, "step": 4670 }, { "epoch": 0.8680542650065044, "grad_norm": 0.6833703517913818, "learning_rate": 0.00012101615393485539, "loss": 1.0157, "step": 4671 }, { "epoch": 0.8682401040698755, "grad_norm": 0.583544909954071, "learning_rate": 0.00012098756031218921, "loss": 0.957, "step": 4672 }, { "epoch": 0.8684259431332466, "grad_norm": 0.5707135796546936, "learning_rate": 
0.00012095896489441163, "loss": 1.0744, "step": 4673 }, { "epoch": 0.8686117821966177, "grad_norm": 0.6191301941871643, "learning_rate": 0.00012093036768396849, "loss": 1.2617, "step": 4674 }, { "epoch": 0.8687976212599888, "grad_norm": 3.5594120025634766, "learning_rate": 0.00012090176868330574, "loss": 1.8595, "step": 4675 }, { "epoch": 0.8689834603233599, "grad_norm": 0.5981330871582031, "learning_rate": 0.00012087316789486958, "loss": 1.1521, "step": 4676 }, { "epoch": 0.869169299386731, "grad_norm": 0.5382730960845947, "learning_rate": 0.00012084456532110624, "loss": 0.8385, "step": 4677 }, { "epoch": 0.8693551384501023, "grad_norm": 0.599388599395752, "learning_rate": 0.00012081596096446215, "loss": 1.1152, "step": 4678 }, { "epoch": 0.8695409775134734, "grad_norm": 0.5467658042907715, "learning_rate": 0.00012078735482738395, "loss": 1.1479, "step": 4679 }, { "epoch": 0.8697268165768445, "grad_norm": 0.5545910000801086, "learning_rate": 0.00012075874691231837, "loss": 1.158, "step": 4680 }, { "epoch": 0.8699126556402156, "grad_norm": 0.5862640738487244, "learning_rate": 0.00012073013722171227, "loss": 1.2503, "step": 4681 }, { "epoch": 0.8700984947035867, "grad_norm": 0.6473658084869385, "learning_rate": 0.00012070152575801276, "loss": 0.9587, "step": 4682 }, { "epoch": 0.8702843337669578, "grad_norm": 0.48189878463745117, "learning_rate": 0.00012067291252366702, "loss": 0.9056, "step": 4683 }, { "epoch": 0.8704701728303289, "grad_norm": 0.6109098196029663, "learning_rate": 0.00012064429752112238, "loss": 1.1096, "step": 4684 }, { "epoch": 0.8706560118937, "grad_norm": 1.7112163305282593, "learning_rate": 0.00012061568075282634, "loss": 1.7075, "step": 4685 }, { "epoch": 0.8708418509570712, "grad_norm": 0.5883646011352539, "learning_rate": 0.00012058706222122657, "loss": 0.9264, "step": 4686 }, { "epoch": 0.8710276900204423, "grad_norm": 0.5947024822235107, "learning_rate": 0.0001205584419287709, "loss": 0.9626, "step": 4687 }, { "epoch": 0.8712135290838134, 
"grad_norm": 0.7193747758865356, "learning_rate": 0.00012052981987790724, "loss": 1.4634, "step": 4688 }, { "epoch": 0.8713993681471845, "grad_norm": 0.5391495823860168, "learning_rate": 0.00012050119607108372, "loss": 1.0638, "step": 4689 }, { "epoch": 0.8715852072105557, "grad_norm": 0.5702479481697083, "learning_rate": 0.00012047257051074861, "loss": 1.0789, "step": 4690 }, { "epoch": 0.8717710462739268, "grad_norm": 0.6023498177528381, "learning_rate": 0.00012044394319935026, "loss": 1.0356, "step": 4691 }, { "epoch": 0.8719568853372979, "grad_norm": 0.6121519804000854, "learning_rate": 0.00012041531413933728, "loss": 1.028, "step": 4692 }, { "epoch": 0.872142724400669, "grad_norm": 0.5752167701721191, "learning_rate": 0.00012038668333315835, "loss": 1.0593, "step": 4693 }, { "epoch": 0.8723285634640402, "grad_norm": 0.5538478493690491, "learning_rate": 0.00012035805078326235, "loss": 1.1715, "step": 4694 }, { "epoch": 0.8725144025274113, "grad_norm": 0.5828766822814941, "learning_rate": 0.00012032941649209824, "loss": 1.1091, "step": 4695 }, { "epoch": 0.8727002415907824, "grad_norm": 0.7071311473846436, "learning_rate": 0.00012030078046211523, "loss": 0.8833, "step": 4696 }, { "epoch": 0.8728860806541535, "grad_norm": 0.5653672218322754, "learning_rate": 0.00012027214269576258, "loss": 0.9883, "step": 4697 }, { "epoch": 0.8730719197175246, "grad_norm": 0.6679391860961914, "learning_rate": 0.00012024350319548976, "loss": 1.2291, "step": 4698 }, { "epoch": 0.8732577587808957, "grad_norm": 0.6757986545562744, "learning_rate": 0.00012021486196374636, "loss": 1.0122, "step": 4699 }, { "epoch": 0.8734435978442668, "grad_norm": 0.5338380932807922, "learning_rate": 0.00012018621900298211, "loss": 0.9106, "step": 4700 }, { "epoch": 0.873629436907638, "grad_norm": 0.5537553429603577, "learning_rate": 0.00012015757431564691, "loss": 1.0227, "step": 4701 }, { "epoch": 0.873815275971009, "grad_norm": 0.5925634503364563, "learning_rate": 0.00012012892790419083, "loss": 
0.861, "step": 4702 }, { "epoch": 0.8740011150343803, "grad_norm": 0.7933574914932251, "learning_rate": 0.00012010027977106404, "loss": 1.3481, "step": 4703 }, { "epoch": 0.8741869540977514, "grad_norm": 0.6480244398117065, "learning_rate": 0.0001200716299187169, "loss": 1.0297, "step": 4704 }, { "epoch": 0.8743727931611225, "grad_norm": 0.5648460388183594, "learning_rate": 0.00012004297834959986, "loss": 1.3159, "step": 4705 }, { "epoch": 0.8745586322244936, "grad_norm": 0.6062881350517273, "learning_rate": 0.00012001432506616354, "loss": 0.954, "step": 4706 }, { "epoch": 0.8747444712878647, "grad_norm": 0.5937121510505676, "learning_rate": 0.00011998567007085874, "loss": 1.0219, "step": 4707 }, { "epoch": 0.8749303103512358, "grad_norm": 0.6274353265762329, "learning_rate": 0.00011995701336613643, "loss": 1.0806, "step": 4708 }, { "epoch": 0.8751161494146069, "grad_norm": 0.6611014008522034, "learning_rate": 0.00011992835495444758, "loss": 1.177, "step": 4709 }, { "epoch": 0.875301988477978, "grad_norm": 0.7381565570831299, "learning_rate": 0.00011989969483824349, "loss": 1.0864, "step": 4710 }, { "epoch": 0.8754878275413492, "grad_norm": 0.7044957876205444, "learning_rate": 0.00011987103301997548, "loss": 1.16, "step": 4711 }, { "epoch": 0.8756736666047203, "grad_norm": 0.617867112159729, "learning_rate": 0.00011984236950209508, "loss": 1.2796, "step": 4712 }, { "epoch": 0.8758595056680915, "grad_norm": 0.7515525817871094, "learning_rate": 0.00011981370428705394, "loss": 1.1917, "step": 4713 }, { "epoch": 0.8760453447314626, "grad_norm": 0.5283318161964417, "learning_rate": 0.00011978503737730384, "loss": 0.9867, "step": 4714 }, { "epoch": 0.8762311837948337, "grad_norm": 0.6197617650032043, "learning_rate": 0.00011975636877529673, "loss": 1.0821, "step": 4715 }, { "epoch": 0.8764170228582048, "grad_norm": 0.547167956829071, "learning_rate": 0.00011972769848348466, "loss": 1.1527, "step": 4716 }, { "epoch": 0.8766028619215759, "grad_norm": 0.614820122718811, 
"learning_rate": 0.00011969902650431995, "loss": 1.0837, "step": 4717 }, { "epoch": 0.876788700984947, "grad_norm": 0.5309334993362427, "learning_rate": 0.00011967035284025492, "loss": 0.8339, "step": 4718 }, { "epoch": 0.8769745400483182, "grad_norm": 0.6812418103218079, "learning_rate": 0.00011964167749374211, "loss": 1.1013, "step": 4719 }, { "epoch": 0.8771603791116893, "grad_norm": 0.5131886005401611, "learning_rate": 0.00011961300046723416, "loss": 0.961, "step": 4720 }, { "epoch": 0.8773462181750604, "grad_norm": 0.5880168676376343, "learning_rate": 0.0001195843217631839, "loss": 1.1434, "step": 4721 }, { "epoch": 0.8775320572384315, "grad_norm": 0.6584733724594116, "learning_rate": 0.00011955564138404425, "loss": 0.9723, "step": 4722 }, { "epoch": 0.8777178963018026, "grad_norm": 0.592173159122467, "learning_rate": 0.00011952695933226832, "loss": 1.375, "step": 4723 }, { "epoch": 0.8779037353651737, "grad_norm": 0.6685372591018677, "learning_rate": 0.00011949827561030937, "loss": 0.9787, "step": 4724 }, { "epoch": 0.8780895744285449, "grad_norm": 0.6100190281867981, "learning_rate": 0.00011946959022062078, "loss": 1.146, "step": 4725 }, { "epoch": 0.878275413491916, "grad_norm": 0.5250088572502136, "learning_rate": 0.00011944090316565605, "loss": 0.9508, "step": 4726 }, { "epoch": 0.8784612525552872, "grad_norm": 0.4799655079841614, "learning_rate": 0.00011941221444786887, "loss": 0.8568, "step": 4727 }, { "epoch": 0.8786470916186583, "grad_norm": 0.6235849857330322, "learning_rate": 0.00011938352406971303, "loss": 1.1144, "step": 4728 }, { "epoch": 0.8788329306820294, "grad_norm": 0.6182804107666016, "learning_rate": 0.00011935483203364243, "loss": 1.1426, "step": 4729 }, { "epoch": 0.8790187697454005, "grad_norm": 0.601972758769989, "learning_rate": 0.00011932613834211123, "loss": 1.0688, "step": 4730 }, { "epoch": 0.8792046088087716, "grad_norm": 0.6925641894340515, "learning_rate": 0.00011929744299757369, "loss": 0.9577, "step": 4731 }, { "epoch": 
0.8793904478721427, "grad_norm": 0.6421496272087097, "learning_rate": 0.00011926874600248415, "loss": 1.1401, "step": 4732 }, { "epoch": 0.8795762869355138, "grad_norm": 0.5793421268463135, "learning_rate": 0.00011924004735929708, "loss": 1.1798, "step": 4733 }, { "epoch": 0.8797621259988849, "grad_norm": 0.5658807158470154, "learning_rate": 0.0001192113470704672, "loss": 0.894, "step": 4734 }, { "epoch": 0.879947965062256, "grad_norm": 0.6436983942985535, "learning_rate": 0.00011918264513844928, "loss": 1.3174, "step": 4735 }, { "epoch": 0.8801338041256273, "grad_norm": 0.6653681993484497, "learning_rate": 0.00011915394156569824, "loss": 1.2076, "step": 4736 }, { "epoch": 0.8803196431889984, "grad_norm": 0.6940290927886963, "learning_rate": 0.00011912523635466922, "loss": 1.2932, "step": 4737 }, { "epoch": 0.8805054822523695, "grad_norm": 0.5589834451675415, "learning_rate": 0.00011909652950781735, "loss": 1.0923, "step": 4738 }, { "epoch": 0.8806913213157406, "grad_norm": 0.7990158200263977, "learning_rate": 0.00011906782102759808, "loss": 0.9076, "step": 4739 }, { "epoch": 0.8808771603791117, "grad_norm": 0.6897647380828857, "learning_rate": 0.00011903911091646684, "loss": 1.1487, "step": 4740 }, { "epoch": 0.8810629994424828, "grad_norm": 0.6083943247795105, "learning_rate": 0.00011901039917687932, "loss": 1.106, "step": 4741 }, { "epoch": 0.8812488385058539, "grad_norm": 0.7685713171958923, "learning_rate": 0.00011898168581129125, "loss": 1.1485, "step": 4742 }, { "epoch": 0.881434677569225, "grad_norm": 0.6682976484298706, "learning_rate": 0.0001189529708221586, "loss": 1.1954, "step": 4743 }, { "epoch": 0.8816205166325962, "grad_norm": 0.6300533413887024, "learning_rate": 0.00011892425421193734, "loss": 0.9926, "step": 4744 }, { "epoch": 0.8818063556959673, "grad_norm": 0.6247344613075256, "learning_rate": 0.00011889553598308374, "loss": 1.1316, "step": 4745 }, { "epoch": 0.8819921947593384, "grad_norm": 0.6410258412361145, "learning_rate": 
0.00011886681613805411, "loss": 1.2052, "step": 4746 }, { "epoch": 0.8821780338227095, "grad_norm": 0.7149096727371216, "learning_rate": 0.00011883809467930494, "loss": 0.9866, "step": 4747 }, { "epoch": 0.8823638728860806, "grad_norm": 0.5473355650901794, "learning_rate": 0.0001188093716092928, "loss": 0.9347, "step": 4748 }, { "epoch": 0.8825497119494518, "grad_norm": 0.7847073078155518, "learning_rate": 0.00011878064693047447, "loss": 1.3707, "step": 4749 }, { "epoch": 0.8827355510128229, "grad_norm": 0.6489022374153137, "learning_rate": 0.0001187519206453068, "loss": 1.1489, "step": 4750 }, { "epoch": 0.882921390076194, "grad_norm": 0.6962729096412659, "learning_rate": 0.00011872319275624682, "loss": 1.0444, "step": 4751 }, { "epoch": 0.8831072291395652, "grad_norm": 0.5032536387443542, "learning_rate": 0.00011869446326575169, "loss": 0.9388, "step": 4752 }, { "epoch": 0.8832930682029363, "grad_norm": 0.5279998779296875, "learning_rate": 0.00011866573217627872, "loss": 1.1328, "step": 4753 }, { "epoch": 0.8834789072663074, "grad_norm": 0.6550498008728027, "learning_rate": 0.00011863699949028534, "loss": 1.0532, "step": 4754 }, { "epoch": 0.8836647463296785, "grad_norm": 0.5503681898117065, "learning_rate": 0.00011860826521022914, "loss": 1.1704, "step": 4755 }, { "epoch": 0.8838505853930496, "grad_norm": 0.7000329494476318, "learning_rate": 0.0001185795293385678, "loss": 0.8628, "step": 4756 }, { "epoch": 0.8840364244564207, "grad_norm": 0.597890317440033, "learning_rate": 0.00011855079187775912, "loss": 1.1687, "step": 4757 }, { "epoch": 0.8842222635197918, "grad_norm": 0.6725625395774841, "learning_rate": 0.00011852205283026115, "loss": 1.0957, "step": 4758 }, { "epoch": 0.8844081025831629, "grad_norm": 0.5943846106529236, "learning_rate": 0.00011849331219853195, "loss": 1.0617, "step": 4759 }, { "epoch": 0.8845939416465342, "grad_norm": 0.6239001750946045, "learning_rate": 0.00011846456998502977, "loss": 1.1119, "step": 4760 }, { "epoch": 0.8847797807099053, 
"grad_norm": 2.7174880504608154, "learning_rate": 0.00011843582619221305, "loss": 2.1165, "step": 4761 }, { "epoch": 0.8849656197732764, "grad_norm": 0.5697281360626221, "learning_rate": 0.00011840708082254026, "loss": 1.1697, "step": 4762 }, { "epoch": 0.8851514588366475, "grad_norm": 0.6207005977630615, "learning_rate": 0.00011837833387847006, "loss": 1.0138, "step": 4763 }, { "epoch": 0.8853372979000186, "grad_norm": 0.83244788646698, "learning_rate": 0.00011834958536246126, "loss": 1.237, "step": 4764 }, { "epoch": 0.8855231369633897, "grad_norm": 0.6972323060035706, "learning_rate": 0.00011832083527697273, "loss": 1.122, "step": 4765 }, { "epoch": 0.8857089760267608, "grad_norm": 0.5799198150634766, "learning_rate": 0.00011829208362446358, "loss": 0.9897, "step": 4766 }, { "epoch": 0.8858948150901319, "grad_norm": 0.6393404603004456, "learning_rate": 0.00011826333040739296, "loss": 0.9211, "step": 4767 }, { "epoch": 0.886080654153503, "grad_norm": 0.5956047773361206, "learning_rate": 0.00011823457562822027, "loss": 1.0445, "step": 4768 }, { "epoch": 0.8862664932168742, "grad_norm": 0.6870486736297607, "learning_rate": 0.00011820581928940488, "loss": 1.0243, "step": 4769 }, { "epoch": 0.8864523322802453, "grad_norm": 0.7557746171951294, "learning_rate": 0.00011817706139340643, "loss": 1.0572, "step": 4770 }, { "epoch": 0.8866381713436164, "grad_norm": 0.652820348739624, "learning_rate": 0.00011814830194268465, "loss": 1.0466, "step": 4771 }, { "epoch": 0.8868240104069876, "grad_norm": 0.6921366453170776, "learning_rate": 0.00011811954093969935, "loss": 1.1726, "step": 4772 }, { "epoch": 0.8870098494703587, "grad_norm": 0.6798288226127625, "learning_rate": 0.00011809077838691057, "loss": 1.1054, "step": 4773 }, { "epoch": 0.8871956885337298, "grad_norm": 0.5644552111625671, "learning_rate": 0.0001180620142867784, "loss": 1.0747, "step": 4774 }, { "epoch": 0.8873815275971009, "grad_norm": 0.7290164828300476, "learning_rate": 0.0001180332486417631, "loss": 1.4222, 
"step": 4775 }, { "epoch": 0.887567366660472, "grad_norm": 0.5924285650253296, "learning_rate": 0.0001180044814543251, "loss": 1.1481, "step": 4776 }, { "epoch": 0.8877532057238432, "grad_norm": 0.5664063096046448, "learning_rate": 0.00011797571272692489, "loss": 1.0801, "step": 4777 }, { "epoch": 0.8879390447872143, "grad_norm": 0.620488166809082, "learning_rate": 0.00011794694246202311, "loss": 0.9766, "step": 4778 }, { "epoch": 0.8881248838505854, "grad_norm": 0.6897662281990051, "learning_rate": 0.00011791817066208053, "loss": 1.0818, "step": 4779 }, { "epoch": 0.8883107229139565, "grad_norm": 0.6509714126586914, "learning_rate": 0.0001178893973295581, "loss": 1.0346, "step": 4780 }, { "epoch": 0.8884965619773276, "grad_norm": 0.615665853023529, "learning_rate": 0.00011786062246691685, "loss": 1.0627, "step": 4781 }, { "epoch": 0.8886824010406987, "grad_norm": 0.5649377107620239, "learning_rate": 0.00011783184607661799, "loss": 0.8559, "step": 4782 }, { "epoch": 0.8888682401040698, "grad_norm": 0.6220218539237976, "learning_rate": 0.00011780306816112277, "loss": 1.0661, "step": 4783 }, { "epoch": 0.889054079167441, "grad_norm": 0.641806960105896, "learning_rate": 0.00011777428872289267, "loss": 1.0956, "step": 4784 }, { "epoch": 0.8892399182308122, "grad_norm": 0.6103681921958923, "learning_rate": 0.00011774550776438922, "loss": 1.1282, "step": 4785 }, { "epoch": 0.8894257572941833, "grad_norm": 0.6140033602714539, "learning_rate": 0.00011771672528807414, "loss": 1.0339, "step": 4786 }, { "epoch": 0.8896115963575544, "grad_norm": 0.6332442164421082, "learning_rate": 0.00011768794129640925, "loss": 1.1501, "step": 4787 }, { "epoch": 0.8897974354209255, "grad_norm": 0.6983381509780884, "learning_rate": 0.00011765915579185654, "loss": 1.1905, "step": 4788 }, { "epoch": 0.8899832744842966, "grad_norm": 0.7200410962104797, "learning_rate": 0.00011763036877687804, "loss": 1.4506, "step": 4789 }, { "epoch": 0.8901691135476677, "grad_norm": 0.6325451731681824, 
"learning_rate": 0.00011760158025393599, "loss": 1.0697, "step": 4790 }, { "epoch": 0.8903549526110388, "grad_norm": 0.5252197980880737, "learning_rate": 0.00011757279022549277, "loss": 0.9902, "step": 4791 }, { "epoch": 0.8905407916744099, "grad_norm": 0.5685787796974182, "learning_rate": 0.0001175439986940108, "loss": 1.1247, "step": 4792 }, { "epoch": 0.8907266307377811, "grad_norm": 0.5685696005821228, "learning_rate": 0.0001175152056619527, "loss": 1.0047, "step": 4793 }, { "epoch": 0.8909124698011522, "grad_norm": 0.5405831933021545, "learning_rate": 0.0001174864111317812, "loss": 1.0133, "step": 4794 }, { "epoch": 0.8910983088645233, "grad_norm": 0.6638238430023193, "learning_rate": 0.0001174576151059592, "loss": 1.0997, "step": 4795 }, { "epoch": 0.8912841479278945, "grad_norm": 0.6298685669898987, "learning_rate": 0.00011742881758694961, "loss": 1.1055, "step": 4796 }, { "epoch": 0.8914699869912656, "grad_norm": 0.6404034495353699, "learning_rate": 0.00011740001857721559, "loss": 1.057, "step": 4797 }, { "epoch": 0.8916558260546367, "grad_norm": 0.547170102596283, "learning_rate": 0.0001173712180792204, "loss": 1.072, "step": 4798 }, { "epoch": 0.8918416651180078, "grad_norm": 0.5974907279014587, "learning_rate": 0.00011734241609542738, "loss": 1.046, "step": 4799 }, { "epoch": 0.8920275041813789, "grad_norm": 0.6565002202987671, "learning_rate": 0.00011731361262829998, "loss": 1.1398, "step": 4800 }, { "epoch": 0.8922133432447501, "grad_norm": 0.6506026387214661, "learning_rate": 0.00011728480768030192, "loss": 1.0015, "step": 4801 }, { "epoch": 0.8923991823081212, "grad_norm": 0.6083167195320129, "learning_rate": 0.0001172560012538969, "loss": 0.9871, "step": 4802 }, { "epoch": 0.8925850213714923, "grad_norm": 0.5694814920425415, "learning_rate": 0.00011722719335154877, "loss": 1.1955, "step": 4803 }, { "epoch": 0.8927708604348634, "grad_norm": 0.5358350872993469, "learning_rate": 0.00011719838397572158, "loss": 1.0018, "step": 4804 }, { "epoch": 
0.8929566994982345, "grad_norm": 0.6407806873321533, "learning_rate": 0.00011716957312887944, "loss": 1.1107, "step": 4805 }, { "epoch": 0.8931425385616056, "grad_norm": 0.6780750155448914, "learning_rate": 0.00011714076081348661, "loss": 1.382, "step": 4806 }, { "epoch": 0.8933283776249767, "grad_norm": 0.5691828727722168, "learning_rate": 0.00011711194703200743, "loss": 1.012, "step": 4807 }, { "epoch": 0.8935142166883479, "grad_norm": 0.6187720894813538, "learning_rate": 0.00011708313178690646, "loss": 1.2116, "step": 4808 }, { "epoch": 0.893700055751719, "grad_norm": 0.5498912930488586, "learning_rate": 0.00011705431508064831, "loss": 0.9507, "step": 4809 }, { "epoch": 0.8938858948150902, "grad_norm": 0.5937941670417786, "learning_rate": 0.00011702549691569773, "loss": 1.0066, "step": 4810 }, { "epoch": 0.8940717338784613, "grad_norm": 3.8109848499298096, "learning_rate": 0.00011699667729451957, "loss": 2.2158, "step": 4811 }, { "epoch": 0.8942575729418324, "grad_norm": 0.7385560870170593, "learning_rate": 0.00011696785621957891, "loss": 1.125, "step": 4812 }, { "epoch": 0.8944434120052035, "grad_norm": 0.5790786743164062, "learning_rate": 0.00011693903369334083, "loss": 0.8837, "step": 4813 }, { "epoch": 0.8946292510685746, "grad_norm": 0.6423563957214355, "learning_rate": 0.00011691020971827056, "loss": 1.0456, "step": 4814 }, { "epoch": 0.8948150901319457, "grad_norm": 0.6066467761993408, "learning_rate": 0.00011688138429683353, "loss": 1.0813, "step": 4815 }, { "epoch": 0.8950009291953168, "grad_norm": 0.7395979166030884, "learning_rate": 0.0001168525574314952, "loss": 1.1017, "step": 4816 }, { "epoch": 0.8951867682586879, "grad_norm": 0.7192192077636719, "learning_rate": 0.00011682372912472121, "loss": 0.8663, "step": 4817 }, { "epoch": 0.8953726073220591, "grad_norm": 0.6796870827674866, "learning_rate": 0.00011679489937897732, "loss": 1.1221, "step": 4818 }, { "epoch": 0.8955584463854303, "grad_norm": 0.6365312337875366, "learning_rate": 
0.00011676606819672938, "loss": 1.2095, "step": 4819 }, { "epoch": 0.8957442854488014, "grad_norm": 0.6555750370025635, "learning_rate": 0.00011673723558044341, "loss": 0.9862, "step": 4820 }, { "epoch": 0.8959301245121725, "grad_norm": 0.6303730010986328, "learning_rate": 0.00011670840153258547, "loss": 1.2718, "step": 4821 }, { "epoch": 0.8961159635755436, "grad_norm": 0.6512117385864258, "learning_rate": 0.00011667956605562185, "loss": 1.0232, "step": 4822 }, { "epoch": 0.8963018026389147, "grad_norm": 0.6685510277748108, "learning_rate": 0.00011665072915201892, "loss": 0.8669, "step": 4823 }, { "epoch": 0.8964876417022858, "grad_norm": 0.5733498930931091, "learning_rate": 0.00011662189082424311, "loss": 0.9483, "step": 4824 }, { "epoch": 0.8966734807656569, "grad_norm": 0.6234797835350037, "learning_rate": 0.00011659305107476104, "loss": 1.0886, "step": 4825 }, { "epoch": 0.8968593198290281, "grad_norm": 0.6397889852523804, "learning_rate": 0.00011656420990603947, "loss": 1.1993, "step": 4826 }, { "epoch": 0.8970451588923992, "grad_norm": 0.5726931691169739, "learning_rate": 0.00011653536732054525, "loss": 1.1539, "step": 4827 }, { "epoch": 0.8972309979557703, "grad_norm": 0.5924124121665955, "learning_rate": 0.00011650652332074527, "loss": 1.0823, "step": 4828 }, { "epoch": 0.8974168370191414, "grad_norm": 0.5201457738876343, "learning_rate": 0.00011647767790910673, "loss": 0.925, "step": 4829 }, { "epoch": 0.8976026760825125, "grad_norm": 0.6730798482894897, "learning_rate": 0.00011644883108809676, "loss": 1.306, "step": 4830 }, { "epoch": 0.8977885151458836, "grad_norm": 0.6653159856796265, "learning_rate": 0.00011641998286018272, "loss": 1.1408, "step": 4831 }, { "epoch": 0.8979743542092548, "grad_norm": 0.5166033506393433, "learning_rate": 0.00011639113322783206, "loss": 0.7456, "step": 4832 }, { "epoch": 0.8981601932726259, "grad_norm": 0.5565785765647888, "learning_rate": 0.00011636228219351235, "loss": 1.1999, "step": 4833 }, { "epoch": 
0.8983460323359971, "grad_norm": 0.5805851817131042, "learning_rate": 0.00011633342975969128, "loss": 1.134, "step": 4834 }, { "epoch": 0.8985318713993682, "grad_norm": 0.5019768476486206, "learning_rate": 0.00011630457592883663, "loss": 0.9231, "step": 4835 }, { "epoch": 0.8987177104627393, "grad_norm": 0.7813116312026978, "learning_rate": 0.00011627572070341641, "loss": 1.2229, "step": 4836 }, { "epoch": 0.8989035495261104, "grad_norm": 0.6496708989143372, "learning_rate": 0.0001162468640858986, "loss": 0.9698, "step": 4837 }, { "epoch": 0.8990893885894815, "grad_norm": 0.5925761461257935, "learning_rate": 0.0001162180060787514, "loss": 1.0714, "step": 4838 }, { "epoch": 0.8992752276528526, "grad_norm": 0.7705389261245728, "learning_rate": 0.0001161891466844431, "loss": 1.2492, "step": 4839 }, { "epoch": 0.8994610667162237, "grad_norm": 2.839663028717041, "learning_rate": 0.00011616028590544206, "loss": 1.8876, "step": 4840 }, { "epoch": 0.8996469057795948, "grad_norm": 0.5334593057632446, "learning_rate": 0.00011613142374421686, "loss": 1.017, "step": 4841 }, { "epoch": 0.8998327448429659, "grad_norm": 0.6567533612251282, "learning_rate": 0.00011610256020323609, "loss": 1.0719, "step": 4842 }, { "epoch": 0.9000185839063372, "grad_norm": 0.6591853499412537, "learning_rate": 0.00011607369528496858, "loss": 1.0512, "step": 4843 }, { "epoch": 0.9002044229697083, "grad_norm": 0.6436012387275696, "learning_rate": 0.00011604482899188318, "loss": 1.2593, "step": 4844 }, { "epoch": 0.9003902620330794, "grad_norm": 0.5583667159080505, "learning_rate": 0.00011601596132644887, "loss": 1.197, "step": 4845 }, { "epoch": 0.9005761010964505, "grad_norm": 0.7685452103614807, "learning_rate": 0.00011598709229113476, "loss": 1.0777, "step": 4846 }, { "epoch": 0.9007619401598216, "grad_norm": 0.560444176197052, "learning_rate": 0.0001159582218884101, "loss": 0.8902, "step": 4847 }, { "epoch": 0.9009477792231927, "grad_norm": 0.5692372918128967, "learning_rate": 
0.00011592935012074421, "loss": 1.107, "step": 4848 }, { "epoch": 0.9011336182865638, "grad_norm": 0.5790915489196777, "learning_rate": 0.0001159004769906066, "loss": 1.0338, "step": 4849 }, { "epoch": 0.9013194573499349, "grad_norm": 0.6954670548439026, "learning_rate": 0.00011587160250046682, "loss": 1.3241, "step": 4850 }, { "epoch": 0.9015052964133061, "grad_norm": 0.7290377020835876, "learning_rate": 0.00011584272665279461, "loss": 1.012, "step": 4851 }, { "epoch": 0.9016911354766772, "grad_norm": 0.5656014084815979, "learning_rate": 0.00011581384945005971, "loss": 0.9303, "step": 4852 }, { "epoch": 0.9018769745400483, "grad_norm": 0.7426737546920776, "learning_rate": 0.0001157849708947321, "loss": 1.1145, "step": 4853 }, { "epoch": 0.9020628136034194, "grad_norm": 0.6836064457893372, "learning_rate": 0.00011575609098928179, "loss": 1.3366, "step": 4854 }, { "epoch": 0.9022486526667906, "grad_norm": 0.5508992671966553, "learning_rate": 0.00011572720973617901, "loss": 0.9922, "step": 4855 }, { "epoch": 0.9024344917301617, "grad_norm": 0.6973763108253479, "learning_rate": 0.00011569832713789395, "loss": 1.1187, "step": 4856 }, { "epoch": 0.9026203307935328, "grad_norm": 0.607064962387085, "learning_rate": 0.00011566944319689705, "loss": 1.0759, "step": 4857 }, { "epoch": 0.9028061698569039, "grad_norm": 0.6120737791061401, "learning_rate": 0.00011564055791565882, "loss": 1.0567, "step": 4858 }, { "epoch": 0.9029920089202751, "grad_norm": 0.5953623056411743, "learning_rate": 0.00011561167129664987, "loss": 1.0374, "step": 4859 }, { "epoch": 0.9031778479836462, "grad_norm": 0.6712900400161743, "learning_rate": 0.00011558278334234094, "loss": 1.3432, "step": 4860 }, { "epoch": 0.9033636870470173, "grad_norm": 2.0790538787841797, "learning_rate": 0.00011555389405520285, "loss": 1.9842, "step": 4861 }, { "epoch": 0.9035495261103884, "grad_norm": 0.6319670081138611, "learning_rate": 0.00011552500343770658, "loss": 1.022, "step": 4862 }, { "epoch": 0.9037353651737595, 
"grad_norm": 0.6280139088630676, "learning_rate": 0.0001154961114923232, "loss": 0.8457, "step": 4863 }, { "epoch": 0.9039212042371306, "grad_norm": 0.5777485370635986, "learning_rate": 0.00011546721822152395, "loss": 0.8473, "step": 4864 }, { "epoch": 0.9041070433005017, "grad_norm": 0.7556338310241699, "learning_rate": 0.0001154383236277801, "loss": 0.9622, "step": 4865 }, { "epoch": 0.9042928823638728, "grad_norm": 0.5529469847679138, "learning_rate": 0.00011540942771356305, "loss": 1.38, "step": 4866 }, { "epoch": 0.9044787214272441, "grad_norm": 0.55649334192276, "learning_rate": 0.00011538053048134434, "loss": 1.2111, "step": 4867 }, { "epoch": 0.9046645604906152, "grad_norm": 0.4702974259853363, "learning_rate": 0.00011535163193359562, "loss": 0.8591, "step": 4868 }, { "epoch": 0.9048503995539863, "grad_norm": 0.673514187335968, "learning_rate": 0.00011532273207278862, "loss": 1.1053, "step": 4869 }, { "epoch": 0.9050362386173574, "grad_norm": 0.47570809721946716, "learning_rate": 0.00011529383090139522, "loss": 0.7493, "step": 4870 }, { "epoch": 0.9052220776807285, "grad_norm": 0.5453622937202454, "learning_rate": 0.00011526492842188745, "loss": 0.9597, "step": 4871 }, { "epoch": 0.9054079167440996, "grad_norm": 0.6614927649497986, "learning_rate": 0.00011523602463673734, "loss": 0.9449, "step": 4872 }, { "epoch": 0.9055937558074707, "grad_norm": 0.5573998093605042, "learning_rate": 0.00011520711954841714, "loss": 0.9135, "step": 4873 }, { "epoch": 0.9057795948708418, "grad_norm": 0.6102240681648254, "learning_rate": 0.00011517821315939912, "loss": 1.0506, "step": 4874 }, { "epoch": 0.905965433934213, "grad_norm": 0.7087007164955139, "learning_rate": 0.00011514930547215573, "loss": 0.9975, "step": 4875 }, { "epoch": 0.9061512729975841, "grad_norm": 0.6605527400970459, "learning_rate": 0.00011512039648915947, "loss": 1.0797, "step": 4876 }, { "epoch": 0.9063371120609552, "grad_norm": 0.5774627327919006, "learning_rate": 0.00011509148621288304, "loss": 
1.0385, "step": 4877 }, { "epoch": 0.9065229511243263, "grad_norm": 0.6432042717933655, "learning_rate": 0.00011506257464579917, "loss": 1.0954, "step": 4878 }, { "epoch": 0.9067087901876975, "grad_norm": 0.7714658975601196, "learning_rate": 0.00011503366179038075, "loss": 1.0423, "step": 4879 }, { "epoch": 0.9068946292510686, "grad_norm": 0.5681400299072266, "learning_rate": 0.00011500474764910074, "loss": 0.9482, "step": 4880 }, { "epoch": 0.9070804683144397, "grad_norm": 0.6567079424858093, "learning_rate": 0.00011497583222443225, "loss": 0.9561, "step": 4881 }, { "epoch": 0.9072663073778108, "grad_norm": 0.6021323204040527, "learning_rate": 0.00011494691551884844, "loss": 0.9268, "step": 4882 }, { "epoch": 0.9074521464411819, "grad_norm": 0.6198539733886719, "learning_rate": 0.00011491799753482265, "loss": 1.1215, "step": 4883 }, { "epoch": 0.9076379855045531, "grad_norm": 0.5479195713996887, "learning_rate": 0.00011488907827482829, "loss": 0.5888, "step": 4884 }, { "epoch": 0.9078238245679242, "grad_norm": 0.6176124811172485, "learning_rate": 0.00011486015774133885, "loss": 1.1016, "step": 4885 }, { "epoch": 0.9080096636312953, "grad_norm": 0.49869436025619507, "learning_rate": 0.00011483123593682806, "loss": 1.005, "step": 4886 }, { "epoch": 0.9081955026946664, "grad_norm": 0.6559669971466064, "learning_rate": 0.00011480231286376956, "loss": 1.0295, "step": 4887 }, { "epoch": 0.9083813417580375, "grad_norm": 0.8178464770317078, "learning_rate": 0.00011477338852463729, "loss": 1.2055, "step": 4888 }, { "epoch": 0.9085671808214086, "grad_norm": 0.6112135052680969, "learning_rate": 0.00011474446292190515, "loss": 1.1929, "step": 4889 }, { "epoch": 0.9087530198847797, "grad_norm": 0.6317768096923828, "learning_rate": 0.00011471553605804721, "loss": 1.1349, "step": 4890 }, { "epoch": 0.9089388589481509, "grad_norm": 12.977190017700195, "learning_rate": 0.00011468660793553769, "loss": 2.2248, "step": 4891 }, { "epoch": 0.9091246980115221, "grad_norm": 
0.614732563495636, "learning_rate": 0.00011465767855685081, "loss": 1.1119, "step": 4892 }, { "epoch": 0.9093105370748932, "grad_norm": 0.5960726737976074, "learning_rate": 0.00011462874792446105, "loss": 0.9675, "step": 4893 }, { "epoch": 0.9094963761382643, "grad_norm": 0.5411145687103271, "learning_rate": 0.00011459981604084287, "loss": 0.8035, "step": 4894 }, { "epoch": 0.9096822152016354, "grad_norm": 0.7136313319206238, "learning_rate": 0.00011457088290847084, "loss": 0.9104, "step": 4895 }, { "epoch": 0.9098680542650065, "grad_norm": 0.56341552734375, "learning_rate": 0.00011454194852981972, "loss": 0.9796, "step": 4896 }, { "epoch": 0.9100538933283776, "grad_norm": 0.863733172416687, "learning_rate": 0.0001145130129073643, "loss": 1.2648, "step": 4897 }, { "epoch": 0.9102397323917487, "grad_norm": 0.4566417634487152, "learning_rate": 0.00011448407604357954, "loss": 0.9554, "step": 4898 }, { "epoch": 0.9104255714551198, "grad_norm": 0.5524247288703918, "learning_rate": 0.00011445513794094042, "loss": 1.0031, "step": 4899 }, { "epoch": 0.910611410518491, "grad_norm": 0.5676782727241516, "learning_rate": 0.00011442619860192212, "loss": 1.0623, "step": 4900 }, { "epoch": 0.9107972495818621, "grad_norm": 0.4784383475780487, "learning_rate": 0.0001143972580289999, "loss": 0.6645, "step": 4901 }, { "epoch": 0.9109830886452333, "grad_norm": 0.5696948766708374, "learning_rate": 0.00011436831622464907, "loss": 0.8933, "step": 4902 }, { "epoch": 0.9111689277086044, "grad_norm": 0.6131220459938049, "learning_rate": 0.00011433937319134511, "loss": 1.0181, "step": 4903 }, { "epoch": 0.9113547667719755, "grad_norm": 0.5868662595748901, "learning_rate": 0.00011431042893156353, "loss": 0.9421, "step": 4904 }, { "epoch": 0.9115406058353466, "grad_norm": 0.6456340551376343, "learning_rate": 0.00011428148344778007, "loss": 0.9885, "step": 4905 }, { "epoch": 0.9117264448987177, "grad_norm": 0.5382920503616333, "learning_rate": 0.00011425253674247045, "loss": 0.85, "step": 4906 
}, { "epoch": 0.9119122839620888, "grad_norm": 0.7104405760765076, "learning_rate": 0.00011422358881811056, "loss": 1.1623, "step": 4907 }, { "epoch": 0.91209812302546, "grad_norm": 0.6414424777030945, "learning_rate": 0.00011419463967717638, "loss": 1.2485, "step": 4908 }, { "epoch": 0.9122839620888311, "grad_norm": 0.6622775197029114, "learning_rate": 0.000114165689322144, "loss": 0.7905, "step": 4909 }, { "epoch": 0.9124698011522022, "grad_norm": 0.5562495589256287, "learning_rate": 0.00011413673775548959, "loss": 0.9664, "step": 4910 }, { "epoch": 0.9126556402155733, "grad_norm": 0.5962749123573303, "learning_rate": 0.00011410778497968944, "loss": 1.1049, "step": 4911 }, { "epoch": 0.9128414792789444, "grad_norm": 0.5652308464050293, "learning_rate": 0.00011407883099721994, "loss": 0.792, "step": 4912 }, { "epoch": 0.9130273183423155, "grad_norm": 0.5293900966644287, "learning_rate": 0.00011404987581055761, "loss": 1.0509, "step": 4913 }, { "epoch": 0.9132131574056866, "grad_norm": 0.6027974486351013, "learning_rate": 0.00011402091942217903, "loss": 0.9318, "step": 4914 }, { "epoch": 0.9133989964690578, "grad_norm": 0.6635562181472778, "learning_rate": 0.00011399196183456092, "loss": 0.9232, "step": 4915 }, { "epoch": 0.9135848355324289, "grad_norm": 0.6220401525497437, "learning_rate": 0.00011396300305018008, "loss": 1.1271, "step": 4916 }, { "epoch": 0.9137706745958001, "grad_norm": 0.6705507040023804, "learning_rate": 0.00011393404307151341, "loss": 1.15, "step": 4917 }, { "epoch": 0.9139565136591712, "grad_norm": 0.6669270992279053, "learning_rate": 0.00011390508190103787, "loss": 1.0627, "step": 4918 }, { "epoch": 0.9141423527225423, "grad_norm": 0.5870175957679749, "learning_rate": 0.00011387611954123068, "loss": 1.2826, "step": 4919 }, { "epoch": 0.9143281917859134, "grad_norm": 0.5145399570465088, "learning_rate": 0.00011384715599456898, "loss": 0.6593, "step": 4920 }, { "epoch": 0.9145140308492845, "grad_norm": 0.6042763590812683, "learning_rate": 
0.00011381819126353008, "loss": 1.2208, "step": 4921 }, { "epoch": 0.9146998699126556, "grad_norm": 0.655913770198822, "learning_rate": 0.00011378922535059145, "loss": 1.1689, "step": 4922 }, { "epoch": 0.9148857089760267, "grad_norm": 0.5491055846214294, "learning_rate": 0.00011376025825823055, "loss": 0.8107, "step": 4923 }, { "epoch": 0.9150715480393978, "grad_norm": 0.6189053654670715, "learning_rate": 0.00011373128998892504, "loss": 1.1629, "step": 4924 }, { "epoch": 0.915257387102769, "grad_norm": 0.6688449382781982, "learning_rate": 0.0001137023205451526, "loss": 1.2761, "step": 4925 }, { "epoch": 0.9154432261661402, "grad_norm": 0.6299698352813721, "learning_rate": 0.00011367334992939107, "loss": 1.113, "step": 4926 }, { "epoch": 0.9156290652295113, "grad_norm": 0.60389643907547, "learning_rate": 0.00011364437814411839, "loss": 1.1595, "step": 4927 }, { "epoch": 0.9158149042928824, "grad_norm": 0.49639827013015747, "learning_rate": 0.00011361540519181253, "loss": 1.085, "step": 4928 }, { "epoch": 0.9160007433562535, "grad_norm": 0.5939173102378845, "learning_rate": 0.00011358643107495163, "loss": 1.3346, "step": 4929 }, { "epoch": 0.9161865824196246, "grad_norm": 0.5937512516975403, "learning_rate": 0.00011355745579601395, "loss": 0.9499, "step": 4930 }, { "epoch": 0.9163724214829957, "grad_norm": 0.6250744462013245, "learning_rate": 0.00011352847935747777, "loss": 1.0898, "step": 4931 }, { "epoch": 0.9165582605463668, "grad_norm": 0.5503117442131042, "learning_rate": 0.0001134995017618215, "loss": 1.0074, "step": 4932 }, { "epoch": 0.916744099609738, "grad_norm": 0.7058550119400024, "learning_rate": 0.00011347052301152368, "loss": 1.1851, "step": 4933 }, { "epoch": 0.9169299386731091, "grad_norm": 0.6272091865539551, "learning_rate": 0.0001134415431090629, "loss": 1.0675, "step": 4934 }, { "epoch": 0.9171157777364802, "grad_norm": 0.5527566075325012, "learning_rate": 0.0001134125620569179, "loss": 1.0778, "step": 4935 }, { "epoch": 0.9173016167998513, 
"grad_norm": 0.65137779712677, "learning_rate": 0.00011338357985756745, "loss": 0.9924, "step": 4936 }, { "epoch": 0.9174874558632224, "grad_norm": 0.5755317807197571, "learning_rate": 0.00011335459651349053, "loss": 1.0271, "step": 4937 }, { "epoch": 0.9176732949265936, "grad_norm": 0.5749362707138062, "learning_rate": 0.00011332561202716608, "loss": 1.1052, "step": 4938 }, { "epoch": 0.9178591339899647, "grad_norm": 0.6218981146812439, "learning_rate": 0.00011329662640107322, "loss": 0.9382, "step": 4939 }, { "epoch": 0.9180449730533358, "grad_norm": 0.5158394575119019, "learning_rate": 0.00011326763963769119, "loss": 1.2352, "step": 4940 }, { "epoch": 0.918230812116707, "grad_norm": 0.6450227499008179, "learning_rate": 0.00011323865173949926, "loss": 1.0512, "step": 4941 }, { "epoch": 0.9184166511800781, "grad_norm": 0.5990210771560669, "learning_rate": 0.00011320966270897684, "loss": 0.853, "step": 4942 }, { "epoch": 0.9186024902434492, "grad_norm": 0.5773190855979919, "learning_rate": 0.00011318067254860341, "loss": 1.1168, "step": 4943 }, { "epoch": 0.9187883293068203, "grad_norm": 0.6192017197608948, "learning_rate": 0.00011315168126085857, "loss": 0.899, "step": 4944 }, { "epoch": 0.9189741683701914, "grad_norm": 0.6629911661148071, "learning_rate": 0.00011312268884822201, "loss": 1.128, "step": 4945 }, { "epoch": 0.9191600074335625, "grad_norm": 0.7068147659301758, "learning_rate": 0.0001130936953131735, "loss": 1.4354, "step": 4946 }, { "epoch": 0.9193458464969336, "grad_norm": 0.6272721886634827, "learning_rate": 0.00011306470065819295, "loss": 1.0152, "step": 4947 }, { "epoch": 0.9195316855603047, "grad_norm": 0.5584651827812195, "learning_rate": 0.00011303570488576032, "loss": 0.8876, "step": 4948 }, { "epoch": 0.9197175246236758, "grad_norm": 0.6266582608222961, "learning_rate": 0.00011300670799835567, "loss": 0.9633, "step": 4949 }, { "epoch": 0.9199033636870471, "grad_norm": 0.5822092294692993, "learning_rate": 0.00011297770999845915, "loss": 
1.1593, "step": 4950 }, { "epoch": 0.9200892027504182, "grad_norm": 0.5857718586921692, "learning_rate": 0.00011294871088855107, "loss": 0.9418, "step": 4951 }, { "epoch": 0.9202750418137893, "grad_norm": 0.5828437209129333, "learning_rate": 0.00011291971067111178, "loss": 0.8059, "step": 4952 }, { "epoch": 0.9204608808771604, "grad_norm": 0.5660130977630615, "learning_rate": 0.00011289070934862167, "loss": 1.1642, "step": 4953 }, { "epoch": 0.9206467199405315, "grad_norm": 0.5477927923202515, "learning_rate": 0.00011286170692356134, "loss": 0.9768, "step": 4954 }, { "epoch": 0.9208325590039026, "grad_norm": 0.5076239109039307, "learning_rate": 0.00011283270339841144, "loss": 0.7493, "step": 4955 }, { "epoch": 0.9210183980672737, "grad_norm": 0.5687392354011536, "learning_rate": 0.0001128036987756527, "loss": 0.9457, "step": 4956 }, { "epoch": 0.9212042371306448, "grad_norm": 0.5765700936317444, "learning_rate": 0.0001127746930577659, "loss": 1.0497, "step": 4957 }, { "epoch": 0.921390076194016, "grad_norm": 0.5494324564933777, "learning_rate": 0.00011274568624723197, "loss": 1.0566, "step": 4958 }, { "epoch": 0.9215759152573871, "grad_norm": 0.615958034992218, "learning_rate": 0.00011271667834653196, "loss": 1.216, "step": 4959 }, { "epoch": 0.9217617543207582, "grad_norm": 0.8911369442939758, "learning_rate": 0.00011268766935814698, "loss": 1.3917, "step": 4960 }, { "epoch": 0.9219475933841293, "grad_norm": 0.6073392033576965, "learning_rate": 0.00011265865928455824, "loss": 1.051, "step": 4961 }, { "epoch": 0.9221334324475005, "grad_norm": 0.6068910956382751, "learning_rate": 0.00011262964812824699, "loss": 0.9606, "step": 4962 }, { "epoch": 0.9223192715108716, "grad_norm": 0.5832070708274841, "learning_rate": 0.00011260063589169465, "loss": 1.1758, "step": 4963 }, { "epoch": 0.9225051105742427, "grad_norm": 0.5867427587509155, "learning_rate": 0.00011257162257738268, "loss": 1.1755, "step": 4964 }, { "epoch": 0.9226909496376138, "grad_norm": 0.7116524577140808, 
"learning_rate": 0.00011254260818779263, "loss": 1.0624, "step": 4965 }, { "epoch": 0.922876788700985, "grad_norm": 0.6223874688148499, "learning_rate": 0.00011251359272540624, "loss": 1.2023, "step": 4966 }, { "epoch": 0.9230626277643561, "grad_norm": 0.6258384585380554, "learning_rate": 0.00011248457619270518, "loss": 0.9243, "step": 4967 }, { "epoch": 0.9232484668277272, "grad_norm": 0.5584441423416138, "learning_rate": 0.00011245555859217136, "loss": 1.1783, "step": 4968 }, { "epoch": 0.9234343058910983, "grad_norm": 0.6257378458976746, "learning_rate": 0.00011242653992628672, "loss": 1.1093, "step": 4969 }, { "epoch": 0.9236201449544694, "grad_norm": 0.6018193364143372, "learning_rate": 0.00011239752019753323, "loss": 0.9174, "step": 4970 }, { "epoch": 0.9238059840178405, "grad_norm": 0.5290696620941162, "learning_rate": 0.00011236849940839305, "loss": 1.0616, "step": 4971 }, { "epoch": 0.9239918230812116, "grad_norm": 0.5852720141410828, "learning_rate": 0.00011233947756134835, "loss": 1.0018, "step": 4972 }, { "epoch": 0.9241776621445827, "grad_norm": 0.5290955901145935, "learning_rate": 0.00011231045465888151, "loss": 1.2372, "step": 4973 }, { "epoch": 0.924363501207954, "grad_norm": 0.674185574054718, "learning_rate": 0.00011228143070347486, "loss": 0.9144, "step": 4974 }, { "epoch": 0.9245493402713251, "grad_norm": 0.7288258671760559, "learning_rate": 0.00011225240569761089, "loss": 0.9673, "step": 4975 }, { "epoch": 0.9247351793346962, "grad_norm": 0.5527795553207397, "learning_rate": 0.0001122233796437722, "loss": 1.0264, "step": 4976 }, { "epoch": 0.9249210183980673, "grad_norm": 0.5867929458618164, "learning_rate": 0.00011219435254444141, "loss": 1.3047, "step": 4977 }, { "epoch": 0.9251068574614384, "grad_norm": 0.6463168859481812, "learning_rate": 0.00011216532440210133, "loss": 1.1629, "step": 4978 }, { "epoch": 0.9252926965248095, "grad_norm": 0.6795656085014343, "learning_rate": 0.00011213629521923472, "loss": 1.4026, "step": 4979 }, { "epoch": 
0.9254785355881806, "grad_norm": 0.8002610802650452, "learning_rate": 0.00011210726499832456, "loss": 1.1641, "step": 4980 }, { "epoch": 0.9256643746515517, "grad_norm": 0.6516452431678772, "learning_rate": 0.00011207823374185384, "loss": 1.3039, "step": 4981 }, { "epoch": 0.9258502137149229, "grad_norm": 0.6306900382041931, "learning_rate": 0.00011204920145230571, "loss": 1.2405, "step": 4982 }, { "epoch": 0.926036052778294, "grad_norm": 0.560518741607666, "learning_rate": 0.00011202016813216334, "loss": 0.8887, "step": 4983 }, { "epoch": 0.9262218918416651, "grad_norm": 0.5832896828651428, "learning_rate": 0.00011199113378391002, "loss": 1.0638, "step": 4984 }, { "epoch": 0.9264077309050363, "grad_norm": 0.5987124443054199, "learning_rate": 0.00011196209841002909, "loss": 1.0041, "step": 4985 }, { "epoch": 0.9265935699684074, "grad_norm": 0.5337582230567932, "learning_rate": 0.00011193306201300404, "loss": 0.8817, "step": 4986 }, { "epoch": 0.9267794090317785, "grad_norm": 0.644491970539093, "learning_rate": 0.0001119040245953184, "loss": 1.319, "step": 4987 }, { "epoch": 0.9269652480951496, "grad_norm": 0.7071481347084045, "learning_rate": 0.00011187498615945578, "loss": 0.9309, "step": 4988 }, { "epoch": 0.9271510871585207, "grad_norm": 0.5608466267585754, "learning_rate": 0.00011184594670789996, "loss": 1.2428, "step": 4989 }, { "epoch": 0.9273369262218918, "grad_norm": 0.6157172322273254, "learning_rate": 0.00011181690624313472, "loss": 0.9522, "step": 4990 }, { "epoch": 0.927522765285263, "grad_norm": 0.5183684229850769, "learning_rate": 0.00011178786476764393, "loss": 0.8553, "step": 4991 }, { "epoch": 0.9277086043486341, "grad_norm": 0.6620053052902222, "learning_rate": 0.00011175882228391161, "loss": 1.3053, "step": 4992 }, { "epoch": 0.9278944434120052, "grad_norm": 0.6395905017852783, "learning_rate": 0.00011172977879442182, "loss": 1.1837, "step": 4993 }, { "epoch": 0.9280802824753763, "grad_norm": 0.5325583815574646, "learning_rate": 
0.00011170073430165864, "loss": 1.0923, "step": 4994 }, { "epoch": 0.9282661215387474, "grad_norm": 0.5025383830070496, "learning_rate": 0.00011167168880810639, "loss": 0.8869, "step": 4995 }, { "epoch": 0.9284519606021185, "grad_norm": 0.5871690511703491, "learning_rate": 0.0001116426423162494, "loss": 1.1298, "step": 4996 }, { "epoch": 0.9286377996654896, "grad_norm": 0.6282622218132019, "learning_rate": 0.00011161359482857204, "loss": 1.0211, "step": 4997 }, { "epoch": 0.9288236387288608, "grad_norm": 0.6937336325645447, "learning_rate": 0.0001115845463475588, "loss": 1.1674, "step": 4998 }, { "epoch": 0.929009477792232, "grad_norm": 0.6456194519996643, "learning_rate": 0.00011155549687569429, "loss": 1.1737, "step": 4999 }, { "epoch": 0.9291953168556031, "grad_norm": 0.6184737682342529, "learning_rate": 0.00011152644641546315, "loss": 1.0585, "step": 5000 }, { "epoch": 0.9293811559189742, "grad_norm": 0.5586627125740051, "learning_rate": 0.00011149739496935012, "loss": 1.1264, "step": 5001 }, { "epoch": 0.9295669949823453, "grad_norm": 0.6428977251052856, "learning_rate": 0.00011146834253984006, "loss": 0.9552, "step": 5002 }, { "epoch": 0.9297528340457164, "grad_norm": 1.159934401512146, "learning_rate": 0.00011143928912941785, "loss": 1.123, "step": 5003 }, { "epoch": 0.9299386731090875, "grad_norm": 0.552125871181488, "learning_rate": 0.00011141023474056856, "loss": 1.0911, "step": 5004 }, { "epoch": 0.9301245121724586, "grad_norm": 0.685393214225769, "learning_rate": 0.00011138117937577721, "loss": 1.2168, "step": 5005 }, { "epoch": 0.9303103512358297, "grad_norm": 0.550447404384613, "learning_rate": 0.00011135212303752902, "loss": 1.2536, "step": 5006 }, { "epoch": 0.9304961902992009, "grad_norm": 0.5898290872573853, "learning_rate": 0.0001113230657283092, "loss": 0.9594, "step": 5007 }, { "epoch": 0.930682029362572, "grad_norm": 0.6642700433731079, "learning_rate": 0.00011129400745060308, "loss": 1.4289, "step": 5008 }, { "epoch": 0.9308678684259432, 
"grad_norm": 2.138993501663208, "learning_rate": 0.00011126494820689608, "loss": 1.4129, "step": 5009 }, { "epoch": 0.9310537074893143, "grad_norm": 0.6226675510406494, "learning_rate": 0.0001112358879996737, "loss": 1.0456, "step": 5010 }, { "epoch": 0.9312395465526854, "grad_norm": 0.705071210861206, "learning_rate": 0.00011120682683142159, "loss": 1.0938, "step": 5011 }, { "epoch": 0.9314253856160565, "grad_norm": 0.6553571820259094, "learning_rate": 0.00011117776470462533, "loss": 1.0687, "step": 5012 }, { "epoch": 0.9316112246794276, "grad_norm": 0.6681797504425049, "learning_rate": 0.0001111487016217707, "loss": 1.1589, "step": 5013 }, { "epoch": 0.9317970637427987, "grad_norm": 0.6915502548217773, "learning_rate": 0.00011111963758534353, "loss": 1.0384, "step": 5014 }, { "epoch": 0.9319829028061699, "grad_norm": 0.6186158657073975, "learning_rate": 0.00011109057259782976, "loss": 0.964, "step": 5015 }, { "epoch": 0.932168741869541, "grad_norm": 0.5619001388549805, "learning_rate": 0.00011106150666171528, "loss": 1.1913, "step": 5016 }, { "epoch": 0.9323545809329121, "grad_norm": 0.5783942937850952, "learning_rate": 0.00011103243977948623, "loss": 1.0368, "step": 5017 }, { "epoch": 0.9325404199962832, "grad_norm": 0.6250287294387817, "learning_rate": 0.0001110033719536288, "loss": 0.9352, "step": 5018 }, { "epoch": 0.9327262590596543, "grad_norm": 0.6437892317771912, "learning_rate": 0.00011097430318662918, "loss": 1.2505, "step": 5019 }, { "epoch": 0.9329120981230254, "grad_norm": 0.6726879477500916, "learning_rate": 0.00011094523348097369, "loss": 1.1019, "step": 5020 }, { "epoch": 0.9330979371863966, "grad_norm": 0.5041570663452148, "learning_rate": 0.00011091616283914872, "loss": 1.0098, "step": 5021 }, { "epoch": 0.9332837762497677, "grad_norm": 0.5859299302101135, "learning_rate": 0.00011088709126364074, "loss": 1.0304, "step": 5022 }, { "epoch": 0.9334696153131388, "grad_norm": 0.5126475691795349, "learning_rate": 0.00011085801875693631, "loss": 
0.9202, "step": 5023 }, { "epoch": 0.93365545437651, "grad_norm": 0.5984481573104858, "learning_rate": 0.00011082894532152206, "loss": 1.1288, "step": 5024 }, { "epoch": 0.9338412934398811, "grad_norm": 0.6451727151870728, "learning_rate": 0.00011079987095988476, "loss": 1.0977, "step": 5025 }, { "epoch": 0.9340271325032522, "grad_norm": 0.6106451153755188, "learning_rate": 0.00011077079567451111, "loss": 1.0946, "step": 5026 }, { "epoch": 0.9342129715666233, "grad_norm": 1.9733521938323975, "learning_rate": 0.00011074171946788805, "loss": 1.9512, "step": 5027 }, { "epoch": 0.9343988106299944, "grad_norm": 0.6016564965248108, "learning_rate": 0.00011071264234250252, "loss": 1.1447, "step": 5028 }, { "epoch": 0.9345846496933655, "grad_norm": 0.5035417675971985, "learning_rate": 0.00011068356430084153, "loss": 0.8372, "step": 5029 }, { "epoch": 0.9347704887567366, "grad_norm": 0.6445589661598206, "learning_rate": 0.00011065448534539216, "loss": 1.2813, "step": 5030 }, { "epoch": 0.9349563278201077, "grad_norm": 0.5566248297691345, "learning_rate": 0.00011062540547864166, "loss": 1.1497, "step": 5031 }, { "epoch": 0.935142166883479, "grad_norm": 0.6071885824203491, "learning_rate": 0.00011059632470307723, "loss": 1.0647, "step": 5032 }, { "epoch": 0.9353280059468501, "grad_norm": 0.703399658203125, "learning_rate": 0.00011056724302118628, "loss": 1.1353, "step": 5033 }, { "epoch": 0.9355138450102212, "grad_norm": 0.6310158371925354, "learning_rate": 0.00011053816043545621, "loss": 1.1485, "step": 5034 }, { "epoch": 0.9356996840735923, "grad_norm": 0.4967382848262787, "learning_rate": 0.00011050907694837449, "loss": 0.7314, "step": 5035 }, { "epoch": 0.9358855231369634, "grad_norm": 0.5645965933799744, "learning_rate": 0.00011047999256242872, "loss": 1.1769, "step": 5036 }, { "epoch": 0.9360713622003345, "grad_norm": 0.6344399452209473, "learning_rate": 0.0001104509072801065, "loss": 0.9947, "step": 5037 }, { "epoch": 0.9362572012637056, "grad_norm": 
0.6954240202903748, "learning_rate": 0.0001104218211038956, "loss": 1.043, "step": 5038 }, { "epoch": 0.9364430403270767, "grad_norm": 0.5661229491233826, "learning_rate": 0.0001103927340362838, "loss": 1.1193, "step": 5039 }, { "epoch": 0.9366288793904479, "grad_norm": 2.2776856422424316, "learning_rate": 0.00011036364607975904, "loss": 1.9404, "step": 5040 }, { "epoch": 0.936814718453819, "grad_norm": 1.624625563621521, "learning_rate": 0.00011033455723680922, "loss": 1.8305, "step": 5041 }, { "epoch": 0.9370005575171901, "grad_norm": 0.6585975885391235, "learning_rate": 0.00011030546750992239, "loss": 0.9251, "step": 5042 }, { "epoch": 0.9371863965805612, "grad_norm": 0.5694805383682251, "learning_rate": 0.00011027637690158663, "loss": 1.1026, "step": 5043 }, { "epoch": 0.9373722356439323, "grad_norm": 0.5340913534164429, "learning_rate": 0.00011024728541429015, "loss": 0.9748, "step": 5044 }, { "epoch": 0.9375580747073035, "grad_norm": 0.6588858366012573, "learning_rate": 0.00011021819305052119, "loss": 1.1756, "step": 5045 }, { "epoch": 0.9377439137706746, "grad_norm": 0.5693681836128235, "learning_rate": 0.0001101890998127681, "loss": 1.047, "step": 5046 }, { "epoch": 0.9379297528340457, "grad_norm": 0.7040513753890991, "learning_rate": 0.0001101600057035193, "loss": 1.1306, "step": 5047 }, { "epoch": 0.9381155918974169, "grad_norm": 0.5445013046264648, "learning_rate": 0.00011013091072526322, "loss": 1.0133, "step": 5048 }, { "epoch": 0.938301430960788, "grad_norm": 0.6809645891189575, "learning_rate": 0.0001101018148804885, "loss": 1.1443, "step": 5049 }, { "epoch": 0.9384872700241591, "grad_norm": 0.659029483795166, "learning_rate": 0.0001100727181716837, "loss": 1.1365, "step": 5050 }, { "epoch": 0.9386731090875302, "grad_norm": 0.6074685454368591, "learning_rate": 0.00011004362060133751, "loss": 0.9799, "step": 5051 }, { "epoch": 0.9388589481509013, "grad_norm": 0.5053578615188599, "learning_rate": 0.00011001452217193877, "loss": 0.9078, "step": 5052 }, 
{ "epoch": 0.9390447872142724, "grad_norm": 0.77981036901474, "learning_rate": 0.0001099854228859763, "loss": 1.3562, "step": 5053 }, { "epoch": 0.9392306262776435, "grad_norm": 0.5708858966827393, "learning_rate": 0.00010995632274593904, "loss": 1.2978, "step": 5054 }, { "epoch": 0.9394164653410146, "grad_norm": 0.661683976650238, "learning_rate": 0.00010992722175431595, "loss": 1.1618, "step": 5055 }, { "epoch": 0.9396023044043857, "grad_norm": 0.631465494632721, "learning_rate": 0.00010989811991359618, "loss": 0.9292, "step": 5056 }, { "epoch": 0.939788143467757, "grad_norm": 0.57979816198349, "learning_rate": 0.00010986901722626879, "loss": 1.1043, "step": 5057 }, { "epoch": 0.9399739825311281, "grad_norm": 0.5770615935325623, "learning_rate": 0.00010983991369482301, "loss": 1.1172, "step": 5058 }, { "epoch": 0.9401598215944992, "grad_norm": 0.5192406177520752, "learning_rate": 0.00010981080932174816, "loss": 1.0143, "step": 5059 }, { "epoch": 0.9403456606578703, "grad_norm": 4.092175006866455, "learning_rate": 0.00010978170410953359, "loss": 1.7, "step": 5060 }, { "epoch": 0.9405314997212414, "grad_norm": 0.6853275895118713, "learning_rate": 0.00010975259806066871, "loss": 1.109, "step": 5061 }, { "epoch": 0.9407173387846125, "grad_norm": 0.6233556866645813, "learning_rate": 0.00010972349117764304, "loss": 1.2134, "step": 5062 }, { "epoch": 0.9409031778479836, "grad_norm": 0.6518190503120422, "learning_rate": 0.00010969438346294618, "loss": 1.3395, "step": 5063 }, { "epoch": 0.9410890169113547, "grad_norm": 0.5534566044807434, "learning_rate": 0.00010966527491906775, "loss": 1.0615, "step": 5064 }, { "epoch": 0.9412748559747259, "grad_norm": 0.5693931579589844, "learning_rate": 0.00010963616554849743, "loss": 0.9612, "step": 5065 }, { "epoch": 0.941460695038097, "grad_norm": 0.6822101473808289, "learning_rate": 0.00010960705535372509, "loss": 1.1194, "step": 5066 }, { "epoch": 0.9416465341014681, "grad_norm": 0.626566469669342, "learning_rate": 
0.00010957794433724051, "loss": 1.1189, "step": 5067 }, { "epoch": 0.9418323731648393, "grad_norm": 0.5476789474487305, "learning_rate": 0.00010954883250153365, "loss": 0.967, "step": 5068 }, { "epoch": 0.9420182122282104, "grad_norm": 1.7039079666137695, "learning_rate": 0.00010951971984909452, "loss": 1.4795, "step": 5069 }, { "epoch": 0.9422040512915815, "grad_norm": 0.6259586811065674, "learning_rate": 0.00010949060638241318, "loss": 1.0563, "step": 5070 }, { "epoch": 0.9423898903549526, "grad_norm": 0.6064488291740417, "learning_rate": 0.00010946149210397976, "loss": 0.9932, "step": 5071 }, { "epoch": 0.9425757294183237, "grad_norm": 0.5755789279937744, "learning_rate": 0.00010943237701628446, "loss": 0.9768, "step": 5072 }, { "epoch": 0.9427615684816949, "grad_norm": 0.6496974229812622, "learning_rate": 0.00010940326112181758, "loss": 1.28, "step": 5073 }, { "epoch": 0.942947407545066, "grad_norm": 0.5570834875106812, "learning_rate": 0.00010937414442306946, "loss": 1.1327, "step": 5074 }, { "epoch": 0.9431332466084371, "grad_norm": 0.5175633430480957, "learning_rate": 0.00010934502692253048, "loss": 0.8864, "step": 5075 }, { "epoch": 0.9433190856718082, "grad_norm": 0.4660826027393341, "learning_rate": 0.00010931590862269117, "loss": 0.7515, "step": 5076 }, { "epoch": 0.9435049247351793, "grad_norm": 0.6771041750907898, "learning_rate": 0.00010928678952604205, "loss": 1.2769, "step": 5077 }, { "epoch": 0.9436907637985504, "grad_norm": 0.6460321545600891, "learning_rate": 0.00010925766963507378, "loss": 0.8766, "step": 5078 }, { "epoch": 0.9438766028619215, "grad_norm": 0.7063780426979065, "learning_rate": 0.00010922854895227699, "loss": 1.1556, "step": 5079 }, { "epoch": 0.9440624419252927, "grad_norm": 0.5997153520584106, "learning_rate": 0.00010919942748014247, "loss": 1.1245, "step": 5080 }, { "epoch": 0.9442482809886639, "grad_norm": 0.6136290431022644, "learning_rate": 0.00010917030522116104, "loss": 0.9105, "step": 5081 }, { "epoch": 0.944434120052035, 
"grad_norm": 0.6878195405006409, "learning_rate": 0.00010914118217782358, "loss": 0.9943, "step": 5082 }, { "epoch": 0.9446199591154061, "grad_norm": 0.5871633291244507, "learning_rate": 0.00010911205835262104, "loss": 1.198, "step": 5083 }, { "epoch": 0.9448057981787772, "grad_norm": 0.7107055187225342, "learning_rate": 0.00010908293374804446, "loss": 1.2383, "step": 5084 }, { "epoch": 0.9449916372421483, "grad_norm": 0.7144885659217834, "learning_rate": 0.00010905380836658495, "loss": 1.1298, "step": 5085 }, { "epoch": 0.9451774763055194, "grad_norm": 0.6252016425132751, "learning_rate": 0.0001090246822107336, "loss": 1.0093, "step": 5086 }, { "epoch": 0.9453633153688905, "grad_norm": 0.5267205834388733, "learning_rate": 0.00010899555528298171, "loss": 1.0455, "step": 5087 }, { "epoch": 0.9455491544322616, "grad_norm": 0.7700502872467041, "learning_rate": 0.00010896642758582052, "loss": 1.0634, "step": 5088 }, { "epoch": 0.9457349934956328, "grad_norm": 0.6101939678192139, "learning_rate": 0.00010893729912174142, "loss": 0.8266, "step": 5089 }, { "epoch": 0.9459208325590039, "grad_norm": 0.6290589570999146, "learning_rate": 0.00010890816989323577, "loss": 1.0146, "step": 5090 }, { "epoch": 0.946106671622375, "grad_norm": 0.5946857929229736, "learning_rate": 0.00010887903990279514, "loss": 0.8321, "step": 5091 }, { "epoch": 0.9462925106857462, "grad_norm": 0.583081066608429, "learning_rate": 0.000108849909152911, "loss": 0.8929, "step": 5092 }, { "epoch": 0.9464783497491173, "grad_norm": 0.5924783945083618, "learning_rate": 0.00010882077764607501, "loss": 0.8518, "step": 5093 }, { "epoch": 0.9466641888124884, "grad_norm": 0.5845556259155273, "learning_rate": 0.00010879164538477886, "loss": 1.0834, "step": 5094 }, { "epoch": 0.9468500278758595, "grad_norm": 0.6627733707427979, "learning_rate": 0.00010876251237151429, "loss": 1.0807, "step": 5095 }, { "epoch": 0.9470358669392306, "grad_norm": 0.5590043663978577, "learning_rate": 0.00010873337860877308, "loss": 
1.1807, "step": 5096 }, { "epoch": 0.9472217060026017, "grad_norm": 0.4764077961444855, "learning_rate": 0.00010870424409904711, "loss": 1.0865, "step": 5097 }, { "epoch": 0.9474075450659729, "grad_norm": 0.5997889637947083, "learning_rate": 0.00010867510884482837, "loss": 1.3383, "step": 5098 }, { "epoch": 0.947593384129344, "grad_norm": 0.5681039094924927, "learning_rate": 0.00010864597284860876, "loss": 1.0977, "step": 5099 }, { "epoch": 0.9477792231927151, "grad_norm": 0.6533553004264832, "learning_rate": 0.00010861683611288045, "loss": 1.1249, "step": 5100 }, { "epoch": 0.9479650622560862, "grad_norm": 0.6179550886154175, "learning_rate": 0.00010858769864013551, "loss": 1.0567, "step": 5101 }, { "epoch": 0.9481509013194573, "grad_norm": 0.5242693424224854, "learning_rate": 0.00010855856043286616, "loss": 0.7278, "step": 5102 }, { "epoch": 0.9483367403828284, "grad_norm": 0.579545795917511, "learning_rate": 0.00010852942149356464, "loss": 1.1227, "step": 5103 }, { "epoch": 0.9485225794461996, "grad_norm": 0.6305685639381409, "learning_rate": 0.00010850028182472328, "loss": 1.2344, "step": 5104 }, { "epoch": 0.9487084185095707, "grad_norm": 0.6166912317276001, "learning_rate": 0.0001084711414288344, "loss": 1.1048, "step": 5105 }, { "epoch": 0.9488942575729419, "grad_norm": 0.549769401550293, "learning_rate": 0.00010844200030839047, "loss": 0.9258, "step": 5106 }, { "epoch": 0.949080096636313, "grad_norm": 0.6159671545028687, "learning_rate": 0.00010841285846588405, "loss": 1.0702, "step": 5107 }, { "epoch": 0.9492659356996841, "grad_norm": 0.6680212616920471, "learning_rate": 0.00010838371590380765, "loss": 1.0217, "step": 5108 }, { "epoch": 0.9494517747630552, "grad_norm": 3.235112428665161, "learning_rate": 0.00010835457262465391, "loss": 1.5417, "step": 5109 }, { "epoch": 0.9496376138264263, "grad_norm": 0.6026039123535156, "learning_rate": 0.00010832542863091553, "loss": 1.0344, "step": 5110 }, { "epoch": 0.9498234528897974, "grad_norm": 0.660992443561554, 
"learning_rate": 0.00010829628392508525, "loss": 1.3435, "step": 5111 }, { "epoch": 0.9500092919531685, "grad_norm": 0.5971868634223938, "learning_rate": 0.00010826713850965584, "loss": 1.0527, "step": 5112 }, { "epoch": 0.9501951310165396, "grad_norm": 0.7074155807495117, "learning_rate": 0.00010823799238712022, "loss": 0.9946, "step": 5113 }, { "epoch": 0.9503809700799108, "grad_norm": 0.5157049298286438, "learning_rate": 0.0001082088455599713, "loss": 1.0142, "step": 5114 }, { "epoch": 0.950566809143282, "grad_norm": 0.6906478404998779, "learning_rate": 0.00010817969803070211, "loss": 1.3297, "step": 5115 }, { "epoch": 0.9507526482066531, "grad_norm": 0.5686500072479248, "learning_rate": 0.00010815054980180566, "loss": 0.9322, "step": 5116 }, { "epoch": 0.9509384872700242, "grad_norm": 0.6842989921569824, "learning_rate": 0.00010812140087577504, "loss": 1.0405, "step": 5117 }, { "epoch": 0.9511243263333953, "grad_norm": 0.6816885471343994, "learning_rate": 0.0001080922512551035, "loss": 1.3582, "step": 5118 }, { "epoch": 0.9513101653967664, "grad_norm": 0.5930526256561279, "learning_rate": 0.00010806310094228415, "loss": 1.0377, "step": 5119 }, { "epoch": 0.9514960044601375, "grad_norm": 0.6771032214164734, "learning_rate": 0.00010803394993981038, "loss": 1.2365, "step": 5120 }, { "epoch": 0.9516818435235086, "grad_norm": 0.5323301553726196, "learning_rate": 0.00010800479825017553, "loss": 0.8862, "step": 5121 }, { "epoch": 0.9518676825868798, "grad_norm": 0.5637294054031372, "learning_rate": 0.00010797564587587299, "loss": 1.1621, "step": 5122 }, { "epoch": 0.9520535216502509, "grad_norm": 0.6822587251663208, "learning_rate": 0.0001079464928193962, "loss": 0.9469, "step": 5123 }, { "epoch": 0.952239360713622, "grad_norm": 0.7079898118972778, "learning_rate": 0.0001079173390832387, "loss": 1.1282, "step": 5124 }, { "epoch": 0.9524251997769931, "grad_norm": 0.6180583238601685, "learning_rate": 0.0001078881846698941, "loss": 1.2146, "step": 5125 }, { "epoch": 
0.9526110388403642, "grad_norm": 0.5927160978317261, "learning_rate": 0.00010785902958185599, "loss": 1.0453, "step": 5126 }, { "epoch": 0.9527968779037354, "grad_norm": 0.690459668636322, "learning_rate": 0.0001078298738216181, "loss": 1.105, "step": 5127 }, { "epoch": 0.9529827169671065, "grad_norm": 0.5838513374328613, "learning_rate": 0.00010780071739167416, "loss": 1.0425, "step": 5128 }, { "epoch": 0.9531685560304776, "grad_norm": 0.5430728793144226, "learning_rate": 0.00010777156029451805, "loss": 0.9739, "step": 5129 }, { "epoch": 0.9533543950938487, "grad_norm": 0.5774003863334656, "learning_rate": 0.00010774240253264356, "loss": 0.9745, "step": 5130 }, { "epoch": 0.9535402341572199, "grad_norm": 0.6548106670379639, "learning_rate": 0.00010771324410854466, "loss": 0.9721, "step": 5131 }, { "epoch": 0.953726073220591, "grad_norm": 0.5925551056861877, "learning_rate": 0.00010768408502471534, "loss": 1.1294, "step": 5132 }, { "epoch": 0.9539119122839621, "grad_norm": 0.66984623670578, "learning_rate": 0.0001076549252836496, "loss": 1.2196, "step": 5133 }, { "epoch": 0.9540977513473332, "grad_norm": 0.6303706765174866, "learning_rate": 0.00010762576488784153, "loss": 1.09, "step": 5134 }, { "epoch": 0.9542835904107043, "grad_norm": 0.5988698601722717, "learning_rate": 0.00010759660383978532, "loss": 1.0056, "step": 5135 }, { "epoch": 0.9544694294740754, "grad_norm": 0.5174127221107483, "learning_rate": 0.00010756744214197518, "loss": 0.8531, "step": 5136 }, { "epoch": 0.9546552685374465, "grad_norm": 0.6051040291786194, "learning_rate": 0.00010753827979690536, "loss": 0.9742, "step": 5137 }, { "epoch": 0.9548411076008176, "grad_norm": 0.6123926043510437, "learning_rate": 0.00010750911680707016, "loss": 1.13, "step": 5138 }, { "epoch": 0.9550269466641889, "grad_norm": 0.9708890914916992, "learning_rate": 0.000107479953174964, "loss": 1.2774, "step": 5139 }, { "epoch": 0.95521278572756, "grad_norm": 0.47982341051101685, "learning_rate": 0.00010745078890308127, 
"loss": 0.9506, "step": 5140 }, { "epoch": 0.9553986247909311, "grad_norm": 0.5771783590316772, "learning_rate": 0.00010742162399391642, "loss": 1.2626, "step": 5141 }, { "epoch": 0.9555844638543022, "grad_norm": 0.5633402466773987, "learning_rate": 0.00010739245844996406, "loss": 1.2364, "step": 5142 }, { "epoch": 0.9557703029176733, "grad_norm": 0.6553140878677368, "learning_rate": 0.00010736329227371876, "loss": 1.1984, "step": 5143 }, { "epoch": 0.9559561419810444, "grad_norm": 0.5633707642555237, "learning_rate": 0.00010733412546767513, "loss": 0.966, "step": 5144 }, { "epoch": 0.9561419810444155, "grad_norm": 0.5353180766105652, "learning_rate": 0.00010730495803432794, "loss": 0.9972, "step": 5145 }, { "epoch": 0.9563278201077866, "grad_norm": 0.6058616638183594, "learning_rate": 0.00010727578997617189, "loss": 1.0613, "step": 5146 }, { "epoch": 0.9565136591711578, "grad_norm": 0.7414457201957703, "learning_rate": 0.00010724662129570177, "loss": 1.1391, "step": 5147 }, { "epoch": 0.9566994982345289, "grad_norm": 0.6592400670051575, "learning_rate": 0.0001072174519954125, "loss": 1.1756, "step": 5148 }, { "epoch": 0.9568853372979, "grad_norm": 0.5891792178153992, "learning_rate": 0.00010718828207779894, "loss": 1.1063, "step": 5149 }, { "epoch": 0.9570711763612711, "grad_norm": 0.7033983469009399, "learning_rate": 0.00010715911154535608, "loss": 1.2711, "step": 5150 }, { "epoch": 0.9572570154246423, "grad_norm": 0.5749421715736389, "learning_rate": 0.00010712994040057894, "loss": 1.3341, "step": 5151 }, { "epoch": 0.9574428544880134, "grad_norm": 0.5675082802772522, "learning_rate": 0.0001071007686459626, "loss": 0.9953, "step": 5152 }, { "epoch": 0.9576286935513845, "grad_norm": 0.5799729824066162, "learning_rate": 0.00010707159628400216, "loss": 1.0317, "step": 5153 }, { "epoch": 0.9578145326147556, "grad_norm": 0.5823427438735962, "learning_rate": 0.0001070424233171928, "loss": 1.1268, "step": 5154 }, { "epoch": 0.9580003716781268, "grad_norm": 
0.6185470819473267, "learning_rate": 0.00010701324974802975, "loss": 1.0075, "step": 5155 }, { "epoch": 0.9581862107414979, "grad_norm": 1.9274375438690186, "learning_rate": 0.00010698407557900832, "loss": 1.1037, "step": 5156 }, { "epoch": 0.958372049804869, "grad_norm": 0.875440239906311, "learning_rate": 0.00010695490081262376, "loss": 0.9812, "step": 5157 }, { "epoch": 0.9585578888682401, "grad_norm": 0.5173926949501038, "learning_rate": 0.00010692572545137154, "loss": 0.8942, "step": 5158 }, { "epoch": 0.9587437279316112, "grad_norm": 0.6853187084197998, "learning_rate": 0.00010689654949774706, "loss": 0.9427, "step": 5159 }, { "epoch": 0.9589295669949823, "grad_norm": 0.5588276386260986, "learning_rate": 0.00010686737295424578, "loss": 1.1392, "step": 5160 }, { "epoch": 0.9591154060583534, "grad_norm": 0.7493889331817627, "learning_rate": 0.00010683819582336327, "loss": 1.1085, "step": 5161 }, { "epoch": 0.9593012451217245, "grad_norm": 0.6834607720375061, "learning_rate": 0.00010680901810759505, "loss": 1.3353, "step": 5162 }, { "epoch": 0.9594870841850958, "grad_norm": 0.7147015929222107, "learning_rate": 0.00010677983980943684, "loss": 0.8831, "step": 5163 }, { "epoch": 0.9596729232484669, "grad_norm": 0.5647204518318176, "learning_rate": 0.00010675066093138424, "loss": 0.9929, "step": 5164 }, { "epoch": 0.959858762311838, "grad_norm": 0.5069839954376221, "learning_rate": 0.00010672148147593304, "loss": 1.0218, "step": 5165 }, { "epoch": 0.9600446013752091, "grad_norm": 0.5167731046676636, "learning_rate": 0.000106692301445579, "loss": 0.9308, "step": 5166 }, { "epoch": 0.9602304404385802, "grad_norm": 0.5342850089073181, "learning_rate": 0.00010666312084281795, "loss": 1.1225, "step": 5167 }, { "epoch": 0.9604162795019513, "grad_norm": 0.726222038269043, "learning_rate": 0.00010663393967014579, "loss": 1.2616, "step": 5168 }, { "epoch": 0.9606021185653224, "grad_norm": 0.45744410157203674, "learning_rate": 0.00010660475793005842, "loss": 0.6474, "step": 
5169 }, { "epoch": 0.9607879576286935, "grad_norm": 0.677802324295044, "learning_rate": 0.00010657557562505183, "loss": 1.092, "step": 5170 }, { "epoch": 0.9609737966920646, "grad_norm": 0.571209728717804, "learning_rate": 0.00010654639275762202, "loss": 1.031, "step": 5171 }, { "epoch": 0.9611596357554358, "grad_norm": 0.6979965567588806, "learning_rate": 0.00010651720933026513, "loss": 1.1168, "step": 5172 }, { "epoch": 0.9613454748188069, "grad_norm": 0.5759708881378174, "learning_rate": 0.00010648802534547721, "loss": 1.1165, "step": 5173 }, { "epoch": 0.961531313882178, "grad_norm": 1.40087890625, "learning_rate": 0.00010645884080575448, "loss": 1.6289, "step": 5174 }, { "epoch": 0.9617171529455492, "grad_norm": 0.559787392616272, "learning_rate": 0.00010642965571359313, "loss": 1.1658, "step": 5175 }, { "epoch": 0.9619029920089203, "grad_norm": 0.7585298418998718, "learning_rate": 0.00010640047007148944, "loss": 0.9917, "step": 5176 }, { "epoch": 0.9620888310722914, "grad_norm": 0.6009484529495239, "learning_rate": 0.00010637128388193969, "loss": 0.9554, "step": 5177 }, { "epoch": 0.9622746701356625, "grad_norm": 0.5697795748710632, "learning_rate": 0.00010634209714744027, "loss": 1.0104, "step": 5178 }, { "epoch": 0.9624605091990336, "grad_norm": 0.5680558681488037, "learning_rate": 0.00010631290987048757, "loss": 0.991, "step": 5179 }, { "epoch": 0.9626463482624048, "grad_norm": 0.645099937915802, "learning_rate": 0.00010628372205357806, "loss": 1.2512, "step": 5180 }, { "epoch": 0.9628321873257759, "grad_norm": 0.6746792793273926, "learning_rate": 0.00010625453369920822, "loss": 1.0832, "step": 5181 }, { "epoch": 0.963018026389147, "grad_norm": 0.6117158532142639, "learning_rate": 0.00010622534480987459, "loss": 1.1984, "step": 5182 }, { "epoch": 0.9632038654525181, "grad_norm": 0.6414322853088379, "learning_rate": 0.00010619615538807374, "loss": 1.1448, "step": 5183 }, { "epoch": 0.9633897045158892, "grad_norm": 0.6798393726348877, "learning_rate": 
0.00010616696543630235, "loss": 1.073, "step": 5184 }, { "epoch": 0.9635755435792603, "grad_norm": 0.5420655608177185, "learning_rate": 0.00010613777495705708, "loss": 1.0895, "step": 5185 }, { "epoch": 0.9637613826426314, "grad_norm": 0.7268361449241638, "learning_rate": 0.00010610858395283462, "loss": 1.0711, "step": 5186 }, { "epoch": 0.9639472217060026, "grad_norm": 0.6947235465049744, "learning_rate": 0.00010607939242613183, "loss": 1.0781, "step": 5187 }, { "epoch": 0.9641330607693738, "grad_norm": 0.5997654795646667, "learning_rate": 0.00010605020037944545, "loss": 1.2603, "step": 5188 }, { "epoch": 0.9643188998327449, "grad_norm": 0.6570544838905334, "learning_rate": 0.00010602100781527233, "loss": 1.0687, "step": 5189 }, { "epoch": 0.964504738896116, "grad_norm": 0.5399557948112488, "learning_rate": 0.0001059918147361094, "loss": 1.1065, "step": 5190 }, { "epoch": 0.9646905779594871, "grad_norm": 0.581305742263794, "learning_rate": 0.00010596262114445363, "loss": 1.0601, "step": 5191 }, { "epoch": 0.9648764170228582, "grad_norm": 0.6789758801460266, "learning_rate": 0.00010593342704280198, "loss": 0.9875, "step": 5192 }, { "epoch": 0.9650622560862293, "grad_norm": 0.6596348881721497, "learning_rate": 0.00010590423243365146, "loss": 1.1663, "step": 5193 }, { "epoch": 0.9652480951496004, "grad_norm": 0.5195457339286804, "learning_rate": 0.00010587503731949921, "loss": 0.6786, "step": 5194 }, { "epoch": 0.9654339342129715, "grad_norm": 0.4248422682285309, "learning_rate": 0.00010584584170284233, "loss": 0.7107, "step": 5195 }, { "epoch": 0.9656197732763427, "grad_norm": 0.5786765813827515, "learning_rate": 0.00010581664558617795, "loss": 1.0612, "step": 5196 }, { "epoch": 0.9658056123397138, "grad_norm": 0.5303447842597961, "learning_rate": 0.0001057874489720033, "loss": 0.8196, "step": 5197 }, { "epoch": 0.965991451403085, "grad_norm": 0.5511763095855713, "learning_rate": 0.00010575825186281565, "loss": 0.8464, "step": 5198 }, { "epoch": 0.9661772904664561, 
"grad_norm": 0.698489248752594, "learning_rate": 0.00010572905426111226, "loss": 1.0296, "step": 5199 }, { "epoch": 0.9663631295298272, "grad_norm": 0.6211963891983032, "learning_rate": 0.00010569985616939049, "loss": 1.0672, "step": 5200 }, { "epoch": 0.9665489685931983, "grad_norm": 0.5647724270820618, "learning_rate": 0.00010567065759014767, "loss": 1.2416, "step": 5201 }, { "epoch": 0.9667348076565694, "grad_norm": 0.5544220805168152, "learning_rate": 0.0001056414585258813, "loss": 1.1426, "step": 5202 }, { "epoch": 0.9669206467199405, "grad_norm": 0.6381922960281372, "learning_rate": 0.00010561225897908875, "loss": 1.2999, "step": 5203 }, { "epoch": 0.9671064857833116, "grad_norm": 0.5554980635643005, "learning_rate": 0.00010558305895226757, "loss": 1.0449, "step": 5204 }, { "epoch": 0.9672923248466828, "grad_norm": 0.5876613855361938, "learning_rate": 0.0001055538584479153, "loss": 1.0988, "step": 5205 }, { "epoch": 0.9674781639100539, "grad_norm": 0.6700899004936218, "learning_rate": 0.0001055246574685295, "loss": 1.1554, "step": 5206 }, { "epoch": 0.967664002973425, "grad_norm": 0.605531632900238, "learning_rate": 0.00010549545601660784, "loss": 1.1517, "step": 5207 }, { "epoch": 0.9678498420367961, "grad_norm": 0.5939875245094299, "learning_rate": 0.00010546625409464789, "loss": 0.7665, "step": 5208 }, { "epoch": 0.9680356811001672, "grad_norm": 0.7355973124504089, "learning_rate": 0.00010543705170514746, "loss": 0.9617, "step": 5209 }, { "epoch": 0.9682215201635384, "grad_norm": 0.6694480776786804, "learning_rate": 0.00010540784885060424, "loss": 1.0366, "step": 5210 }, { "epoch": 0.9684073592269095, "grad_norm": 0.580572247505188, "learning_rate": 0.000105378645533516, "loss": 0.9349, "step": 5211 }, { "epoch": 0.9685931982902806, "grad_norm": 0.5605287551879883, "learning_rate": 0.00010534944175638059, "loss": 1.1615, "step": 5212 }, { "epoch": 0.9687790373536518, "grad_norm": 0.6726819276809692, "learning_rate": 0.00010532023752169589, "loss": 1.249, 
"step": 5213 }, { "epoch": 0.9689648764170229, "grad_norm": 0.6232327222824097, "learning_rate": 0.00010529103283195976, "loss": 1.0537, "step": 5214 }, { "epoch": 0.969150715480394, "grad_norm": 0.6396980285644531, "learning_rate": 0.00010526182768967015, "loss": 0.9684, "step": 5215 }, { "epoch": 0.9693365545437651, "grad_norm": 0.6493716239929199, "learning_rate": 0.00010523262209732505, "loss": 1.1652, "step": 5216 }, { "epoch": 0.9695223936071362, "grad_norm": 0.7912707924842834, "learning_rate": 0.00010520341605742247, "loss": 1.598, "step": 5217 }, { "epoch": 0.9697082326705073, "grad_norm": 0.6064230799674988, "learning_rate": 0.00010517420957246047, "loss": 0.9873, "step": 5218 }, { "epoch": 0.9698940717338784, "grad_norm": 0.7344752550125122, "learning_rate": 0.00010514500264493714, "loss": 1.1776, "step": 5219 }, { "epoch": 0.9700799107972495, "grad_norm": 0.5158562660217285, "learning_rate": 0.00010511579527735063, "loss": 0.9558, "step": 5220 }, { "epoch": 0.9702657498606208, "grad_norm": 0.5321223735809326, "learning_rate": 0.00010508658747219908, "loss": 0.9554, "step": 5221 }, { "epoch": 0.9704515889239919, "grad_norm": 0.5637287497520447, "learning_rate": 0.00010505737923198069, "loss": 0.9914, "step": 5222 }, { "epoch": 0.970637427987363, "grad_norm": 0.5985487699508667, "learning_rate": 0.00010502817055919376, "loss": 1.0695, "step": 5223 }, { "epoch": 0.9708232670507341, "grad_norm": 0.6146731972694397, "learning_rate": 0.00010499896145633652, "loss": 1.19, "step": 5224 }, { "epoch": 0.9710091061141052, "grad_norm": 0.6370542049407959, "learning_rate": 0.00010496975192590728, "loss": 1.047, "step": 5225 }, { "epoch": 0.9711949451774763, "grad_norm": 0.574313223361969, "learning_rate": 0.00010494054197040444, "loss": 1.2502, "step": 5226 }, { "epoch": 0.9713807842408474, "grad_norm": 0.5866028666496277, "learning_rate": 0.00010491133159232638, "loss": 0.9155, "step": 5227 }, { "epoch": 0.9715666233042185, "grad_norm": 0.5359747409820557, 
"learning_rate": 0.0001048821207941715, "loss": 1.0809, "step": 5228 }, { "epoch": 0.9717524623675897, "grad_norm": 0.5584895610809326, "learning_rate": 0.00010485290957843829, "loss": 1.1471, "step": 5229 }, { "epoch": 0.9719383014309608, "grad_norm": 0.5170494318008423, "learning_rate": 0.00010482369794762521, "loss": 1.0803, "step": 5230 }, { "epoch": 0.9721241404943319, "grad_norm": 0.6326431035995483, "learning_rate": 0.00010479448590423082, "loss": 0.8558, "step": 5231 }, { "epoch": 0.972309979557703, "grad_norm": 0.5903434753417969, "learning_rate": 0.00010476527345075369, "loss": 1.0103, "step": 5232 }, { "epoch": 0.9724958186210741, "grad_norm": 0.6057657599449158, "learning_rate": 0.00010473606058969242, "loss": 1.1428, "step": 5233 }, { "epoch": 0.9726816576844453, "grad_norm": 0.5869147777557373, "learning_rate": 0.0001047068473235457, "loss": 1.2471, "step": 5234 }, { "epoch": 0.9728674967478164, "grad_norm": 0.6794689893722534, "learning_rate": 0.00010467763365481211, "loss": 1.1385, "step": 5235 }, { "epoch": 0.9730533358111875, "grad_norm": 0.6516013741493225, "learning_rate": 0.00010464841958599043, "loss": 1.177, "step": 5236 }, { "epoch": 0.9732391748745586, "grad_norm": 0.6525418758392334, "learning_rate": 0.00010461920511957934, "loss": 1.1904, "step": 5237 }, { "epoch": 0.9734250139379298, "grad_norm": 0.5970218777656555, "learning_rate": 0.00010458999025807767, "loss": 0.95, "step": 5238 }, { "epoch": 0.9736108530013009, "grad_norm": 0.5643072128295898, "learning_rate": 0.00010456077500398421, "loss": 1.1804, "step": 5239 }, { "epoch": 0.973796692064672, "grad_norm": 0.7211046814918518, "learning_rate": 0.00010453155935979781, "loss": 1.0568, "step": 5240 }, { "epoch": 0.9739825311280431, "grad_norm": 0.5928149223327637, "learning_rate": 0.00010450234332801737, "loss": 0.9441, "step": 5241 }, { "epoch": 0.9741683701914142, "grad_norm": 0.49844658374786377, "learning_rate": 0.00010447312691114175, "loss": 0.7909, "step": 5242 }, { "epoch": 
0.9743542092547853, "grad_norm": 0.5963571667671204, "learning_rate": 0.00010444391011166992, "loss": 1.0928, "step": 5243 }, { "epoch": 0.9745400483181564, "grad_norm": 0.6606438755989075, "learning_rate": 0.00010441469293210083, "loss": 0.775, "step": 5244 }, { "epoch": 0.9747258873815275, "grad_norm": 0.40258845686912537, "learning_rate": 0.00010438547537493355, "loss": 0.555, "step": 5245 }, { "epoch": 0.9749117264448988, "grad_norm": 0.7338322401046753, "learning_rate": 0.00010435625744266704, "loss": 1.0914, "step": 5246 }, { "epoch": 0.9750975655082699, "grad_norm": 0.5825830698013306, "learning_rate": 0.00010432703913780044, "loss": 1.0547, "step": 5247 }, { "epoch": 0.975283404571641, "grad_norm": 0.7490541338920593, "learning_rate": 0.00010429782046283285, "loss": 1.3439, "step": 5248 }, { "epoch": 0.9754692436350121, "grad_norm": 0.5554697513580322, "learning_rate": 0.00010426860142026338, "loss": 1.0599, "step": 5249 }, { "epoch": 0.9756550826983832, "grad_norm": 0.6733309030532837, "learning_rate": 0.0001042393820125912, "loss": 1.0955, "step": 5250 }, { "epoch": 0.9758409217617543, "grad_norm": 0.6073945164680481, "learning_rate": 0.0001042101622423155, "loss": 1.1162, "step": 5251 }, { "epoch": 0.9760267608251254, "grad_norm": 0.5888856053352356, "learning_rate": 0.00010418094211193552, "loss": 1.1074, "step": 5252 }, { "epoch": 0.9762125998884965, "grad_norm": 0.7521282434463501, "learning_rate": 0.00010415172162395052, "loss": 1.1723, "step": 5253 }, { "epoch": 0.9763984389518677, "grad_norm": 0.7571302652359009, "learning_rate": 0.00010412250078085983, "loss": 1.207, "step": 5254 }, { "epoch": 0.9765842780152388, "grad_norm": 0.5502439141273499, "learning_rate": 0.00010409327958516273, "loss": 0.8738, "step": 5255 }, { "epoch": 0.97677011707861, "grad_norm": 0.6101023554801941, "learning_rate": 0.00010406405803935857, "loss": 0.955, "step": 5256 }, { "epoch": 0.976955956141981, "grad_norm": 0.5006557106971741, "learning_rate": 
0.00010403483614594675, "loss": 1.0782, "step": 5257 }, { "epoch": 0.9771417952053522, "grad_norm": 0.5799928307533264, "learning_rate": 0.00010400561390742668, "loss": 0.893, "step": 5258 }, { "epoch": 0.9773276342687233, "grad_norm": 0.5734309554100037, "learning_rate": 0.00010397639132629778, "loss": 1.1346, "step": 5259 }, { "epoch": 0.9775134733320944, "grad_norm": 0.6255081295967102, "learning_rate": 0.00010394716840505951, "loss": 1.0596, "step": 5260 }, { "epoch": 0.9776993123954655, "grad_norm": 0.6332784295082092, "learning_rate": 0.00010391794514621143, "loss": 1.1199, "step": 5261 }, { "epoch": 0.9778851514588367, "grad_norm": 0.524943470954895, "learning_rate": 0.00010388872155225304, "loss": 0.811, "step": 5262 }, { "epoch": 0.9780709905222078, "grad_norm": 0.5607344508171082, "learning_rate": 0.00010385949762568387, "loss": 1.035, "step": 5263 }, { "epoch": 0.9782568295855789, "grad_norm": 0.737356960773468, "learning_rate": 0.00010383027336900355, "loss": 1.2161, "step": 5264 }, { "epoch": 0.97844266864895, "grad_norm": 0.5566937923431396, "learning_rate": 0.00010380104878471167, "loss": 1.0571, "step": 5265 }, { "epoch": 0.9786285077123211, "grad_norm": 0.7953934073448181, "learning_rate": 0.00010377182387530784, "loss": 1.1642, "step": 5266 }, { "epoch": 0.9788143467756922, "grad_norm": 0.5225697755813599, "learning_rate": 0.00010374259864329177, "loss": 1.0756, "step": 5267 }, { "epoch": 0.9790001858390633, "grad_norm": 0.5242747664451599, "learning_rate": 0.00010371337309116316, "loss": 1.1096, "step": 5268 }, { "epoch": 0.9791860249024344, "grad_norm": 0.6449241042137146, "learning_rate": 0.00010368414722142172, "loss": 0.9631, "step": 5269 }, { "epoch": 0.9793718639658057, "grad_norm": 0.7144589424133301, "learning_rate": 0.00010365492103656723, "loss": 1.0049, "step": 5270 }, { "epoch": 0.9795577030291768, "grad_norm": 0.6562042832374573, "learning_rate": 0.00010362569453909942, "loss": 0.8826, "step": 5271 }, { "epoch": 0.9797435420925479, 
"grad_norm": 0.44900819659233093, "learning_rate": 0.00010359646773151814, "loss": 0.8152, "step": 5272 }, { "epoch": 0.979929381155919, "grad_norm": 0.4567967355251312, "learning_rate": 0.00010356724061632316, "loss": 0.8494, "step": 5273 }, { "epoch": 0.9801152202192901, "grad_norm": 0.7190163135528564, "learning_rate": 0.00010353801319601441, "loss": 1.0996, "step": 5274 }, { "epoch": 0.9803010592826612, "grad_norm": 0.6902177333831787, "learning_rate": 0.00010350878547309173, "loss": 1.2026, "step": 5275 }, { "epoch": 0.9804868983460323, "grad_norm": 0.5994812250137329, "learning_rate": 0.00010347955745005506, "loss": 0.8673, "step": 5276 }, { "epoch": 0.9806727374094034, "grad_norm": 0.5680616497993469, "learning_rate": 0.00010345032912940433, "loss": 1.1063, "step": 5277 }, { "epoch": 0.9808585764727745, "grad_norm": 0.5536043643951416, "learning_rate": 0.0001034211005136395, "loss": 1.1424, "step": 5278 }, { "epoch": 0.9810444155361457, "grad_norm": 0.687205970287323, "learning_rate": 0.00010339187160526051, "loss": 0.9672, "step": 5279 }, { "epoch": 0.9812302545995168, "grad_norm": 0.6523383259773254, "learning_rate": 0.00010336264240676745, "loss": 0.9638, "step": 5280 }, { "epoch": 0.981416093662888, "grad_norm": 1.3020282983779907, "learning_rate": 0.00010333341292066028, "loss": 1.6678, "step": 5281 }, { "epoch": 0.9816019327262591, "grad_norm": 0.6111271977424622, "learning_rate": 0.00010330418314943911, "loss": 1.0743, "step": 5282 }, { "epoch": 0.9817877717896302, "grad_norm": 0.6352726817131042, "learning_rate": 0.00010327495309560403, "loss": 1.1428, "step": 5283 }, { "epoch": 0.9819736108530013, "grad_norm": 2.6539978981018066, "learning_rate": 0.00010324572276165512, "loss": 2.0701, "step": 5284 }, { "epoch": 0.9821594499163724, "grad_norm": 0.5675703287124634, "learning_rate": 0.00010321649215009253, "loss": 0.9753, "step": 5285 }, { "epoch": 0.9823452889797435, "grad_norm": 0.634197473526001, "learning_rate": 0.00010318726126341642, "loss": 
1.1443, "step": 5286 }, { "epoch": 0.9825311280431147, "grad_norm": 0.6586654186248779, "learning_rate": 0.00010315803010412697, "loss": 1.0275, "step": 5287 }, { "epoch": 0.9827169671064858, "grad_norm": 0.657075047492981, "learning_rate": 0.00010312879867472436, "loss": 1.2704, "step": 5288 }, { "epoch": 0.9829028061698569, "grad_norm": 0.6138443946838379, "learning_rate": 0.00010309956697770881, "loss": 0.9674, "step": 5289 }, { "epoch": 0.983088645233228, "grad_norm": 0.8176442384719849, "learning_rate": 0.00010307033501558065, "loss": 1.3409, "step": 5290 }, { "epoch": 0.9832744842965991, "grad_norm": 0.5863398909568787, "learning_rate": 0.0001030411027908401, "loss": 0.8171, "step": 5291 }, { "epoch": 0.9834603233599702, "grad_norm": 0.5948399305343628, "learning_rate": 0.00010301187030598744, "loss": 0.9919, "step": 5292 }, { "epoch": 0.9836461624233414, "grad_norm": 0.5358024835586548, "learning_rate": 0.00010298263756352302, "loss": 1.0402, "step": 5293 }, { "epoch": 0.9838320014867125, "grad_norm": 0.7168769836425781, "learning_rate": 0.00010295340456594714, "loss": 0.9947, "step": 5294 }, { "epoch": 0.9840178405500837, "grad_norm": 0.6695325374603271, "learning_rate": 0.00010292417131576018, "loss": 1.2622, "step": 5295 }, { "epoch": 0.9842036796134548, "grad_norm": 0.6197385191917419, "learning_rate": 0.00010289493781546256, "loss": 1.1881, "step": 5296 }, { "epoch": 0.9843895186768259, "grad_norm": 0.6674443483352661, "learning_rate": 0.00010286570406755461, "loss": 1.0887, "step": 5297 }, { "epoch": 0.984575357740197, "grad_norm": 0.6339353322982788, "learning_rate": 0.00010283647007453681, "loss": 1.1297, "step": 5298 }, { "epoch": 0.9847611968035681, "grad_norm": 0.5957179069519043, "learning_rate": 0.0001028072358389096, "loss": 0.9305, "step": 5299 }, { "epoch": 0.9849470358669392, "grad_norm": 0.782689094543457, "learning_rate": 0.00010277800136317346, "loss": 1.2162, "step": 5300 }, { "epoch": 0.9851328749303103, "grad_norm": 0.5918034315109253, 
"learning_rate": 0.00010274876664982883, "loss": 0.8853, "step": 5301 }, { "epoch": 0.9853187139936814, "grad_norm": 0.5832744836807251, "learning_rate": 0.00010271953170137628, "loss": 1.0583, "step": 5302 }, { "epoch": 0.9855045530570526, "grad_norm": 0.6906222105026245, "learning_rate": 0.00010269029652031627, "loss": 0.8884, "step": 5303 }, { "epoch": 0.9856903921204238, "grad_norm": 0.6617158651351929, "learning_rate": 0.0001026610611091494, "loss": 1.2843, "step": 5304 }, { "epoch": 0.9858762311837949, "grad_norm": 0.7408176064491272, "learning_rate": 0.00010263182547037622, "loss": 1.0618, "step": 5305 }, { "epoch": 0.986062070247166, "grad_norm": 0.6209059357643127, "learning_rate": 0.00010260258960649734, "loss": 1.1427, "step": 5306 }, { "epoch": 0.9862479093105371, "grad_norm": 0.6133381128311157, "learning_rate": 0.00010257335352001332, "loss": 1.1346, "step": 5307 }, { "epoch": 0.9864337483739082, "grad_norm": 0.7502332925796509, "learning_rate": 0.00010254411721342481, "loss": 1.2057, "step": 5308 }, { "epoch": 0.9866195874372793, "grad_norm": 0.6478795409202576, "learning_rate": 0.00010251488068923248, "loss": 1.1604, "step": 5309 }, { "epoch": 0.9868054265006504, "grad_norm": 0.6442230939865112, "learning_rate": 0.00010248564394993694, "loss": 1.0143, "step": 5310 }, { "epoch": 0.9869912655640215, "grad_norm": 0.7198289036750793, "learning_rate": 0.00010245640699803892, "loss": 1.1354, "step": 5311 }, { "epoch": 0.9871771046273927, "grad_norm": 0.6207783818244934, "learning_rate": 0.00010242716983603909, "loss": 1.184, "step": 5312 }, { "epoch": 0.9873629436907638, "grad_norm": 0.6826710104942322, "learning_rate": 0.00010239793246643819, "loss": 1.2905, "step": 5313 }, { "epoch": 0.9875487827541349, "grad_norm": 0.6749565005302429, "learning_rate": 0.00010236869489173695, "loss": 1.2629, "step": 5314 }, { "epoch": 0.987734621817506, "grad_norm": 0.6928743124008179, "learning_rate": 0.00010233945711443609, "loss": 1.0613, "step": 5315 }, { "epoch": 
0.9879204608808771, "grad_norm": 0.5754216909408569, "learning_rate": 0.00010231021913703643, "loss": 1.119, "step": 5316 }, { "epoch": 0.9881062999442483, "grad_norm": 0.5343156456947327, "learning_rate": 0.00010228098096203871, "loss": 0.9631, "step": 5317 }, { "epoch": 0.9882921390076194, "grad_norm": 0.526121973991394, "learning_rate": 0.00010225174259194377, "loss": 0.9348, "step": 5318 }, { "epoch": 0.9884779780709905, "grad_norm": 0.7248528003692627, "learning_rate": 0.00010222250402925242, "loss": 1.204, "step": 5319 }, { "epoch": 0.9886638171343617, "grad_norm": 0.7399142980575562, "learning_rate": 0.00010219326527646551, "loss": 1.2729, "step": 5320 }, { "epoch": 0.9888496561977328, "grad_norm": 0.5740293860435486, "learning_rate": 0.00010216402633608389, "loss": 1.1019, "step": 5321 }, { "epoch": 0.9890354952611039, "grad_norm": 0.6210380792617798, "learning_rate": 0.00010213478721060838, "loss": 1.1636, "step": 5322 }, { "epoch": 0.989221334324475, "grad_norm": 0.5840253233909607, "learning_rate": 0.00010210554790253992, "loss": 1.1467, "step": 5323 }, { "epoch": 0.9894071733878461, "grad_norm": 0.5882552266120911, "learning_rate": 0.0001020763084143794, "loss": 1.0356, "step": 5324 }, { "epoch": 0.9895930124512172, "grad_norm": 0.5915971398353577, "learning_rate": 0.00010204706874862777, "loss": 1.3001, "step": 5325 }, { "epoch": 0.9897788515145883, "grad_norm": 0.6104952692985535, "learning_rate": 0.00010201782890778586, "loss": 1.1029, "step": 5326 }, { "epoch": 0.9899646905779594, "grad_norm": 0.6219309568405151, "learning_rate": 0.00010198858889435475, "loss": 1.1958, "step": 5327 }, { "epoch": 0.9901505296413307, "grad_norm": 0.748560905456543, "learning_rate": 0.00010195934871083528, "loss": 0.8558, "step": 5328 }, { "epoch": 0.9903363687047018, "grad_norm": 0.6281746625900269, "learning_rate": 0.00010193010835972849, "loss": 0.9296, "step": 5329 }, { "epoch": 0.9905222077680729, "grad_norm": 0.5284553170204163, "learning_rate": 
0.00010190086784353538, "loss": 1.2851, "step": 5330 }, { "epoch": 0.990708046831444, "grad_norm": 0.5357339382171631, "learning_rate": 0.00010187162716475694, "loss": 0.9738, "step": 5331 }, { "epoch": 0.9908938858948151, "grad_norm": 0.7003484964370728, "learning_rate": 0.00010184238632589416, "loss": 1.1544, "step": 5332 }, { "epoch": 0.9910797249581862, "grad_norm": 2.5034542083740234, "learning_rate": 0.0001018131453294481, "loss": 1.7585, "step": 5333 }, { "epoch": 0.9912655640215573, "grad_norm": 0.5779796242713928, "learning_rate": 0.0001017839041779198, "loss": 1.2848, "step": 5334 }, { "epoch": 0.9914514030849284, "grad_norm": 0.5366520881652832, "learning_rate": 0.00010175466287381034, "loss": 0.9984, "step": 5335 }, { "epoch": 0.9916372421482996, "grad_norm": 0.6219021081924438, "learning_rate": 0.00010172542141962074, "loss": 1.137, "step": 5336 }, { "epoch": 0.9918230812116707, "grad_norm": 0.5030076503753662, "learning_rate": 0.00010169617981785213, "loss": 0.8376, "step": 5337 }, { "epoch": 0.9920089202750418, "grad_norm": 0.5434117317199707, "learning_rate": 0.00010166693807100562, "loss": 1.0558, "step": 5338 }, { "epoch": 0.992194759338413, "grad_norm": 0.5399218201637268, "learning_rate": 0.00010163769618158227, "loss": 1.0514, "step": 5339 }, { "epoch": 0.992380598401784, "grad_norm": 0.7620237469673157, "learning_rate": 0.00010160845415208322, "loss": 0.9099, "step": 5340 }, { "epoch": 0.9925664374651552, "grad_norm": 0.6571434736251831, "learning_rate": 0.00010157921198500961, "loss": 1.2444, "step": 5341 }, { "epoch": 0.9927522765285263, "grad_norm": 0.5245940089225769, "learning_rate": 0.00010154996968286261, "loss": 0.9128, "step": 5342 }, { "epoch": 0.9929381155918974, "grad_norm": 0.49775126576423645, "learning_rate": 0.00010152072724814332, "loss": 0.589, "step": 5343 }, { "epoch": 0.9931239546552686, "grad_norm": 0.5503200888633728, "learning_rate": 0.00010149148468335296, "loss": 1.0956, "step": 5344 }, { "epoch": 0.9933097937186397, 
"grad_norm": 0.599502444267273, "learning_rate": 0.00010146224199099273, "loss": 1.0305, "step": 5345 }, { "epoch": 0.9934956327820108, "grad_norm": 0.5318445563316345, "learning_rate": 0.00010143299917356373, "loss": 1.1173, "step": 5346 }, { "epoch": 0.9936814718453819, "grad_norm": 2.255668878555298, "learning_rate": 0.00010140375623356724, "loss": 2.0431, "step": 5347 }, { "epoch": 0.993867310908753, "grad_norm": 0.7480182647705078, "learning_rate": 0.00010137451317350442, "loss": 1.2049, "step": 5348 }, { "epoch": 0.9940531499721241, "grad_norm": 0.6217489838600159, "learning_rate": 0.00010134526999587653, "loss": 0.8339, "step": 5349 }, { "epoch": 0.9942389890354952, "grad_norm": 0.7519838213920593, "learning_rate": 0.00010131602670318477, "loss": 0.9099, "step": 5350 }, { "epoch": 0.9944248280988663, "grad_norm": 0.5862378478050232, "learning_rate": 0.00010128678329793042, "loss": 1.1724, "step": 5351 }, { "epoch": 0.9946106671622374, "grad_norm": 0.6280035376548767, "learning_rate": 0.00010125753978261475, "loss": 1.1678, "step": 5352 }, { "epoch": 0.9947965062256087, "grad_norm": 0.6822145581245422, "learning_rate": 0.00010122829615973895, "loss": 1.2307, "step": 5353 }, { "epoch": 0.9949823452889798, "grad_norm": 0.5909119844436646, "learning_rate": 0.00010119905243180432, "loss": 1.1142, "step": 5354 }, { "epoch": 0.9951681843523509, "grad_norm": 0.6381353139877319, "learning_rate": 0.00010116980860131214, "loss": 1.0991, "step": 5355 }, { "epoch": 0.995354023415722, "grad_norm": 0.5839842557907104, "learning_rate": 0.00010114056467076372, "loss": 1.1673, "step": 5356 }, { "epoch": 0.9955398624790931, "grad_norm": 0.49389171600341797, "learning_rate": 0.0001011113206426603, "loss": 0.9878, "step": 5357 }, { "epoch": 0.9957257015424642, "grad_norm": 0.6194879412651062, "learning_rate": 0.00010108207651950325, "loss": 1.15, "step": 5358 }, { "epoch": 0.9959115406058353, "grad_norm": 0.6395465731620789, "learning_rate": 0.00010105283230379387, "loss": 
0.9558, "step": 5359 }, { "epoch": 0.9960973796692064, "grad_norm": 0.5701009631156921, "learning_rate": 0.00010102358799803347, "loss": 0.9805, "step": 5360 }, { "epoch": 0.9962832187325776, "grad_norm": 0.5622742772102356, "learning_rate": 0.00010099434360472337, "loss": 1.3543, "step": 5361 }, { "epoch": 0.9964690577959487, "grad_norm": 0.5801060199737549, "learning_rate": 0.00010096509912636489, "loss": 0.8837, "step": 5362 }, { "epoch": 0.9966548968593198, "grad_norm": 0.6651684045791626, "learning_rate": 0.00010093585456545942, "loss": 1.1386, "step": 5363 }, { "epoch": 0.996840735922691, "grad_norm": 0.6739777326583862, "learning_rate": 0.00010090660992450826, "loss": 1.2695, "step": 5364 }, { "epoch": 0.9970265749860621, "grad_norm": 0.5893535614013672, "learning_rate": 0.00010087736520601281, "loss": 0.8915, "step": 5365 }, { "epoch": 0.9972124140494332, "grad_norm": 0.6335767507553101, "learning_rate": 0.00010084812041247442, "loss": 0.9015, "step": 5366 }, { "epoch": 0.9973982531128043, "grad_norm": 0.5435164570808411, "learning_rate": 0.00010081887554639447, "loss": 0.9378, "step": 5367 }, { "epoch": 0.9975840921761754, "grad_norm": 0.6234378814697266, "learning_rate": 0.00010078963061027433, "loss": 0.9928, "step": 5368 }, { "epoch": 0.9977699312395466, "grad_norm": 0.727533221244812, "learning_rate": 0.00010076038560661534, "loss": 1.1077, "step": 5369 }, { "epoch": 0.9979557703029177, "grad_norm": 0.5350732207298279, "learning_rate": 0.00010073114053791894, "loss": 1.0918, "step": 5370 }, { "epoch": 0.9981416093662888, "grad_norm": 0.6955908536911011, "learning_rate": 0.00010070189540668651, "loss": 1.0372, "step": 5371 }, { "epoch": 0.9983274484296599, "grad_norm": 0.7087992429733276, "learning_rate": 0.00010067265021541944, "loss": 1.1127, "step": 5372 }, { "epoch": 0.998513287493031, "grad_norm": 0.646903932094574, "learning_rate": 0.00010064340496661918, "loss": 1.074, "step": 5373 }, { "epoch": 0.9986991265564021, "grad_norm": 
0.5559451580047607, "learning_rate": 0.00010061415966278708, "loss": 1.0225, "step": 5374 }, { "epoch": 0.9988849656197732, "grad_norm": 0.6143019795417786, "learning_rate": 0.00010058491430642458, "loss": 1.1226, "step": 5375 }, { "epoch": 0.9990708046831444, "grad_norm": 0.6290916204452515, "learning_rate": 0.00010055566890003309, "loss": 1.2038, "step": 5376 }, { "epoch": 0.9992566437465156, "grad_norm": 0.6341618299484253, "learning_rate": 0.00010052642344611402, "loss": 1.0323, "step": 5377 }, { "epoch": 0.9994424828098867, "grad_norm": 0.5217851996421814, "learning_rate": 0.00010049717794716884, "loss": 1.0468, "step": 5378 }, { "epoch": 0.9996283218732578, "grad_norm": 0.6314026117324829, "learning_rate": 0.00010046793240569894, "loss": 1.0798, "step": 5379 }, { "epoch": 0.9998141609366289, "grad_norm": 0.5676242709159851, "learning_rate": 0.00010043868682420579, "loss": 1.239, "step": 5380 }, { "epoch": 1.0, "grad_norm": 0.6378072500228882, "learning_rate": 0.0001004094412051908, "loss": 1.1327, "step": 5381 }, { "epoch": 1.000185839063371, "grad_norm": 0.6097329258918762, "learning_rate": 0.00010038019555115543, "loss": 1.0793, "step": 5382 }, { "epoch": 1.0003716781267422, "grad_norm": 0.6979326605796814, "learning_rate": 0.00010035094986460112, "loss": 1.4729, "step": 5383 }, { "epoch": 1.0005575171901133, "grad_norm": 0.4561534523963928, "learning_rate": 0.00010032170414802926, "loss": 0.9795, "step": 5384 }, { "epoch": 1.0005575171901133, "eval_loss": 1.0240511894226074, "eval_runtime": 23.0165, "eval_samples_per_second": 47.444, "eval_steps_per_second": 23.722, "step": 5384 }, { "epoch": 1.0007433562534844, "grad_norm": 0.76151442527771, "learning_rate": 0.00010029245840394137, "loss": 1.1695, "step": 5385 }, { "epoch": 1.0009291953168555, "grad_norm": 0.5457819104194641, "learning_rate": 0.0001002632126348389, "loss": 0.8923, "step": 5386 }, { "epoch": 1.0011150343802266, "grad_norm": 0.57408207654953, "learning_rate": 0.00010023396684322325, "loss": 
0.7989, "step": 5387 }, { "epoch": 1.0013008734435977, "grad_norm": 0.5285831093788147, "learning_rate": 0.00010020472103159595, "loss": 0.9254, "step": 5388 }, { "epoch": 1.0014867125069689, "grad_norm": 0.5257111191749573, "learning_rate": 0.0001001754752024584, "loss": 0.9928, "step": 5389 }, { "epoch": 1.0016725515703402, "grad_norm": 0.5929964184761047, "learning_rate": 0.0001001462293583121, "loss": 1.17, "step": 5390 }, { "epoch": 1.0018583906337113, "grad_norm": 0.6292116641998291, "learning_rate": 0.00010011698350165844, "loss": 1.2623, "step": 5391 }, { "epoch": 1.0020442296970824, "grad_norm": 0.9348997473716736, "learning_rate": 0.00010008773763499897, "loss": 1.1079, "step": 5392 }, { "epoch": 1.0022300687604535, "grad_norm": 0.6319088339805603, "learning_rate": 0.00010005849176083507, "loss": 1.253, "step": 5393 }, { "epoch": 1.0024159078238246, "grad_norm": null, "learning_rate": 0.00010005849176083507, "loss": 2.0546, "step": 5394 }, { "epoch": 1.0026017468871957, "grad_norm": 0.6206772327423096, "learning_rate": 0.00010002924588166827, "loss": 1.1599, "step": 5395 }, { "epoch": 1.0027875859505668, "grad_norm": 0.6307805776596069, "learning_rate": 0.0001, "loss": 1.1913, "step": 5396 }, { "epoch": 1.002973425013938, "grad_norm": 0.6121634840965271, "learning_rate": 9.997075411833174e-05, "loss": 1.3004, "step": 5397 }, { "epoch": 1.003159264077309, "grad_norm": 0.5531421899795532, "learning_rate": 9.994150823916494e-05, "loss": 0.9743, "step": 5398 }, { "epoch": 1.0033451031406802, "grad_norm": 0.49836012721061707, "learning_rate": 9.991226236500106e-05, "loss": 1.1118, "step": 5399 }, { "epoch": 1.0035309422040513, "grad_norm": 0.6006326079368591, "learning_rate": 9.988301649834157e-05, "loss": 1.15, "step": 5400 }, { "epoch": 1.0037167812674224, "grad_norm": 0.5552299618721008, "learning_rate": 9.985377064168792e-05, "loss": 0.7896, "step": 5401 }, { "epoch": 1.0039026203307935, "grad_norm": 0.5896706581115723, "learning_rate": 
9.982452479754163e-05, "loss": 1.0079, "step": 5402 }, { "epoch": 1.0040884593941646, "grad_norm": 0.5791441202163696, "learning_rate": 9.979527896840407e-05, "loss": 1.0957, "step": 5403 }, { "epoch": 1.0042742984575357, "grad_norm": 0.6279758214950562, "learning_rate": 9.976603315677674e-05, "loss": 1.0818, "step": 5404 }, { "epoch": 1.0044601375209068, "grad_norm": 0.6163389086723328, "learning_rate": 9.973678736516113e-05, "loss": 0.7001, "step": 5405 }, { "epoch": 1.0046459765842781, "grad_norm": 0.7090188264846802, "learning_rate": 9.970754159605864e-05, "loss": 1.1302, "step": 5406 }, { "epoch": 1.0048318156476492, "grad_norm": 0.5731585025787354, "learning_rate": 9.967829585197076e-05, "loss": 1.1057, "step": 5407 }, { "epoch": 1.0050176547110203, "grad_norm": 0.5643604397773743, "learning_rate": 9.964905013539892e-05, "loss": 1.0489, "step": 5408 }, { "epoch": 1.0052034937743914, "grad_norm": 0.6144115924835205, "learning_rate": 9.961980444884461e-05, "loss": 1.0398, "step": 5409 }, { "epoch": 1.0053893328377626, "grad_norm": 0.6871153116226196, "learning_rate": 9.959055879480921e-05, "loss": 0.9326, "step": 5410 }, { "epoch": 1.0055751719011337, "grad_norm": 0.6981655359268188, "learning_rate": 9.956131317579421e-05, "loss": 1.0339, "step": 5411 }, { "epoch": 1.0057610109645048, "grad_norm": 0.5825377702713013, "learning_rate": 9.953206759430107e-05, "loss": 0.906, "step": 5412 }, { "epoch": 1.0059468500278759, "grad_norm": 0.6913666129112244, "learning_rate": 9.950282205283117e-05, "loss": 1.0416, "step": 5413 }, { "epoch": 1.006132689091247, "grad_norm": 0.6037838459014893, "learning_rate": 9.947357655388599e-05, "loss": 1.0389, "step": 5414 }, { "epoch": 1.006318528154618, "grad_norm": 0.6384328603744507, "learning_rate": 9.944433109996692e-05, "loss": 1.1716, "step": 5415 }, { "epoch": 1.0065043672179892, "grad_norm": 0.6024790406227112, "learning_rate": 9.941508569357545e-05, "loss": 0.9292, "step": 5416 }, { "epoch": 1.0066902062813603, "grad_norm": 
0.5940973162651062, "learning_rate": 9.938584033721294e-05, "loss": 1.002, "step": 5417 }, { "epoch": 1.0068760453447314, "grad_norm": 0.5688101053237915, "learning_rate": 9.935659503338082e-05, "loss": 0.9342, "step": 5418 }, { "epoch": 1.0070618844081025, "grad_norm": 0.6713395118713379, "learning_rate": 9.932734978458057e-05, "loss": 1.1313, "step": 5419 }, { "epoch": 1.0072477234714736, "grad_norm": 0.597057580947876, "learning_rate": 9.929810459331349e-05, "loss": 1.0405, "step": 5420 }, { "epoch": 1.0074335625348447, "grad_norm": 0.5775242447853088, "learning_rate": 9.926885946208108e-05, "loss": 1.1132, "step": 5421 }, { "epoch": 1.0076194015982158, "grad_norm": 0.4729931950569153, "learning_rate": 9.923961439338468e-05, "loss": 0.8111, "step": 5422 }, { "epoch": 1.0078052406615872, "grad_norm": 0.7457094192504883, "learning_rate": 9.921036938972572e-05, "loss": 1.4792, "step": 5423 }, { "epoch": 1.0079910797249583, "grad_norm": 0.5729731321334839, "learning_rate": 9.918112445360554e-05, "loss": 1.0344, "step": 5424 }, { "epoch": 1.0081769187883294, "grad_norm": 0.6670306324958801, "learning_rate": 9.915187958752556e-05, "loss": 1.0718, "step": 5425 }, { "epoch": 1.0083627578517005, "grad_norm": 0.5350519418716431, "learning_rate": 9.91226347939872e-05, "loss": 1.0075, "step": 5426 }, { "epoch": 1.0085485969150716, "grad_norm": 0.5211275219917297, "learning_rate": 9.909339007549175e-05, "loss": 1.0576, "step": 5427 }, { "epoch": 1.0087344359784427, "grad_norm": 0.6536908149719238, "learning_rate": 9.906414543454063e-05, "loss": 1.0691, "step": 5428 }, { "epoch": 1.0089202750418138, "grad_norm": 0.5251681208610535, "learning_rate": 9.903490087363514e-05, "loss": 0.8627, "step": 5429 }, { "epoch": 1.009106114105185, "grad_norm": 0.5836920142173767, "learning_rate": 9.900565639527668e-05, "loss": 1.0218, "step": 5430 }, { "epoch": 1.009291953168556, "grad_norm": 0.5761560797691345, "learning_rate": 9.897641200196656e-05, "loss": 1.1952, "step": 5431 }, { 
"epoch": 1.0094777922319271, "grad_norm": 0.5624791383743286, "learning_rate": 9.894716769620612e-05, "loss": 1.0669, "step": 5432 }, { "epoch": 1.0096636312952982, "grad_norm": 0.5359252095222473, "learning_rate": 9.891792348049676e-05, "loss": 1.156, "step": 5433 }, { "epoch": 1.0098494703586693, "grad_norm": 0.6264462471008301, "learning_rate": 9.88886793573397e-05, "loss": 0.8701, "step": 5434 }, { "epoch": 1.0100353094220405, "grad_norm": 0.5481414198875427, "learning_rate": 9.885943532923633e-05, "loss": 0.9676, "step": 5435 }, { "epoch": 1.0102211484854116, "grad_norm": 0.5599638819694519, "learning_rate": 9.883019139868789e-05, "loss": 1.0764, "step": 5436 }, { "epoch": 1.0104069875487827, "grad_norm": 0.5427635312080383, "learning_rate": 9.880094756819572e-05, "loss": 1.1117, "step": 5437 }, { "epoch": 1.0105928266121538, "grad_norm": 0.56276535987854, "learning_rate": 9.877170384026109e-05, "loss": 1.2726, "step": 5438 }, { "epoch": 1.010778665675525, "grad_norm": 0.6589742302894592, "learning_rate": 9.874246021738526e-05, "loss": 1.0081, "step": 5439 }, { "epoch": 1.0109645047388962, "grad_norm": 0.6330572366714478, "learning_rate": 9.87132167020696e-05, "loss": 0.9855, "step": 5440 }, { "epoch": 1.0111503438022673, "grad_norm": 0.5472434163093567, "learning_rate": 9.868397329681523e-05, "loss": 1.0102, "step": 5441 }, { "epoch": 1.0113361828656384, "grad_norm": 0.6067285537719727, "learning_rate": 9.865473000412352e-05, "loss": 0.7558, "step": 5442 }, { "epoch": 1.0115220219290095, "grad_norm": 0.7218596339225769, "learning_rate": 9.86254868264956e-05, "loss": 1.2293, "step": 5443 }, { "epoch": 1.0117078609923806, "grad_norm": 9.477654457092285, "learning_rate": 9.859624376643281e-05, "loss": 1.7758, "step": 5444 }, { "epoch": 1.0118937000557517, "grad_norm": 0.6567401885986328, "learning_rate": 9.856700082643628e-05, "loss": 0.9737, "step": 5445 }, { "epoch": 1.0120795391191229, "grad_norm": 0.6458907127380371, "learning_rate": 9.853775800900728e-05, 
"loss": 1.2239, "step": 5446 }, { "epoch": 1.012265378182494, "grad_norm": 0.5758485198020935, "learning_rate": 9.850851531664705e-05, "loss": 0.8985, "step": 5447 }, { "epoch": 1.012451217245865, "grad_norm": 0.7790505290031433, "learning_rate": 9.847927275185667e-05, "loss": 1.1216, "step": 5448 }, { "epoch": 1.0126370563092362, "grad_norm": 0.6475933194160461, "learning_rate": 9.845003031713743e-05, "loss": 1.2642, "step": 5449 }, { "epoch": 1.0128228953726073, "grad_norm": 0.585686981678009, "learning_rate": 9.84207880149904e-05, "loss": 1.2002, "step": 5450 }, { "epoch": 1.0130087344359784, "grad_norm": 0.5584164261817932, "learning_rate": 9.83915458479168e-05, "loss": 0.9101, "step": 5451 }, { "epoch": 1.0131945734993495, "grad_norm": 0.6801714897155762, "learning_rate": 9.836230381841775e-05, "loss": 1.1562, "step": 5452 }, { "epoch": 1.0133804125627206, "grad_norm": 0.6504145860671997, "learning_rate": 9.833306192899438e-05, "loss": 1.0963, "step": 5453 }, { "epoch": 1.0135662516260917, "grad_norm": 0.651237428188324, "learning_rate": 9.830382018214788e-05, "loss": 1.3058, "step": 5454 }, { "epoch": 1.0137520906894628, "grad_norm": 0.5830672979354858, "learning_rate": 9.827457858037926e-05, "loss": 0.9947, "step": 5455 }, { "epoch": 1.0139379297528341, "grad_norm": 0.6010664701461792, "learning_rate": 9.824533712618971e-05, "loss": 1.2405, "step": 5456 }, { "epoch": 1.0141237688162053, "grad_norm": 0.5090926885604858, "learning_rate": 9.82160958220802e-05, "loss": 0.8201, "step": 5457 }, { "epoch": 1.0143096078795764, "grad_norm": 0.6615139245986938, "learning_rate": 9.818685467055193e-05, "loss": 1.3562, "step": 5458 }, { "epoch": 1.0144954469429475, "grad_norm": 0.5601671934127808, "learning_rate": 9.815761367410585e-05, "loss": 0.9291, "step": 5459 }, { "epoch": 1.0146812860063186, "grad_norm": 0.6100068688392639, "learning_rate": 9.812837283524306e-05, "loss": 1.0619, "step": 5460 }, { "epoch": 1.0148671250696897, "grad_norm": 0.7463274598121643, 
"learning_rate": 9.809913215646464e-05, "loss": 0.9276, "step": 5461 }, { "epoch": 1.0150529641330608, "grad_norm": 0.6056248545646667, "learning_rate": 9.80698916402715e-05, "loss": 0.9859, "step": 5462 }, { "epoch": 1.015238803196432, "grad_norm": 0.5375638008117676, "learning_rate": 9.804065128916475e-05, "loss": 0.8173, "step": 5463 }, { "epoch": 1.015424642259803, "grad_norm": 0.6325738430023193, "learning_rate": 9.801141110564529e-05, "loss": 0.9961, "step": 5464 }, { "epoch": 1.015610481323174, "grad_norm": 0.5336713790893555, "learning_rate": 9.798217109221415e-05, "loss": 1.0304, "step": 5465 }, { "epoch": 1.0157963203865452, "grad_norm": 0.6309736371040344, "learning_rate": 9.795293125137225e-05, "loss": 0.8847, "step": 5466 }, { "epoch": 1.0159821594499163, "grad_norm": 0.633857250213623, "learning_rate": 9.79236915856206e-05, "loss": 0.842, "step": 5467 }, { "epoch": 1.0161679985132874, "grad_norm": 0.689595639705658, "learning_rate": 9.78944520974601e-05, "loss": 0.9321, "step": 5468 }, { "epoch": 1.0163538375766585, "grad_norm": 0.6718122959136963, "learning_rate": 9.786521278939163e-05, "loss": 1.0637, "step": 5469 }, { "epoch": 1.0165396766400296, "grad_norm": 0.6352799534797668, "learning_rate": 9.783597366391616e-05, "loss": 1.1004, "step": 5470 }, { "epoch": 1.0167255157034008, "grad_norm": 0.6477722525596619, "learning_rate": 9.780673472353451e-05, "loss": 1.145, "step": 5471 }, { "epoch": 1.016911354766772, "grad_norm": 0.5410536527633667, "learning_rate": 9.777749597074761e-05, "loss": 1.0556, "step": 5472 }, { "epoch": 1.0170971938301432, "grad_norm": 0.5520496368408203, "learning_rate": 9.774825740805626e-05, "loss": 0.8118, "step": 5473 }, { "epoch": 1.0172830328935143, "grad_norm": 0.8518608808517456, "learning_rate": 9.77190190379613e-05, "loss": 1.0747, "step": 5474 }, { "epoch": 1.000185839063371, "grad_norm": 0.561589241027832, "learning_rate": 9.768978086296361e-05, "loss": 1.0477, "step": 5475 }, { "epoch": 1.0003716781267422, 
"grad_norm": 0.6613037586212158, "learning_rate": 9.766054288556392e-05, "loss": 1.1642, "step": 5476 }, { "epoch": 1.0005575171901133, "grad_norm": 0.5221126079559326, "learning_rate": 9.76313051082631e-05, "loss": 0.8971, "step": 5477 }, { "epoch": 1.0007433562534844, "grad_norm": 0.5021414756774902, "learning_rate": 9.760206753356184e-05, "loss": 0.6635, "step": 5478 }, { "epoch": 1.0009291953168555, "grad_norm": 0.6210930943489075, "learning_rate": 9.757283016396096e-05, "loss": 0.8806, "step": 5479 }, { "epoch": 1.0011150343802266, "grad_norm": 0.5278712511062622, "learning_rate": 9.754359300196112e-05, "loss": 0.81, "step": 5480 }, { "epoch": 1.0013008734435977, "grad_norm": 0.6328251957893372, "learning_rate": 9.751435605006306e-05, "loss": 1.0801, "step": 5481 }, { "epoch": 1.0014867125069689, "grad_norm": 0.5093181133270264, "learning_rate": 9.748511931076756e-05, "loss": 0.9716, "step": 5482 }, { "epoch": 1.0016725515703402, "grad_norm": 0.5930023193359375, "learning_rate": 9.74558827865752e-05, "loss": 1.1575, "step": 5483 }, { "epoch": 1.0018583906337113, "grad_norm": 0.5147334337234497, "learning_rate": 9.742664647998673e-05, "loss": 0.9622, "step": 5484 }, { "epoch": 1.0020442296970824, "grad_norm": 0.7055521011352539, "learning_rate": 9.73974103935027e-05, "loss": 0.9228, "step": 5485 }, { "epoch": 1.0022300687604535, "grad_norm": 0.550630509853363, "learning_rate": 9.736817452962382e-05, "loss": 1.0508, "step": 5486 }, { "epoch": 1.0024159078238246, "grad_norm": 0.5721721053123474, "learning_rate": 9.733893889085063e-05, "loss": 0.7281, "step": 5487 }, { "epoch": 1.0026017468871957, "grad_norm": 0.49507665634155273, "learning_rate": 9.730970347968374e-05, "loss": 1.0323, "step": 5488 }, { "epoch": 1.0027875859505668, "grad_norm": 0.5477926731109619, "learning_rate": 9.728046829862376e-05, "loss": 0.9139, "step": 5489 }, { "epoch": 1.002973425013938, "grad_norm": 0.5683122873306274, "learning_rate": 9.725123335017117e-05, "loss": 0.8201, "step": 5490 
}, { "epoch": 1.003159264077309, "grad_norm": 0.7958745360374451, "learning_rate": 9.722199863682659e-05, "loss": 1.2047, "step": 5491 }, { "epoch": 1.0033451031406802, "grad_norm": 0.6453753113746643, "learning_rate": 9.719276416109042e-05, "loss": 1.2061, "step": 5492 }, { "epoch": 1.0035309422040513, "grad_norm": 0.5566391944885254, "learning_rate": 9.716352992546323e-05, "loss": 1.0988, "step": 5493 }, { "epoch": 1.0037167812674224, "grad_norm": 0.5664240717887878, "learning_rate": 9.713429593244541e-05, "loss": 1.0277, "step": 5494 }, { "epoch": 1.0039026203307935, "grad_norm": 0.5620260238647461, "learning_rate": 9.710506218453748e-05, "loss": 0.9684, "step": 5495 }, { "epoch": 1.0040884593941646, "grad_norm": 0.5455220341682434, "learning_rate": 9.707582868423984e-05, "loss": 0.6991, "step": 5496 }, { "epoch": 1.0042742984575357, "grad_norm": 0.5864872336387634, "learning_rate": 9.704659543405287e-05, "loss": 0.9655, "step": 5497 }, { "epoch": 1.0044601375209068, "grad_norm": 0.49677610397338867, "learning_rate": 9.701736243647703e-05, "loss": 0.7792, "step": 5498 }, { "epoch": 1.0046459765842781, "grad_norm": 0.517552375793457, "learning_rate": 9.698812969401257e-05, "loss": 0.6622, "step": 5499 }, { "epoch": 1.0048318156476492, "grad_norm": 0.6072260141372681, "learning_rate": 9.695889720915994e-05, "loss": 0.9556, "step": 5500 }, { "epoch": 1.0050176547110203, "grad_norm": 0.6447886824607849, "learning_rate": 9.692966498441937e-05, "loss": 0.8409, "step": 5501 }, { "epoch": 1.0052034937743914, "grad_norm": 0.6072793006896973, "learning_rate": 9.690043302229117e-05, "loss": 0.8845, "step": 5502 }, { "epoch": 1.0053893328377626, "grad_norm": 0.5621128082275391, "learning_rate": 9.687120132527568e-05, "loss": 1.0741, "step": 5503 }, { "epoch": 1.0055751719011337, "grad_norm": 0.5688579678535461, "learning_rate": 9.684196989587305e-05, "loss": 0.9511, "step": 5504 }, { "epoch": 1.0057610109645048, "grad_norm": 0.5930265188217163, "learning_rate": 
9.681273873658362e-05, "loss": 0.9649, "step": 5505 }, { "epoch": 1.0059468500278759, "grad_norm": 0.575786292552948, "learning_rate": 9.678350784990748e-05, "loss": 1.1046, "step": 5506 }, { "epoch": 1.006132689091247, "grad_norm": 0.5332211256027222, "learning_rate": 9.675427723834493e-05, "loss": 0.9617, "step": 5507 }, { "epoch": 1.006318528154618, "grad_norm": 0.6209401488304138, "learning_rate": 9.672504690439601e-05, "loss": 0.8199, "step": 5508 }, { "epoch": 1.0065043672179892, "grad_norm": 0.5827673673629761, "learning_rate": 9.669581685056089e-05, "loss": 0.9387, "step": 5509 }, { "epoch": 1.0066902062813603, "grad_norm": 0.5273739695549011, "learning_rate": 9.666658707933974e-05, "loss": 0.8929, "step": 5510 }, { "epoch": 1.0068760453447314, "grad_norm": 0.6448649168014526, "learning_rate": 9.663735759323258e-05, "loss": 0.9621, "step": 5511 }, { "epoch": 1.0070618844081025, "grad_norm": 0.5527610182762146, "learning_rate": 9.660812839473952e-05, "loss": 0.9894, "step": 5512 }, { "epoch": 1.0072477234714736, "grad_norm": 0.6742172837257385, "learning_rate": 9.657889948636053e-05, "loss": 0.9834, "step": 5513 }, { "epoch": 1.0074335625348447, "grad_norm": 0.7824978828430176, "learning_rate": 9.654967087059572e-05, "loss": 0.9771, "step": 5514 }, { "epoch": 1.0076194015982158, "grad_norm": 0.5018360614776611, "learning_rate": 9.652044254994495e-05, "loss": 0.4235, "step": 5515 }, { "epoch": 1.0078052406615872, "grad_norm": 0.5719367265701294, "learning_rate": 9.649121452690826e-05, "loss": 1.044, "step": 5516 }, { "epoch": 1.0079910797249583, "grad_norm": 0.5926535725593567, "learning_rate": 9.64619868039856e-05, "loss": 0.8208, "step": 5517 }, { "epoch": 1.0081769187883294, "grad_norm": 0.5106927752494812, "learning_rate": 9.643275938367685e-05, "loss": 0.8282, "step": 5518 }, { "epoch": 1.0083627578517005, "grad_norm": 0.5779476165771484, "learning_rate": 9.64035322684819e-05, "loss": 1.0108, "step": 5519 }, { "epoch": 1.0085485969150716, "grad_norm": 
0.8551607131958008, "learning_rate": 9.637430546090059e-05, "loss": 0.9728, "step": 5520 }, { "epoch": 1.0087344359784427, "grad_norm": 0.5459856986999512, "learning_rate": 9.634507896343281e-05, "loss": 0.9183, "step": 5521 }, { "epoch": 1.0089202750418138, "grad_norm": 0.5723996758460999, "learning_rate": 9.631585277857829e-05, "loss": 1.1975, "step": 5522 }, { "epoch": 1.009106114105185, "grad_norm": 0.5988956093788147, "learning_rate": 9.628662690883685e-05, "loss": 0.9579, "step": 5523 }, { "epoch": 1.009291953168556, "grad_norm": 0.6050418615341187, "learning_rate": 9.625740135670825e-05, "loss": 0.8478, "step": 5524 }, { "epoch": 1.0094777922319271, "grad_norm": 0.6499052047729492, "learning_rate": 9.622817612469218e-05, "loss": 0.9527, "step": 5525 }, { "epoch": 1.0096636312952982, "grad_norm": 0.6234305500984192, "learning_rate": 9.619895121528838e-05, "loss": 0.9708, "step": 5526 }, { "epoch": 1.0098494703586693, "grad_norm": 0.6313818097114563, "learning_rate": 9.616972663099647e-05, "loss": 1.0714, "step": 5527 }, { "epoch": 1.0100353094220405, "grad_norm": 0.634307861328125, "learning_rate": 9.614050237431616e-05, "loss": 0.9302, "step": 5528 }, { "epoch": 1.0102211484854116, "grad_norm": 0.5377451181411743, "learning_rate": 9.611127844774699e-05, "loss": 0.8374, "step": 5529 }, { "epoch": 1.0104069875487827, "grad_norm": 0.5915167927742004, "learning_rate": 9.608205485378858e-05, "loss": 0.9394, "step": 5530 }, { "epoch": 1.0105928266121538, "grad_norm": 0.764411211013794, "learning_rate": 9.605283159494051e-05, "loss": 0.9691, "step": 5531 }, { "epoch": 1.010778665675525, "grad_norm": 0.608314037322998, "learning_rate": 9.602360867370226e-05, "loss": 0.965, "step": 5532 }, { "epoch": 1.0109645047388962, "grad_norm": 0.5562364459037781, "learning_rate": 9.599438609257336e-05, "loss": 0.9165, "step": 5533 }, { "epoch": 1.0111503438022673, "grad_norm": 0.6700636148452759, "learning_rate": 9.596516385405327e-05, "loss": 1.0618, "step": 5534 }, { "epoch": 
1.0113361828656384, "grad_norm": 0.5265184044837952, "learning_rate": 9.593594196064147e-05, "loss": 1.036, "step": 5535 }, { "epoch": 1.0115220219290095, "grad_norm": 0.5445021390914917, "learning_rate": 9.590672041483731e-05, "loss": 1.0627, "step": 5536 }, { "epoch": 1.0117078609923806, "grad_norm": 0.859940767288208, "learning_rate": 9.587749921914018e-05, "loss": 1.1261, "step": 5537 }, { "epoch": 1.0118937000557517, "grad_norm": 1.5261372327804565, "learning_rate": 9.584827837604949e-05, "loss": 1.7484, "step": 5538 }, { "epoch": 1.0120795391191229, "grad_norm": 0.5151029825210571, "learning_rate": 9.58190578880645e-05, "loss": 0.7694, "step": 5539 }, { "epoch": 1.012265378182494, "grad_norm": 0.5031622052192688, "learning_rate": 9.578983775768454e-05, "loss": 0.6745, "step": 5540 }, { "epoch": 1.012451217245865, "grad_norm": 0.5906434059143066, "learning_rate": 9.576061798740882e-05, "loss": 0.6502, "step": 5541 }, { "epoch": 1.0126370563092362, "grad_norm": 0.6443628072738647, "learning_rate": 9.573139857973668e-05, "loss": 0.8002, "step": 5542 }, { "epoch": 1.0128228953726073, "grad_norm": 0.6574161052703857, "learning_rate": 9.570217953716718e-05, "loss": 0.8415, "step": 5543 }, { "epoch": 1.0130087344359784, "grad_norm": 0.6678338646888733, "learning_rate": 9.567296086219955e-05, "loss": 1.129, "step": 5544 }, { "epoch": 1.0131945734993495, "grad_norm": 0.6403939723968506, "learning_rate": 9.564374255733298e-05, "loss": 0.9736, "step": 5545 }, { "epoch": 1.0133804125627206, "grad_norm": 0.7221897840499878, "learning_rate": 9.561452462506648e-05, "loss": 0.9191, "step": 5546 }, { "epoch": 1.0135662516260917, "grad_norm": 0.6155973672866821, "learning_rate": 9.558530706789918e-05, "loss": 0.8291, "step": 5547 }, { "epoch": 1.0137520906894628, "grad_norm": 0.5788849592208862, "learning_rate": 9.55560898883301e-05, "loss": 0.8192, "step": 5548 }, { "epoch": 1.0139379297528341, "grad_norm": 0.5767815709114075, "learning_rate": 9.55268730888583e-05, "loss": 
0.8011, "step": 5549 }, { "epoch": 1.0141237688162053, "grad_norm": 0.5603548884391785, "learning_rate": 9.549765667198267e-05, "loss": 0.8195, "step": 5550 }, { "epoch": 1.0143096078795764, "grad_norm": 0.6302038431167603, "learning_rate": 9.546844064020218e-05, "loss": 1.0481, "step": 5551 }, { "epoch": 1.0144954469429475, "grad_norm": 0.5809621214866638, "learning_rate": 9.543922499601582e-05, "loss": 0.739, "step": 5552 }, { "epoch": 1.0146812860063186, "grad_norm": 0.5936046838760376, "learning_rate": 9.541000974192234e-05, "loss": 1.0261, "step": 5553 }, { "epoch": 1.0148671250696897, "grad_norm": 0.6521549820899963, "learning_rate": 9.538079488042069e-05, "loss": 1.1009, "step": 5554 }, { "epoch": 1.0150529641330608, "grad_norm": 0.6870414614677429, "learning_rate": 9.53515804140096e-05, "loss": 0.8791, "step": 5555 }, { "epoch": 1.015238803196432, "grad_norm": 0.5661901235580444, "learning_rate": 9.532236634518793e-05, "loss": 1.0703, "step": 5556 }, { "epoch": 1.015424642259803, "grad_norm": 0.7870084047317505, "learning_rate": 9.529315267645433e-05, "loss": 1.0597, "step": 5557 }, { "epoch": 1.015610481323174, "grad_norm": 0.7044654488563538, "learning_rate": 9.526393941030756e-05, "loss": 0.7709, "step": 5558 }, { "epoch": 1.0157963203865452, "grad_norm": 0.6198689341545105, "learning_rate": 9.523472654924633e-05, "loss": 0.7839, "step": 5559 }, { "epoch": 1.0159821594499163, "grad_norm": 0.5579044222831726, "learning_rate": 9.520551409576919e-05, "loss": 0.7988, "step": 5560 }, { "epoch": 1.0161679985132874, "grad_norm": 0.47388043999671936, "learning_rate": 9.517630205237482e-05, "loss": 0.7251, "step": 5561 }, { "epoch": 1.0163538375766585, "grad_norm": 0.6593843698501587, "learning_rate": 9.514709042156173e-05, "loss": 0.8527, "step": 5562 }, { "epoch": 1.0165396766400296, "grad_norm": 0.6089546084403992, "learning_rate": 9.511787920582855e-05, "loss": 0.8445, "step": 5563 }, { "epoch": 1.0167255157034008, "grad_norm": 0.7765074372291565, 
"learning_rate": 9.508866840767365e-05, "loss": 0.942, "step": 5564 }, { "epoch": 1.016911354766772, "grad_norm": 0.5820897221565247, "learning_rate": 9.505945802959555e-05, "loss": 0.8503, "step": 5565 }, { "epoch": 1.0170971938301432, "grad_norm": 1.7710691690444946, "learning_rate": 9.503024807409274e-05, "loss": 1.5448, "step": 5566 }, { "epoch": 1.0172830328935143, "grad_norm": 0.637711763381958, "learning_rate": 9.500103854366351e-05, "loss": 0.8379, "step": 5567 }, { "epoch": 1.0174688719568854, "grad_norm": 0.5511047840118408, "learning_rate": 9.497182944080628e-05, "loss": 0.9107, "step": 5568 }, { "epoch": 1.0176547110202565, "grad_norm": 0.6024526953697205, "learning_rate": 9.494262076801933e-05, "loss": 0.9868, "step": 5569 }, { "epoch": 1.0178405500836276, "grad_norm": 1.2761352062225342, "learning_rate": 9.491341252780097e-05, "loss": 1.3302, "step": 5570 }, { "epoch": 1.0180263891469987, "grad_norm": 0.6507533192634583, "learning_rate": 9.48842047226494e-05, "loss": 1.0497, "step": 5571 }, { "epoch": 1.0182122282103698, "grad_norm": 0.6174860596656799, "learning_rate": 9.485499735506286e-05, "loss": 0.8212, "step": 5572 }, { "epoch": 1.018398067273741, "grad_norm": 0.5975897312164307, "learning_rate": 9.482579042753956e-05, "loss": 0.7139, "step": 5573 }, { "epoch": 1.018583906337112, "grad_norm": 0.6369990706443787, "learning_rate": 9.479658394257754e-05, "loss": 1.1584, "step": 5574 }, { "epoch": 1.0187697454004832, "grad_norm": 0.6980920433998108, "learning_rate": 9.476737790267501e-05, "loss": 0.9404, "step": 5575 }, { "epoch": 1.0189555844638543, "grad_norm": 0.7139065861701965, "learning_rate": 9.473817231032989e-05, "loss": 1.0755, "step": 5576 }, { "epoch": 1.0191414235272254, "grad_norm": 0.7761198878288269, "learning_rate": 9.470896716804029e-05, "loss": 1.0533, "step": 5577 }, { "epoch": 1.0193272625905965, "grad_norm": 0.545531153678894, "learning_rate": 9.467976247830413e-05, "loss": 1.0633, "step": 5578 }, { "epoch": 1.0195131016539676, 
"grad_norm": 0.6415326595306396, "learning_rate": 9.46505582436194e-05, "loss": 1.209, "step": 5579 }, { "epoch": 1.0196989407173387, "grad_norm": 0.6412090063095093, "learning_rate": 9.462135446648402e-05, "loss": 0.9798, "step": 5580 }, { "epoch": 1.01988477978071, "grad_norm": 0.6819164156913757, "learning_rate": 9.459215114939578e-05, "loss": 0.991, "step": 5581 }, { "epoch": 1.0200706188440811, "grad_norm": 0.6056860089302063, "learning_rate": 9.456294829485258e-05, "loss": 0.6767, "step": 5582 }, { "epoch": 1.0202564579074522, "grad_norm": 0.7711376547813416, "learning_rate": 9.453374590535212e-05, "loss": 0.9817, "step": 5583 }, { "epoch": 1.0204422969708233, "grad_norm": 0.5763733983039856, "learning_rate": 9.450454398339221e-05, "loss": 1.2615, "step": 5584 }, { "epoch": 1.0206281360341944, "grad_norm": 0.631025493144989, "learning_rate": 9.44753425314705e-05, "loss": 1.0366, "step": 5585 }, { "epoch": 1.0208139750975656, "grad_norm": 0.6333885192871094, "learning_rate": 9.44461415520847e-05, "loss": 0.9011, "step": 5586 }, { "epoch": 1.0209998141609367, "grad_norm": 0.621618926525116, "learning_rate": 9.441694104773245e-05, "loss": 0.8351, "step": 5587 }, { "epoch": 1.0211856532243078, "grad_norm": 0.5549609661102295, "learning_rate": 9.438774102091127e-05, "loss": 0.9983, "step": 5588 }, { "epoch": 1.0213714922876789, "grad_norm": 0.668755829334259, "learning_rate": 9.435854147411875e-05, "loss": 1.006, "step": 5589 }, { "epoch": 1.02155733135105, "grad_norm": 0.6217575073242188, "learning_rate": 9.432934240985234e-05, "loss": 0.7929, "step": 5590 }, { "epoch": 1.021743170414421, "grad_norm": 0.609810471534729, "learning_rate": 9.430014383060956e-05, "loss": 0.863, "step": 5591 }, { "epoch": 1.0219290094777922, "grad_norm": 0.5248240828514099, "learning_rate": 9.427094573888776e-05, "loss": 0.8328, "step": 5592 }, { "epoch": 1.0221148485411633, "grad_norm": 0.7486118078231812, "learning_rate": 9.424174813718436e-05, "loss": 1.1021, "step": 5593 }, { 
"epoch": 1.0223006876045344, "grad_norm": 0.6330525875091553, "learning_rate": 9.421255102799672e-05, "loss": 0.9775, "step": 5594 }, { "epoch": 1.0224865266679055, "grad_norm": 0.7093864679336548, "learning_rate": 9.418335441382206e-05, "loss": 0.9141, "step": 5595 }, { "epoch": 1.0226723657312766, "grad_norm": 0.5771816968917847, "learning_rate": 9.415415829715772e-05, "loss": 0.6835, "step": 5596 }, { "epoch": 1.0228582047946477, "grad_norm": 0.7342430949211121, "learning_rate": 9.412496268050082e-05, "loss": 0.8209, "step": 5597 }, { "epoch": 1.023044043858019, "grad_norm": 0.6433789730072021, "learning_rate": 9.409576756634856e-05, "loss": 1.0434, "step": 5598 }, { "epoch": 1.0232298829213902, "grad_norm": 0.513936460018158, "learning_rate": 9.406657295719805e-05, "loss": 0.6459, "step": 5599 }, { "epoch": 1.0234157219847613, "grad_norm": 0.6257773041725159, "learning_rate": 9.403737885554638e-05, "loss": 1.1122, "step": 5600 }, { "epoch": 1.0236015610481324, "grad_norm": 0.736271858215332, "learning_rate": 9.400818526389063e-05, "loss": 1.0869, "step": 5601 }, { "epoch": 1.0237874001115035, "grad_norm": 0.5881569385528564, "learning_rate": 9.397899218472769e-05, "loss": 1.034, "step": 5602 }, { "epoch": 1.0239732391748746, "grad_norm": 0.6881264448165894, "learning_rate": 9.39497996205546e-05, "loss": 0.8233, "step": 5603 }, { "epoch": 1.0241590782382457, "grad_norm": 0.5099987983703613, "learning_rate": 9.392060757386819e-05, "loss": 0.8129, "step": 5604 }, { "epoch": 1.0243449173016168, "grad_norm": 0.7611659169197083, "learning_rate": 9.389141604716536e-05, "loss": 1.0172, "step": 5605 }, { "epoch": 1.024530756364988, "grad_norm": 0.49309414625167847, "learning_rate": 9.386222504294293e-05, "loss": 0.7155, "step": 5606 }, { "epoch": 1.024716595428359, "grad_norm": 0.6789926886558533, "learning_rate": 9.383303456369764e-05, "loss": 1.3201, "step": 5607 }, { "epoch": 1.0249024344917301, "grad_norm": 0.5713966488838196, "learning_rate": 9.380384461192628e-05, 
"loss": 0.9373, "step": 5608 }, { "epoch": 1.0250882735551012, "grad_norm": 0.6206234693527222, "learning_rate": 9.377465519012543e-05, "loss": 0.896, "step": 5609 }, { "epoch": 1.0252741126184723, "grad_norm": 0.6701042652130127, "learning_rate": 9.374546630079183e-05, "loss": 0.8984, "step": 5610 }, { "epoch": 1.0254599516818435, "grad_norm": 0.5585264563560486, "learning_rate": 9.371627794642196e-05, "loss": 1.0708, "step": 5611 }, { "epoch": 1.0256457907452146, "grad_norm": 0.6523463726043701, "learning_rate": 9.368709012951243e-05, "loss": 1.0648, "step": 5612 }, { "epoch": 1.0258316298085857, "grad_norm": 0.6141130924224854, "learning_rate": 9.365790285255974e-05, "loss": 0.9465, "step": 5613 }, { "epoch": 1.0260174688719568, "grad_norm": 0.6018891334533691, "learning_rate": 9.362871611806032e-05, "loss": 0.9618, "step": 5614 }, { "epoch": 1.026203307935328, "grad_norm": 0.5785146951675415, "learning_rate": 9.35995299285106e-05, "loss": 0.6976, "step": 5615 }, { "epoch": 1.0263891469986992, "grad_norm": 0.6223296523094177, "learning_rate": 9.357034428640688e-05, "loss": 1.0349, "step": 5616 }, { "epoch": 1.0265749860620703, "grad_norm": 0.6218807697296143, "learning_rate": 9.354115919424556e-05, "loss": 0.9761, "step": 5617 }, { "epoch": 1.0267608251254414, "grad_norm": 0.6906150579452515, "learning_rate": 9.35119746545228e-05, "loss": 0.8762, "step": 5618 }, { "epoch": 1.0269466641888125, "grad_norm": 0.732918918132782, "learning_rate": 9.348279066973488e-05, "loss": 1.1839, "step": 5619 }, { "epoch": 1.0271325032521836, "grad_norm": 0.6837500333786011, "learning_rate": 9.345360724237799e-05, "loss": 0.8754, "step": 5620 }, { "epoch": 1.0273183423155547, "grad_norm": 0.6494518518447876, "learning_rate": 9.342442437494819e-05, "loss": 1.1035, "step": 5621 }, { "epoch": 1.0275041813789259, "grad_norm": 0.5419272184371948, "learning_rate": 9.339524206994162e-05, "loss": 0.9875, "step": 5622 }, { "epoch": 1.027690020442297, "grad_norm": 0.6595784425735474, 
"learning_rate": 9.336606032985422e-05, "loss": 0.9096, "step": 5623 }, { "epoch": 1.027875859505668, "grad_norm": 0.5965380668640137, "learning_rate": 9.333687915718207e-05, "loss": 0.8697, "step": 5624 }, { "epoch": 1.0280616985690392, "grad_norm": 0.6148014664649963, "learning_rate": 9.330769855442102e-05, "loss": 0.9502, "step": 5625 }, { "epoch": 1.0282475376324103, "grad_norm": 0.5834745764732361, "learning_rate": 9.327851852406696e-05, "loss": 0.8174, "step": 5626 }, { "epoch": 1.0284333766957814, "grad_norm": 0.5997938513755798, "learning_rate": 9.32493390686158e-05, "loss": 0.8045, "step": 5627 }, { "epoch": 1.0286192157591525, "grad_norm": 0.5949342846870422, "learning_rate": 9.322016019056319e-05, "loss": 0.9448, "step": 5628 }, { "epoch": 1.0288050548225236, "grad_norm": 0.6385858654975891, "learning_rate": 9.319098189240496e-05, "loss": 1.0089, "step": 5629 }, { "epoch": 1.0289908938858947, "grad_norm": 0.5652914047241211, "learning_rate": 9.316180417663675e-05, "loss": 0.9545, "step": 5630 }, { "epoch": 1.029176732949266, "grad_norm": 0.5521414279937744, "learning_rate": 9.313262704575426e-05, "loss": 0.9898, "step": 5631 }, { "epoch": 1.0293625720126371, "grad_norm": 0.6309528946876526, "learning_rate": 9.310345050225297e-05, "loss": 0.9947, "step": 5632 }, { "epoch": 1.0295484110760083, "grad_norm": 0.6038761138916016, "learning_rate": 9.307427454862844e-05, "loss": 0.9867, "step": 5633 }, { "epoch": 1.0297342501393794, "grad_norm": 0.720798909664154, "learning_rate": 9.304509918737626e-05, "loss": 0.9726, "step": 5634 }, { "epoch": 1.0299200892027505, "grad_norm": 0.5871129035949707, "learning_rate": 9.301592442099172e-05, "loss": 1.1046, "step": 5635 }, { "epoch": 1.0301059282661216, "grad_norm": 0.5038182735443115, "learning_rate": 9.298675025197027e-05, "loss": 0.8985, "step": 5636 }, { "epoch": 1.0302917673294927, "grad_norm": 0.4841303527355194, "learning_rate": 9.295757668280722e-05, "loss": 0.5612, "step": 5637 }, { "epoch": 
1.0304776063928638, "grad_norm": 0.7170572876930237, "learning_rate": 9.292840371599789e-05, "loss": 1.0366, "step": 5638 }, { "epoch": 1.030663445456235, "grad_norm": 0.6779294610023499, "learning_rate": 9.289923135403743e-05, "loss": 1.0401, "step": 5639 }, { "epoch": 1.030849284519606, "grad_norm": 0.615813136100769, "learning_rate": 9.287005959942107e-05, "loss": 1.0795, "step": 5640 }, { "epoch": 1.031035123582977, "grad_norm": 0.7855369448661804, "learning_rate": 9.284088845464396e-05, "loss": 1.3407, "step": 5641 }, { "epoch": 1.0312209626463482, "grad_norm": 0.6328584551811218, "learning_rate": 9.281171792220107e-05, "loss": 0.8972, "step": 5642 }, { "epoch": 1.0314068017097193, "grad_norm": 0.6222116351127625, "learning_rate": 9.278254800458753e-05, "loss": 0.7151, "step": 5643 }, { "epoch": 1.0315926407730904, "grad_norm": 0.5513842105865479, "learning_rate": 9.275337870429824e-05, "loss": 0.9599, "step": 5644 }, { "epoch": 1.0317784798364615, "grad_norm": 0.683917224407196, "learning_rate": 9.272421002382816e-05, "loss": 1.2183, "step": 5645 }, { "epoch": 1.0319643188998326, "grad_norm": 0.5576063990592957, "learning_rate": 9.269504196567209e-05, "loss": 0.9097, "step": 5646 }, { "epoch": 1.032150157963204, "grad_norm": 0.7101729512214661, "learning_rate": 9.266587453232486e-05, "loss": 0.8636, "step": 5647 }, { "epoch": 1.032335997026575, "grad_norm": 0.6904899477958679, "learning_rate": 9.263670772628126e-05, "loss": 1.136, "step": 5648 }, { "epoch": 1.0325218360899462, "grad_norm": 0.6535913348197937, "learning_rate": 9.260754155003595e-05, "loss": 1.0645, "step": 5649 }, { "epoch": 1.0327076751533173, "grad_norm": 0.7509424090385437, "learning_rate": 9.257837600608359e-05, "loss": 1.1562, "step": 5650 }, { "epoch": 1.0328935142166884, "grad_norm": 0.6997819542884827, "learning_rate": 9.254921109691877e-05, "loss": 1.1857, "step": 5651 }, { "epoch": 1.0330793532800595, "grad_norm": 0.573625922203064, "learning_rate": 9.252004682503604e-05, "loss": 
0.863, "step": 5652 }, { "epoch": 1.0332651923434306, "grad_norm": 0.5840150117874146, "learning_rate": 9.249088319292986e-05, "loss": 0.8821, "step": 5653 }, { "epoch": 1.0334510314068017, "grad_norm": 0.7319598197937012, "learning_rate": 9.246172020309465e-05, "loss": 1.0052, "step": 5654 }, { "epoch": 1.0336368704701728, "grad_norm": 0.6403792500495911, "learning_rate": 9.243255785802484e-05, "loss": 0.8859, "step": 5655 }, { "epoch": 1.033822709533544, "grad_norm": 0.6094220280647278, "learning_rate": 9.240339616021469e-05, "loss": 1.0, "step": 5656 }, { "epoch": 1.034008548596915, "grad_norm": 0.5595227479934692, "learning_rate": 9.23742351121585e-05, "loss": 0.9861, "step": 5657 }, { "epoch": 1.0341943876602862, "grad_norm": 0.688662588596344, "learning_rate": 9.234507471635043e-05, "loss": 1.0202, "step": 5658 }, { "epoch": 1.0343802267236573, "grad_norm": 0.6431695818901062, "learning_rate": 9.23159149752847e-05, "loss": 0.9423, "step": 5659 }, { "epoch": 1.0345660657870284, "grad_norm": 0.6177623867988586, "learning_rate": 9.228675589145536e-05, "loss": 1.0328, "step": 5660 }, { "epoch": 1.0347519048503995, "grad_norm": 0.6663259863853455, "learning_rate": 9.225759746735643e-05, "loss": 0.9991, "step": 5661 }, { "epoch": 1.0349377439137706, "grad_norm": 0.7278993129730225, "learning_rate": 9.222843970548198e-05, "loss": 0.9877, "step": 5662 }, { "epoch": 1.0351235829771417, "grad_norm": 0.6169386506080627, "learning_rate": 9.219928260832584e-05, "loss": 1.06, "step": 5663 }, { "epoch": 1.035309422040513, "grad_norm": 0.7969826459884644, "learning_rate": 9.217012617838192e-05, "loss": 0.8952, "step": 5664 }, { "epoch": 1.0354952611038841, "grad_norm": 0.6824052333831787, "learning_rate": 9.214097041814403e-05, "loss": 0.8959, "step": 5665 }, { "epoch": 1.0356811001672552, "grad_norm": 0.657074511051178, "learning_rate": 9.211181533010595e-05, "loss": 0.8222, "step": 5666 }, { "epoch": 1.0358669392306263, "grad_norm": 0.658909022808075, "learning_rate": 
9.20826609167613e-05, "loss": 1.0181, "step": 5667 }, { "epoch": 1.0360527782939974, "grad_norm": 0.9237680435180664, "learning_rate": 9.205350718060381e-05, "loss": 1.093, "step": 5668 }, { "epoch": 1.0362386173573686, "grad_norm": 0.6500981450080872, "learning_rate": 9.202435412412705e-05, "loss": 1.0164, "step": 5669 }, { "epoch": 1.0364244564207397, "grad_norm": 0.5961865186691284, "learning_rate": 9.199520174982446e-05, "loss": 0.9181, "step": 5670 }, { "epoch": 1.0366102954841108, "grad_norm": 0.5907772183418274, "learning_rate": 9.196605006018963e-05, "loss": 0.9159, "step": 5671 }, { "epoch": 1.0367961345474819, "grad_norm": 0.61663419008255, "learning_rate": 9.193689905771586e-05, "loss": 0.9584, "step": 5672 }, { "epoch": 1.036981973610853, "grad_norm": 0.6415398716926575, "learning_rate": 9.190774874489655e-05, "loss": 1.1932, "step": 5673 }, { "epoch": 1.037167812674224, "grad_norm": 0.5706160664558411, "learning_rate": 9.187859912422497e-05, "loss": 0.882, "step": 5674 }, { "epoch": 1.0373536517375952, "grad_norm": 0.6299116015434265, "learning_rate": 9.184945019819435e-05, "loss": 0.6879, "step": 5675 }, { "epoch": 1.0375394908009663, "grad_norm": 0.6006621718406677, "learning_rate": 9.182030196929791e-05, "loss": 0.9731, "step": 5676 }, { "epoch": 1.0377253298643374, "grad_norm": 0.6359907388687134, "learning_rate": 9.17911544400287e-05, "loss": 0.9504, "step": 5677 }, { "epoch": 1.0379111689277085, "grad_norm": 0.6705753803253174, "learning_rate": 9.17620076128798e-05, "loss": 0.9331, "step": 5678 }, { "epoch": 1.0380970079910796, "grad_norm": 0.733623743057251, "learning_rate": 9.173286149034419e-05, "loss": 1.0244, "step": 5679 }, { "epoch": 1.038282847054451, "grad_norm": 0.5766941905021667, "learning_rate": 9.170371607491479e-05, "loss": 0.9199, "step": 5680 }, { "epoch": 1.038468686117822, "grad_norm": 0.5844184756278992, "learning_rate": 9.167457136908448e-05, "loss": 0.927, "step": 5681 }, { "epoch": 1.0386545251811932, "grad_norm": 
0.6267839670181274, "learning_rate": 9.164542737534607e-05, "loss": 0.8806, "step": 5682 }, { "epoch": 1.0388403642445643, "grad_norm": 0.6894923448562622, "learning_rate": 9.161628409619236e-05, "loss": 0.7935, "step": 5683 }, { "epoch": 1.0390262033079354, "grad_norm": 0.5978687405586243, "learning_rate": 9.158714153411595e-05, "loss": 0.854, "step": 5684 }, { "epoch": 1.0392120423713065, "grad_norm": 0.8842291235923767, "learning_rate": 9.155799969160955e-05, "loss": 0.8898, "step": 5685 }, { "epoch": 1.0393978814346776, "grad_norm": 0.6014558672904968, "learning_rate": 9.152885857116564e-05, "loss": 0.9935, "step": 5686 }, { "epoch": 1.0395837204980487, "grad_norm": 0.6135658025741577, "learning_rate": 9.149971817527679e-05, "loss": 1.0825, "step": 5687 }, { "epoch": 1.0397695595614198, "grad_norm": 0.56199049949646, "learning_rate": 9.147057850643537e-05, "loss": 0.8605, "step": 5688 }, { "epoch": 1.039955398624791, "grad_norm": 0.5822799205780029, "learning_rate": 9.144143956713384e-05, "loss": 0.903, "step": 5689 }, { "epoch": 1.040141237688162, "grad_norm": 0.6802158355712891, "learning_rate": 9.14123013598645e-05, "loss": 0.9905, "step": 5690 }, { "epoch": 1.0403270767515331, "grad_norm": 0.6485140919685364, "learning_rate": 9.138316388711955e-05, "loss": 1.0835, "step": 5691 }, { "epoch": 1.0405129158149042, "grad_norm": 0.5066680312156677, "learning_rate": 9.135402715139125e-05, "loss": 0.7497, "step": 5692 }, { "epoch": 1.0406987548782753, "grad_norm": 0.7781651616096497, "learning_rate": 9.132489115517166e-05, "loss": 1.0902, "step": 5693 }, { "epoch": 1.0408845939416465, "grad_norm": 0.6752843260765076, "learning_rate": 9.12957559009529e-05, "loss": 0.837, "step": 5694 }, { "epoch": 1.0410704330050176, "grad_norm": 0.7520736455917358, "learning_rate": 9.126662139122694e-05, "loss": 1.125, "step": 5695 }, { "epoch": 1.0412562720683889, "grad_norm": 0.6787281036376953, "learning_rate": 9.123748762848571e-05, "loss": 1.1425, "step": 5696 }, { "epoch": 
1.04144211113176, "grad_norm": 0.5874269604682922, "learning_rate": 9.120835461522115e-05, "loss": 1.002, "step": 5697 }, { "epoch": 1.041627950195131, "grad_norm": 0.6762674450874329, "learning_rate": 9.117922235392497e-05, "loss": 0.8577, "step": 5698 }, { "epoch": 1.0418137892585022, "grad_norm": 0.7589949369430542, "learning_rate": 9.115009084708903e-05, "loss": 1.3228, "step": 5699 }, { "epoch": 1.0419996283218733, "grad_norm": 0.5638782978057861, "learning_rate": 9.112096009720489e-05, "loss": 0.778, "step": 5700 }, { "epoch": 1.0421854673852444, "grad_norm": 0.6516910791397095, "learning_rate": 9.109183010676424e-05, "loss": 0.9716, "step": 5701 }, { "epoch": 1.0423713064486155, "grad_norm": 0.6196531653404236, "learning_rate": 9.10627008782586e-05, "loss": 0.8716, "step": 5702 }, { "epoch": 1.0425571455119866, "grad_norm": 0.5779221057891846, "learning_rate": 9.103357241417947e-05, "loss": 0.931, "step": 5703 }, { "epoch": 1.0427429845753577, "grad_norm": 0.7300266623497009, "learning_rate": 9.100444471701831e-05, "loss": 0.955, "step": 5704 }, { "epoch": 1.0429288236387289, "grad_norm": 0.5698464512825012, "learning_rate": 9.097531778926641e-05, "loss": 0.891, "step": 5705 }, { "epoch": 1.0431146627021, "grad_norm": 0.6704815030097961, "learning_rate": 9.09461916334151e-05, "loss": 0.8461, "step": 5706 }, { "epoch": 1.043300501765471, "grad_norm": 0.6242592334747314, "learning_rate": 9.091706625195555e-05, "loss": 0.8964, "step": 5707 }, { "epoch": 1.0434863408288422, "grad_norm": 0.6542682647705078, "learning_rate": 9.0887941647379e-05, "loss": 0.9589, "step": 5708 }, { "epoch": 1.0436721798922133, "grad_norm": 0.655167281627655, "learning_rate": 9.085881782217645e-05, "loss": 0.8934, "step": 5709 }, { "epoch": 1.0438580189555844, "grad_norm": 0.7744964957237244, "learning_rate": 9.082969477883899e-05, "loss": 0.9864, "step": 5710 }, { "epoch": 1.0440438580189555, "grad_norm": 0.6355542540550232, "learning_rate": 9.080057251985756e-05, "loss": 1.0679, 
"step": 5711 }, { "epoch": 1.0442296970823266, "grad_norm": 0.5906370878219604, "learning_rate": 9.077145104772302e-05, "loss": 0.9372, "step": 5712 }, { "epoch": 1.044415536145698, "grad_norm": 0.6469417214393616, "learning_rate": 9.074233036492626e-05, "loss": 1.0032, "step": 5713 }, { "epoch": 1.044601375209069, "grad_norm": 0.771095871925354, "learning_rate": 9.071321047395796e-05, "loss": 1.0268, "step": 5714 }, { "epoch": 1.0447872142724401, "grad_norm": 0.796539843082428, "learning_rate": 9.068409137730885e-05, "loss": 1.1293, "step": 5715 }, { "epoch": 1.0449730533358113, "grad_norm": 0.6017162799835205, "learning_rate": 9.065497307746956e-05, "loss": 0.9915, "step": 5716 }, { "epoch": 1.0451588923991824, "grad_norm": 0.5909117460250854, "learning_rate": 9.062585557693057e-05, "loss": 0.8477, "step": 5717 }, { "epoch": 1.0453447314625535, "grad_norm": 0.5846933722496033, "learning_rate": 9.059673887818245e-05, "loss": 0.7227, "step": 5718 }, { "epoch": 1.0455305705259246, "grad_norm": 0.5901908278465271, "learning_rate": 9.056762298371555e-05, "loss": 0.8948, "step": 5719 }, { "epoch": 1.0457164095892957, "grad_norm": 0.6081728339195251, "learning_rate": 9.053850789602029e-05, "loss": 1.0266, "step": 5720 }, { "epoch": 1.0459022486526668, "grad_norm": 0.6434999704360962, "learning_rate": 9.050939361758685e-05, "loss": 0.89, "step": 5721 }, { "epoch": 1.046088087716038, "grad_norm": 0.6816750168800354, "learning_rate": 9.048028015090553e-05, "loss": 0.9765, "step": 5722 }, { "epoch": 1.046273926779409, "grad_norm": 0.6385290026664734, "learning_rate": 9.045116749846638e-05, "loss": 0.8714, "step": 5723 }, { "epoch": 1.04645976584278, "grad_norm": 0.7308647036552429, "learning_rate": 9.042205566275951e-05, "loss": 0.9376, "step": 5724 }, { "epoch": 1.0466456049061512, "grad_norm": 0.6339771747589111, "learning_rate": 9.039294464627495e-05, "loss": 0.9761, "step": 5725 }, { "epoch": 1.0468314439695223, "grad_norm": 0.6732003688812256, "learning_rate": 
9.036383445150255e-05, "loss": 0.9374, "step": 5726 }, { "epoch": 1.0470172830328934, "grad_norm": 0.6548773050308228, "learning_rate": 9.033472508093229e-05, "loss": 1.0606, "step": 5727 }, { "epoch": 1.0472031220962645, "grad_norm": 0.8904339671134949, "learning_rate": 9.030561653705384e-05, "loss": 1.0021, "step": 5728 }, { "epoch": 1.0473889611596356, "grad_norm": 0.6712539792060852, "learning_rate": 9.027650882235698e-05, "loss": 1.0839, "step": 5729 }, { "epoch": 1.047574800223007, "grad_norm": 0.6670980453491211, "learning_rate": 9.024740193933131e-05, "loss": 0.7674, "step": 5730 }, { "epoch": 1.047760639286378, "grad_norm": 0.5966160297393799, "learning_rate": 9.021829589046643e-05, "loss": 0.9512, "step": 5731 }, { "epoch": 1.0479464783497492, "grad_norm": 0.7879358530044556, "learning_rate": 9.018919067825186e-05, "loss": 1.1443, "step": 5732 }, { "epoch": 1.0481323174131203, "grad_norm": 0.5953929424285889, "learning_rate": 9.0160086305177e-05, "loss": 0.9683, "step": 5733 }, { "epoch": 1.0483181564764914, "grad_norm": 0.6400251388549805, "learning_rate": 9.013098277373127e-05, "loss": 0.8876, "step": 5734 }, { "epoch": 1.0485039955398625, "grad_norm": 1.7397582530975342, "learning_rate": 9.010188008640386e-05, "loss": 1.4435, "step": 5735 }, { "epoch": 1.0486898346032336, "grad_norm": 0.6329631209373474, "learning_rate": 9.007277824568407e-05, "loss": 0.9844, "step": 5736 }, { "epoch": 1.0488756736666047, "grad_norm": 0.7148013710975647, "learning_rate": 9.004367725406098e-05, "loss": 0.9352, "step": 5737 }, { "epoch": 1.0490615127299758, "grad_norm": 0.644572377204895, "learning_rate": 9.001457711402371e-05, "loss": 0.8407, "step": 5738 }, { "epoch": 1.049247351793347, "grad_norm": 0.585382342338562, "learning_rate": 8.998547782806124e-05, "loss": 0.7984, "step": 5739 }, { "epoch": 1.049433190856718, "grad_norm": 0.6851204037666321, "learning_rate": 8.995637939866249e-05, "loss": 0.9389, "step": 5740 }, { "epoch": 1.0496190299200892, "grad_norm": 
0.6227951049804688, "learning_rate": 8.992728182831636e-05, "loss": 1.0817, "step": 5741 }, { "epoch": 1.0498048689834603, "grad_norm": 0.4151616394519806, "learning_rate": 8.989818511951153e-05, "loss": 0.3853, "step": 5742 }, { "epoch": 1.0499907080468314, "grad_norm": 0.7411801218986511, "learning_rate": 8.98690892747368e-05, "loss": 1.0406, "step": 5743 }, { "epoch": 1.0501765471102025, "grad_norm": 0.6021711826324463, "learning_rate": 8.983999429648074e-05, "loss": 0.5269, "step": 5744 }, { "epoch": 1.0503623861735736, "grad_norm": 0.6130529642105103, "learning_rate": 8.981090018723191e-05, "loss": 0.9034, "step": 5745 }, { "epoch": 1.050548225236945, "grad_norm": 0.605331540107727, "learning_rate": 8.978180694947884e-05, "loss": 0.7755, "step": 5746 }, { "epoch": 1.050734064300316, "grad_norm": 0.6502954959869385, "learning_rate": 8.975271458570986e-05, "loss": 0.9871, "step": 5747 }, { "epoch": 1.0509199033636871, "grad_norm": 0.6627545356750488, "learning_rate": 8.972362309841341e-05, "loss": 0.9986, "step": 5748 }, { "epoch": 1.0511057424270582, "grad_norm": 1.8269603252410889, "learning_rate": 8.969453249007764e-05, "loss": 1.5947, "step": 5749 }, { "epoch": 1.0512915814904293, "grad_norm": 0.6196630597114563, "learning_rate": 8.966544276319083e-05, "loss": 0.9685, "step": 5750 }, { "epoch": 1.0514774205538004, "grad_norm": 0.7198757529258728, "learning_rate": 8.9636353920241e-05, "loss": 0.9335, "step": 5751 }, { "epoch": 1.0516632596171716, "grad_norm": 0.6331172585487366, "learning_rate": 8.960726596371619e-05, "loss": 0.934, "step": 5752 }, { "epoch": 1.0518490986805427, "grad_norm": 0.5823332667350769, "learning_rate": 8.957817889610442e-05, "loss": 1.1294, "step": 5753 }, { "epoch": 1.0520349377439138, "grad_norm": 0.7582717537879944, "learning_rate": 8.954909271989351e-05, "loss": 1.0328, "step": 5754 }, { "epoch": 1.0522207768072849, "grad_norm": 0.598598301410675, "learning_rate": 8.952000743757133e-05, "loss": 0.7204, "step": 5755 }, { "epoch": 
1.052406615870656, "grad_norm": 0.720652163028717, "learning_rate": 8.949092305162553e-05, "loss": 0.9257, "step": 5756 }, { "epoch": 1.052592454934027, "grad_norm": 0.610378623008728, "learning_rate": 8.946183956454384e-05, "loss": 0.9195, "step": 5757 }, { "epoch": 1.0527782939973982, "grad_norm": 0.6636180281639099, "learning_rate": 8.943275697881373e-05, "loss": 1.0782, "step": 5758 }, { "epoch": 1.0529641330607693, "grad_norm": 0.7740611433982849, "learning_rate": 8.940367529692275e-05, "loss": 0.954, "step": 5759 }, { "epoch": 1.0531499721241404, "grad_norm": 0.5274054408073425, "learning_rate": 8.937459452135837e-05, "loss": 0.6807, "step": 5760 }, { "epoch": 1.0533358111875115, "grad_norm": 0.659879744052887, "learning_rate": 8.934551465460786e-05, "loss": 1.052, "step": 5761 }, { "epoch": 1.0535216502508828, "grad_norm": 0.667706310749054, "learning_rate": 8.931643569915852e-05, "loss": 1.0115, "step": 5762 }, { "epoch": 1.053707489314254, "grad_norm": 0.6441750526428223, "learning_rate": 8.92873576574975e-05, "loss": 1.1946, "step": 5763 }, { "epoch": 1.053893328377625, "grad_norm": 0.5615370869636536, "learning_rate": 8.925828053211198e-05, "loss": 0.829, "step": 5764 }, { "epoch": 1.0540791674409962, "grad_norm": 0.5814454555511475, "learning_rate": 8.92292043254889e-05, "loss": 0.8492, "step": 5765 }, { "epoch": 1.0542650065043673, "grad_norm": 0.7165383100509644, "learning_rate": 8.920012904011525e-05, "loss": 0.976, "step": 5766 }, { "epoch": 1.0544508455677384, "grad_norm": 0.6098819971084595, "learning_rate": 8.917105467847795e-05, "loss": 0.826, "step": 5767 }, { "epoch": 1.0546366846311095, "grad_norm": 0.6676499843597412, "learning_rate": 8.914198124306371e-05, "loss": 0.8987, "step": 5768 }, { "epoch": 1.0548225236944806, "grad_norm": 0.679675281047821, "learning_rate": 8.911290873635928e-05, "loss": 1.0335, "step": 5769 }, { "epoch": 1.0550083627578517, "grad_norm": 0.6107022166252136, "learning_rate": 8.90838371608513e-05, "loss": 0.9001, 
"step": 5770 }, { "epoch": 1.0551942018212228, "grad_norm": 0.6810942888259888, "learning_rate": 8.905476651902636e-05, "loss": 1.0118, "step": 5771 }, { "epoch": 1.055380040884594, "grad_norm": 0.6379483342170715, "learning_rate": 8.902569681337083e-05, "loss": 1.0096, "step": 5772 }, { "epoch": 1.055565879947965, "grad_norm": 0.842753529548645, "learning_rate": 8.89966280463712e-05, "loss": 0.9862, "step": 5773 }, { "epoch": 1.0557517190113361, "grad_norm": 0.7353532910346985, "learning_rate": 8.896756022051378e-05, "loss": 1.2512, "step": 5774 }, { "epoch": 1.0559375580747072, "grad_norm": 0.6169674396514893, "learning_rate": 8.893849333828474e-05, "loss": 0.9022, "step": 5775 }, { "epoch": 1.0561233971380783, "grad_norm": 0.5542333126068115, "learning_rate": 8.89094274021703e-05, "loss": 0.9013, "step": 5776 }, { "epoch": 1.0563092362014495, "grad_norm": 2.630976915359497, "learning_rate": 8.888036241465647e-05, "loss": 1.7921, "step": 5777 }, { "epoch": 1.0564950752648206, "grad_norm": 0.6785982251167297, "learning_rate": 8.885129837822933e-05, "loss": 1.0074, "step": 5778 }, { "epoch": 1.056680914328192, "grad_norm": 0.6288561820983887, "learning_rate": 8.882223529537468e-05, "loss": 0.9505, "step": 5779 }, { "epoch": 1.056866753391563, "grad_norm": 1.0694950819015503, "learning_rate": 8.879317316857841e-05, "loss": 1.1911, "step": 5780 }, { "epoch": 1.057052592454934, "grad_norm": 0.6061035990715027, "learning_rate": 8.876411200032631e-05, "loss": 0.9956, "step": 5781 }, { "epoch": 1.0572384315183052, "grad_norm": 0.6882989406585693, "learning_rate": 8.873505179310394e-05, "loss": 0.9454, "step": 5782 }, { "epoch": 1.0574242705816763, "grad_norm": 0.6553504467010498, "learning_rate": 8.870599254939696e-05, "loss": 0.9591, "step": 5783 }, { "epoch": 1.0576101096450474, "grad_norm": 0.7941915392875671, "learning_rate": 8.867693427169084e-05, "loss": 0.9654, "step": 5784 }, { "epoch": 1.0577959487084185, "grad_norm": 0.6306182742118835, "learning_rate": 
8.864787696247103e-05, "loss": 0.896, "step": 5785 }, { "epoch": 1.0579817877717896, "grad_norm": 0.8002976775169373, "learning_rate": 8.86188206242228e-05, "loss": 1.2667, "step": 5786 }, { "epoch": 1.0581676268351607, "grad_norm": 0.6710218191146851, "learning_rate": 8.858976525943144e-05, "loss": 0.9895, "step": 5787 }, { "epoch": 1.0583534658985319, "grad_norm": 1.6976367235183716, "learning_rate": 8.856071087058216e-05, "loss": 1.5448, "step": 5788 }, { "epoch": 1.058539304961903, "grad_norm": 0.581591784954071, "learning_rate": 8.853165746015997e-05, "loss": 0.8481, "step": 5789 }, { "epoch": 1.058725144025274, "grad_norm": 0.7091907262802124, "learning_rate": 8.850260503064992e-05, "loss": 0.9044, "step": 5790 }, { "epoch": 1.0589109830886452, "grad_norm": 0.6425524353981018, "learning_rate": 8.847355358453687e-05, "loss": 0.9289, "step": 5791 }, { "epoch": 1.0590968221520163, "grad_norm": 0.5482159852981567, "learning_rate": 8.844450312430576e-05, "loss": 0.9106, "step": 5792 }, { "epoch": 1.0592826612153874, "grad_norm": 0.5962523221969604, "learning_rate": 8.841545365244123e-05, "loss": 0.9134, "step": 5793 }, { "epoch": 1.0594685002787585, "grad_norm": 0.6635329723358154, "learning_rate": 8.838640517142797e-05, "loss": 0.8846, "step": 5794 }, { "epoch": 1.0596543393421296, "grad_norm": 0.6011704206466675, "learning_rate": 8.835735768375063e-05, "loss": 0.8404, "step": 5795 }, { "epoch": 1.059840178405501, "grad_norm": 0.6340507864952087, "learning_rate": 8.832831119189361e-05, "loss": 0.8327, "step": 5796 }, { "epoch": 1.060026017468872, "grad_norm": 0.6549215316772461, "learning_rate": 8.829926569834137e-05, "loss": 1.1531, "step": 5797 }, { "epoch": 1.0602118565322431, "grad_norm": 0.7567423582077026, "learning_rate": 8.827022120557822e-05, "loss": 1.0995, "step": 5798 }, { "epoch": 1.0603976955956143, "grad_norm": 0.6789305210113525, "learning_rate": 8.824117771608842e-05, "loss": 1.0125, "step": 5799 }, { "epoch": 1.0605835346589854, "grad_norm": 
0.8210663795471191, "learning_rate": 8.821213523235608e-05, "loss": 1.1365, "step": 5800 }, { "epoch": 1.0607693737223565, "grad_norm": 0.752582848072052, "learning_rate": 8.818309375686529e-05, "loss": 0.9847, "step": 5801 }, { "epoch": 1.0609552127857276, "grad_norm": 0.561202347278595, "learning_rate": 8.815405329210007e-05, "loss": 0.7168, "step": 5802 }, { "epoch": 1.0611410518490987, "grad_norm": 0.5668363571166992, "learning_rate": 8.812501384054422e-05, "loss": 0.9466, "step": 5803 }, { "epoch": 1.0613268909124698, "grad_norm": 0.7196042537689209, "learning_rate": 8.809597540468164e-05, "loss": 0.9273, "step": 5804 }, { "epoch": 1.061512729975841, "grad_norm": 0.6628761887550354, "learning_rate": 8.806693798699598e-05, "loss": 1.0514, "step": 5805 }, { "epoch": 1.061698569039212, "grad_norm": 0.6018509864807129, "learning_rate": 8.803790158997095e-05, "loss": 1.0525, "step": 5806 }, { "epoch": 1.061884408102583, "grad_norm": 0.7145560383796692, "learning_rate": 8.800886621609001e-05, "loss": 1.0421, "step": 5807 }, { "epoch": 1.0620702471659542, "grad_norm": 0.5981913805007935, "learning_rate": 8.797983186783666e-05, "loss": 0.793, "step": 5808 }, { "epoch": 1.0622560862293253, "grad_norm": 0.6121430993080139, "learning_rate": 8.79507985476943e-05, "loss": 0.8296, "step": 5809 }, { "epoch": 1.0624419252926964, "grad_norm": 0.6061347723007202, "learning_rate": 8.792176625814615e-05, "loss": 0.924, "step": 5810 }, { "epoch": 1.0626277643560678, "grad_norm": 0.6806914806365967, "learning_rate": 8.789273500167547e-05, "loss": 1.1121, "step": 5811 }, { "epoch": 1.0628136034194389, "grad_norm": 0.6414180397987366, "learning_rate": 8.78637047807653e-05, "loss": 0.9736, "step": 5812 }, { "epoch": 1.06299944248281, "grad_norm": 0.7345332503318787, "learning_rate": 8.783467559789872e-05, "loss": 0.9447, "step": 5813 }, { "epoch": 1.063185281546181, "grad_norm": 0.6870766282081604, "learning_rate": 8.78056474555586e-05, "loss": 1.0822, "step": 5814 }, { "epoch": 
1.0633711206095522, "grad_norm": 0.6061105132102966, "learning_rate": 8.777662035622781e-05, "loss": 0.7237, "step": 5815 }, { "epoch": 1.0635569596729233, "grad_norm": 0.5764292478561401, "learning_rate": 8.774759430238912e-05, "loss": 0.9691, "step": 5816 }, { "epoch": 1.0637427987362944, "grad_norm": 0.6762557029724121, "learning_rate": 8.771856929652515e-05, "loss": 1.107, "step": 5817 }, { "epoch": 1.0639286377996655, "grad_norm": 0.7746602296829224, "learning_rate": 8.768954534111853e-05, "loss": 0.8857, "step": 5818 }, { "epoch": 1.0641144768630366, "grad_norm": 0.5895851850509644, "learning_rate": 8.766052243865166e-05, "loss": 0.9329, "step": 5819 }, { "epoch": 1.0643003159264077, "grad_norm": 0.6938430666923523, "learning_rate": 8.7631500591607e-05, "loss": 0.9406, "step": 5820 }, { "epoch": 1.0644861549897788, "grad_norm": 0.6056532263755798, "learning_rate": 8.760247980246679e-05, "loss": 0.8533, "step": 5821 }, { "epoch": 1.06467199405315, "grad_norm": 0.6963773965835571, "learning_rate": 8.75734600737133e-05, "loss": 1.1039, "step": 5822 }, { "epoch": 1.064857833116521, "grad_norm": 1.3031995296478271, "learning_rate": 8.754444140782866e-05, "loss": 1.4601, "step": 5823 }, { "epoch": 1.0650436721798922, "grad_norm": 0.6234591603279114, "learning_rate": 8.751542380729482e-05, "loss": 0.9837, "step": 5824 }, { "epoch": 1.0652295112432633, "grad_norm": 0.6526420712471008, "learning_rate": 8.74864072745938e-05, "loss": 0.7511, "step": 5825 }, { "epoch": 1.0654153503066344, "grad_norm": 0.651283323764801, "learning_rate": 8.745739181220738e-05, "loss": 1.0071, "step": 5826 }, { "epoch": 1.0656011893700055, "grad_norm": 0.5934510231018066, "learning_rate": 8.742837742261737e-05, "loss": 0.9859, "step": 5827 }, { "epoch": 1.0657870284333768, "grad_norm": 0.5294697880744934, "learning_rate": 8.739936410830538e-05, "loss": 0.8086, "step": 5828 }, { "epoch": 1.065972867496748, "grad_norm": 0.6745396256446838, "learning_rate": 8.737035187175302e-05, "loss": 
1.0394, "step": 5829 }, { "epoch": 1.066158706560119, "grad_norm": 0.6572603583335876, "learning_rate": 8.73413407154418e-05, "loss": 0.7895, "step": 5830 }, { "epoch": 1.0663445456234901, "grad_norm": 0.7055622339248657, "learning_rate": 8.731233064185302e-05, "loss": 1.028, "step": 5831 }, { "epoch": 1.0665303846868612, "grad_norm": 0.6637229323387146, "learning_rate": 8.728332165346806e-05, "loss": 1.1012, "step": 5832 }, { "epoch": 1.0667162237502323, "grad_norm": 0.5706199407577515, "learning_rate": 8.725431375276804e-05, "loss": 0.9943, "step": 5833 }, { "epoch": 1.0669020628136034, "grad_norm": 0.6235327124595642, "learning_rate": 8.722530694223415e-05, "loss": 0.9364, "step": 5834 }, { "epoch": 1.0670879018769746, "grad_norm": 0.5939970016479492, "learning_rate": 8.719630122434734e-05, "loss": 0.8755, "step": 5835 }, { "epoch": 1.0672737409403457, "grad_norm": 0.6441448330879211, "learning_rate": 8.716729660158856e-05, "loss": 0.865, "step": 5836 }, { "epoch": 1.0674595800037168, "grad_norm": 0.5464093685150146, "learning_rate": 8.713829307643867e-05, "loss": 0.8899, "step": 5837 }, { "epoch": 1.0676454190670879, "grad_norm": 0.6502255797386169, "learning_rate": 8.710929065137834e-05, "loss": 1.046, "step": 5838 }, { "epoch": 1.067831258130459, "grad_norm": 0.6012847423553467, "learning_rate": 8.708028932888828e-05, "loss": 0.7964, "step": 5839 }, { "epoch": 1.06801709719383, "grad_norm": 0.64566969871521, "learning_rate": 8.705128911144894e-05, "loss": 1.147, "step": 5840 }, { "epoch": 1.0682029362572012, "grad_norm": 0.73211669921875, "learning_rate": 8.702229000154087e-05, "loss": 1.2799, "step": 5841 }, { "epoch": 1.0683887753205723, "grad_norm": 0.5920413136482239, "learning_rate": 8.699329200164434e-05, "loss": 0.9163, "step": 5842 }, { "epoch": 1.0685746143839434, "grad_norm": 0.6712371706962585, "learning_rate": 8.696429511423968e-05, "loss": 1.1072, "step": 5843 }, { "epoch": 1.0687604534473145, "grad_norm": 0.6476579308509827, "learning_rate": 
8.693529934180706e-05, "loss": 0.9671, "step": 5844 }, { "epoch": 1.0689462925106858, "grad_norm": 0.6458027362823486, "learning_rate": 8.69063046868265e-05, "loss": 0.9178, "step": 5845 }, { "epoch": 1.069132131574057, "grad_norm": 0.6158638000488281, "learning_rate": 8.687731115177802e-05, "loss": 0.9185, "step": 5846 }, { "epoch": 1.069317970637428, "grad_norm": 0.6359545588493347, "learning_rate": 8.684831873914145e-05, "loss": 0.913, "step": 5847 }, { "epoch": 1.0695038097007992, "grad_norm": 0.5854082107543945, "learning_rate": 8.681932745139662e-05, "loss": 0.6984, "step": 5848 }, { "epoch": 1.0696896487641703, "grad_norm": 0.5653736591339111, "learning_rate": 8.679033729102317e-05, "loss": 0.8022, "step": 5849 }, { "epoch": 1.0698754878275414, "grad_norm": 0.7520171403884888, "learning_rate": 8.676134826050072e-05, "loss": 1.1837, "step": 5850 }, { "epoch": 1.0700613268909125, "grad_norm": 0.6621032953262329, "learning_rate": 8.673236036230882e-05, "loss": 0.971, "step": 5851 }, { "epoch": 1.0702471659542836, "grad_norm": 0.6689890027046204, "learning_rate": 8.670337359892678e-05, "loss": 1.07, "step": 5852 }, { "epoch": 1.0704330050176547, "grad_norm": 0.6319896578788757, "learning_rate": 8.667438797283397e-05, "loss": 1.1009, "step": 5853 }, { "epoch": 1.0706188440810258, "grad_norm": 0.843813419342041, "learning_rate": 8.664540348650951e-05, "loss": 0.9687, "step": 5854 }, { "epoch": 1.070804683144397, "grad_norm": 0.5611274838447571, "learning_rate": 8.661642014243258e-05, "loss": 0.7719, "step": 5855 }, { "epoch": 1.070990522207768, "grad_norm": 0.6595000624656677, "learning_rate": 8.658743794308212e-05, "loss": 0.7983, "step": 5856 }, { "epoch": 1.0711763612711391, "grad_norm": 0.7374562621116638, "learning_rate": 8.655845689093712e-05, "loss": 0.869, "step": 5857 }, { "epoch": 1.0713622003345102, "grad_norm": 0.7460797429084778, "learning_rate": 8.652947698847636e-05, "loss": 0.9378, "step": 5858 }, { "epoch": 1.0715480393978813, "grad_norm": 
0.6679945588111877, "learning_rate": 8.650049823817851e-05, "loss": 0.9014, "step": 5859 }, { "epoch": 1.0717338784612525, "grad_norm": 0.7798948884010315, "learning_rate": 8.647152064252226e-05, "loss": 1.0417, "step": 5860 }, { "epoch": 1.0719197175246236, "grad_norm": 0.7319615483283997, "learning_rate": 8.644254420398607e-05, "loss": 0.894, "step": 5861 }, { "epoch": 1.072105556587995, "grad_norm": 0.7500199675559998, "learning_rate": 8.641356892504838e-05, "loss": 1.1208, "step": 5862 }, { "epoch": 1.072291395651366, "grad_norm": 0.7801706194877625, "learning_rate": 8.63845948081875e-05, "loss": 1.1542, "step": 5863 }, { "epoch": 1.072477234714737, "grad_norm": 0.6065161228179932, "learning_rate": 8.635562185588164e-05, "loss": 0.8436, "step": 5864 }, { "epoch": 1.0726630737781082, "grad_norm": 0.7084574103355408, "learning_rate": 8.632665007060896e-05, "loss": 1.0358, "step": 5865 }, { "epoch": 1.0728489128414793, "grad_norm": 0.5427306294441223, "learning_rate": 8.629767945484741e-05, "loss": 0.6025, "step": 5866 }, { "epoch": 1.0730347519048504, "grad_norm": 0.5843170285224915, "learning_rate": 8.626871001107501e-05, "loss": 0.9039, "step": 5867 }, { "epoch": 1.0732205909682215, "grad_norm": 0.6646682024002075, "learning_rate": 8.623974174176947e-05, "loss": 0.7304, "step": 5868 }, { "epoch": 1.0734064300315926, "grad_norm": 0.4983295500278473, "learning_rate": 8.62107746494086e-05, "loss": 0.6743, "step": 5869 }, { "epoch": 1.0735922690949637, "grad_norm": 0.8558180332183838, "learning_rate": 8.618180873646994e-05, "loss": 1.0591, "step": 5870 }, { "epoch": 1.0737781081583349, "grad_norm": 0.6745973229408264, "learning_rate": 8.615284400543104e-05, "loss": 0.9655, "step": 5871 }, { "epoch": 1.073963947221706, "grad_norm": 0.6001264452934265, "learning_rate": 8.612388045876934e-05, "loss": 1.064, "step": 5872 }, { "epoch": 1.074149786285077, "grad_norm": 0.6128737330436707, "learning_rate": 8.609491809896212e-05, "loss": 0.8209, "step": 5873 }, { "epoch": 
1.0743356253484482, "grad_norm": 0.7345498204231262, "learning_rate": 8.606595692848664e-05, "loss": 0.9531, "step": 5874 }, { "epoch": 1.0745214644118193, "grad_norm": 0.6948199272155762, "learning_rate": 8.603699694981995e-05, "loss": 0.942, "step": 5875 }, { "epoch": 1.0747073034751904, "grad_norm": 0.568913996219635, "learning_rate": 8.600803816543912e-05, "loss": 1.0326, "step": 5876 }, { "epoch": 1.0748931425385617, "grad_norm": 0.6175299882888794, "learning_rate": 8.5979080577821e-05, "loss": 1.0009, "step": 5877 }, { "epoch": 1.0750789816019328, "grad_norm": 2.101738452911377, "learning_rate": 8.59501241894424e-05, "loss": 1.8329, "step": 5878 }, { "epoch": 1.075264820665304, "grad_norm": 0.6964477896690369, "learning_rate": 8.592116900278008e-05, "loss": 0.9948, "step": 5879 }, { "epoch": 1.075450659728675, "grad_norm": 0.6877242922782898, "learning_rate": 8.589221502031057e-05, "loss": 0.8998, "step": 5880 }, { "epoch": 1.0756364987920461, "grad_norm": 0.7231934666633606, "learning_rate": 8.586326224451045e-05, "loss": 0.8386, "step": 5881 }, { "epoch": 1.0758223378554173, "grad_norm": 0.6604142189025879, "learning_rate": 8.583431067785602e-05, "loss": 0.9283, "step": 5882 }, { "epoch": 1.0760081769187884, "grad_norm": 0.6713509559631348, "learning_rate": 8.580536032282366e-05, "loss": 1.0227, "step": 5883 }, { "epoch": 1.0761940159821595, "grad_norm": 0.5723233819007874, "learning_rate": 8.577641118188945e-05, "loss": 0.8204, "step": 5884 }, { "epoch": 1.0763798550455306, "grad_norm": 0.6917603611946106, "learning_rate": 8.574746325752956e-05, "loss": 1.1986, "step": 5885 }, { "epoch": 1.0765656941089017, "grad_norm": 0.6363825798034668, "learning_rate": 8.571851655221995e-05, "loss": 0.9761, "step": 5886 }, { "epoch": 1.0767515331722728, "grad_norm": 0.6058530807495117, "learning_rate": 8.568957106843648e-05, "loss": 0.997, "step": 5887 }, { "epoch": 1.076937372235644, "grad_norm": 0.7039767503738403, "learning_rate": 8.566062680865494e-05, "loss": 
1.1381, "step": 5888 }, { "epoch": 1.077123211299015, "grad_norm": 0.7130062580108643, "learning_rate": 8.563168377535095e-05, "loss": 1.0521, "step": 5889 }, { "epoch": 1.0773090503623861, "grad_norm": 0.6804975867271423, "learning_rate": 8.560274197100014e-05, "loss": 0.9519, "step": 5890 }, { "epoch": 1.0774948894257572, "grad_norm": 0.6732782125473022, "learning_rate": 8.557380139807789e-05, "loss": 0.8481, "step": 5891 }, { "epoch": 1.0776807284891283, "grad_norm": 0.6793938279151917, "learning_rate": 8.554486205905959e-05, "loss": 0.8197, "step": 5892 }, { "epoch": 1.0778665675524994, "grad_norm": 0.6705108880996704, "learning_rate": 8.551592395642048e-05, "loss": 0.9954, "step": 5893 }, { "epoch": 1.0780524066158708, "grad_norm": 0.6985551714897156, "learning_rate": 8.54869870926357e-05, "loss": 0.9462, "step": 5894 }, { "epoch": 1.0782382456792419, "grad_norm": 0.6636293530464172, "learning_rate": 8.545805147018031e-05, "loss": 0.9449, "step": 5895 }, { "epoch": 1.078424084742613, "grad_norm": 0.7172894477844238, "learning_rate": 8.542911709152917e-05, "loss": 1.0889, "step": 5896 }, { "epoch": 1.078609923805984, "grad_norm": 0.7706103920936584, "learning_rate": 8.540018395915718e-05, "loss": 1.0417, "step": 5897 }, { "epoch": 1.0787957628693552, "grad_norm": 0.6476448774337769, "learning_rate": 8.537125207553897e-05, "loss": 0.999, "step": 5898 }, { "epoch": 1.0789816019327263, "grad_norm": 0.572585940361023, "learning_rate": 8.534232144314918e-05, "loss": 0.759, "step": 5899 }, { "epoch": 1.0791674409960974, "grad_norm": 0.630120038986206, "learning_rate": 8.531339206446234e-05, "loss": 0.8345, "step": 5900 }, { "epoch": 1.0793532800594685, "grad_norm": 0.5526821613311768, "learning_rate": 8.52844639419528e-05, "loss": 0.9357, "step": 5901 }, { "epoch": 1.0795391191228396, "grad_norm": 0.6959087252616882, "learning_rate": 8.525553707809491e-05, "loss": 0.8057, "step": 5902 }, { "epoch": 1.0797249581862107, "grad_norm": 0.75086510181427, "learning_rate": 
8.522661147536274e-05, "loss": 1.0193, "step": 5903 }, { "epoch": 1.0799107972495818, "grad_norm": 0.6601825952529907, "learning_rate": 8.519768713623047e-05, "loss": 1.0793, "step": 5904 }, { "epoch": 1.080096636312953, "grad_norm": 0.5780887603759766, "learning_rate": 8.516876406317198e-05, "loss": 0.9708, "step": 5905 }, { "epoch": 1.080282475376324, "grad_norm": 0.6262563467025757, "learning_rate": 8.513984225866114e-05, "loss": 0.8962, "step": 5906 }, { "epoch": 1.0804683144396952, "grad_norm": 0.6281941533088684, "learning_rate": 8.511092172517175e-05, "loss": 0.9141, "step": 5907 }, { "epoch": 1.0806541535030663, "grad_norm": 0.8933135867118835, "learning_rate": 8.508200246517739e-05, "loss": 1.0007, "step": 5908 }, { "epoch": 1.0808399925664374, "grad_norm": 0.6440215706825256, "learning_rate": 8.50530844811516e-05, "loss": 1.1193, "step": 5909 }, { "epoch": 1.0810258316298085, "grad_norm": 0.618796706199646, "learning_rate": 8.502416777556778e-05, "loss": 0.8557, "step": 5910 }, { "epoch": 1.0812116706931798, "grad_norm": 0.7517762780189514, "learning_rate": 8.49952523508993e-05, "loss": 1.0686, "step": 5911 }, { "epoch": 1.081397509756551, "grad_norm": 0.6386401057243347, "learning_rate": 8.496633820961927e-05, "loss": 0.956, "step": 5912 }, { "epoch": 1.081583348819922, "grad_norm": 0.6620604991912842, "learning_rate": 8.493742535420083e-05, "loss": 0.8032, "step": 5913 }, { "epoch": 1.0817691878832931, "grad_norm": 0.6698114275932312, "learning_rate": 8.4908513787117e-05, "loss": 0.8991, "step": 5914 }, { "epoch": 1.0819550269466642, "grad_norm": 0.6888457536697388, "learning_rate": 8.487960351084054e-05, "loss": 0.8746, "step": 5915 }, { "epoch": 1.0821408660100353, "grad_norm": 0.6484079957008362, "learning_rate": 8.485069452784431e-05, "loss": 0.822, "step": 5916 }, { "epoch": 1.0823267050734064, "grad_norm": 0.743229329586029, "learning_rate": 8.48217868406009e-05, "loss": 0.8274, "step": 5917 }, { "epoch": 1.0825125441367776, "grad_norm": 
0.6204001307487488, "learning_rate": 8.479288045158291e-05, "loss": 0.8803, "step": 5918 }, { "epoch": 1.0826983832001487, "grad_norm": 0.7072806358337402, "learning_rate": 8.476397536326266e-05, "loss": 1.1312, "step": 5919 }, { "epoch": 1.0828842222635198, "grad_norm": 0.6361244320869446, "learning_rate": 8.473507157811254e-05, "loss": 0.7503, "step": 5920 }, { "epoch": 1.0830700613268909, "grad_norm": 0.6259986758232117, "learning_rate": 8.470616909860479e-05, "loss": 1.0263, "step": 5921 }, { "epoch": 1.083255900390262, "grad_norm": 0.7605748176574707, "learning_rate": 8.46772679272114e-05, "loss": 0.9734, "step": 5922 }, { "epoch": 1.083441739453633, "grad_norm": 0.6772533059120178, "learning_rate": 8.464836806640443e-05, "loss": 0.9361, "step": 5923 }, { "epoch": 1.0836275785170042, "grad_norm": 0.5974377393722534, "learning_rate": 8.46194695186557e-05, "loss": 0.9405, "step": 5924 }, { "epoch": 1.0838134175803753, "grad_norm": 0.7039512991905212, "learning_rate": 8.4590572286437e-05, "loss": 1.1505, "step": 5925 }, { "epoch": 1.0839992566437464, "grad_norm": 0.6239109635353088, "learning_rate": 8.456167637221993e-05, "loss": 0.9038, "step": 5926 }, { "epoch": 1.0841850957071175, "grad_norm": 0.6761141419410706, "learning_rate": 8.453278177847606e-05, "loss": 0.8879, "step": 5927 }, { "epoch": 1.0843709347704888, "grad_norm": 0.580737829208374, "learning_rate": 8.45038885076768e-05, "loss": 1.1171, "step": 5928 }, { "epoch": 1.08455677383386, "grad_norm": 0.702734112739563, "learning_rate": 8.447499656229344e-05, "loss": 0.8489, "step": 5929 }, { "epoch": 1.084742612897231, "grad_norm": 0.664533257484436, "learning_rate": 8.444610594479718e-05, "loss": 0.9631, "step": 5930 }, { "epoch": 1.0849284519606022, "grad_norm": 0.887567400932312, "learning_rate": 8.441721665765909e-05, "loss": 0.9973, "step": 5931 }, { "epoch": 1.0851142910239733, "grad_norm": 0.6015354990959167, "learning_rate": 8.438832870335018e-05, "loss": 0.901, "step": 5932 }, { "epoch": 
1.0853001300873444, "grad_norm": 0.7177890539169312, "learning_rate": 8.43594420843412e-05, "loss": 1.0315, "step": 5933 }, { "epoch": 1.0854859691507155, "grad_norm": 0.667842447757721, "learning_rate": 8.433055680310295e-05, "loss": 1.1921, "step": 5934 }, { "epoch": 1.0856718082140866, "grad_norm": 0.7782358527183533, "learning_rate": 8.430167286210608e-05, "loss": 0.9981, "step": 5935 }, { "epoch": 1.0858576472774577, "grad_norm": 0.6504238247871399, "learning_rate": 8.427279026382103e-05, "loss": 0.8409, "step": 5936 }, { "epoch": 1.0860434863408288, "grad_norm": 0.6968525648117065, "learning_rate": 8.424390901071822e-05, "loss": 0.9398, "step": 5937 }, { "epoch": 1.0862293254042, "grad_norm": 0.6084544658660889, "learning_rate": 8.421502910526791e-05, "loss": 1.0434, "step": 5938 }, { "epoch": 1.086415164467571, "grad_norm": 0.5817170143127441, "learning_rate": 8.418615054994034e-05, "loss": 0.8075, "step": 5939 }, { "epoch": 1.0866010035309421, "grad_norm": 0.708842396736145, "learning_rate": 8.415727334720543e-05, "loss": 1.01, "step": 5940 }, { "epoch": 1.0867868425943132, "grad_norm": 0.6967892050743103, "learning_rate": 8.412839749953316e-05, "loss": 1.1844, "step": 5941 }, { "epoch": 1.0869726816576843, "grad_norm": 0.5187710523605347, "learning_rate": 8.409952300939343e-05, "loss": 0.709, "step": 5942 }, { "epoch": 1.0871585207210557, "grad_norm": 0.7596275210380554, "learning_rate": 8.40706498792558e-05, "loss": 0.9373, "step": 5943 }, { "epoch": 1.0873443597844268, "grad_norm": 0.4423999786376953, "learning_rate": 8.404177811158994e-05, "loss": 0.4896, "step": 5944 }, { "epoch": 1.087530198847798, "grad_norm": 0.6767209768295288, "learning_rate": 8.401290770886525e-05, "loss": 1.0141, "step": 5945 }, { "epoch": 1.087716037911169, "grad_norm": 0.5728704333305359, "learning_rate": 8.398403867355118e-05, "loss": 0.7946, "step": 5946 }, { "epoch": 1.08790187697454, "grad_norm": 0.6678792238235474, "learning_rate": 8.395517100811685e-05, "loss": 0.8289, 
"step": 5947 }, { "epoch": 1.0880877160379112, "grad_norm": 0.6508834362030029, "learning_rate": 8.392630471503141e-05, "loss": 0.9429, "step": 5948 }, { "epoch": 1.0882735551012823, "grad_norm": 0.6657683253288269, "learning_rate": 8.389743979676392e-05, "loss": 0.8843, "step": 5949 }, { "epoch": 1.0884593941646534, "grad_norm": 0.5776364803314209, "learning_rate": 8.386857625578317e-05, "loss": 0.7137, "step": 5950 }, { "epoch": 1.0886452332280245, "grad_norm": 0.6650852560997009, "learning_rate": 8.383971409455798e-05, "loss": 0.896, "step": 5951 }, { "epoch": 1.0888310722913956, "grad_norm": 0.6167944073677063, "learning_rate": 8.381085331555693e-05, "loss": 0.8864, "step": 5952 }, { "epoch": 1.0890169113547667, "grad_norm": 0.7045409679412842, "learning_rate": 8.378199392124864e-05, "loss": 0.8547, "step": 5953 }, { "epoch": 1.0892027504181379, "grad_norm": 0.5888659954071045, "learning_rate": 8.375313591410141e-05, "loss": 0.8933, "step": 5954 }, { "epoch": 1.089388589481509, "grad_norm": 0.6576387882232666, "learning_rate": 8.37242792965836e-05, "loss": 0.7533, "step": 5955 }, { "epoch": 1.08957442854488, "grad_norm": 0.5648308992385864, "learning_rate": 8.369542407116338e-05, "loss": 0.9638, "step": 5956 }, { "epoch": 1.0897602676082512, "grad_norm": 0.6234437227249146, "learning_rate": 8.366657024030876e-05, "loss": 0.9858, "step": 5957 }, { "epoch": 1.0899461066716223, "grad_norm": 1.7195453643798828, "learning_rate": 8.363771780648768e-05, "loss": 1.3238, "step": 5958 }, { "epoch": 1.0901319457349934, "grad_norm": 0.6707358360290527, "learning_rate": 8.360886677216797e-05, "loss": 0.7908, "step": 5959 }, { "epoch": 1.0903177847983647, "grad_norm": 0.657151997089386, "learning_rate": 8.358001713981731e-05, "loss": 1.1087, "step": 5960 }, { "epoch": 1.0905036238617358, "grad_norm": 0.7293577790260315, "learning_rate": 8.355116891190325e-05, "loss": 0.8512, "step": 5961 }, { "epoch": 1.090689462925107, "grad_norm": 0.5471261739730835, "learning_rate": 
8.352232209089328e-05, "loss": 0.6389, "step": 5962 }, { "epoch": 1.090875301988478, "grad_norm": 0.679717481136322, "learning_rate": 8.349347667925474e-05, "loss": 0.8887, "step": 5963 }, { "epoch": 1.0910611410518491, "grad_norm": 0.6497548222541809, "learning_rate": 8.346463267945478e-05, "loss": 0.7635, "step": 5964 }, { "epoch": 1.0912469801152203, "grad_norm": 0.6662367582321167, "learning_rate": 8.343579009396057e-05, "loss": 0.8766, "step": 5965 }, { "epoch": 1.0914328191785914, "grad_norm": 0.7309990525245667, "learning_rate": 8.340694892523898e-05, "loss": 1.0396, "step": 5966 }, { "epoch": 1.0916186582419625, "grad_norm": 0.6899405717849731, "learning_rate": 8.337810917575694e-05, "loss": 1.0736, "step": 5967 }, { "epoch": 1.0918044973053336, "grad_norm": 0.7755197286605835, "learning_rate": 8.334927084798112e-05, "loss": 0.8966, "step": 5968 }, { "epoch": 1.0919903363687047, "grad_norm": 0.996249258518219, "learning_rate": 8.332043394437815e-05, "loss": 1.0628, "step": 5969 }, { "epoch": 1.0921761754320758, "grad_norm": 0.6849657893180847, "learning_rate": 8.329159846741457e-05, "loss": 0.7763, "step": 5970 }, { "epoch": 1.092362014495447, "grad_norm": 0.5944156646728516, "learning_rate": 8.326276441955663e-05, "loss": 0.9079, "step": 5971 }, { "epoch": 1.092547853558818, "grad_norm": 0.6766306757926941, "learning_rate": 8.323393180327066e-05, "loss": 1.0014, "step": 5972 }, { "epoch": 1.0927336926221891, "grad_norm": 0.6476278901100159, "learning_rate": 8.320510062102272e-05, "loss": 1.1559, "step": 5973 }, { "epoch": 1.0929195316855602, "grad_norm": 0.6700665354728699, "learning_rate": 8.317627087527882e-05, "loss": 0.892, "step": 5974 }, { "epoch": 1.0931053707489313, "grad_norm": 0.6307082176208496, "learning_rate": 8.314744256850482e-05, "loss": 0.8528, "step": 5975 }, { "epoch": 1.0932912098123024, "grad_norm": 0.6499921083450317, "learning_rate": 8.311861570316648e-05, "loss": 1.0078, "step": 5976 }, { "epoch": 1.0934770488756738, "grad_norm": 
0.5335561633110046, "learning_rate": 8.308979028172948e-05, "loss": 0.6987, "step": 5977 }, { "epoch": 1.0936628879390449, "grad_norm": 1.7582954168319702, "learning_rate": 8.306096630665921e-05, "loss": 1.4374, "step": 5978 }, { "epoch": 1.093848727002416, "grad_norm": 0.8077002763748169, "learning_rate": 8.303214378042114e-05, "loss": 0.8452, "step": 5979 }, { "epoch": 1.094034566065787, "grad_norm": 0.6156245470046997, "learning_rate": 8.300332270548044e-05, "loss": 0.9902, "step": 5980 }, { "epoch": 1.0942204051291582, "grad_norm": 0.6083616018295288, "learning_rate": 8.297450308430231e-05, "loss": 0.9913, "step": 5981 }, { "epoch": 1.0944062441925293, "grad_norm": 0.5292848348617554, "learning_rate": 8.29456849193517e-05, "loss": 0.8474, "step": 5982 }, { "epoch": 1.0945920832559004, "grad_norm": 0.6098827123641968, "learning_rate": 8.291686821309353e-05, "loss": 0.8643, "step": 5983 }, { "epoch": 1.0947779223192715, "grad_norm": 0.7073376178741455, "learning_rate": 8.288805296799259e-05, "loss": 0.9915, "step": 5984 }, { "epoch": 1.0949637613826426, "grad_norm": 0.6034481525421143, "learning_rate": 8.285923918651341e-05, "loss": 1.0649, "step": 5985 }, { "epoch": 1.0951496004460137, "grad_norm": 0.6212545037269592, "learning_rate": 8.28304268711206e-05, "loss": 0.7996, "step": 5986 }, { "epoch": 1.0953354395093848, "grad_norm": 0.6276280283927917, "learning_rate": 8.280161602427845e-05, "loss": 0.945, "step": 5987 }, { "epoch": 1.095521278572756, "grad_norm": 0.7413529753684998, "learning_rate": 8.277280664845125e-05, "loss": 0.8979, "step": 5988 }, { "epoch": 1.095707117636127, "grad_norm": 0.6977653503417969, "learning_rate": 8.274399874610312e-05, "loss": 1.0445, "step": 5989 }, { "epoch": 1.0958929566994982, "grad_norm": 1.6293609142303467, "learning_rate": 8.271519231969807e-05, "loss": 1.4387, "step": 5990 }, { "epoch": 1.0960787957628693, "grad_norm": 0.5372196435928345, "learning_rate": 8.268638737170004e-05, "loss": 0.7948, "step": 5991 }, { "epoch": 
1.0962646348262406, "grad_norm": 0.6748941540718079, "learning_rate": 8.265758390457266e-05, "loss": 0.6746, "step": 5992 }, { "epoch": 1.0964504738896117, "grad_norm": 0.8666142225265503, "learning_rate": 8.262878192077965e-05, "loss": 1.3074, "step": 5993 }, { "epoch": 1.0966363129529828, "grad_norm": 0.6674154996871948, "learning_rate": 8.259998142278442e-05, "loss": 1.1204, "step": 5994 }, { "epoch": 1.096822152016354, "grad_norm": 0.6468871235847473, "learning_rate": 8.257118241305042e-05, "loss": 1.0995, "step": 5995 }, { "epoch": 1.097007991079725, "grad_norm": 0.7309514880180359, "learning_rate": 8.254238489404083e-05, "loss": 1.0042, "step": 5996 }, { "epoch": 1.0971938301430961, "grad_norm": 0.7083085179328918, "learning_rate": 8.251358886821878e-05, "loss": 1.0292, "step": 5997 }, { "epoch": 1.0973796692064672, "grad_norm": 0.6642683744430542, "learning_rate": 8.248479433804732e-05, "loss": 1.1411, "step": 5998 }, { "epoch": 1.0975655082698383, "grad_norm": 0.7239875793457031, "learning_rate": 8.245600130598922e-05, "loss": 0.7915, "step": 5999 }, { "epoch": 1.0977513473332094, "grad_norm": 0.8348709940910339, "learning_rate": 8.242720977450728e-05, "loss": 1.1414, "step": 6000 }, { "epoch": 1.0979371863965806, "grad_norm": 0.6298574209213257, "learning_rate": 8.239841974606403e-05, "loss": 1.1092, "step": 6001 }, { "epoch": 1.0981230254599517, "grad_norm": 1.916271686553955, "learning_rate": 8.2369631223122e-05, "loss": 1.7966, "step": 6002 }, { "epoch": 1.0983088645233228, "grad_norm": 0.7313844561576843, "learning_rate": 8.23408442081435e-05, "loss": 0.9692, "step": 6003 }, { "epoch": 1.0984947035866939, "grad_norm": 0.5213794112205505, "learning_rate": 8.231205870359077e-05, "loss": 0.7192, "step": 6004 }, { "epoch": 1.098680542650065, "grad_norm": 0.6529609560966492, "learning_rate": 8.22832747119259e-05, "loss": 1.0331, "step": 6005 }, { "epoch": 1.098866381713436, "grad_norm": 0.6834220290184021, "learning_rate": 8.225449223561081e-05, "loss": 
0.9095, "step": 6006 }, { "epoch": 1.0990522207768072, "grad_norm": 0.6474689841270447, "learning_rate": 8.222571127710738e-05, "loss": 1.1353, "step": 6007 }, { "epoch": 1.0992380598401783, "grad_norm": 0.8416957855224609, "learning_rate": 8.219693183887726e-05, "loss": 1.3315, "step": 6008 }, { "epoch": 1.0994238989035496, "grad_norm": 0.631695568561554, "learning_rate": 8.216815392338205e-05, "loss": 0.9463, "step": 6009 }, { "epoch": 1.0996097379669207, "grad_norm": 0.6356896758079529, "learning_rate": 8.213937753308316e-05, "loss": 1.0267, "step": 6010 }, { "epoch": 1.0997955770302918, "grad_norm": 0.6175408363342285, "learning_rate": 8.211060267044191e-05, "loss": 0.9336, "step": 6011 }, { "epoch": 1.099981416093663, "grad_norm": 1.0049728155136108, "learning_rate": 8.20818293379195e-05, "loss": 1.0377, "step": 6012 }, { "epoch": 1.100167255157034, "grad_norm": 0.5931586027145386, "learning_rate": 8.205305753797691e-05, "loss": 0.9247, "step": 6013 }, { "epoch": 1.1003530942204052, "grad_norm": 0.7019885778427124, "learning_rate": 8.202428727307516e-05, "loss": 0.9673, "step": 6014 }, { "epoch": 1.1005389332837763, "grad_norm": 0.666867196559906, "learning_rate": 8.199551854567492e-05, "loss": 0.7624, "step": 6015 }, { "epoch": 1.1007247723471474, "grad_norm": 0.6581511497497559, "learning_rate": 8.196675135823689e-05, "loss": 0.7796, "step": 6016 }, { "epoch": 1.1009106114105185, "grad_norm": 0.6427851319313049, "learning_rate": 8.193798571322164e-05, "loss": 0.9612, "step": 6017 }, { "epoch": 1.1010964504738896, "grad_norm": 0.7291167378425598, "learning_rate": 8.190922161308946e-05, "loss": 1.08, "step": 6018 }, { "epoch": 1.1012822895372607, "grad_norm": 0.6328938007354736, "learning_rate": 8.188045906030069e-05, "loss": 0.9169, "step": 6019 }, { "epoch": 1.1014681286006318, "grad_norm": 0.7824090123176575, "learning_rate": 8.185169805731537e-05, "loss": 0.9103, "step": 6020 }, { "epoch": 1.101653967664003, "grad_norm": 0.6461734771728516, 
"learning_rate": 8.18229386065936e-05, "loss": 0.8457, "step": 6021 }, { "epoch": 1.101839806727374, "grad_norm": 0.6358814239501953, "learning_rate": 8.179418071059514e-05, "loss": 0.8985, "step": 6022 }, { "epoch": 1.1020256457907451, "grad_norm": 0.6370760202407837, "learning_rate": 8.176542437177974e-05, "loss": 0.8347, "step": 6023 }, { "epoch": 1.1022114848541162, "grad_norm": 0.7202227115631104, "learning_rate": 8.173666959260705e-05, "loss": 1.3205, "step": 6024 }, { "epoch": 1.1023973239174873, "grad_norm": 0.611150324344635, "learning_rate": 8.170791637553643e-05, "loss": 0.8695, "step": 6025 }, { "epoch": 1.1025831629808587, "grad_norm": 0.5846618413925171, "learning_rate": 8.16791647230273e-05, "loss": 0.8868, "step": 6026 }, { "epoch": 1.1027690020442298, "grad_norm": 0.7279630303382874, "learning_rate": 8.165041463753876e-05, "loss": 1.0683, "step": 6027 }, { "epoch": 1.102954841107601, "grad_norm": 0.6632223725318909, "learning_rate": 8.162166612152998e-05, "loss": 0.9749, "step": 6028 }, { "epoch": 1.103140680170972, "grad_norm": 0.7467746138572693, "learning_rate": 8.159291917745976e-05, "loss": 0.8073, "step": 6029 }, { "epoch": 1.103326519234343, "grad_norm": 0.6717825531959534, "learning_rate": 8.156417380778696e-05, "loss": 0.827, "step": 6030 }, { "epoch": 1.1035123582977142, "grad_norm": 0.7068509459495544, "learning_rate": 8.153543001497024e-05, "loss": 0.837, "step": 6031 }, { "epoch": 1.1036981973610853, "grad_norm": 0.6751839518547058, "learning_rate": 8.150668780146807e-05, "loss": 0.975, "step": 6032 }, { "epoch": 1.1038840364244564, "grad_norm": 0.5865058898925781, "learning_rate": 8.147794716973889e-05, "loss": 0.6932, "step": 6033 }, { "epoch": 1.1040698754878275, "grad_norm": 0.6836567521095276, "learning_rate": 8.144920812224089e-05, "loss": 0.9753, "step": 6034 }, { "epoch": 1.1042557145511986, "grad_norm": 0.6463656425476074, "learning_rate": 8.142047066143226e-05, "loss": 1.0225, "step": 6035 }, { "epoch": 1.1044415536145697, 
"grad_norm": 0.6912021636962891, "learning_rate": 8.139173478977087e-05, "loss": 1.0064, "step": 6036 }, { "epoch": 1.1046273926779409, "grad_norm": 0.6893657445907593, "learning_rate": 8.136300050971464e-05, "loss": 0.8931, "step": 6037 }, { "epoch": 1.104813231741312, "grad_norm": 0.811796247959137, "learning_rate": 8.13342678237213e-05, "loss": 0.6038, "step": 6038 }, { "epoch": 1.104999070804683, "grad_norm": 0.6037810444831848, "learning_rate": 8.130553673424832e-05, "loss": 0.9233, "step": 6039 }, { "epoch": 1.1051849098680542, "grad_norm": 0.730668842792511, "learning_rate": 8.127680724375322e-05, "loss": 0.8656, "step": 6040 }, { "epoch": 1.1053707489314253, "grad_norm": 0.7074486017227173, "learning_rate": 8.124807935469323e-05, "loss": 0.9637, "step": 6041 }, { "epoch": 1.1055565879947964, "grad_norm": 0.6678810715675354, "learning_rate": 8.121935306952558e-05, "loss": 0.9592, "step": 6042 }, { "epoch": 1.1057424270581677, "grad_norm": 0.6933256983757019, "learning_rate": 8.119062839070723e-05, "loss": 1.1061, "step": 6043 }, { "epoch": 1.1059282661215388, "grad_norm": 1.7148810625076294, "learning_rate": 8.116190532069507e-05, "loss": 1.7415, "step": 6044 }, { "epoch": 1.10611410518491, "grad_norm": 0.7014167308807373, "learning_rate": 8.11331838619459e-05, "loss": 1.0122, "step": 6045 }, { "epoch": 1.106299944248281, "grad_norm": 0.8506425619125366, "learning_rate": 8.110446401691627e-05, "loss": 1.0112, "step": 6046 }, { "epoch": 1.1064857833116521, "grad_norm": 0.7145703434944153, "learning_rate": 8.107574578806267e-05, "loss": 0.841, "step": 6047 }, { "epoch": 1.1066716223750233, "grad_norm": 0.6292847990989685, "learning_rate": 8.104702917784143e-05, "loss": 0.898, "step": 6048 }, { "epoch": 1.1068574614383944, "grad_norm": 0.709343671798706, "learning_rate": 8.101831418870877e-05, "loss": 0.8223, "step": 6049 }, { "epoch": 1.1070433005017655, "grad_norm": 0.6488339900970459, "learning_rate": 8.09896008231207e-05, "loss": 0.7187, "step": 6050 }, { 
"epoch": 1.1072291395651366, "grad_norm": 0.6357553005218506, "learning_rate": 8.096088908353315e-05, "loss": 0.7479, "step": 6051 }, { "epoch": 1.1074149786285077, "grad_norm": 0.6017008423805237, "learning_rate": 8.093217897240195e-05, "loss": 0.9325, "step": 6052 }, { "epoch": 1.1076008176918788, "grad_norm": 0.613470196723938, "learning_rate": 8.090347049218266e-05, "loss": 1.0354, "step": 6053 }, { "epoch": 1.10778665675525, "grad_norm": 0.7553649544715881, "learning_rate": 8.087476364533082e-05, "loss": 1.0809, "step": 6054 }, { "epoch": 1.107972495818621, "grad_norm": 0.6359599828720093, "learning_rate": 8.084605843430177e-05, "loss": 0.9372, "step": 6055 }, { "epoch": 1.1081583348819921, "grad_norm": 0.8213351964950562, "learning_rate": 8.081735486155076e-05, "loss": 0.9785, "step": 6056 }, { "epoch": 1.1083441739453632, "grad_norm": 0.6647172570228577, "learning_rate": 8.078865292953281e-05, "loss": 0.7293, "step": 6057 }, { "epoch": 1.1085300130087345, "grad_norm": 0.5401493310928345, "learning_rate": 8.075995264070291e-05, "loss": 0.6513, "step": 6058 }, { "epoch": 1.1087158520721057, "grad_norm": 0.4942319691181183, "learning_rate": 8.073125399751589e-05, "loss": 0.6459, "step": 6059 }, { "epoch": 1.1089016911354768, "grad_norm": 0.6275574564933777, "learning_rate": 8.07025570024263e-05, "loss": 0.7735, "step": 6060 }, { "epoch": 1.1090875301988479, "grad_norm": 0.5724532008171082, "learning_rate": 8.067386165788878e-05, "loss": 0.8382, "step": 6061 }, { "epoch": 1.109273369262219, "grad_norm": 0.7397722601890564, "learning_rate": 8.064516796635758e-05, "loss": 0.9636, "step": 6062 }, { "epoch": 1.10945920832559, "grad_norm": 0.629734456539154, "learning_rate": 8.061647593028703e-05, "loss": 1.06, "step": 6063 }, { "epoch": 1.1096450473889612, "grad_norm": 0.6586220264434814, "learning_rate": 8.058778555213116e-05, "loss": 1.0629, "step": 6064 }, { "epoch": 1.1098308864523323, "grad_norm": 0.5675712823867798, "learning_rate": 8.055909683434395e-05, 
"loss": 0.9236, "step": 6065 }, { "epoch": 1.1100167255157034, "grad_norm": 0.6634374260902405, "learning_rate": 8.053040977937924e-05, "loss": 0.8846, "step": 6066 }, { "epoch": 1.1102025645790745, "grad_norm": 0.6590657234191895, "learning_rate": 8.050172438969063e-05, "loss": 0.8831, "step": 6067 }, { "epoch": 1.1103884036424456, "grad_norm": 0.6578685641288757, "learning_rate": 8.04730406677317e-05, "loss": 0.9238, "step": 6068 }, { "epoch": 1.1105742427058167, "grad_norm": 0.7250857949256897, "learning_rate": 8.044435861595578e-05, "loss": 0.9559, "step": 6069 }, { "epoch": 1.1107600817691878, "grad_norm": 0.6505029797554016, "learning_rate": 8.041567823681615e-05, "loss": 0.8535, "step": 6070 }, { "epoch": 1.110945920832559, "grad_norm": 0.7013585567474365, "learning_rate": 8.038699953276587e-05, "loss": 0.7633, "step": 6071 }, { "epoch": 1.11113175989593, "grad_norm": 0.754403293132782, "learning_rate": 8.035832250625788e-05, "loss": 1.1925, "step": 6072 }, { "epoch": 1.1113175989593012, "grad_norm": 0.7387733459472656, "learning_rate": 8.032964715974509e-05, "loss": 1.0599, "step": 6073 }, { "epoch": 1.1115034380226723, "grad_norm": 0.6414195895195007, "learning_rate": 8.030097349568004e-05, "loss": 0.9555, "step": 6074 }, { "epoch": 1.1116892770860436, "grad_norm": 0.6676441431045532, "learning_rate": 8.027230151651535e-05, "loss": 1.1428, "step": 6075 }, { "epoch": 1.1118751161494147, "grad_norm": 0.6780575513839722, "learning_rate": 8.024363122470331e-05, "loss": 0.9103, "step": 6076 }, { "epoch": 1.1120609552127858, "grad_norm": 0.6453208923339844, "learning_rate": 8.02149626226962e-05, "loss": 1.0003, "step": 6077 }, { "epoch": 1.112246794276157, "grad_norm": 0.702014148235321, "learning_rate": 8.018629571294607e-05, "loss": 1.0396, "step": 6078 }, { "epoch": 1.112432633339528, "grad_norm": 0.6062483191490173, "learning_rate": 8.01576304979049e-05, "loss": 1.0062, "step": 6079 }, { "epoch": 1.1126184724028991, "grad_norm": 0.7950749397277832, 
"learning_rate": 8.012896698002454e-05, "loss": 0.8965, "step": 6080 }, { "epoch": 1.1128043114662702, "grad_norm": 0.5671384334564209, "learning_rate": 8.010030516175651e-05, "loss": 0.6552, "step": 6081 }, { "epoch": 1.1129901505296413, "grad_norm": 0.7059739232063293, "learning_rate": 8.007164504555244e-05, "loss": 1.2306, "step": 6082 }, { "epoch": 1.1131759895930124, "grad_norm": 0.8960155844688416, "learning_rate": 8.00429866338636e-05, "loss": 1.0406, "step": 6083 }, { "epoch": 1.1133618286563836, "grad_norm": 0.7356314063072205, "learning_rate": 8.001432992914127e-05, "loss": 1.0919, "step": 6084 }, { "epoch": 1.1135476677197547, "grad_norm": 0.6466111540794373, "learning_rate": 7.998567493383647e-05, "loss": 0.9251, "step": 6085 }, { "epoch": 1.1137335067831258, "grad_norm": 3.527097463607788, "learning_rate": 7.995702165040015e-05, "loss": 1.8559, "step": 6086 }, { "epoch": 1.1139193458464969, "grad_norm": 0.5798239707946777, "learning_rate": 7.992837008128313e-05, "loss": 0.9931, "step": 6087 }, { "epoch": 1.114105184909868, "grad_norm": 0.580335795879364, "learning_rate": 7.989972022893595e-05, "loss": 0.9003, "step": 6088 }, { "epoch": 1.114291023973239, "grad_norm": 0.5507724285125732, "learning_rate": 7.98710720958092e-05, "loss": 0.8943, "step": 6089 }, { "epoch": 1.1144768630366102, "grad_norm": 0.6352560520172119, "learning_rate": 7.98424256843531e-05, "loss": 0.9323, "step": 6090 }, { "epoch": 1.1146627020999813, "grad_norm": 0.7881081700325012, "learning_rate": 7.981378099701793e-05, "loss": 1.044, "step": 6091 }, { "epoch": 1.1148485411633526, "grad_norm": 0.7111803293228149, "learning_rate": 7.978513803625366e-05, "loss": 1.0815, "step": 6092 }, { "epoch": 1.1150343802267237, "grad_norm": 0.714192807674408, "learning_rate": 7.975649680451024e-05, "loss": 1.2705, "step": 6093 }, { "epoch": 1.1152202192900948, "grad_norm": 0.6725999116897583, "learning_rate": 7.972785730423744e-05, "loss": 0.9721, "step": 6094 }, { "epoch": 1.115406058353466, 
"grad_norm": 0.6231698393821716, "learning_rate": 7.969921953788478e-05, "loss": 0.9319, "step": 6095 }, { "epoch": 1.115591897416837, "grad_norm": 0.632483959197998, "learning_rate": 7.967058350790177e-05, "loss": 0.8918, "step": 6096 }, { "epoch": 1.1157777364802082, "grad_norm": 2.3974478244781494, "learning_rate": 7.964194921673766e-05, "loss": 1.8149, "step": 6097 }, { "epoch": 1.1159635755435793, "grad_norm": 0.5865818858146667, "learning_rate": 7.961331666684166e-05, "loss": 1.0894, "step": 6098 }, { "epoch": 1.1161494146069504, "grad_norm": 0.8524091243743896, "learning_rate": 7.958468586066273e-05, "loss": 1.0211, "step": 6099 }, { "epoch": 1.1163352536703215, "grad_norm": 0.7423559427261353, "learning_rate": 7.955605680064975e-05, "loss": 1.1801, "step": 6100 }, { "epoch": 1.1165210927336926, "grad_norm": 0.7492449283599854, "learning_rate": 7.952742948925143e-05, "loss": 0.906, "step": 6101 }, { "epoch": 1.1167069317970637, "grad_norm": 0.5786088705062866, "learning_rate": 7.949880392891628e-05, "loss": 0.7568, "step": 6102 }, { "epoch": 1.1168927708604348, "grad_norm": 0.536382257938385, "learning_rate": 7.947018012209279e-05, "loss": 0.8471, "step": 6103 }, { "epoch": 1.117078609923806, "grad_norm": 0.6602856516838074, "learning_rate": 7.944155807122912e-05, "loss": 0.8139, "step": 6104 }, { "epoch": 1.117264448987177, "grad_norm": 0.6715288162231445, "learning_rate": 7.941293777877344e-05, "loss": 0.8542, "step": 6105 }, { "epoch": 1.1174502880505481, "grad_norm": 0.7064169645309448, "learning_rate": 7.938431924717368e-05, "loss": 1.1756, "step": 6106 }, { "epoch": 1.1176361271139192, "grad_norm": 0.6816471815109253, "learning_rate": 7.935570247887764e-05, "loss": 0.9043, "step": 6107 }, { "epoch": 1.1178219661772903, "grad_norm": 0.5369113087654114, "learning_rate": 7.932708747633299e-05, "loss": 0.8338, "step": 6108 }, { "epoch": 1.1180078052406617, "grad_norm": 0.6738365292549133, "learning_rate": 7.929847424198723e-05, "loss": 0.8163, "step": 6109 
}, { "epoch": 1.1181936443040328, "grad_norm": 0.7385087609291077, "learning_rate": 7.926986277828774e-05, "loss": 1.0322, "step": 6110 }, { "epoch": 1.118379483367404, "grad_norm": 0.6074032783508301, "learning_rate": 7.924125308768165e-05, "loss": 0.8479, "step": 6111 }, { "epoch": 1.118565322430775, "grad_norm": 0.6593636870384216, "learning_rate": 7.92126451726161e-05, "loss": 0.8993, "step": 6112 }, { "epoch": 1.118751161494146, "grad_norm": 0.7033354043960571, "learning_rate": 7.918403903553788e-05, "loss": 1.056, "step": 6113 }, { "epoch": 1.1189370005575172, "grad_norm": 0.6170438528060913, "learning_rate": 7.915543467889379e-05, "loss": 1.0231, "step": 6114 }, { "epoch": 1.1191228396208883, "grad_norm": 0.6546794176101685, "learning_rate": 7.912683210513046e-05, "loss": 0.9556, "step": 6115 }, { "epoch": 1.1193086786842594, "grad_norm": 0.6962534785270691, "learning_rate": 7.909823131669425e-05, "loss": 1.077, "step": 6116 }, { "epoch": 1.1194945177476305, "grad_norm": 0.7631762623786926, "learning_rate": 7.906963231603155e-05, "loss": 0.9765, "step": 6117 }, { "epoch": 1.1196803568110016, "grad_norm": 0.6790252327919006, "learning_rate": 7.904103510558838e-05, "loss": 0.9533, "step": 6118 }, { "epoch": 1.1198661958743727, "grad_norm": 0.774306058883667, "learning_rate": 7.901243968781083e-05, "loss": 1.3034, "step": 6119 }, { "epoch": 1.1200520349377439, "grad_norm": 0.6652829051017761, "learning_rate": 7.898384606514462e-05, "loss": 0.7831, "step": 6120 }, { "epoch": 1.120237874001115, "grad_norm": 0.7548511624336243, "learning_rate": 7.89552542400355e-05, "loss": 1.0368, "step": 6121 }, { "epoch": 1.120423713064486, "grad_norm": 0.7231112122535706, "learning_rate": 7.892666421492896e-05, "loss": 1.1899, "step": 6122 }, { "epoch": 1.1206095521278572, "grad_norm": 0.7107078433036804, "learning_rate": 7.889807599227037e-05, "loss": 1.0542, "step": 6123 }, { "epoch": 1.1207953911912285, "grad_norm": 0.5880662798881531, "learning_rate": 
7.886948957450498e-05, "loss": 1.0688, "step": 6124 }, { "epoch": 1.1209812302545996, "grad_norm": 0.6817053556442261, "learning_rate": 7.884090496407779e-05, "loss": 1.0867, "step": 6125 }, { "epoch": 1.1211670693179707, "grad_norm": 0.6306520104408264, "learning_rate": 7.881232216343375e-05, "loss": 0.8813, "step": 6126 }, { "epoch": 1.1213529083813418, "grad_norm": 0.5859300494194031, "learning_rate": 7.878374117501757e-05, "loss": 0.7759, "step": 6127 }, { "epoch": 1.121538747444713, "grad_norm": 1.0821082592010498, "learning_rate": 7.875516200127385e-05, "loss": 0.7981, "step": 6128 }, { "epoch": 1.121724586508084, "grad_norm": 0.5714467167854309, "learning_rate": 7.872658464464704e-05, "loss": 1.0366, "step": 6129 }, { "epoch": 1.1219104255714551, "grad_norm": 0.6259623169898987, "learning_rate": 7.869800910758143e-05, "loss": 0.7744, "step": 6130 }, { "epoch": 1.1220962646348263, "grad_norm": 0.5901505947113037, "learning_rate": 7.866943539252118e-05, "loss": 0.9946, "step": 6131 }, { "epoch": 1.1222821036981974, "grad_norm": 0.5895738005638123, "learning_rate": 7.864086350191017e-05, "loss": 0.7583, "step": 6132 }, { "epoch": 1.1224679427615685, "grad_norm": 0.6195260882377625, "learning_rate": 7.861229343819232e-05, "loss": 0.811, "step": 6133 }, { "epoch": 1.1226537818249396, "grad_norm": 0.614778995513916, "learning_rate": 7.858372520381119e-05, "loss": 1.1254, "step": 6134 }, { "epoch": 1.1228396208883107, "grad_norm": 0.5922778844833374, "learning_rate": 7.855515880121032e-05, "loss": 0.9307, "step": 6135 }, { "epoch": 1.1230254599516818, "grad_norm": 0.5865144729614258, "learning_rate": 7.85265942328331e-05, "loss": 1.0438, "step": 6136 }, { "epoch": 1.123211299015053, "grad_norm": 0.6751108765602112, "learning_rate": 7.849803150112267e-05, "loss": 0.9653, "step": 6137 }, { "epoch": 1.123397138078424, "grad_norm": 0.7323153614997864, "learning_rate": 7.84694706085221e-05, "loss": 0.9208, "step": 6138 }, { "epoch": 1.1235829771417951, "grad_norm": 
0.6893547773361206, "learning_rate": 7.844091155747421e-05, "loss": 1.108, "step": 6139 }, { "epoch": 1.1237688162051662, "grad_norm": 0.5926496982574463, "learning_rate": 7.84123543504218e-05, "loss": 0.8949, "step": 6140 }, { "epoch": 1.1239546552685376, "grad_norm": 0.6670359373092651, "learning_rate": 7.838379898980734e-05, "loss": 1.0495, "step": 6141 }, { "epoch": 1.1241404943319087, "grad_norm": 0.7171518802642822, "learning_rate": 7.835524547807326e-05, "loss": 0.9813, "step": 6142 }, { "epoch": 1.1243263333952798, "grad_norm": 0.6894052028656006, "learning_rate": 7.832669381766183e-05, "loss": 0.9703, "step": 6143 }, { "epoch": 1.1245121724586509, "grad_norm": 0.639323890209198, "learning_rate": 7.829814401101512e-05, "loss": 0.9347, "step": 6144 }, { "epoch": 1.124698011522022, "grad_norm": 0.783460259437561, "learning_rate": 7.826959606057508e-05, "loss": 0.8925, "step": 6145 }, { "epoch": 1.124883850585393, "grad_norm": 0.605365514755249, "learning_rate": 7.824104996878342e-05, "loss": 0.9144, "step": 6146 }, { "epoch": 1.1250696896487642, "grad_norm": 0.6294218301773071, "learning_rate": 7.821250573808181e-05, "loss": 0.8606, "step": 6147 }, { "epoch": 1.1252555287121353, "grad_norm": 0.6887063384056091, "learning_rate": 7.818396337091166e-05, "loss": 0.6776, "step": 6148 }, { "epoch": 1.1254413677755064, "grad_norm": 0.6241545677185059, "learning_rate": 7.815542286971426e-05, "loss": 0.9171, "step": 6149 }, { "epoch": 1.1256272068388775, "grad_norm": 0.6879689693450928, "learning_rate": 7.812688423693078e-05, "loss": 1.0305, "step": 6150 }, { "epoch": 1.1258130459022486, "grad_norm": 0.7353564500808716, "learning_rate": 7.809834747500215e-05, "loss": 1.0796, "step": 6151 }, { "epoch": 1.1259988849656197, "grad_norm": 0.6647047400474548, "learning_rate": 7.80698125863692e-05, "loss": 0.7962, "step": 6152 }, { "epoch": 1.1261847240289908, "grad_norm": 0.7037851810455322, "learning_rate": 7.804127957347256e-05, "loss": 0.8475, "step": 6153 }, { "epoch": 
1.126370563092362, "grad_norm": 0.7323387265205383, "learning_rate": 7.80127484387528e-05, "loss": 0.9171, "step": 6154 }, { "epoch": 1.126556402155733, "grad_norm": 0.6864535808563232, "learning_rate": 7.79842191846501e-05, "loss": 0.7967, "step": 6155 }, { "epoch": 1.1267422412191044, "grad_norm": 0.6619288921356201, "learning_rate": 7.795569181360474e-05, "loss": 0.836, "step": 6156 }, { "epoch": 1.1269280802824753, "grad_norm": 0.6207095980644226, "learning_rate": 7.792716632805676e-05, "loss": 0.9877, "step": 6157 }, { "epoch": 1.1271139193458466, "grad_norm": 0.6751691699028015, "learning_rate": 7.78986427304459e-05, "loss": 0.8719, "step": 6158 }, { "epoch": 1.1272997584092177, "grad_norm": 0.6725651025772095, "learning_rate": 7.787012102321189e-05, "loss": 1.0775, "step": 6159 }, { "epoch": 1.1274855974725888, "grad_norm": 0.6328722238540649, "learning_rate": 7.784160120879427e-05, "loss": 1.1374, "step": 6160 }, { "epoch": 1.12767143653596, "grad_norm": 0.6430485844612122, "learning_rate": 7.781308328963242e-05, "loss": 1.0459, "step": 6161 }, { "epoch": 1.127857275599331, "grad_norm": 0.835422158241272, "learning_rate": 7.778456726816545e-05, "loss": 1.0023, "step": 6162 }, { "epoch": 1.1280431146627021, "grad_norm": 0.6023844480514526, "learning_rate": 7.775605314683248e-05, "loss": 1.0418, "step": 6163 }, { "epoch": 1.1282289537260732, "grad_norm": 0.6241095662117004, "learning_rate": 7.772754092807239e-05, "loss": 0.8114, "step": 6164 }, { "epoch": 1.1284147927894443, "grad_norm": 0.6519508361816406, "learning_rate": 7.769903061432381e-05, "loss": 0.9586, "step": 6165 }, { "epoch": 1.1286006318528155, "grad_norm": 0.7015917301177979, "learning_rate": 7.767052220802538e-05, "loss": 0.8807, "step": 6166 }, { "epoch": 1.1287864709161866, "grad_norm": 0.6102322340011597, "learning_rate": 7.764201571161541e-05, "loss": 0.7752, "step": 6167 }, { "epoch": 1.1289723099795577, "grad_norm": 0.6366463899612427, "learning_rate": 7.761351112753222e-05, "loss": 
1.0049, "step": 6168 }, { "epoch": 1.1291581490429288, "grad_norm": 0.646867036819458, "learning_rate": 7.758500845821374e-05, "loss": 1.0766, "step": 6169 }, { "epoch": 1.1293439881062999, "grad_norm": 0.745267391204834, "learning_rate": 7.755650770609793e-05, "loss": 0.9621, "step": 6170 }, { "epoch": 1.129529827169671, "grad_norm": 0.7232451438903809, "learning_rate": 7.752800887362259e-05, "loss": 0.9204, "step": 6171 }, { "epoch": 1.129715666233042, "grad_norm": 0.6933814883232117, "learning_rate": 7.749951196322516e-05, "loss": 0.8385, "step": 6172 }, { "epoch": 1.1299015052964134, "grad_norm": 0.6276084780693054, "learning_rate": 7.747101697734313e-05, "loss": 0.8876, "step": 6173 }, { "epoch": 1.1300873443597843, "grad_norm": 0.6545588970184326, "learning_rate": 7.74425239184137e-05, "loss": 1.0254, "step": 6174 }, { "epoch": 1.1302731834231556, "grad_norm": 0.6565459966659546, "learning_rate": 7.741403278887397e-05, "loss": 0.9012, "step": 6175 }, { "epoch": 1.1304590224865267, "grad_norm": 0.6083407998085022, "learning_rate": 7.738554359116079e-05, "loss": 0.8095, "step": 6176 }, { "epoch": 1.1306448615498979, "grad_norm": 0.6915955543518066, "learning_rate": 7.735705632771095e-05, "loss": 0.9657, "step": 6177 }, { "epoch": 1.130830700613269, "grad_norm": 0.8749962449073792, "learning_rate": 7.732857100096107e-05, "loss": 0.866, "step": 6178 }, { "epoch": 1.13101653967664, "grad_norm": 0.7088390588760376, "learning_rate": 7.730008761334747e-05, "loss": 0.8856, "step": 6179 }, { "epoch": 1.1312023787400112, "grad_norm": 0.7398287653923035, "learning_rate": 7.727160616730644e-05, "loss": 1.0043, "step": 6180 }, { "epoch": 1.1313882178033823, "grad_norm": 0.7247969508171082, "learning_rate": 7.724312666527406e-05, "loss": 0.8159, "step": 6181 }, { "epoch": 1.1315740568667534, "grad_norm": 0.9901308417320251, "learning_rate": 7.721464910968627e-05, "loss": 1.0269, "step": 6182 }, { "epoch": 1.1317598959301245, "grad_norm": 0.7300354242324829, "learning_rate": 
7.718617350297874e-05, "loss": 0.9958, "step": 6183 }, { "epoch": 1.1319457349934956, "grad_norm": 0.7227690815925598, "learning_rate": 7.71576998475871e-05, "loss": 0.8632, "step": 6184 }, { "epoch": 1.1321315740568667, "grad_norm": 0.7816814184188843, "learning_rate": 7.712922814594681e-05, "loss": 1.0998, "step": 6185 }, { "epoch": 1.1323174131202378, "grad_norm": 0.625962495803833, "learning_rate": 7.710075840049302e-05, "loss": 0.9012, "step": 6186 }, { "epoch": 1.132503252183609, "grad_norm": 0.6541265845298767, "learning_rate": 7.707229061366089e-05, "loss": 0.9436, "step": 6187 }, { "epoch": 1.13268909124698, "grad_norm": 0.6720447540283203, "learning_rate": 7.704382478788526e-05, "loss": 1.2808, "step": 6188 }, { "epoch": 1.1328749303103511, "grad_norm": 0.6707186698913574, "learning_rate": 7.701536092560095e-05, "loss": 0.8202, "step": 6189 }, { "epoch": 1.1330607693737225, "grad_norm": 0.560600996017456, "learning_rate": 7.698689902924246e-05, "loss": 0.6307, "step": 6190 }, { "epoch": 1.1332466084370936, "grad_norm": 0.8436470031738281, "learning_rate": 7.695843910124423e-05, "loss": 0.9579, "step": 6191 }, { "epoch": 1.1334324475004647, "grad_norm": 0.5533195734024048, "learning_rate": 7.692998114404055e-05, "loss": 0.6506, "step": 6192 }, { "epoch": 1.1336182865638358, "grad_norm": 0.7694109678268433, "learning_rate": 7.690152516006542e-05, "loss": 0.7243, "step": 6193 }, { "epoch": 1.133804125627207, "grad_norm": 0.7304584980010986, "learning_rate": 7.687307115175275e-05, "loss": 1.1268, "step": 6194 }, { "epoch": 1.133989964690578, "grad_norm": 0.6806715130805969, "learning_rate": 7.684461912153629e-05, "loss": 1.0232, "step": 6195 }, { "epoch": 1.134175803753949, "grad_norm": 0.7814947962760925, "learning_rate": 7.681616907184964e-05, "loss": 1.0942, "step": 6196 }, { "epoch": 1.1343616428173202, "grad_norm": 0.6447503566741943, "learning_rate": 7.678772100512611e-05, "loss": 1.0311, "step": 6197 }, { "epoch": 1.1345474818806913, "grad_norm": 
0.5829337239265442, "learning_rate": 7.675927492379898e-05, "loss": 0.9055, "step": 6198 }, { "epoch": 1.1347333209440624, "grad_norm": 0.6542435884475708, "learning_rate": 7.673083083030133e-05, "loss": 1.0454, "step": 6199 }, { "epoch": 1.1349191600074335, "grad_norm": 0.5979851484298706, "learning_rate": 7.670238872706597e-05, "loss": 1.196, "step": 6200 }, { "epoch": 1.1351049990708046, "grad_norm": 0.7708069682121277, "learning_rate": 7.667394861652567e-05, "loss": 1.039, "step": 6201 }, { "epoch": 1.1352908381341758, "grad_norm": 0.8553258180618286, "learning_rate": 7.664551050111298e-05, "loss": 1.1342, "step": 6202 }, { "epoch": 1.1354766771975469, "grad_norm": 0.8040032386779785, "learning_rate": 7.661707438326023e-05, "loss": 1.1618, "step": 6203 }, { "epoch": 1.135662516260918, "grad_norm": 0.6485744118690491, "learning_rate": 7.658864026539962e-05, "loss": 1.0778, "step": 6204 }, { "epoch": 1.135848355324289, "grad_norm": 0.6490263342857361, "learning_rate": 7.656020814996323e-05, "loss": 0.8483, "step": 6205 }, { "epoch": 1.1360341943876602, "grad_norm": 0.6252707839012146, "learning_rate": 7.653177803938292e-05, "loss": 0.9277, "step": 6206 }, { "epoch": 1.1362200334510315, "grad_norm": 0.7012428045272827, "learning_rate": 7.650334993609031e-05, "loss": 0.8191, "step": 6207 }, { "epoch": 1.1364058725144026, "grad_norm": 0.7016780376434326, "learning_rate": 7.647492384251703e-05, "loss": 0.9852, "step": 6208 }, { "epoch": 1.1365917115777737, "grad_norm": 0.5683523416519165, "learning_rate": 7.64464997610943e-05, "loss": 0.8534, "step": 6209 }, { "epoch": 1.1367775506411448, "grad_norm": 0.8635490536689758, "learning_rate": 7.641807769425337e-05, "loss": 1.1012, "step": 6210 }, { "epoch": 1.136963389704516, "grad_norm": 0.5886553525924683, "learning_rate": 7.638965764442521e-05, "loss": 0.9735, "step": 6211 }, { "epoch": 1.137149228767887, "grad_norm": 0.7273619174957275, "learning_rate": 7.636123961404065e-05, "loss": 0.9081, "step": 6212 }, { "epoch": 
1.1373350678312582, "grad_norm": 0.5543389320373535, "learning_rate": 7.633282360553041e-05, "loss": 0.747, "step": 6213 }, { "epoch": 1.1375209068946293, "grad_norm": 0.6617942452430725, "learning_rate": 7.630440962132486e-05, "loss": 0.8867, "step": 6214 }, { "epoch": 1.1377067459580004, "grad_norm": 0.5964873433113098, "learning_rate": 7.627599766385443e-05, "loss": 0.9283, "step": 6215 }, { "epoch": 1.1378925850213715, "grad_norm": 0.6347508430480957, "learning_rate": 7.624758773554914e-05, "loss": 0.8036, "step": 6216 }, { "epoch": 1.1380784240847426, "grad_norm": 0.6257861852645874, "learning_rate": 7.621917983883903e-05, "loss": 0.8181, "step": 6217 }, { "epoch": 1.1382642631481137, "grad_norm": 0.6525983810424805, "learning_rate": 7.619077397615384e-05, "loss": 0.9738, "step": 6218 }, { "epoch": 1.1384501022114848, "grad_norm": 0.6202676296234131, "learning_rate": 7.616237014992322e-05, "loss": 1.0333, "step": 6219 }, { "epoch": 1.138635941274856, "grad_norm": 0.8427987694740295, "learning_rate": 7.613396836257663e-05, "loss": 1.0282, "step": 6220 }, { "epoch": 1.138821780338227, "grad_norm": 0.7445005774497986, "learning_rate": 7.610556861654327e-05, "loss": 1.2364, "step": 6221 }, { "epoch": 1.1390076194015983, "grad_norm": 0.5864232182502747, "learning_rate": 7.607717091425233e-05, "loss": 0.9195, "step": 6222 }, { "epoch": 1.1391934584649692, "grad_norm": 0.5960657596588135, "learning_rate": 7.604877525813261e-05, "loss": 0.6124, "step": 6223 }, { "epoch": 1.1393792975283406, "grad_norm": 0.8077784776687622, "learning_rate": 7.602038165061292e-05, "loss": 0.8756, "step": 6224 }, { "epoch": 1.1395651365917117, "grad_norm": 0.6285901665687561, "learning_rate": 7.59919900941218e-05, "loss": 0.9791, "step": 6225 }, { "epoch": 1.1397509756550828, "grad_norm": 0.5540950298309326, "learning_rate": 7.596360059108765e-05, "loss": 0.6467, "step": 6226 }, { "epoch": 1.1399368147184539, "grad_norm": 0.6270549297332764, "learning_rate": 7.593521314393875e-05, 
"loss": 1.1263, "step": 6227 }, { "epoch": 1.140122653781825, "grad_norm": 0.6695981621742249, "learning_rate": 7.590682775510302e-05, "loss": 0.8308, "step": 6228 }, { "epoch": 1.140308492845196, "grad_norm": 0.7288331389427185, "learning_rate": 7.587844442700843e-05, "loss": 0.9318, "step": 6229 }, { "epoch": 1.1404943319085672, "grad_norm": 0.657455325126648, "learning_rate": 7.585006316208258e-05, "loss": 0.9138, "step": 6230 }, { "epoch": 1.1406801709719383, "grad_norm": 0.7142748832702637, "learning_rate": 7.582168396275304e-05, "loss": 1.02, "step": 6231 }, { "epoch": 1.1408660100353094, "grad_norm": 0.5973772406578064, "learning_rate": 7.579330683144712e-05, "loss": 1.0011, "step": 6232 }, { "epoch": 1.1410518490986805, "grad_norm": 0.6535288095474243, "learning_rate": 7.576493177059197e-05, "loss": 1.1361, "step": 6233 }, { "epoch": 1.1412376881620516, "grad_norm": 0.7035378217697144, "learning_rate": 7.573655878261464e-05, "loss": 0.964, "step": 6234 }, { "epoch": 1.1414235272254227, "grad_norm": 0.7315672636032104, "learning_rate": 7.570818786994183e-05, "loss": 1.0567, "step": 6235 }, { "epoch": 1.1416093662887938, "grad_norm": 0.7773432731628418, "learning_rate": 7.567981903500025e-05, "loss": 1.069, "step": 6236 }, { "epoch": 1.141795205352165, "grad_norm": 0.6205809116363525, "learning_rate": 7.565145228021627e-05, "loss": 0.7578, "step": 6237 }, { "epoch": 1.141981044415536, "grad_norm": 0.6082891225814819, "learning_rate": 7.562308760801622e-05, "loss": 0.6793, "step": 6238 }, { "epoch": 1.1421668834789074, "grad_norm": 0.6637825965881348, "learning_rate": 7.559472502082617e-05, "loss": 0.8839, "step": 6239 }, { "epoch": 1.1423527225422783, "grad_norm": 0.7240639925003052, "learning_rate": 7.5566364521072e-05, "loss": 1.0576, "step": 6240 }, { "epoch": 1.1425385616056496, "grad_norm": 0.6650069355964661, "learning_rate": 7.553800611117956e-05, "loss": 0.8786, "step": 6241 }, { "epoch": 1.1427244006690207, "grad_norm": 0.7037933468818665, 
"learning_rate": 7.550964979357427e-05, "loss": 1.0313, "step": 6242 }, { "epoch": 1.1429102397323918, "grad_norm": 0.6801198720932007, "learning_rate": 7.54812955706816e-05, "loss": 1.0486, "step": 6243 }, { "epoch": 1.143096078795763, "grad_norm": 0.6622380018234253, "learning_rate": 7.545294344492667e-05, "loss": 0.9577, "step": 6244 }, { "epoch": 1.143281917859134, "grad_norm": 0.8831017017364502, "learning_rate": 7.542459341873456e-05, "loss": 1.0314, "step": 6245 }, { "epoch": 1.1434677569225051, "grad_norm": 0.6996762752532959, "learning_rate": 7.539624549453008e-05, "loss": 0.9683, "step": 6246 }, { "epoch": 1.1436535959858762, "grad_norm": 0.7951368093490601, "learning_rate": 7.53678996747379e-05, "loss": 1.0881, "step": 6247 }, { "epoch": 1.1438394350492473, "grad_norm": 0.5819055438041687, "learning_rate": 7.533955596178252e-05, "loss": 1.0429, "step": 6248 }, { "epoch": 1.1440252741126185, "grad_norm": 0.654751718044281, "learning_rate": 7.531121435808819e-05, "loss": 1.0643, "step": 6249 }, { "epoch": 1.1442111131759896, "grad_norm": 0.6705732345581055, "learning_rate": 7.528287486607909e-05, "loss": 1.1292, "step": 6250 }, { "epoch": 1.1443969522393607, "grad_norm": 0.5722493529319763, "learning_rate": 7.525453748817908e-05, "loss": 0.9427, "step": 6251 }, { "epoch": 1.1445827913027318, "grad_norm": 0.660951554775238, "learning_rate": 7.5226202226812e-05, "loss": 0.9269, "step": 6252 }, { "epoch": 1.1447686303661029, "grad_norm": 0.7391307950019836, "learning_rate": 7.519786908440136e-05, "loss": 1.01, "step": 6253 }, { "epoch": 1.144954469429474, "grad_norm": 0.6185791492462158, "learning_rate": 7.516953806337058e-05, "loss": 0.9853, "step": 6254 }, { "epoch": 1.145140308492845, "grad_norm": 0.7332004904747009, "learning_rate": 7.514120916614289e-05, "loss": 1.1023, "step": 6255 }, { "epoch": 1.1453261475562164, "grad_norm": 0.5954209566116333, "learning_rate": 7.511288239514128e-05, "loss": 0.9286, "step": 6256 }, { "epoch": 1.1455119866195875, 
"grad_norm": 0.6706781983375549, "learning_rate": 7.508455775278867e-05, "loss": 1.0266, "step": 6257 }, { "epoch": 1.1456978256829586, "grad_norm": 0.6400855779647827, "learning_rate": 7.505623524150765e-05, "loss": 1.0605, "step": 6258 }, { "epoch": 1.1458836647463297, "grad_norm": 0.6486907005310059, "learning_rate": 7.502791486372079e-05, "loss": 0.8922, "step": 6259 }, { "epoch": 1.1460695038097009, "grad_norm": 0.7294444441795349, "learning_rate": 7.499959662185029e-05, "loss": 1.0198, "step": 6260 }, { "epoch": 1.146255342873072, "grad_norm": 0.5519627332687378, "learning_rate": 7.497128051831832e-05, "loss": 0.5194, "step": 6261 }, { "epoch": 1.146441181936443, "grad_norm": 0.7215742468833923, "learning_rate": 7.494296655554685e-05, "loss": 0.8087, "step": 6262 }, { "epoch": 1.1466270209998142, "grad_norm": 0.6595014929771423, "learning_rate": 7.491465473595758e-05, "loss": 1.1404, "step": 6263 }, { "epoch": 1.1468128600631853, "grad_norm": 0.6985663771629333, "learning_rate": 7.488634506197216e-05, "loss": 0.8644, "step": 6264 }, { "epoch": 1.1469986991265564, "grad_norm": 0.744734525680542, "learning_rate": 7.485803753601188e-05, "loss": 0.819, "step": 6265 }, { "epoch": 1.1471845381899275, "grad_norm": 0.7613779902458191, "learning_rate": 7.482973216049803e-05, "loss": 0.9787, "step": 6266 }, { "epoch": 1.1473703772532986, "grad_norm": 0.8032128810882568, "learning_rate": 7.480142893785152e-05, "loss": 0.9408, "step": 6267 }, { "epoch": 1.1475562163166697, "grad_norm": 0.6917648911476135, "learning_rate": 7.477312787049328e-05, "loss": 0.9462, "step": 6268 }, { "epoch": 1.1477420553800408, "grad_norm": 0.7514396905899048, "learning_rate": 7.474482896084397e-05, "loss": 1.0154, "step": 6269 }, { "epoch": 1.147927894443412, "grad_norm": 0.6715410351753235, "learning_rate": 7.471653221132397e-05, "loss": 0.7154, "step": 6270 }, { "epoch": 1.148113733506783, "grad_norm": 0.6557510495185852, "learning_rate": 7.468823762435367e-05, "loss": 0.81, "step": 6271 
}, { "epoch": 1.1482995725701541, "grad_norm": 0.7080697417259216, "learning_rate": 7.465994520235306e-05, "loss": 0.914, "step": 6272 }, { "epoch": 1.1484854116335255, "grad_norm": 0.6797797083854675, "learning_rate": 7.463165494774216e-05, "loss": 0.9553, "step": 6273 }, { "epoch": 1.1486712506968966, "grad_norm": 0.5941304564476013, "learning_rate": 7.460336686294061e-05, "loss": 1.0521, "step": 6274 }, { "epoch": 1.1488570897602677, "grad_norm": 0.7724999189376831, "learning_rate": 7.457508095036796e-05, "loss": 1.052, "step": 6275 }, { "epoch": 1.1490429288236388, "grad_norm": 0.6262985467910767, "learning_rate": 7.45467972124436e-05, "loss": 0.892, "step": 6276 }, { "epoch": 1.14922876788701, "grad_norm": 0.6632989645004272, "learning_rate": 7.451851565158669e-05, "loss": 0.8847, "step": 6277 }, { "epoch": 1.149414606950381, "grad_norm": 0.6198441386222839, "learning_rate": 7.449023627021624e-05, "loss": 0.9052, "step": 6278 }, { "epoch": 1.149600446013752, "grad_norm": 0.6501045823097229, "learning_rate": 7.446195907075098e-05, "loss": 1.0629, "step": 6279 }, { "epoch": 1.1497862850771232, "grad_norm": 0.6493845582008362, "learning_rate": 7.443368405560958e-05, "loss": 0.6935, "step": 6280 }, { "epoch": 1.1499721241404943, "grad_norm": 0.6532225012779236, "learning_rate": 7.440541122721041e-05, "loss": 0.923, "step": 6281 }, { "epoch": 1.1501579632038654, "grad_norm": 0.6956079006195068, "learning_rate": 7.437714058797173e-05, "loss": 0.8909, "step": 6282 }, { "epoch": 1.1503438022672365, "grad_norm": 0.6475423574447632, "learning_rate": 7.434887214031163e-05, "loss": 0.7268, "step": 6283 }, { "epoch": 1.1505296413306076, "grad_norm": 0.6966208815574646, "learning_rate": 7.43206058866479e-05, "loss": 0.9642, "step": 6284 }, { "epoch": 1.1507154803939788, "grad_norm": 0.7085195183753967, "learning_rate": 7.42923418293983e-05, "loss": 0.8918, "step": 6285 }, { "epoch": 1.1509013194573499, "grad_norm": 0.6598019003868103, "learning_rate": 7.42640799709802e-05, 
"loss": 0.8213, "step": 6286 }, { "epoch": 1.151087158520721, "grad_norm": 0.6603427529335022, "learning_rate": 7.423582031381102e-05, "loss": 0.9549, "step": 6287 }, { "epoch": 1.1512729975840923, "grad_norm": 0.7119370698928833, "learning_rate": 7.420756286030778e-05, "loss": 1.2205, "step": 6288 }, { "epoch": 1.1514588366474632, "grad_norm": 0.7540510296821594, "learning_rate": 7.417930761288743e-05, "loss": 0.9597, "step": 6289 }, { "epoch": 1.1516446757108345, "grad_norm": 0.6168947219848633, "learning_rate": 7.415105457396671e-05, "loss": 0.8439, "step": 6290 }, { "epoch": 1.1518305147742056, "grad_norm": 0.7342690825462341, "learning_rate": 7.412280374596215e-05, "loss": 0.9151, "step": 6291 }, { "epoch": 1.1520163538375767, "grad_norm": 0.6422035694122314, "learning_rate": 7.409455513129015e-05, "loss": 0.933, "step": 6292 }, { "epoch": 1.1522021929009478, "grad_norm": 0.5854395627975464, "learning_rate": 7.406630873236681e-05, "loss": 0.9118, "step": 6293 }, { "epoch": 1.152388031964319, "grad_norm": 0.7802146077156067, "learning_rate": 7.403806455160817e-05, "loss": 0.8353, "step": 6294 }, { "epoch": 1.15257387102769, "grad_norm": 0.7460753917694092, "learning_rate": 7.400982259142994e-05, "loss": 0.8547, "step": 6295 }, { "epoch": 1.1527597100910612, "grad_norm": 0.7687903642654419, "learning_rate": 7.398158285424774e-05, "loss": 0.9714, "step": 6296 }, { "epoch": 1.1529455491544323, "grad_norm": 0.6683840155601501, "learning_rate": 7.395334534247703e-05, "loss": 0.9438, "step": 6297 }, { "epoch": 1.1531313882178034, "grad_norm": 0.6488221883773804, "learning_rate": 7.392511005853297e-05, "loss": 1.0556, "step": 6298 }, { "epoch": 1.1533172272811745, "grad_norm": 0.6396840810775757, "learning_rate": 7.389687700483064e-05, "loss": 0.9994, "step": 6299 }, { "epoch": 1.1535030663445456, "grad_norm": 0.693667471408844, "learning_rate": 7.386864618378478e-05, "loss": 1.134, "step": 6300 }, { "epoch": 1.1536889054079167, "grad_norm": 0.6322534084320068, 
"learning_rate": 7.384041759781013e-05, "loss": 0.8274, "step": 6301 }, { "epoch": 1.1538747444712878, "grad_norm": 0.840552806854248, "learning_rate": 7.381219124932107e-05, "loss": 0.9961, "step": 6302 }, { "epoch": 1.154060583534659, "grad_norm": 0.7284782528877258, "learning_rate": 7.37839671407319e-05, "loss": 0.9661, "step": 6303 }, { "epoch": 1.15424642259803, "grad_norm": 0.681910514831543, "learning_rate": 7.37557452744567e-05, "loss": 1.0821, "step": 6304 }, { "epoch": 1.1544322616614013, "grad_norm": 0.6733909249305725, "learning_rate": 7.37275256529093e-05, "loss": 0.9578, "step": 6305 }, { "epoch": 1.1546181007247722, "grad_norm": 0.6376176476478577, "learning_rate": 7.369930827850345e-05, "loss": 0.9386, "step": 6306 }, { "epoch": 1.1548039397881436, "grad_norm": 0.6968693733215332, "learning_rate": 7.367109315365256e-05, "loss": 1.0013, "step": 6307 }, { "epoch": 1.1549897788515147, "grad_norm": 0.6921547651290894, "learning_rate": 7.364288028077003e-05, "loss": 0.8702, "step": 6308 }, { "epoch": 1.1551756179148858, "grad_norm": 0.7634596824645996, "learning_rate": 7.361466966226886e-05, "loss": 0.9821, "step": 6309 }, { "epoch": 1.1553614569782569, "grad_norm": 0.594638466835022, "learning_rate": 7.358646130056204e-05, "loss": 0.9611, "step": 6310 }, { "epoch": 1.155547296041628, "grad_norm": 0.6549757122993469, "learning_rate": 7.35582551980623e-05, "loss": 0.9084, "step": 6311 }, { "epoch": 1.155733135104999, "grad_norm": 0.6684489250183105, "learning_rate": 7.35300513571821e-05, "loss": 1.0863, "step": 6312 }, { "epoch": 1.1559189741683702, "grad_norm": 0.6731480956077576, "learning_rate": 7.350184978033386e-05, "loss": 0.9022, "step": 6313 }, { "epoch": 1.1561048132317413, "grad_norm": 0.6278926730155945, "learning_rate": 7.347365046992962e-05, "loss": 1.1561, "step": 6314 }, { "epoch": 1.1562906522951124, "grad_norm": 1.9784194231033325, "learning_rate": 7.344545342838146e-05, "loss": 1.6575, "step": 6315 }, { "epoch": 1.1564764913584835, 
"grad_norm": 0.6113159656524658, "learning_rate": 7.341725865810099e-05, "loss": 0.7758, "step": 6316 }, { "epoch": 1.1566623304218546, "grad_norm": 0.7391144633293152, "learning_rate": 7.338906616149986e-05, "loss": 1.129, "step": 6317 }, { "epoch": 1.1568481694852257, "grad_norm": 0.6916064023971558, "learning_rate": 7.336087594098946e-05, "loss": 0.9741, "step": 6318 }, { "epoch": 1.1570340085485968, "grad_norm": 0.6826386451721191, "learning_rate": 7.333268799898085e-05, "loss": 1.2518, "step": 6319 }, { "epoch": 1.157219847611968, "grad_norm": 0.6638817191123962, "learning_rate": 7.330450233788512e-05, "loss": 0.9266, "step": 6320 }, { "epoch": 1.157405686675339, "grad_norm": 0.7738638520240784, "learning_rate": 7.327631896011297e-05, "loss": 1.1382, "step": 6321 }, { "epoch": 1.1575915257387104, "grad_norm": 0.7370550036430359, "learning_rate": 7.324813786807506e-05, "loss": 1.0067, "step": 6322 }, { "epoch": 1.1577773648020815, "grad_norm": 0.625015377998352, "learning_rate": 7.321995906418169e-05, "loss": 1.0133, "step": 6323 }, { "epoch": 1.1579632038654526, "grad_norm": 0.5457345247268677, "learning_rate": 7.31917825508431e-05, "loss": 0.6307, "step": 6324 }, { "epoch": 1.1581490429288237, "grad_norm": 0.7099806070327759, "learning_rate": 7.316360833046934e-05, "loss": 1.1707, "step": 6325 }, { "epoch": 1.1583348819921948, "grad_norm": 0.7791039347648621, "learning_rate": 7.31354364054701e-05, "loss": 0.9305, "step": 6326 }, { "epoch": 1.158520721055566, "grad_norm": 0.6316432356834412, "learning_rate": 7.310726677825508e-05, "loss": 0.9449, "step": 6327 }, { "epoch": 1.158706560118937, "grad_norm": 1.1281682252883911, "learning_rate": 7.307909945123363e-05, "loss": 1.1191, "step": 6328 }, { "epoch": 1.1588923991823081, "grad_norm": 0.7638713717460632, "learning_rate": 7.305093442681503e-05, "loss": 1.0541, "step": 6329 }, { "epoch": 1.1590782382456792, "grad_norm": 0.554898202419281, "learning_rate": 7.302277170740821e-05, "loss": 0.8519, "step": 6330 }, 
{ "epoch": 1.1592640773090503, "grad_norm": 0.6649084687232971, "learning_rate": 7.299461129542205e-05, "loss": 0.8424, "step": 6331 }, { "epoch": 1.1594499163724215, "grad_norm": 0.5770688652992249, "learning_rate": 7.296645319326516e-05, "loss": 0.9126, "step": 6332 }, { "epoch": 1.1596357554357926, "grad_norm": 0.6516635417938232, "learning_rate": 7.293829740334594e-05, "loss": 0.8695, "step": 6333 }, { "epoch": 1.1598215944991637, "grad_norm": 0.5726830959320068, "learning_rate": 7.291014392807264e-05, "loss": 0.891, "step": 6334 }, { "epoch": 1.1600074335625348, "grad_norm": 0.7533463835716248, "learning_rate": 7.288199276985327e-05, "loss": 1.1437, "step": 6335 }, { "epoch": 1.1601932726259059, "grad_norm": 0.6398054957389832, "learning_rate": 7.28538439310957e-05, "loss": 0.7641, "step": 6336 }, { "epoch": 1.1603791116892772, "grad_norm": 0.8049401640892029, "learning_rate": 7.282569741420749e-05, "loss": 0.8801, "step": 6337 }, { "epoch": 1.160564950752648, "grad_norm": 0.6398137211799622, "learning_rate": 7.279755322159611e-05, "loss": 0.9682, "step": 6338 }, { "epoch": 1.1607507898160194, "grad_norm": 0.6790042519569397, "learning_rate": 7.276941135566884e-05, "loss": 1.0039, "step": 6339 }, { "epoch": 1.1609366288793905, "grad_norm": 0.7095458507537842, "learning_rate": 7.274127181883262e-05, "loss": 0.756, "step": 6340 }, { "epoch": 1.1611224679427616, "grad_norm": 0.7210416197776794, "learning_rate": 7.271313461349436e-05, "loss": 0.7686, "step": 6341 }, { "epoch": 1.1613083070061327, "grad_norm": 0.6349940299987793, "learning_rate": 7.268499974206067e-05, "loss": 0.4654, "step": 6342 }, { "epoch": 1.1614941460695039, "grad_norm": 0.5711683034896851, "learning_rate": 7.2656867206938e-05, "loss": 0.6471, "step": 6343 }, { "epoch": 1.161679985132875, "grad_norm": 0.7057211399078369, "learning_rate": 7.262873701053254e-05, "loss": 1.0344, "step": 6344 }, { "epoch": 1.161865824196246, "grad_norm": 0.8090634942054749, "learning_rate": 7.260060915525037e-05, 
"loss": 1.065, "step": 6345 }, { "epoch": 1.1620516632596172, "grad_norm": 0.7852050065994263, "learning_rate": 7.257248364349736e-05, "loss": 0.9761, "step": 6346 }, { "epoch": 1.1622375023229883, "grad_norm": 1.0304369926452637, "learning_rate": 7.254436047767905e-05, "loss": 1.071, "step": 6347 }, { "epoch": 1.1624233413863594, "grad_norm": 0.7340937256813049, "learning_rate": 7.251623966020095e-05, "loss": 1.0406, "step": 6348 }, { "epoch": 1.1626091804497305, "grad_norm": 0.6150162220001221, "learning_rate": 7.248812119346827e-05, "loss": 0.9738, "step": 6349 }, { "epoch": 1.1627950195131016, "grad_norm": 0.6172744631767273, "learning_rate": 7.246000507988606e-05, "loss": 0.7907, "step": 6350 }, { "epoch": 1.1629808585764727, "grad_norm": 0.6725603938102722, "learning_rate": 7.24318913218591e-05, "loss": 0.8283, "step": 6351 }, { "epoch": 1.1631666976398438, "grad_norm": 0.6481371521949768, "learning_rate": 7.240377992179206e-05, "loss": 0.7308, "step": 6352 }, { "epoch": 1.163352536703215, "grad_norm": 0.574016273021698, "learning_rate": 7.237567088208942e-05, "loss": 0.9847, "step": 6353 }, { "epoch": 1.1635383757665863, "grad_norm": 0.6835063695907593, "learning_rate": 7.234756420515531e-05, "loss": 0.8516, "step": 6354 }, { "epoch": 1.1637242148299571, "grad_norm": 0.6407470107078552, "learning_rate": 7.231945989339383e-05, "loss": 0.6269, "step": 6355 }, { "epoch": 1.1639100538933285, "grad_norm": 0.7623193264007568, "learning_rate": 7.229135794920874e-05, "loss": 1.1166, "step": 6356 }, { "epoch": 1.1640958929566996, "grad_norm": 0.6505951881408691, "learning_rate": 7.226325837500369e-05, "loss": 0.8555, "step": 6357 }, { "epoch": 1.1642817320200707, "grad_norm": 0.7478532195091248, "learning_rate": 7.22351611731821e-05, "loss": 1.0839, "step": 6358 }, { "epoch": 1.1644675710834418, "grad_norm": 0.6620163321495056, "learning_rate": 7.220706634614717e-05, "loss": 0.8757, "step": 6359 }, { "epoch": 1.164653410146813, "grad_norm": 0.6156098246574402, 
"learning_rate": 7.217897389630197e-05, "loss": 0.9406, "step": 6360 }, { "epoch": 1.164839249210184, "grad_norm": 0.7431986927986145, "learning_rate": 7.215088382604922e-05, "loss": 0.9566, "step": 6361 }, { "epoch": 1.165025088273555, "grad_norm": 0.797284722328186, "learning_rate": 7.212279613779161e-05, "loss": 0.8231, "step": 6362 }, { "epoch": 1.1652109273369262, "grad_norm": 0.6188994646072388, "learning_rate": 7.209471083393146e-05, "loss": 0.732, "step": 6363 }, { "epoch": 1.1653967664002973, "grad_norm": 0.7715564370155334, "learning_rate": 7.206662791687102e-05, "loss": 1.188, "step": 6364 }, { "epoch": 1.1655826054636684, "grad_norm": 0.7624145746231079, "learning_rate": 7.203854738901223e-05, "loss": 1.0448, "step": 6365 }, { "epoch": 1.1657684445270395, "grad_norm": 0.7245141863822937, "learning_rate": 7.201046925275694e-05, "loss": 0.9017, "step": 6366 }, { "epoch": 1.1659542835904106, "grad_norm": 0.7528111934661865, "learning_rate": 7.198239351050673e-05, "loss": 0.7053, "step": 6367 }, { "epoch": 1.1661401226537818, "grad_norm": 0.6484794616699219, "learning_rate": 7.195432016466293e-05, "loss": 0.895, "step": 6368 }, { "epoch": 1.1663259617171529, "grad_norm": 1.6364943981170654, "learning_rate": 7.192624921762679e-05, "loss": 1.6132, "step": 6369 }, { "epoch": 1.166511800780524, "grad_norm": 0.9956699013710022, "learning_rate": 7.189818067179916e-05, "loss": 0.9769, "step": 6370 }, { "epoch": 1.1666976398438953, "grad_norm": 0.7614436745643616, "learning_rate": 7.187011452958092e-05, "loss": 0.7649, "step": 6371 }, { "epoch": 1.1668834789072664, "grad_norm": 0.6507315039634705, "learning_rate": 7.184205079337254e-05, "loss": 0.9658, "step": 6372 }, { "epoch": 1.1670693179706375, "grad_norm": 0.8169115781784058, "learning_rate": 7.181398946557443e-05, "loss": 1.0792, "step": 6373 }, { "epoch": 1.1672551570340086, "grad_norm": 0.5518690943717957, "learning_rate": 7.178593054858675e-05, "loss": 0.637, "step": 6374 }, { "epoch": 1.1674409960973797, 
"grad_norm": 0.6700672507286072, "learning_rate": 7.175787404480936e-05, "loss": 1.0321, "step": 6375 }, { "epoch": 1.1676268351607508, "grad_norm": 0.5618696808815002, "learning_rate": 7.17298199566421e-05, "loss": 0.6381, "step": 6376 }, { "epoch": 1.167812674224122, "grad_norm": 0.6601108312606812, "learning_rate": 7.170176828648438e-05, "loss": 1.104, "step": 6377 }, { "epoch": 1.167998513287493, "grad_norm": 0.7242966294288635, "learning_rate": 7.16737190367356e-05, "loss": 0.8377, "step": 6378 }, { "epoch": 1.1681843523508642, "grad_norm": 0.6647973656654358, "learning_rate": 7.164567220979486e-05, "loss": 0.962, "step": 6379 }, { "epoch": 1.1683701914142353, "grad_norm": 0.8982508182525635, "learning_rate": 7.161762780806103e-05, "loss": 1.087, "step": 6380 }, { "epoch": 1.1685560304776064, "grad_norm": 0.5682864189147949, "learning_rate": 7.158958583393289e-05, "loss": 0.8412, "step": 6381 }, { "epoch": 1.1687418695409775, "grad_norm": 0.6696249842643738, "learning_rate": 7.156154628980883e-05, "loss": 0.7829, "step": 6382 }, { "epoch": 1.1689277086043486, "grad_norm": 0.8441759347915649, "learning_rate": 7.153350917808723e-05, "loss": 1.1044, "step": 6383 }, { "epoch": 1.1691135476677197, "grad_norm": 0.5896120667457581, "learning_rate": 7.150547450116606e-05, "loss": 0.8167, "step": 6384 }, { "epoch": 1.1692993867310908, "grad_norm": 0.6768901348114014, "learning_rate": 7.147744226144327e-05, "loss": 0.8811, "step": 6385 }, { "epoch": 1.169485225794462, "grad_norm": 0.6406312584877014, "learning_rate": 7.144941246131647e-05, "loss": 0.8857, "step": 6386 }, { "epoch": 1.169671064857833, "grad_norm": 0.562627911567688, "learning_rate": 7.142138510318311e-05, "loss": 0.786, "step": 6387 }, { "epoch": 1.1698569039212043, "grad_norm": 0.6905426383018494, "learning_rate": 7.13933601894405e-05, "loss": 0.9298, "step": 6388 }, { "epoch": 1.1700427429845754, "grad_norm": 0.6309052109718323, "learning_rate": 7.136533772248559e-05, "loss": 1.1465, "step": 6389 }, { 
"epoch": 1.1702285820479466, "grad_norm": 0.6853886842727661, "learning_rate": 7.133731770471526e-05, "loss": 0.7632, "step": 6390 }, { "epoch": 1.1704144211113177, "grad_norm": 0.6898705363273621, "learning_rate": 7.130930013852606e-05, "loss": 0.9551, "step": 6391 }, { "epoch": 1.1706002601746888, "grad_norm": 0.6434992551803589, "learning_rate": 7.128128502631445e-05, "loss": 1.015, "step": 6392 }, { "epoch": 1.1707860992380599, "grad_norm": 0.7179650068283081, "learning_rate": 7.125327237047657e-05, "loss": 0.9536, "step": 6393 }, { "epoch": 1.170971938301431, "grad_norm": 0.650525689125061, "learning_rate": 7.122526217340847e-05, "loss": 1.073, "step": 6394 }, { "epoch": 1.171157777364802, "grad_norm": 0.672072172164917, "learning_rate": 7.119725443750588e-05, "loss": 0.7899, "step": 6395 }, { "epoch": 1.1713436164281732, "grad_norm": 0.665378987789154, "learning_rate": 7.116924916516434e-05, "loss": 1.0183, "step": 6396 }, { "epoch": 1.1715294554915443, "grad_norm": 0.7998149991035461, "learning_rate": 7.114124635877929e-05, "loss": 1.1661, "step": 6397 }, { "epoch": 1.1717152945549154, "grad_norm": 0.740707516670227, "learning_rate": 7.111324602074576e-05, "loss": 1.0706, "step": 6398 }, { "epoch": 1.1719011336182865, "grad_norm": 0.7028396725654602, "learning_rate": 7.108524815345873e-05, "loss": 0.8416, "step": 6399 }, { "epoch": 1.1720869726816576, "grad_norm": 0.7408978343009949, "learning_rate": 7.105725275931296e-05, "loss": 0.9771, "step": 6400 }, { "epoch": 1.1722728117450287, "grad_norm": 0.6476548314094543, "learning_rate": 7.102925984070286e-05, "loss": 0.6686, "step": 6401 }, { "epoch": 1.1724586508083998, "grad_norm": 0.703307032585144, "learning_rate": 7.100126940002282e-05, "loss": 0.9585, "step": 6402 }, { "epoch": 1.1726444898717712, "grad_norm": 0.6147748827934265, "learning_rate": 7.097328143966685e-05, "loss": 0.8826, "step": 6403 }, { "epoch": 1.172830328935142, "grad_norm": 0.7942382097244263, "learning_rate": 7.09452959620289e-05, 
"loss": 1.0286, "step": 6404 }, { "epoch": 1.1730161679985134, "grad_norm": 0.6308249831199646, "learning_rate": 7.091731296950255e-05, "loss": 0.9641, "step": 6405 }, { "epoch": 1.1732020070618845, "grad_norm": 0.7863003015518188, "learning_rate": 7.088933246448131e-05, "loss": 1.1257, "step": 6406 }, { "epoch": 1.1733878461252556, "grad_norm": 0.5810206532478333, "learning_rate": 7.086135444935833e-05, "loss": 0.9111, "step": 6407 }, { "epoch": 1.1735736851886267, "grad_norm": 0.6318856477737427, "learning_rate": 7.083337892652669e-05, "loss": 0.8775, "step": 6408 }, { "epoch": 1.1737595242519978, "grad_norm": 0.711527407169342, "learning_rate": 7.080540589837921e-05, "loss": 1.0231, "step": 6409 }, { "epoch": 1.173945363315369, "grad_norm": 0.7220918536186218, "learning_rate": 7.077743536730842e-05, "loss": 0.7934, "step": 6410 }, { "epoch": 1.17413120237874, "grad_norm": 0.6480425596237183, "learning_rate": 7.074946733570682e-05, "loss": 0.7943, "step": 6411 }, { "epoch": 1.1743170414421111, "grad_norm": 0.6978838443756104, "learning_rate": 7.072150180596644e-05, "loss": 1.1222, "step": 6412 }, { "epoch": 1.1745028805054822, "grad_norm": 0.7103899717330933, "learning_rate": 7.069353878047933e-05, "loss": 0.8338, "step": 6413 }, { "epoch": 1.1746887195688533, "grad_norm": 0.5447167754173279, "learning_rate": 7.066557826163713e-05, "loss": 0.8314, "step": 6414 }, { "epoch": 1.1748745586322245, "grad_norm": 0.6523606777191162, "learning_rate": 7.063762025183144e-05, "loss": 0.9746, "step": 6415 }, { "epoch": 1.1750603976955956, "grad_norm": 0.7402204871177673, "learning_rate": 7.060966475345356e-05, "loss": 1.1999, "step": 6416 }, { "epoch": 1.1752462367589667, "grad_norm": 0.800520122051239, "learning_rate": 7.058171176889456e-05, "loss": 0.9486, "step": 6417 }, { "epoch": 1.1754320758223378, "grad_norm": 0.6214317083358765, "learning_rate": 7.055376130054539e-05, "loss": 0.6924, "step": 6418 }, { "epoch": 1.1756179148857089, "grad_norm": 0.6461222767829895, 
"learning_rate": 7.05258133507966e-05, "loss": 0.9349, "step": 6419 }, { "epoch": 1.1758037539490802, "grad_norm": 0.6513392329216003, "learning_rate": 7.049786792203874e-05, "loss": 0.8558, "step": 6420 }, { "epoch": 1.175989593012451, "grad_norm": 0.7250276803970337, "learning_rate": 7.046992501666195e-05, "loss": 0.9061, "step": 6421 }, { "epoch": 1.1761754320758224, "grad_norm": 0.8080319166183472, "learning_rate": 7.044198463705631e-05, "loss": 1.027, "step": 6422 }, { "epoch": 1.1763612711391935, "grad_norm": 0.5869935154914856, "learning_rate": 7.041404678561162e-05, "loss": 1.0799, "step": 6423 }, { "epoch": 1.1765471102025646, "grad_norm": 0.7930959463119507, "learning_rate": 7.038611146471743e-05, "loss": 1.2881, "step": 6424 }, { "epoch": 1.1767329492659357, "grad_norm": 0.6239271759986877, "learning_rate": 7.035817867676317e-05, "loss": 0.9041, "step": 6425 }, { "epoch": 1.1769187883293069, "grad_norm": 0.5888556241989136, "learning_rate": 7.033024842413792e-05, "loss": 0.9099, "step": 6426 }, { "epoch": 1.177104627392678, "grad_norm": 0.7566354274749756, "learning_rate": 7.030232070923069e-05, "loss": 0.9611, "step": 6427 }, { "epoch": 1.177290466456049, "grad_norm": 0.7134408950805664, "learning_rate": 7.02743955344301e-05, "loss": 0.9586, "step": 6428 }, { "epoch": 1.1774763055194202, "grad_norm": 0.7655895352363586, "learning_rate": 7.024647290212472e-05, "loss": 0.9405, "step": 6429 }, { "epoch": 1.1776621445827913, "grad_norm": 0.7383896112442017, "learning_rate": 7.021855281470283e-05, "loss": 1.2127, "step": 6430 }, { "epoch": 1.1778479836461624, "grad_norm": 0.7671892642974854, "learning_rate": 7.019063527455246e-05, "loss": 1.0271, "step": 6431 }, { "epoch": 1.1780338227095335, "grad_norm": 0.5795000791549683, "learning_rate": 7.016272028406153e-05, "loss": 0.8227, "step": 6432 }, { "epoch": 1.1782196617729046, "grad_norm": 0.717526912689209, "learning_rate": 7.013480784561759e-05, "loss": 0.9521, "step": 6433 }, { "epoch": 1.1784055008362757, 
"grad_norm": 0.6924964189529419, "learning_rate": 7.010689796160805e-05, "loss": 1.1982, "step": 6434 }, { "epoch": 1.1785913398996468, "grad_norm": 0.7154371738433838, "learning_rate": 7.00789906344202e-05, "loss": 1.062, "step": 6435 }, { "epoch": 1.178777178963018, "grad_norm": 0.8084208965301514, "learning_rate": 7.005108586644091e-05, "loss": 1.0382, "step": 6436 }, { "epoch": 1.1789630180263893, "grad_norm": 0.7454267740249634, "learning_rate": 7.002318366005699e-05, "loss": 0.7163, "step": 6437 }, { "epoch": 1.1791488570897604, "grad_norm": 0.5913962125778198, "learning_rate": 6.999528401765494e-05, "loss": 0.7636, "step": 6438 }, { "epoch": 1.1793346961531315, "grad_norm": 0.6394737958908081, "learning_rate": 6.996738694162112e-05, "loss": 0.8579, "step": 6439 }, { "epoch": 1.1795205352165026, "grad_norm": 0.712942898273468, "learning_rate": 6.993949243434159e-05, "loss": 0.8436, "step": 6440 }, { "epoch": 1.1797063742798737, "grad_norm": 0.6500268578529358, "learning_rate": 6.991160049820222e-05, "loss": 0.7198, "step": 6441 }, { "epoch": 1.1798922133432448, "grad_norm": 0.7196839451789856, "learning_rate": 6.988371113558876e-05, "loss": 0.9428, "step": 6442 }, { "epoch": 1.180078052406616, "grad_norm": 0.6893028616905212, "learning_rate": 6.985582434888652e-05, "loss": 0.8055, "step": 6443 }, { "epoch": 1.180263891469987, "grad_norm": 0.6877514123916626, "learning_rate": 6.982794014048077e-05, "loss": 1.1647, "step": 6444 }, { "epoch": 1.180449730533358, "grad_norm": 0.8232098817825317, "learning_rate": 6.980005851275653e-05, "loss": 1.1632, "step": 6445 }, { "epoch": 1.1806355695967292, "grad_norm": 0.5898348093032837, "learning_rate": 6.977217946809857e-05, "loss": 0.9491, "step": 6446 }, { "epoch": 1.1808214086601003, "grad_norm": 0.8944123387336731, "learning_rate": 6.974430300889139e-05, "loss": 1.0263, "step": 6447 }, { "epoch": 1.1810072477234714, "grad_norm": 0.5503453612327576, "learning_rate": 6.971642913751936e-05, "loss": 0.7226, "step": 6448 
}, { "epoch": 1.1811930867868425, "grad_norm": 3.2862448692321777, "learning_rate": 6.968855785636665e-05, "loss": 1.5893, "step": 6449 }, { "epoch": 1.1813789258502136, "grad_norm": 0.7002162933349609, "learning_rate": 6.966068916781706e-05, "loss": 0.9923, "step": 6450 }, { "epoch": 1.1815647649135848, "grad_norm": 0.6545072197914124, "learning_rate": 6.963282307425434e-05, "loss": 0.9587, "step": 6451 }, { "epoch": 1.1817506039769559, "grad_norm": 0.7973212599754333, "learning_rate": 6.960495957806185e-05, "loss": 1.0527, "step": 6452 }, { "epoch": 1.181936443040327, "grad_norm": 0.6965364813804626, "learning_rate": 6.957709868162288e-05, "loss": 0.9853, "step": 6453 }, { "epoch": 1.1821222821036983, "grad_norm": 0.6737655401229858, "learning_rate": 6.954924038732038e-05, "loss": 1.1285, "step": 6454 }, { "epoch": 1.1823081211670694, "grad_norm": 0.6464536786079407, "learning_rate": 6.952138469753717e-05, "loss": 0.9844, "step": 6455 }, { "epoch": 1.1824939602304405, "grad_norm": 0.6128717660903931, "learning_rate": 6.949353161465585e-05, "loss": 0.7094, "step": 6456 }, { "epoch": 1.1826797992938116, "grad_norm": 0.6797751784324646, "learning_rate": 6.946568114105866e-05, "loss": 0.9996, "step": 6457 }, { "epoch": 1.1828656383571827, "grad_norm": 0.6216451525688171, "learning_rate": 6.943783327912778e-05, "loss": 0.8072, "step": 6458 }, { "epoch": 1.1830514774205538, "grad_norm": 0.6847977638244629, "learning_rate": 6.940998803124505e-05, "loss": 1.0382, "step": 6459 }, { "epoch": 1.183237316483925, "grad_norm": 0.6069141626358032, "learning_rate": 6.938214539979218e-05, "loss": 0.9816, "step": 6460 }, { "epoch": 1.183423155547296, "grad_norm": 0.7009860873222351, "learning_rate": 6.935430538715056e-05, "loss": 1.0688, "step": 6461 }, { "epoch": 1.1836089946106672, "grad_norm": 0.5075829029083252, "learning_rate": 6.932646799570144e-05, "loss": 0.4973, "step": 6462 }, { "epoch": 1.1837948336740383, "grad_norm": 0.6592481136322021, "learning_rate": 
6.929863322782583e-05, "loss": 0.8969, "step": 6463 }, { "epoch": 1.1839806727374094, "grad_norm": 0.8401702642440796, "learning_rate": 6.927080108590445e-05, "loss": 0.9823, "step": 6464 }, { "epoch": 1.1841665118007805, "grad_norm": 0.7158241271972656, "learning_rate": 6.92429715723179e-05, "loss": 0.8727, "step": 6465 }, { "epoch": 1.1843523508641516, "grad_norm": 0.7230640053749084, "learning_rate": 6.921514468944643e-05, "loss": 1.1566, "step": 6466 }, { "epoch": 1.1845381899275227, "grad_norm": 0.6751217842102051, "learning_rate": 6.91873204396702e-05, "loss": 1.094, "step": 6467 }, { "epoch": 1.1847240289908938, "grad_norm": 0.7101399302482605, "learning_rate": 6.915949882536901e-05, "loss": 1.0724, "step": 6468 }, { "epoch": 1.1849098680542651, "grad_norm": 0.7720397710800171, "learning_rate": 6.913167984892255e-05, "loss": 0.9402, "step": 6469 }, { "epoch": 1.185095707117636, "grad_norm": 0.7422873973846436, "learning_rate": 6.910386351271026e-05, "loss": 1.0397, "step": 6470 }, { "epoch": 1.1852815461810073, "grad_norm": 0.7033947110176086, "learning_rate": 6.907604981911127e-05, "loss": 1.0692, "step": 6471 }, { "epoch": 1.1854673852443784, "grad_norm": 0.7778051495552063, "learning_rate": 6.904823877050461e-05, "loss": 0.9041, "step": 6472 }, { "epoch": 1.1856532243077496, "grad_norm": 0.6052567958831787, "learning_rate": 6.902043036926894e-05, "loss": 0.9387, "step": 6473 }, { "epoch": 1.1858390633711207, "grad_norm": 0.7672857642173767, "learning_rate": 6.899262461778284e-05, "loss": 1.0686, "step": 6474 }, { "epoch": 1.1860249024344918, "grad_norm": 0.7012307047843933, "learning_rate": 6.896482151842455e-05, "loss": 1.0861, "step": 6475 }, { "epoch": 1.1862107414978629, "grad_norm": 0.6781837940216064, "learning_rate": 6.893702107357215e-05, "loss": 0.98, "step": 6476 }, { "epoch": 1.186396580561234, "grad_norm": 0.7039365768432617, "learning_rate": 6.890922328560352e-05, "loss": 0.7219, "step": 6477 }, { "epoch": 1.186582419624605, "grad_norm": 
0.715893030166626, "learning_rate": 6.888142815689617e-05, "loss": 0.9221, "step": 6478 }, { "epoch": 1.1867682586879762, "grad_norm": 0.5723534226417542, "learning_rate": 6.885363568982757e-05, "loss": 0.8025, "step": 6479 }, { "epoch": 1.1869540977513473, "grad_norm": 0.6755521297454834, "learning_rate": 6.882584588677479e-05, "loss": 0.8706, "step": 6480 }, { "epoch": 1.1871399368147184, "grad_norm": 0.6190736889839172, "learning_rate": 6.87980587501148e-05, "loss": 0.8423, "step": 6481 }, { "epoch": 1.1873257758780895, "grad_norm": 0.6882662773132324, "learning_rate": 6.877027428222427e-05, "loss": 1.1149, "step": 6482 }, { "epoch": 1.1875116149414606, "grad_norm": 0.8642654418945312, "learning_rate": 6.874249248547966e-05, "loss": 0.952, "step": 6483 }, { "epoch": 1.1876974540048317, "grad_norm": 0.6334053874015808, "learning_rate": 6.871471336225728e-05, "loss": 0.9349, "step": 6484 }, { "epoch": 1.1878832930682028, "grad_norm": 0.6225823760032654, "learning_rate": 6.868693691493305e-05, "loss": 0.9393, "step": 6485 }, { "epoch": 1.1880691321315742, "grad_norm": 0.6007322072982788, "learning_rate": 6.86591631458828e-05, "loss": 0.9067, "step": 6486 }, { "epoch": 1.188254971194945, "grad_norm": 0.7253145575523376, "learning_rate": 6.863139205748203e-05, "loss": 0.6779, "step": 6487 }, { "epoch": 1.1884408102583164, "grad_norm": 0.6054568290710449, "learning_rate": 6.86036236521061e-05, "loss": 0.8598, "step": 6488 }, { "epoch": 1.1886266493216875, "grad_norm": 0.8909119367599487, "learning_rate": 6.857585793213011e-05, "loss": 1.0237, "step": 6489 }, { "epoch": 1.1888124883850586, "grad_norm": 0.6840725541114807, "learning_rate": 6.85480948999289e-05, "loss": 0.9126, "step": 6490 }, { "epoch": 1.1889983274484297, "grad_norm": 0.6242436766624451, "learning_rate": 6.852033455787712e-05, "loss": 0.9567, "step": 6491 }, { "epoch": 1.1891841665118008, "grad_norm": 0.6949776411056519, "learning_rate": 6.849257690834914e-05, "loss": 0.9466, "step": 6492 }, { "epoch": 
1.189370005575172, "grad_norm": 0.7519738674163818, "learning_rate": 6.84648219537192e-05, "loss": 0.7867, "step": 6493 }, { "epoch": 1.189555844638543, "grad_norm": 0.6986894607543945, "learning_rate": 6.843706969636115e-05, "loss": 1.1686, "step": 6494 }, { "epoch": 1.1897416837019141, "grad_norm": 0.6281919479370117, "learning_rate": 6.840932013864876e-05, "loss": 0.9281, "step": 6495 }, { "epoch": 1.1899275227652852, "grad_norm": 1.0236083269119263, "learning_rate": 6.83815732829555e-05, "loss": 1.3388, "step": 6496 }, { "epoch": 1.1901133618286563, "grad_norm": 0.5717867016792297, "learning_rate": 6.83538291316546e-05, "loss": 0.7795, "step": 6497 }, { "epoch": 1.1902992008920275, "grad_norm": 0.6059176325798035, "learning_rate": 6.83260876871191e-05, "loss": 0.9609, "step": 6498 }, { "epoch": 1.1904850399553986, "grad_norm": 0.6473904848098755, "learning_rate": 6.829834895172175e-05, "loss": 0.9561, "step": 6499 }, { "epoch": 1.1906708790187697, "grad_norm": 0.6453015208244324, "learning_rate": 6.827061292783519e-05, "loss": 0.8863, "step": 6500 }, { "epoch": 1.1908567180821408, "grad_norm": 0.5157768726348877, "learning_rate": 6.824287961783163e-05, "loss": 0.6292, "step": 6501 }, { "epoch": 1.1910425571455119, "grad_norm": 0.6397795677185059, "learning_rate": 6.821514902408326e-05, "loss": 0.7689, "step": 6502 }, { "epoch": 1.1912283962088832, "grad_norm": 0.6399672031402588, "learning_rate": 6.818742114896184e-05, "loss": 0.8271, "step": 6503 }, { "epoch": 1.1914142352722543, "grad_norm": 0.5401207208633423, "learning_rate": 6.815969599483906e-05, "loss": 0.6154, "step": 6504 }, { "epoch": 1.1916000743356254, "grad_norm": 0.790274977684021, "learning_rate": 6.81319735640863e-05, "loss": 0.5777, "step": 6505 }, { "epoch": 1.1917859133989965, "grad_norm": 0.7683626413345337, "learning_rate": 6.810425385907471e-05, "loss": 0.9646, "step": 6506 }, { "epoch": 1.1919717524623676, "grad_norm": 0.6598650813102722, "learning_rate": 6.807653688217526e-05, "loss": 
0.9366, "step": 6507 }, { "epoch": 1.1921575915257387, "grad_norm": 0.6899055242538452, "learning_rate": 6.804882263575855e-05, "loss": 0.9253, "step": 6508 }, { "epoch": 1.1923434305891099, "grad_norm": 0.7945172190666199, "learning_rate": 6.802111112219516e-05, "loss": 0.977, "step": 6509 }, { "epoch": 1.192529269652481, "grad_norm": 0.6663496494293213, "learning_rate": 6.799340234385521e-05, "loss": 0.9406, "step": 6510 }, { "epoch": 1.192715108715852, "grad_norm": 0.7770522236824036, "learning_rate": 6.796569630310872e-05, "loss": 1.0315, "step": 6511 }, { "epoch": 1.1929009477792232, "grad_norm": 0.7419028282165527, "learning_rate": 6.793799300232548e-05, "loss": 0.7592, "step": 6512 }, { "epoch": 1.1930867868425943, "grad_norm": 0.7410839796066284, "learning_rate": 6.791029244387499e-05, "loss": 0.752, "step": 6513 }, { "epoch": 1.1932726259059654, "grad_norm": 0.7905089259147644, "learning_rate": 6.788259463012656e-05, "loss": 0.9731, "step": 6514 }, { "epoch": 1.1934584649693365, "grad_norm": 1.5963722467422485, "learning_rate": 6.785489956344921e-05, "loss": 1.6695, "step": 6515 }, { "epoch": 1.1936443040327076, "grad_norm": 0.686390221118927, "learning_rate": 6.782720724621178e-05, "loss": 0.9611, "step": 6516 }, { "epoch": 1.1938301430960787, "grad_norm": 0.6483101844787598, "learning_rate": 6.779951768078284e-05, "loss": 0.8985, "step": 6517 }, { "epoch": 1.19401598215945, "grad_norm": 0.6660516262054443, "learning_rate": 6.777183086953074e-05, "loss": 1.0176, "step": 6518 }, { "epoch": 1.194201821222821, "grad_norm": 0.6592452526092529, "learning_rate": 6.774414681482361e-05, "loss": 0.8554, "step": 6519 }, { "epoch": 1.1943876602861923, "grad_norm": 0.7071273922920227, "learning_rate": 6.771646551902932e-05, "loss": 1.0079, "step": 6520 }, { "epoch": 1.1945734993495634, "grad_norm": 0.7174739241600037, "learning_rate": 6.768878698451553e-05, "loss": 0.9785, "step": 6521 }, { "epoch": 1.1947593384129345, "grad_norm": 0.8033270835876465, 
"learning_rate": 6.766111121364958e-05, "loss": 1.0207, "step": 6522 }, { "epoch": 1.1949451774763056, "grad_norm": 0.6496764421463013, "learning_rate": 6.763343820879872e-05, "loss": 1.0746, "step": 6523 }, { "epoch": 1.1951310165396767, "grad_norm": 0.6587570905685425, "learning_rate": 6.760576797232983e-05, "loss": 0.9393, "step": 6524 }, { "epoch": 1.1953168556030478, "grad_norm": 0.730059027671814, "learning_rate": 6.757810050660958e-05, "loss": 0.6695, "step": 6525 }, { "epoch": 1.195502694666419, "grad_norm": 0.7065173387527466, "learning_rate": 6.755043581400452e-05, "loss": 0.9532, "step": 6526 }, { "epoch": 1.19568853372979, "grad_norm": 0.7613650560379028, "learning_rate": 6.752277389688078e-05, "loss": 1.1234, "step": 6527 }, { "epoch": 1.1958743727931611, "grad_norm": 0.6849968433380127, "learning_rate": 6.749511475760441e-05, "loss": 0.8637, "step": 6528 }, { "epoch": 1.1960602118565322, "grad_norm": 0.7032272219657898, "learning_rate": 6.746745839854111e-05, "loss": 0.9595, "step": 6529 }, { "epoch": 1.1962460509199033, "grad_norm": 0.6895639300346375, "learning_rate": 6.743980482205644e-05, "loss": 0.9467, "step": 6530 }, { "epoch": 1.1964318899832744, "grad_norm": 0.6482696533203125, "learning_rate": 6.74121540305156e-05, "loss": 0.9559, "step": 6531 }, { "epoch": 1.1966177290466455, "grad_norm": 0.6822243332862854, "learning_rate": 6.738450602628364e-05, "loss": 1.058, "step": 6532 }, { "epoch": 1.1968035681100166, "grad_norm": 0.699733555316925, "learning_rate": 6.73568608117254e-05, "loss": 0.8538, "step": 6533 }, { "epoch": 1.1969894071733878, "grad_norm": 0.6649261713027954, "learning_rate": 6.732921838920538e-05, "loss": 0.7759, "step": 6534 }, { "epoch": 1.197175246236759, "grad_norm": 0.8819229006767273, "learning_rate": 6.730157876108798e-05, "loss": 1.1101, "step": 6535 }, { "epoch": 1.19736108530013, "grad_norm": 0.733377993106842, "learning_rate": 6.727394192973716e-05, "loss": 1.2236, "step": 6536 }, { "epoch": 1.1975469243635013, 
"grad_norm": 0.6071882247924805, "learning_rate": 6.724630789751688e-05, "loss": 0.9082, "step": 6537 }, { "epoch": 1.1977327634268724, "grad_norm": 0.6043367981910706, "learning_rate": 6.72186766667906e-05, "loss": 0.787, "step": 6538 }, { "epoch": 1.1979186024902435, "grad_norm": 0.5902948379516602, "learning_rate": 6.719104823992178e-05, "loss": 1.0035, "step": 6539 }, { "epoch": 1.1981044415536146, "grad_norm": 0.5684782266616821, "learning_rate": 6.716342261927353e-05, "loss": 0.7045, "step": 6540 }, { "epoch": 1.1982902806169857, "grad_norm": 0.6624257564544678, "learning_rate": 6.713579980720872e-05, "loss": 1.0531, "step": 6541 }, { "epoch": 1.1984761196803568, "grad_norm": 0.7116941809654236, "learning_rate": 6.710817980608996e-05, "loss": 0.8097, "step": 6542 }, { "epoch": 1.198661958743728, "grad_norm": 0.730290412902832, "learning_rate": 6.708056261827966e-05, "loss": 0.9771, "step": 6543 }, { "epoch": 1.198847797807099, "grad_norm": 0.6953971982002258, "learning_rate": 6.705294824614004e-05, "loss": 0.8913, "step": 6544 }, { "epoch": 1.1990336368704702, "grad_norm": 0.6694558262825012, "learning_rate": 6.70253366920329e-05, "loss": 0.9706, "step": 6545 }, { "epoch": 1.1992194759338413, "grad_norm": 0.6435080766677856, "learning_rate": 6.699772795832e-05, "loss": 0.9639, "step": 6546 }, { "epoch": 1.1994053149972124, "grad_norm": 0.6081843376159668, "learning_rate": 6.69701220473628e-05, "loss": 0.7492, "step": 6547 }, { "epoch": 1.1995911540605835, "grad_norm": 0.7480099201202393, "learning_rate": 6.69425189615224e-05, "loss": 0.9639, "step": 6548 }, { "epoch": 1.1997769931239546, "grad_norm": 0.5997082591056824, "learning_rate": 6.69149187031598e-05, "loss": 0.8858, "step": 6549 }, { "epoch": 1.1999628321873257, "grad_norm": 0.6469660401344299, "learning_rate": 6.68873212746357e-05, "loss": 0.7982, "step": 6550 }, { "epoch": 1.2001486712506968, "grad_norm": 0.7918186783790588, "learning_rate": 6.685972667831064e-05, "loss": 1.2226, "step": 6551 }, { 
"epoch": 1.2003345103140681, "grad_norm": 0.7456339001655579, "learning_rate": 6.683213491654472e-05, "loss": 1.0805, "step": 6552 }, { "epoch": 1.2005203493774392, "grad_norm": 0.7502478957176208, "learning_rate": 6.680454599169799e-05, "loss": 1.0391, "step": 6553 }, { "epoch": 1.2007061884408103, "grad_norm": 0.6726301312446594, "learning_rate": 6.677695990613024e-05, "loss": 1.0379, "step": 6554 }, { "epoch": 1.2008920275041814, "grad_norm": 0.7034464478492737, "learning_rate": 6.674937666220085e-05, "loss": 0.9541, "step": 6555 }, { "epoch": 1.2010778665675526, "grad_norm": 0.6492792367935181, "learning_rate": 6.672179626226917e-05, "loss": 0.717, "step": 6556 }, { "epoch": 1.2012637056309237, "grad_norm": 0.7404565215110779, "learning_rate": 6.669421870869416e-05, "loss": 1.1081, "step": 6557 }, { "epoch": 1.2014495446942948, "grad_norm": 0.7572711706161499, "learning_rate": 6.666664400383465e-05, "loss": 0.9685, "step": 6558 }, { "epoch": 1.2016353837576659, "grad_norm": 0.9167105555534363, "learning_rate": 6.663907215004907e-05, "loss": 1.031, "step": 6559 }, { "epoch": 1.201821222821037, "grad_norm": 1.3603627681732178, "learning_rate": 6.661150314969578e-05, "loss": 1.564, "step": 6560 }, { "epoch": 1.202007061884408, "grad_norm": 0.7942409515380859, "learning_rate": 6.65839370051328e-05, "loss": 1.0709, "step": 6561 }, { "epoch": 1.2021929009477792, "grad_norm": 0.6643564105033875, "learning_rate": 6.655637371871788e-05, "loss": 1.0886, "step": 6562 }, { "epoch": 1.2023787400111503, "grad_norm": 0.6733203530311584, "learning_rate": 6.65288132928086e-05, "loss": 0.9338, "step": 6563 }, { "epoch": 1.2025645790745214, "grad_norm": 0.5864598155021667, "learning_rate": 6.650125572976227e-05, "loss": 0.9149, "step": 6564 }, { "epoch": 1.2027504181378925, "grad_norm": 0.659898042678833, "learning_rate": 6.647370103193595e-05, "loss": 1.0477, "step": 6565 }, { "epoch": 1.2029362572012636, "grad_norm": 0.7071530818939209, "learning_rate": 6.644614920168641e-05, 
"loss": 0.9551, "step": 6566 }, { "epoch": 1.2031220962646347, "grad_norm": 0.6440016627311707, "learning_rate": 6.641860024137025e-05, "loss": 0.9242, "step": 6567 }, { "epoch": 1.2033079353280058, "grad_norm": 0.5903676152229309, "learning_rate": 6.639105415334383e-05, "loss": 0.9222, "step": 6568 }, { "epoch": 1.2034937743913772, "grad_norm": 0.8212350606918335, "learning_rate": 6.636351093996316e-05, "loss": 0.8822, "step": 6569 }, { "epoch": 1.2036796134547483, "grad_norm": 0.6122947335243225, "learning_rate": 6.63359706035841e-05, "loss": 0.8444, "step": 6570 }, { "epoch": 1.2038654525181194, "grad_norm": 0.6683095097541809, "learning_rate": 6.630843314656221e-05, "loss": 0.9007, "step": 6571 }, { "epoch": 1.2040512915814905, "grad_norm": 0.7547433376312256, "learning_rate": 6.62808985712529e-05, "loss": 0.934, "step": 6572 }, { "epoch": 1.2042371306448616, "grad_norm": 0.6892956495285034, "learning_rate": 6.625336688001117e-05, "loss": 1.1036, "step": 6573 }, { "epoch": 1.2044229697082327, "grad_norm": 0.7414408326148987, "learning_rate": 6.622583807519191e-05, "loss": 1.1518, "step": 6574 }, { "epoch": 1.2046088087716038, "grad_norm": 0.737737238407135, "learning_rate": 6.619831215914974e-05, "loss": 0.877, "step": 6575 }, { "epoch": 1.204794647834975, "grad_norm": 0.7120890021324158, "learning_rate": 6.617078913423895e-05, "loss": 0.9871, "step": 6576 }, { "epoch": 1.204980486898346, "grad_norm": 0.6039966344833374, "learning_rate": 6.614326900281371e-05, "loss": 0.7617, "step": 6577 }, { "epoch": 1.2051663259617171, "grad_norm": 0.6865859031677246, "learning_rate": 6.611575176722783e-05, "loss": 0.7669, "step": 6578 }, { "epoch": 1.2053521650250882, "grad_norm": 0.6812560558319092, "learning_rate": 6.608823742983497e-05, "loss": 0.9179, "step": 6579 }, { "epoch": 1.2055380040884593, "grad_norm": 0.8033499121665955, "learning_rate": 6.60607259929884e-05, "loss": 1.0227, "step": 6580 }, { "epoch": 1.2057238431518305, "grad_norm": 0.6142318844795227, 
"learning_rate": 6.60332174590413e-05, "loss": 0.6658, "step": 6581 }, { "epoch": 1.2059096822152016, "grad_norm": 0.7506161332130432, "learning_rate": 6.600571183034657e-05, "loss": 0.9, "step": 6582 }, { "epoch": 1.2060955212785727, "grad_norm": 0.7655713558197021, "learning_rate": 6.597820910925674e-05, "loss": 1.2014, "step": 6583 }, { "epoch": 1.206281360341944, "grad_norm": 0.8014414310455322, "learning_rate": 6.595070929812423e-05, "loss": 1.1676, "step": 6584 }, { "epoch": 1.2064671994053149, "grad_norm": 0.6293933987617493, "learning_rate": 6.592321239930112e-05, "loss": 1.0769, "step": 6585 }, { "epoch": 1.2066530384686862, "grad_norm": 0.7731804251670837, "learning_rate": 6.589571841513934e-05, "loss": 1.0054, "step": 6586 }, { "epoch": 1.2068388775320573, "grad_norm": 0.686371922492981, "learning_rate": 6.586822734799044e-05, "loss": 1.0477, "step": 6587 }, { "epoch": 1.2070247165954284, "grad_norm": 0.7579498291015625, "learning_rate": 6.584073920020581e-05, "loss": 1.0027, "step": 6588 }, { "epoch": 1.2072105556587995, "grad_norm": 0.7064153552055359, "learning_rate": 6.581325397413664e-05, "loss": 1.0057, "step": 6589 }, { "epoch": 1.2073963947221706, "grad_norm": 0.6257011294364929, "learning_rate": 6.57857716721337e-05, "loss": 0.8138, "step": 6590 }, { "epoch": 1.2075822337855417, "grad_norm": 0.6599844098091125, "learning_rate": 6.575829229654769e-05, "loss": 0.8032, "step": 6591 }, { "epoch": 1.2077680728489129, "grad_norm": 0.7026282548904419, "learning_rate": 6.573081584972892e-05, "loss": 1.0257, "step": 6592 }, { "epoch": 1.207953911912284, "grad_norm": 0.6733138561248779, "learning_rate": 6.570334233402756e-05, "loss": 0.9201, "step": 6593 }, { "epoch": 1.208139750975655, "grad_norm": 0.6270269155502319, "learning_rate": 6.567587175179342e-05, "loss": 0.8864, "step": 6594 }, { "epoch": 1.2083255900390262, "grad_norm": 0.6575441360473633, "learning_rate": 6.564840410537613e-05, "loss": 1.0867, "step": 6595 }, { "epoch": 1.2085114291023973, 
"grad_norm": 0.6776338815689087, "learning_rate": 6.562093939712516e-05, "loss": 0.8681, "step": 6596 }, { "epoch": 1.2086972681657684, "grad_norm": 0.67679762840271, "learning_rate": 6.55934776293895e-05, "loss": 0.8977, "step": 6597 }, { "epoch": 1.2088831072291395, "grad_norm": 0.7879827618598938, "learning_rate": 6.55660188045181e-05, "loss": 0.7244, "step": 6598 }, { "epoch": 1.2090689462925106, "grad_norm": 0.6360406875610352, "learning_rate": 6.553856292485947e-05, "loss": 0.9499, "step": 6599 }, { "epoch": 1.2092547853558817, "grad_norm": 1.2036761045455933, "learning_rate": 6.551110999276207e-05, "loss": 1.0049, "step": 6600 }, { "epoch": 1.209440624419253, "grad_norm": 0.8532909750938416, "learning_rate": 6.548366001057395e-05, "loss": 0.926, "step": 6601 }, { "epoch": 1.209626463482624, "grad_norm": 0.7498572468757629, "learning_rate": 6.5456212980643e-05, "loss": 1.0418, "step": 6602 }, { "epoch": 1.2098123025459953, "grad_norm": 0.7277694940567017, "learning_rate": 6.542876890531684e-05, "loss": 1.0088, "step": 6603 }, { "epoch": 1.2099981416093664, "grad_norm": 0.6551283597946167, "learning_rate": 6.540132778694276e-05, "loss": 0.7892, "step": 6604 }, { "epoch": 1.2101839806727375, "grad_norm": 0.6652863621711731, "learning_rate": 6.537388962786793e-05, "loss": 0.8712, "step": 6605 }, { "epoch": 1.2103698197361086, "grad_norm": 0.7065383791923523, "learning_rate": 6.534645443043911e-05, "loss": 1.0349, "step": 6606 }, { "epoch": 1.2105556587994797, "grad_norm": 0.6703068017959595, "learning_rate": 6.531902219700296e-05, "loss": 0.9831, "step": 6607 }, { "epoch": 1.2107414978628508, "grad_norm": 0.7374162673950195, "learning_rate": 6.52915929299058e-05, "loss": 0.95, "step": 6608 }, { "epoch": 1.210927336926222, "grad_norm": 0.689177393913269, "learning_rate": 6.526416663149368e-05, "loss": 0.7996, "step": 6609 }, { "epoch": 1.211113175989593, "grad_norm": 0.6918917894363403, "learning_rate": 6.523674330411253e-05, "loss": 0.8538, "step": 6610 }, { 
"epoch": 1.2112990150529641, "grad_norm": 0.7495622038841248, "learning_rate": 6.52093229501078e-05, "loss": 1.1756, "step": 6611 }, { "epoch": 1.2114848541163352, "grad_norm": 0.6858820915222168, "learning_rate": 6.518190557182492e-05, "loss": 1.0732, "step": 6612 }, { "epoch": 1.2116706931797063, "grad_norm": 1.2775191068649292, "learning_rate": 6.515449117160886e-05, "loss": 1.497, "step": 6613 }, { "epoch": 1.2118565322430774, "grad_norm": 0.741070568561554, "learning_rate": 6.51270797518045e-05, "loss": 0.8712, "step": 6614 }, { "epoch": 1.2120423713064485, "grad_norm": 0.7354996204376221, "learning_rate": 6.509967131475637e-05, "loss": 1.2991, "step": 6615 }, { "epoch": 1.2122282103698196, "grad_norm": 0.7553620934486389, "learning_rate": 6.507226586280877e-05, "loss": 0.9451, "step": 6616 }, { "epoch": 1.2124140494331908, "grad_norm": 0.7338206171989441, "learning_rate": 6.504486339830581e-05, "loss": 0.9886, "step": 6617 }, { "epoch": 1.212599888496562, "grad_norm": 0.7190319895744324, "learning_rate": 6.501746392359118e-05, "loss": 1.0271, "step": 6618 }, { "epoch": 1.2127857275599332, "grad_norm": 0.6434297561645508, "learning_rate": 6.499006744100853e-05, "loss": 0.9303, "step": 6619 }, { "epoch": 1.2129715666233043, "grad_norm": 0.7200321555137634, "learning_rate": 6.496267395290101e-05, "loss": 1.1825, "step": 6620 }, { "epoch": 1.2131574056866754, "grad_norm": 0.6997349262237549, "learning_rate": 6.493528346161176e-05, "loss": 1.0327, "step": 6621 }, { "epoch": 1.2133432447500465, "grad_norm": 0.7475888729095459, "learning_rate": 6.490789596948345e-05, "loss": 0.904, "step": 6622 }, { "epoch": 1.2135290838134176, "grad_norm": 0.8181599974632263, "learning_rate": 6.488051147885866e-05, "loss": 1.1392, "step": 6623 }, { "epoch": 1.2137149228767887, "grad_norm": 0.7052812576293945, "learning_rate": 6.485312999207967e-05, "loss": 1.1321, "step": 6624 }, { "epoch": 1.2139007619401598, "grad_norm": 0.6384223699569702, "learning_rate": 6.482575151148839e-05, 
"loss": 0.9949, "step": 6625 }, { "epoch": 1.214086601003531, "grad_norm": 0.7056748867034912, "learning_rate": 6.479837603942665e-05, "loss": 0.8554, "step": 6626 }, { "epoch": 1.214272440066902, "grad_norm": 0.7674799561500549, "learning_rate": 6.477100357823583e-05, "loss": 1.0488, "step": 6627 }, { "epoch": 1.2144582791302732, "grad_norm": 0.6128942966461182, "learning_rate": 6.474363413025724e-05, "loss": 0.8124, "step": 6628 }, { "epoch": 1.2146441181936443, "grad_norm": 0.7572517991065979, "learning_rate": 6.47162676978318e-05, "loss": 1.1974, "step": 6629 }, { "epoch": 1.2148299572570154, "grad_norm": 0.7268834114074707, "learning_rate": 6.468890428330026e-05, "loss": 0.9459, "step": 6630 }, { "epoch": 1.2150157963203865, "grad_norm": 0.5437666773796082, "learning_rate": 6.466154388900309e-05, "loss": 0.8954, "step": 6631 }, { "epoch": 1.2152016353837576, "grad_norm": 0.7641748189926147, "learning_rate": 6.46341865172804e-05, "loss": 0.9749, "step": 6632 }, { "epoch": 1.2153874744471287, "grad_norm": 0.6073144674301147, "learning_rate": 6.46068321704722e-05, "loss": 0.9929, "step": 6633 }, { "epoch": 1.2155733135104998, "grad_norm": 0.6687734127044678, "learning_rate": 6.457948085091812e-05, "loss": 1.0394, "step": 6634 }, { "epoch": 1.2157591525738711, "grad_norm": 0.6596730947494507, "learning_rate": 6.45521325609576e-05, "loss": 0.8353, "step": 6635 }, { "epoch": 1.2159449916372422, "grad_norm": 0.6316202282905579, "learning_rate": 6.45247873029298e-05, "loss": 0.8294, "step": 6636 }, { "epoch": 1.2161308307006133, "grad_norm": 0.616620659828186, "learning_rate": 6.449744507917362e-05, "loss": 0.8058, "step": 6637 }, { "epoch": 1.2163166697639844, "grad_norm": 0.7293714880943298, "learning_rate": 6.447010589202769e-05, "loss": 1.0555, "step": 6638 }, { "epoch": 1.2165025088273556, "grad_norm": 0.6899313926696777, "learning_rate": 6.444276974383037e-05, "loss": 0.9136, "step": 6639 }, { "epoch": 1.2166883478907267, "grad_norm": 0.6521621346473694, 
"learning_rate": 6.441543663691984e-05, "loss": 0.9844, "step": 6640 }, { "epoch": 1.2168741869540978, "grad_norm": 0.6059038639068604, "learning_rate": 6.43881065736339e-05, "loss": 0.7697, "step": 6641 }, { "epoch": 1.2170600260174689, "grad_norm": 0.6877594590187073, "learning_rate": 6.436077955631017e-05, "loss": 0.8564, "step": 6642 }, { "epoch": 1.21724586508084, "grad_norm": 0.7149982452392578, "learning_rate": 6.4333455587286e-05, "loss": 0.9599, "step": 6643 }, { "epoch": 1.217431704144211, "grad_norm": 0.7110626101493835, "learning_rate": 6.430613466889846e-05, "loss": 0.9073, "step": 6644 }, { "epoch": 1.2176175432075822, "grad_norm": 0.7270492315292358, "learning_rate": 6.427881680348437e-05, "loss": 0.8962, "step": 6645 }, { "epoch": 1.2178033822709533, "grad_norm": 0.721595048904419, "learning_rate": 6.425150199338026e-05, "loss": 0.7376, "step": 6646 }, { "epoch": 1.2179892213343244, "grad_norm": 0.6731526255607605, "learning_rate": 6.422419024092248e-05, "loss": 0.8957, "step": 6647 }, { "epoch": 1.2181750603976955, "grad_norm": 0.6578767895698547, "learning_rate": 6.419688154844701e-05, "loss": 1.0118, "step": 6648 }, { "epoch": 1.2183608994610666, "grad_norm": 0.7469388246536255, "learning_rate": 6.416957591828968e-05, "loss": 1.0622, "step": 6649 }, { "epoch": 1.218546738524438, "grad_norm": 0.6205630898475647, "learning_rate": 6.414227335278589e-05, "loss": 0.891, "step": 6650 }, { "epoch": 1.2187325775878088, "grad_norm": 0.810269296169281, "learning_rate": 6.411497385427099e-05, "loss": 1.2256, "step": 6651 }, { "epoch": 1.2189184166511802, "grad_norm": 0.6002628803253174, "learning_rate": 6.408767742507995e-05, "loss": 0.8674, "step": 6652 }, { "epoch": 1.2191042557145513, "grad_norm": 0.6940420269966125, "learning_rate": 6.406038406754744e-05, "loss": 0.9959, "step": 6653 }, { "epoch": 1.2192900947779224, "grad_norm": 0.7242604494094849, "learning_rate": 6.403309378400801e-05, "loss": 0.9368, "step": 6654 }, { "epoch": 1.2194759338412935, 
"grad_norm": 0.748961865901947, "learning_rate": 6.400580657679576e-05, "loss": 0.9917, "step": 6655 }, { "epoch": 1.2196617729046646, "grad_norm": 0.6754995584487915, "learning_rate": 6.397852244824472e-05, "loss": 0.9709, "step": 6656 }, { "epoch": 1.2198476119680357, "grad_norm": 0.7186061143875122, "learning_rate": 6.395124140068845e-05, "loss": 1.0488, "step": 6657 }, { "epoch": 1.2200334510314068, "grad_norm": 0.7922396063804626, "learning_rate": 6.392396343646042e-05, "loss": 1.3205, "step": 6658 }, { "epoch": 1.220219290094778, "grad_norm": 0.7937091588973999, "learning_rate": 6.38966885578938e-05, "loss": 0.8431, "step": 6659 }, { "epoch": 1.220405129158149, "grad_norm": 0.6483901739120483, "learning_rate": 6.386941676732139e-05, "loss": 0.986, "step": 6660 }, { "epoch": 1.2205909682215201, "grad_norm": 0.6936187744140625, "learning_rate": 6.384214806707591e-05, "loss": 0.7993, "step": 6661 }, { "epoch": 1.2207768072848912, "grad_norm": 0.6259828805923462, "learning_rate": 6.38148824594896e-05, "loss": 0.894, "step": 6662 }, { "epoch": 1.2209626463482623, "grad_norm": 0.6188904047012329, "learning_rate": 6.378761994689467e-05, "loss": 0.8241, "step": 6663 }, { "epoch": 1.2211484854116335, "grad_norm": 0.7406849265098572, "learning_rate": 6.376036053162281e-05, "loss": 0.9231, "step": 6664 }, { "epoch": 1.2213343244750046, "grad_norm": 0.6560678482055664, "learning_rate": 6.373310421600565e-05, "loss": 1.0361, "step": 6665 }, { "epoch": 1.2215201635383757, "grad_norm": 0.7963076233863831, "learning_rate": 6.370585100237447e-05, "loss": 1.0212, "step": 6666 }, { "epoch": 1.221706002601747, "grad_norm": 0.6689628958702087, "learning_rate": 6.367860089306028e-05, "loss": 0.9403, "step": 6667 }, { "epoch": 1.2218918416651179, "grad_norm": 0.7792874574661255, "learning_rate": 6.365135389039392e-05, "loss": 0.9399, "step": 6668 }, { "epoch": 1.2220776807284892, "grad_norm": 0.7756471633911133, "learning_rate": 6.362410999670577e-05, "loss": 0.9696, "step": 6669 
}, { "epoch": 1.2222635197918603, "grad_norm": 0.7439038753509521, "learning_rate": 6.359686921432613e-05, "loss": 1.1564, "step": 6670 }, { "epoch": 1.2224493588552314, "grad_norm": 0.6198158264160156, "learning_rate": 6.356963154558491e-05, "loss": 0.9367, "step": 6671 }, { "epoch": 1.2226351979186025, "grad_norm": 0.6812259554862976, "learning_rate": 6.354239699281186e-05, "loss": 0.8498, "step": 6672 }, { "epoch": 1.2228210369819736, "grad_norm": 0.6730685234069824, "learning_rate": 6.351516555833639e-05, "loss": 0.8412, "step": 6673 }, { "epoch": 1.2230068760453447, "grad_norm": 0.7179238200187683, "learning_rate": 6.348793724448765e-05, "loss": 0.9143, "step": 6674 }, { "epoch": 1.2231927151087159, "grad_norm": 0.7099092602729797, "learning_rate": 6.34607120535946e-05, "loss": 1.1273, "step": 6675 }, { "epoch": 1.223378554172087, "grad_norm": 0.8993801474571228, "learning_rate": 6.343348998798577e-05, "loss": 1.2519, "step": 6676 }, { "epoch": 1.223564393235458, "grad_norm": 0.7195106744766235, "learning_rate": 6.340627104998962e-05, "loss": 0.8705, "step": 6677 }, { "epoch": 1.2237502322988292, "grad_norm": 0.6673482060432434, "learning_rate": 6.337905524193415e-05, "loss": 0.9225, "step": 6678 }, { "epoch": 1.2239360713622003, "grad_norm": 0.6686134934425354, "learning_rate": 6.335184256614723e-05, "loss": 0.9436, "step": 6679 }, { "epoch": 1.2241219104255714, "grad_norm": 0.7211525440216064, "learning_rate": 6.332463302495645e-05, "loss": 0.9579, "step": 6680 }, { "epoch": 1.2243077494889425, "grad_norm": 0.6387026309967041, "learning_rate": 6.329742662068905e-05, "loss": 0.5192, "step": 6681 }, { "epoch": 1.2244935885523136, "grad_norm": 0.68216472864151, "learning_rate": 6.327022335567212e-05, "loss": 0.9198, "step": 6682 }, { "epoch": 1.2246794276156847, "grad_norm": 0.6553606390953064, "learning_rate": 6.324302323223232e-05, "loss": 0.95, "step": 6683 }, { "epoch": 1.224865266679056, "grad_norm": 0.6823198795318604, "learning_rate": 
6.321582625269624e-05, "loss": 0.9124, "step": 6684 }, { "epoch": 1.2250511057424271, "grad_norm": 0.8079013824462891, "learning_rate": 6.318863241938999e-05, "loss": 1.1514, "step": 6685 }, { "epoch": 1.2252369448057983, "grad_norm": 0.681402862071991, "learning_rate": 6.316144173463958e-05, "loss": 0.9534, "step": 6686 }, { "epoch": 1.2254227838691694, "grad_norm": 0.6464089751243591, "learning_rate": 6.313425420077069e-05, "loss": 0.9807, "step": 6687 }, { "epoch": 1.2256086229325405, "grad_norm": 0.7560293078422546, "learning_rate": 6.31070698201087e-05, "loss": 1.0426, "step": 6688 }, { "epoch": 1.2257944619959116, "grad_norm": 0.6318104267120361, "learning_rate": 6.307988859497878e-05, "loss": 0.8507, "step": 6689 }, { "epoch": 1.2259803010592827, "grad_norm": 0.7459145188331604, "learning_rate": 6.305271052770576e-05, "loss": 1.145, "step": 6690 }, { "epoch": 1.2261661401226538, "grad_norm": 0.7620947957038879, "learning_rate": 6.302553562061431e-05, "loss": 0.863, "step": 6691 }, { "epoch": 1.226351979186025, "grad_norm": 0.8354644775390625, "learning_rate": 6.299836387602867e-05, "loss": 0.9738, "step": 6692 }, { "epoch": 1.226537818249396, "grad_norm": 0.6539764404296875, "learning_rate": 6.297119529627293e-05, "loss": 1.1549, "step": 6693 }, { "epoch": 1.2267236573127671, "grad_norm": 0.7981652617454529, "learning_rate": 6.294402988367094e-05, "loss": 0.9152, "step": 6694 }, { "epoch": 1.2269094963761382, "grad_norm": 0.6764546036720276, "learning_rate": 6.29168676405461e-05, "loss": 0.8726, "step": 6695 }, { "epoch": 1.2270953354395093, "grad_norm": 0.6629418730735779, "learning_rate": 6.288970856922176e-05, "loss": 0.9232, "step": 6696 }, { "epoch": 1.2272811745028804, "grad_norm": 0.8247547149658203, "learning_rate": 6.286255267202081e-05, "loss": 0.8028, "step": 6697 }, { "epoch": 1.2274670135662515, "grad_norm": 0.6946619749069214, "learning_rate": 6.283539995126607e-05, "loss": 0.9486, "step": 6698 }, { "epoch": 1.2276528526296229, "grad_norm": 
0.6491488218307495, "learning_rate": 6.280825040927982e-05, "loss": 0.7698, "step": 6699 }, { "epoch": 1.2278386916929938, "grad_norm": 0.7405893206596375, "learning_rate": 6.27811040483843e-05, "loss": 1.0136, "step": 6700 }, { "epoch": 1.228024530756365, "grad_norm": 2.1106338500976562, "learning_rate": 6.275396087090144e-05, "loss": 1.4144, "step": 6701 }, { "epoch": 1.2282103698197362, "grad_norm": 0.6475285887718201, "learning_rate": 6.272682087915276e-05, "loss": 1.0483, "step": 6702 }, { "epoch": 1.2283962088831073, "grad_norm": 0.7684204578399658, "learning_rate": 6.269968407545968e-05, "loss": 1.1889, "step": 6703 }, { "epoch": 1.2285820479464784, "grad_norm": 0.7069278955459595, "learning_rate": 6.267255046214319e-05, "loss": 0.9133, "step": 6704 }, { "epoch": 1.2287678870098495, "grad_norm": 0.6581943035125732, "learning_rate": 6.264542004152421e-05, "loss": 0.8838, "step": 6705 }, { "epoch": 1.2289537260732206, "grad_norm": 0.6479682326316833, "learning_rate": 6.261829281592314e-05, "loss": 0.9119, "step": 6706 }, { "epoch": 1.2291395651365917, "grad_norm": 0.7059997916221619, "learning_rate": 6.259116878766026e-05, "loss": 1.1096, "step": 6707 }, { "epoch": 1.2293254041999628, "grad_norm": 0.7317528128623962, "learning_rate": 6.256404795905561e-05, "loss": 0.8583, "step": 6708 }, { "epoch": 1.229511243263334, "grad_norm": 0.6883766651153564, "learning_rate": 6.253693033242882e-05, "loss": 0.8437, "step": 6709 }, { "epoch": 1.229697082326705, "grad_norm": 0.6823121905326843, "learning_rate": 6.250981591009936e-05, "loss": 1.1293, "step": 6710 }, { "epoch": 1.2298829213900762, "grad_norm": 0.5986813902854919, "learning_rate": 6.248270469438636e-05, "loss": 0.8529, "step": 6711 }, { "epoch": 1.2300687604534473, "grad_norm": 0.7856054902076721, "learning_rate": 6.245559668760874e-05, "loss": 0.9986, "step": 6712 }, { "epoch": 1.2302545995168184, "grad_norm": 0.5989887118339539, "learning_rate": 6.242849189208504e-05, "loss": 1.002, "step": 6713 }, { 
"epoch": 1.2304404385801895, "grad_norm": 0.6663972735404968, "learning_rate": 6.240139031013363e-05, "loss": 0.9013, "step": 6714 }, { "epoch": 1.2306262776435606, "grad_norm": 0.7706762552261353, "learning_rate": 6.237429194407263e-05, "loss": 1.0902, "step": 6715 }, { "epoch": 1.230812116706932, "grad_norm": 0.9056258797645569, "learning_rate": 6.234719679621969e-05, "loss": 1.3235, "step": 6716 }, { "epoch": 1.2309979557703028, "grad_norm": 0.6869669556617737, "learning_rate": 6.232010486889242e-05, "loss": 1.0833, "step": 6717 }, { "epoch": 1.2311837948336741, "grad_norm": 0.7842996120452881, "learning_rate": 6.2293016164408e-05, "loss": 1.1114, "step": 6718 }, { "epoch": 1.2313696338970452, "grad_norm": 0.6470237970352173, "learning_rate": 6.226593068508344e-05, "loss": 0.9545, "step": 6719 }, { "epoch": 1.2315554729604163, "grad_norm": 0.6029546856880188, "learning_rate": 6.223884843323534e-05, "loss": 1.0115, "step": 6720 }, { "epoch": 1.2317413120237874, "grad_norm": 0.6828630566596985, "learning_rate": 6.221176941118014e-05, "loss": 1.0884, "step": 6721 }, { "epoch": 1.2319271510871586, "grad_norm": 0.7872723937034607, "learning_rate": 6.218469362123403e-05, "loss": 0.9801, "step": 6722 }, { "epoch": 1.2321129901505297, "grad_norm": 0.630489706993103, "learning_rate": 6.215762106571276e-05, "loss": 0.8454, "step": 6723 }, { "epoch": 1.2322988292139008, "grad_norm": 0.7333617806434631, "learning_rate": 6.213055174693196e-05, "loss": 0.825, "step": 6724 }, { "epoch": 1.2324846682772719, "grad_norm": 0.8034706711769104, "learning_rate": 6.21034856672069e-05, "loss": 1.1837, "step": 6725 }, { "epoch": 1.232670507340643, "grad_norm": 0.5724234580993652, "learning_rate": 6.207642282885267e-05, "loss": 1.0768, "step": 6726 }, { "epoch": 1.232856346404014, "grad_norm": 0.6139940619468689, "learning_rate": 6.20493632341839e-05, "loss": 0.8875, "step": 6727 }, { "epoch": 1.2330421854673852, "grad_norm": 0.8388956189155579, "learning_rate": 6.202230688551511e-05, 
"loss": 0.7057, "step": 6728 }, { "epoch": 1.2332280245307563, "grad_norm": 0.7120481133460999, "learning_rate": 6.199525378516056e-05, "loss": 1.0757, "step": 6729 }, { "epoch": 1.2334138635941274, "grad_norm": 0.6384205222129822, "learning_rate": 6.196820393543404e-05, "loss": 0.9849, "step": 6730 }, { "epoch": 1.2334138635941274, "eval_loss": 1.0142569541931152, "eval_runtime": 23.213, "eval_samples_per_second": 47.043, "eval_steps_per_second": 23.521, "step": 6730 }, { "epoch": 1.2335997026574985, "grad_norm": 0.6989933252334595, "learning_rate": 6.194115733864924e-05, "loss": 1.1173, "step": 6731 }, { "epoch": 1.2337855417208696, "grad_norm": 0.7638552784919739, "learning_rate": 6.191411399711949e-05, "loss": 0.9186, "step": 6732 }, { "epoch": 1.233971380784241, "grad_norm": 0.608952522277832, "learning_rate": 6.188707391315793e-05, "loss": 1.0625, "step": 6733 }, { "epoch": 1.234157219847612, "grad_norm": 0.7631494998931885, "learning_rate": 6.186003708907729e-05, "loss": 0.9277, "step": 6734 }, { "epoch": 1.2343430589109832, "grad_norm": 0.6393689513206482, "learning_rate": 6.183300352719008e-05, "loss": 0.8245, "step": 6735 }, { "epoch": 1.2345288979743543, "grad_norm": 0.6334121823310852, "learning_rate": 6.180597322980863e-05, "loss": 0.8125, "step": 6736 }, { "epoch": 1.2347147370377254, "grad_norm": 0.6829781532287598, "learning_rate": 6.177894619924479e-05, "loss": 0.9776, "step": 6737 }, { "epoch": 1.2349005761010965, "grad_norm": 0.6575719118118286, "learning_rate": 6.175192243781031e-05, "loss": 0.9262, "step": 6738 }, { "epoch": 1.2350864151644676, "grad_norm": 0.7077308893203735, "learning_rate": 6.172490194781657e-05, "loss": 0.955, "step": 6739 }, { "epoch": 1.2352722542278387, "grad_norm": 0.6063582301139832, "learning_rate": 6.169788473157468e-05, "loss": 0.7382, "step": 6740 }, { "epoch": 1.2354580932912098, "grad_norm": 0.651313066482544, "learning_rate": 6.167087079139549e-05, "loss": 0.8344, "step": 6741 }, { "epoch": 1.235643932354581, 
"grad_norm": 0.5918038487434387, "learning_rate": 6.164386012958953e-05, "loss": 1.015, "step": 6742 }, { "epoch": 1.235829771417952, "grad_norm": 0.6994732618331909, "learning_rate": 6.161685274846718e-05, "loss": 0.9654, "step": 6743 }, { "epoch": 1.2360156104813231, "grad_norm": 0.6554886698722839, "learning_rate": 6.158984865033833e-05, "loss": 0.866, "step": 6744 }, { "epoch": 1.2362014495446942, "grad_norm": 0.7816451787948608, "learning_rate": 6.156284783751277e-05, "loss": 0.7126, "step": 6745 }, { "epoch": 1.2363872886080653, "grad_norm": 0.6727638244628906, "learning_rate": 6.153585031229987e-05, "loss": 0.9787, "step": 6746 }, { "epoch": 1.2365731276714365, "grad_norm": 0.7645466923713684, "learning_rate": 6.150885607700884e-05, "loss": 1.1703, "step": 6747 }, { "epoch": 1.2367589667348076, "grad_norm": 0.8393552303314209, "learning_rate": 6.148186513394852e-05, "loss": 0.7639, "step": 6748 }, { "epoch": 1.2369448057981787, "grad_norm": 0.6084762215614319, "learning_rate": 6.145487748542753e-05, "loss": 0.7537, "step": 6749 }, { "epoch": 1.23713064486155, "grad_norm": 0.7074626684188843, "learning_rate": 6.142789313375422e-05, "loss": 0.8683, "step": 6750 }, { "epoch": 1.237316483924921, "grad_norm": 0.8581859469413757, "learning_rate": 6.140091208123653e-05, "loss": 0.857, "step": 6751 }, { "epoch": 1.2375023229882922, "grad_norm": 0.6810663938522339, "learning_rate": 6.137393433018228e-05, "loss": 1.166, "step": 6752 }, { "epoch": 1.2376881620516633, "grad_norm": 0.8637286424636841, "learning_rate": 6.134695988289889e-05, "loss": 0.9974, "step": 6753 }, { "epoch": 1.2378740011150344, "grad_norm": 0.6321572661399841, "learning_rate": 6.131998874169358e-05, "loss": 0.6844, "step": 6754 }, { "epoch": 1.2380598401784055, "grad_norm": 0.7431946396827698, "learning_rate": 6.12930209088732e-05, "loss": 0.8477, "step": 6755 }, { "epoch": 1.2382456792417766, "grad_norm": 0.7147607207298279, "learning_rate": 6.12660563867444e-05, "loss": 0.9962, "step": 6756 }, 
{ "epoch": 1.2384315183051477, "grad_norm": 0.7256709337234497, "learning_rate": 6.123909517761355e-05, "loss": 1.1241, "step": 6757 }, { "epoch": 1.2386173573685189, "grad_norm": 0.8592318892478943, "learning_rate": 6.121213728378663e-05, "loss": 0.9181, "step": 6758 }, { "epoch": 1.23880319643189, "grad_norm": 0.693541407585144, "learning_rate": 6.118518270756948e-05, "loss": 0.9073, "step": 6759 }, { "epoch": 1.238989035495261, "grad_norm": 0.6646521091461182, "learning_rate": 6.11582314512675e-05, "loss": 0.8998, "step": 6760 }, { "epoch": 1.2391748745586322, "grad_norm": 0.6509725451469421, "learning_rate": 6.113128351718596e-05, "loss": 0.8526, "step": 6761 }, { "epoch": 1.2393607136220033, "grad_norm": 0.7442356944084167, "learning_rate": 6.11043389076297e-05, "loss": 0.9261, "step": 6762 }, { "epoch": 1.2395465526853744, "grad_norm": 0.693193793296814, "learning_rate": 6.107739762490343e-05, "loss": 1.0336, "step": 6763 }, { "epoch": 1.2397323917487455, "grad_norm": 0.7592474222183228, "learning_rate": 6.105045967131149e-05, "loss": 1.2086, "step": 6764 }, { "epoch": 1.2399182308121168, "grad_norm": 0.7396900653839111, "learning_rate": 6.102352504915787e-05, "loss": 1.1158, "step": 6765 }, { "epoch": 1.2401040698754877, "grad_norm": 0.7751068472862244, "learning_rate": 6.0996593760746445e-05, "loss": 0.9525, "step": 6766 }, { "epoch": 1.240289908938859, "grad_norm": 0.5859838724136353, "learning_rate": 6.0969665808380616e-05, "loss": 0.9032, "step": 6767 }, { "epoch": 1.2404757480022301, "grad_norm": 0.663303017616272, "learning_rate": 6.094274119436363e-05, "loss": 0.8507, "step": 6768 }, { "epoch": 1.2406615870656013, "grad_norm": 0.6255428791046143, "learning_rate": 6.0915819920998394e-05, "loss": 1.1066, "step": 6769 }, { "epoch": 1.2408474261289724, "grad_norm": 0.6422243714332581, "learning_rate": 6.0888901990587544e-05, "loss": 0.946, "step": 6770 }, { "epoch": 1.2410332651923435, "grad_norm": 0.6783014535903931, "learning_rate": 
6.086198740543347e-05, "loss": 0.8705, "step": 6771 }, { "epoch": 1.2412191042557146, "grad_norm": 0.5838634967803955, "learning_rate": 6.083507616783819e-05, "loss": 0.9581, "step": 6772 }, { "epoch": 1.2414049433190857, "grad_norm": 0.9073149561882019, "learning_rate": 6.0808168280103513e-05, "loss": 0.7539, "step": 6773 }, { "epoch": 1.2415907823824568, "grad_norm": 0.6188364624977112, "learning_rate": 6.078126374453086e-05, "loss": 0.9521, "step": 6774 }, { "epoch": 1.241776621445828, "grad_norm": 0.6189103722572327, "learning_rate": 6.075436256342152e-05, "loss": 0.6746, "step": 6775 }, { "epoch": 1.241962460509199, "grad_norm": 0.6246206164360046, "learning_rate": 6.0727464739076334e-05, "loss": 0.8267, "step": 6776 }, { "epoch": 1.2421482995725701, "grad_norm": 0.6606687903404236, "learning_rate": 6.0700570273795984e-05, "loss": 0.7699, "step": 6777 }, { "epoch": 1.2423341386359412, "grad_norm": 0.7769799828529358, "learning_rate": 6.0673679169880824e-05, "loss": 1.0587, "step": 6778 }, { "epoch": 1.2425199776993123, "grad_norm": 0.5887297987937927, "learning_rate": 6.0646791429630844e-05, "loss": 0.8285, "step": 6779 }, { "epoch": 1.2427058167626834, "grad_norm": 2.852731704711914, "learning_rate": 6.061990705534588e-05, "loss": 1.3253, "step": 6780 }, { "epoch": 1.2428916558260545, "grad_norm": 0.8008617162704468, "learning_rate": 6.059302604932535e-05, "loss": 0.9496, "step": 6781 }, { "epoch": 1.2430774948894259, "grad_norm": 0.8640806078910828, "learning_rate": 6.05661484138685e-05, "loss": 1.0751, "step": 6782 }, { "epoch": 1.2432633339527968, "grad_norm": 0.6174668073654175, "learning_rate": 6.053927415127416e-05, "loss": 0.9893, "step": 6783 }, { "epoch": 1.243449173016168, "grad_norm": 0.7041611671447754, "learning_rate": 6.051240326384102e-05, "loss": 0.8107, "step": 6784 }, { "epoch": 1.2436350120795392, "grad_norm": 0.6654350161552429, "learning_rate": 6.0485535753867364e-05, "loss": 0.9742, "step": 6785 }, { "epoch": 1.2438208511429103, 
"grad_norm": 0.7300111651420593, "learning_rate": 6.045867162365122e-05, "loss": 0.7188, "step": 6786 }, { "epoch": 1.2440066902062814, "grad_norm": 0.7048124074935913, "learning_rate": 6.043181087549039e-05, "loss": 1.0385, "step": 6787 }, { "epoch": 1.2441925292696525, "grad_norm": 0.617351770401001, "learning_rate": 6.040495351168227e-05, "loss": 0.9001, "step": 6788 }, { "epoch": 1.2443783683330236, "grad_norm": 0.6402180194854736, "learning_rate": 6.037809953452406e-05, "loss": 0.7944, "step": 6789 }, { "epoch": 1.2445642073963947, "grad_norm": 0.8734924793243408, "learning_rate": 6.035124894631263e-05, "loss": 0.9348, "step": 6790 }, { "epoch": 1.2447500464597658, "grad_norm": 0.5609917640686035, "learning_rate": 6.0324401749344553e-05, "loss": 0.9492, "step": 6791 }, { "epoch": 1.244935885523137, "grad_norm": 0.6634128093719482, "learning_rate": 6.029755794591616e-05, "loss": 0.8949, "step": 6792 }, { "epoch": 1.245121724586508, "grad_norm": 1.507298469543457, "learning_rate": 6.0270717538323426e-05, "loss": 1.4974, "step": 6793 }, { "epoch": 1.2453075636498792, "grad_norm": 0.6983362436294556, "learning_rate": 6.024388052886214e-05, "loss": 0.8417, "step": 6794 }, { "epoch": 1.2454934027132503, "grad_norm": 0.6252066493034363, "learning_rate": 6.021704691982761e-05, "loss": 1.0541, "step": 6795 }, { "epoch": 1.2456792417766214, "grad_norm": 0.5981398224830627, "learning_rate": 6.01902167135151e-05, "loss": 0.6388, "step": 6796 }, { "epoch": 1.2458650808399925, "grad_norm": 0.6744561791419983, "learning_rate": 6.0163389912219346e-05, "loss": 1.0082, "step": 6797 }, { "epoch": 1.2460509199033636, "grad_norm": 0.7297447919845581, "learning_rate": 6.013656651823496e-05, "loss": 0.8587, "step": 6798 }, { "epoch": 1.246236758966735, "grad_norm": 0.7522355914115906, "learning_rate": 6.0109746533856214e-05, "loss": 1.1065, "step": 6799 }, { "epoch": 1.246422598030106, "grad_norm": 0.7894991636276245, "learning_rate": 6.0082929961377035e-05, "loss": 1.1286, "step": 
6800 }, { "epoch": 1.2466084370934771, "grad_norm": 0.6512338519096375, "learning_rate": 6.0056116803091175e-05, "loss": 0.9291, "step": 6801 }, { "epoch": 1.2467942761568482, "grad_norm": 0.5885134935379028, "learning_rate": 6.002930706129193e-05, "loss": 1.0359, "step": 6802 }, { "epoch": 1.2469801152202193, "grad_norm": 0.6664581894874573, "learning_rate": 6.0002500738272474e-05, "loss": 0.8518, "step": 6803 }, { "epoch": 1.2471659542835904, "grad_norm": 0.6249924302101135, "learning_rate": 5.997569783632555e-05, "loss": 0.9104, "step": 6804 }, { "epoch": 1.2473517933469616, "grad_norm": 0.706665575504303, "learning_rate": 5.994889835774369e-05, "loss": 1.1926, "step": 6805 }, { "epoch": 1.2475376324103327, "grad_norm": 0.5533158779144287, "learning_rate": 5.992210230481913e-05, "loss": 0.7645, "step": 6806 }, { "epoch": 1.2477234714737038, "grad_norm": 0.6172327399253845, "learning_rate": 5.989530967984376e-05, "loss": 0.9258, "step": 6807 }, { "epoch": 1.2479093105370749, "grad_norm": 0.6446318626403809, "learning_rate": 5.9868520485109266e-05, "loss": 0.8431, "step": 6808 }, { "epoch": 1.248095149600446, "grad_norm": 0.7662816047668457, "learning_rate": 5.984173472290693e-05, "loss": 1.0121, "step": 6809 }, { "epoch": 1.248280988663817, "grad_norm": 0.9095577001571655, "learning_rate": 5.981495239552783e-05, "loss": 1.0564, "step": 6810 }, { "epoch": 1.2484668277271882, "grad_norm": 0.6445630788803101, "learning_rate": 5.978817350526268e-05, "loss": 0.8561, "step": 6811 }, { "epoch": 1.2486526667905593, "grad_norm": 0.7017901539802551, "learning_rate": 5.976139805440195e-05, "loss": 0.9249, "step": 6812 }, { "epoch": 1.2488385058539304, "grad_norm": 0.7360055446624756, "learning_rate": 5.973462604523584e-05, "loss": 1.0012, "step": 6813 }, { "epoch": 1.2490243449173015, "grad_norm": 0.6232417821884155, "learning_rate": 5.970785748005416e-05, "loss": 0.9935, "step": 6814 }, { "epoch": 1.2492101839806726, "grad_norm": 0.6803663372993469, "learning_rate": 
5.968109236114656e-05, "loss": 0.7987, "step": 6815 }, { "epoch": 1.249396023044044, "grad_norm": 0.6657290458679199, "learning_rate": 5.965433069080224e-05, "loss": 1.0063, "step": 6816 }, { "epoch": 1.249581862107415, "grad_norm": 0.6280941367149353, "learning_rate": 5.962757247131024e-05, "loss": 0.9978, "step": 6817 }, { "epoch": 1.2497677011707862, "grad_norm": 0.638014554977417, "learning_rate": 5.96008177049592e-05, "loss": 0.8691, "step": 6818 }, { "epoch": 1.2499535402341573, "grad_norm": 0.7058728933334351, "learning_rate": 5.957406639403751e-05, "loss": 0.7765, "step": 6819 }, { "epoch": 1.2501393792975284, "grad_norm": 0.7721114158630371, "learning_rate": 5.954731854083333e-05, "loss": 0.782, "step": 6820 }, { "epoch": 1.2503252183608995, "grad_norm": 0.6177173852920532, "learning_rate": 5.95205741476344e-05, "loss": 0.9974, "step": 6821 }, { "epoch": 1.2505110574242706, "grad_norm": 0.6779488921165466, "learning_rate": 5.94938332167283e-05, "loss": 0.8565, "step": 6822 }, { "epoch": 1.2506968964876417, "grad_norm": 0.7669054865837097, "learning_rate": 5.946709575040213e-05, "loss": 1.0469, "step": 6823 }, { "epoch": 1.2508827355510128, "grad_norm": 0.8133144974708557, "learning_rate": 5.944036175094292e-05, "loss": 0.5655, "step": 6824 }, { "epoch": 1.251068574614384, "grad_norm": 0.6289818286895752, "learning_rate": 5.941363122063719e-05, "loss": 0.9136, "step": 6825 }, { "epoch": 1.251254413677755, "grad_norm": 0.660622239112854, "learning_rate": 5.9386904161771295e-05, "loss": 0.9953, "step": 6826 }, { "epoch": 1.2514402527411261, "grad_norm": 0.6116739511489868, "learning_rate": 5.9360180576631285e-05, "loss": 0.7881, "step": 6827 }, { "epoch": 1.2516260918044972, "grad_norm": 0.7629313468933105, "learning_rate": 5.933346046750284e-05, "loss": 0.9287, "step": 6828 }, { "epoch": 1.2518119308678683, "grad_norm": 0.7508540749549866, "learning_rate": 5.930674383667146e-05, "loss": 0.8278, "step": 6829 }, { "epoch": 1.2519977699312395, "grad_norm": 
0.6341012120246887, "learning_rate": 5.928003068642218e-05, "loss": 0.9499, "step": 6830 }, { "epoch": 1.2521836089946108, "grad_norm": 0.8538436889648438, "learning_rate": 5.925332101903994e-05, "loss": 1.1914, "step": 6831 }, { "epoch": 1.2523694480579817, "grad_norm": 0.7098791003227234, "learning_rate": 5.922661483680917e-05, "loss": 0.8967, "step": 6832 }, { "epoch": 1.252555287121353, "grad_norm": 0.7230139374732971, "learning_rate": 5.919991214201415e-05, "loss": 1.1333, "step": 6833 }, { "epoch": 1.252741126184724, "grad_norm": 0.8208269476890564, "learning_rate": 5.917321293693885e-05, "loss": 0.9973, "step": 6834 }, { "epoch": 1.2529269652480952, "grad_norm": 0.6810266375541687, "learning_rate": 5.9146517223866885e-05, "loss": 0.8249, "step": 6835 }, { "epoch": 1.2531128043114663, "grad_norm": 0.6982502937316895, "learning_rate": 5.91198250050816e-05, "loss": 0.8615, "step": 6836 }, { "epoch": 1.2532986433748374, "grad_norm": 0.8237326741218567, "learning_rate": 5.909313628286601e-05, "loss": 1.2216, "step": 6837 }, { "epoch": 1.2534844824382085, "grad_norm": 0.7369891405105591, "learning_rate": 5.906645105950293e-05, "loss": 1.0332, "step": 6838 }, { "epoch": 1.2536703215015796, "grad_norm": 0.8150144815444946, "learning_rate": 5.903976933727473e-05, "loss": 0.9814, "step": 6839 }, { "epoch": 1.2538561605649508, "grad_norm": 0.6736454963684082, "learning_rate": 5.901309111846356e-05, "loss": 0.8444, "step": 6840 }, { "epoch": 1.2540419996283219, "grad_norm": 0.8185814023017883, "learning_rate": 5.898641640535135e-05, "loss": 0.9539, "step": 6841 }, { "epoch": 1.254227838691693, "grad_norm": 0.6659137606620789, "learning_rate": 5.895974520021954e-05, "loss": 0.8065, "step": 6842 }, { "epoch": 1.254413677755064, "grad_norm": 0.6813466548919678, "learning_rate": 5.893307750534944e-05, "loss": 0.8804, "step": 6843 }, { "epoch": 1.2545995168184352, "grad_norm": 0.7067291140556335, "learning_rate": 5.890641332302198e-05, "loss": 1.0552, "step": 6844 }, { 
"epoch": 1.2547853558818063, "grad_norm": 0.7727149128913879, "learning_rate": 5.8879752655517775e-05, "loss": 0.909, "step": 6845 }, { "epoch": 1.2549711949451776, "grad_norm": 0.7835125923156738, "learning_rate": 5.885309550511726e-05, "loss": 0.8913, "step": 6846 }, { "epoch": 1.2551570340085485, "grad_norm": 0.6659790873527527, "learning_rate": 5.8826441874100355e-05, "loss": 0.9017, "step": 6847 }, { "epoch": 1.2553428730719198, "grad_norm": 0.9084991216659546, "learning_rate": 5.879979176474692e-05, "loss": 0.8669, "step": 6848 }, { "epoch": 1.2555287121352907, "grad_norm": 0.6898093819618225, "learning_rate": 5.87731451793363e-05, "loss": 0.8462, "step": 6849 }, { "epoch": 1.255714551198662, "grad_norm": 0.6940835118293762, "learning_rate": 5.8746502120147696e-05, "loss": 0.8334, "step": 6850 }, { "epoch": 1.2559003902620332, "grad_norm": 0.7318105101585388, "learning_rate": 5.8719862589459894e-05, "loss": 0.9161, "step": 6851 }, { "epoch": 1.2560862293254043, "grad_norm": 0.8148001432418823, "learning_rate": 5.869322658955147e-05, "loss": 1.1776, "step": 6852 }, { "epoch": 1.2562720683887754, "grad_norm": 0.6155508756637573, "learning_rate": 5.86665941227007e-05, "loss": 1.0434, "step": 6853 }, { "epoch": 1.2564579074521465, "grad_norm": 0.6666076183319092, "learning_rate": 5.863996519118542e-05, "loss": 0.8688, "step": 6854 }, { "epoch": 1.2566437465155176, "grad_norm": 0.8823888897895813, "learning_rate": 5.861333979728334e-05, "loss": 1.1275, "step": 6855 }, { "epoch": 1.2568295855788887, "grad_norm": 0.7270902395248413, "learning_rate": 5.858671794327173e-05, "loss": 0.9495, "step": 6856 }, { "epoch": 1.2570154246422598, "grad_norm": 0.7535642385482788, "learning_rate": 5.8560099631427655e-05, "loss": 1.0634, "step": 6857 }, { "epoch": 1.257201263705631, "grad_norm": 0.8336496949195862, "learning_rate": 5.85334848640278e-05, "loss": 1.0138, "step": 6858 }, { "epoch": 1.257387102769002, "grad_norm": 0.6126893162727356, "learning_rate": 
5.85068736433486e-05, "loss": 0.778, "step": 6859 }, { "epoch": 1.2575729418323731, "grad_norm": 0.6397730112075806, "learning_rate": 5.848026597166622e-05, "loss": 0.8947, "step": 6860 }, { "epoch": 1.2577587808957442, "grad_norm": 0.7330944538116455, "learning_rate": 5.8453661851256395e-05, "loss": 1.0755, "step": 6861 }, { "epoch": 1.2579446199591153, "grad_norm": 0.824577808380127, "learning_rate": 5.84270612843947e-05, "loss": 0.9779, "step": 6862 }, { "epoch": 1.2581304590224867, "grad_norm": 0.6477912068367004, "learning_rate": 5.840046427335626e-05, "loss": 0.7855, "step": 6863 }, { "epoch": 1.2583162980858575, "grad_norm": 0.6228084564208984, "learning_rate": 5.837387082041605e-05, "loss": 0.9414, "step": 6864 }, { "epoch": 1.2585021371492289, "grad_norm": 0.6834182143211365, "learning_rate": 5.83472809278486e-05, "loss": 1.0634, "step": 6865 }, { "epoch": 1.2586879762125998, "grad_norm": 0.7031686305999756, "learning_rate": 5.832069459792825e-05, "loss": 0.9155, "step": 6866 }, { "epoch": 1.258873815275971, "grad_norm": 0.6353990435600281, "learning_rate": 5.829411183292901e-05, "loss": 0.8913, "step": 6867 }, { "epoch": 1.2590596543393422, "grad_norm": 0.6703015565872192, "learning_rate": 5.826753263512448e-05, "loss": 0.8506, "step": 6868 }, { "epoch": 1.2592454934027133, "grad_norm": 0.6896878480911255, "learning_rate": 5.824095700678811e-05, "loss": 1.0111, "step": 6869 }, { "epoch": 1.2594313324660844, "grad_norm": 0.7479128241539001, "learning_rate": 5.821438495019291e-05, "loss": 1.0428, "step": 6870 }, { "epoch": 1.2596171715294555, "grad_norm": 0.6056257486343384, "learning_rate": 5.818781646761171e-05, "loss": 0.7879, "step": 6871 }, { "epoch": 1.2598030105928266, "grad_norm": 0.5943814516067505, "learning_rate": 5.816125156131691e-05, "loss": 0.9069, "step": 6872 }, { "epoch": 1.2599888496561977, "grad_norm": 0.7249520421028137, "learning_rate": 5.813469023358067e-05, "loss": 0.7768, "step": 6873 }, { "epoch": 1.2601746887195688, "grad_norm": 
0.764151394367218, "learning_rate": 5.8108132486674904e-05, "loss": 0.9072, "step": 6874 }, { "epoch": 1.26036052778294, "grad_norm": 0.671393632888794, "learning_rate": 5.8081578322871066e-05, "loss": 1.0368, "step": 6875 }, { "epoch": 1.260546366846311, "grad_norm": 0.8536176085472107, "learning_rate": 5.805502774444045e-05, "loss": 1.0285, "step": 6876 }, { "epoch": 1.2607322059096822, "grad_norm": 0.6377580761909485, "learning_rate": 5.802848075365395e-05, "loss": 0.8253, "step": 6877 }, { "epoch": 1.2609180449730533, "grad_norm": 0.688213586807251, "learning_rate": 5.8001937352782235e-05, "loss": 1.047, "step": 6878 }, { "epoch": 1.2611038840364244, "grad_norm": 0.6255641579627991, "learning_rate": 5.7975397544095556e-05, "loss": 0.9427, "step": 6879 }, { "epoch": 1.2612897230997957, "grad_norm": 0.6308231353759766, "learning_rate": 5.7948861329863954e-05, "loss": 1.0536, "step": 6880 }, { "epoch": 1.2614755621631666, "grad_norm": 0.67474365234375, "learning_rate": 5.7922328712357164e-05, "loss": 0.9614, "step": 6881 }, { "epoch": 1.261661401226538, "grad_norm": 0.6463435888290405, "learning_rate": 5.789579969384451e-05, "loss": 0.8507, "step": 6882 }, { "epoch": 1.2618472402899088, "grad_norm": 0.832118570804596, "learning_rate": 5.786927427659514e-05, "loss": 1.0154, "step": 6883 }, { "epoch": 1.2620330793532801, "grad_norm": 0.658928394317627, "learning_rate": 5.784275246287776e-05, "loss": 0.7714, "step": 6884 }, { "epoch": 1.2622189184166512, "grad_norm": 0.7069569826126099, "learning_rate": 5.781623425496091e-05, "loss": 1.034, "step": 6885 }, { "epoch": 1.2624047574800223, "grad_norm": 0.7191367745399475, "learning_rate": 5.778971965511268e-05, "loss": 0.9983, "step": 6886 }, { "epoch": 1.2625905965433935, "grad_norm": 0.9764262437820435, "learning_rate": 5.7763208665600955e-05, "loss": 0.9769, "step": 6887 }, { "epoch": 1.2627764356067646, "grad_norm": 0.6354040503501892, "learning_rate": 5.773670128869332e-05, "loss": 0.824, "step": 6888 }, { "epoch": 
1.2629622746701357, "grad_norm": 0.6636117100715637, "learning_rate": 5.771019752665692e-05, "loss": 1.008, "step": 6889 }, { "epoch": 1.2631481137335068, "grad_norm": 0.6330132484436035, "learning_rate": 5.768369738175874e-05, "loss": 0.9138, "step": 6890 }, { "epoch": 1.2633339527968779, "grad_norm": 0.6355640888214111, "learning_rate": 5.765720085626537e-05, "loss": 1.1154, "step": 6891 }, { "epoch": 1.263519791860249, "grad_norm": 0.6977789402008057, "learning_rate": 5.7630707952443165e-05, "loss": 0.9318, "step": 6892 }, { "epoch": 1.26370563092362, "grad_norm": 0.6667346954345703, "learning_rate": 5.7604218672558054e-05, "loss": 0.9054, "step": 6893 }, { "epoch": 1.2638914699869912, "grad_norm": 0.7689474821090698, "learning_rate": 5.757773301887572e-05, "loss": 0.9534, "step": 6894 }, { "epoch": 1.2640773090503623, "grad_norm": 0.7714675068855286, "learning_rate": 5.755125099366163e-05, "loss": 1.107, "step": 6895 }, { "epoch": 1.2642631481137334, "grad_norm": 0.613943338394165, "learning_rate": 5.7524772599180745e-05, "loss": 0.7832, "step": 6896 }, { "epoch": 1.2644489871771047, "grad_norm": 0.6987224817276001, "learning_rate": 5.74982978376979e-05, "loss": 0.9577, "step": 6897 }, { "epoch": 1.2646348262404756, "grad_norm": 0.7213063836097717, "learning_rate": 5.747182671147746e-05, "loss": 1.0495, "step": 6898 }, { "epoch": 1.264820665303847, "grad_norm": 0.7993615865707397, "learning_rate": 5.7445359222783636e-05, "loss": 0.912, "step": 6899 }, { "epoch": 1.265006504367218, "grad_norm": 0.6639682054519653, "learning_rate": 5.7418895373880166e-05, "loss": 0.6514, "step": 6900 }, { "epoch": 1.2651923434305892, "grad_norm": 0.643459677696228, "learning_rate": 5.739243516703061e-05, "loss": 0.7299, "step": 6901 }, { "epoch": 1.2653781824939603, "grad_norm": 0.7396531105041504, "learning_rate": 5.7365978604498194e-05, "loss": 0.9067, "step": 6902 }, { "epoch": 1.2655640215573314, "grad_norm": 0.6802812814712524, "learning_rate": 5.733952568854575e-05, "loss": 
1.0299, "step": 6903 }, { "epoch": 1.2657498606207025, "grad_norm": 0.6861310601234436, "learning_rate": 5.731307642143585e-05, "loss": 1.0433, "step": 6904 }, { "epoch": 1.2659356996840736, "grad_norm": 0.640153169631958, "learning_rate": 5.72866308054308e-05, "loss": 0.8867, "step": 6905 }, { "epoch": 1.2661215387474447, "grad_norm": 0.6789703965187073, "learning_rate": 5.726018884279256e-05, "loss": 1.0454, "step": 6906 }, { "epoch": 1.2663073778108158, "grad_norm": 0.764994204044342, "learning_rate": 5.723375053578271e-05, "loss": 0.8304, "step": 6907 }, { "epoch": 1.266493216874187, "grad_norm": 0.5752626657485962, "learning_rate": 5.7207315886662615e-05, "loss": 0.6994, "step": 6908 }, { "epoch": 1.266679055937558, "grad_norm": 0.5792833566665649, "learning_rate": 5.718088489769331e-05, "loss": 0.856, "step": 6909 }, { "epoch": 1.2668648950009291, "grad_norm": 0.6783587336540222, "learning_rate": 5.715445757113542e-05, "loss": 0.9642, "step": 6910 }, { "epoch": 1.2670507340643002, "grad_norm": 0.7789669036865234, "learning_rate": 5.712803390924941e-05, "loss": 0.9248, "step": 6911 }, { "epoch": 1.2672365731276716, "grad_norm": 0.6985512375831604, "learning_rate": 5.7101613914295296e-05, "loss": 1.1153, "step": 6912 }, { "epoch": 1.2674224121910425, "grad_norm": 0.7611011862754822, "learning_rate": 5.707519758853288e-05, "loss": 0.915, "step": 6913 }, { "epoch": 1.2676082512544138, "grad_norm": 0.6558472514152527, "learning_rate": 5.704878493422156e-05, "loss": 0.7643, "step": 6914 }, { "epoch": 1.2677940903177847, "grad_norm": 0.6821544766426086, "learning_rate": 5.70223759536205e-05, "loss": 1.0508, "step": 6915 }, { "epoch": 1.267979929381156, "grad_norm": 0.7058600783348083, "learning_rate": 5.699597064898854e-05, "loss": 0.9486, "step": 6916 }, { "epoch": 1.268165768444527, "grad_norm": 0.6448752880096436, "learning_rate": 5.696956902258413e-05, "loss": 0.7647, "step": 6917 }, { "epoch": 1.2683516075078982, "grad_norm": 0.7937239408493042, 
"learning_rate": 5.694317107666549e-05, "loss": 0.8255, "step": 6918 }, { "epoch": 1.2685374465712693, "grad_norm": 0.6356651186943054, "learning_rate": 5.691677681349048e-05, "loss": 0.8717, "step": 6919 }, { "epoch": 1.2687232856346404, "grad_norm": 0.6263054013252258, "learning_rate": 5.689038623531664e-05, "loss": 0.8607, "step": 6920 }, { "epoch": 1.2689091246980115, "grad_norm": 0.6370097994804382, "learning_rate": 5.686399934440125e-05, "loss": 0.922, "step": 6921 }, { "epoch": 1.2690949637613826, "grad_norm": 0.5742283463478088, "learning_rate": 5.683761614300123e-05, "loss": 0.7138, "step": 6922 }, { "epoch": 1.2692808028247538, "grad_norm": 0.673902690410614, "learning_rate": 5.6811236633373224e-05, "loss": 1.0157, "step": 6923 }, { "epoch": 1.2694666418881249, "grad_norm": 0.7027066349983215, "learning_rate": 5.678486081777343e-05, "loss": 0.4939, "step": 6924 }, { "epoch": 1.269652480951496, "grad_norm": 0.6953238844871521, "learning_rate": 5.675848869845796e-05, "loss": 0.9613, "step": 6925 }, { "epoch": 1.269838320014867, "grad_norm": 0.6747790575027466, "learning_rate": 5.673212027768236e-05, "loss": 0.8952, "step": 6926 }, { "epoch": 1.2700241590782382, "grad_norm": 0.6629246473312378, "learning_rate": 5.6705755557702056e-05, "loss": 0.7611, "step": 6927 }, { "epoch": 1.2702099981416093, "grad_norm": 0.644935667514801, "learning_rate": 5.6679394540772026e-05, "loss": 1.0273, "step": 6928 }, { "epoch": 1.2703958372049806, "grad_norm": 0.6592975854873657, "learning_rate": 5.6653037229147e-05, "loss": 1.0137, "step": 6929 }, { "epoch": 1.2705816762683515, "grad_norm": 0.749384880065918, "learning_rate": 5.662668362508143e-05, "loss": 0.9143, "step": 6930 }, { "epoch": 1.2707675153317228, "grad_norm": 0.8060128092765808, "learning_rate": 5.6600333730829315e-05, "loss": 0.9492, "step": 6931 }, { "epoch": 1.2709533543950937, "grad_norm": 0.7280310392379761, "learning_rate": 5.657398754864448e-05, "loss": 0.9502, "step": 6932 }, { "epoch": 
1.271139193458465, "grad_norm": 0.7918939590454102, "learning_rate": 5.654764508078032e-05, "loss": 0.8695, "step": 6933 }, { "epoch": 1.2713250325218362, "grad_norm": 0.5838748216629028, "learning_rate": 5.6521306329489976e-05, "loss": 0.7413, "step": 6934 }, { "epoch": 1.2715108715852073, "grad_norm": 0.7349932193756104, "learning_rate": 5.649497129702628e-05, "loss": 1.0556, "step": 6935 }, { "epoch": 1.2716967106485784, "grad_norm": 0.6360459923744202, "learning_rate": 5.6468639985641714e-05, "loss": 0.8109, "step": 6936 }, { "epoch": 1.2718825497119495, "grad_norm": 0.6897345185279846, "learning_rate": 5.64423123975885e-05, "loss": 0.8486, "step": 6937 }, { "epoch": 1.2720683887753206, "grad_norm": 1.3575351238250732, "learning_rate": 5.64159885351184e-05, "loss": 1.4462, "step": 6938 }, { "epoch": 1.2722542278386917, "grad_norm": 0.7821032404899597, "learning_rate": 5.638966840048303e-05, "loss": 0.878, "step": 6939 }, { "epoch": 1.2724400669020628, "grad_norm": 0.6788009405136108, "learning_rate": 5.6363351995933556e-05, "loss": 0.863, "step": 6940 }, { "epoch": 1.272625905965434, "grad_norm": 0.6580929756164551, "learning_rate": 5.633703932372092e-05, "loss": 1.1638, "step": 6941 }, { "epoch": 1.272811745028805, "grad_norm": 0.7011153101921082, "learning_rate": 5.631073038609565e-05, "loss": 0.958, "step": 6942 }, { "epoch": 1.2729975840921761, "grad_norm": 0.6621928215026855, "learning_rate": 5.6284425185308035e-05, "loss": 0.9041, "step": 6943 }, { "epoch": 1.2731834231555472, "grad_norm": 0.6394334435462952, "learning_rate": 5.625812372360805e-05, "loss": 0.8612, "step": 6944 }, { "epoch": 1.2733692622189183, "grad_norm": 0.7178499102592468, "learning_rate": 5.623182600324524e-05, "loss": 1.0288, "step": 6945 }, { "epoch": 1.2735551012822897, "grad_norm": 0.6983404159545898, "learning_rate": 5.620553202646899e-05, "loss": 0.7361, "step": 6946 }, { "epoch": 1.2737409403456605, "grad_norm": 0.6791266798973083, "learning_rate": 5.6179241795528206e-05, 
"loss": 0.995, "step": 6947 }, { "epoch": 1.2739267794090319, "grad_norm": 0.761229932308197, "learning_rate": 5.615295531267156e-05, "loss": 1.0264, "step": 6948 }, { "epoch": 1.274112618472403, "grad_norm": 0.6744269728660583, "learning_rate": 5.612667258014743e-05, "loss": 0.8984, "step": 6949 }, { "epoch": 1.274298457535774, "grad_norm": 0.6982648968696594, "learning_rate": 5.61003936002038e-05, "loss": 0.8923, "step": 6950 }, { "epoch": 1.2744842965991452, "grad_norm": 0.6110215783119202, "learning_rate": 5.6074118375088444e-05, "loss": 0.9046, "step": 6951 }, { "epoch": 1.2746701356625163, "grad_norm": 0.7817128300666809, "learning_rate": 5.604784690704863e-05, "loss": 0.8979, "step": 6952 }, { "epoch": 1.2748559747258874, "grad_norm": 0.6842471361160278, "learning_rate": 5.6021579198331506e-05, "loss": 0.8434, "step": 6953 }, { "epoch": 1.2750418137892585, "grad_norm": 0.7033209800720215, "learning_rate": 5.5995315251183734e-05, "loss": 0.875, "step": 6954 }, { "epoch": 1.2752276528526296, "grad_norm": 0.5664873123168945, "learning_rate": 5.5969055067851774e-05, "loss": 0.7992, "step": 6955 }, { "epoch": 1.2754134919160007, "grad_norm": 0.6672083735466003, "learning_rate": 5.594279865058168e-05, "loss": 0.6178, "step": 6956 }, { "epoch": 1.2755993309793718, "grad_norm": 0.6838880777359009, "learning_rate": 5.591654600161922e-05, "loss": 1.0042, "step": 6957 }, { "epoch": 1.275785170042743, "grad_norm": 0.7843611836433411, "learning_rate": 5.58902971232099e-05, "loss": 0.9151, "step": 6958 }, { "epoch": 1.275971009106114, "grad_norm": 0.7135986685752869, "learning_rate": 5.586405201759876e-05, "loss": 1.1861, "step": 6959 }, { "epoch": 1.2761568481694852, "grad_norm": 0.7255387902259827, "learning_rate": 5.5837810687030677e-05, "loss": 0.9878, "step": 6960 }, { "epoch": 1.2763426872328563, "grad_norm": 0.7054786682128906, "learning_rate": 5.581157313375006e-05, "loss": 0.8925, "step": 6961 }, { "epoch": 1.2765285262962274, "grad_norm": 1.7965220212936401, 
"learning_rate": 5.57853393600011e-05, "loss": 1.269, "step": 6962 }, { "epoch": 1.2767143653595987, "grad_norm": 1.0769907236099243, "learning_rate": 5.575910936802766e-05, "loss": 1.0617, "step": 6963 }, { "epoch": 1.2769002044229696, "grad_norm": 0.7031131386756897, "learning_rate": 5.573288316007317e-05, "loss": 1.3079, "step": 6964 }, { "epoch": 1.277086043486341, "grad_norm": 1.0076839923858643, "learning_rate": 5.5706660738380864e-05, "loss": 0.8868, "step": 6965 }, { "epoch": 1.277271882549712, "grad_norm": 0.681121289730072, "learning_rate": 5.5680442105193596e-05, "loss": 1.0442, "step": 6966 }, { "epoch": 1.2774577216130831, "grad_norm": 0.8337486982345581, "learning_rate": 5.565422726275393e-05, "loss": 1.0483, "step": 6967 }, { "epoch": 1.2776435606764542, "grad_norm": 0.803463339805603, "learning_rate": 5.5628016213304025e-05, "loss": 0.7463, "step": 6968 }, { "epoch": 1.2778293997398253, "grad_norm": 0.7833393216133118, "learning_rate": 5.5601808959085833e-05, "loss": 1.0673, "step": 6969 }, { "epoch": 1.2780152388031965, "grad_norm": 0.7391160726547241, "learning_rate": 5.557560550234082e-05, "loss": 0.7825, "step": 6970 }, { "epoch": 1.2782010778665676, "grad_norm": 0.7240591645240784, "learning_rate": 5.55494058453103e-05, "loss": 1.225, "step": 6971 }, { "epoch": 1.2783869169299387, "grad_norm": 0.6331393122673035, "learning_rate": 5.552320999023521e-05, "loss": 1.0397, "step": 6972 }, { "epoch": 1.2785727559933098, "grad_norm": 0.66135174036026, "learning_rate": 5.549701793935604e-05, "loss": 0.8896, "step": 6973 }, { "epoch": 1.2787585950566809, "grad_norm": 0.6168441772460938, "learning_rate": 5.547082969491317e-05, "loss": 0.8593, "step": 6974 }, { "epoch": 1.278944434120052, "grad_norm": 0.7322728633880615, "learning_rate": 5.544464525914642e-05, "loss": 1.1813, "step": 6975 }, { "epoch": 1.279130273183423, "grad_norm": 0.5877998471260071, "learning_rate": 5.54184646342955e-05, "loss": 0.8917, "step": 6976 }, { "epoch": 1.2793161122467942, 
"grad_norm": 0.6502416133880615, "learning_rate": 5.539228782259962e-05, "loss": 0.8138, "step": 6977 }, { "epoch": 1.2795019513101655, "grad_norm": 0.7527049779891968, "learning_rate": 5.536611482629777e-05, "loss": 1.1709, "step": 6978 }, { "epoch": 1.2796877903735364, "grad_norm": 0.7156680226325989, "learning_rate": 5.533994564762859e-05, "loss": 0.8171, "step": 6979 }, { "epoch": 1.2798736294369077, "grad_norm": 0.7617798447608948, "learning_rate": 5.5313780288830365e-05, "loss": 0.8767, "step": 6980 }, { "epoch": 1.2800594685002786, "grad_norm": 0.7092217206954956, "learning_rate": 5.5287618752141145e-05, "loss": 1.0576, "step": 6981 }, { "epoch": 1.28024530756365, "grad_norm": 0.6281307339668274, "learning_rate": 5.526146103979849e-05, "loss": 0.9587, "step": 6982 }, { "epoch": 1.280431146627021, "grad_norm": 0.6523799300193787, "learning_rate": 5.5235307154039794e-05, "loss": 0.9439, "step": 6983 }, { "epoch": 1.2806169856903922, "grad_norm": 0.6482586860656738, "learning_rate": 5.5209157097101996e-05, "loss": 0.9628, "step": 6984 }, { "epoch": 1.2808028247537633, "grad_norm": 0.7241567373275757, "learning_rate": 5.518301087122179e-05, "loss": 0.9484, "step": 6985 }, { "epoch": 1.2809886638171344, "grad_norm": 0.8451587557792664, "learning_rate": 5.5156868478635584e-05, "loss": 1.1197, "step": 6986 }, { "epoch": 1.2811745028805055, "grad_norm": 0.6632991433143616, "learning_rate": 5.5130729921579283e-05, "loss": 0.8533, "step": 6987 }, { "epoch": 1.2813603419438766, "grad_norm": 0.6490793228149414, "learning_rate": 5.5104595202288667e-05, "loss": 1.086, "step": 6988 }, { "epoch": 1.2815461810072477, "grad_norm": 0.705800473690033, "learning_rate": 5.507846432299901e-05, "loss": 0.8109, "step": 6989 }, { "epoch": 1.2817320200706188, "grad_norm": 0.6534713506698608, "learning_rate": 5.505233728594543e-05, "loss": 0.9947, "step": 6990 }, { "epoch": 1.28191785913399, "grad_norm": 0.7074917554855347, "learning_rate": 5.5026214093362563e-05, "loss": 0.9424, 
"step": 6991 }, { "epoch": 1.282103698197361, "grad_norm": 0.6349173188209534, "learning_rate": 5.500009474748479e-05, "loss": 0.9836, "step": 6992 }, { "epoch": 1.2822895372607321, "grad_norm": 0.8655628561973572, "learning_rate": 5.497397925054617e-05, "loss": 0.8945, "step": 6993 }, { "epoch": 1.2824753763241032, "grad_norm": 0.8330060839653015, "learning_rate": 5.4947867604780426e-05, "loss": 0.9298, "step": 6994 }, { "epoch": 1.2826612153874746, "grad_norm": 0.7203959226608276, "learning_rate": 5.492175981242097e-05, "loss": 1.025, "step": 6995 }, { "epoch": 1.2828470544508455, "grad_norm": 0.6714663505554199, "learning_rate": 5.489565587570078e-05, "loss": 0.8477, "step": 6996 }, { "epoch": 1.2830328935142168, "grad_norm": 0.7011078596115112, "learning_rate": 5.486955579685268e-05, "loss": 0.953, "step": 6997 }, { "epoch": 1.2832187325775877, "grad_norm": 0.6785321831703186, "learning_rate": 5.484345957810897e-05, "loss": 1.0602, "step": 6998 }, { "epoch": 1.283404571640959, "grad_norm": 0.6448400616645813, "learning_rate": 5.4817367221701744e-05, "loss": 0.9111, "step": 6999 }, { "epoch": 1.28359041070433, "grad_norm": 0.7090216279029846, "learning_rate": 5.47912787298628e-05, "loss": 0.8572, "step": 7000 }, { "epoch": 1.2837762497677012, "grad_norm": 0.7504491209983826, "learning_rate": 5.476519410482346e-05, "loss": 0.8119, "step": 7001 }, { "epoch": 1.2839620888310723, "grad_norm": 0.5962739586830139, "learning_rate": 5.473911334881488e-05, "loss": 0.6943, "step": 7002 }, { "epoch": 1.2841479278944434, "grad_norm": 0.8006013035774231, "learning_rate": 5.47130364640677e-05, "loss": 1.0141, "step": 7003 }, { "epoch": 1.2843337669578145, "grad_norm": 0.718031644821167, "learning_rate": 5.4686963452812435e-05, "loss": 1.0099, "step": 7004 }, { "epoch": 1.2845196060211856, "grad_norm": 0.6614823937416077, "learning_rate": 5.466089431727909e-05, "loss": 0.915, "step": 7005 }, { "epoch": 1.2847054450845568, "grad_norm": 0.6416723132133484, "learning_rate": 
5.463482905969743e-05, "loss": 0.9487, "step": 7006 }, { "epoch": 1.2848912841479279, "grad_norm": 0.6111795902252197, "learning_rate": 5.46087676822969e-05, "loss": 0.876, "step": 7007 }, { "epoch": 1.285077123211299, "grad_norm": 0.8280202150344849, "learning_rate": 5.458271018730661e-05, "loss": 1.1683, "step": 7008 }, { "epoch": 1.28526296227467, "grad_norm": 0.7306656241416931, "learning_rate": 5.455665657695524e-05, "loss": 1.0487, "step": 7009 }, { "epoch": 1.2854488013380412, "grad_norm": 0.6812098622322083, "learning_rate": 5.453060685347123e-05, "loss": 0.9802, "step": 7010 }, { "epoch": 1.2856346404014123, "grad_norm": 0.7024182081222534, "learning_rate": 5.4504561019082744e-05, "loss": 0.9556, "step": 7011 }, { "epoch": 1.2858204794647836, "grad_norm": 0.6535548567771912, "learning_rate": 5.447851907601744e-05, "loss": 1.0563, "step": 7012 }, { "epoch": 1.2860063185281545, "grad_norm": 0.6057627201080322, "learning_rate": 5.445248102650279e-05, "loss": 0.831, "step": 7013 }, { "epoch": 1.2861921575915258, "grad_norm": 0.7243484258651733, "learning_rate": 5.4426446872765914e-05, "loss": 1.258, "step": 7014 }, { "epoch": 1.286377996654897, "grad_norm": 0.6408053636550903, "learning_rate": 5.440041661703349e-05, "loss": 0.8989, "step": 7015 }, { "epoch": 1.286563835718268, "grad_norm": 0.6918326020240784, "learning_rate": 5.437439026153204e-05, "loss": 0.8399, "step": 7016 }, { "epoch": 1.2867496747816392, "grad_norm": 0.6725391149520874, "learning_rate": 5.434836780848755e-05, "loss": 1.0227, "step": 7017 }, { "epoch": 1.2869355138450103, "grad_norm": 0.8167325258255005, "learning_rate": 5.432234926012586e-05, "loss": 0.9492, "step": 7018 }, { "epoch": 1.2871213529083814, "grad_norm": 0.6119495630264282, "learning_rate": 5.429633461867234e-05, "loss": 0.9002, "step": 7019 }, { "epoch": 1.2873071919717525, "grad_norm": 0.5399630665779114, "learning_rate": 5.427032388635208e-05, "loss": 0.7491, "step": 7020 }, { "epoch": 1.2874930310351236, "grad_norm": 
0.7530606389045715, "learning_rate": 5.4244317065389905e-05, "loss": 0.9089, "step": 7021 }, { "epoch": 1.2876788700984947, "grad_norm": 0.6615011096000671, "learning_rate": 5.421831415801013e-05, "loss": 0.6998, "step": 7022 }, { "epoch": 1.2878647091618658, "grad_norm": 0.7424286007881165, "learning_rate": 5.41923151664369e-05, "loss": 1.0016, "step": 7023 }, { "epoch": 1.288050548225237, "grad_norm": 0.7346042394638062, "learning_rate": 5.4166320092893966e-05, "loss": 0.8475, "step": 7024 }, { "epoch": 1.288236387288608, "grad_norm": 0.6558862924575806, "learning_rate": 5.414032893960477e-05, "loss": 0.7523, "step": 7025 }, { "epoch": 1.2884222263519791, "grad_norm": 0.6409276723861694, "learning_rate": 5.41143417087923e-05, "loss": 1.0147, "step": 7026 }, { "epoch": 1.2886080654153502, "grad_norm": 0.6892153024673462, "learning_rate": 5.4088358402679376e-05, "loss": 0.9573, "step": 7027 }, { "epoch": 1.2887939044787213, "grad_norm": 0.6437703967094421, "learning_rate": 5.406237902348841e-05, "loss": 0.9284, "step": 7028 }, { "epoch": 1.2889797435420927, "grad_norm": 0.637093186378479, "learning_rate": 5.403640357344142e-05, "loss": 0.7124, "step": 7029 }, { "epoch": 1.2891655826054635, "grad_norm": 0.6690963506698608, "learning_rate": 5.401043205476021e-05, "loss": 0.862, "step": 7030 }, { "epoch": 1.2893514216688349, "grad_norm": 0.756441056728363, "learning_rate": 5.3984464469666094e-05, "loss": 0.9872, "step": 7031 }, { "epoch": 1.289537260732206, "grad_norm": 0.6815423369407654, "learning_rate": 5.3958500820380244e-05, "loss": 1.0385, "step": 7032 }, { "epoch": 1.289723099795577, "grad_norm": 0.7535609602928162, "learning_rate": 5.3932541109123266e-05, "loss": 1.1624, "step": 7033 }, { "epoch": 1.2899089388589482, "grad_norm": 1.311153769493103, "learning_rate": 5.390658533811563e-05, "loss": 1.3717, "step": 7034 }, { "epoch": 1.2900947779223193, "grad_norm": 0.6109252572059631, "learning_rate": 5.3880633509577404e-05, "loss": 0.8028, "step": 7035 }, { 
"epoch": 1.2902806169856904, "grad_norm": 0.7281230092048645, "learning_rate": 5.385468562572823e-05, "loss": 1.0965, "step": 7036 }, { "epoch": 1.2904664560490615, "grad_norm": 0.6285109519958496, "learning_rate": 5.382874168878753e-05, "loss": 0.8566, "step": 7037 }, { "epoch": 1.2906522951124326, "grad_norm": 0.7154156565666199, "learning_rate": 5.3802801700974335e-05, "loss": 1.0242, "step": 7038 }, { "epoch": 1.2908381341758037, "grad_norm": 0.7235013246536255, "learning_rate": 5.377686566450741e-05, "loss": 1.2717, "step": 7039 }, { "epoch": 1.2910239732391748, "grad_norm": 0.7788733243942261, "learning_rate": 5.375093358160501e-05, "loss": 1.1329, "step": 7040 }, { "epoch": 1.291209812302546, "grad_norm": 0.6813734769821167, "learning_rate": 5.372500545448523e-05, "loss": 1.0213, "step": 7041 }, { "epoch": 1.291395651365917, "grad_norm": 0.6252562403678894, "learning_rate": 5.369908128536578e-05, "loss": 0.883, "step": 7042 }, { "epoch": 1.2915814904292882, "grad_norm": 0.7074937224388123, "learning_rate": 5.367316107646394e-05, "loss": 0.8458, "step": 7043 }, { "epoch": 1.2917673294926595, "grad_norm": 0.628176748752594, "learning_rate": 5.3647244829996815e-05, "loss": 0.9994, "step": 7044 }, { "epoch": 1.2919531685560304, "grad_norm": 0.6965638399124146, "learning_rate": 5.3621332548180955e-05, "loss": 0.9085, "step": 7045 }, { "epoch": 1.2921390076194017, "grad_norm": 0.7106294631958008, "learning_rate": 5.35954242332328e-05, "loss": 1.1325, "step": 7046 }, { "epoch": 1.2923248466827726, "grad_norm": 0.623037576675415, "learning_rate": 5.356951988736828e-05, "loss": 0.9159, "step": 7047 }, { "epoch": 1.292510685746144, "grad_norm": 0.6270564794540405, "learning_rate": 5.354361951280307e-05, "loss": 0.9833, "step": 7048 }, { "epoch": 1.292696524809515, "grad_norm": 0.6747012734413147, "learning_rate": 5.351772311175253e-05, "loss": 0.9795, "step": 7049 }, { "epoch": 1.2928823638728861, "grad_norm": 0.7606438994407654, "learning_rate": 
5.349183068643154e-05, "loss": 1.018, "step": 7050 }, { "epoch": 1.2930682029362572, "grad_norm": 0.7697432041168213, "learning_rate": 5.34659422390548e-05, "loss": 1.0029, "step": 7051 }, { "epoch": 1.2932540419996283, "grad_norm": 0.7591094374656677, "learning_rate": 5.3440057771836594e-05, "loss": 1.0419, "step": 7052 }, { "epoch": 1.2934398810629995, "grad_norm": 0.6181617379188538, "learning_rate": 5.3414177286990915e-05, "loss": 0.8152, "step": 7053 }, { "epoch": 1.2936257201263706, "grad_norm": 0.7263724207878113, "learning_rate": 5.3388300786731295e-05, "loss": 0.9096, "step": 7054 }, { "epoch": 1.2938115591897417, "grad_norm": 0.8252272605895996, "learning_rate": 5.336242827327105e-05, "loss": 1.0229, "step": 7055 }, { "epoch": 1.2939973982531128, "grad_norm": 0.7154651880264282, "learning_rate": 5.333655974882316e-05, "loss": 1.0228, "step": 7056 }, { "epoch": 1.2941832373164839, "grad_norm": 0.7881542444229126, "learning_rate": 5.331069521560012e-05, "loss": 1.0496, "step": 7057 }, { "epoch": 1.294369076379855, "grad_norm": 0.6441651582717896, "learning_rate": 5.3284834675814264e-05, "loss": 0.829, "step": 7058 }, { "epoch": 1.294554915443226, "grad_norm": 0.7924808859825134, "learning_rate": 5.325897813167742e-05, "loss": 0.9419, "step": 7059 }, { "epoch": 1.2947407545065972, "grad_norm": 0.7063006162643433, "learning_rate": 5.323312558540125e-05, "loss": 1.0371, "step": 7060 }, { "epoch": 1.2949265935699685, "grad_norm": 0.655428409576416, "learning_rate": 5.320727703919688e-05, "loss": 0.8054, "step": 7061 }, { "epoch": 1.2951124326333394, "grad_norm": 0.6543022394180298, "learning_rate": 5.318143249527523e-05, "loss": 0.8574, "step": 7062 }, { "epoch": 1.2952982716967107, "grad_norm": 0.6304433345794678, "learning_rate": 5.31555919558469e-05, "loss": 0.6496, "step": 7063 }, { "epoch": 1.2954841107600816, "grad_norm": 0.7286331653594971, "learning_rate": 5.3129755423121984e-05, "loss": 1.1228, "step": 7064 }, { "epoch": 1.295669949823453, "grad_norm": 
0.6797538995742798, "learning_rate": 5.310392289931043e-05, "loss": 0.9312, "step": 7065 }, { "epoch": 1.295855788886824, "grad_norm": 0.708625316619873, "learning_rate": 5.307809438662167e-05, "loss": 1.0594, "step": 7066 }, { "epoch": 1.2960416279501952, "grad_norm": 0.7643126249313354, "learning_rate": 5.305226988726491e-05, "loss": 0.9271, "step": 7067 }, { "epoch": 1.2962274670135663, "grad_norm": 0.5726079940795898, "learning_rate": 5.302644940344896e-05, "loss": 0.8641, "step": 7068 }, { "epoch": 1.2964133060769374, "grad_norm": 0.7435072064399719, "learning_rate": 5.300063293738232e-05, "loss": 1.2463, "step": 7069 }, { "epoch": 1.2965991451403085, "grad_norm": 0.7080186009407043, "learning_rate": 5.297482049127317e-05, "loss": 0.8211, "step": 7070 }, { "epoch": 1.2967849842036796, "grad_norm": 0.7281845808029175, "learning_rate": 5.2949012067329216e-05, "loss": 1.0874, "step": 7071 }, { "epoch": 1.2969708232670507, "grad_norm": 0.6594234704971313, "learning_rate": 5.2923207667758e-05, "loss": 0.7659, "step": 7072 }, { "epoch": 1.2971566623304218, "grad_norm": 0.6785892248153687, "learning_rate": 5.289740729476652e-05, "loss": 0.636, "step": 7073 }, { "epoch": 1.297342501393793, "grad_norm": 0.7195522785186768, "learning_rate": 5.287161095056165e-05, "loss": 0.9129, "step": 7074 }, { "epoch": 1.297528340457164, "grad_norm": 0.8044241666793823, "learning_rate": 5.284581863734971e-05, "loss": 0.9998, "step": 7075 }, { "epoch": 1.2977141795205351, "grad_norm": 0.6575559377670288, "learning_rate": 5.2820030357336815e-05, "loss": 1.0605, "step": 7076 }, { "epoch": 1.2979000185839062, "grad_norm": 0.9150829315185547, "learning_rate": 5.279424611272873e-05, "loss": 1.1433, "step": 7077 }, { "epoch": 1.2980858576472776, "grad_norm": 0.6312849521636963, "learning_rate": 5.276846590573077e-05, "loss": 0.9186, "step": 7078 }, { "epoch": 1.2982716967106485, "grad_norm": 0.6796217560768127, "learning_rate": 5.2742689738548026e-05, "loss": 0.8256, "step": 7079 }, { 
"epoch": 1.2984575357740198, "grad_norm": 0.6361716985702515, "learning_rate": 5.271691761338513e-05, "loss": 0.9632, "step": 7080 }, { "epoch": 1.298643374837391, "grad_norm": 0.8558089733123779, "learning_rate": 5.269114953244647e-05, "loss": 1.1539, "step": 7081 }, { "epoch": 1.298829213900762, "grad_norm": 0.6186767220497131, "learning_rate": 5.266538549793603e-05, "loss": 0.8281, "step": 7082 }, { "epoch": 1.299015052964133, "grad_norm": 0.6998415589332581, "learning_rate": 5.2639625512057476e-05, "loss": 1.1099, "step": 7083 }, { "epoch": 1.2992008920275042, "grad_norm": 0.753054141998291, "learning_rate": 5.2613869577014154e-05, "loss": 1.0645, "step": 7084 }, { "epoch": 1.2993867310908753, "grad_norm": 0.7787800431251526, "learning_rate": 5.258811769500893e-05, "loss": 0.9746, "step": 7085 }, { "epoch": 1.2995725701542464, "grad_norm": 0.7633613348007202, "learning_rate": 5.256236986824452e-05, "loss": 0.8247, "step": 7086 }, { "epoch": 1.2997584092176175, "grad_norm": 0.6192800998687744, "learning_rate": 5.253662609892311e-05, "loss": 0.9555, "step": 7087 }, { "epoch": 1.2999442482809886, "grad_norm": 0.8007282018661499, "learning_rate": 5.251088638924668e-05, "loss": 0.944, "step": 7088 }, { "epoch": 1.3001300873443598, "grad_norm": 0.6033044457435608, "learning_rate": 5.2485150741416734e-05, "loss": 0.8671, "step": 7089 }, { "epoch": 1.3003159264077309, "grad_norm": 0.865083634853363, "learning_rate": 5.2459419157634546e-05, "loss": 0.9268, "step": 7090 }, { "epoch": 1.300501765471102, "grad_norm": 0.8101431131362915, "learning_rate": 5.243369164010101e-05, "loss": 1.2567, "step": 7091 }, { "epoch": 1.300687604534473, "grad_norm": 0.6751659512519836, "learning_rate": 5.240796819101661e-05, "loss": 0.9784, "step": 7092 }, { "epoch": 1.3008734435978444, "grad_norm": 0.7335007190704346, "learning_rate": 5.238224881258159e-05, "loss": 0.8022, "step": 7093 }, { "epoch": 1.3010592826612153, "grad_norm": 0.6479902267456055, "learning_rate": 
5.23565335069957e-05, "loss": 0.926, "step": 7094 }, { "epoch": 1.3012451217245866, "grad_norm": 0.8442709445953369, "learning_rate": 5.2330822276458466e-05, "loss": 0.9154, "step": 7095 }, { "epoch": 1.3014309607879575, "grad_norm": 0.5718405246734619, "learning_rate": 5.2305115123169036e-05, "loss": 0.6149, "step": 7096 }, { "epoch": 1.3016167998513288, "grad_norm": 0.6708202362060547, "learning_rate": 5.2279412049326183e-05, "loss": 1.0, "step": 7097 }, { "epoch": 1.3018026389147, "grad_norm": 0.6423093676567078, "learning_rate": 5.225371305712841e-05, "loss": 0.9844, "step": 7098 }, { "epoch": 1.301988477978071, "grad_norm": 0.7350624799728394, "learning_rate": 5.222801814877369e-05, "loss": 1.2815, "step": 7099 }, { "epoch": 1.3021743170414422, "grad_norm": 0.8177218437194824, "learning_rate": 5.220232732645989e-05, "loss": 0.9045, "step": 7100 }, { "epoch": 1.3023601561048133, "grad_norm": 0.6446688175201416, "learning_rate": 5.217664059238428e-05, "loss": 0.8688, "step": 7101 }, { "epoch": 1.3025459951681844, "grad_norm": 0.690547525882721, "learning_rate": 5.2150957948744005e-05, "loss": 1.0068, "step": 7102 }, { "epoch": 1.3027318342315555, "grad_norm": 0.6597347259521484, "learning_rate": 5.212527939773568e-05, "loss": 0.8027, "step": 7103 }, { "epoch": 1.3029176732949266, "grad_norm": 0.656406044960022, "learning_rate": 5.209960494155568e-05, "loss": 0.999, "step": 7104 }, { "epoch": 1.3031035123582977, "grad_norm": 0.7051059007644653, "learning_rate": 5.2073934582400016e-05, "loss": 1.2237, "step": 7105 }, { "epoch": 1.3032893514216688, "grad_norm": 0.8668679594993591, "learning_rate": 5.204826832246429e-05, "loss": 1.1876, "step": 7106 }, { "epoch": 1.30347519048504, "grad_norm": 0.7734251022338867, "learning_rate": 5.2022606163943846e-05, "loss": 0.8181, "step": 7107 }, { "epoch": 1.303661029548411, "grad_norm": 0.6949523091316223, "learning_rate": 5.199694810903355e-05, "loss": 1.1134, "step": 7108 }, { "epoch": 1.3038468686117821, "grad_norm": 
0.6487858891487122, "learning_rate": 5.197129415992803e-05, "loss": 0.811, "step": 7109 }, { "epoch": 1.3040327076751534, "grad_norm": 0.82206791639328, "learning_rate": 5.194564431882156e-05, "loss": 1.0355, "step": 7110 }, { "epoch": 1.3042185467385243, "grad_norm": 0.7289284467697144, "learning_rate": 5.191999858790795e-05, "loss": 0.8096, "step": 7111 }, { "epoch": 1.3044043858018957, "grad_norm": 0.6622374653816223, "learning_rate": 5.1894356969380773e-05, "loss": 0.9715, "step": 7112 }, { "epoch": 1.3045902248652665, "grad_norm": 0.7061960101127625, "learning_rate": 5.1868719465433226e-05, "loss": 1.1033, "step": 7113 }, { "epoch": 1.3047760639286379, "grad_norm": 0.6868487000465393, "learning_rate": 5.184308607825816e-05, "loss": 0.9259, "step": 7114 }, { "epoch": 1.304961902992009, "grad_norm": 0.6865556240081787, "learning_rate": 5.181745681004798e-05, "loss": 1.057, "step": 7115 }, { "epoch": 1.30514774205538, "grad_norm": 0.5928111672401428, "learning_rate": 5.1791831662994906e-05, "loss": 0.9176, "step": 7116 }, { "epoch": 1.3053335811187512, "grad_norm": 0.6681034564971924, "learning_rate": 5.1766210639290615e-05, "loss": 0.993, "step": 7117 }, { "epoch": 1.3055194201821223, "grad_norm": 0.7697048783302307, "learning_rate": 5.174059374112657e-05, "loss": 1.0491, "step": 7118 }, { "epoch": 1.3057052592454934, "grad_norm": 0.682702898979187, "learning_rate": 5.17149809706939e-05, "loss": 0.9057, "step": 7119 }, { "epoch": 1.3058910983088645, "grad_norm": 0.7286297082901001, "learning_rate": 5.1689372330183204e-05, "loss": 1.0486, "step": 7120 }, { "epoch": 1.3060769373722356, "grad_norm": 0.7333587408065796, "learning_rate": 5.1663767821784956e-05, "loss": 1.1101, "step": 7121 }, { "epoch": 1.3062627764356067, "grad_norm": 0.7103135585784912, "learning_rate": 5.163816744768908e-05, "loss": 1.0523, "step": 7122 }, { "epoch": 1.3064486154989778, "grad_norm": 0.7001137137413025, "learning_rate": 5.1612571210085294e-05, "loss": 0.9445, "step": 7123 }, { 
"epoch": 1.306634454562349, "grad_norm": 0.7391849756240845, "learning_rate": 5.158697911116284e-05, "loss": 1.0721, "step": 7124 }, { "epoch": 1.30682029362572, "grad_norm": 0.7259404063224792, "learning_rate": 5.156139115311069e-05, "loss": 0.9623, "step": 7125 }, { "epoch": 1.3070061326890912, "grad_norm": 0.6957717537879944, "learning_rate": 5.153580733811746e-05, "loss": 0.9799, "step": 7126 }, { "epoch": 1.3071919717524625, "grad_norm": 0.7240549921989441, "learning_rate": 5.151022766837137e-05, "loss": 1.1996, "step": 7127 }, { "epoch": 1.3073778108158334, "grad_norm": 0.7606905698776245, "learning_rate": 5.148465214606033e-05, "loss": 0.8597, "step": 7128 }, { "epoch": 1.3075636498792047, "grad_norm": 0.5958693027496338, "learning_rate": 5.145908077337183e-05, "loss": 0.9355, "step": 7129 }, { "epoch": 1.3077494889425758, "grad_norm": 1.5879743099212646, "learning_rate": 5.1433513552493086e-05, "loss": 1.204, "step": 7130 }, { "epoch": 1.307935328005947, "grad_norm": 0.6255626082420349, "learning_rate": 5.1407950485610856e-05, "loss": 0.8538, "step": 7131 }, { "epoch": 1.308121167069318, "grad_norm": 0.7526214718818665, "learning_rate": 5.1382391574911646e-05, "loss": 1.0025, "step": 7132 }, { "epoch": 1.3083070061326891, "grad_norm": 0.7632701396942139, "learning_rate": 5.135683682258159e-05, "loss": 0.6711, "step": 7133 }, { "epoch": 1.3084928451960602, "grad_norm": 0.7910258769989014, "learning_rate": 5.1331286230806384e-05, "loss": 0.9427, "step": 7134 }, { "epoch": 1.3086786842594313, "grad_norm": 0.753558874130249, "learning_rate": 5.130573980177149e-05, "loss": 0.7711, "step": 7135 }, { "epoch": 1.3088645233228025, "grad_norm": 0.8264387845993042, "learning_rate": 5.128019753766188e-05, "loss": 1.1235, "step": 7136 }, { "epoch": 1.3090503623861736, "grad_norm": 0.7292147278785706, "learning_rate": 5.12546594406623e-05, "loss": 1.0792, "step": 7137 }, { "epoch": 1.3092362014495447, "grad_norm": 0.7105168104171753, "learning_rate": 
5.122912551295702e-05, "loss": 1.074, "step": 7138 }, { "epoch": 1.3094220405129158, "grad_norm": 0.7290421724319458, "learning_rate": 5.120359575673004e-05, "loss": 1.1185, "step": 7139 }, { "epoch": 1.3096078795762869, "grad_norm": 0.588783323764801, "learning_rate": 5.1178070174164985e-05, "loss": 0.8375, "step": 7140 }, { "epoch": 1.309793718639658, "grad_norm": 0.692352294921875, "learning_rate": 5.115254876744509e-05, "loss": 1.0739, "step": 7141 }, { "epoch": 1.309979557703029, "grad_norm": 0.8384915590286255, "learning_rate": 5.1127031538753326e-05, "loss": 0.8474, "step": 7142 }, { "epoch": 1.3101653967664002, "grad_norm": 0.532124936580658, "learning_rate": 5.110151849027214e-05, "loss": 0.6912, "step": 7143 }, { "epoch": 1.3103512358297715, "grad_norm": 0.7722443342208862, "learning_rate": 5.10760096241838e-05, "loss": 0.9938, "step": 7144 }, { "epoch": 1.3105370748931424, "grad_norm": 0.7080844044685364, "learning_rate": 5.1050504942670054e-05, "loss": 1.0352, "step": 7145 }, { "epoch": 1.3107229139565137, "grad_norm": 0.6958823204040527, "learning_rate": 5.10250044479124e-05, "loss": 1.1444, "step": 7146 }, { "epoch": 1.3109087530198849, "grad_norm": 0.6618379950523376, "learning_rate": 5.099950814209202e-05, "loss": 0.7356, "step": 7147 }, { "epoch": 1.311094592083256, "grad_norm": 0.7903035879135132, "learning_rate": 5.0974016027389583e-05, "loss": 1.1493, "step": 7148 }, { "epoch": 1.311280431146627, "grad_norm": 0.6709766983985901, "learning_rate": 5.094852810598554e-05, "loss": 1.0465, "step": 7149 }, { "epoch": 1.3114662702099982, "grad_norm": 1.7481948137283325, "learning_rate": 5.092304438005986e-05, "loss": 1.3618, "step": 7150 }, { "epoch": 1.3116521092733693, "grad_norm": 0.707204282283783, "learning_rate": 5.089756485179231e-05, "loss": 1.0906, "step": 7151 }, { "epoch": 1.3118379483367404, "grad_norm": 0.6673077940940857, "learning_rate": 5.087208952336211e-05, "loss": 1.0719, "step": 7152 }, { "epoch": 1.3120237874001115, "grad_norm": 
0.812818706035614, "learning_rate": 5.084661839694829e-05, "loss": 1.0051, "step": 7153 }, { "epoch": 1.3122096264634826, "grad_norm": 0.7848548889160156, "learning_rate": 5.082115147472942e-05, "loss": 0.9807, "step": 7154 }, { "epoch": 1.3123954655268537, "grad_norm": 0.9212191104888916, "learning_rate": 5.079568875888381e-05, "loss": 0.8138, "step": 7155 }, { "epoch": 1.3125813045902248, "grad_norm": 0.6669211983680725, "learning_rate": 5.077023025158922e-05, "loss": 0.9154, "step": 7156 }, { "epoch": 1.312767143653596, "grad_norm": 0.9354314208030701, "learning_rate": 5.074477595502326e-05, "loss": 1.0927, "step": 7157 }, { "epoch": 1.312952982716967, "grad_norm": 0.6543704271316528, "learning_rate": 5.07193258713631e-05, "loss": 0.962, "step": 7158 }, { "epoch": 1.3131388217803384, "grad_norm": 0.9563025832176208, "learning_rate": 5.0693880002785456e-05, "loss": 0.8167, "step": 7159 }, { "epoch": 1.3133246608437092, "grad_norm": 0.6064286231994629, "learning_rate": 5.066843835146684e-05, "loss": 0.9669, "step": 7160 }, { "epoch": 1.3135104999070806, "grad_norm": 0.7774001955986023, "learning_rate": 5.064300091958334e-05, "loss": 1.1365, "step": 7161 }, { "epoch": 1.3136963389704515, "grad_norm": 0.6404112577438354, "learning_rate": 5.061756770931061e-05, "loss": 0.8782, "step": 7162 }, { "epoch": 1.3138821780338228, "grad_norm": 0.6021904945373535, "learning_rate": 5.059213872282407e-05, "loss": 0.8356, "step": 7163 }, { "epoch": 1.314068017097194, "grad_norm": 0.6289204955101013, "learning_rate": 5.056671396229866e-05, "loss": 0.8957, "step": 7164 }, { "epoch": 1.314253856160565, "grad_norm": 0.7028182744979858, "learning_rate": 5.054129342990909e-05, "loss": 0.9555, "step": 7165 }, { "epoch": 1.314439695223936, "grad_norm": 0.5707007050514221, "learning_rate": 5.0515877127829536e-05, "loss": 0.9037, "step": 7166 }, { "epoch": 1.3146255342873072, "grad_norm": 1.013501763343811, "learning_rate": 5.049046505823396e-05, "loss": 1.1444, "step": 7167 }, { "epoch": 
1.3148113733506783, "grad_norm": 0.5975896716117859, "learning_rate": 5.0465057223295965e-05, "loss": 0.9372, "step": 7168 }, { "epoch": 1.3149972124140494, "grad_norm": 0.7597402334213257, "learning_rate": 5.043965362518863e-05, "loss": 1.2744, "step": 7169 }, { "epoch": 1.3151830514774205, "grad_norm": 0.6761762499809265, "learning_rate": 5.041425426608485e-05, "loss": 1.2111, "step": 7170 }, { "epoch": 1.3153688905407916, "grad_norm": 0.6263930201530457, "learning_rate": 5.038885914815705e-05, "loss": 1.016, "step": 7171 }, { "epoch": 1.3155547296041628, "grad_norm": 0.6915165781974792, "learning_rate": 5.036346827357741e-05, "loss": 0.869, "step": 7172 }, { "epoch": 1.3157405686675339, "grad_norm": 0.6968695521354675, "learning_rate": 5.033808164451757e-05, "loss": 0.9908, "step": 7173 }, { "epoch": 1.315926407730905, "grad_norm": 0.7895475625991821, "learning_rate": 5.031269926314892e-05, "loss": 0.9928, "step": 7174 }, { "epoch": 1.316112246794276, "grad_norm": 0.6815813779830933, "learning_rate": 5.0287321131642553e-05, "loss": 0.7665, "step": 7175 }, { "epoch": 1.3162980858576474, "grad_norm": 0.8258618712425232, "learning_rate": 5.0261947252168995e-05, "loss": 0.9358, "step": 7176 }, { "epoch": 1.3164839249210183, "grad_norm": 0.7376391291618347, "learning_rate": 5.023657762689864e-05, "loss": 0.7476, "step": 7177 }, { "epoch": 1.3166697639843896, "grad_norm": 0.6556116938591003, "learning_rate": 5.021121225800129e-05, "loss": 0.6715, "step": 7178 }, { "epoch": 1.3168556030477605, "grad_norm": 0.8220980167388916, "learning_rate": 5.018585114764662e-05, "loss": 1.163, "step": 7179 }, { "epoch": 1.3170414421111318, "grad_norm": 0.8304170966148376, "learning_rate": 5.0160494298003733e-05, "loss": 0.9653, "step": 7180 }, { "epoch": 1.317227281174503, "grad_norm": 0.672730565071106, "learning_rate": 5.0135141711241476e-05, "loss": 0.872, "step": 7181 }, { "epoch": 1.317413120237874, "grad_norm": 0.6483734846115112, "learning_rate": 5.010979338952837e-05, 
"loss": 0.9913, "step": 7182 }, { "epoch": 1.3175989593012452, "grad_norm": 0.7637404203414917, "learning_rate": 5.00844493350324e-05, "loss": 1.0753, "step": 7183 }, { "epoch": 1.3177847983646163, "grad_norm": 0.8622449040412903, "learning_rate": 5.005910954992139e-05, "loss": 1.1496, "step": 7184 }, { "epoch": 1.3179706374279874, "grad_norm": 0.6058513522148132, "learning_rate": 5.0033774036362654e-05, "loss": 0.9682, "step": 7185 }, { "epoch": 1.3181564764913585, "grad_norm": 0.6887873411178589, "learning_rate": 5.0008442796523257e-05, "loss": 1.0055, "step": 7186 }, { "epoch": 1.3183423155547296, "grad_norm": 0.6620786786079407, "learning_rate": 4.998311583256976e-05, "loss": 0.8581, "step": 7187 }, { "epoch": 1.3185281546181007, "grad_norm": 0.6699737906455994, "learning_rate": 4.995779314666848e-05, "loss": 0.9666, "step": 7188 }, { "epoch": 1.3187139936814718, "grad_norm": 0.6416510343551636, "learning_rate": 4.993247474098532e-05, "loss": 0.9825, "step": 7189 }, { "epoch": 1.318899832744843, "grad_norm": 0.6661123633384705, "learning_rate": 4.9907160617685786e-05, "loss": 0.6764, "step": 7190 }, { "epoch": 1.319085671808214, "grad_norm": 0.7946585416793823, "learning_rate": 4.98818507789351e-05, "loss": 0.9549, "step": 7191 }, { "epoch": 1.3192715108715851, "grad_norm": 0.7565823793411255, "learning_rate": 4.9856545226898e-05, "loss": 1.103, "step": 7192 }, { "epoch": 1.3194573499349564, "grad_norm": 0.7292872071266174, "learning_rate": 4.983124396373899e-05, "loss": 1.0208, "step": 7193 }, { "epoch": 1.3196431889983273, "grad_norm": 0.6677314639091492, "learning_rate": 4.980594699162209e-05, "loss": 1.0857, "step": 7194 }, { "epoch": 1.3198290280616987, "grad_norm": 0.67109614610672, "learning_rate": 4.9780654312711015e-05, "loss": 1.1204, "step": 7195 }, { "epoch": 1.3200148671250698, "grad_norm": 0.8349084854125977, "learning_rate": 4.975536592916916e-05, "loss": 1.1697, "step": 7196 }, { "epoch": 1.3202007061884409, "grad_norm": 0.7957627773284912, 
"learning_rate": 4.973008184315941e-05, "loss": 0.9635, "step": 7197 }, { "epoch": 1.320386545251812, "grad_norm": 0.6814358234405518, "learning_rate": 4.970480205684441e-05, "loss": 1.1606, "step": 7198 }, { "epoch": 1.320572384315183, "grad_norm": 0.6425552368164062, "learning_rate": 4.967952657238639e-05, "loss": 0.8352, "step": 7199 }, { "epoch": 1.3207582233785542, "grad_norm": 0.8170636296272278, "learning_rate": 4.965425539194726e-05, "loss": 0.9484, "step": 7200 }, { "epoch": 1.3209440624419253, "grad_norm": 0.659493088722229, "learning_rate": 4.9628988517688434e-05, "loss": 0.7609, "step": 7201 }, { "epoch": 1.3211299015052964, "grad_norm": 0.705998957157135, "learning_rate": 4.9603725951771094e-05, "loss": 1.082, "step": 7202 }, { "epoch": 1.3213157405686675, "grad_norm": 0.7331377863883972, "learning_rate": 4.957846769635602e-05, "loss": 0.9501, "step": 7203 }, { "epoch": 1.3215015796320386, "grad_norm": 0.6525837779045105, "learning_rate": 4.9553213753603554e-05, "loss": 1.0953, "step": 7204 }, { "epoch": 1.3216874186954097, "grad_norm": 0.7075824737548828, "learning_rate": 4.9527964125673776e-05, "loss": 1.0025, "step": 7205 }, { "epoch": 1.3218732577587808, "grad_norm": 0.5878762006759644, "learning_rate": 4.9502718814726276e-05, "loss": 0.8849, "step": 7206 }, { "epoch": 1.322059096822152, "grad_norm": 0.6121537089347839, "learning_rate": 4.947747782292041e-05, "loss": 0.9235, "step": 7207 }, { "epoch": 1.322244935885523, "grad_norm": 0.6786346435546875, "learning_rate": 4.945224115241503e-05, "loss": 0.8459, "step": 7208 }, { "epoch": 1.3224307749488942, "grad_norm": 0.8413114547729492, "learning_rate": 4.94270088053687e-05, "loss": 0.9905, "step": 7209 }, { "epoch": 1.3226166140122655, "grad_norm": 0.6944953799247742, "learning_rate": 4.9401780783939656e-05, "loss": 1.0481, "step": 7210 }, { "epoch": 1.3228024530756364, "grad_norm": 0.6693060398101807, "learning_rate": 4.9376557090285614e-05, "loss": 0.9468, "step": 7211 }, { "epoch": 
1.3229882921390077, "grad_norm": 0.6698997616767883, "learning_rate": 4.93513377265641e-05, "loss": 1.0794, "step": 7212 }, { "epoch": 1.3231741312023788, "grad_norm": 0.8010924458503723, "learning_rate": 4.932612269493208e-05, "loss": 0.995, "step": 7213 }, { "epoch": 1.32335997026575, "grad_norm": 0.699874997138977, "learning_rate": 4.930091199754633e-05, "loss": 0.9846, "step": 7214 }, { "epoch": 1.323545809329121, "grad_norm": 0.6646803617477417, "learning_rate": 4.927570563656315e-05, "loss": 0.741, "step": 7215 }, { "epoch": 1.3237316483924921, "grad_norm": 0.7552995085716248, "learning_rate": 4.925050361413849e-05, "loss": 1.048, "step": 7216 }, { "epoch": 1.3239174874558632, "grad_norm": 0.6564279198646545, "learning_rate": 4.922530593242798e-05, "loss": 0.9939, "step": 7217 }, { "epoch": 1.3241033265192343, "grad_norm": 0.641197144985199, "learning_rate": 4.920011259358675e-05, "loss": 0.7629, "step": 7218 }, { "epoch": 1.3242891655826055, "grad_norm": 0.6285445094108582, "learning_rate": 4.917492359976973e-05, "loss": 0.9564, "step": 7219 }, { "epoch": 1.3244750046459766, "grad_norm": 0.6213375329971313, "learning_rate": 4.914973895313128e-05, "loss": 0.8366, "step": 7220 }, { "epoch": 1.3246608437093477, "grad_norm": 0.7667694091796875, "learning_rate": 4.9124558655825626e-05, "loss": 0.9716, "step": 7221 }, { "epoch": 1.3248466827727188, "grad_norm": 0.8510693311691284, "learning_rate": 4.909938271000638e-05, "loss": 0.9444, "step": 7222 }, { "epoch": 1.3250325218360899, "grad_norm": 0.6002587080001831, "learning_rate": 4.907421111782694e-05, "loss": 0.8624, "step": 7223 }, { "epoch": 1.325218360899461, "grad_norm": 0.8189764022827148, "learning_rate": 4.9049043881440315e-05, "loss": 0.8753, "step": 7224 }, { "epoch": 1.3254041999628323, "grad_norm": 0.673364520072937, "learning_rate": 4.9023881002999064e-05, "loss": 0.9, "step": 7225 }, { "epoch": 1.3255900390262032, "grad_norm": 0.9202880859375, "learning_rate": 4.899872248465548e-05, "loss": 1.0446, 
"step": 7226 }, { "epoch": 1.3257758780895745, "grad_norm": 0.6275797486305237, "learning_rate": 4.897356832856135e-05, "loss": 0.8593, "step": 7227 }, { "epoch": 1.3259617171529454, "grad_norm": 0.7230714559555054, "learning_rate": 4.89484185368682e-05, "loss": 0.8682, "step": 7228 }, { "epoch": 1.3261475562163167, "grad_norm": 0.6627464294433594, "learning_rate": 4.8923273111727155e-05, "loss": 0.8705, "step": 7229 }, { "epoch": 1.3263333952796879, "grad_norm": 0.7997526526451111, "learning_rate": 4.889813205528895e-05, "loss": 0.8549, "step": 7230 }, { "epoch": 1.326519234343059, "grad_norm": 0.7096716165542603, "learning_rate": 4.887299536970399e-05, "loss": 0.9922, "step": 7231 }, { "epoch": 1.32670507340643, "grad_norm": 0.7961505651473999, "learning_rate": 4.8847863057122204e-05, "loss": 1.1905, "step": 7232 }, { "epoch": 1.3268909124698012, "grad_norm": 0.7194200754165649, "learning_rate": 4.8822735119693275e-05, "loss": 1.1028, "step": 7233 }, { "epoch": 1.3270767515331723, "grad_norm": 0.715369462966919, "learning_rate": 4.879761155956639e-05, "loss": 0.7564, "step": 7234 }, { "epoch": 1.3272625905965434, "grad_norm": 0.7175028324127197, "learning_rate": 4.877249237889049e-05, "loss": 1.0886, "step": 7235 }, { "epoch": 1.3274484296599145, "grad_norm": 0.6594383120536804, "learning_rate": 4.874737757981399e-05, "loss": 0.9096, "step": 7236 }, { "epoch": 1.3276342687232856, "grad_norm": 0.67848140001297, "learning_rate": 4.872226716448506e-05, "loss": 1.2045, "step": 7237 }, { "epoch": 1.3278201077866567, "grad_norm": 0.6310969591140747, "learning_rate": 4.869716113505148e-05, "loss": 0.7236, "step": 7238 }, { "epoch": 1.3280059468500278, "grad_norm": 0.5502176284790039, "learning_rate": 4.867205949366055e-05, "loss": 0.6613, "step": 7239 }, { "epoch": 1.328191785913399, "grad_norm": 0.5823808908462524, "learning_rate": 4.8646962242459345e-05, "loss": 0.9206, "step": 7240 }, { "epoch": 1.32837762497677, "grad_norm": 0.7270284295082092, "learning_rate": 
4.8621869383594406e-05, "loss": 1.1637, "step": 7241 }, { "epoch": 1.3285634640401414, "grad_norm": 0.7297576665878296, "learning_rate": 4.8596780919212034e-05, "loss": 0.895, "step": 7242 }, { "epoch": 1.3287493031035122, "grad_norm": 0.6301212310791016, "learning_rate": 4.857169685145808e-05, "loss": 0.9788, "step": 7243 }, { "epoch": 1.3289351421668836, "grad_norm": 0.6360543966293335, "learning_rate": 4.854661718247805e-05, "loss": 1.2043, "step": 7244 }, { "epoch": 1.3291209812302545, "grad_norm": 0.8445108532905579, "learning_rate": 4.852154191441709e-05, "loss": 1.1311, "step": 7245 }, { "epoch": 1.3293068202936258, "grad_norm": 0.7138316035270691, "learning_rate": 4.8496471049419866e-05, "loss": 1.1371, "step": 7246 }, { "epoch": 1.329492659356997, "grad_norm": 0.6623853445053101, "learning_rate": 4.847140458963083e-05, "loss": 0.9835, "step": 7247 }, { "epoch": 1.329678498420368, "grad_norm": 0.6935982704162598, "learning_rate": 4.84463425371939e-05, "loss": 0.9587, "step": 7248 }, { "epoch": 1.3298643374837391, "grad_norm": 0.8760302066802979, "learning_rate": 4.8421284894252737e-05, "loss": 1.1497, "step": 7249 }, { "epoch": 1.3300501765471102, "grad_norm": 0.6761434078216553, "learning_rate": 4.839623166295053e-05, "loss": 0.7356, "step": 7250 }, { "epoch": 1.3302360156104813, "grad_norm": 0.6983981728553772, "learning_rate": 4.837118284543015e-05, "loss": 0.933, "step": 7251 }, { "epoch": 1.3304218546738524, "grad_norm": 0.7780141830444336, "learning_rate": 4.834613844383412e-05, "loss": 1.2209, "step": 7252 }, { "epoch": 1.3306076937372235, "grad_norm": 0.7106468677520752, "learning_rate": 4.832109846030448e-05, "loss": 1.0441, "step": 7253 }, { "epoch": 1.3307935328005946, "grad_norm": 0.8280313611030579, "learning_rate": 4.8296062896983005e-05, "loss": 1.0316, "step": 7254 }, { "epoch": 1.3309793718639658, "grad_norm": 0.7968834042549133, "learning_rate": 4.8271031756011e-05, "loss": 0.8459, "step": 7255 }, { "epoch": 1.3311652109273369, 
"grad_norm": 0.716829776763916, "learning_rate": 4.824600503952943e-05, "loss": 0.8083, "step": 7256 }, { "epoch": 1.331351049990708, "grad_norm": 0.6207119822502136, "learning_rate": 4.822098274967895e-05, "loss": 0.9451, "step": 7257 }, { "epoch": 1.331536889054079, "grad_norm": 0.8303805589675903, "learning_rate": 4.8195964888599685e-05, "loss": 1.814, "step": 7258 }, { "epoch": 1.3317227281174504, "grad_norm": 0.6936972141265869, "learning_rate": 4.817095145843151e-05, "loss": 0.8158, "step": 7259 }, { "epoch": 1.3319085671808213, "grad_norm": 0.840417206287384, "learning_rate": 4.814594246131387e-05, "loss": 1.052, "step": 7260 }, { "epoch": 1.3320944062441926, "grad_norm": 0.6627435684204102, "learning_rate": 4.812093789938589e-05, "loss": 0.8051, "step": 7261 }, { "epoch": 1.3322802453075637, "grad_norm": 0.8406475186347961, "learning_rate": 4.809593777478617e-05, "loss": 1.1079, "step": 7262 }, { "epoch": 1.3324660843709348, "grad_norm": 0.6551578044891357, "learning_rate": 4.807094208965308e-05, "loss": 0.6396, "step": 7263 }, { "epoch": 1.332651923434306, "grad_norm": 0.7471421957015991, "learning_rate": 4.804595084612458e-05, "loss": 0.5304, "step": 7264 }, { "epoch": 1.332837762497677, "grad_norm": 0.8838663697242737, "learning_rate": 4.8020964046338145e-05, "loss": 1.1116, "step": 7265 }, { "epoch": 1.3330236015610482, "grad_norm": 0.8529516458511353, "learning_rate": 4.7995981692431044e-05, "loss": 1.0741, "step": 7266 }, { "epoch": 1.3332094406244193, "grad_norm": 0.7180158495903015, "learning_rate": 4.797100378653998e-05, "loss": 0.8825, "step": 7267 }, { "epoch": 1.3333952796877904, "grad_norm": 0.8214242458343506, "learning_rate": 4.794603033080146e-05, "loss": 0.9276, "step": 7268 }, { "epoch": 1.3335811187511615, "grad_norm": 0.7666495442390442, "learning_rate": 4.7921061327351425e-05, "loss": 0.8363, "step": 7269 }, { "epoch": 1.3337669578145326, "grad_norm": 0.7022069096565247, "learning_rate": 4.789609677832557e-05, "loss": 0.8717, "step": 
7270 }, { "epoch": 1.3339527968779037, "grad_norm": 0.7741091847419739, "learning_rate": 4.787113668585921e-05, "loss": 0.9096, "step": 7271 }, { "epoch": 1.3341386359412748, "grad_norm": 0.9301775097846985, "learning_rate": 4.784618105208716e-05, "loss": 1.0445, "step": 7272 }, { "epoch": 1.334324475004646, "grad_norm": 0.6503124833106995, "learning_rate": 4.782122987914395e-05, "loss": 0.9453, "step": 7273 }, { "epoch": 1.3345103140680172, "grad_norm": 0.6455255746841431, "learning_rate": 4.779628316916374e-05, "loss": 0.7312, "step": 7274 }, { "epoch": 1.3346961531313881, "grad_norm": 0.7131131887435913, "learning_rate": 4.777134092428028e-05, "loss": 1.0174, "step": 7275 }, { "epoch": 1.3348819921947594, "grad_norm": 0.703403651714325, "learning_rate": 4.774640314662688e-05, "loss": 0.9192, "step": 7276 }, { "epoch": 1.3350678312581303, "grad_norm": 0.7222033143043518, "learning_rate": 4.772146983833655e-05, "loss": 0.8505, "step": 7277 }, { "epoch": 1.3352536703215017, "grad_norm": 0.6918283104896545, "learning_rate": 4.769654100154194e-05, "loss": 0.9286, "step": 7278 }, { "epoch": 1.3354395093848728, "grad_norm": 0.8968819975852966, "learning_rate": 4.767161663837517e-05, "loss": 0.9518, "step": 7279 }, { "epoch": 1.3356253484482439, "grad_norm": 0.6316736936569214, "learning_rate": 4.764669675096818e-05, "loss": 0.865, "step": 7280 }, { "epoch": 1.335811187511615, "grad_norm": 0.701098620891571, "learning_rate": 4.7621781341452324e-05, "loss": 1.0819, "step": 7281 }, { "epoch": 1.335997026574986, "grad_norm": 0.8178427815437317, "learning_rate": 4.759687041195874e-05, "loss": 0.8916, "step": 7282 }, { "epoch": 1.3361828656383572, "grad_norm": 0.6333885788917542, "learning_rate": 4.757196396461806e-05, "loss": 0.9393, "step": 7283 }, { "epoch": 1.3363687047017283, "grad_norm": 0.7877507209777832, "learning_rate": 4.7547062001560614e-05, "loss": 1.1123, "step": 7284 }, { "epoch": 1.3365545437650994, "grad_norm": 0.5933407545089722, "learning_rate": 
4.752216452491636e-05, "loss": 0.8123, "step": 7285 }, { "epoch": 1.3367403828284705, "grad_norm": 0.7034826278686523, "learning_rate": 4.7497271536814744e-05, "loss": 0.9516, "step": 7286 }, { "epoch": 1.3369262218918416, "grad_norm": 0.7386837005615234, "learning_rate": 4.747238303938498e-05, "loss": 0.6906, "step": 7287 }, { "epoch": 1.3371120609552127, "grad_norm": 0.64260333776474, "learning_rate": 4.7447499034755815e-05, "loss": 0.8486, "step": 7288 }, { "epoch": 1.3372979000185838, "grad_norm": 0.9089800119400024, "learning_rate": 4.742261952505568e-05, "loss": 1.0577, "step": 7289 }, { "epoch": 1.337483739081955, "grad_norm": 0.5729821920394897, "learning_rate": 4.73977445124125e-05, "loss": 0.7278, "step": 7290 }, { "epoch": 1.3376695781453263, "grad_norm": 0.6363554000854492, "learning_rate": 4.7372873998953905e-05, "loss": 1.052, "step": 7291 }, { "epoch": 1.3378554172086972, "grad_norm": 0.5810597538948059, "learning_rate": 4.734800798680719e-05, "loss": 0.7631, "step": 7292 }, { "epoch": 1.3380412562720685, "grad_norm": 0.779283881187439, "learning_rate": 4.73231464780991e-05, "loss": 0.8645, "step": 7293 }, { "epoch": 1.3382270953354394, "grad_norm": 0.7239099740982056, "learning_rate": 4.729828947495619e-05, "loss": 1.0195, "step": 7294 }, { "epoch": 1.3384129343988107, "grad_norm": 0.7280988693237305, "learning_rate": 4.727343697950444e-05, "loss": 0.9562, "step": 7295 }, { "epoch": 1.3385987734621818, "grad_norm": 0.7767034769058228, "learning_rate": 4.7248588993869626e-05, "loss": 0.895, "step": 7296 }, { "epoch": 1.338784612525553, "grad_norm": 0.7110447883605957, "learning_rate": 4.7223745520176956e-05, "loss": 1.0914, "step": 7297 }, { "epoch": 1.338970451588924, "grad_norm": 0.7331178188323975, "learning_rate": 4.7198906560551405e-05, "loss": 1.2067, "step": 7298 }, { "epoch": 1.3391562906522951, "grad_norm": 0.7957321405410767, "learning_rate": 4.717407211711753e-05, "loss": 1.0934, "step": 7299 }, { "epoch": 1.3393421297156662, "grad_norm": 
0.5885205864906311, "learning_rate": 4.71492421919994e-05, "loss": 0.913, "step": 7300 }, { "epoch": 1.3395279687790373, "grad_norm": 0.7285014986991882, "learning_rate": 4.7124416787320814e-05, "loss": 1.1694, "step": 7301 }, { "epoch": 1.3397138078424085, "grad_norm": 0.7373610734939575, "learning_rate": 4.7099595905205176e-05, "loss": 0.9373, "step": 7302 }, { "epoch": 1.3398996469057796, "grad_norm": 0.7105932831764221, "learning_rate": 4.7074779547775395e-05, "loss": 0.8162, "step": 7303 }, { "epoch": 1.3400854859691507, "grad_norm": 0.6319199204444885, "learning_rate": 4.70499677171541e-05, "loss": 0.9777, "step": 7304 }, { "epoch": 1.3402713250325218, "grad_norm": 0.5829048752784729, "learning_rate": 4.702516041546351e-05, "loss": 0.9162, "step": 7305 }, { "epoch": 1.3404571640958929, "grad_norm": 0.7003679871559143, "learning_rate": 4.700035764482549e-05, "loss": 0.9545, "step": 7306 }, { "epoch": 1.340643003159264, "grad_norm": 0.8310719728469849, "learning_rate": 4.697555940736138e-05, "loss": 1.0496, "step": 7307 }, { "epoch": 1.3408288422226353, "grad_norm": 0.6480697393417358, "learning_rate": 4.6950765705192315e-05, "loss": 0.9615, "step": 7308 }, { "epoch": 1.3410146812860062, "grad_norm": 1.5479086637496948, "learning_rate": 4.6925976540438874e-05, "loss": 1.5403, "step": 7309 }, { "epoch": 1.3412005203493775, "grad_norm": 0.6437709331512451, "learning_rate": 4.690119191522141e-05, "loss": 0.9907, "step": 7310 }, { "epoch": 1.3413863594127486, "grad_norm": 0.6723558306694031, "learning_rate": 4.687641183165971e-05, "loss": 0.8375, "step": 7311 }, { "epoch": 1.3415721984761197, "grad_norm": 0.7868484854698181, "learning_rate": 4.6851636291873346e-05, "loss": 0.9949, "step": 7312 }, { "epoch": 1.3417580375394909, "grad_norm": 0.5984890460968018, "learning_rate": 4.682686529798143e-05, "loss": 0.9826, "step": 7313 }, { "epoch": 1.341943876602862, "grad_norm": 0.7483819127082825, "learning_rate": 4.6802098852102593e-05, "loss": 1.0567, "step": 7314 }, { 
"epoch": 1.342129715666233, "grad_norm": 0.7057022452354431, "learning_rate": 4.6777336956355263e-05, "loss": 0.9281, "step": 7315 }, { "epoch": 1.3423155547296042, "grad_norm": 0.7427000403404236, "learning_rate": 4.6752579612857305e-05, "loss": 0.8853, "step": 7316 }, { "epoch": 1.3425013937929753, "grad_norm": 0.5980744361877441, "learning_rate": 4.672782682372628e-05, "loss": 0.6722, "step": 7317 }, { "epoch": 1.3426872328563464, "grad_norm": 0.833619236946106, "learning_rate": 4.6703078591079374e-05, "loss": 1.0043, "step": 7318 }, { "epoch": 1.3428730719197175, "grad_norm": 0.6096035838127136, "learning_rate": 4.667833491703335e-05, "loss": 0.9514, "step": 7319 }, { "epoch": 1.3430589109830886, "grad_norm": 0.5686629414558411, "learning_rate": 4.6653595803704606e-05, "loss": 0.7055, "step": 7320 }, { "epoch": 1.3432447500464597, "grad_norm": 0.8507375121116638, "learning_rate": 4.662886125320909e-05, "loss": 1.0759, "step": 7321 }, { "epoch": 1.3434305891098308, "grad_norm": 0.6726318597793579, "learning_rate": 4.660413126766244e-05, "loss": 0.66, "step": 7322 }, { "epoch": 1.343616428173202, "grad_norm": 0.6763295531272888, "learning_rate": 4.657940584917983e-05, "loss": 1.0332, "step": 7323 }, { "epoch": 1.343802267236573, "grad_norm": 0.7093154191970825, "learning_rate": 4.655468499987612e-05, "loss": 1.0181, "step": 7324 }, { "epoch": 1.3439881062999444, "grad_norm": 0.7465590238571167, "learning_rate": 4.652996872186567e-05, "loss": 0.8733, "step": 7325 }, { "epoch": 1.3441739453633152, "grad_norm": 0.7101390361785889, "learning_rate": 4.650525701726256e-05, "loss": 0.8824, "step": 7326 }, { "epoch": 1.3443597844266866, "grad_norm": 0.6595645546913147, "learning_rate": 4.648054988818048e-05, "loss": 0.9001, "step": 7327 }, { "epoch": 1.3445456234900577, "grad_norm": 0.6411036849021912, "learning_rate": 4.6455847336732593e-05, "loss": 0.8999, "step": 7328 }, { "epoch": 1.3447314625534288, "grad_norm": 0.6612711548805237, "learning_rate": 
4.6431149365031855e-05, "loss": 1.0747, "step": 7329 }, { "epoch": 1.3449173016168, "grad_norm": 0.7382752299308777, "learning_rate": 4.6406455975190645e-05, "loss": 0.899, "step": 7330 }, { "epoch": 1.345103140680171, "grad_norm": 0.8373868465423584, "learning_rate": 4.638176716932109e-05, "loss": 1.1021, "step": 7331 }, { "epoch": 1.3452889797435421, "grad_norm": 0.5916196703910828, "learning_rate": 4.635708294953487e-05, "loss": 0.6864, "step": 7332 }, { "epoch": 1.3454748188069132, "grad_norm": 0.6688162088394165, "learning_rate": 4.633240331794328e-05, "loss": 1.0403, "step": 7333 }, { "epoch": 1.3456606578702843, "grad_norm": 0.8366694450378418, "learning_rate": 4.630772827665727e-05, "loss": 0.9681, "step": 7334 }, { "epoch": 1.3458464969336554, "grad_norm": 0.7941098809242249, "learning_rate": 4.628305782778727e-05, "loss": 0.9695, "step": 7335 }, { "epoch": 1.3460323359970265, "grad_norm": 0.6894522309303284, "learning_rate": 4.625839197344346e-05, "loss": 0.7946, "step": 7336 }, { "epoch": 1.3462181750603976, "grad_norm": 0.6391467452049255, "learning_rate": 4.62337307157355e-05, "loss": 0.8445, "step": 7337 }, { "epoch": 1.3464040141237688, "grad_norm": 0.6794978976249695, "learning_rate": 4.62090740567728e-05, "loss": 0.9014, "step": 7338 }, { "epoch": 1.3465898531871399, "grad_norm": 0.6898692846298218, "learning_rate": 4.618442199866421e-05, "loss": 0.8583, "step": 7339 }, { "epoch": 1.3467756922505112, "grad_norm": 1.0231659412384033, "learning_rate": 4.615977454351832e-05, "loss": 1.1721, "step": 7340 }, { "epoch": 1.346961531313882, "grad_norm": 0.6773861646652222, "learning_rate": 4.613513169344331e-05, "loss": 0.8633, "step": 7341 }, { "epoch": 1.3471473703772534, "grad_norm": 0.7153002619743347, "learning_rate": 4.6110493450546866e-05, "loss": 0.777, "step": 7342 }, { "epoch": 1.3473332094406243, "grad_norm": 0.7849166989326477, "learning_rate": 4.608585981693643e-05, "loss": 0.866, "step": 7343 }, { "epoch": 1.3475190485039956, "grad_norm": 
0.6351103782653809, "learning_rate": 4.606123079471889e-05, "loss": 1.0098, "step": 7344 }, { "epoch": 1.3477048875673667, "grad_norm": 0.6069339513778687, "learning_rate": 4.6036606386000844e-05, "loss": 1.0055, "step": 7345 }, { "epoch": 1.3478907266307378, "grad_norm": 0.7701793313026428, "learning_rate": 4.60119865928885e-05, "loss": 0.7519, "step": 7346 }, { "epoch": 1.348076565694109, "grad_norm": 0.6905173063278198, "learning_rate": 4.598737141748766e-05, "loss": 1.008, "step": 7347 }, { "epoch": 1.34826240475748, "grad_norm": 0.6580630540847778, "learning_rate": 4.5962760861903644e-05, "loss": 0.8601, "step": 7348 }, { "epoch": 1.3484482438208512, "grad_norm": 0.6992924213409424, "learning_rate": 4.593815492824147e-05, "loss": 0.9277, "step": 7349 }, { "epoch": 1.3486340828842223, "grad_norm": 0.7928568124771118, "learning_rate": 4.59135536186058e-05, "loss": 0.9202, "step": 7350 }, { "epoch": 1.3488199219475934, "grad_norm": 0.910819947719574, "learning_rate": 4.5888956935100745e-05, "loss": 1.0964, "step": 7351 }, { "epoch": 1.3490057610109645, "grad_norm": 0.6011480689048767, "learning_rate": 4.586436487983018e-05, "loss": 0.7151, "step": 7352 }, { "epoch": 1.3491916000743356, "grad_norm": 0.67581707239151, "learning_rate": 4.583977745489746e-05, "loss": 0.8922, "step": 7353 }, { "epoch": 1.3493774391377067, "grad_norm": 0.6327905654907227, "learning_rate": 4.5815194662405635e-05, "loss": 0.8786, "step": 7354 }, { "epoch": 1.3495632782010778, "grad_norm": 0.6018652319908142, "learning_rate": 4.579061650445735e-05, "loss": 0.9744, "step": 7355 }, { "epoch": 1.349749117264449, "grad_norm": 0.9206721186637878, "learning_rate": 4.576604298315476e-05, "loss": 0.8008, "step": 7356 }, { "epoch": 1.3499349563278202, "grad_norm": 0.6685934662818909, "learning_rate": 4.5741474100599756e-05, "loss": 0.8541, "step": 7357 }, { "epoch": 1.3501207953911911, "grad_norm": 0.7667933702468872, "learning_rate": 4.571690985889372e-05, "loss": 0.9087, "step": 7358 }, { 
"epoch": 1.3503066344545624, "grad_norm": 0.6506810784339905, "learning_rate": 4.569235026013773e-05, "loss": 0.8424, "step": 7359 }, { "epoch": 1.3504924735179333, "grad_norm": 0.8654145002365112, "learning_rate": 4.566779530643237e-05, "loss": 0.8636, "step": 7360 }, { "epoch": 1.3506783125813047, "grad_norm": 0.7595291137695312, "learning_rate": 4.56432449998779e-05, "loss": 1.2996, "step": 7361 }, { "epoch": 1.3508641516446758, "grad_norm": 0.6884271502494812, "learning_rate": 4.561869934257417e-05, "loss": 1.0624, "step": 7362 }, { "epoch": 1.3510499907080469, "grad_norm": 0.6496449708938599, "learning_rate": 4.559415833662061e-05, "loss": 0.9485, "step": 7363 }, { "epoch": 1.351235829771418, "grad_norm": 0.7063813805580139, "learning_rate": 4.556962198411631e-05, "loss": 1.0707, "step": 7364 }, { "epoch": 1.351421668834789, "grad_norm": 0.7109731435775757, "learning_rate": 4.5545090287159854e-05, "loss": 1.2407, "step": 7365 }, { "epoch": 1.3516075078981602, "grad_norm": 0.7253541350364685, "learning_rate": 4.552056324784954e-05, "loss": 1.0913, "step": 7366 }, { "epoch": 1.3517933469615313, "grad_norm": 0.7168422937393188, "learning_rate": 4.5496040868283177e-05, "loss": 0.8611, "step": 7367 }, { "epoch": 1.3519791860249024, "grad_norm": 0.7601984143257141, "learning_rate": 4.5471523150558225e-05, "loss": 1.1782, "step": 7368 }, { "epoch": 1.3521650250882735, "grad_norm": 0.7291967272758484, "learning_rate": 4.5447010096771794e-05, "loss": 1.1117, "step": 7369 }, { "epoch": 1.3523508641516446, "grad_norm": 0.7122645378112793, "learning_rate": 4.542250170902045e-05, "loss": 1.0315, "step": 7370 }, { "epoch": 1.3525367032150157, "grad_norm": 0.800542414188385, "learning_rate": 4.5397997989400544e-05, "loss": 0.9357, "step": 7371 }, { "epoch": 1.3527225422783868, "grad_norm": 0.6745566129684448, "learning_rate": 4.5373498940007834e-05, "loss": 1.1384, "step": 7372 }, { "epoch": 1.352908381341758, "grad_norm": 0.6796730160713196, "learning_rate": 
4.534900456293786e-05, "loss": 0.8074, "step": 7373 }, { "epoch": 1.3530942204051293, "grad_norm": 0.7584159970283508, "learning_rate": 4.532451486028559e-05, "loss": 1.0569, "step": 7374 }, { "epoch": 1.3532800594685002, "grad_norm": 0.6289095878601074, "learning_rate": 4.530002983414575e-05, "loss": 0.7696, "step": 7375 }, { "epoch": 1.3534658985318715, "grad_norm": 0.6373142600059509, "learning_rate": 4.527554948661258e-05, "loss": 1.1354, "step": 7376 }, { "epoch": 1.3536517375952426, "grad_norm": 0.7420815229415894, "learning_rate": 4.525107381977993e-05, "loss": 0.947, "step": 7377 }, { "epoch": 1.3538375766586137, "grad_norm": 0.6928457617759705, "learning_rate": 4.522660283574132e-05, "loss": 0.8624, "step": 7378 }, { "epoch": 1.3540234157219848, "grad_norm": 0.7249012589454651, "learning_rate": 4.520213653658969e-05, "loss": 0.8162, "step": 7379 }, { "epoch": 1.354209254785356, "grad_norm": 0.6938693523406982, "learning_rate": 4.517767492441781e-05, "loss": 0.9954, "step": 7380 }, { "epoch": 1.354395093848727, "grad_norm": 0.9390332698822021, "learning_rate": 4.515321800131783e-05, "loss": 0.988, "step": 7381 }, { "epoch": 1.3545809329120981, "grad_norm": 0.5386117100715637, "learning_rate": 4.512876576938167e-05, "loss": 0.6488, "step": 7382 }, { "epoch": 1.3547667719754692, "grad_norm": 0.615216076374054, "learning_rate": 4.5104318230700784e-05, "loss": 0.7681, "step": 7383 }, { "epoch": 1.3549526110388403, "grad_norm": 0.8385409116744995, "learning_rate": 4.507987538736618e-05, "loss": 1.171, "step": 7384 }, { "epoch": 1.3551384501022115, "grad_norm": 0.7255057692527771, "learning_rate": 4.5055437241468565e-05, "loss": 0.958, "step": 7385 }, { "epoch": 1.3553242891655826, "grad_norm": 0.7279541492462158, "learning_rate": 4.5031003795098116e-05, "loss": 0.9985, "step": 7386 }, { "epoch": 1.3555101282289537, "grad_norm": 0.6622303128242493, "learning_rate": 4.5006575050344754e-05, "loss": 1.1728, "step": 7387 }, { "epoch": 1.3556959672923248, "grad_norm": 
0.7048065662384033, "learning_rate": 4.498215100929786e-05, "loss": 0.9017, "step": 7388 }, { "epoch": 1.3558818063556959, "grad_norm": 0.6078386902809143, "learning_rate": 4.4957731674046474e-05, "loss": 0.9736, "step": 7389 }, { "epoch": 1.356067645419067, "grad_norm": 0.7244036197662354, "learning_rate": 4.493331704667928e-05, "loss": 0.943, "step": 7390 }, { "epoch": 1.3562534844824383, "grad_norm": 0.7068463563919067, "learning_rate": 4.490890712928448e-05, "loss": 0.8434, "step": 7391 }, { "epoch": 1.3564393235458092, "grad_norm": 0.7234067320823669, "learning_rate": 4.488450192394997e-05, "loss": 1.2149, "step": 7392 }, { "epoch": 1.3566251626091805, "grad_norm": 0.6822995543479919, "learning_rate": 4.486010143276309e-05, "loss": 0.8057, "step": 7393 }, { "epoch": 1.3568110016725516, "grad_norm": 0.6011866331100464, "learning_rate": 4.4835705657810946e-05, "loss": 0.9052, "step": 7394 }, { "epoch": 1.3569968407359227, "grad_norm": 0.6928417086601257, "learning_rate": 4.4811314601180076e-05, "loss": 1.017, "step": 7395 }, { "epoch": 1.3571826797992939, "grad_norm": 0.6541099548339844, "learning_rate": 4.4786928264956764e-05, "loss": 0.7197, "step": 7396 }, { "epoch": 1.357368518862665, "grad_norm": 0.7261728644371033, "learning_rate": 4.476254665122683e-05, "loss": 0.9874, "step": 7397 }, { "epoch": 1.357554357926036, "grad_norm": 0.7361965775489807, "learning_rate": 4.473816976207563e-05, "loss": 0.9259, "step": 7398 }, { "epoch": 1.3577401969894072, "grad_norm": 0.672639787197113, "learning_rate": 4.471379759958825e-05, "loss": 0.8932, "step": 7399 }, { "epoch": 1.3579260360527783, "grad_norm": 0.7823107838630676, "learning_rate": 4.468943016584921e-05, "loss": 1.0944, "step": 7400 }, { "epoch": 1.3581118751161494, "grad_norm": 0.7286608219146729, "learning_rate": 4.466506746294279e-05, "loss": 1.1196, "step": 7401 }, { "epoch": 1.3582977141795205, "grad_norm": 0.9249753355979919, "learning_rate": 4.46407094929527e-05, "loss": 0.989, "step": 7402 }, { 
"epoch": 1.3584835532428916, "grad_norm": 0.7424798607826233, "learning_rate": 4.4616356257962375e-05, "loss": 0.8082, "step": 7403 }, { "epoch": 1.3586693923062627, "grad_norm": 0.5924530029296875, "learning_rate": 4.459200776005484e-05, "loss": 0.7535, "step": 7404 }, { "epoch": 1.3588552313696338, "grad_norm": 0.763904333114624, "learning_rate": 4.45676640013126e-05, "loss": 0.895, "step": 7405 }, { "epoch": 1.3590410704330051, "grad_norm": 0.6832556128501892, "learning_rate": 4.4543324983817855e-05, "loss": 0.8652, "step": 7406 }, { "epoch": 1.359226909496376, "grad_norm": 0.7321500182151794, "learning_rate": 4.451899070965239e-05, "loss": 0.9739, "step": 7407 }, { "epoch": 1.3594127485597474, "grad_norm": 0.7232654094696045, "learning_rate": 4.449466118089759e-05, "loss": 0.96, "step": 7408 }, { "epoch": 1.3595985876231182, "grad_norm": 0.7119808793067932, "learning_rate": 4.447033639963435e-05, "loss": 1.087, "step": 7409 }, { "epoch": 1.3597844266864896, "grad_norm": 0.7229185104370117, "learning_rate": 4.444601636794326e-05, "loss": 0.8998, "step": 7410 }, { "epoch": 1.3599702657498607, "grad_norm": 0.8261592388153076, "learning_rate": 4.4421701087904474e-05, "loss": 0.8719, "step": 7411 }, { "epoch": 1.3601561048132318, "grad_norm": 0.756256639957428, "learning_rate": 4.439739056159769e-05, "loss": 0.8449, "step": 7412 }, { "epoch": 1.360341943876603, "grad_norm": 0.6536577343940735, "learning_rate": 4.4373084791102305e-05, "loss": 0.74, "step": 7413 }, { "epoch": 1.360527782939974, "grad_norm": 0.7058019638061523, "learning_rate": 4.434878377849716e-05, "loss": 0.9292, "step": 7414 }, { "epoch": 1.3607136220033451, "grad_norm": 0.6998428702354431, "learning_rate": 4.432448752586086e-05, "loss": 1.025, "step": 7415 }, { "epoch": 1.3608994610667162, "grad_norm": 0.8001590967178345, "learning_rate": 4.430019603527144e-05, "loss": 0.8781, "step": 7416 }, { "epoch": 1.3610853001300873, "grad_norm": 0.695641815662384, "learning_rate": 4.4275909308806615e-05, 
"loss": 1.0285, "step": 7417 }, { "epoch": 1.3612711391934584, "grad_norm": 0.7073414921760559, "learning_rate": 4.425162734854375e-05, "loss": 0.9246, "step": 7418 }, { "epoch": 1.3614569782568295, "grad_norm": 0.7373897433280945, "learning_rate": 4.4227350156559654e-05, "loss": 1.0972, "step": 7419 }, { "epoch": 1.3616428173202006, "grad_norm": 0.8083869814872742, "learning_rate": 4.420307773493084e-05, "loss": 0.928, "step": 7420 }, { "epoch": 1.3618286563835718, "grad_norm": 0.6736962795257568, "learning_rate": 4.417881008573338e-05, "loss": 0.9647, "step": 7421 }, { "epoch": 1.3620144954469429, "grad_norm": 0.783660888671875, "learning_rate": 4.4154547211042976e-05, "loss": 0.9698, "step": 7422 }, { "epoch": 1.3622003345103142, "grad_norm": 0.7382241487503052, "learning_rate": 4.41302891129348e-05, "loss": 1.017, "step": 7423 }, { "epoch": 1.362386173573685, "grad_norm": 0.7947919368743896, "learning_rate": 4.410603579348376e-05, "loss": 0.807, "step": 7424 }, { "epoch": 1.3625720126370564, "grad_norm": 0.629784882068634, "learning_rate": 4.408178725476431e-05, "loss": 0.853, "step": 7425 }, { "epoch": 1.3627578517004273, "grad_norm": 0.6849907636642456, "learning_rate": 4.405754349885042e-05, "loss": 0.935, "step": 7426 }, { "epoch": 1.3629436907637986, "grad_norm": 0.7976436614990234, "learning_rate": 4.403330452781577e-05, "loss": 0.9449, "step": 7427 }, { "epoch": 1.3631295298271697, "grad_norm": 0.6531306505203247, "learning_rate": 4.400907034373351e-05, "loss": 0.8535, "step": 7428 }, { "epoch": 1.3633153688905408, "grad_norm": 0.7673757076263428, "learning_rate": 4.398484094867652e-05, "loss": 0.8225, "step": 7429 }, { "epoch": 1.363501207953912, "grad_norm": 0.683928906917572, "learning_rate": 4.39606163447171e-05, "loss": 0.6675, "step": 7430 }, { "epoch": 1.363687047017283, "grad_norm": 0.7557547092437744, "learning_rate": 4.393639653392729e-05, "loss": 0.9763, "step": 7431 }, { "epoch": 1.3638728860806542, "grad_norm": 0.8278974294662476, 
"learning_rate": 4.391218151837868e-05, "loss": 1.0336, "step": 7432 }, { "epoch": 1.3640587251440253, "grad_norm": 0.8780465722084045, "learning_rate": 4.388797130014238e-05, "loss": 1.1456, "step": 7433 }, { "epoch": 1.3642445642073964, "grad_norm": 0.7346441149711609, "learning_rate": 4.3863765881289165e-05, "loss": 0.9235, "step": 7434 }, { "epoch": 1.3644304032707675, "grad_norm": 1.674783706665039, "learning_rate": 4.3839565263889385e-05, "loss": 1.0321, "step": 7435 }, { "epoch": 1.3646162423341386, "grad_norm": 0.7685444355010986, "learning_rate": 4.3815369450013e-05, "loss": 0.8211, "step": 7436 }, { "epoch": 1.3648020813975097, "grad_norm": 0.6860784292221069, "learning_rate": 4.3791178441729466e-05, "loss": 1.0005, "step": 7437 }, { "epoch": 1.3649879204608808, "grad_norm": 0.7961957454681396, "learning_rate": 4.3766992241107916e-05, "loss": 1.0806, "step": 7438 }, { "epoch": 1.365173759524252, "grad_norm": 0.7249303460121155, "learning_rate": 4.3742810850217084e-05, "loss": 0.9695, "step": 7439 }, { "epoch": 1.3653595985876232, "grad_norm": 0.8331009149551392, "learning_rate": 4.37186342711252e-05, "loss": 0.7105, "step": 7440 }, { "epoch": 1.3655454376509941, "grad_norm": 0.8023040890693665, "learning_rate": 4.3694462505900215e-05, "loss": 0.984, "step": 7441 }, { "epoch": 1.3657312767143654, "grad_norm": 0.8293659090995789, "learning_rate": 4.367029555660949e-05, "loss": 1.0359, "step": 7442 }, { "epoch": 1.3659171157777366, "grad_norm": 0.6645398736000061, "learning_rate": 4.364613342532018e-05, "loss": 0.6134, "step": 7443 }, { "epoch": 1.3661029548411077, "grad_norm": 0.8250023126602173, "learning_rate": 4.3621976114098826e-05, "loss": 0.9576, "step": 7444 }, { "epoch": 1.3662887939044788, "grad_norm": 0.6180599927902222, "learning_rate": 4.3597823625011714e-05, "loss": 0.9442, "step": 7445 }, { "epoch": 1.3664746329678499, "grad_norm": 0.765338659286499, "learning_rate": 4.3573675960124684e-05, "loss": 0.8555, "step": 7446 }, { "epoch": 
1.366660472031221, "grad_norm": 0.6484029293060303, "learning_rate": 4.354953312150305e-05, "loss": 0.7575, "step": 7447 }, { "epoch": 1.366846311094592, "grad_norm": 0.625566303730011, "learning_rate": 4.3525395111211877e-05, "loss": 0.9547, "step": 7448 }, { "epoch": 1.3670321501579632, "grad_norm": 0.6785605549812317, "learning_rate": 4.350126193131574e-05, "loss": 1.0458, "step": 7449 }, { "epoch": 1.3672179892213343, "grad_norm": 0.762402355670929, "learning_rate": 4.347713358387875e-05, "loss": 0.9349, "step": 7450 }, { "epoch": 1.3674038282847054, "grad_norm": 0.6979005336761475, "learning_rate": 4.3453010070964684e-05, "loss": 1.0819, "step": 7451 }, { "epoch": 1.3675896673480765, "grad_norm": 0.8139050006866455, "learning_rate": 4.3428891394636874e-05, "loss": 0.9195, "step": 7452 }, { "epoch": 1.3677755064114476, "grad_norm": 0.7950904369354248, "learning_rate": 4.34047775569583e-05, "loss": 0.8346, "step": 7453 }, { "epoch": 1.3679613454748187, "grad_norm": 0.6743336915969849, "learning_rate": 4.3380668559991366e-05, "loss": 0.9345, "step": 7454 }, { "epoch": 1.36814718453819, "grad_norm": 0.7096421718597412, "learning_rate": 4.335656440579827e-05, "loss": 0.7283, "step": 7455 }, { "epoch": 1.368333023601561, "grad_norm": 0.5938029885292053, "learning_rate": 4.33324650964406e-05, "loss": 0.825, "step": 7456 }, { "epoch": 1.3685188626649323, "grad_norm": 0.7945120930671692, "learning_rate": 4.330837063397971e-05, "loss": 1.104, "step": 7457 }, { "epoch": 1.3687047017283032, "grad_norm": 0.7108601331710815, "learning_rate": 4.328428102047636e-05, "loss": 0.8969, "step": 7458 }, { "epoch": 1.3688905407916745, "grad_norm": 0.6471177935600281, "learning_rate": 4.3260196257991035e-05, "loss": 0.9276, "step": 7459 }, { "epoch": 1.3690763798550456, "grad_norm": 0.8287071585655212, "learning_rate": 4.32361163485838e-05, "loss": 0.9662, "step": 7460 }, { "epoch": 1.3692622189184167, "grad_norm": 0.7040210366249084, "learning_rate": 4.321204129431418e-05, "loss": 
0.9346, "step": 7461 }, { "epoch": 1.3694480579817878, "grad_norm": 0.7015113234519958, "learning_rate": 4.3187971097241416e-05, "loss": 1.0136, "step": 7462 }, { "epoch": 1.369633897045159, "grad_norm": 0.656891942024231, "learning_rate": 4.3163905759424264e-05, "loss": 0.9282, "step": 7463 }, { "epoch": 1.36981973610853, "grad_norm": 0.6929020285606384, "learning_rate": 4.313984528292108e-05, "loss": 0.958, "step": 7464 }, { "epoch": 1.3700055751719011, "grad_norm": 0.7964670062065125, "learning_rate": 4.311578966978982e-05, "loss": 0.9406, "step": 7465 }, { "epoch": 1.3701914142352722, "grad_norm": 0.7206087708473206, "learning_rate": 4.309173892208801e-05, "loss": 0.9252, "step": 7466 }, { "epoch": 1.3703772532986433, "grad_norm": 0.7929943203926086, "learning_rate": 4.3067693041872805e-05, "loss": 1.1453, "step": 7467 }, { "epoch": 1.3705630923620145, "grad_norm": 0.7283685207366943, "learning_rate": 4.304365203120082e-05, "loss": 1.1698, "step": 7468 }, { "epoch": 1.3707489314253856, "grad_norm": 0.7081707715988159, "learning_rate": 4.301961589212842e-05, "loss": 0.9548, "step": 7469 }, { "epoch": 1.3709347704887567, "grad_norm": 0.7527585029602051, "learning_rate": 4.299558462671137e-05, "loss": 1.0448, "step": 7470 }, { "epoch": 1.3711206095521278, "grad_norm": 0.7582692503929138, "learning_rate": 4.2971558237005225e-05, "loss": 0.901, "step": 7471 }, { "epoch": 1.371306448615499, "grad_norm": 0.7193834781646729, "learning_rate": 4.2947536725064906e-05, "loss": 1.0733, "step": 7472 }, { "epoch": 1.37149228767887, "grad_norm": 0.6494482755661011, "learning_rate": 4.292352009294509e-05, "loss": 0.7626, "step": 7473 }, { "epoch": 1.3716781267422413, "grad_norm": 0.49988317489624023, "learning_rate": 4.289950834269999e-05, "loss": 0.4652, "step": 7474 }, { "epoch": 1.3718639658056122, "grad_norm": 0.7491035461425781, "learning_rate": 4.287550147638332e-05, "loss": 1.0236, "step": 7475 }, { "epoch": 1.3720498048689835, "grad_norm": 0.6134287714958191, 
"learning_rate": 4.2851499496048507e-05, "loss": 1.1179, "step": 7476 }, { "epoch": 1.3722356439323546, "grad_norm": 0.7895690202713013, "learning_rate": 4.282750240374842e-05, "loss": 0.8788, "step": 7477 }, { "epoch": 1.3724214829957257, "grad_norm": 0.61039799451828, "learning_rate": 4.280351020153561e-05, "loss": 0.9806, "step": 7478 }, { "epoch": 1.3726073220590969, "grad_norm": 0.7818472981452942, "learning_rate": 4.2779522891462196e-05, "loss": 1.1705, "step": 7479 }, { "epoch": 1.372793161122468, "grad_norm": 0.6983256340026855, "learning_rate": 4.275554047557987e-05, "loss": 0.9489, "step": 7480 }, { "epoch": 1.372979000185839, "grad_norm": 0.6386864185333252, "learning_rate": 4.273156295593991e-05, "loss": 0.8734, "step": 7481 }, { "epoch": 1.3731648392492102, "grad_norm": 0.6794342994689941, "learning_rate": 4.270759033459311e-05, "loss": 0.9008, "step": 7482 }, { "epoch": 1.3733506783125813, "grad_norm": 0.7258062362670898, "learning_rate": 4.268362261358997e-05, "loss": 0.9507, "step": 7483 }, { "epoch": 1.3735365173759524, "grad_norm": 0.9591363072395325, "learning_rate": 4.265965979498043e-05, "loss": 0.8545, "step": 7484 }, { "epoch": 1.3737223564393235, "grad_norm": 0.6767122745513916, "learning_rate": 4.263570188081416e-05, "loss": 1.0834, "step": 7485 }, { "epoch": 1.3739081955026946, "grad_norm": 0.63570636510849, "learning_rate": 4.2611748873140246e-05, "loss": 0.7912, "step": 7486 }, { "epoch": 1.3740940345660657, "grad_norm": 0.6445006132125854, "learning_rate": 4.258780077400748e-05, "loss": 0.8576, "step": 7487 }, { "epoch": 1.3742798736294368, "grad_norm": 0.711561918258667, "learning_rate": 4.2563857585464237e-05, "loss": 1.0954, "step": 7488 }, { "epoch": 1.3744657126928082, "grad_norm": 0.7400350570678711, "learning_rate": 4.253991930955835e-05, "loss": 0.8498, "step": 7489 }, { "epoch": 1.374651551756179, "grad_norm": 0.71037358045578, "learning_rate": 4.25159859483374e-05, "loss": 1.0236, "step": 7490 }, { "epoch": 1.3748373908195504, 
"grad_norm": 0.7470990419387817, "learning_rate": 4.249205750384836e-05, "loss": 1.0152, "step": 7491 }, { "epoch": 1.3750232298829215, "grad_norm": 0.7151445150375366, "learning_rate": 4.2468133978137945e-05, "loss": 0.8797, "step": 7492 }, { "epoch": 1.3752090689462926, "grad_norm": 1.017393946647644, "learning_rate": 4.244421537325237e-05, "loss": 1.1923, "step": 7493 }, { "epoch": 1.3753949080096637, "grad_norm": 0.7744883894920349, "learning_rate": 4.2420301691237487e-05, "loss": 1.015, "step": 7494 }, { "epoch": 1.3755807470730348, "grad_norm": 0.6467261910438538, "learning_rate": 4.239639293413861e-05, "loss": 1.0064, "step": 7495 }, { "epoch": 1.375766586136406, "grad_norm": 0.6293275952339172, "learning_rate": 4.2372489104000736e-05, "loss": 0.7665, "step": 7496 }, { "epoch": 1.375952425199777, "grad_norm": 1.479631781578064, "learning_rate": 4.234859020286845e-05, "loss": 1.2332, "step": 7497 }, { "epoch": 1.3761382642631481, "grad_norm": 0.6856657266616821, "learning_rate": 4.2324696232785824e-05, "loss": 1.0395, "step": 7498 }, { "epoch": 1.3763241033265192, "grad_norm": 0.670069694519043, "learning_rate": 4.230080719579661e-05, "loss": 0.9213, "step": 7499 }, { "epoch": 1.3765099423898903, "grad_norm": 0.79466313123703, "learning_rate": 4.227692309394402e-05, "loss": 1.0373, "step": 7500 }, { "epoch": 1.3766957814532614, "grad_norm": 0.9797833561897278, "learning_rate": 4.2253043929270964e-05, "loss": 1.1574, "step": 7501 }, { "epoch": 1.3768816205166325, "grad_norm": 0.5774651169776917, "learning_rate": 4.2229169703819904e-05, "loss": 0.8187, "step": 7502 }, { "epoch": 1.3770674595800036, "grad_norm": 1.4220142364501953, "learning_rate": 4.2205300419632774e-05, "loss": 1.1914, "step": 7503 }, { "epoch": 1.3772532986433748, "grad_norm": 0.6203631162643433, "learning_rate": 4.2181436078751256e-05, "loss": 0.9958, "step": 7504 }, { "epoch": 1.3774391377067459, "grad_norm": 0.915024995803833, "learning_rate": 4.215757668321643e-05, "loss": 0.951, "step": 
7505 }, { "epoch": 1.3776249767701172, "grad_norm": 0.7773443460464478, "learning_rate": 4.213372223506912e-05, "loss": 1.0363, "step": 7506 }, { "epoch": 1.377810815833488, "grad_norm": 0.6697326898574829, "learning_rate": 4.210987273634958e-05, "loss": 0.7743, "step": 7507 }, { "epoch": 1.3779966548968594, "grad_norm": 0.7244952917098999, "learning_rate": 4.208602818909774e-05, "loss": 0.9596, "step": 7508 }, { "epoch": 1.3781824939602305, "grad_norm": 0.6577768921852112, "learning_rate": 4.206218859535308e-05, "loss": 0.6985, "step": 7509 }, { "epoch": 1.3783683330236016, "grad_norm": 0.6292741298675537, "learning_rate": 4.203835395715464e-05, "loss": 0.9942, "step": 7510 }, { "epoch": 1.3785541720869727, "grad_norm": 0.7271589636802673, "learning_rate": 4.20145242765411e-05, "loss": 1.03, "step": 7511 }, { "epoch": 1.3787400111503438, "grad_norm": 0.7603896856307983, "learning_rate": 4.1990699555550595e-05, "loss": 0.8599, "step": 7512 }, { "epoch": 1.378925850213715, "grad_norm": 0.6246262192726135, "learning_rate": 4.196687979622096e-05, "loss": 1.0653, "step": 7513 }, { "epoch": 1.379111689277086, "grad_norm": 0.6769149899482727, "learning_rate": 4.194306500058947e-05, "loss": 0.8994, "step": 7514 }, { "epoch": 1.3792975283404572, "grad_norm": 0.6372380256652832, "learning_rate": 4.191925517069314e-05, "loss": 0.8837, "step": 7515 }, { "epoch": 1.3794833674038283, "grad_norm": 0.6261329054832458, "learning_rate": 4.189545030856846e-05, "loss": 0.9174, "step": 7516 }, { "epoch": 1.3796692064671994, "grad_norm": 0.6543997526168823, "learning_rate": 4.187165041625146e-05, "loss": 1.073, "step": 7517 }, { "epoch": 1.3798550455305705, "grad_norm": 0.664309561252594, "learning_rate": 4.184785549577788e-05, "loss": 0.7874, "step": 7518 }, { "epoch": 1.3800408845939416, "grad_norm": 0.7015162110328674, "learning_rate": 4.182406554918285e-05, "loss": 0.9236, "step": 7519 }, { "epoch": 1.3802267236573127, "grad_norm": 0.6500989198684692, "learning_rate": 
4.180028057850127e-05, "loss": 0.9888, "step": 7520 }, { "epoch": 1.380412562720684, "grad_norm": 0.9309687614440918, "learning_rate": 4.177650058576744e-05, "loss": 1.1334, "step": 7521 }, { "epoch": 1.380598401784055, "grad_norm": 0.7783293724060059, "learning_rate": 4.175272557301536e-05, "loss": 0.9452, "step": 7522 }, { "epoch": 1.3807842408474262, "grad_norm": 0.6977812647819519, "learning_rate": 4.1728955542278546e-05, "loss": 1.08, "step": 7523 }, { "epoch": 1.3809700799107971, "grad_norm": 0.7367052435874939, "learning_rate": 4.170519049559011e-05, "loss": 0.8711, "step": 7524 }, { "epoch": 1.3811559189741685, "grad_norm": 0.7824680805206299, "learning_rate": 4.168143043498276e-05, "loss": 0.8856, "step": 7525 }, { "epoch": 1.3813417580375396, "grad_norm": 0.6573517322540283, "learning_rate": 4.1657675362488655e-05, "loss": 1.0912, "step": 7526 }, { "epoch": 1.3815275971009107, "grad_norm": 0.7205615043640137, "learning_rate": 4.163392528013973e-05, "loss": 0.6739, "step": 7527 }, { "epoch": 1.3817134361642818, "grad_norm": 0.7122405171394348, "learning_rate": 4.161018018996727e-05, "loss": 1.0519, "step": 7528 }, { "epoch": 1.3818992752276529, "grad_norm": 0.6043539047241211, "learning_rate": 4.15864400940023e-05, "loss": 1.0287, "step": 7529 }, { "epoch": 1.382085114291024, "grad_norm": 0.7427747845649719, "learning_rate": 4.1562704994275394e-05, "loss": 0.7662, "step": 7530 }, { "epoch": 1.382270953354395, "grad_norm": 0.6464288234710693, "learning_rate": 4.153897489281658e-05, "loss": 0.8072, "step": 7531 }, { "epoch": 1.3824567924177662, "grad_norm": 0.7093892693519592, "learning_rate": 4.1515249791655654e-05, "loss": 0.9883, "step": 7532 }, { "epoch": 1.3826426314811373, "grad_norm": 0.7397565841674805, "learning_rate": 4.149152969282176e-05, "loss": 0.9869, "step": 7533 }, { "epoch": 1.3828284705445084, "grad_norm": 0.9709092378616333, "learning_rate": 4.146781459834384e-05, "loss": 1.0185, "step": 7534 }, { "epoch": 1.3830143096078795, "grad_norm": 
0.6332142949104309, "learning_rate": 4.1444104510250184e-05, "loss": 0.9538, "step": 7535 }, { "epoch": 1.3832001486712506, "grad_norm": 0.7227572798728943, "learning_rate": 4.1420399430568834e-05, "loss": 0.974, "step": 7536 }, { "epoch": 1.3833859877346217, "grad_norm": 0.7384819984436035, "learning_rate": 4.139669936132733e-05, "loss": 0.8992, "step": 7537 }, { "epoch": 1.383571826797993, "grad_norm": 0.7241960167884827, "learning_rate": 4.1373004304552786e-05, "loss": 0.9805, "step": 7538 }, { "epoch": 1.383757665861364, "grad_norm": 0.6905572414398193, "learning_rate": 4.134931426227193e-05, "loss": 1.0361, "step": 7539 }, { "epoch": 1.3839435049247353, "grad_norm": 0.5935276746749878, "learning_rate": 4.1325629236510954e-05, "loss": 0.9253, "step": 7540 }, { "epoch": 1.3841293439881062, "grad_norm": 0.7509950399398804, "learning_rate": 4.1301949229295755e-05, "loss": 0.7917, "step": 7541 }, { "epoch": 1.3843151830514775, "grad_norm": 0.6783537268638611, "learning_rate": 4.127827424265166e-05, "loss": 0.8705, "step": 7542 }, { "epoch": 1.3845010221148486, "grad_norm": 0.7498492002487183, "learning_rate": 4.125460427860369e-05, "loss": 0.8801, "step": 7543 }, { "epoch": 1.3846868611782197, "grad_norm": 0.7158042192459106, "learning_rate": 4.123093933917642e-05, "loss": 0.9504, "step": 7544 }, { "epoch": 1.3848727002415908, "grad_norm": 0.7044070363044739, "learning_rate": 4.120727942639389e-05, "loss": 0.9132, "step": 7545 }, { "epoch": 1.385058539304962, "grad_norm": 0.7871155738830566, "learning_rate": 4.118362454227984e-05, "loss": 0.9689, "step": 7546 }, { "epoch": 1.385244378368333, "grad_norm": 0.7555184960365295, "learning_rate": 4.115997468885748e-05, "loss": 0.9615, "step": 7547 }, { "epoch": 1.3854302174317041, "grad_norm": 0.6864029765129089, "learning_rate": 4.113632986814968e-05, "loss": 0.7999, "step": 7548 }, { "epoch": 1.3856160564950752, "grad_norm": 0.812187135219574, "learning_rate": 4.111269008217879e-05, "loss": 0.9588, "step": 7549 }, { 
"epoch": 1.3858018955584464, "grad_norm": 0.8208745121955872, "learning_rate": 4.1089055332966764e-05, "loss": 1.1081, "step": 7550 }, { "epoch": 1.3859877346218175, "grad_norm": 0.8858060240745544, "learning_rate": 4.1065425622535216e-05, "loss": 0.8471, "step": 7551 }, { "epoch": 1.3861735736851886, "grad_norm": 0.751556396484375, "learning_rate": 4.104180095290514e-05, "loss": 1.0127, "step": 7552 }, { "epoch": 1.3863594127485597, "grad_norm": 0.6057085990905762, "learning_rate": 4.101818132609726e-05, "loss": 0.9103, "step": 7553 }, { "epoch": 1.3865452518119308, "grad_norm": 0.5512748956680298, "learning_rate": 4.09945667441318e-05, "loss": 0.7052, "step": 7554 }, { "epoch": 1.386731090875302, "grad_norm": 0.6736949682235718, "learning_rate": 4.0970957209028616e-05, "loss": 0.8159, "step": 7555 }, { "epoch": 1.386916929938673, "grad_norm": 0.7596806287765503, "learning_rate": 4.094735272280701e-05, "loss": 0.8295, "step": 7556 }, { "epoch": 1.3871027690020443, "grad_norm": 0.7188926935195923, "learning_rate": 4.0923753287485944e-05, "loss": 1.0136, "step": 7557 }, { "epoch": 1.3872886080654154, "grad_norm": 0.7635893821716309, "learning_rate": 4.090015890508399e-05, "loss": 0.9305, "step": 7558 }, { "epoch": 1.3874744471287865, "grad_norm": 0.8576083183288574, "learning_rate": 4.0876569577619125e-05, "loss": 0.9918, "step": 7559 }, { "epoch": 1.3876602861921576, "grad_norm": 0.7648732662200928, "learning_rate": 4.085298530710909e-05, "loss": 1.0151, "step": 7560 }, { "epoch": 1.3878461252555288, "grad_norm": 0.735910177230835, "learning_rate": 4.082940609557102e-05, "loss": 0.966, "step": 7561 }, { "epoch": 1.3880319643188999, "grad_norm": 0.5369407534599304, "learning_rate": 4.0805831945021764e-05, "loss": 0.5685, "step": 7562 }, { "epoch": 1.388217803382271, "grad_norm": 0.7792626619338989, "learning_rate": 4.07822628574776e-05, "loss": 1.1068, "step": 7563 }, { "epoch": 1.388403642445642, "grad_norm": 0.7470696568489075, "learning_rate": 
4.075869883495449e-05, "loss": 0.9176, "step": 7564 }, { "epoch": 1.3885894815090132, "grad_norm": 0.6785575151443481, "learning_rate": 4.0735139879467935e-05, "loss": 0.9002, "step": 7565 }, { "epoch": 1.3887753205723843, "grad_norm": 0.8073457479476929, "learning_rate": 4.071158599303292e-05, "loss": 1.1224, "step": 7566 }, { "epoch": 1.3889611596357554, "grad_norm": 0.7951982021331787, "learning_rate": 4.0688037177664085e-05, "loss": 1.0072, "step": 7567 }, { "epoch": 1.3891469986991265, "grad_norm": 0.856749415397644, "learning_rate": 4.066449343537563e-05, "loss": 0.9278, "step": 7568 }, { "epoch": 1.3893328377624976, "grad_norm": 0.71700119972229, "learning_rate": 4.064095476818133e-05, "loss": 1.1046, "step": 7569 }, { "epoch": 1.3895186768258687, "grad_norm": 0.645438015460968, "learning_rate": 4.0617421178094416e-05, "loss": 0.89, "step": 7570 }, { "epoch": 1.3897045158892398, "grad_norm": 0.7050654292106628, "learning_rate": 4.059389266712782e-05, "loss": 0.8633, "step": 7571 }, { "epoch": 1.3898903549526112, "grad_norm": 0.9105086922645569, "learning_rate": 4.0570369237294006e-05, "loss": 1.1659, "step": 7572 }, { "epoch": 1.390076194015982, "grad_norm": 0.7280207276344299, "learning_rate": 4.0546850890604925e-05, "loss": 1.0042, "step": 7573 }, { "epoch": 1.3902620330793534, "grad_norm": 0.6948641538619995, "learning_rate": 4.0523337629072213e-05, "loss": 0.9946, "step": 7574 }, { "epoch": 1.3904478721427245, "grad_norm": 0.62689608335495, "learning_rate": 4.049982945470695e-05, "loss": 0.9436, "step": 7575 }, { "epoch": 1.3906337112060956, "grad_norm": 0.6683322191238403, "learning_rate": 4.04763263695199e-05, "loss": 0.8834, "step": 7576 }, { "epoch": 1.3908195502694667, "grad_norm": 0.7566063404083252, "learning_rate": 4.045282837552128e-05, "loss": 1.025, "step": 7577 }, { "epoch": 1.3910053893328378, "grad_norm": 0.7480912804603577, "learning_rate": 4.042933547472093e-05, "loss": 0.9125, "step": 7578 }, { "epoch": 1.391191228396209, "grad_norm": 
0.797137439250946, "learning_rate": 4.0405847669128314e-05, "loss": 0.8781, "step": 7579 }, { "epoch": 1.39137706745958, "grad_norm": 0.6337542533874512, "learning_rate": 4.038236496075231e-05, "loss": 1.0563, "step": 7580 }, { "epoch": 1.3915629065229511, "grad_norm": 0.772243082523346, "learning_rate": 4.0358887351601474e-05, "loss": 1.061, "step": 7581 }, { "epoch": 1.3917487455863222, "grad_norm": 0.6702725887298584, "learning_rate": 4.0335414843683906e-05, "loss": 1.0178, "step": 7582 }, { "epoch": 1.3919345846496933, "grad_norm": 0.6551902294158936, "learning_rate": 4.03119474390073e-05, "loss": 1.0351, "step": 7583 }, { "epoch": 1.3921204237130644, "grad_norm": 0.7127566933631897, "learning_rate": 4.028848513957879e-05, "loss": 0.9753, "step": 7584 }, { "epoch": 1.3923062627764355, "grad_norm": 0.7440797686576843, "learning_rate": 4.026502794740521e-05, "loss": 0.8079, "step": 7585 }, { "epoch": 1.3924921018398067, "grad_norm": 0.8096966743469238, "learning_rate": 4.024157586449292e-05, "loss": 1.1204, "step": 7586 }, { "epoch": 1.392677940903178, "grad_norm": 0.6678999662399292, "learning_rate": 4.021812889284777e-05, "loss": 1.038, "step": 7587 }, { "epoch": 1.3928637799665489, "grad_norm": 0.6104394197463989, "learning_rate": 4.01946870344753e-05, "loss": 0.8797, "step": 7588 }, { "epoch": 1.3930496190299202, "grad_norm": 0.6613131761550903, "learning_rate": 4.0171250291380455e-05, "loss": 1.0774, "step": 7589 }, { "epoch": 1.393235458093291, "grad_norm": 0.7873337864875793, "learning_rate": 4.014781866556793e-05, "loss": 1.1021, "step": 7590 }, { "epoch": 1.3934212971566624, "grad_norm": 0.6655529141426086, "learning_rate": 4.012439215904179e-05, "loss": 0.9414, "step": 7591 }, { "epoch": 1.3936071362200335, "grad_norm": 0.7219845652580261, "learning_rate": 4.010097077380579e-05, "loss": 0.9931, "step": 7592 }, { "epoch": 1.3937929752834046, "grad_norm": 0.9080620408058167, "learning_rate": 4.007755451186327e-05, "loss": 0.8985, "step": 7593 }, { 
"epoch": 1.3939788143467757, "grad_norm": 0.6404428482055664, "learning_rate": 4.005414337521698e-05, "loss": 0.8768, "step": 7594 }, { "epoch": 1.3941646534101468, "grad_norm": 1.5202301740646362, "learning_rate": 4.003073736586937e-05, "loss": 1.1338, "step": 7595 }, { "epoch": 1.394350492473518, "grad_norm": 0.7384157180786133, "learning_rate": 4.000733648582242e-05, "loss": 1.0007, "step": 7596 }, { "epoch": 1.394536331536889, "grad_norm": 0.6847105026245117, "learning_rate": 3.998394073707763e-05, "loss": 0.9388, "step": 7597 }, { "epoch": 1.3947221706002602, "grad_norm": 0.6476550102233887, "learning_rate": 3.996055012163609e-05, "loss": 1.0399, "step": 7598 }, { "epoch": 1.3949080096636313, "grad_norm": 0.6664227247238159, "learning_rate": 3.993716464149846e-05, "loss": 0.7564, "step": 7599 }, { "epoch": 1.3950938487270024, "grad_norm": 0.7326140403747559, "learning_rate": 3.991378429866499e-05, "loss": 0.883, "step": 7600 }, { "epoch": 1.3952796877903735, "grad_norm": 0.6408746242523193, "learning_rate": 3.989040909513537e-05, "loss": 0.7608, "step": 7601 }, { "epoch": 1.3954655268537446, "grad_norm": 0.670562207698822, "learning_rate": 3.9867039032909015e-05, "loss": 0.9599, "step": 7602 }, { "epoch": 1.3956513659171157, "grad_norm": 0.6591134667396545, "learning_rate": 3.9843674113984744e-05, "loss": 0.8028, "step": 7603 }, { "epoch": 1.395837204980487, "grad_norm": 0.6754502654075623, "learning_rate": 3.9820314340361075e-05, "loss": 0.8671, "step": 7604 }, { "epoch": 1.396023044043858, "grad_norm": 0.713962972164154, "learning_rate": 3.979695971403594e-05, "loss": 1.078, "step": 7605 }, { "epoch": 1.3962088831072292, "grad_norm": 0.8512964248657227, "learning_rate": 3.977361023700696e-05, "loss": 1.0837, "step": 7606 }, { "epoch": 1.3963947221706001, "grad_norm": 0.697532057762146, "learning_rate": 3.975026591127131e-05, "loss": 1.0582, "step": 7607 }, { "epoch": 1.3965805612339715, "grad_norm": 0.866722822189331, "learning_rate": 3.9726926738825585e-05, 
"loss": 1.1474, "step": 7608 }, { "epoch": 1.3967664002973426, "grad_norm": 0.7146264910697937, "learning_rate": 3.970359272166613e-05, "loss": 0.9921, "step": 7609 }, { "epoch": 1.3969522393607137, "grad_norm": 0.6930286884307861, "learning_rate": 3.968026386178867e-05, "loss": 0.9954, "step": 7610 }, { "epoch": 1.3971380784240848, "grad_norm": 0.6407985687255859, "learning_rate": 3.965694016118861e-05, "loss": 0.9258, "step": 7611 }, { "epoch": 1.3973239174874559, "grad_norm": 0.6215234994888306, "learning_rate": 3.963362162186086e-05, "loss": 0.8348, "step": 7612 }, { "epoch": 1.397509756550827, "grad_norm": 0.7461937665939331, "learning_rate": 3.961030824579994e-05, "loss": 0.9097, "step": 7613 }, { "epoch": 1.397695595614198, "grad_norm": 0.6643327474594116, "learning_rate": 3.958700003499991e-05, "loss": 0.9566, "step": 7614 }, { "epoch": 1.3978814346775692, "grad_norm": 0.89393150806427, "learning_rate": 3.956369699145429e-05, "loss": 0.9473, "step": 7615 }, { "epoch": 1.3980672737409403, "grad_norm": 0.6691911220550537, "learning_rate": 3.954039911715632e-05, "loss": 1.0246, "step": 7616 }, { "epoch": 1.3982531128043114, "grad_norm": 0.7593314051628113, "learning_rate": 3.951710641409867e-05, "loss": 0.9746, "step": 7617 }, { "epoch": 1.3984389518676825, "grad_norm": 0.7830350995063782, "learning_rate": 3.9493818884273646e-05, "loss": 1.0272, "step": 7618 }, { "epoch": 1.3986247909310536, "grad_norm": 0.7209092974662781, "learning_rate": 3.947053652967303e-05, "loss": 0.9343, "step": 7619 }, { "epoch": 1.3988106299944247, "grad_norm": 0.6709997057914734, "learning_rate": 3.944725935228826e-05, "loss": 0.7919, "step": 7620 }, { "epoch": 1.398996469057796, "grad_norm": 0.6931282877922058, "learning_rate": 3.942398735411028e-05, "loss": 0.8118, "step": 7621 }, { "epoch": 1.399182308121167, "grad_norm": 0.6827849745750427, "learning_rate": 3.9400720537129564e-05, "loss": 1.0486, "step": 7622 }, { "epoch": 1.3993681471845383, "grad_norm": 0.6376016139984131, 
"learning_rate": 3.937745890333623e-05, "loss": 0.787, "step": 7623 }, { "epoch": 1.3995539862479094, "grad_norm": 0.820344090461731, "learning_rate": 3.935420245471982e-05, "loss": 0.94, "step": 7624 }, { "epoch": 1.3997398253112805, "grad_norm": 0.6794884204864502, "learning_rate": 3.9330951193269547e-05, "loss": 0.905, "step": 7625 }, { "epoch": 1.3999256643746516, "grad_norm": 0.748024582862854, "learning_rate": 3.930770512097414e-05, "loss": 0.9723, "step": 7626 }, { "epoch": 1.4001115034380227, "grad_norm": 0.7801584005355835, "learning_rate": 3.9284464239821897e-05, "loss": 0.8886, "step": 7627 }, { "epoch": 1.4002973425013938, "grad_norm": 0.6269378066062927, "learning_rate": 3.926122855180066e-05, "loss": 0.8735, "step": 7628 }, { "epoch": 1.400483181564765, "grad_norm": 0.8104212880134583, "learning_rate": 3.9237998058897806e-05, "loss": 1.1047, "step": 7629 }, { "epoch": 1.400669020628136, "grad_norm": 0.5820780992507935, "learning_rate": 3.921477276310034e-05, "loss": 0.7459, "step": 7630 }, { "epoch": 1.4008548596915071, "grad_norm": 0.8262017369270325, "learning_rate": 3.9191552666394695e-05, "loss": 1.1253, "step": 7631 }, { "epoch": 1.4010406987548782, "grad_norm": 0.7182964086532593, "learning_rate": 3.916833777076699e-05, "loss": 1.0012, "step": 7632 }, { "epoch": 1.4012265378182494, "grad_norm": 0.6813958883285522, "learning_rate": 3.914512807820282e-05, "loss": 0.6448, "step": 7633 }, { "epoch": 1.4014123768816205, "grad_norm": 0.7764411568641663, "learning_rate": 3.912192359068736e-05, "loss": 0.9384, "step": 7634 }, { "epoch": 1.4015982159449916, "grad_norm": 0.6565015316009521, "learning_rate": 3.9098724310205384e-05, "loss": 0.9253, "step": 7635 }, { "epoch": 1.401784055008363, "grad_norm": 0.8751726746559143, "learning_rate": 3.907553023874111e-05, "loss": 0.897, "step": 7636 }, { "epoch": 1.4019698940717338, "grad_norm": 0.7254313826560974, "learning_rate": 3.905234137827845e-05, "loss": 1.0225, "step": 7637 }, { "epoch": 
1.402155733135105, "grad_norm": 0.7876704335212708, "learning_rate": 3.902915773080071e-05, "loss": 1.1144, "step": 7638 }, { "epoch": 1.402341572198476, "grad_norm": 0.665797233581543, "learning_rate": 3.9005979298290894e-05, "loss": 0.9349, "step": 7639 }, { "epoch": 1.4025274112618473, "grad_norm": 0.7425183653831482, "learning_rate": 3.8982806082731495e-05, "loss": 0.905, "step": 7640 }, { "epoch": 1.4027132503252184, "grad_norm": 0.7434195280075073, "learning_rate": 3.895963808610459e-05, "loss": 1.0622, "step": 7641 }, { "epoch": 1.4028990893885895, "grad_norm": 0.7471517324447632, "learning_rate": 3.893647531039174e-05, "loss": 1.0465, "step": 7642 }, { "epoch": 1.4030849284519606, "grad_norm": 0.7446435689926147, "learning_rate": 3.891331775757413e-05, "loss": 0.9978, "step": 7643 }, { "epoch": 1.4032707675153318, "grad_norm": 0.709337055683136, "learning_rate": 3.889016542963252e-05, "loss": 0.7238, "step": 7644 }, { "epoch": 1.4034566065787029, "grad_norm": 0.6386134028434753, "learning_rate": 3.886701832854709e-05, "loss": 0.8757, "step": 7645 }, { "epoch": 1.403642445642074, "grad_norm": 0.6459241509437561, "learning_rate": 3.884387645629774e-05, "loss": 1.0497, "step": 7646 }, { "epoch": 1.403828284705445, "grad_norm": 0.7791414260864258, "learning_rate": 3.8820739814863794e-05, "loss": 0.8849, "step": 7647 }, { "epoch": 1.4040141237688162, "grad_norm": 0.7343874573707581, "learning_rate": 3.879760840622419e-05, "loss": 1.0813, "step": 7648 }, { "epoch": 1.4041999628321873, "grad_norm": 0.6905683875083923, "learning_rate": 3.877448223235746e-05, "loss": 1.0526, "step": 7649 }, { "epoch": 1.4043858018955584, "grad_norm": 0.6315404176712036, "learning_rate": 3.8751361295241536e-05, "loss": 0.7624, "step": 7650 }, { "epoch": 1.4045716409589295, "grad_norm": 0.625384509563446, "learning_rate": 3.87282455968541e-05, "loss": 0.9346, "step": 7651 }, { "epoch": 1.4047574800223006, "grad_norm": 0.7913904190063477, "learning_rate": 3.870513513917221e-05, "loss": 
1.1093, "step": 7652 }, { "epoch": 1.404943319085672, "grad_norm": 0.7879133820533752, "learning_rate": 3.8682029924172617e-05, "loss": 1.0585, "step": 7653 }, { "epoch": 1.4051291581490428, "grad_norm": 0.8064400553703308, "learning_rate": 3.86589299538315e-05, "loss": 1.0688, "step": 7654 }, { "epoch": 1.4053149972124142, "grad_norm": 0.7478057146072388, "learning_rate": 3.863583523012467e-05, "loss": 1.0208, "step": 7655 }, { "epoch": 1.405500836275785, "grad_norm": 0.6603885889053345, "learning_rate": 3.861274575502749e-05, "loss": 0.934, "step": 7656 }, { "epoch": 1.4056866753391564, "grad_norm": 0.7641489505767822, "learning_rate": 3.858966153051484e-05, "loss": 0.8131, "step": 7657 }, { "epoch": 1.4058725144025275, "grad_norm": 0.6927787661552429, "learning_rate": 3.85665825585612e-05, "loss": 0.9083, "step": 7658 }, { "epoch": 1.4060583534658986, "grad_norm": 0.6479927897453308, "learning_rate": 3.854350884114049e-05, "loss": 0.9776, "step": 7659 }, { "epoch": 1.4062441925292697, "grad_norm": 0.804818868637085, "learning_rate": 3.852044038022633e-05, "loss": 0.9168, "step": 7660 }, { "epoch": 1.4064300315926408, "grad_norm": 0.7272639274597168, "learning_rate": 3.849737717779174e-05, "loss": 1.0321, "step": 7661 }, { "epoch": 1.406615870656012, "grad_norm": 0.7542232871055603, "learning_rate": 3.847431923580941e-05, "loss": 0.9362, "step": 7662 }, { "epoch": 1.406801709719383, "grad_norm": 0.8392438292503357, "learning_rate": 3.845126655625157e-05, "loss": 0.7717, "step": 7663 }, { "epoch": 1.4069875487827541, "grad_norm": 0.756179928779602, "learning_rate": 3.842821914108988e-05, "loss": 0.8892, "step": 7664 }, { "epoch": 1.4071733878461252, "grad_norm": 0.7532819509506226, "learning_rate": 3.840517699229572e-05, "loss": 1.3075, "step": 7665 }, { "epoch": 1.4073592269094963, "grad_norm": 0.8892289996147156, "learning_rate": 3.838214011183986e-05, "loss": 1.123, "step": 7666 }, { "epoch": 1.4075450659728674, "grad_norm": 0.5668398141860962, "learning_rate": 
3.835910850169276e-05, "loss": 0.8134, "step": 7667 }, { "epoch": 1.4077309050362385, "grad_norm": 0.7089663147926331, "learning_rate": 3.833608216382431e-05, "loss": 1.1587, "step": 7668 }, { "epoch": 1.4079167440996097, "grad_norm": 0.6543086171150208, "learning_rate": 3.8313061100204015e-05, "loss": 0.8773, "step": 7669 }, { "epoch": 1.408102583162981, "grad_norm": 0.7265071868896484, "learning_rate": 3.829004531280094e-05, "loss": 0.8128, "step": 7670 }, { "epoch": 1.4082884222263519, "grad_norm": 0.6952806115150452, "learning_rate": 3.826703480358366e-05, "loss": 1.0041, "step": 7671 }, { "epoch": 1.4084742612897232, "grad_norm": 0.8172613382339478, "learning_rate": 3.824402957452033e-05, "loss": 1.0239, "step": 7672 }, { "epoch": 1.4086601003530943, "grad_norm": 0.6866876482963562, "learning_rate": 3.822102962757861e-05, "loss": 0.7633, "step": 7673 }, { "epoch": 1.4088459394164654, "grad_norm": 0.8350356221199036, "learning_rate": 3.819803496472574e-05, "loss": 1.117, "step": 7674 }, { "epoch": 1.4090317784798365, "grad_norm": 0.7774361371994019, "learning_rate": 3.8175045587928546e-05, "loss": 0.9895, "step": 7675 }, { "epoch": 1.4092176175432076, "grad_norm": 0.6360732913017273, "learning_rate": 3.8152061499153294e-05, "loss": 0.9731, "step": 7676 }, { "epoch": 1.4094034566065787, "grad_norm": 0.7634127140045166, "learning_rate": 3.8129082700365924e-05, "loss": 1.0542, "step": 7677 }, { "epoch": 1.4095892956699498, "grad_norm": 0.659530758857727, "learning_rate": 3.810610919353179e-05, "loss": 0.9855, "step": 7678 }, { "epoch": 1.409775134733321, "grad_norm": 0.7658821940422058, "learning_rate": 3.808314098061595e-05, "loss": 1.0321, "step": 7679 }, { "epoch": 1.409960973796692, "grad_norm": 0.6265006065368652, "learning_rate": 3.806017806358284e-05, "loss": 0.9206, "step": 7680 }, { "epoch": 1.4101468128600632, "grad_norm": 0.7096154093742371, "learning_rate": 3.8037220444396585e-05, "loss": 1.0367, "step": 7681 }, { "epoch": 1.4103326519234343, 
"grad_norm": 0.8711316585540771, "learning_rate": 3.80142681250208e-05, "loss": 1.2819, "step": 7682 }, { "epoch": 1.4105184909868054, "grad_norm": 0.536262571811676, "learning_rate": 3.799132110741861e-05, "loss": 0.6076, "step": 7683 }, { "epoch": 1.4107043300501765, "grad_norm": 0.6677773594856262, "learning_rate": 3.796837939355275e-05, "loss": 0.9487, "step": 7684 }, { "epoch": 1.4108901691135476, "grad_norm": 0.9733144044876099, "learning_rate": 3.794544298538545e-05, "loss": 1.2239, "step": 7685 }, { "epoch": 1.4110760081769187, "grad_norm": 0.8237594962120056, "learning_rate": 3.792251188487857e-05, "loss": 0.7716, "step": 7686 }, { "epoch": 1.41126184724029, "grad_norm": 0.6180957555770874, "learning_rate": 3.7899586093993376e-05, "loss": 0.9659, "step": 7687 }, { "epoch": 1.411447686303661, "grad_norm": 0.6425844430923462, "learning_rate": 3.787666561469081e-05, "loss": 0.9726, "step": 7688 }, { "epoch": 1.4116335253670322, "grad_norm": 0.7346746921539307, "learning_rate": 3.7853750448931335e-05, "loss": 1.0017, "step": 7689 }, { "epoch": 1.4118193644304033, "grad_norm": 0.6934552192687988, "learning_rate": 3.7830840598674864e-05, "loss": 1.0284, "step": 7690 }, { "epoch": 1.4120052034937745, "grad_norm": 0.6242930889129639, "learning_rate": 3.780793606588099e-05, "loss": 0.5696, "step": 7691 }, { "epoch": 1.4121910425571456, "grad_norm": 0.6234291791915894, "learning_rate": 3.778503685250873e-05, "loss": 0.891, "step": 7692 }, { "epoch": 1.4123768816205167, "grad_norm": 0.6656005382537842, "learning_rate": 3.7762142960516765e-05, "loss": 0.8927, "step": 7693 }, { "epoch": 1.4125627206838878, "grad_norm": 0.7903649806976318, "learning_rate": 3.7739254391863185e-05, "loss": 0.9798, "step": 7694 }, { "epoch": 1.4127485597472589, "grad_norm": 0.6335200071334839, "learning_rate": 3.771637114850576e-05, "loss": 0.8193, "step": 7695 }, { "epoch": 1.41293439881063, "grad_norm": 0.7716221809387207, "learning_rate": 3.7693493232401746e-05, "loss": 1.0676, "step": 
7696 }, { "epoch": 1.413120237874001, "grad_norm": 0.8504044413566589, "learning_rate": 3.7670620645507884e-05, "loss": 1.1039, "step": 7697 }, { "epoch": 1.4133060769373722, "grad_norm": 0.6971676349639893, "learning_rate": 3.764775338978057e-05, "loss": 1.0126, "step": 7698 }, { "epoch": 1.4134919160007433, "grad_norm": 0.6730667948722839, "learning_rate": 3.762489146717565e-05, "loss": 1.0122, "step": 7699 }, { "epoch": 1.4136777550641144, "grad_norm": 0.6071676015853882, "learning_rate": 3.760203487964857e-05, "loss": 0.9624, "step": 7700 }, { "epoch": 1.4138635941274855, "grad_norm": 0.6284236311912537, "learning_rate": 3.7579183629154316e-05, "loss": 0.7958, "step": 7701 }, { "epoch": 1.4140494331908569, "grad_norm": 0.755490779876709, "learning_rate": 3.755633771764738e-05, "loss": 0.9701, "step": 7702 }, { "epoch": 1.4142352722542277, "grad_norm": 0.7259854078292847, "learning_rate": 3.753349714708188e-05, "loss": 0.863, "step": 7703 }, { "epoch": 1.414421111317599, "grad_norm": 0.6856536865234375, "learning_rate": 3.7510661919411336e-05, "loss": 0.8325, "step": 7704 }, { "epoch": 1.41460695038097, "grad_norm": 0.6991071701049805, "learning_rate": 3.7487832036588975e-05, "loss": 1.0775, "step": 7705 }, { "epoch": 1.4147927894443413, "grad_norm": 0.7490563988685608, "learning_rate": 3.7465007500567416e-05, "loss": 0.8755, "step": 7706 }, { "epoch": 1.4149786285077124, "grad_norm": 0.637843906879425, "learning_rate": 3.744218831329895e-05, "loss": 0.9338, "step": 7707 }, { "epoch": 1.4151644675710835, "grad_norm": 0.6465867757797241, "learning_rate": 3.7419374476735295e-05, "loss": 0.9167, "step": 7708 }, { "epoch": 1.4153503066344546, "grad_norm": 0.732671856880188, "learning_rate": 3.739656599282779e-05, "loss": 1.0712, "step": 7709 }, { "epoch": 1.4155361456978257, "grad_norm": 0.7095444202423096, "learning_rate": 3.737376286352733e-05, "loss": 0.5797, "step": 7710 }, { "epoch": 1.4157219847611968, "grad_norm": 0.6976941227912903, "learning_rate": 
3.735096509078425e-05, "loss": 0.9708, "step": 7711 }, { "epoch": 1.415907823824568, "grad_norm": 0.7448633313179016, "learning_rate": 3.7328172676548576e-05, "loss": 0.8581, "step": 7712 }, { "epoch": 1.416093662887939, "grad_norm": 0.705802321434021, "learning_rate": 3.7305385622769695e-05, "loss": 0.8562, "step": 7713 }, { "epoch": 1.4162795019513101, "grad_norm": 0.7125275135040283, "learning_rate": 3.72826039313967e-05, "loss": 0.8646, "step": 7714 }, { "epoch": 1.4164653410146812, "grad_norm": 0.6283400654792786, "learning_rate": 3.7259827604378126e-05, "loss": 0.9295, "step": 7715 }, { "epoch": 1.4166511800780524, "grad_norm": 0.6857552528381348, "learning_rate": 3.723705664366211e-05, "loss": 1.1276, "step": 7716 }, { "epoch": 1.4168370191414235, "grad_norm": 0.7191590666770935, "learning_rate": 3.721429105119631e-05, "loss": 0.9671, "step": 7717 }, { "epoch": 1.4170228582047946, "grad_norm": 0.621395468711853, "learning_rate": 3.7191530828927855e-05, "loss": 0.8513, "step": 7718 }, { "epoch": 1.417208697268166, "grad_norm": 0.6662892699241638, "learning_rate": 3.716877597880356e-05, "loss": 0.7902, "step": 7719 }, { "epoch": 1.4173945363315368, "grad_norm": 0.7611411809921265, "learning_rate": 3.714602650276962e-05, "loss": 1.2273, "step": 7720 }, { "epoch": 1.417580375394908, "grad_norm": 0.6876758337020874, "learning_rate": 3.71232824027719e-05, "loss": 1.0438, "step": 7721 }, { "epoch": 1.417766214458279, "grad_norm": 0.7688302397727966, "learning_rate": 3.71005436807557e-05, "loss": 1.0141, "step": 7722 }, { "epoch": 1.4179520535216503, "grad_norm": 0.5873725414276123, "learning_rate": 3.707781033866595e-05, "loss": 0.6356, "step": 7723 }, { "epoch": 1.4181378925850214, "grad_norm": 0.600767970085144, "learning_rate": 3.7055082378447105e-05, "loss": 0.9036, "step": 7724 }, { "epoch": 1.4183237316483925, "grad_norm": 0.7185942530632019, "learning_rate": 3.703235980204307e-05, "loss": 1.0613, "step": 7725 }, { "epoch": 1.4185095707117636, "grad_norm": 
0.7245935201644897, "learning_rate": 3.7009642611397435e-05, "loss": 0.8785, "step": 7726 }, { "epoch": 1.4186954097751348, "grad_norm": 1.004029631614685, "learning_rate": 3.698693080845317e-05, "loss": 0.9072, "step": 7727 }, { "epoch": 1.4188812488385059, "grad_norm": 0.7187013030052185, "learning_rate": 3.696422439515289e-05, "loss": 0.8739, "step": 7728 }, { "epoch": 1.419067087901877, "grad_norm": 0.7088367342948914, "learning_rate": 3.694152337343875e-05, "loss": 0.8068, "step": 7729 }, { "epoch": 1.419252926965248, "grad_norm": 0.6215872764587402, "learning_rate": 3.6918827745252394e-05, "loss": 0.9045, "step": 7730 }, { "epoch": 1.4194387660286192, "grad_norm": 0.6514158844947815, "learning_rate": 3.689613751253508e-05, "loss": 0.8785, "step": 7731 }, { "epoch": 1.4196246050919903, "grad_norm": 0.7987901568412781, "learning_rate": 3.6873452677227474e-05, "loss": 1.1281, "step": 7732 }, { "epoch": 1.4198104441553614, "grad_norm": 0.7000588178634644, "learning_rate": 3.685077324126992e-05, "loss": 1.0876, "step": 7733 }, { "epoch": 1.4199962832187325, "grad_norm": 0.8126161098480225, "learning_rate": 3.682809920660218e-05, "loss": 0.9953, "step": 7734 }, { "epoch": 1.4201821222821036, "grad_norm": 0.6847161054611206, "learning_rate": 3.680543057516369e-05, "loss": 0.7406, "step": 7735 }, { "epoch": 1.420367961345475, "grad_norm": 0.7768139243125916, "learning_rate": 3.678276734889326e-05, "loss": 0.932, "step": 7736 }, { "epoch": 1.4205538004088458, "grad_norm": 0.8145734071731567, "learning_rate": 3.676010952972938e-05, "loss": 1.161, "step": 7737 }, { "epoch": 1.4207396394722172, "grad_norm": 0.6331034898757935, "learning_rate": 3.673745711961005e-05, "loss": 0.9342, "step": 7738 }, { "epoch": 1.4209254785355883, "grad_norm": 0.7511361241340637, "learning_rate": 3.67148101204727e-05, "loss": 1.1509, "step": 7739 }, { "epoch": 1.4211113175989594, "grad_norm": 0.8429722189903259, "learning_rate": 3.6692168534254444e-05, "loss": 1.0753, "step": 7740 }, { 
"epoch": 1.4212971566623305, "grad_norm": 0.7080596089363098, "learning_rate": 3.666953236289181e-05, "loss": 0.8775, "step": 7741 }, { "epoch": 1.4214829957257016, "grad_norm": 0.7115619778633118, "learning_rate": 3.664690160832096e-05, "loss": 1.0145, "step": 7742 }, { "epoch": 1.4216688347890727, "grad_norm": 0.6405045986175537, "learning_rate": 3.6624276272477565e-05, "loss": 1.0048, "step": 7743 }, { "epoch": 1.4218546738524438, "grad_norm": 0.691527247428894, "learning_rate": 3.660165635729675e-05, "loss": 1.0007, "step": 7744 }, { "epoch": 1.422040512915815, "grad_norm": 0.6654130220413208, "learning_rate": 3.6579041864713305e-05, "loss": 0.8106, "step": 7745 }, { "epoch": 1.422226351979186, "grad_norm": 0.8092135787010193, "learning_rate": 3.6556432796661465e-05, "loss": 0.8508, "step": 7746 }, { "epoch": 1.4224121910425571, "grad_norm": 0.7909111976623535, "learning_rate": 3.6533829155075095e-05, "loss": 0.9582, "step": 7747 }, { "epoch": 1.4225980301059282, "grad_norm": 0.7206133604049683, "learning_rate": 3.651123094188744e-05, "loss": 0.7781, "step": 7748 }, { "epoch": 1.4227838691692993, "grad_norm": 0.665252149105072, "learning_rate": 3.6488638159031465e-05, "loss": 0.8502, "step": 7749 }, { "epoch": 1.4229697082326704, "grad_norm": 0.6059937477111816, "learning_rate": 3.6466050808439486e-05, "loss": 1.0482, "step": 7750 }, { "epoch": 1.4231555472960415, "grad_norm": 0.9111020565032959, "learning_rate": 3.644346889204351e-05, "loss": 0.9285, "step": 7751 }, { "epoch": 1.4233413863594127, "grad_norm": 0.7593806385993958, "learning_rate": 3.642089241177503e-05, "loss": 0.8047, "step": 7752 }, { "epoch": 1.423527225422784, "grad_norm": 0.5961986780166626, "learning_rate": 3.6398321369565006e-05, "loss": 0.8097, "step": 7753 }, { "epoch": 1.4237130644861549, "grad_norm": 0.6476420164108276, "learning_rate": 3.637575576734404e-05, "loss": 0.9329, "step": 7754 }, { "epoch": 1.4238989035495262, "grad_norm": 0.6606789231300354, "learning_rate": 
3.635319560704217e-05, "loss": 0.9704, "step": 7755 }, { "epoch": 1.4240847426128973, "grad_norm": 0.7609673142433167, "learning_rate": 3.6330640890589065e-05, "loss": 1.0531, "step": 7756 }, { "epoch": 1.4242705816762684, "grad_norm": 0.8701117634773254, "learning_rate": 3.630809161991382e-05, "loss": 1.4154, "step": 7757 }, { "epoch": 1.4244564207396395, "grad_norm": 0.653571605682373, "learning_rate": 3.6285547796945164e-05, "loss": 0.9824, "step": 7758 }, { "epoch": 1.4246422598030106, "grad_norm": 0.7411727905273438, "learning_rate": 3.626300942361131e-05, "loss": 0.8998, "step": 7759 }, { "epoch": 1.4248280988663817, "grad_norm": 0.7065626978874207, "learning_rate": 3.624047650183999e-05, "loss": 0.875, "step": 7760 }, { "epoch": 1.4250139379297528, "grad_norm": 0.7236730456352234, "learning_rate": 3.6217949033558576e-05, "loss": 1.0839, "step": 7761 }, { "epoch": 1.425199776993124, "grad_norm": 0.8109464049339294, "learning_rate": 3.61954270206938e-05, "loss": 0.9164, "step": 7762 }, { "epoch": 1.425385616056495, "grad_norm": 0.8932875394821167, "learning_rate": 3.617291046517206e-05, "loss": 0.9789, "step": 7763 }, { "epoch": 1.4255714551198662, "grad_norm": 0.8092448115348816, "learning_rate": 3.615039936891922e-05, "loss": 1.1064, "step": 7764 }, { "epoch": 1.4257572941832373, "grad_norm": 0.726791262626648, "learning_rate": 3.612789373386072e-05, "loss": 0.8891, "step": 7765 }, { "epoch": 1.4259431332466084, "grad_norm": 0.5402032136917114, "learning_rate": 3.610539356192154e-05, "loss": 0.825, "step": 7766 }, { "epoch": 1.4261289723099795, "grad_norm": 0.5635693073272705, "learning_rate": 3.6082898855026114e-05, "loss": 0.7979, "step": 7767 }, { "epoch": 1.4263148113733508, "grad_norm": 0.7631365060806274, "learning_rate": 3.606040961509853e-05, "loss": 0.8993, "step": 7768 }, { "epoch": 1.4265006504367217, "grad_norm": 0.8184545040130615, "learning_rate": 3.603792584406227e-05, "loss": 1.0553, "step": 7769 }, { "epoch": 1.426686489500093, "grad_norm": 
0.7019795775413513, "learning_rate": 3.601544754384049e-05, "loss": 0.9577, "step": 7770 }, { "epoch": 1.426872328563464, "grad_norm": 0.8127749562263489, "learning_rate": 3.599297471635572e-05, "loss": 1.1451, "step": 7771 }, { "epoch": 1.4270581676268352, "grad_norm": 0.795525312423706, "learning_rate": 3.597050736353017e-05, "loss": 0.8848, "step": 7772 }, { "epoch": 1.4272440066902063, "grad_norm": 0.6658499240875244, "learning_rate": 3.594804548728551e-05, "loss": 0.962, "step": 7773 }, { "epoch": 1.4274298457535775, "grad_norm": 0.7584779858589172, "learning_rate": 3.592558908954295e-05, "loss": 0.9173, "step": 7774 }, { "epoch": 1.4276156848169486, "grad_norm": 0.665018618106842, "learning_rate": 3.590313817222328e-05, "loss": 0.8334, "step": 7775 }, { "epoch": 1.4278015238803197, "grad_norm": 0.7542497515678406, "learning_rate": 3.588069273724669e-05, "loss": 0.9799, "step": 7776 }, { "epoch": 1.4279873629436908, "grad_norm": 0.7203447818756104, "learning_rate": 3.5858252786533074e-05, "loss": 1.0654, "step": 7777 }, { "epoch": 1.4281732020070619, "grad_norm": 0.6802030801773071, "learning_rate": 3.583581832200169e-05, "loss": 0.9758, "step": 7778 }, { "epoch": 1.428359041070433, "grad_norm": 0.7026258707046509, "learning_rate": 3.581338934557143e-05, "loss": 0.9786, "step": 7779 }, { "epoch": 1.428544880133804, "grad_norm": 0.802281379699707, "learning_rate": 3.5790965859160766e-05, "loss": 0.967, "step": 7780 }, { "epoch": 1.4287307191971752, "grad_norm": 0.7380053400993347, "learning_rate": 3.576854786468751e-05, "loss": 0.9983, "step": 7781 }, { "epoch": 1.4289165582605463, "grad_norm": 0.642246663570404, "learning_rate": 3.574613536406921e-05, "loss": 0.8811, "step": 7782 }, { "epoch": 1.4291023973239174, "grad_norm": 0.6161243915557861, "learning_rate": 3.57237283592228e-05, "loss": 0.7155, "step": 7783 }, { "epoch": 1.4292882363872885, "grad_norm": 0.7315623760223389, "learning_rate": 3.570132685206485e-05, "loss": 1.0834, "step": 7784 }, { "epoch": 
1.4294740754506599, "grad_norm": 0.634521484375, "learning_rate": 3.567893084451135e-05, "loss": 0.7328, "step": 7785 }, { "epoch": 1.4296599145140307, "grad_norm": 0.7404763102531433, "learning_rate": 3.5656540338477906e-05, "loss": 0.9951, "step": 7786 }, { "epoch": 1.429845753577402, "grad_norm": 0.7833670973777771, "learning_rate": 3.5634155335879635e-05, "loss": 0.9775, "step": 7787 }, { "epoch": 1.430031592640773, "grad_norm": 0.7117665410041809, "learning_rate": 3.561177583863121e-05, "loss": 0.8914, "step": 7788 }, { "epoch": 1.4302174317041443, "grad_norm": 0.7110545039176941, "learning_rate": 3.558940184864672e-05, "loss": 1.0174, "step": 7789 }, { "epoch": 1.4304032707675154, "grad_norm": 0.7871912121772766, "learning_rate": 3.556703336783991e-05, "loss": 0.8585, "step": 7790 }, { "epoch": 1.4305891098308865, "grad_norm": 0.5926689505577087, "learning_rate": 3.5544670398124015e-05, "loss": 0.6127, "step": 7791 }, { "epoch": 1.4307749488942576, "grad_norm": 0.7476575374603271, "learning_rate": 3.552231294141174e-05, "loss": 1.1246, "step": 7792 }, { "epoch": 1.4309607879576287, "grad_norm": 0.9882261157035828, "learning_rate": 3.549996099961539e-05, "loss": 0.9569, "step": 7793 }, { "epoch": 1.4311466270209998, "grad_norm": 0.7017543911933899, "learning_rate": 3.547761457464681e-05, "loss": 0.6167, "step": 7794 }, { "epoch": 1.431332466084371, "grad_norm": 0.6456220149993896, "learning_rate": 3.545527366841728e-05, "loss": 1.0104, "step": 7795 }, { "epoch": 1.431518305147742, "grad_norm": 0.8840920329093933, "learning_rate": 3.5432938282837726e-05, "loss": 1.032, "step": 7796 }, { "epoch": 1.4317041442111131, "grad_norm": 0.5940844416618347, "learning_rate": 3.541060841981847e-05, "loss": 0.8061, "step": 7797 }, { "epoch": 1.4318899832744842, "grad_norm": 0.7961482405662537, "learning_rate": 3.5388284081269495e-05, "loss": 1.0628, "step": 7798 }, { "epoch": 1.4320758223378554, "grad_norm": 0.6357424259185791, "learning_rate": 3.536596526910021e-05, 
"loss": 0.7603, "step": 7799 }, { "epoch": 1.4322616614012265, "grad_norm": 0.6908395886421204, "learning_rate": 3.534365198521961e-05, "loss": 0.9484, "step": 7800 }, { "epoch": 1.4324475004645976, "grad_norm": 0.7572526931762695, "learning_rate": 3.5321344231536216e-05, "loss": 0.9961, "step": 7801 }, { "epoch": 1.432633339527969, "grad_norm": 0.7261570692062378, "learning_rate": 3.5299042009958014e-05, "loss": 0.833, "step": 7802 }, { "epoch": 1.4328191785913398, "grad_norm": 0.6793763041496277, "learning_rate": 3.527674532239258e-05, "loss": 0.8265, "step": 7803 }, { "epoch": 1.433005017654711, "grad_norm": 0.6620837450027466, "learning_rate": 3.525445417074701e-05, "loss": 0.7635, "step": 7804 }, { "epoch": 1.4331908567180822, "grad_norm": 0.7269986271858215, "learning_rate": 3.523216855692795e-05, "loss": 0.9392, "step": 7805 }, { "epoch": 1.4333766957814533, "grad_norm": 0.6936744451522827, "learning_rate": 3.5209888482841445e-05, "loss": 0.7935, "step": 7806 }, { "epoch": 1.4335625348448244, "grad_norm": 0.6548713445663452, "learning_rate": 3.518761395039322e-05, "loss": 0.9881, "step": 7807 }, { "epoch": 1.4337483739081955, "grad_norm": 0.7093619704246521, "learning_rate": 3.516534496148848e-05, "loss": 1.0224, "step": 7808 }, { "epoch": 1.4339342129715666, "grad_norm": 0.7175012826919556, "learning_rate": 3.514308151803187e-05, "loss": 0.8043, "step": 7809 }, { "epoch": 1.4341200520349378, "grad_norm": 0.8724030256271362, "learning_rate": 3.5120823621927715e-05, "loss": 1.0353, "step": 7810 }, { "epoch": 1.4343058910983089, "grad_norm": 0.6106569766998291, "learning_rate": 3.5098571275079706e-05, "loss": 0.7952, "step": 7811 }, { "epoch": 1.43449173016168, "grad_norm": 0.6657244563102722, "learning_rate": 3.50763244793912e-05, "loss": 0.9032, "step": 7812 }, { "epoch": 1.434677569225051, "grad_norm": 0.7090873122215271, "learning_rate": 3.5054083236764934e-05, "loss": 0.952, "step": 7813 }, { "epoch": 1.4348634082884222, "grad_norm": 0.7857263684272766, 
"learning_rate": 3.5031847549103305e-05, "loss": 1.0373, "step": 7814 }, { "epoch": 1.4350492473517933, "grad_norm": 0.7097312808036804, "learning_rate": 3.500961741830821e-05, "loss": 0.9285, "step": 7815 }, { "epoch": 1.4352350864151644, "grad_norm": 0.7621490955352783, "learning_rate": 3.498739284628095e-05, "loss": 1.0428, "step": 7816 }, { "epoch": 1.4354209254785357, "grad_norm": 0.6671808958053589, "learning_rate": 3.4965173834922505e-05, "loss": 0.8277, "step": 7817 }, { "epoch": 1.4356067645419066, "grad_norm": 0.889748215675354, "learning_rate": 3.4942960386133296e-05, "loss": 0.9986, "step": 7818 }, { "epoch": 1.435792603605278, "grad_norm": 0.7396422028541565, "learning_rate": 3.492075250181333e-05, "loss": 0.9544, "step": 7819 }, { "epoch": 1.4359784426686488, "grad_norm": 0.7094883918762207, "learning_rate": 3.489855018386201e-05, "loss": 1.0928, "step": 7820 }, { "epoch": 1.4361642817320202, "grad_norm": 0.7435691952705383, "learning_rate": 3.487635343417841e-05, "loss": 0.9684, "step": 7821 }, { "epoch": 1.4363501207953913, "grad_norm": 0.6778486967086792, "learning_rate": 3.485416225466107e-05, "loss": 0.9725, "step": 7822 }, { "epoch": 1.4365359598587624, "grad_norm": 0.635127067565918, "learning_rate": 3.483197664720801e-05, "loss": 0.7622, "step": 7823 }, { "epoch": 1.4367217989221335, "grad_norm": 0.7089655995368958, "learning_rate": 3.4809796613716864e-05, "loss": 1.0226, "step": 7824 }, { "epoch": 1.4369076379855046, "grad_norm": 0.8211172819137573, "learning_rate": 3.478762215608468e-05, "loss": 1.0076, "step": 7825 }, { "epoch": 1.4370934770488757, "grad_norm": 0.6057694554328918, "learning_rate": 3.4765453276208136e-05, "loss": 0.8723, "step": 7826 }, { "epoch": 1.4372793161122468, "grad_norm": 0.609369695186615, "learning_rate": 3.474328997598334e-05, "loss": 0.9602, "step": 7827 }, { "epoch": 1.437465155175618, "grad_norm": 0.7126015424728394, "learning_rate": 3.4721132257305986e-05, "loss": 1.2145, "step": 7828 }, { "epoch": 
1.437650994238989, "grad_norm": 0.7071675658226013, "learning_rate": 3.469898012207131e-05, "loss": 1.0776, "step": 7829 }, { "epoch": 1.4378368333023601, "grad_norm": 0.6722445487976074, "learning_rate": 3.467683357217397e-05, "loss": 1.0917, "step": 7830 }, { "epoch": 1.4380226723657312, "grad_norm": 0.6246117353439331, "learning_rate": 3.465469260950823e-05, "loss": 1.1885, "step": 7831 }, { "epoch": 1.4382085114291023, "grad_norm": 0.6887637376785278, "learning_rate": 3.4632557235967875e-05, "loss": 0.9325, "step": 7832 }, { "epoch": 1.4383943504924734, "grad_norm": 0.6999205350875854, "learning_rate": 3.461042745344619e-05, "loss": 0.9148, "step": 7833 }, { "epoch": 1.4385801895558448, "grad_norm": 0.6207687854766846, "learning_rate": 3.4588303263835954e-05, "loss": 0.7277, "step": 7834 }, { "epoch": 1.4387660286192157, "grad_norm": 0.6546527147293091, "learning_rate": 3.456618466902951e-05, "loss": 0.8376, "step": 7835 }, { "epoch": 1.438951867682587, "grad_norm": 0.7431588768959045, "learning_rate": 3.4544071670918745e-05, "loss": 0.967, "step": 7836 }, { "epoch": 1.4391377067459579, "grad_norm": 0.8065693378448486, "learning_rate": 3.452196427139496e-05, "loss": 0.9605, "step": 7837 }, { "epoch": 1.4393235458093292, "grad_norm": 2.3515830039978027, "learning_rate": 3.449986247234912e-05, "loss": 1.3481, "step": 7838 }, { "epoch": 1.4395093848727003, "grad_norm": 0.7496092319488525, "learning_rate": 3.447776627567157e-05, "loss": 1.0978, "step": 7839 }, { "epoch": 1.4396952239360714, "grad_norm": 0.7271265983581543, "learning_rate": 3.445567568325233e-05, "loss": 0.9726, "step": 7840 }, { "epoch": 1.4398810629994425, "grad_norm": 0.5585505962371826, "learning_rate": 3.4433590696980764e-05, "loss": 0.6408, "step": 7841 }, { "epoch": 1.4400669020628136, "grad_norm": 0.9318434000015259, "learning_rate": 3.44115113187459e-05, "loss": 1.1037, "step": 7842 }, { "epoch": 1.4402527411261847, "grad_norm": 0.6693130731582642, "learning_rate": 3.4389437550436264e-05, 
"loss": 0.9345, "step": 7843 }, { "epoch": 1.4404385801895558, "grad_norm": 0.6681878566741943, "learning_rate": 3.43673693939398e-05, "loss": 1.0068, "step": 7844 }, { "epoch": 1.440624419252927, "grad_norm": 2.8995208740234375, "learning_rate": 3.434530685114411e-05, "loss": 1.2537, "step": 7845 }, { "epoch": 1.440810258316298, "grad_norm": 0.6880825161933899, "learning_rate": 3.43232499239362e-05, "loss": 1.0368, "step": 7846 }, { "epoch": 1.4409960973796692, "grad_norm": 0.8894131183624268, "learning_rate": 3.430119861420267e-05, "loss": 1.0654, "step": 7847 }, { "epoch": 1.4411819364430403, "grad_norm": 0.7213008403778076, "learning_rate": 3.427915292382961e-05, "loss": 0.9789, "step": 7848 }, { "epoch": 1.4413677755064114, "grad_norm": 1.57517671585083, "learning_rate": 3.425711285470264e-05, "loss": 1.6136, "step": 7849 }, { "epoch": 1.4415536145697825, "grad_norm": 0.7459681034088135, "learning_rate": 3.4235078408706936e-05, "loss": 1.1973, "step": 7850 }, { "epoch": 1.4417394536331538, "grad_norm": 0.646812379360199, "learning_rate": 3.421304958772708e-05, "loss": 0.932, "step": 7851 }, { "epoch": 1.4419252926965247, "grad_norm": 0.8054854273796082, "learning_rate": 3.419102639364731e-05, "loss": 0.9733, "step": 7852 }, { "epoch": 1.442111131759896, "grad_norm": 0.705011248588562, "learning_rate": 3.416900882835125e-05, "loss": 1.0736, "step": 7853 }, { "epoch": 1.4422969708232671, "grad_norm": 0.6439553499221802, "learning_rate": 3.4146996893722175e-05, "loss": 0.987, "step": 7854 }, { "epoch": 1.4424828098866382, "grad_norm": 0.6906138062477112, "learning_rate": 3.4124990591642744e-05, "loss": 0.8643, "step": 7855 }, { "epoch": 1.4426686489500093, "grad_norm": 0.7618701457977295, "learning_rate": 3.410298992399524e-05, "loss": 1.0197, "step": 7856 }, { "epoch": 1.4428544880133805, "grad_norm": 0.6181477904319763, "learning_rate": 3.408099489266148e-05, "loss": 0.8519, "step": 7857 }, { "epoch": 1.4430403270767516, "grad_norm": 0.6817232370376587, 
"learning_rate": 3.405900549952266e-05, "loss": 1.0308, "step": 7858 }, { "epoch": 1.4432261661401227, "grad_norm": 0.7186017632484436, "learning_rate": 3.403702174645964e-05, "loss": 1.0243, "step": 7859 }, { "epoch": 1.4434120052034938, "grad_norm": 0.6884559988975525, "learning_rate": 3.401504363535268e-05, "loss": 0.9924, "step": 7860 }, { "epoch": 1.4435978442668649, "grad_norm": 0.7954242825508118, "learning_rate": 3.3993071168081666e-05, "loss": 1.1681, "step": 7861 }, { "epoch": 1.443783683330236, "grad_norm": 0.5953904986381531, "learning_rate": 3.397110434652593e-05, "loss": 0.8374, "step": 7862 }, { "epoch": 1.443969522393607, "grad_norm": 0.6842502355575562, "learning_rate": 3.3949143172564336e-05, "loss": 0.8099, "step": 7863 }, { "epoch": 1.4441553614569782, "grad_norm": 0.7532383799552917, "learning_rate": 3.3927187648075324e-05, "loss": 1.1474, "step": 7864 }, { "epoch": 1.4443412005203493, "grad_norm": 0.7515235543251038, "learning_rate": 3.390523777493672e-05, "loss": 1.0207, "step": 7865 }, { "epoch": 1.4445270395837204, "grad_norm": 0.6641031503677368, "learning_rate": 3.3883293555026005e-05, "loss": 0.8668, "step": 7866 }, { "epoch": 1.4447128786470915, "grad_norm": 0.951924741268158, "learning_rate": 3.386135499022006e-05, "loss": 1.0705, "step": 7867 }, { "epoch": 1.4448987177104629, "grad_norm": 0.6455573439598083, "learning_rate": 3.38394220823954e-05, "loss": 0.946, "step": 7868 }, { "epoch": 1.4450845567738337, "grad_norm": 0.6811406016349792, "learning_rate": 3.3817494833427934e-05, "loss": 0.9002, "step": 7869 }, { "epoch": 1.445270395837205, "grad_norm": 0.6838234066963196, "learning_rate": 3.3795573245193167e-05, "loss": 0.8727, "step": 7870 }, { "epoch": 1.4454562349005762, "grad_norm": 0.9390320777893066, "learning_rate": 3.3773657319566145e-05, "loss": 1.2102, "step": 7871 }, { "epoch": 1.4456420739639473, "grad_norm": 0.6732348799705505, "learning_rate": 3.375174705842131e-05, "loss": 0.873, "step": 7872 }, { "epoch": 
1.4458279130273184, "grad_norm": 0.5763305425643921, "learning_rate": 3.3729842463632766e-05, "loss": 0.9149, "step": 7873 }, { "epoch": 1.4460137520906895, "grad_norm": 0.7089604139328003, "learning_rate": 3.3707943537073994e-05, "loss": 0.929, "step": 7874 }, { "epoch": 1.4461995911540606, "grad_norm": 0.6530745625495911, "learning_rate": 3.368605028061808e-05, "loss": 1.0375, "step": 7875 }, { "epoch": 1.4463854302174317, "grad_norm": 0.726687490940094, "learning_rate": 3.366416269613761e-05, "loss": 0.8975, "step": 7876 }, { "epoch": 1.4465712692808028, "grad_norm": 0.7921562194824219, "learning_rate": 3.364228078550468e-05, "loss": 1.1033, "step": 7877 }, { "epoch": 1.446757108344174, "grad_norm": 1.9331955909729004, "learning_rate": 3.362040455059092e-05, "loss": 1.4329, "step": 7878 }, { "epoch": 1.446942947407545, "grad_norm": 0.7026411294937134, "learning_rate": 3.359853399326739e-05, "loss": 1.1135, "step": 7879 }, { "epoch": 1.4471287864709161, "grad_norm": 0.7438810467720032, "learning_rate": 3.35766691154048e-05, "loss": 0.9522, "step": 7880 }, { "epoch": 1.4473146255342872, "grad_norm": 0.7092804908752441, "learning_rate": 3.355480991887322e-05, "loss": 1.0578, "step": 7881 }, { "epoch": 1.4475004645976584, "grad_norm": 0.6761428713798523, "learning_rate": 3.353295640554239e-05, "loss": 0.9218, "step": 7882 }, { "epoch": 1.4476863036610297, "grad_norm": 0.6848258376121521, "learning_rate": 3.351110857728143e-05, "loss": 0.9947, "step": 7883 }, { "epoch": 1.4478721427244006, "grad_norm": 0.8061132431030273, "learning_rate": 3.348926643595904e-05, "loss": 1.1676, "step": 7884 }, { "epoch": 1.448057981787772, "grad_norm": 0.7291356325149536, "learning_rate": 3.346742998344348e-05, "loss": 1.153, "step": 7885 }, { "epoch": 1.4482438208511428, "grad_norm": 0.8108109831809998, "learning_rate": 3.34455992216024e-05, "loss": 1.1443, "step": 7886 }, { "epoch": 1.4484296599145141, "grad_norm": 1.0187431573867798, "learning_rate": 3.342377415230309e-05, "loss": 
0.9213, "step": 7887 }, { "epoch": 1.4486154989778852, "grad_norm": 0.819898247718811, "learning_rate": 3.340195477741224e-05, "loss": 1.1785, "step": 7888 }, { "epoch": 1.4488013380412563, "grad_norm": 0.6514377593994141, "learning_rate": 3.338014109879612e-05, "loss": 0.916, "step": 7889 }, { "epoch": 1.4489871771046274, "grad_norm": 0.6867848038673401, "learning_rate": 3.335833311832056e-05, "loss": 0.9396, "step": 7890 }, { "epoch": 1.4491730161679985, "grad_norm": 0.6328677535057068, "learning_rate": 3.333653083785077e-05, "loss": 0.8447, "step": 7891 }, { "epoch": 1.4493588552313696, "grad_norm": 0.6908021569252014, "learning_rate": 3.3314734259251565e-05, "loss": 1.023, "step": 7892 }, { "epoch": 1.4495446942947408, "grad_norm": 0.6348336338996887, "learning_rate": 3.329294338438728e-05, "loss": 0.9131, "step": 7893 }, { "epoch": 1.4497305333581119, "grad_norm": 1.490304946899414, "learning_rate": 3.3271158215121736e-05, "loss": 1.4871, "step": 7894 }, { "epoch": 1.449916372421483, "grad_norm": 0.7178076505661011, "learning_rate": 3.3249378753318225e-05, "loss": 0.9315, "step": 7895 }, { "epoch": 1.450102211484854, "grad_norm": 0.6275186538696289, "learning_rate": 3.3227605000839654e-05, "loss": 1.0205, "step": 7896 }, { "epoch": 1.4502880505482252, "grad_norm": 0.5495182275772095, "learning_rate": 3.3205836959548296e-05, "loss": 0.6146, "step": 7897 }, { "epoch": 1.4504738896115963, "grad_norm": 0.844357967376709, "learning_rate": 3.3184074631306085e-05, "loss": 0.6343, "step": 7898 }, { "epoch": 1.4506597286749674, "grad_norm": 0.7961918115615845, "learning_rate": 3.316231801797439e-05, "loss": 0.8678, "step": 7899 }, { "epoch": 1.4508455677383387, "grad_norm": 0.7114526629447937, "learning_rate": 3.314056712141407e-05, "loss": 1.0124, "step": 7900 }, { "epoch": 1.4510314068017096, "grad_norm": 0.7751607298851013, "learning_rate": 3.311882194348558e-05, "loss": 0.984, "step": 7901 }, { "epoch": 1.451217245865081, "grad_norm": 0.6312512159347534, 
"learning_rate": 3.309708248604876e-05, "loss": 0.8796, "step": 7902 }, { "epoch": 1.4514030849284518, "grad_norm": 0.6945473551750183, "learning_rate": 3.30753487509631e-05, "loss": 0.9218, "step": 7903 }, { "epoch": 1.4515889239918232, "grad_norm": 0.7502307295799255, "learning_rate": 3.3053620740087475e-05, "loss": 1.1365, "step": 7904 }, { "epoch": 1.4517747630551943, "grad_norm": 0.6966648697853088, "learning_rate": 3.3031898455280364e-05, "loss": 0.9969, "step": 7905 }, { "epoch": 1.4519606021185654, "grad_norm": 0.7188398838043213, "learning_rate": 3.3010181898399715e-05, "loss": 1.0415, "step": 7906 }, { "epoch": 1.4521464411819365, "grad_norm": 1.1256226301193237, "learning_rate": 3.298847107130298e-05, "loss": 1.0763, "step": 7907 }, { "epoch": 1.4523322802453076, "grad_norm": 0.7524394392967224, "learning_rate": 3.296676597584719e-05, "loss": 0.8179, "step": 7908 }, { "epoch": 1.4525181193086787, "grad_norm": 0.6933947801589966, "learning_rate": 3.294506661388875e-05, "loss": 1.0581, "step": 7909 }, { "epoch": 1.4527039583720498, "grad_norm": 0.7244120240211487, "learning_rate": 3.292337298728372e-05, "loss": 0.84, "step": 7910 }, { "epoch": 1.452889797435421, "grad_norm": 0.6906845569610596, "learning_rate": 3.2901685097887535e-05, "loss": 0.9544, "step": 7911 }, { "epoch": 1.453075636498792, "grad_norm": 0.7629197239875793, "learning_rate": 3.288000294755524e-05, "loss": 0.9505, "step": 7912 }, { "epoch": 1.4532614755621631, "grad_norm": 0.6772804260253906, "learning_rate": 3.285832653814139e-05, "loss": 0.9279, "step": 7913 }, { "epoch": 1.4534473146255342, "grad_norm": 0.691500723361969, "learning_rate": 3.2836655871499955e-05, "loss": 0.6032, "step": 7914 }, { "epoch": 1.4536331536889053, "grad_norm": 0.7557622194290161, "learning_rate": 3.281499094948454e-05, "loss": 0.8728, "step": 7915 }, { "epoch": 1.4538189927522764, "grad_norm": 0.7844099998474121, "learning_rate": 3.2793331773948113e-05, "loss": 1.0298, "step": 7916 }, { "epoch": 
1.4540048318156478, "grad_norm": 1.0272523164749146, "learning_rate": 3.277167834674331e-05, "loss": 1.2326, "step": 7917 }, { "epoch": 1.4541906708790187, "grad_norm": 0.6383036971092224, "learning_rate": 3.275003066972212e-05, "loss": 0.838, "step": 7918 }, { "epoch": 1.45437650994239, "grad_norm": 0.7620444893836975, "learning_rate": 3.272838874473616e-05, "loss": 0.9757, "step": 7919 }, { "epoch": 1.454562349005761, "grad_norm": 0.7140588760375977, "learning_rate": 3.27067525736365e-05, "loss": 1.0327, "step": 7920 }, { "epoch": 1.4547481880691322, "grad_norm": 0.6811936497688293, "learning_rate": 3.268512215827373e-05, "loss": 0.9918, "step": 7921 }, { "epoch": 1.4549340271325033, "grad_norm": 0.7551881074905396, "learning_rate": 3.266349750049799e-05, "loss": 1.1241, "step": 7922 }, { "epoch": 1.4551198661958744, "grad_norm": 0.6850900650024414, "learning_rate": 3.2641878602158795e-05, "loss": 1.0948, "step": 7923 }, { "epoch": 1.4553057052592455, "grad_norm": 0.6953625679016113, "learning_rate": 3.262026546510535e-05, "loss": 1.1129, "step": 7924 }, { "epoch": 1.4554915443226166, "grad_norm": 0.5916890501976013, "learning_rate": 3.259865809118618e-05, "loss": 0.9081, "step": 7925 }, { "epoch": 1.4556773833859877, "grad_norm": 0.6639631390571594, "learning_rate": 3.257705648224947e-05, "loss": 1.0106, "step": 7926 }, { "epoch": 1.4558632224493588, "grad_norm": 0.853246808052063, "learning_rate": 3.255546064014287e-05, "loss": 1.0606, "step": 7927 }, { "epoch": 1.45604906151273, "grad_norm": 0.6534708142280579, "learning_rate": 3.253387056671344e-05, "loss": 0.7146, "step": 7928 }, { "epoch": 1.456234900576101, "grad_norm": 0.807653546333313, "learning_rate": 3.251228626380792e-05, "loss": 0.8833, "step": 7929 }, { "epoch": 1.4564207396394722, "grad_norm": 0.7415690422058105, "learning_rate": 3.249070773327239e-05, "loss": 1.0653, "step": 7930 }, { "epoch": 1.4566065787028433, "grad_norm": 0.6846359372138977, "learning_rate": 3.246913497695255e-05, "loss": 
1.0067, "step": 7931 }, { "epoch": 1.4567924177662144, "grad_norm": 0.7008286118507385, "learning_rate": 3.2447567996693526e-05, "loss": 1.1567, "step": 7932 }, { "epoch": 1.4569782568295855, "grad_norm": 0.6331776976585388, "learning_rate": 3.242600679434001e-05, "loss": 0.7786, "step": 7933 }, { "epoch": 1.4571640958929568, "grad_norm": 0.8562239408493042, "learning_rate": 3.240445137173619e-05, "loss": 0.8782, "step": 7934 }, { "epoch": 1.4573499349563277, "grad_norm": 0.6675928235054016, "learning_rate": 3.2382901730725756e-05, "loss": 0.968, "step": 7935 }, { "epoch": 1.457535774019699, "grad_norm": 0.7753133773803711, "learning_rate": 3.2361357873151846e-05, "loss": 0.8592, "step": 7936 }, { "epoch": 1.4577216130830701, "grad_norm": 0.7975282669067383, "learning_rate": 3.233981980085719e-05, "loss": 1.0158, "step": 7937 }, { "epoch": 1.4579074521464412, "grad_norm": 0.8110864758491516, "learning_rate": 3.231828751568401e-05, "loss": 1.0904, "step": 7938 }, { "epoch": 1.4580932912098123, "grad_norm": 0.5796823501586914, "learning_rate": 3.2296761019473944e-05, "loss": 0.8856, "step": 7939 }, { "epoch": 1.4582791302731835, "grad_norm": 0.7160946130752563, "learning_rate": 3.2275240314068224e-05, "loss": 0.9291, "step": 7940 }, { "epoch": 1.4584649693365546, "grad_norm": 0.6326502561569214, "learning_rate": 3.225372540130762e-05, "loss": 1.0077, "step": 7941 }, { "epoch": 1.4586508083999257, "grad_norm": 0.7014022469520569, "learning_rate": 3.2232216283032254e-05, "loss": 0.9311, "step": 7942 }, { "epoch": 1.4588366474632968, "grad_norm": 0.8314329981803894, "learning_rate": 3.221071296108195e-05, "loss": 0.9909, "step": 7943 }, { "epoch": 1.4590224865266679, "grad_norm": 0.7300158143043518, "learning_rate": 3.218921543729583e-05, "loss": 0.8944, "step": 7944 }, { "epoch": 1.459208325590039, "grad_norm": 0.6036564111709595, "learning_rate": 3.21677237135127e-05, "loss": 0.9688, "step": 7945 }, { "epoch": 1.45939416465341, "grad_norm": 0.8187325596809387, 
"learning_rate": 3.214623779157074e-05, "loss": 0.9268, "step": 7946 }, { "epoch": 1.4595800037167812, "grad_norm": 0.7486228346824646, "learning_rate": 3.212475767330771e-05, "loss": 0.9888, "step": 7947 }, { "epoch": 1.4597658427801523, "grad_norm": 0.6301973462104797, "learning_rate": 3.210328336056089e-05, "loss": 0.64, "step": 7948 }, { "epoch": 1.4599516818435236, "grad_norm": 0.6302767395973206, "learning_rate": 3.208181485516696e-05, "loss": 0.9719, "step": 7949 }, { "epoch": 1.4601375209068945, "grad_norm": 0.6822143197059631, "learning_rate": 3.206035215896218e-05, "loss": 1.0381, "step": 7950 }, { "epoch": 1.4603233599702659, "grad_norm": 0.7191839814186096, "learning_rate": 3.2038895273782335e-05, "loss": 1.1134, "step": 7951 }, { "epoch": 1.4605091990336367, "grad_norm": 0.8182570934295654, "learning_rate": 3.201744420146269e-05, "loss": 0.8333, "step": 7952 }, { "epoch": 1.460695038097008, "grad_norm": 0.8387598395347595, "learning_rate": 3.1995998943837935e-05, "loss": 1.0758, "step": 7953 }, { "epoch": 1.4608808771603792, "grad_norm": 0.6082673668861389, "learning_rate": 3.1974559502742364e-05, "loss": 0.7804, "step": 7954 }, { "epoch": 1.4610667162237503, "grad_norm": 0.6701902151107788, "learning_rate": 3.195312588000977e-05, "loss": 1.039, "step": 7955 }, { "epoch": 1.4612525552871214, "grad_norm": 0.7884292602539062, "learning_rate": 3.193169807747336e-05, "loss": 1.0115, "step": 7956 }, { "epoch": 1.4614383943504925, "grad_norm": 0.725053071975708, "learning_rate": 3.1910276096965966e-05, "loss": 1.0554, "step": 7957 }, { "epoch": 1.4616242334138636, "grad_norm": 0.8168944120407104, "learning_rate": 3.188885994031978e-05, "loss": 0.9558, "step": 7958 }, { "epoch": 1.4618100724772347, "grad_norm": 0.726453959941864, "learning_rate": 3.186744960936665e-05, "loss": 0.9404, "step": 7959 }, { "epoch": 1.4619959115406058, "grad_norm": 0.7123615741729736, "learning_rate": 3.184604510593777e-05, "loss": 0.8578, "step": 7960 }, { "epoch": 
1.462181750603977, "grad_norm": 0.6957084536552429, "learning_rate": 3.1824646431863956e-05, "loss": 0.9583, "step": 7961 }, { "epoch": 1.462367589667348, "grad_norm": 0.7854464054107666, "learning_rate": 3.180325358897551e-05, "loss": 0.6127, "step": 7962 }, { "epoch": 1.4625534287307191, "grad_norm": 0.799656331539154, "learning_rate": 3.178186657910215e-05, "loss": 0.9282, "step": 7963 }, { "epoch": 1.4627392677940902, "grad_norm": 0.7751187682151794, "learning_rate": 3.176048540407317e-05, "loss": 0.9911, "step": 7964 }, { "epoch": 1.4629251068574614, "grad_norm": 0.8648492097854614, "learning_rate": 3.173911006571736e-05, "loss": 0.8404, "step": 7965 }, { "epoch": 1.4631109459208327, "grad_norm": 0.6226553320884705, "learning_rate": 3.171774056586303e-05, "loss": 0.8417, "step": 7966 }, { "epoch": 1.4632967849842036, "grad_norm": 0.6965428590774536, "learning_rate": 3.1696376906337896e-05, "loss": 1.175, "step": 7967 }, { "epoch": 1.463482624047575, "grad_norm": 0.6485991477966309, "learning_rate": 3.1675019088969274e-05, "loss": 0.964, "step": 7968 }, { "epoch": 1.4636684631109458, "grad_norm": 0.7091854214668274, "learning_rate": 3.1653667115583976e-05, "loss": 0.9875, "step": 7969 }, { "epoch": 1.4638543021743171, "grad_norm": 0.7788587808609009, "learning_rate": 3.16323209880082e-05, "loss": 0.9208, "step": 7970 }, { "epoch": 1.4640401412376882, "grad_norm": 0.6741541028022766, "learning_rate": 3.1610980708067805e-05, "loss": 0.8609, "step": 7971 }, { "epoch": 1.4642259803010593, "grad_norm": 0.855179488658905, "learning_rate": 3.158964627758802e-05, "loss": 0.9807, "step": 7972 }, { "epoch": 1.4644118193644304, "grad_norm": 0.6979156136512756, "learning_rate": 3.1568317698393655e-05, "loss": 1.2114, "step": 7973 }, { "epoch": 1.4645976584278015, "grad_norm": 0.6675369739532471, "learning_rate": 3.154699497230895e-05, "loss": 0.887, "step": 7974 }, { "epoch": 1.4647834974911726, "grad_norm": 0.7123900651931763, "learning_rate": 3.152567810115772e-05, 
"loss": 0.9494, "step": 7975 }, { "epoch": 1.4649693365545438, "grad_norm": 0.6881269812583923, "learning_rate": 3.1504367086763264e-05, "loss": 0.9702, "step": 7976 }, { "epoch": 1.4651551756179149, "grad_norm": 0.6381533741950989, "learning_rate": 3.14830619309483e-05, "loss": 1.0109, "step": 7977 }, { "epoch": 1.465341014681286, "grad_norm": 0.7267858982086182, "learning_rate": 3.146176263553513e-05, "loss": 1.0232, "step": 7978 }, { "epoch": 1.465526853744657, "grad_norm": 0.8178361654281616, "learning_rate": 3.144046920234553e-05, "loss": 0.9091, "step": 7979 }, { "epoch": 1.4657126928080282, "grad_norm": 0.7100738286972046, "learning_rate": 3.141918163320081e-05, "loss": 0.8937, "step": 7980 }, { "epoch": 1.4658985318713993, "grad_norm": 0.6343532204627991, "learning_rate": 3.1397899929921684e-05, "loss": 0.8325, "step": 7981 }, { "epoch": 1.4660843709347704, "grad_norm": 0.6798224449157715, "learning_rate": 3.1376624094328434e-05, "loss": 0.8343, "step": 7982 }, { "epoch": 1.4662702099981417, "grad_norm": 0.739434540271759, "learning_rate": 3.135535412824088e-05, "loss": 0.9174, "step": 7983 }, { "epoch": 1.4664560490615126, "grad_norm": 0.7073687314987183, "learning_rate": 3.1334090033478206e-05, "loss": 0.8722, "step": 7984 }, { "epoch": 1.466641888124884, "grad_norm": 0.8394288420677185, "learning_rate": 3.1312831811859255e-05, "loss": 0.8673, "step": 7985 }, { "epoch": 1.466827727188255, "grad_norm": 0.614472508430481, "learning_rate": 3.1291579465202224e-05, "loss": 0.8653, "step": 7986 }, { "epoch": 1.4670135662516262, "grad_norm": 0.6752113103866577, "learning_rate": 3.127033299532492e-05, "loss": 1.1083, "step": 7987 }, { "epoch": 1.4671994053149973, "grad_norm": 0.7037453651428223, "learning_rate": 3.124909240404456e-05, "loss": 1.0695, "step": 7988 }, { "epoch": 1.4673852443783684, "grad_norm": 0.643002986907959, "learning_rate": 3.122785769317791e-05, "loss": 0.9207, "step": 7989 }, { "epoch": 1.4675710834417395, "grad_norm": 0.6749702095985413, 
"learning_rate": 3.1206628864541264e-05, "loss": 0.9807, "step": 7990 }, { "epoch": 1.4677569225051106, "grad_norm": 0.8223366737365723, "learning_rate": 3.1185405919950295e-05, "loss": 0.9541, "step": 7991 }, { "epoch": 1.4679427615684817, "grad_norm": 0.6743607521057129, "learning_rate": 3.1164188861220325e-05, "loss": 0.9941, "step": 7992 }, { "epoch": 1.4681286006318528, "grad_norm": 0.773655354976654, "learning_rate": 3.114297769016602e-05, "loss": 1.0346, "step": 7993 }, { "epoch": 1.468314439695224, "grad_norm": 0.7370700836181641, "learning_rate": 3.1121772408601654e-05, "loss": 0.9967, "step": 7994 }, { "epoch": 1.468500278758595, "grad_norm": 0.6398366093635559, "learning_rate": 3.1100573018340965e-05, "loss": 0.8079, "step": 7995 }, { "epoch": 1.4686861178219661, "grad_norm": 0.6904419660568237, "learning_rate": 3.107937952119716e-05, "loss": 1.2773, "step": 7996 }, { "epoch": 1.4688719568853372, "grad_norm": 0.6900709271430969, "learning_rate": 3.1058191918983025e-05, "loss": 0.9101, "step": 7997 }, { "epoch": 1.4690577959487086, "grad_norm": 0.7680274248123169, "learning_rate": 3.103701021351071e-05, "loss": 1.0101, "step": 7998 }, { "epoch": 1.4692436350120794, "grad_norm": 0.6433159112930298, "learning_rate": 3.1015834406591985e-05, "loss": 1.0083, "step": 7999 }, { "epoch": 1.4694294740754508, "grad_norm": 0.8103683590888977, "learning_rate": 3.0994664500038004e-05, "loss": 0.8678, "step": 8000 }, { "epoch": 1.4696153131388217, "grad_norm": 0.7142993807792664, "learning_rate": 3.097350049565955e-05, "loss": 0.9821, "step": 8001 }, { "epoch": 1.469801152202193, "grad_norm": 0.659246027469635, "learning_rate": 3.0952342395266746e-05, "loss": 1.0788, "step": 8002 }, { "epoch": 1.469986991265564, "grad_norm": 0.7127649188041687, "learning_rate": 3.093119020066933e-05, "loss": 1.1004, "step": 8003 }, { "epoch": 1.4701728303289352, "grad_norm": 0.6787214279174805, "learning_rate": 3.091004391367652e-05, "loss": 0.9848, "step": 8004 }, { "epoch": 
1.4703586693923063, "grad_norm": 0.6760038733482361, "learning_rate": 3.088890353609695e-05, "loss": 0.8568, "step": 8005 }, { "epoch": 1.4705445084556774, "grad_norm": 0.7517935633659363, "learning_rate": 3.086776906973886e-05, "loss": 0.9261, "step": 8006 }, { "epoch": 1.4707303475190485, "grad_norm": 0.7414070963859558, "learning_rate": 3.0846640516409865e-05, "loss": 1.121, "step": 8007 }, { "epoch": 1.4709161865824196, "grad_norm": 0.6196426153182983, "learning_rate": 3.082551787791716e-05, "loss": 1.0212, "step": 8008 }, { "epoch": 1.4711020256457907, "grad_norm": 0.7787929773330688, "learning_rate": 3.080440115606743e-05, "loss": 0.8718, "step": 8009 }, { "epoch": 1.4712878647091618, "grad_norm": 0.9043441414833069, "learning_rate": 3.0783290352666816e-05, "loss": 1.1491, "step": 8010 }, { "epoch": 1.471473703772533, "grad_norm": 0.6080015897750854, "learning_rate": 3.076218546952101e-05, "loss": 0.7824, "step": 8011 }, { "epoch": 1.471659542835904, "grad_norm": 0.7211466431617737, "learning_rate": 3.074108650843508e-05, "loss": 0.7902, "step": 8012 }, { "epoch": 1.4718453818992752, "grad_norm": 0.6852525472640991, "learning_rate": 3.071999347121375e-05, "loss": 0.8488, "step": 8013 }, { "epoch": 1.4720312209626463, "grad_norm": 0.7389121651649475, "learning_rate": 3.0698906359661074e-05, "loss": 0.7997, "step": 8014 }, { "epoch": 1.4722170600260176, "grad_norm": 0.6455549001693726, "learning_rate": 3.0677825175580756e-05, "loss": 0.718, "step": 8015 }, { "epoch": 1.4724028990893885, "grad_norm": 0.5886526703834534, "learning_rate": 3.065674992077584e-05, "loss": 0.9358, "step": 8016 }, { "epoch": 1.4725887381527598, "grad_norm": 0.6826702952384949, "learning_rate": 3.0635680597048975e-05, "loss": 1.0897, "step": 8017 }, { "epoch": 1.4727745772161307, "grad_norm": 0.7578597664833069, "learning_rate": 3.0614617206202287e-05, "loss": 1.0939, "step": 8018 }, { "epoch": 1.472960416279502, "grad_norm": 0.7338199615478516, "learning_rate": 3.0593559750037327e-05, 
"loss": 1.2138, "step": 8019 }, { "epoch": 1.4731462553428731, "grad_norm": 0.9254752993583679, "learning_rate": 3.0572508230355246e-05, "loss": 0.8234, "step": 8020 }, { "epoch": 1.4733320944062442, "grad_norm": 0.6886142492294312, "learning_rate": 3.055146264895655e-05, "loss": 1.069, "step": 8021 }, { "epoch": 1.4735179334696153, "grad_norm": 0.7945106625556946, "learning_rate": 3.053042300764135e-05, "loss": 0.9822, "step": 8022 }, { "epoch": 1.4737037725329865, "grad_norm": 0.6745129823684692, "learning_rate": 3.0509389308209224e-05, "loss": 0.9658, "step": 8023 }, { "epoch": 1.4738896115963576, "grad_norm": 0.6760376691818237, "learning_rate": 3.048836155245921e-05, "loss": 1.0209, "step": 8024 }, { "epoch": 1.4740754506597287, "grad_norm": 0.6404924988746643, "learning_rate": 3.0467339742189895e-05, "loss": 1.0087, "step": 8025 }, { "epoch": 1.4742612897230998, "grad_norm": 0.6808585524559021, "learning_rate": 3.0446323879199267e-05, "loss": 1.1226, "step": 8026 }, { "epoch": 1.4744471287864709, "grad_norm": 0.7933690547943115, "learning_rate": 3.0425313965284917e-05, "loss": 0.8736, "step": 8027 }, { "epoch": 1.474632967849842, "grad_norm": 0.5835078954696655, "learning_rate": 3.0404310002243797e-05, "loss": 0.8837, "step": 8028 }, { "epoch": 1.474818806913213, "grad_norm": 0.6240862607955933, "learning_rate": 3.038331199187249e-05, "loss": 0.9098, "step": 8029 }, { "epoch": 1.4750046459765842, "grad_norm": 0.6900718212127686, "learning_rate": 3.0362319935966942e-05, "loss": 1.0717, "step": 8030 }, { "epoch": 1.4751904850399553, "grad_norm": 0.6570308804512024, "learning_rate": 3.034133383632267e-05, "loss": 0.9055, "step": 8031 }, { "epoch": 1.4753763241033266, "grad_norm": 0.8068198561668396, "learning_rate": 3.0320353694734706e-05, "loss": 0.7794, "step": 8032 }, { "epoch": 1.4755621631666975, "grad_norm": 0.7877241969108582, "learning_rate": 3.029937951299746e-05, "loss": 0.971, "step": 8033 }, { "epoch": 1.4757480022300689, "grad_norm": 
0.686204195022583, "learning_rate": 3.027841129290495e-05, "loss": 0.9194, "step": 8034 }, { "epoch": 1.47593384129344, "grad_norm": 0.7279642820358276, "learning_rate": 3.0257449036250584e-05, "loss": 0.8231, "step": 8035 }, { "epoch": 1.476119680356811, "grad_norm": 0.6372054219245911, "learning_rate": 3.023649274482733e-05, "loss": 0.9844, "step": 8036 }, { "epoch": 1.4763055194201822, "grad_norm": 0.6370877623558044, "learning_rate": 3.0215542420427667e-05, "loss": 0.8887, "step": 8037 }, { "epoch": 1.4764913584835533, "grad_norm": 0.7689259052276611, "learning_rate": 3.019459806484345e-05, "loss": 0.7539, "step": 8038 }, { "epoch": 1.4766771975469244, "grad_norm": 0.6394402384757996, "learning_rate": 3.017365967986613e-05, "loss": 0.737, "step": 8039 }, { "epoch": 1.4768630366102955, "grad_norm": 0.8527523875236511, "learning_rate": 3.0152727267286607e-05, "loss": 0.9706, "step": 8040 }, { "epoch": 1.4770488756736666, "grad_norm": 0.6314235329627991, "learning_rate": 3.0131800828895317e-05, "loss": 0.7901, "step": 8041 }, { "epoch": 1.4772347147370377, "grad_norm": 0.6281957626342773, "learning_rate": 3.0110880366482085e-05, "loss": 0.9776, "step": 8042 }, { "epoch": 1.4774205538004088, "grad_norm": 0.69016432762146, "learning_rate": 3.0089965881836324e-05, "loss": 1.026, "step": 8043 }, { "epoch": 1.47760639286378, "grad_norm": 0.6415174603462219, "learning_rate": 3.0069057376746857e-05, "loss": 0.998, "step": 8044 }, { "epoch": 1.477792231927151, "grad_norm": 0.7496092319488525, "learning_rate": 3.004815485300204e-05, "loss": 0.6161, "step": 8045 }, { "epoch": 1.4779780709905221, "grad_norm": 0.7724894881248474, "learning_rate": 3.002725831238976e-05, "loss": 1.1819, "step": 8046 }, { "epoch": 1.4781639100538932, "grad_norm": 0.7565277814865112, "learning_rate": 3.0006367756697284e-05, "loss": 0.9707, "step": 8047 }, { "epoch": 1.4783497491172644, "grad_norm": 0.7986220121383667, "learning_rate": 2.9985483187711473e-05, "loss": 1.0053, "step": 8048 }, { 
"epoch": 1.4785355881806357, "grad_norm": 0.6957052946090698, "learning_rate": 2.9964604607218572e-05, "loss": 0.984, "step": 8049 }, { "epoch": 1.4787214272440066, "grad_norm": 0.6831666231155396, "learning_rate": 2.9943732017004444e-05, "loss": 0.9922, "step": 8050 }, { "epoch": 1.478907266307378, "grad_norm": 0.7440987229347229, "learning_rate": 2.9922865418854295e-05, "loss": 0.813, "step": 8051 }, { "epoch": 1.479093105370749, "grad_norm": 0.5783529877662659, "learning_rate": 2.990200481455292e-05, "loss": 0.8253, "step": 8052 }, { "epoch": 1.4792789444341201, "grad_norm": 0.7621669769287109, "learning_rate": 2.988115020588458e-05, "loss": 0.8967, "step": 8053 }, { "epoch": 1.4794647834974912, "grad_norm": 0.6287006735801697, "learning_rate": 2.986030159463301e-05, "loss": 0.7993, "step": 8054 }, { "epoch": 1.4796506225608623, "grad_norm": 0.9010291695594788, "learning_rate": 2.983945898258147e-05, "loss": 0.6416, "step": 8055 }, { "epoch": 1.4798364616242334, "grad_norm": 0.9841485023498535, "learning_rate": 2.9818622371512594e-05, "loss": 0.9354, "step": 8056 }, { "epoch": 1.4800223006876045, "grad_norm": 0.6324443221092224, "learning_rate": 2.9797791763208673e-05, "loss": 0.7948, "step": 8057 }, { "epoch": 1.4802081397509756, "grad_norm": 0.7640272378921509, "learning_rate": 2.977696715945132e-05, "loss": 0.835, "step": 8058 }, { "epoch": 1.4803939788143468, "grad_norm": 0.654093861579895, "learning_rate": 2.9756148562021735e-05, "loss": 0.9433, "step": 8059 }, { "epoch": 1.4805798178777179, "grad_norm": 0.7237150073051453, "learning_rate": 2.9735335972700607e-05, "loss": 0.8982, "step": 8060 }, { "epoch": 1.480765656941089, "grad_norm": 0.7748257517814636, "learning_rate": 2.971452939326802e-05, "loss": 0.9941, "step": 8061 }, { "epoch": 1.48095149600446, "grad_norm": 0.609855055809021, "learning_rate": 2.9693728825503687e-05, "loss": 0.9444, "step": 8062 }, { "epoch": 1.4811373350678312, "grad_norm": 0.7217187285423279, "learning_rate": 
2.9672934271186636e-05, "loss": 0.7118, "step": 8063 }, { "epoch": 1.4813231741312025, "grad_norm": 0.8199107050895691, "learning_rate": 2.965214573209555e-05, "loss": 0.9445, "step": 8064 }, { "epoch": 1.4815090131945734, "grad_norm": 0.7062032222747803, "learning_rate": 2.9631363210008444e-05, "loss": 0.9306, "step": 8065 }, { "epoch": 1.4816948522579447, "grad_norm": 0.7301066517829895, "learning_rate": 2.9610586706702938e-05, "loss": 1.0229, "step": 8066 }, { "epoch": 1.4818806913213156, "grad_norm": 0.6758785247802734, "learning_rate": 2.958981622395608e-05, "loss": 0.9138, "step": 8067 }, { "epoch": 1.482066530384687, "grad_norm": 0.8148382902145386, "learning_rate": 2.956905176354441e-05, "loss": 0.9845, "step": 8068 }, { "epoch": 1.482252369448058, "grad_norm": 0.6515848636627197, "learning_rate": 2.9548293327244004e-05, "loss": 0.8787, "step": 8069 }, { "epoch": 1.4824382085114292, "grad_norm": 0.753722071647644, "learning_rate": 2.95275409168303e-05, "loss": 0.8843, "step": 8070 }, { "epoch": 1.4826240475748003, "grad_norm": 0.6378787159919739, "learning_rate": 2.9506794534078363e-05, "loss": 1.0363, "step": 8071 }, { "epoch": 1.4828098866381714, "grad_norm": 0.7277052998542786, "learning_rate": 2.948605418076261e-05, "loss": 0.9157, "step": 8072 }, { "epoch": 1.4829957257015425, "grad_norm": 0.4940520226955414, "learning_rate": 2.9465319858657057e-05, "loss": 0.5199, "step": 8073 }, { "epoch": 1.4831815647649136, "grad_norm": 0.8421851396560669, "learning_rate": 2.9444591569535163e-05, "loss": 1.0781, "step": 8074 }, { "epoch": 1.4833674038282847, "grad_norm": 0.6914893984794617, "learning_rate": 2.9423869315169816e-05, "loss": 0.8737, "step": 8075 }, { "epoch": 1.4835532428916558, "grad_norm": 0.7011006474494934, "learning_rate": 2.9403153097333493e-05, "loss": 1.1134, "step": 8076 }, { "epoch": 1.4835532428916558, "eval_loss": 1.0098494291305542, "eval_runtime": 23.2152, "eval_samples_per_second": 47.038, "eval_steps_per_second": 23.519, "step": 8076 
}, { "epoch": 1.483739081955027, "grad_norm": 0.6604907512664795, "learning_rate": 2.9382442917798027e-05, "loss": 1.0299, "step": 8077 }, { "epoch": 1.483924921018398, "grad_norm": 0.7371731996536255, "learning_rate": 2.936173877833488e-05, "loss": 1.2304, "step": 8078 }, { "epoch": 1.4841107600817691, "grad_norm": 0.6601764559745789, "learning_rate": 2.934104068071485e-05, "loss": 0.9159, "step": 8079 }, { "epoch": 1.4842965991451402, "grad_norm": 0.6797536015510559, "learning_rate": 2.9320348626708327e-05, "loss": 0.9202, "step": 8080 }, { "epoch": 1.4844824382085116, "grad_norm": 1.3544020652770996, "learning_rate": 2.9299662618085145e-05, "loss": 1.1437, "step": 8081 }, { "epoch": 1.4846682772718824, "grad_norm": 0.686285674571991, "learning_rate": 2.927898265661465e-05, "loss": 0.838, "step": 8082 }, { "epoch": 1.4848541163352538, "grad_norm": 0.7199298739433289, "learning_rate": 2.92583087440656e-05, "loss": 1.045, "step": 8083 }, { "epoch": 1.4850399553986247, "grad_norm": 0.7211536169052124, "learning_rate": 2.9237640882206274e-05, "loss": 1.203, "step": 8084 }, { "epoch": 1.485225794461996, "grad_norm": 0.7082626223564148, "learning_rate": 2.9216979072804475e-05, "loss": 0.9541, "step": 8085 }, { "epoch": 1.485411633525367, "grad_norm": 0.6888993978500366, "learning_rate": 2.9196323317627472e-05, "loss": 0.7679, "step": 8086 }, { "epoch": 1.4855974725887382, "grad_norm": 0.5987149477005005, "learning_rate": 2.9175673618441925e-05, "loss": 0.8992, "step": 8087 }, { "epoch": 1.4857833116521093, "grad_norm": 0.7250107526779175, "learning_rate": 2.9155029977014114e-05, "loss": 1.1304, "step": 8088 }, { "epoch": 1.4859691507154804, "grad_norm": 0.7596735954284668, "learning_rate": 2.9134392395109677e-05, "loss": 1.1595, "step": 8089 }, { "epoch": 1.4861549897788515, "grad_norm": 0.7681040167808533, "learning_rate": 2.9113760874493846e-05, "loss": 1.0206, "step": 8090 }, { "epoch": 1.4863408288422226, "grad_norm": 0.7400950193405151, "learning_rate": 
2.9093135416931226e-05, "loss": 0.6833, "step": 8091 }, { "epoch": 1.4865266679055937, "grad_norm": 0.674723207950592, "learning_rate": 2.9072516024185993e-05, "loss": 1.0275, "step": 8092 }, { "epoch": 1.4867125069689648, "grad_norm": 0.7911410331726074, "learning_rate": 2.905190269802177e-05, "loss": 0.9139, "step": 8093 }, { "epoch": 1.486898346032336, "grad_norm": 0.8201619386672974, "learning_rate": 2.9031295440201634e-05, "loss": 0.9108, "step": 8094 }, { "epoch": 1.487084185095707, "grad_norm": 0.5533270835876465, "learning_rate": 2.9010694252488203e-05, "loss": 0.7583, "step": 8095 }, { "epoch": 1.4872700241590782, "grad_norm": 0.7166458368301392, "learning_rate": 2.89900991366435e-05, "loss": 0.9202, "step": 8096 }, { "epoch": 1.4874558632224493, "grad_norm": 0.6119358539581299, "learning_rate": 2.8969510094429074e-05, "loss": 0.7437, "step": 8097 }, { "epoch": 1.4876417022858206, "grad_norm": 0.6777541041374207, "learning_rate": 2.8948927127605985e-05, "loss": 1.013, "step": 8098 }, { "epoch": 1.4878275413491915, "grad_norm": 0.6690256595611572, "learning_rate": 2.8928350237934708e-05, "loss": 0.9622, "step": 8099 }, { "epoch": 1.4880133804125628, "grad_norm": 0.6234597563743591, "learning_rate": 2.8907779427175274e-05, "loss": 0.7835, "step": 8100 }, { "epoch": 1.488199219475934, "grad_norm": 0.6435613036155701, "learning_rate": 2.8887214697087094e-05, "loss": 1.0462, "step": 8101 }, { "epoch": 1.488385058539305, "grad_norm": 0.6958020329475403, "learning_rate": 2.8866656049429162e-05, "loss": 0.8945, "step": 8102 }, { "epoch": 1.4885708976026761, "grad_norm": 0.5903449654579163, "learning_rate": 2.884610348595984e-05, "loss": 0.9441, "step": 8103 }, { "epoch": 1.4887567366660472, "grad_norm": 0.7021283507347107, "learning_rate": 2.882555700843711e-05, "loss": 0.9256, "step": 8104 }, { "epoch": 1.4889425757294183, "grad_norm": 0.8339864611625671, "learning_rate": 2.8805016618618285e-05, "loss": 1.0403, "step": 8105 }, { "epoch": 1.4891284147927895, 
"grad_norm": 0.6968022584915161, "learning_rate": 2.8784482318260252e-05, "loss": 0.9723, "step": 8106 }, { "epoch": 1.4893142538561606, "grad_norm": 0.7342457175254822, "learning_rate": 2.8763954109119408e-05, "loss": 0.9621, "step": 8107 }, { "epoch": 1.4895000929195317, "grad_norm": 0.6052370071411133, "learning_rate": 2.8743431992951487e-05, "loss": 1.0057, "step": 8108 }, { "epoch": 1.4896859319829028, "grad_norm": 0.6225925087928772, "learning_rate": 2.8722915971511876e-05, "loss": 0.9445, "step": 8109 }, { "epoch": 1.4898717710462739, "grad_norm": 0.7247329354286194, "learning_rate": 2.8702406046555273e-05, "loss": 0.9245, "step": 8110 }, { "epoch": 1.490057610109645, "grad_norm": 0.8592934012413025, "learning_rate": 2.8681902219835976e-05, "loss": 0.8827, "step": 8111 }, { "epoch": 1.490243449173016, "grad_norm": 0.8161135911941528, "learning_rate": 2.8661404493107725e-05, "loss": 1.0164, "step": 8112 }, { "epoch": 1.4904292882363872, "grad_norm": 0.6986614465713501, "learning_rate": 2.8640912868123727e-05, "loss": 0.9613, "step": 8113 }, { "epoch": 1.4906151272997583, "grad_norm": 0.6560515761375427, "learning_rate": 2.8620427346636712e-05, "loss": 0.8074, "step": 8114 }, { "epoch": 1.4908009663631296, "grad_norm": 0.6758663058280945, "learning_rate": 2.8599947930398786e-05, "loss": 1.0231, "step": 8115 }, { "epoch": 1.4909868054265005, "grad_norm": 0.7396555542945862, "learning_rate": 2.8579474621161663e-05, "loss": 0.9426, "step": 8116 }, { "epoch": 1.4911726444898719, "grad_norm": 0.7931793928146362, "learning_rate": 2.8559007420676397e-05, "loss": 1.0995, "step": 8117 }, { "epoch": 1.491358483553243, "grad_norm": 0.5825560688972473, "learning_rate": 2.853854633069366e-05, "loss": 0.679, "step": 8118 }, { "epoch": 1.491544322616614, "grad_norm": 0.6996251344680786, "learning_rate": 2.8518091352963484e-05, "loss": 0.9056, "step": 8119 }, { "epoch": 1.4917301616799852, "grad_norm": 0.7999283671379089, "learning_rate": 2.8497642489235444e-05, "loss": 
0.8565, "step": 8120 }, { "epoch": 1.4919160007433563, "grad_norm": 0.7158589363098145, "learning_rate": 2.847719974125861e-05, "loss": 0.7917, "step": 8121 }, { "epoch": 1.4921018398067274, "grad_norm": 0.7307506203651428, "learning_rate": 2.8456763110781427e-05, "loss": 0.9391, "step": 8122 }, { "epoch": 1.4922876788700985, "grad_norm": 0.7652497887611389, "learning_rate": 2.843633259955195e-05, "loss": 1.058, "step": 8123 }, { "epoch": 1.4924735179334696, "grad_norm": 0.6631175875663757, "learning_rate": 2.8415908209317588e-05, "loss": 0.9038, "step": 8124 }, { "epoch": 1.4926593569968407, "grad_norm": 0.6075304746627808, "learning_rate": 2.8395489941825315e-05, "loss": 0.759, "step": 8125 }, { "epoch": 1.4928451960602118, "grad_norm": 0.812389612197876, "learning_rate": 2.8375077798821535e-05, "loss": 0.995, "step": 8126 }, { "epoch": 1.493031035123583, "grad_norm": 0.6336919069290161, "learning_rate": 2.83546717820522e-05, "loss": 0.9026, "step": 8127 }, { "epoch": 1.493216874186954, "grad_norm": 0.7257450222969055, "learning_rate": 2.8334271893262587e-05, "loss": 1.0456, "step": 8128 }, { "epoch": 1.4934027132503251, "grad_norm": 0.6064552664756775, "learning_rate": 2.831387813419759e-05, "loss": 0.9039, "step": 8129 }, { "epoch": 1.4935885523136965, "grad_norm": 0.7207791209220886, "learning_rate": 2.829349050660157e-05, "loss": 0.8323, "step": 8130 }, { "epoch": 1.4937743913770674, "grad_norm": 0.6437431573867798, "learning_rate": 2.8273109012218247e-05, "loss": 0.8694, "step": 8131 }, { "epoch": 1.4939602304404387, "grad_norm": 0.6493000388145447, "learning_rate": 2.8252733652790974e-05, "loss": 0.7337, "step": 8132 }, { "epoch": 1.4941460695038096, "grad_norm": 0.7215288877487183, "learning_rate": 2.8232364430062418e-05, "loss": 0.8804, "step": 8133 }, { "epoch": 1.494331908567181, "grad_norm": 0.665878415107727, "learning_rate": 2.821200134577484e-05, "loss": 1.1581, "step": 8134 }, { "epoch": 1.494517747630552, "grad_norm": 0.6766164302825928, 
"learning_rate": 2.819164440166998e-05, "loss": 0.9617, "step": 8135 }, { "epoch": 1.4947035866939231, "grad_norm": 0.5960833430290222, "learning_rate": 2.8171293599488934e-05, "loss": 0.8801, "step": 8136 }, { "epoch": 1.4948894257572942, "grad_norm": 0.6401859521865845, "learning_rate": 2.815094894097242e-05, "loss": 0.9309, "step": 8137 }, { "epoch": 1.4950752648206653, "grad_norm": 1.31476628780365, "learning_rate": 2.8130610427860504e-05, "loss": 1.1537, "step": 8138 }, { "epoch": 1.4952611038840364, "grad_norm": 0.6927815675735474, "learning_rate": 2.8110278061892837e-05, "loss": 1.0044, "step": 8139 }, { "epoch": 1.4954469429474075, "grad_norm": 0.7193856835365295, "learning_rate": 2.8089951844808425e-05, "loss": 0.81, "step": 8140 }, { "epoch": 1.4956327820107786, "grad_norm": 0.7486124038696289, "learning_rate": 2.806963177834586e-05, "loss": 0.9009, "step": 8141 }, { "epoch": 1.4958186210741498, "grad_norm": 0.7283627390861511, "learning_rate": 2.804931786424314e-05, "loss": 0.9371, "step": 8142 }, { "epoch": 1.4960044601375209, "grad_norm": 0.6429521441459656, "learning_rate": 2.8029010104237785e-05, "loss": 0.873, "step": 8143 }, { "epoch": 1.496190299200892, "grad_norm": 0.9424156546592712, "learning_rate": 2.800870850006676e-05, "loss": 0.9181, "step": 8144 }, { "epoch": 1.496376138264263, "grad_norm": 0.7225376963615417, "learning_rate": 2.7988413053466468e-05, "loss": 0.9672, "step": 8145 }, { "epoch": 1.4965619773276342, "grad_norm": 0.7944941520690918, "learning_rate": 2.7968123766172872e-05, "loss": 1.1031, "step": 8146 }, { "epoch": 1.4967478163910055, "grad_norm": 0.677739679813385, "learning_rate": 2.794784063992131e-05, "loss": 0.7279, "step": 8147 }, { "epoch": 1.4969336554543764, "grad_norm": 0.6650786399841309, "learning_rate": 2.7927563676446657e-05, "loss": 0.9218, "step": 8148 }, { "epoch": 1.4971194945177477, "grad_norm": 0.6516730785369873, "learning_rate": 2.790729287748327e-05, "loss": 1.008, "step": 8149 }, { "epoch": 
1.4973053335811186, "grad_norm": 0.8765578866004944, "learning_rate": 2.7887028244764924e-05, "loss": 0.9222, "step": 8150 }, { "epoch": 1.49749117264449, "grad_norm": 0.6291196942329407, "learning_rate": 2.7866769780024927e-05, "loss": 1.0059, "step": 8151 }, { "epoch": 1.497677011707861, "grad_norm": 0.8652521371841431, "learning_rate": 2.7846517484995993e-05, "loss": 1.0825, "step": 8152 }, { "epoch": 1.4978628507712322, "grad_norm": 0.7405718564987183, "learning_rate": 2.7826271361410384e-05, "loss": 0.9718, "step": 8153 }, { "epoch": 1.4980486898346033, "grad_norm": 0.6694837808609009, "learning_rate": 2.7806031410999745e-05, "loss": 0.9055, "step": 8154 }, { "epoch": 1.4982345288979744, "grad_norm": 0.7980131506919861, "learning_rate": 2.778579763549527e-05, "loss": 1.2031, "step": 8155 }, { "epoch": 1.4984203679613455, "grad_norm": 0.6876955032348633, "learning_rate": 2.7765570036627597e-05, "loss": 0.7122, "step": 8156 }, { "epoch": 1.4986062070247166, "grad_norm": 0.8370168805122375, "learning_rate": 2.7745348616126844e-05, "loss": 0.9301, "step": 8157 }, { "epoch": 1.4987920460880877, "grad_norm": 0.6756365895271301, "learning_rate": 2.7725133375722613e-05, "loss": 1.1112, "step": 8158 }, { "epoch": 1.4989778851514588, "grad_norm": 0.6710642576217651, "learning_rate": 2.77049243171439e-05, "loss": 0.8111, "step": 8159 }, { "epoch": 1.49916372421483, "grad_norm": 0.7266205549240112, "learning_rate": 2.76847214421193e-05, "loss": 1.0749, "step": 8160 }, { "epoch": 1.499349563278201, "grad_norm": 0.6995110511779785, "learning_rate": 2.766452475237672e-05, "loss": 1.0062, "step": 8161 }, { "epoch": 1.4995354023415721, "grad_norm": 0.6293860673904419, "learning_rate": 2.7644334249643688e-05, "loss": 0.9042, "step": 8162 }, { "epoch": 1.4997212414049432, "grad_norm": 0.7614185810089111, "learning_rate": 2.762414993564716e-05, "loss": 0.8192, "step": 8163 }, { "epoch": 1.4999070804683146, "grad_norm": 0.7335909008979797, "learning_rate": 2.760397181211348e-05, 
"loss": 1.0214, "step": 8164 }, { "epoch": 1.5000929195316854, "grad_norm": 0.5928364992141724, "learning_rate": 2.758379988076859e-05, "loss": 0.7934, "step": 8165 }, { "epoch": 1.5002787585950568, "grad_norm": 0.8057161569595337, "learning_rate": 2.756363414333778e-05, "loss": 0.951, "step": 8166 }, { "epoch": 1.5004645976584277, "grad_norm": 1.572473406791687, "learning_rate": 2.7543474601545927e-05, "loss": 1.5114, "step": 8167 }, { "epoch": 1.500650436721799, "grad_norm": 0.5864894390106201, "learning_rate": 2.7523321257117253e-05, "loss": 0.7276, "step": 8168 }, { "epoch": 1.50083627578517, "grad_norm": 0.6349899172782898, "learning_rate": 2.7503174111775566e-05, "loss": 0.9229, "step": 8169 }, { "epoch": 1.5010221148485412, "grad_norm": 0.723136842250824, "learning_rate": 2.7483033167244065e-05, "loss": 1.0168, "step": 8170 }, { "epoch": 1.5012079539119123, "grad_norm": 0.7174968719482422, "learning_rate": 2.7462898425245487e-05, "loss": 1.0505, "step": 8171 }, { "epoch": 1.5013937929752834, "grad_norm": 0.7736288905143738, "learning_rate": 2.7442769887501996e-05, "loss": 1.0762, "step": 8172 }, { "epoch": 1.5015796320386545, "grad_norm": 0.6830939054489136, "learning_rate": 2.7422647555735192e-05, "loss": 0.8832, "step": 8173 }, { "epoch": 1.5017654711020256, "grad_norm": 0.6052866578102112, "learning_rate": 2.7402531431666234e-05, "loss": 0.8339, "step": 8174 }, { "epoch": 1.5019513101653967, "grad_norm": 0.8002974987030029, "learning_rate": 2.7382421517015634e-05, "loss": 0.9862, "step": 8175 }, { "epoch": 1.5021371492287678, "grad_norm": 0.6755656003952026, "learning_rate": 2.7362317813503467e-05, "loss": 0.7707, "step": 8176 }, { "epoch": 1.5023229882921392, "grad_norm": 0.7527087330818176, "learning_rate": 2.7342220322849276e-05, "loss": 1.0019, "step": 8177 }, { "epoch": 1.50250882735551, "grad_norm": 0.7766450643539429, "learning_rate": 2.7322129046771994e-05, "loss": 0.8555, "step": 8178 }, { "epoch": 1.5026946664188814, "grad_norm": 
0.6567835211753845, "learning_rate": 2.730204398699011e-05, "loss": 1.0564, "step": 8179 }, { "epoch": 1.5028805054822523, "grad_norm": 0.7293635010719299, "learning_rate": 2.72819651452215e-05, "loss": 0.9468, "step": 8180 }, { "epoch": 1.5030663445456236, "grad_norm": 0.7716689109802246, "learning_rate": 2.7261892523183607e-05, "loss": 0.9835, "step": 8181 }, { "epoch": 1.5032521836089945, "grad_norm": 0.6577703356742859, "learning_rate": 2.724182612259323e-05, "loss": 0.9563, "step": 8182 }, { "epoch": 1.5034380226723658, "grad_norm": 0.6561897993087769, "learning_rate": 2.72217659451667e-05, "loss": 1.1897, "step": 8183 }, { "epoch": 1.5036238617357367, "grad_norm": 0.728302538394928, "learning_rate": 2.720171199261987e-05, "loss": 0.8003, "step": 8184 }, { "epoch": 1.503809700799108, "grad_norm": 0.7979081273078918, "learning_rate": 2.7181664266667906e-05, "loss": 1.0519, "step": 8185 }, { "epoch": 1.5039955398624791, "grad_norm": 0.7118033170700073, "learning_rate": 2.7161622769025585e-05, "loss": 0.9791, "step": 8186 }, { "epoch": 1.5041813789258502, "grad_norm": 0.7213948965072632, "learning_rate": 2.7141587501407095e-05, "loss": 0.9177, "step": 8187 }, { "epoch": 1.5043672179892214, "grad_norm": 0.8359013199806213, "learning_rate": 2.7121558465526133e-05, "loss": 1.0279, "step": 8188 }, { "epoch": 1.5045530570525925, "grad_norm": 0.6666085720062256, "learning_rate": 2.7101535663095735e-05, "loss": 1.1037, "step": 8189 }, { "epoch": 1.5047388961159636, "grad_norm": 0.7200797200202942, "learning_rate": 2.7081519095828568e-05, "loss": 0.9053, "step": 8190 }, { "epoch": 1.5049247351793347, "grad_norm": 0.6248354911804199, "learning_rate": 2.706150876543668e-05, "loss": 1.0788, "step": 8191 }, { "epoch": 1.5051105742427058, "grad_norm": 0.6507927775382996, "learning_rate": 2.7041504673631567e-05, "loss": 0.8718, "step": 8192 }, { "epoch": 1.5052964133060769, "grad_norm": 0.7430590391159058, "learning_rate": 2.7021506822124275e-05, "loss": 0.8365, "step": 8193 
}, { "epoch": 1.5054822523694482, "grad_norm": 0.7140336632728577, "learning_rate": 2.7001515212625205e-05, "loss": 0.9323, "step": 8194 }, { "epoch": 1.505668091432819, "grad_norm": 0.8088352084159851, "learning_rate": 2.6981529846844332e-05, "loss": 1.1455, "step": 8195 }, { "epoch": 1.5058539304961904, "grad_norm": 0.8508310914039612, "learning_rate": 2.6961550726491004e-05, "loss": 1.084, "step": 8196 }, { "epoch": 1.5060397695595613, "grad_norm": 0.6638537645339966, "learning_rate": 2.6941577853274093e-05, "loss": 1.0291, "step": 8197 }, { "epoch": 1.5062256086229326, "grad_norm": 0.597263753414154, "learning_rate": 2.6921611228901957e-05, "loss": 0.9016, "step": 8198 }, { "epoch": 1.5064114476863035, "grad_norm": 0.6490717530250549, "learning_rate": 2.690165085508233e-05, "loss": 0.8965, "step": 8199 }, { "epoch": 1.5065972867496749, "grad_norm": 0.6764369010925293, "learning_rate": 2.688169673352249e-05, "loss": 0.8949, "step": 8200 }, { "epoch": 1.5067831258130457, "grad_norm": 0.746873140335083, "learning_rate": 2.686174886592915e-05, "loss": 0.9743, "step": 8201 }, { "epoch": 1.506968964876417, "grad_norm": 0.5665560960769653, "learning_rate": 2.6841807254008545e-05, "loss": 0.6897, "step": 8202 }, { "epoch": 1.5071548039397882, "grad_norm": 0.7615552544593811, "learning_rate": 2.6821871899466234e-05, "loss": 0.9747, "step": 8203 }, { "epoch": 1.5073406430031593, "grad_norm": 0.850107729434967, "learning_rate": 2.6801942804007397e-05, "loss": 0.7636, "step": 8204 }, { "epoch": 1.5075264820665304, "grad_norm": 0.6412014365196228, "learning_rate": 2.6782019969336615e-05, "loss": 0.843, "step": 8205 }, { "epoch": 1.5077123211299015, "grad_norm": 0.7340335249900818, "learning_rate": 2.676210339715788e-05, "loss": 0.9309, "step": 8206 }, { "epoch": 1.5078981601932726, "grad_norm": 0.6725056767463684, "learning_rate": 2.6742193089174762e-05, "loss": 0.7102, "step": 8207 }, { "epoch": 1.5080839992566437, "grad_norm": 0.6415348649024963, "learning_rate": 
2.6722289047090166e-05, "loss": 0.9333, "step": 8208 }, { "epoch": 1.5082698383200148, "grad_norm": 0.8024607300758362, "learning_rate": 2.6702391272606596e-05, "loss": 0.9718, "step": 8209 }, { "epoch": 1.508455677383386, "grad_norm": 0.6357980370521545, "learning_rate": 2.6682499767425885e-05, "loss": 0.919, "step": 8210 }, { "epoch": 1.5086415164467573, "grad_norm": 0.7079041004180908, "learning_rate": 2.666261453324943e-05, "loss": 0.9, "step": 8211 }, { "epoch": 1.5088273555101281, "grad_norm": 0.8778452277183533, "learning_rate": 2.664273557177809e-05, "loss": 1.0977, "step": 8212 }, { "epoch": 1.5090131945734995, "grad_norm": 0.5249161124229431, "learning_rate": 2.6622862884712085e-05, "loss": 0.7411, "step": 8213 }, { "epoch": 1.5091990336368704, "grad_norm": 0.670387327671051, "learning_rate": 2.6602996473751208e-05, "loss": 0.9845, "step": 8214 }, { "epoch": 1.5093848727002417, "grad_norm": 0.7721664309501648, "learning_rate": 2.6583136340594674e-05, "loss": 0.9434, "step": 8215 }, { "epoch": 1.5095707117636126, "grad_norm": 0.7793735861778259, "learning_rate": 2.6563282486941197e-05, "loss": 0.8901, "step": 8216 }, { "epoch": 1.509756550826984, "grad_norm": 0.6147542595863342, "learning_rate": 2.6543434914488852e-05, "loss": 0.8793, "step": 8217 }, { "epoch": 1.509942389890355, "grad_norm": 0.686143159866333, "learning_rate": 2.6523593624935283e-05, "loss": 0.9777, "step": 8218 }, { "epoch": 1.5101282289537261, "grad_norm": 0.6980476379394531, "learning_rate": 2.6503758619977594e-05, "loss": 0.9811, "step": 8219 }, { "epoch": 1.5103140680170972, "grad_norm": 0.6905124187469482, "learning_rate": 2.6483929901312233e-05, "loss": 0.8508, "step": 8220 }, { "epoch": 1.5104999070804683, "grad_norm": 0.6223216652870178, "learning_rate": 2.646410747063527e-05, "loss": 1.1032, "step": 8221 }, { "epoch": 1.5106857461438394, "grad_norm": 0.7925272583961487, "learning_rate": 2.644429132964209e-05, "loss": 1.1465, "step": 8222 }, { "epoch": 1.5108715852072105, 
"grad_norm": 0.6701057553291321, "learning_rate": 2.6424481480027695e-05, "loss": 0.9851, "step": 8223 }, { "epoch": 1.5110574242705817, "grad_norm": 0.7091572284698486, "learning_rate": 2.6404677923486377e-05, "loss": 0.9173, "step": 8224 }, { "epoch": 1.5112432633339528, "grad_norm": 0.7232625484466553, "learning_rate": 2.638488066171201e-05, "loss": 1.1431, "step": 8225 }, { "epoch": 1.5114291023973239, "grad_norm": 0.6791695356369019, "learning_rate": 2.636508969639794e-05, "loss": 0.9333, "step": 8226 }, { "epoch": 1.511614941460695, "grad_norm": 0.7125614881515503, "learning_rate": 2.6345305029236867e-05, "loss": 0.8559, "step": 8227 }, { "epoch": 1.5118007805240663, "grad_norm": 0.6903975605964661, "learning_rate": 2.632552666192104e-05, "loss": 0.8817, "step": 8228 }, { "epoch": 1.5119866195874372, "grad_norm": 0.8414456844329834, "learning_rate": 2.6305754596142173e-05, "loss": 1.0211, "step": 8229 }, { "epoch": 1.5121724586508085, "grad_norm": 0.6724753975868225, "learning_rate": 2.6285988833591367e-05, "loss": 0.9309, "step": 8230 }, { "epoch": 1.5123582977141794, "grad_norm": 0.6871885061264038, "learning_rate": 2.6266229375959252e-05, "loss": 0.884, "step": 8231 }, { "epoch": 1.5125441367775507, "grad_norm": 0.7753848433494568, "learning_rate": 2.6246476224935902e-05, "loss": 0.9779, "step": 8232 }, { "epoch": 1.5127299758409216, "grad_norm": 0.8011336922645569, "learning_rate": 2.6226729382210868e-05, "loss": 1.0863, "step": 8233 }, { "epoch": 1.512915814904293, "grad_norm": 0.74050372838974, "learning_rate": 2.6206988849473092e-05, "loss": 0.9114, "step": 8234 }, { "epoch": 1.513101653967664, "grad_norm": 1.0021766424179077, "learning_rate": 2.6187254628411064e-05, "loss": 1.1928, "step": 8235 }, { "epoch": 1.5132874930310352, "grad_norm": 0.706662654876709, "learning_rate": 2.6167526720712655e-05, "loss": 1.0503, "step": 8236 }, { "epoch": 1.5134733320944063, "grad_norm": 0.7461012005805969, "learning_rate": 2.6147805128065294e-05, "loss": 1.2067, 
"step": 8237 }, { "epoch": 1.5136591711577774, "grad_norm": 0.6484167575836182, "learning_rate": 2.6128089852155736e-05, "loss": 0.9687, "step": 8238 }, { "epoch": 1.5138450102211485, "grad_norm": 0.7335364818572998, "learning_rate": 2.6108380894670315e-05, "loss": 0.9708, "step": 8239 }, { "epoch": 1.5140308492845196, "grad_norm": 0.8530272841453552, "learning_rate": 2.608867825729481e-05, "loss": 0.9567, "step": 8240 }, { "epoch": 1.5142166883478907, "grad_norm": 0.6858327984809875, "learning_rate": 2.6068981941714367e-05, "loss": 0.9842, "step": 8241 }, { "epoch": 1.5144025274112618, "grad_norm": 0.6089559197425842, "learning_rate": 2.604929194961371e-05, "loss": 0.9545, "step": 8242 }, { "epoch": 1.5145883664746331, "grad_norm": 0.6408554911613464, "learning_rate": 2.6029608282676908e-05, "loss": 0.7563, "step": 8243 }, { "epoch": 1.514774205538004, "grad_norm": 0.6375374794006348, "learning_rate": 2.600993094258759e-05, "loss": 0.9235, "step": 8244 }, { "epoch": 1.5149600446013753, "grad_norm": 0.6979017853736877, "learning_rate": 2.5990259931028792e-05, "loss": 0.9803, "step": 8245 }, { "epoch": 1.5151458836647462, "grad_norm": 1.0037568807601929, "learning_rate": 2.5970595249683017e-05, "loss": 1.0065, "step": 8246 }, { "epoch": 1.5153317227281176, "grad_norm": 0.764148473739624, "learning_rate": 2.595093690023225e-05, "loss": 1.068, "step": 8247 }, { "epoch": 1.5155175617914884, "grad_norm": 0.7334862351417542, "learning_rate": 2.5931284884357864e-05, "loss": 1.2625, "step": 8248 }, { "epoch": 1.5157034008548598, "grad_norm": 0.734988272190094, "learning_rate": 2.59116392037408e-05, "loss": 0.9587, "step": 8249 }, { "epoch": 1.5158892399182307, "grad_norm": 0.6814414858818054, "learning_rate": 2.5891999860061333e-05, "loss": 0.9823, "step": 8250 }, { "epoch": 1.516075078981602, "grad_norm": 0.7905431985855103, "learning_rate": 2.587236685499931e-05, "loss": 0.8889, "step": 8251 }, { "epoch": 1.516260918044973, "grad_norm": 0.8326538801193237, 
"learning_rate": 2.5852740190233937e-05, "loss": 0.8816, "step": 8252 }, { "epoch": 1.5164467571083442, "grad_norm": 0.787573516368866, "learning_rate": 2.5833119867443945e-05, "loss": 0.8138, "step": 8253 }, { "epoch": 1.5166325961717153, "grad_norm": 0.820881187915802, "learning_rate": 2.581350588830753e-05, "loss": 0.792, "step": 8254 }, { "epoch": 1.5168184352350864, "grad_norm": 0.6634076833724976, "learning_rate": 2.579389825450228e-05, "loss": 0.9024, "step": 8255 }, { "epoch": 1.5170042742984575, "grad_norm": 0.660010576248169, "learning_rate": 2.5774296967705314e-05, "loss": 1.0527, "step": 8256 }, { "epoch": 1.5171901133618286, "grad_norm": 0.8844035267829895, "learning_rate": 2.5754702029593113e-05, "loss": 1.1888, "step": 8257 }, { "epoch": 1.5173759524251997, "grad_norm": 0.6765933036804199, "learning_rate": 2.573511344184173e-05, "loss": 1.0193, "step": 8258 }, { "epoch": 1.5175617914885708, "grad_norm": 0.725508451461792, "learning_rate": 2.5715531206126585e-05, "loss": 0.9188, "step": 8259 }, { "epoch": 1.5177476305519422, "grad_norm": 0.6848968267440796, "learning_rate": 2.5695955324122622e-05, "loss": 1.0983, "step": 8260 }, { "epoch": 1.517933469615313, "grad_norm": 0.8235938549041748, "learning_rate": 2.567638579750421e-05, "loss": 0.8204, "step": 8261 }, { "epoch": 1.5181193086786844, "grad_norm": 0.6256136894226074, "learning_rate": 2.5656822627945133e-05, "loss": 0.9667, "step": 8262 }, { "epoch": 1.5183051477420553, "grad_norm": 0.7181174755096436, "learning_rate": 2.5637265817118715e-05, "loss": 0.9107, "step": 8263 }, { "epoch": 1.5184909868054266, "grad_norm": 0.6005790829658508, "learning_rate": 2.5617715366697647e-05, "loss": 0.8373, "step": 8264 }, { "epoch": 1.5186768258687975, "grad_norm": 0.7009453773498535, "learning_rate": 2.559817127835418e-05, "loss": 0.9135, "step": 8265 }, { "epoch": 1.5188626649321688, "grad_norm": 0.7299928665161133, "learning_rate": 2.5578633553759878e-05, "loss": 1.1003, "step": 8266 }, { "epoch": 
1.5190485039955397, "grad_norm": 0.7495889663696289, "learning_rate": 2.5559102194585917e-05, "loss": 1.2302, "step": 8267 }, { "epoch": 1.519234343058911, "grad_norm": 0.7094706296920776, "learning_rate": 2.5539577202502852e-05, "loss": 0.8065, "step": 8268 }, { "epoch": 1.5194201821222821, "grad_norm": 0.651636004447937, "learning_rate": 2.552005857918065e-05, "loss": 0.8975, "step": 8269 }, { "epoch": 1.5196060211856532, "grad_norm": 0.7161694765090942, "learning_rate": 2.550054632628883e-05, "loss": 1.0802, "step": 8270 }, { "epoch": 1.5197918602490244, "grad_norm": 0.6569831371307373, "learning_rate": 2.5481040445496286e-05, "loss": 1.0261, "step": 8271 }, { "epoch": 1.5199776993123955, "grad_norm": 0.7755568623542786, "learning_rate": 2.5461540938471397e-05, "loss": 1.0336, "step": 8272 }, { "epoch": 1.5201635383757666, "grad_norm": 0.6399084329605103, "learning_rate": 2.5442047806882007e-05, "loss": 0.9518, "step": 8273 }, { "epoch": 1.5203493774391377, "grad_norm": 0.7283554077148438, "learning_rate": 2.5422561052395455e-05, "loss": 0.9542, "step": 8274 }, { "epoch": 1.5205352165025088, "grad_norm": 0.7275633811950684, "learning_rate": 2.5403080676678392e-05, "loss": 0.982, "step": 8275 }, { "epoch": 1.5207210555658799, "grad_norm": 0.6447848677635193, "learning_rate": 2.5383606681397076e-05, "loss": 0.701, "step": 8276 }, { "epoch": 1.5209068946292512, "grad_norm": 0.801145613193512, "learning_rate": 2.5364139068217174e-05, "loss": 0.891, "step": 8277 }, { "epoch": 1.521092733692622, "grad_norm": 0.8931204676628113, "learning_rate": 2.5344677838803733e-05, "loss": 1.1393, "step": 8278 }, { "epoch": 1.5212785727559934, "grad_norm": 0.721251368522644, "learning_rate": 2.5325222994821385e-05, "loss": 0.9685, "step": 8279 }, { "epoch": 1.5214644118193643, "grad_norm": 0.7849665284156799, "learning_rate": 2.5305774537934067e-05, "loss": 1.1303, "step": 8280 }, { "epoch": 1.5216502508827356, "grad_norm": 0.6657546758651733, "learning_rate": 
2.5286332469805296e-05, "loss": 0.8404, "step": 8281 }, { "epoch": 1.5218360899461065, "grad_norm": 0.6446507573127747, "learning_rate": 2.5266896792098015e-05, "loss": 0.8706, "step": 8282 }, { "epoch": 1.5220219290094779, "grad_norm": 0.6058208346366882, "learning_rate": 2.5247467506474543e-05, "loss": 1.0097, "step": 8283 }, { "epoch": 1.522207768072849, "grad_norm": 0.6460893154144287, "learning_rate": 2.522804461459677e-05, "loss": 0.8407, "step": 8284 }, { "epoch": 1.52239360713622, "grad_norm": 0.7795756459236145, "learning_rate": 2.520862811812591e-05, "loss": 0.8684, "step": 8285 }, { "epoch": 1.5225794461995912, "grad_norm": 0.6236745715141296, "learning_rate": 2.518921801872276e-05, "loss": 0.739, "step": 8286 }, { "epoch": 1.5227652852629623, "grad_norm": 0.698995053768158, "learning_rate": 2.5169814318047447e-05, "loss": 1.0715, "step": 8287 }, { "epoch": 1.5229511243263334, "grad_norm": 0.6841782331466675, "learning_rate": 2.5150417017759654e-05, "loss": 0.9162, "step": 8288 }, { "epoch": 1.5231369633897045, "grad_norm": 0.7248424291610718, "learning_rate": 2.513102611951845e-05, "loss": 0.9685, "step": 8289 }, { "epoch": 1.5233228024530756, "grad_norm": 0.6345195174217224, "learning_rate": 2.5111641624982417e-05, "loss": 0.9229, "step": 8290 }, { "epoch": 1.5235086415164467, "grad_norm": 0.6780673861503601, "learning_rate": 2.5092263535809545e-05, "loss": 1.0447, "step": 8291 }, { "epoch": 1.5236944805798178, "grad_norm": 0.7004578113555908, "learning_rate": 2.5072891853657243e-05, "loss": 0.9548, "step": 8292 }, { "epoch": 1.523880319643189, "grad_norm": 0.7394935488700867, "learning_rate": 2.5053526580182472e-05, "loss": 0.889, "step": 8293 }, { "epoch": 1.5240661587065603, "grad_norm": 0.8624938130378723, "learning_rate": 2.5034167717041523e-05, "loss": 1.1627, "step": 8294 }, { "epoch": 1.5242519977699311, "grad_norm": 0.7264074683189392, "learning_rate": 2.5014815265890236e-05, "loss": 1.0638, "step": 8295 }, { "epoch": 1.5244378368333025, 
"grad_norm": 0.8741801381111145, "learning_rate": 2.4995469228383884e-05, "loss": 0.8824, "step": 8296 }, { "epoch": 1.5246236758966734, "grad_norm": 0.8204243779182434, "learning_rate": 2.4976129606177122e-05, "loss": 0.894, "step": 8297 }, { "epoch": 1.5248095149600447, "grad_norm": 0.7651588320732117, "learning_rate": 2.4956796400924188e-05, "loss": 1.0573, "step": 8298 }, { "epoch": 1.5249953540234156, "grad_norm": 0.7579216361045837, "learning_rate": 2.493746961427861e-05, "loss": 1.1599, "step": 8299 }, { "epoch": 1.525181193086787, "grad_norm": 0.7527614235877991, "learning_rate": 2.4918149247893508e-05, "loss": 1.167, "step": 8300 }, { "epoch": 1.525367032150158, "grad_norm": 0.7581583261489868, "learning_rate": 2.4898835303421353e-05, "loss": 0.9118, "step": 8301 }, { "epoch": 1.5255528712135291, "grad_norm": 0.9576455354690552, "learning_rate": 2.4879527782514134e-05, "loss": 1.1397, "step": 8302 }, { "epoch": 1.5257387102769002, "grad_norm": 0.756462812423706, "learning_rate": 2.4860226686823253e-05, "loss": 1.0269, "step": 8303 }, { "epoch": 1.5259245493402713, "grad_norm": 0.935738205909729, "learning_rate": 2.4840932017999584e-05, "loss": 1.1624, "step": 8304 }, { "epoch": 1.5261103884036424, "grad_norm": 0.8938685655593872, "learning_rate": 2.4821643777693472e-05, "loss": 0.89, "step": 8305 }, { "epoch": 1.5262962274670135, "grad_norm": 0.7011599540710449, "learning_rate": 2.480236196755462e-05, "loss": 0.7417, "step": 8306 }, { "epoch": 1.5264820665303847, "grad_norm": 0.6875777244567871, "learning_rate": 2.4783086589232295e-05, "loss": 1.086, "step": 8307 }, { "epoch": 1.5266679055937558, "grad_norm": 0.7056373357772827, "learning_rate": 2.476381764437512e-05, "loss": 0.8894, "step": 8308 }, { "epoch": 1.526853744657127, "grad_norm": 0.7568773627281189, "learning_rate": 2.474455513463122e-05, "loss": 1.0199, "step": 8309 }, { "epoch": 1.527039583720498, "grad_norm": 0.7980220913887024, "learning_rate": 2.47252990616482e-05, "loss": 0.874, "step": 
8310 }, { "epoch": 1.5272254227838693, "grad_norm": 0.7311957478523254, "learning_rate": 2.470604942707302e-05, "loss": 0.979, "step": 8311 }, { "epoch": 1.5274112618472402, "grad_norm": 1.1911845207214355, "learning_rate": 2.4686806232552183e-05, "loss": 0.8673, "step": 8312 }, { "epoch": 1.5275971009106115, "grad_norm": 0.748319685459137, "learning_rate": 2.4667569479731556e-05, "loss": 1.1138, "step": 8313 }, { "epoch": 1.5277829399739824, "grad_norm": 0.7561832666397095, "learning_rate": 2.4648339170256552e-05, "loss": 1.1001, "step": 8314 }, { "epoch": 1.5279687790373537, "grad_norm": 0.7505941987037659, "learning_rate": 2.4629115305771933e-05, "loss": 1.1796, "step": 8315 }, { "epoch": 1.5281546181007246, "grad_norm": 0.6446735858917236, "learning_rate": 2.4609897887921973e-05, "loss": 0.954, "step": 8316 }, { "epoch": 1.528340457164096, "grad_norm": 0.6511173248291016, "learning_rate": 2.459068691835039e-05, "loss": 0.8117, "step": 8317 }, { "epoch": 1.528526296227467, "grad_norm": 0.7788544297218323, "learning_rate": 2.457148239870033e-05, "loss": 0.9103, "step": 8318 }, { "epoch": 1.5287121352908382, "grad_norm": 0.7949376702308655, "learning_rate": 2.455228433061444e-05, "loss": 1.0756, "step": 8319 }, { "epoch": 1.5288979743542093, "grad_norm": 0.7452298402786255, "learning_rate": 2.4533092715734708e-05, "loss": 0.8614, "step": 8320 }, { "epoch": 1.5290838134175804, "grad_norm": 0.6368029713630676, "learning_rate": 2.4513907555702675e-05, "loss": 0.7188, "step": 8321 }, { "epoch": 1.5292696524809515, "grad_norm": 0.8253371119499207, "learning_rate": 2.4494728852159266e-05, "loss": 0.829, "step": 8322 }, { "epoch": 1.5294554915443226, "grad_norm": 0.7835356593132019, "learning_rate": 2.4475556606744875e-05, "loss": 0.9303, "step": 8323 }, { "epoch": 1.5296413306076937, "grad_norm": 0.7281311750411987, "learning_rate": 2.4456390821099385e-05, "loss": 1.0951, "step": 8324 }, { "epoch": 1.5298271696710648, "grad_norm": 0.6802740097045898, "learning_rate": 
2.4437231496862034e-05, "loss": 0.9621, "step": 8325 }, { "epoch": 1.5300130087344361, "grad_norm": 0.6554965376853943, "learning_rate": 2.4418078635671616e-05, "loss": 1.1749, "step": 8326 }, { "epoch": 1.530198847797807, "grad_norm": 0.7031880021095276, "learning_rate": 2.439893223916626e-05, "loss": 1.1384, "step": 8327 }, { "epoch": 1.5303846868611783, "grad_norm": 0.682519257068634, "learning_rate": 2.437979230898365e-05, "loss": 0.7771, "step": 8328 }, { "epoch": 1.5305705259245492, "grad_norm": 0.6459039449691772, "learning_rate": 2.436065884676081e-05, "loss": 1.0385, "step": 8329 }, { "epoch": 1.5307563649879206, "grad_norm": 0.777291476726532, "learning_rate": 2.4341531854134304e-05, "loss": 1.21, "step": 8330 }, { "epoch": 1.5309422040512914, "grad_norm": 0.7058006525039673, "learning_rate": 2.4322411332740115e-05, "loss": 0.8476, "step": 8331 }, { "epoch": 1.5311280431146628, "grad_norm": 0.689065158367157, "learning_rate": 2.430329728421362e-05, "loss": 0.6523, "step": 8332 }, { "epoch": 1.5313138821780337, "grad_norm": 0.6234003305435181, "learning_rate": 2.428418971018972e-05, "loss": 1.0589, "step": 8333 }, { "epoch": 1.531499721241405, "grad_norm": 0.6710896492004395, "learning_rate": 2.4265088612302712e-05, "loss": 0.9926, "step": 8334 }, { "epoch": 1.531685560304776, "grad_norm": 0.698925793170929, "learning_rate": 2.424599399218639e-05, "loss": 1.093, "step": 8335 }, { "epoch": 1.5318713993681472, "grad_norm": 0.6863391399383545, "learning_rate": 2.422690585147389e-05, "loss": 0.7161, "step": 8336 }, { "epoch": 1.5320572384315183, "grad_norm": 0.7835180759429932, "learning_rate": 2.4207824191797913e-05, "loss": 0.895, "step": 8337 }, { "epoch": 1.5322430774948894, "grad_norm": 0.8524803519248962, "learning_rate": 2.4188749014790556e-05, "loss": 0.9524, "step": 8338 }, { "epoch": 1.5324289165582605, "grad_norm": 0.6551930904388428, "learning_rate": 2.416968032208332e-05, "loss": 0.9428, "step": 8339 }, { "epoch": 1.5326147556216316, "grad_norm": 
0.7103485465049744, "learning_rate": 2.4150618115307254e-05, "loss": 1.0382, "step": 8340 }, { "epoch": 1.5328005946850027, "grad_norm": 0.6723774075508118, "learning_rate": 2.4131562396092722e-05, "loss": 0.9524, "step": 8341 }, { "epoch": 1.5329864337483738, "grad_norm": 0.6750916242599487, "learning_rate": 2.411251316606966e-05, "loss": 0.8356, "step": 8342 }, { "epoch": 1.5331722728117452, "grad_norm": 0.7004226446151733, "learning_rate": 2.4093470426867326e-05, "loss": 0.9496, "step": 8343 }, { "epoch": 1.533358111875116, "grad_norm": 0.6192967295646667, "learning_rate": 2.4074434180114523e-05, "loss": 0.7493, "step": 8344 }, { "epoch": 1.5335439509384874, "grad_norm": 0.8744343519210815, "learning_rate": 2.4055404427439498e-05, "loss": 1.0087, "step": 8345 }, { "epoch": 1.5337297900018583, "grad_norm": 0.6814777851104736, "learning_rate": 2.4036381170469837e-05, "loss": 1.0026, "step": 8346 }, { "epoch": 1.5339156290652296, "grad_norm": 0.8172118067741394, "learning_rate": 2.4017364410832677e-05, "loss": 0.7954, "step": 8347 }, { "epoch": 1.5341014681286005, "grad_norm": 0.733949601650238, "learning_rate": 2.3998354150154555e-05, "loss": 1.0086, "step": 8348 }, { "epoch": 1.5342873071919718, "grad_norm": 0.6402868628501892, "learning_rate": 2.3979350390061494e-05, "loss": 0.6177, "step": 8349 }, { "epoch": 1.534473146255343, "grad_norm": 0.671047031879425, "learning_rate": 2.396035313217887e-05, "loss": 0.7657, "step": 8350 }, { "epoch": 1.534658985318714, "grad_norm": 0.8322077989578247, "learning_rate": 2.394136237813158e-05, "loss": 0.9858, "step": 8351 }, { "epoch": 1.5348448243820851, "grad_norm": 0.6607497930526733, "learning_rate": 2.3922378129543975e-05, "loss": 0.9245, "step": 8352 }, { "epoch": 1.5350306634454562, "grad_norm": 0.6483280062675476, "learning_rate": 2.3903400388039775e-05, "loss": 0.8515, "step": 8353 }, { "epoch": 1.5352165025088274, "grad_norm": 0.7380024790763855, "learning_rate": 2.3884429155242227e-05, "loss": 0.8586, "step": 8354 
}, { "epoch": 1.5354023415721985, "grad_norm": 0.6636247634887695, "learning_rate": 2.3865464432773943e-05, "loss": 0.7131, "step": 8355 }, { "epoch": 1.5355881806355696, "grad_norm": 0.6486872434616089, "learning_rate": 2.384650622225706e-05, "loss": 0.8961, "step": 8356 }, { "epoch": 1.5357740196989407, "grad_norm": 0.7080100178718567, "learning_rate": 2.3827554525313055e-05, "loss": 1.1486, "step": 8357 }, { "epoch": 1.535959858762312, "grad_norm": 0.7490073442459106, "learning_rate": 2.380860934356294e-05, "loss": 0.8438, "step": 8358 }, { "epoch": 1.5361456978256829, "grad_norm": 0.6263805627822876, "learning_rate": 2.3789670678627173e-05, "loss": 1.0868, "step": 8359 }, { "epoch": 1.5363315368890542, "grad_norm": 0.6247264742851257, "learning_rate": 2.377073853212556e-05, "loss": 0.7488, "step": 8360 }, { "epoch": 1.536517375952425, "grad_norm": 0.805173397064209, "learning_rate": 2.3751812905677427e-05, "loss": 1.1677, "step": 8361 }, { "epoch": 1.5367032150157964, "grad_norm": 0.6882678270339966, "learning_rate": 2.3732893800901522e-05, "loss": 0.9059, "step": 8362 }, { "epoch": 1.5368890540791673, "grad_norm": 0.6723858714103699, "learning_rate": 2.3713981219416094e-05, "loss": 0.8269, "step": 8363 }, { "epoch": 1.5370748931425386, "grad_norm": 0.7437129020690918, "learning_rate": 2.3695075162838687e-05, "loss": 1.0252, "step": 8364 }, { "epoch": 1.5372607322059095, "grad_norm": 0.6647806763648987, "learning_rate": 2.3676175632786423e-05, "loss": 0.9327, "step": 8365 }, { "epoch": 1.5374465712692809, "grad_norm": 0.8653480410575867, "learning_rate": 2.3657282630875833e-05, "loss": 1.3636, "step": 8366 }, { "epoch": 1.537632410332652, "grad_norm": 0.7383580207824707, "learning_rate": 2.363839615872284e-05, "loss": 0.7505, "step": 8367 }, { "epoch": 1.537818249396023, "grad_norm": 0.810131847858429, "learning_rate": 2.3619516217942882e-05, "loss": 1.0403, "step": 8368 }, { "epoch": 1.5380040884593942, "grad_norm": 0.8347377181053162, "learning_rate": 
2.3600642810150765e-05, "loss": 1.0137, "step": 8369 }, { "epoch": 1.5381899275227653, "grad_norm": 0.7918209433555603, "learning_rate": 2.3581775936960816e-05, "loss": 0.8376, "step": 8370 }, { "epoch": 1.5383757665861364, "grad_norm": 0.706166684627533, "learning_rate": 2.356291559998669e-05, "loss": 1.0973, "step": 8371 }, { "epoch": 1.5385616056495075, "grad_norm": 0.6235726475715637, "learning_rate": 2.3544061800841612e-05, "loss": 0.931, "step": 8372 }, { "epoch": 1.5387474447128786, "grad_norm": 0.6441529393196106, "learning_rate": 2.3525214541138184e-05, "loss": 0.9873, "step": 8373 }, { "epoch": 1.5389332837762497, "grad_norm": 0.6860111355781555, "learning_rate": 2.3506373822488425e-05, "loss": 0.7699, "step": 8374 }, { "epoch": 1.539119122839621, "grad_norm": 0.7331928014755249, "learning_rate": 2.348753964650383e-05, "loss": 0.8729, "step": 8375 }, { "epoch": 1.539304961902992, "grad_norm": 0.6296803951263428, "learning_rate": 2.346871201479537e-05, "loss": 0.8982, "step": 8376 }, { "epoch": 1.5394908009663633, "grad_norm": 0.7057747840881348, "learning_rate": 2.344989092897334e-05, "loss": 0.938, "step": 8377 }, { "epoch": 1.5396766400297341, "grad_norm": 0.7245149612426758, "learning_rate": 2.3431076390647588e-05, "loss": 0.878, "step": 8378 }, { "epoch": 1.5398624790931055, "grad_norm": 0.684192955493927, "learning_rate": 2.341226840142736e-05, "loss": 1.0114, "step": 8379 }, { "epoch": 1.5400483181564764, "grad_norm": 0.7038612961769104, "learning_rate": 2.3393466962921385e-05, "loss": 1.0632, "step": 8380 }, { "epoch": 1.5402341572198477, "grad_norm": 0.7834692001342773, "learning_rate": 2.3374672076737704e-05, "loss": 0.9348, "step": 8381 }, { "epoch": 1.5404199962832186, "grad_norm": 0.712929904460907, "learning_rate": 2.3355883744483964e-05, "loss": 0.7863, "step": 8382 }, { "epoch": 1.54060583534659, "grad_norm": 0.9249239563941956, "learning_rate": 2.3337101967767106e-05, "loss": 1.0305, "step": 8383 }, { "epoch": 1.540791674409961, 
"grad_norm": 0.6518446803092957, "learning_rate": 2.331832674819363e-05, "loss": 0.8175, "step": 8384 }, { "epoch": 1.5409775134733321, "grad_norm": 0.7660650014877319, "learning_rate": 2.3299558087369376e-05, "loss": 0.8638, "step": 8385 }, { "epoch": 1.5411633525367032, "grad_norm": 0.6161681413650513, "learning_rate": 2.3280795986899695e-05, "loss": 0.8675, "step": 8386 }, { "epoch": 1.5413491916000743, "grad_norm": 0.6976705193519592, "learning_rate": 2.3262040448389367e-05, "loss": 0.8803, "step": 8387 }, { "epoch": 1.5415350306634454, "grad_norm": 0.683444082736969, "learning_rate": 2.324329147344255e-05, "loss": 0.972, "step": 8388 }, { "epoch": 1.5417208697268165, "grad_norm": 0.7143980264663696, "learning_rate": 2.3224549063662927e-05, "loss": 1.1016, "step": 8389 }, { "epoch": 1.5419067087901877, "grad_norm": 0.7508431077003479, "learning_rate": 2.320581322065354e-05, "loss": 0.9667, "step": 8390 }, { "epoch": 1.5420925478535588, "grad_norm": 0.6859725713729858, "learning_rate": 2.3187083946016906e-05, "loss": 0.8693, "step": 8391 }, { "epoch": 1.54227838691693, "grad_norm": 0.6826413869857788, "learning_rate": 2.3168361241355008e-05, "loss": 0.89, "step": 8392 }, { "epoch": 1.542464225980301, "grad_norm": 0.6446660757064819, "learning_rate": 2.3149645108269224e-05, "loss": 0.9055, "step": 8393 }, { "epoch": 1.5426500650436723, "grad_norm": 0.842805027961731, "learning_rate": 2.313093554836041e-05, "loss": 0.9147, "step": 8394 }, { "epoch": 1.5428359041070432, "grad_norm": 0.7529627084732056, "learning_rate": 2.3112232563228798e-05, "loss": 1.0043, "step": 8395 }, { "epoch": 1.5430217431704145, "grad_norm": 0.6439473628997803, "learning_rate": 2.3093536154474137e-05, "loss": 0.7722, "step": 8396 }, { "epoch": 1.5432075822337854, "grad_norm": 0.6234472990036011, "learning_rate": 2.3074846323695522e-05, "loss": 1.039, "step": 8397 }, { "epoch": 1.5433934212971567, "grad_norm": 0.6632897257804871, "learning_rate": 2.3056163072491577e-05, "loss": 0.9647, 
"step": 8398 }, { "epoch": 1.5435792603605278, "grad_norm": 0.688230574131012, "learning_rate": 2.3037486402460274e-05, "loss": 0.9006, "step": 8399 }, { "epoch": 1.543765099423899, "grad_norm": 0.7446749210357666, "learning_rate": 2.3018816315199097e-05, "loss": 0.9976, "step": 8400 }, { "epoch": 1.54395093848727, "grad_norm": 0.6614901423454285, "learning_rate": 2.3000152812304975e-05, "loss": 0.94, "step": 8401 }, { "epoch": 1.5441367775506412, "grad_norm": 0.6514410376548767, "learning_rate": 2.298149589537416e-05, "loss": 0.8233, "step": 8402 }, { "epoch": 1.5443226166140123, "grad_norm": 0.8451980352401733, "learning_rate": 2.29628455660025e-05, "loss": 1.1003, "step": 8403 }, { "epoch": 1.5445084556773834, "grad_norm": 0.7540441155433655, "learning_rate": 2.2944201825785128e-05, "loss": 0.9058, "step": 8404 }, { "epoch": 1.5446942947407545, "grad_norm": 0.6781880259513855, "learning_rate": 2.2925564676316714e-05, "loss": 0.958, "step": 8405 }, { "epoch": 1.5448801338041256, "grad_norm": 0.7639658451080322, "learning_rate": 2.2906934119191324e-05, "loss": 0.9928, "step": 8406 }, { "epoch": 1.5450659728674967, "grad_norm": 0.7071417570114136, "learning_rate": 2.288831015600249e-05, "loss": 0.9922, "step": 8407 }, { "epoch": 1.5452518119308678, "grad_norm": 0.7705291509628296, "learning_rate": 2.2869692788343166e-05, "loss": 0.8055, "step": 8408 }, { "epoch": 1.5454376509942391, "grad_norm": 0.6227234601974487, "learning_rate": 2.2851082017805703e-05, "loss": 0.9943, "step": 8409 }, { "epoch": 1.54562349005761, "grad_norm": 0.787907063961029, "learning_rate": 2.2832477845981958e-05, "loss": 1.0025, "step": 8410 }, { "epoch": 1.5458093291209813, "grad_norm": 0.5764409303665161, "learning_rate": 2.2813880274463128e-05, "loss": 0.7052, "step": 8411 }, { "epoch": 1.5459951681843522, "grad_norm": 0.7593272924423218, "learning_rate": 2.2795289304839973e-05, "loss": 0.8737, "step": 8412 }, { "epoch": 1.5461810072477236, "grad_norm": 0.7019006013870239, 
"learning_rate": 2.2776704938702565e-05, "loss": 0.9578, "step": 8413 }, { "epoch": 1.5463668463110944, "grad_norm": 0.7341148853302002, "learning_rate": 2.2758127177640477e-05, "loss": 1.0765, "step": 8414 }, { "epoch": 1.5465526853744658, "grad_norm": 0.748848557472229, "learning_rate": 2.2739556023242738e-05, "loss": 0.8449, "step": 8415 }, { "epoch": 1.5467385244378369, "grad_norm": 0.6858508586883545, "learning_rate": 2.272099147709773e-05, "loss": 0.9085, "step": 8416 }, { "epoch": 1.546924363501208, "grad_norm": 0.6726716160774231, "learning_rate": 2.2702433540793377e-05, "loss": 0.9671, "step": 8417 }, { "epoch": 1.547110202564579, "grad_norm": 0.7550967335700989, "learning_rate": 2.2683882215916907e-05, "loss": 1.1665, "step": 8418 }, { "epoch": 1.5472960416279502, "grad_norm": 0.898966908454895, "learning_rate": 2.2665337504055096e-05, "loss": 1.0144, "step": 8419 }, { "epoch": 1.5474818806913213, "grad_norm": 0.779742956161499, "learning_rate": 2.2646799406794117e-05, "loss": 0.9829, "step": 8420 }, { "epoch": 1.5476677197546924, "grad_norm": 1.3177577257156372, "learning_rate": 2.2628267925719592e-05, "loss": 1.559, "step": 8421 }, { "epoch": 1.5478535588180635, "grad_norm": 0.7216274738311768, "learning_rate": 2.2609743062416498e-05, "loss": 0.8658, "step": 8422 }, { "epoch": 1.5480393978814346, "grad_norm": 0.7669714689254761, "learning_rate": 2.2591224818469347e-05, "loss": 0.9549, "step": 8423 }, { "epoch": 1.548225236944806, "grad_norm": 0.5377376675605774, "learning_rate": 2.257271319546207e-05, "loss": 0.7196, "step": 8424 }, { "epoch": 1.5484110760081768, "grad_norm": 0.6631278991699219, "learning_rate": 2.2554208194977955e-05, "loss": 0.7867, "step": 8425 }, { "epoch": 1.5485969150715482, "grad_norm": 0.9313716292381287, "learning_rate": 2.2535709818599816e-05, "loss": 1.1775, "step": 8426 }, { "epoch": 1.548782754134919, "grad_norm": 0.7080629467964172, "learning_rate": 2.2517218067909807e-05, "loss": 0.4182, "step": 8427 }, { "epoch": 
1.5489685931982904, "grad_norm": 0.6456342339515686, "learning_rate": 2.2498732944489596e-05, "loss": 0.7099, "step": 8428 }, { "epoch": 1.5491544322616613, "grad_norm": 0.7400659322738647, "learning_rate": 2.248025444992029e-05, "loss": 0.9625, "step": 8429 }, { "epoch": 1.5493402713250326, "grad_norm": 1.3561872243881226, "learning_rate": 2.246178258578234e-05, "loss": 1.3351, "step": 8430 }, { "epoch": 1.5495261103884035, "grad_norm": 0.7631298303604126, "learning_rate": 2.2443317353655736e-05, "loss": 0.8268, "step": 8431 }, { "epoch": 1.5497119494517748, "grad_norm": 0.7139994502067566, "learning_rate": 2.2424858755119794e-05, "loss": 0.9721, "step": 8432 }, { "epoch": 1.549897788515146, "grad_norm": 0.6325114965438843, "learning_rate": 2.2406406791753364e-05, "loss": 0.9429, "step": 8433 }, { "epoch": 1.550083627578517, "grad_norm": 0.7448883056640625, "learning_rate": 2.238796146513463e-05, "loss": 0.8453, "step": 8434 }, { "epoch": 1.5502694666418881, "grad_norm": 0.7192485332489014, "learning_rate": 2.2369522776841312e-05, "loss": 0.9864, "step": 8435 }, { "epoch": 1.5504553057052592, "grad_norm": 0.720230758190155, "learning_rate": 2.2351090728450486e-05, "loss": 0.8496, "step": 8436 }, { "epoch": 1.5506411447686304, "grad_norm": 0.7473940849304199, "learning_rate": 2.2332665321538682e-05, "loss": 0.9731, "step": 8437 }, { "epoch": 1.5508269838320015, "grad_norm": 0.7922482490539551, "learning_rate": 2.2314246557681905e-05, "loss": 1.0866, "step": 8438 }, { "epoch": 1.5510128228953726, "grad_norm": 0.6319082379341125, "learning_rate": 2.22958344384555e-05, "loss": 0.892, "step": 8439 }, { "epoch": 1.5511986619587437, "grad_norm": 0.6750452518463135, "learning_rate": 2.2277428965434344e-05, "loss": 0.842, "step": 8440 }, { "epoch": 1.551384501022115, "grad_norm": 0.6215194463729858, "learning_rate": 2.2259030140192637e-05, "loss": 0.714, "step": 8441 }, { "epoch": 1.5515703400854859, "grad_norm": 0.6646790504455566, "learning_rate": 2.22406379643041e-05, 
"loss": 0.9337, "step": 8442 }, { "epoch": 1.5517561791488572, "grad_norm": 0.5718748569488525, "learning_rate": 2.2222252439341896e-05, "loss": 0.885, "step": 8443 }, { "epoch": 1.551942018212228, "grad_norm": 0.7178288102149963, "learning_rate": 2.220387356687851e-05, "loss": 1.1043, "step": 8444 }, { "epoch": 1.5521278572755994, "grad_norm": 0.8014031648635864, "learning_rate": 2.2185501348485972e-05, "loss": 0.8161, "step": 8445 }, { "epoch": 1.5523136963389703, "grad_norm": 0.7333347201347351, "learning_rate": 2.2167135785735672e-05, "loss": 0.8641, "step": 8446 }, { "epoch": 1.5524995354023416, "grad_norm": 0.7811396718025208, "learning_rate": 2.2148776880198487e-05, "loss": 1.0327, "step": 8447 }, { "epoch": 1.5526853744657125, "grad_norm": 0.6206461191177368, "learning_rate": 2.2130424633444645e-05, "loss": 0.9963, "step": 8448 }, { "epoch": 1.5528712135290839, "grad_norm": 0.8549916744232178, "learning_rate": 2.211207904704389e-05, "loss": 0.8474, "step": 8449 }, { "epoch": 1.553057052592455, "grad_norm": 0.7330931425094604, "learning_rate": 2.2093740122565343e-05, "loss": 1.0992, "step": 8450 }, { "epoch": 1.553242891655826, "grad_norm": 0.8338735103607178, "learning_rate": 2.2075407861577578e-05, "loss": 0.9663, "step": 8451 }, { "epoch": 1.5534287307191972, "grad_norm": 0.6526317000389099, "learning_rate": 2.205708226564863e-05, "loss": 0.8416, "step": 8452 }, { "epoch": 1.5536145697825683, "grad_norm": 0.7737939357757568, "learning_rate": 2.203876333634587e-05, "loss": 1.0954, "step": 8453 }, { "epoch": 1.5538004088459394, "grad_norm": 0.6432002186775208, "learning_rate": 2.2020451075236193e-05, "loss": 0.7378, "step": 8454 }, { "epoch": 1.5539862479093105, "grad_norm": 0.6858252882957458, "learning_rate": 2.200214548388586e-05, "loss": 1.2002, "step": 8455 }, { "epoch": 1.5541720869726816, "grad_norm": 0.8318917155265808, "learning_rate": 2.1983846563860587e-05, "loss": 0.8824, "step": 8456 }, { "epoch": 1.5543579260360527, "grad_norm": 
0.8037974238395691, "learning_rate": 2.1965554316725568e-05, "loss": 1.1095, "step": 8457 }, { "epoch": 1.554543765099424, "grad_norm": 1.7428077459335327, "learning_rate": 2.1947268744045303e-05, "loss": 1.7246, "step": 8458 }, { "epoch": 1.554729604162795, "grad_norm": 0.6576951146125793, "learning_rate": 2.1928989847383873e-05, "loss": 1.05, "step": 8459 }, { "epoch": 1.5549154432261663, "grad_norm": 0.7942461371421814, "learning_rate": 2.1910717628304656e-05, "loss": 0.8262, "step": 8460 }, { "epoch": 1.5551012822895371, "grad_norm": 0.7093661427497864, "learning_rate": 2.189245208837054e-05, "loss": 0.9478, "step": 8461 }, { "epoch": 1.5552871213529085, "grad_norm": 0.7666981816291809, "learning_rate": 2.1874193229143804e-05, "loss": 0.9724, "step": 8462 }, { "epoch": 1.5554729604162794, "grad_norm": 0.7784303426742554, "learning_rate": 2.1855941052186158e-05, "loss": 1.0842, "step": 8463 }, { "epoch": 1.5556587994796507, "grad_norm": 0.7566025853157043, "learning_rate": 2.183769555905877e-05, "loss": 0.9761, "step": 8464 }, { "epoch": 1.5558446385430218, "grad_norm": 0.6337560415267944, "learning_rate": 2.181945675132221e-05, "loss": 0.7571, "step": 8465 }, { "epoch": 1.556030477606393, "grad_norm": 0.8436826467514038, "learning_rate": 2.180122463053652e-05, "loss": 1.0056, "step": 8466 }, { "epoch": 1.556216316669764, "grad_norm": 0.7065210342407227, "learning_rate": 2.1782999198261057e-05, "loss": 1.0622, "step": 8467 }, { "epoch": 1.5564021557331351, "grad_norm": 0.6758416295051575, "learning_rate": 2.1764780456054757e-05, "loss": 0.9907, "step": 8468 }, { "epoch": 1.5565879947965062, "grad_norm": 0.6660606265068054, "learning_rate": 2.1746568405475843e-05, "loss": 0.8577, "step": 8469 }, { "epoch": 1.5567738338598773, "grad_norm": 0.7996534705162048, "learning_rate": 2.1728363048082068e-05, "loss": 0.9277, "step": 8470 }, { "epoch": 1.5569596729232484, "grad_norm": 0.8163617253303528, "learning_rate": 2.171016438543059e-05, "loss": 1.0313, "step": 8471 }, 
{ "epoch": 1.5571455119866195, "grad_norm": 0.6959372758865356, "learning_rate": 2.169197241907793e-05, "loss": 1.0311, "step": 8472 }, { "epoch": 1.5573313510499907, "grad_norm": 0.8374831080436707, "learning_rate": 2.167378715058015e-05, "loss": 1.03, "step": 8473 }, { "epoch": 1.5575171901133618, "grad_norm": 0.6212435960769653, "learning_rate": 2.1655608581492603e-05, "loss": 1.0169, "step": 8474 }, { "epoch": 1.557703029176733, "grad_norm": 0.8012619614601135, "learning_rate": 2.163743671337021e-05, "loss": 1.0254, "step": 8475 }, { "epoch": 1.557888868240104, "grad_norm": 0.7814256548881531, "learning_rate": 2.1619271547767193e-05, "loss": 1.1478, "step": 8476 }, { "epoch": 1.5580747073034753, "grad_norm": 0.6881610751152039, "learning_rate": 2.160111308623728e-05, "loss": 0.8462, "step": 8477 }, { "epoch": 1.5582605463668462, "grad_norm": 0.7728042602539062, "learning_rate": 2.158296133033364e-05, "loss": 1.1767, "step": 8478 }, { "epoch": 1.5584463854302175, "grad_norm": 0.7264332175254822, "learning_rate": 2.156481628160877e-05, "loss": 0.8743, "step": 8479 }, { "epoch": 1.5586322244935884, "grad_norm": 0.5601229667663574, "learning_rate": 2.1546677941614678e-05, "loss": 0.8995, "step": 8480 }, { "epoch": 1.5588180635569597, "grad_norm": 0.7997213006019592, "learning_rate": 2.1528546311902788e-05, "loss": 0.9521, "step": 8481 }, { "epoch": 1.5590039026203308, "grad_norm": 0.6687659025192261, "learning_rate": 2.151042139402395e-05, "loss": 0.9774, "step": 8482 }, { "epoch": 1.559189741683702, "grad_norm": 0.6728366017341614, "learning_rate": 2.1492303189528395e-05, "loss": 0.8514, "step": 8483 }, { "epoch": 1.559375580747073, "grad_norm": 0.621809720993042, "learning_rate": 2.147419169996582e-05, "loss": 0.8785, "step": 8484 }, { "epoch": 1.5595614198104442, "grad_norm": 0.8290244340896606, "learning_rate": 2.1456086926885367e-05, "loss": 1.2388, "step": 8485 }, { "epoch": 1.5597472588738153, "grad_norm": 0.6844221353530884, "learning_rate": 
2.1437988871835523e-05, "loss": 1.0081, "step": 8486 }, { "epoch": 1.5599330979371864, "grad_norm": 0.8001043796539307, "learning_rate": 2.1419897536364332e-05, "loss": 1.0609, "step": 8487 }, { "epoch": 1.5601189370005575, "grad_norm": 0.7094721794128418, "learning_rate": 2.1401812922019092e-05, "loss": 0.9187, "step": 8488 }, { "epoch": 1.5603047760639286, "grad_norm": 0.6323491930961609, "learning_rate": 2.1383735030346706e-05, "loss": 0.7464, "step": 8489 }, { "epoch": 1.5604906151273, "grad_norm": 0.5895442962646484, "learning_rate": 2.1365663862893348e-05, "loss": 0.7452, "step": 8490 }, { "epoch": 1.5606764541906708, "grad_norm": 0.6722056269645691, "learning_rate": 2.13475994212047e-05, "loss": 0.9545, "step": 8491 }, { "epoch": 1.5608622932540421, "grad_norm": 0.7342957854270935, "learning_rate": 2.1329541706825896e-05, "loss": 0.9049, "step": 8492 }, { "epoch": 1.561048132317413, "grad_norm": 0.9032208919525146, "learning_rate": 2.1311490721301396e-05, "loss": 0.7619, "step": 8493 }, { "epoch": 1.5612339713807843, "grad_norm": 0.5408475399017334, "learning_rate": 2.129344646617516e-05, "loss": 0.4899, "step": 8494 }, { "epoch": 1.5614198104441552, "grad_norm": 0.7296645641326904, "learning_rate": 2.127540894299056e-05, "loss": 0.5888, "step": 8495 }, { "epoch": 1.5616056495075266, "grad_norm": 0.7951775193214417, "learning_rate": 2.1257378153290375e-05, "loss": 1.2182, "step": 8496 }, { "epoch": 1.5617914885708974, "grad_norm": 0.6165066361427307, "learning_rate": 2.1239354098616847e-05, "loss": 1.0031, "step": 8497 }, { "epoch": 1.5619773276342688, "grad_norm": 0.6538174748420715, "learning_rate": 2.122133678051156e-05, "loss": 0.9143, "step": 8498 }, { "epoch": 1.5621631666976399, "grad_norm": 0.7776987552642822, "learning_rate": 2.1203326200515628e-05, "loss": 1.191, "step": 8499 }, { "epoch": 1.562349005761011, "grad_norm": 0.7507323026657104, "learning_rate": 2.118532236016948e-05, "loss": 1.0686, "step": 8500 }, { "epoch": 1.562534844824382, 
"grad_norm": 0.6666802763938904, "learning_rate": 2.1167325261013082e-05, "loss": 0.8877, "step": 8501 }, { "epoch": 1.5627206838877532, "grad_norm": 0.5777565240859985, "learning_rate": 2.1149334904585706e-05, "loss": 0.8839, "step": 8502 }, { "epoch": 1.5629065229511243, "grad_norm": 0.8229918479919434, "learning_rate": 2.113135129242614e-05, "loss": 0.8825, "step": 8503 }, { "epoch": 1.5630923620144954, "grad_norm": 0.6364744305610657, "learning_rate": 2.1113374426072574e-05, "loss": 0.9031, "step": 8504 }, { "epoch": 1.5632782010778665, "grad_norm": 0.6380570530891418, "learning_rate": 2.109540430706256e-05, "loss": 0.7958, "step": 8505 }, { "epoch": 1.5634640401412376, "grad_norm": 0.7195653319358826, "learning_rate": 2.107744093693319e-05, "loss": 0.9536, "step": 8506 }, { "epoch": 1.563649879204609, "grad_norm": 0.7349860072135925, "learning_rate": 2.1059484317220835e-05, "loss": 1.0266, "step": 8507 }, { "epoch": 1.5638357182679798, "grad_norm": 0.7431777119636536, "learning_rate": 2.104153444946141e-05, "loss": 0.9168, "step": 8508 }, { "epoch": 1.5640215573313512, "grad_norm": 0.7292587757110596, "learning_rate": 2.1023591335190184e-05, "loss": 0.9093, "step": 8509 }, { "epoch": 1.564207396394722, "grad_norm": 0.6694021224975586, "learning_rate": 2.100565497594188e-05, "loss": 0.9558, "step": 8510 }, { "epoch": 1.5643932354580934, "grad_norm": 0.6532893776893616, "learning_rate": 2.0987725373250666e-05, "loss": 0.934, "step": 8511 }, { "epoch": 1.5645790745214643, "grad_norm": 0.7578188180923462, "learning_rate": 2.096980252865005e-05, "loss": 1.0388, "step": 8512 }, { "epoch": 1.5647649135848356, "grad_norm": 0.7167041301727295, "learning_rate": 2.0951886443673042e-05, "loss": 0.9336, "step": 8513 }, { "epoch": 1.5649507526482065, "grad_norm": 0.7971643209457397, "learning_rate": 2.093397711985202e-05, "loss": 1.1815, "step": 8514 }, { "epoch": 1.5651365917115778, "grad_norm": 0.8955301642417908, "learning_rate": 2.0916074558718836e-05, "loss": 1.2098, 
"step": 8515 }, { "epoch": 1.565322430774949, "grad_norm": 0.6152545809745789, "learning_rate": 2.0898178761804698e-05, "loss": 1.0297, "step": 8516 }, { "epoch": 1.56550826983832, "grad_norm": 0.6585174202919006, "learning_rate": 2.088028973064028e-05, "loss": 0.6759, "step": 8517 }, { "epoch": 1.5656941089016911, "grad_norm": 0.7088993191719055, "learning_rate": 2.0862407466755718e-05, "loss": 1.0041, "step": 8518 }, { "epoch": 1.5658799479650622, "grad_norm": 0.7710633277893066, "learning_rate": 2.0844531971680457e-05, "loss": 1.0349, "step": 8519 }, { "epoch": 1.5660657870284334, "grad_norm": 0.7347123622894287, "learning_rate": 2.0826663246943478e-05, "loss": 0.8464, "step": 8520 }, { "epoch": 1.5662516260918045, "grad_norm": 0.635280191898346, "learning_rate": 2.0808801294073087e-05, "loss": 0.9236, "step": 8521 }, { "epoch": 1.5664374651551756, "grad_norm": 0.8302204608917236, "learning_rate": 2.0790946114597066e-05, "loss": 1.0667, "step": 8522 }, { "epoch": 1.5666233042185467, "grad_norm": 0.8665578961372375, "learning_rate": 2.0773097710042655e-05, "loss": 1.2012, "step": 8523 }, { "epoch": 1.566809143281918, "grad_norm": 0.722381055355072, "learning_rate": 2.075525608193639e-05, "loss": 0.9888, "step": 8524 }, { "epoch": 1.566994982345289, "grad_norm": 0.699985682964325, "learning_rate": 2.0737421231804356e-05, "loss": 0.8727, "step": 8525 }, { "epoch": 1.5671808214086602, "grad_norm": 0.5751911997795105, "learning_rate": 2.071959316117198e-05, "loss": 0.7623, "step": 8526 }, { "epoch": 1.567366660472031, "grad_norm": 0.6195750832557678, "learning_rate": 2.070177187156418e-05, "loss": 0.9094, "step": 8527 }, { "epoch": 1.5675524995354024, "grad_norm": 0.6845465898513794, "learning_rate": 2.0683957364505202e-05, "loss": 0.9303, "step": 8528 }, { "epoch": 1.5677383385987733, "grad_norm": 0.7471318244934082, "learning_rate": 2.0666149641518782e-05, "loss": 1.1109, "step": 8529 }, { "epoch": 1.5679241776621446, "grad_norm": 0.6891338229179382, 
"learning_rate": 2.064834870412804e-05, "loss": 0.9626, "step": 8530 }, { "epoch": 1.5681100167255158, "grad_norm": 0.6418676376342773, "learning_rate": 2.0630554553855518e-05, "loss": 0.8029, "step": 8531 }, { "epoch": 1.5682958557888869, "grad_norm": 0.6934593915939331, "learning_rate": 2.0612767192223238e-05, "loss": 1.1254, "step": 8532 }, { "epoch": 1.568481694852258, "grad_norm": 0.893159806728363, "learning_rate": 2.059498662075253e-05, "loss": 1.0269, "step": 8533 }, { "epoch": 1.568667533915629, "grad_norm": 0.6825253963470459, "learning_rate": 2.0577212840964267e-05, "loss": 1.0025, "step": 8534 }, { "epoch": 1.5688533729790002, "grad_norm": 0.6435436606407166, "learning_rate": 2.055944585437861e-05, "loss": 0.8664, "step": 8535 }, { "epoch": 1.5690392120423713, "grad_norm": 0.789273738861084, "learning_rate": 2.0541685662515276e-05, "loss": 1.0906, "step": 8536 }, { "epoch": 1.5692250511057424, "grad_norm": 0.8879076838493347, "learning_rate": 2.0523932266893264e-05, "loss": 1.1946, "step": 8537 }, { "epoch": 1.5694108901691135, "grad_norm": 0.6618274450302124, "learning_rate": 2.0506185669031107e-05, "loss": 1.1427, "step": 8538 }, { "epoch": 1.5695967292324848, "grad_norm": 0.855046272277832, "learning_rate": 2.0488445870446695e-05, "loss": 0.8346, "step": 8539 }, { "epoch": 1.5697825682958557, "grad_norm": 0.8858674764633179, "learning_rate": 2.047071287265735e-05, "loss": 1.101, "step": 8540 }, { "epoch": 1.569968407359227, "grad_norm": 0.732211709022522, "learning_rate": 2.0452986677179843e-05, "loss": 1.0229, "step": 8541 }, { "epoch": 1.570154246422598, "grad_norm": 0.7777327299118042, "learning_rate": 2.043526728553028e-05, "loss": 0.854, "step": 8542 }, { "epoch": 1.5703400854859693, "grad_norm": 0.6771265268325806, "learning_rate": 2.0417554699224305e-05, "loss": 0.927, "step": 8543 }, { "epoch": 1.5705259245493401, "grad_norm": 0.8816447257995605, "learning_rate": 2.0399848919776844e-05, "loss": 1.3677, "step": 8544 }, { "epoch": 
1.5707117636127115, "grad_norm": 0.5746204257011414, "learning_rate": 2.0382149948702346e-05, "loss": 0.8165, "step": 8545 }, { "epoch": 1.5708976026760824, "grad_norm": 0.8437845706939697, "learning_rate": 2.0364457787514666e-05, "loss": 1.0448, "step": 8546 }, { "epoch": 1.5710834417394537, "grad_norm": 0.8124422430992126, "learning_rate": 2.0346772437727003e-05, "loss": 0.8906, "step": 8547 }, { "epoch": 1.5712692808028248, "grad_norm": 0.7039830088615417, "learning_rate": 2.032909390085207e-05, "loss": 0.839, "step": 8548 }, { "epoch": 1.571455119866196, "grad_norm": 0.7771579623222351, "learning_rate": 2.0311422178401907e-05, "loss": 1.0007, "step": 8549 }, { "epoch": 1.571640958929567, "grad_norm": 0.6714681386947632, "learning_rate": 2.0293757271888046e-05, "loss": 1.0287, "step": 8550 }, { "epoch": 1.5718267979929381, "grad_norm": 0.7209590077400208, "learning_rate": 2.0276099182821383e-05, "loss": 0.8024, "step": 8551 }, { "epoch": 1.5720126370563092, "grad_norm": 0.7411814332008362, "learning_rate": 2.0258447912712252e-05, "loss": 1.0593, "step": 8552 }, { "epoch": 1.5721984761196803, "grad_norm": 0.7381874918937683, "learning_rate": 2.0240803463070425e-05, "loss": 0.9287, "step": 8553 }, { "epoch": 1.5723843151830514, "grad_norm": 0.6632939577102661, "learning_rate": 2.0223165835405046e-05, "loss": 0.9132, "step": 8554 }, { "epoch": 1.5725701542464225, "grad_norm": 0.7423511743545532, "learning_rate": 2.0205535031224752e-05, "loss": 0.8735, "step": 8555 }, { "epoch": 1.5727559933097939, "grad_norm": 0.7011168003082275, "learning_rate": 2.018791105203748e-05, "loss": 0.7841, "step": 8556 }, { "epoch": 1.5729418323731648, "grad_norm": 0.8723081350326538, "learning_rate": 2.017029389935069e-05, "loss": 1.0542, "step": 8557 }, { "epoch": 1.573127671436536, "grad_norm": 0.7361454963684082, "learning_rate": 2.015268357467117e-05, "loss": 0.8223, "step": 8558 }, { "epoch": 1.573313510499907, "grad_norm": 0.8173145055770874, "learning_rate": 
2.0135080079505208e-05, "loss": 0.9669, "step": 8559 }, { "epoch": 1.5734993495632783, "grad_norm": 1.3938632011413574, "learning_rate": 2.0117483415358474e-05, "loss": 1.2551, "step": 8560 }, { "epoch": 1.5736851886266492, "grad_norm": 0.7503166198730469, "learning_rate": 2.0099893583736006e-05, "loss": 0.9042, "step": 8561 }, { "epoch": 1.5738710276900205, "grad_norm": 0.8426245450973511, "learning_rate": 2.008231058614236e-05, "loss": 0.8681, "step": 8562 }, { "epoch": 1.5740568667533914, "grad_norm": 0.6863853931427002, "learning_rate": 2.0064734424081388e-05, "loss": 1.0267, "step": 8563 }, { "epoch": 1.5742427058167627, "grad_norm": 0.6541019678115845, "learning_rate": 2.0047165099056466e-05, "loss": 0.943, "step": 8564 }, { "epoch": 1.5744285448801338, "grad_norm": 0.6452354788780212, "learning_rate": 2.0029602612570285e-05, "loss": 0.852, "step": 8565 }, { "epoch": 1.574614383943505, "grad_norm": 0.7347366809844971, "learning_rate": 2.001204696612503e-05, "loss": 0.8893, "step": 8566 }, { "epoch": 1.574800223006876, "grad_norm": 0.7106027007102966, "learning_rate": 1.9994498161222287e-05, "loss": 1.0119, "step": 8567 }, { "epoch": 1.5749860620702472, "grad_norm": 0.7813401818275452, "learning_rate": 1.997695619936305e-05, "loss": 0.9425, "step": 8568 }, { "epoch": 1.5751719011336183, "grad_norm": 0.8719832897186279, "learning_rate": 1.995942108204767e-05, "loss": 0.9462, "step": 8569 }, { "epoch": 1.5753577401969894, "grad_norm": 0.6365001201629639, "learning_rate": 1.9941892810775998e-05, "loss": 1.0615, "step": 8570 }, { "epoch": 1.5755435792603605, "grad_norm": 0.7773917317390442, "learning_rate": 1.9924371387047293e-05, "loss": 0.9165, "step": 8571 }, { "epoch": 1.5757294183237316, "grad_norm": 0.7087615132331848, "learning_rate": 1.9906856812360142e-05, "loss": 1.1179, "step": 8572 }, { "epoch": 1.575915257387103, "grad_norm": 0.6763791441917419, "learning_rate": 1.9889349088212627e-05, "loss": 0.8996, "step": 8573 }, { "epoch": 1.5761010964504738, 
"grad_norm": 0.722274124622345, "learning_rate": 1.9871848216102262e-05, "loss": 0.8501, "step": 8574 }, { "epoch": 1.5762869355138451, "grad_norm": 0.7045106887817383, "learning_rate": 1.985435419752586e-05, "loss": 0.8928, "step": 8575 }, { "epoch": 1.576472774577216, "grad_norm": 0.6729766726493835, "learning_rate": 1.98368670339798e-05, "loss": 1.0161, "step": 8576 }, { "epoch": 1.5766586136405873, "grad_norm": 0.7530122399330139, "learning_rate": 1.9819386726959722e-05, "loss": 0.9384, "step": 8577 }, { "epoch": 1.5768444527039582, "grad_norm": 0.7287601232528687, "learning_rate": 1.9801913277960827e-05, "loss": 0.9684, "step": 8578 }, { "epoch": 1.5770302917673296, "grad_norm": 0.6725710034370422, "learning_rate": 1.9784446688477587e-05, "loss": 0.8374, "step": 8579 }, { "epoch": 1.5772161308307004, "grad_norm": 0.8536375164985657, "learning_rate": 1.9766986960003985e-05, "loss": 1.0045, "step": 8580 }, { "epoch": 1.5774019698940718, "grad_norm": 0.6646410226821899, "learning_rate": 1.9749534094033428e-05, "loss": 0.7725, "step": 8581 }, { "epoch": 1.5775878089574429, "grad_norm": 0.73421710729599, "learning_rate": 1.973208809205863e-05, "loss": 0.8398, "step": 8582 }, { "epoch": 1.577773648020814, "grad_norm": 0.6640996336936951, "learning_rate": 1.9714648955571834e-05, "loss": 0.8321, "step": 8583 }, { "epoch": 1.577959487084185, "grad_norm": 0.7355279922485352, "learning_rate": 1.9697216686064624e-05, "loss": 1.1926, "step": 8584 }, { "epoch": 1.5781453261475562, "grad_norm": 0.892637312412262, "learning_rate": 1.967979128502805e-05, "loss": 0.9099, "step": 8585 }, { "epoch": 1.5783311652109273, "grad_norm": 0.6400608420372009, "learning_rate": 1.9662372753952497e-05, "loss": 0.8335, "step": 8586 }, { "epoch": 1.5785170042742984, "grad_norm": 0.8645890355110168, "learning_rate": 1.964496109432784e-05, "loss": 0.9008, "step": 8587 }, { "epoch": 1.5787028433376695, "grad_norm": 0.6955536007881165, "learning_rate": 1.9627556307643357e-05, "loss": 0.847, 
"step": 8588 }, { "epoch": 1.5788886824010406, "grad_norm": 0.7045804262161255, "learning_rate": 1.9610158395387657e-05, "loss": 0.944, "step": 8589 }, { "epoch": 1.579074521464412, "grad_norm": 0.642744243144989, "learning_rate": 1.959276735904889e-05, "loss": 0.9423, "step": 8590 }, { "epoch": 1.5792603605277828, "grad_norm": 0.758800745010376, "learning_rate": 1.9575383200114484e-05, "loss": 1.1133, "step": 8591 }, { "epoch": 1.5794461995911542, "grad_norm": 0.8545302152633667, "learning_rate": 1.9558005920071397e-05, "loss": 1.1038, "step": 8592 }, { "epoch": 1.579632038654525, "grad_norm": 0.6158139705657959, "learning_rate": 1.9540635520405902e-05, "loss": 0.9974, "step": 8593 }, { "epoch": 1.5798178777178964, "grad_norm": 0.6586875319480896, "learning_rate": 1.9523272002603742e-05, "loss": 0.7796, "step": 8594 }, { "epoch": 1.5800037167812673, "grad_norm": 0.6694611310958862, "learning_rate": 1.9505915368150084e-05, "loss": 1.0665, "step": 8595 }, { "epoch": 1.5801895558446386, "grad_norm": 0.7861499190330505, "learning_rate": 1.948856561852943e-05, "loss": 1.2053, "step": 8596 }, { "epoch": 1.5803753949080097, "grad_norm": 0.7392147779464722, "learning_rate": 1.947122275522577e-05, "loss": 0.8328, "step": 8597 }, { "epoch": 1.5805612339713808, "grad_norm": 3.2451038360595703, "learning_rate": 1.9453886779722473e-05, "loss": 1.4786, "step": 8598 }, { "epoch": 1.580747073034752, "grad_norm": 0.6445174813270569, "learning_rate": 1.9436557693502356e-05, "loss": 0.7439, "step": 8599 }, { "epoch": 1.580932912098123, "grad_norm": 0.9785395860671997, "learning_rate": 1.9419235498047537e-05, "loss": 0.9491, "step": 8600 }, { "epoch": 1.5811187511614941, "grad_norm": 0.7265476584434509, "learning_rate": 1.9401920194839675e-05, "loss": 0.9351, "step": 8601 }, { "epoch": 1.5813045902248652, "grad_norm": 0.7562678456306458, "learning_rate": 1.9384611785359795e-05, "loss": 0.9766, "step": 8602 }, { "epoch": 1.5814904292882364, "grad_norm": 0.6438634395599365, 
"learning_rate": 1.936731027108828e-05, "loss": 0.805, "step": 8603 }, { "epoch": 1.5816762683516075, "grad_norm": 0.6853559613227844, "learning_rate": 1.9350015653505015e-05, "loss": 0.9047, "step": 8604 }, { "epoch": 1.5818621074149788, "grad_norm": 0.8060786724090576, "learning_rate": 1.933272793408919e-05, "loss": 1.1083, "step": 8605 }, { "epoch": 1.5820479464783497, "grad_norm": 0.5869574546813965, "learning_rate": 1.9315447114319517e-05, "loss": 0.7251, "step": 8606 }, { "epoch": 1.582233785541721, "grad_norm": 0.7552639842033386, "learning_rate": 1.9298173195674007e-05, "loss": 0.935, "step": 8607 }, { "epoch": 1.582419624605092, "grad_norm": 0.7403124570846558, "learning_rate": 1.9280906179630165e-05, "loss": 1.0466, "step": 8608 }, { "epoch": 1.5826054636684632, "grad_norm": 0.753761887550354, "learning_rate": 1.9263646067664897e-05, "loss": 0.8971, "step": 8609 }, { "epoch": 1.582791302731834, "grad_norm": 0.7540832161903381, "learning_rate": 1.924639286125446e-05, "loss": 0.8634, "step": 8610 }, { "epoch": 1.5829771417952054, "grad_norm": 0.6291332840919495, "learning_rate": 1.9229146561874557e-05, "loss": 0.9713, "step": 8611 }, { "epoch": 1.5831629808585763, "grad_norm": 0.6404367089271545, "learning_rate": 1.921190717100032e-05, "loss": 0.8434, "step": 8612 }, { "epoch": 1.5833488199219476, "grad_norm": 0.7628955245018005, "learning_rate": 1.9194674690106307e-05, "loss": 0.8384, "step": 8613 }, { "epoch": 1.5835346589853188, "grad_norm": 0.7761968970298767, "learning_rate": 1.917744912066638e-05, "loss": 1.0999, "step": 8614 }, { "epoch": 1.5837204980486899, "grad_norm": 0.7242310047149658, "learning_rate": 1.9160230464153905e-05, "loss": 1.045, "step": 8615 }, { "epoch": 1.583906337112061, "grad_norm": 0.8341673612594604, "learning_rate": 1.914301872204166e-05, "loss": 0.967, "step": 8616 }, { "epoch": 1.584092176175432, "grad_norm": 0.799604058265686, "learning_rate": 1.9125813895801758e-05, "loss": 0.8067, "step": 8617 }, { "epoch": 
1.5842780152388032, "grad_norm": 0.7231751084327698, "learning_rate": 1.9108615986905808e-05, "loss": 1.1896, "step": 8618 }, { "epoch": 1.5844638543021743, "grad_norm": 0.7168472409248352, "learning_rate": 1.9091424996824748e-05, "loss": 0.9404, "step": 8619 }, { "epoch": 1.5846496933655454, "grad_norm": 0.5969322919845581, "learning_rate": 1.9074240927028986e-05, "loss": 0.7992, "step": 8620 }, { "epoch": 1.5848355324289165, "grad_norm": 0.7880811095237732, "learning_rate": 1.9057063778988283e-05, "loss": 0.9778, "step": 8621 }, { "epoch": 1.5850213714922878, "grad_norm": 0.6685348749160767, "learning_rate": 1.903989355417185e-05, "loss": 1.0805, "step": 8622 }, { "epoch": 1.5852072105556587, "grad_norm": 0.7681272625923157, "learning_rate": 1.9022730254048327e-05, "loss": 1.0139, "step": 8623 }, { "epoch": 1.58539304961903, "grad_norm": 0.7222515940666199, "learning_rate": 1.900557388008568e-05, "loss": 0.8886, "step": 8624 }, { "epoch": 1.585578888682401, "grad_norm": 0.6568890810012817, "learning_rate": 1.8988424433751374e-05, "loss": 0.8081, "step": 8625 }, { "epoch": 1.5857647277457723, "grad_norm": 0.7260691523551941, "learning_rate": 1.8971281916512184e-05, "loss": 0.9272, "step": 8626 }, { "epoch": 1.5859505668091431, "grad_norm": 0.6106563210487366, "learning_rate": 1.8954146329834377e-05, "loss": 0.9614, "step": 8627 }, { "epoch": 1.5861364058725145, "grad_norm": 0.7054546475410461, "learning_rate": 1.893701767518361e-05, "loss": 0.8133, "step": 8628 }, { "epoch": 1.5863222449358854, "grad_norm": 0.6412302255630493, "learning_rate": 1.8919895954024912e-05, "loss": 1.0168, "step": 8629 }, { "epoch": 1.5865080839992567, "grad_norm": 0.8782892227172852, "learning_rate": 1.890278116782277e-05, "loss": 1.1363, "step": 8630 }, { "epoch": 1.5866939230626278, "grad_norm": 0.8776052594184875, "learning_rate": 1.8885673318041018e-05, "loss": 0.8191, "step": 8631 }, { "epoch": 1.586879762125999, "grad_norm": 0.6485216617584229, "learning_rate": 
1.8868572406142958e-05, "loss": 0.6533, "step": 8632 }, { "epoch": 1.58706560118937, "grad_norm": 0.8401525616645813, "learning_rate": 1.885147843359122e-05, "loss": 1.0349, "step": 8633 }, { "epoch": 1.5872514402527411, "grad_norm": 0.7597665786743164, "learning_rate": 1.883439140184794e-05, "loss": 1.0918, "step": 8634 }, { "epoch": 1.5874372793161122, "grad_norm": 0.762586236000061, "learning_rate": 1.8817311312374564e-05, "loss": 1.1152, "step": 8635 }, { "epoch": 1.5876231183794833, "grad_norm": 0.73943692445755, "learning_rate": 1.8800238166632012e-05, "loss": 0.6863, "step": 8636 }, { "epoch": 1.5878089574428544, "grad_norm": 0.6723772883415222, "learning_rate": 1.8783171966080603e-05, "loss": 0.8738, "step": 8637 }, { "epoch": 1.5879947965062255, "grad_norm": 0.7524980306625366, "learning_rate": 1.8766112712180006e-05, "loss": 0.8884, "step": 8638 }, { "epoch": 1.5881806355695969, "grad_norm": 0.8099347352981567, "learning_rate": 1.8749060406389386e-05, "loss": 1.1606, "step": 8639 }, { "epoch": 1.5883664746329678, "grad_norm": 0.756376326084137, "learning_rate": 1.8732015050167218e-05, "loss": 0.9519, "step": 8640 }, { "epoch": 1.588552313696339, "grad_norm": 0.7101845741271973, "learning_rate": 1.871497664497144e-05, "loss": 0.7999, "step": 8641 }, { "epoch": 1.58873815275971, "grad_norm": 0.8314061164855957, "learning_rate": 1.869794519225938e-05, "loss": 1.0423, "step": 8642 }, { "epoch": 1.5889239918230813, "grad_norm": 0.7229037880897522, "learning_rate": 1.8680920693487792e-05, "loss": 0.9616, "step": 8643 }, { "epoch": 1.5891098308864522, "grad_norm": 0.6967963576316833, "learning_rate": 1.8663903150112837e-05, "loss": 0.9671, "step": 8644 }, { "epoch": 1.5892956699498235, "grad_norm": 0.7232875227928162, "learning_rate": 1.864689256359001e-05, "loss": 1.0798, "step": 8645 }, { "epoch": 1.5894815090131946, "grad_norm": 0.7937799096107483, "learning_rate": 1.862988893537432e-05, "loss": 0.9915, "step": 8646 }, { "epoch": 1.5896673480765657, 
"grad_norm": 0.7361637949943542, "learning_rate": 1.8612892266920055e-05, "loss": 0.733, "step": 8647 }, { "epoch": 1.5898531871399368, "grad_norm": 0.6371137499809265, "learning_rate": 1.8595902559681057e-05, "loss": 0.8012, "step": 8648 }, { "epoch": 1.590039026203308, "grad_norm": 0.7666060328483582, "learning_rate": 1.8578919815110408e-05, "loss": 1.1355, "step": 8649 }, { "epoch": 1.590224865266679, "grad_norm": 0.8657146692276001, "learning_rate": 1.856194403466074e-05, "loss": 1.0821, "step": 8650 }, { "epoch": 1.5904107043300502, "grad_norm": 0.8458912968635559, "learning_rate": 1.854497521978402e-05, "loss": 1.0808, "step": 8651 }, { "epoch": 1.5905965433934213, "grad_norm": 0.7622255086898804, "learning_rate": 1.8528013371931596e-05, "loss": 0.9853, "step": 8652 }, { "epoch": 1.5907823824567924, "grad_norm": 0.7003671526908875, "learning_rate": 1.8511058492554302e-05, "loss": 1.0668, "step": 8653 }, { "epoch": 1.5909682215201635, "grad_norm": 0.7001639604568481, "learning_rate": 1.8494110583102265e-05, "loss": 0.9536, "step": 8654 }, { "epoch": 1.5911540605835346, "grad_norm": 0.8250663876533508, "learning_rate": 1.8477169645025105e-05, "loss": 0.8961, "step": 8655 }, { "epoch": 1.591339899646906, "grad_norm": 0.6515169739723206, "learning_rate": 1.846023567977182e-05, "loss": 0.8208, "step": 8656 }, { "epoch": 1.5915257387102768, "grad_norm": 0.8317230939865112, "learning_rate": 1.8443308688790816e-05, "loss": 1.1036, "step": 8657 }, { "epoch": 1.5917115777736481, "grad_norm": 0.7945295572280884, "learning_rate": 1.8426388673529904e-05, "loss": 0.9185, "step": 8658 }, { "epoch": 1.591897416837019, "grad_norm": 0.5760872960090637, "learning_rate": 1.8409475635436245e-05, "loss": 0.7793, "step": 8659 }, { "epoch": 1.5920832559003903, "grad_norm": 0.627590537071228, "learning_rate": 1.8392569575956508e-05, "loss": 0.7388, "step": 8660 }, { "epoch": 1.5922690949637612, "grad_norm": 0.7489169836044312, "learning_rate": 1.8375670496536645e-05, "loss": 0.8907, 
"step": 8661 }, { "epoch": 1.5924549340271326, "grad_norm": 0.7438281178474426, "learning_rate": 1.8358778398622122e-05, "loss": 1.0574, "step": 8662 }, { "epoch": 1.5926407730905037, "grad_norm": 0.7566865086555481, "learning_rate": 1.8341893283657706e-05, "loss": 0.9673, "step": 8663 }, { "epoch": 1.5928266121538748, "grad_norm": 0.7408047318458557, "learning_rate": 1.8325015153087655e-05, "loss": 0.8386, "step": 8664 }, { "epoch": 1.5930124512172459, "grad_norm": 0.7485256791114807, "learning_rate": 1.83081440083556e-05, "loss": 0.8719, "step": 8665 }, { "epoch": 1.593198290280617, "grad_norm": 0.713358461856842, "learning_rate": 1.829127985090451e-05, "loss": 1.0221, "step": 8666 }, { "epoch": 1.593384129343988, "grad_norm": 0.6457721590995789, "learning_rate": 1.8274422682176882e-05, "loss": 0.7824, "step": 8667 }, { "epoch": 1.5935699684073592, "grad_norm": 0.6470180153846741, "learning_rate": 1.8257572503614494e-05, "loss": 0.9674, "step": 8668 }, { "epoch": 1.5937558074707303, "grad_norm": 0.654979407787323, "learning_rate": 1.82407293166586e-05, "loss": 0.976, "step": 8669 }, { "epoch": 1.5939416465341014, "grad_norm": 1.354627251625061, "learning_rate": 1.8223893122749857e-05, "loss": 1.4556, "step": 8670 }, { "epoch": 1.5941274855974727, "grad_norm": 0.8423448204994202, "learning_rate": 1.8207063923328237e-05, "loss": 0.9121, "step": 8671 }, { "epoch": 1.5943133246608436, "grad_norm": 0.7790369391441345, "learning_rate": 1.8190241719833233e-05, "loss": 0.8746, "step": 8672 }, { "epoch": 1.594499163724215, "grad_norm": 0.7903871536254883, "learning_rate": 1.8173426513703662e-05, "loss": 1.0795, "step": 8673 }, { "epoch": 1.5946850027875858, "grad_norm": 0.8038315773010254, "learning_rate": 1.8156618306377805e-05, "loss": 1.0508, "step": 8674 }, { "epoch": 1.5948708418509572, "grad_norm": 0.6397636532783508, "learning_rate": 1.8139817099293232e-05, "loss": 0.8527, "step": 8675 }, { "epoch": 1.595056680914328, "grad_norm": 0.774939775466919, 
"learning_rate": 1.8123022893887065e-05, "loss": 1.0397, "step": 8676 }, { "epoch": 1.5952425199776994, "grad_norm": 0.7472224235534668, "learning_rate": 1.8106235691595675e-05, "loss": 1.1034, "step": 8677 }, { "epoch": 1.5954283590410703, "grad_norm": 0.7666073441505432, "learning_rate": 1.8089455493854944e-05, "loss": 0.9883, "step": 8678 }, { "epoch": 1.5956141981044416, "grad_norm": 0.6919713020324707, "learning_rate": 1.807268230210014e-05, "loss": 0.8407, "step": 8679 }, { "epoch": 1.5958000371678127, "grad_norm": 0.6459718942642212, "learning_rate": 1.805591611776587e-05, "loss": 0.9556, "step": 8680 }, { "epoch": 1.5959858762311838, "grad_norm": 0.7357549667358398, "learning_rate": 1.8039156942286218e-05, "loss": 0.928, "step": 8681 }, { "epoch": 1.596171715294555, "grad_norm": 0.6708377599716187, "learning_rate": 1.8022404777094592e-05, "loss": 0.7842, "step": 8682 }, { "epoch": 1.596357554357926, "grad_norm": 0.7238865494728088, "learning_rate": 1.800565962362388e-05, "loss": 0.845, "step": 8683 }, { "epoch": 1.5965433934212971, "grad_norm": 0.5772403478622437, "learning_rate": 1.7988921483306297e-05, "loss": 0.9731, "step": 8684 }, { "epoch": 1.5967292324846682, "grad_norm": 0.628963828086853, "learning_rate": 1.7972190357573505e-05, "loss": 0.9534, "step": 8685 }, { "epoch": 1.5969150715480394, "grad_norm": 0.6648304462432861, "learning_rate": 1.7955466247856556e-05, "loss": 0.8157, "step": 8686 }, { "epoch": 1.5971009106114105, "grad_norm": 0.7793940305709839, "learning_rate": 1.7938749155585897e-05, "loss": 1.2967, "step": 8687 }, { "epoch": 1.5972867496747818, "grad_norm": 0.6746123433113098, "learning_rate": 1.792203908219141e-05, "loss": 1.0104, "step": 8688 }, { "epoch": 1.5974725887381527, "grad_norm": 0.8871963024139404, "learning_rate": 1.790533602910229e-05, "loss": 1.184, "step": 8689 }, { "epoch": 1.597658427801524, "grad_norm": 0.7723110318183899, "learning_rate": 1.7888639997747237e-05, "loss": 0.9134, "step": 8690 }, { "epoch": 
1.597844266864895, "grad_norm": 0.6560113430023193, "learning_rate": 1.787195098955424e-05, "loss": 0.9556, "step": 8691 }, { "epoch": 1.5980301059282662, "grad_norm": 0.846978485584259, "learning_rate": 1.785526900595078e-05, "loss": 1.0675, "step": 8692 }, { "epoch": 1.598215944991637, "grad_norm": 0.7356848120689392, "learning_rate": 1.783859404836372e-05, "loss": 0.8517, "step": 8693 }, { "epoch": 1.5984017840550084, "grad_norm": 0.837843656539917, "learning_rate": 1.7821926118219268e-05, "loss": 1.2275, "step": 8694 }, { "epoch": 1.5985876231183793, "grad_norm": 0.6126946806907654, "learning_rate": 1.78052652169431e-05, "loss": 0.8591, "step": 8695 }, { "epoch": 1.5987734621817506, "grad_norm": 0.7435805201530457, "learning_rate": 1.7788611345960227e-05, "loss": 1.0836, "step": 8696 }, { "epoch": 1.5989593012451218, "grad_norm": 0.7877734899520874, "learning_rate": 1.7771964506695136e-05, "loss": 1.2115, "step": 8697 }, { "epoch": 1.5991451403084929, "grad_norm": 0.5992589592933655, "learning_rate": 1.7755324700571608e-05, "loss": 0.7697, "step": 8698 }, { "epoch": 1.599330979371864, "grad_norm": 1.7316218614578247, "learning_rate": 1.7738691929012918e-05, "loss": 1.7008, "step": 8699 }, { "epoch": 1.599516818435235, "grad_norm": 0.7186190485954285, "learning_rate": 1.772206619344169e-05, "loss": 0.8947, "step": 8700 }, { "epoch": 1.5997026574986062, "grad_norm": 0.6659690141677856, "learning_rate": 1.770544749527997e-05, "loss": 0.7263, "step": 8701 }, { "epoch": 1.5998884965619773, "grad_norm": 0.9670864343643188, "learning_rate": 1.7688835835949214e-05, "loss": 1.0106, "step": 8702 }, { "epoch": 1.6000743356253484, "grad_norm": 0.7367480397224426, "learning_rate": 1.7672231216870194e-05, "loss": 1.1188, "step": 8703 }, { "epoch": 1.6002601746887195, "grad_norm": 0.6136513948440552, "learning_rate": 1.7655633639463208e-05, "loss": 0.7875, "step": 8704 }, { "epoch": 1.6004460137520908, "grad_norm": 0.6350746750831604, "learning_rate": 1.7639043105147813e-05, 
"loss": 1.0765, "step": 8705 }, { "epoch": 1.6006318528154617, "grad_norm": 0.8280909657478333, "learning_rate": 1.7622459615343067e-05, "loss": 0.9999, "step": 8706 }, { "epoch": 1.600817691878833, "grad_norm": 0.6341865658760071, "learning_rate": 1.7605883171467418e-05, "loss": 0.8677, "step": 8707 }, { "epoch": 1.601003530942204, "grad_norm": 0.6041312217712402, "learning_rate": 1.7589313774938643e-05, "loss": 0.7505, "step": 8708 }, { "epoch": 1.6011893700055753, "grad_norm": 0.6812422871589661, "learning_rate": 1.7572751427173984e-05, "loss": 0.9407, "step": 8709 }, { "epoch": 1.6013752090689461, "grad_norm": 0.7144443392753601, "learning_rate": 1.755619612959003e-05, "loss": 0.9346, "step": 8710 }, { "epoch": 1.6015610481323175, "grad_norm": 0.6252550482749939, "learning_rate": 1.7539647883602828e-05, "loss": 0.7634, "step": 8711 }, { "epoch": 1.6017468871956886, "grad_norm": 0.9227977395057678, "learning_rate": 1.7523106690627732e-05, "loss": 0.6466, "step": 8712 }, { "epoch": 1.6019327262590597, "grad_norm": 0.6825462579727173, "learning_rate": 1.750657255207958e-05, "loss": 0.9107, "step": 8713 }, { "epoch": 1.6021185653224308, "grad_norm": 0.6466217041015625, "learning_rate": 1.7490045469372574e-05, "loss": 0.8781, "step": 8714 }, { "epoch": 1.602304404385802, "grad_norm": 0.766028642654419, "learning_rate": 1.7473525443920325e-05, "loss": 0.7986, "step": 8715 }, { "epoch": 1.602490243449173, "grad_norm": 0.8478003740310669, "learning_rate": 1.7457012477135782e-05, "loss": 0.7937, "step": 8716 }, { "epoch": 1.6026760825125441, "grad_norm": 0.6838301420211792, "learning_rate": 1.744050657043137e-05, "loss": 1.0052, "step": 8717 }, { "epoch": 1.6028619215759152, "grad_norm": 0.6403393149375916, "learning_rate": 1.7424007725218883e-05, "loss": 0.6411, "step": 8718 }, { "epoch": 1.6030477606392863, "grad_norm": 0.7843559980392456, "learning_rate": 1.7407515942909457e-05, "loss": 0.9943, "step": 8719 }, { "epoch": 1.6032335997026577, "grad_norm": 
0.69202721118927, "learning_rate": 1.739103122491371e-05, "loss": 0.8886, "step": 8720 }, { "epoch": 1.6034194387660285, "grad_norm": 0.6292579770088196, "learning_rate": 1.7374553572641617e-05, "loss": 0.9584, "step": 8721 }, { "epoch": 1.6036052778293999, "grad_norm": 0.707538902759552, "learning_rate": 1.7358082987502523e-05, "loss": 0.9153, "step": 8722 }, { "epoch": 1.6037911168927708, "grad_norm": 0.7223339080810547, "learning_rate": 1.734161947090521e-05, "loss": 0.8471, "step": 8723 }, { "epoch": 1.603976955956142, "grad_norm": 0.9097388982772827, "learning_rate": 1.7325163024257818e-05, "loss": 0.9756, "step": 8724 }, { "epoch": 1.604162795019513, "grad_norm": 0.6092236638069153, "learning_rate": 1.7308713648967945e-05, "loss": 0.8956, "step": 8725 }, { "epoch": 1.6043486340828843, "grad_norm": 0.7708209753036499, "learning_rate": 1.729227134644248e-05, "loss": 0.9583, "step": 8726 }, { "epoch": 1.6045344731462552, "grad_norm": 0.5948147177696228, "learning_rate": 1.7275836118087807e-05, "loss": 0.9365, "step": 8727 }, { "epoch": 1.6047203122096265, "grad_norm": 0.679939866065979, "learning_rate": 1.7259407965309682e-05, "loss": 0.963, "step": 8728 }, { "epoch": 1.6049061512729976, "grad_norm": 0.6386947631835938, "learning_rate": 1.72429868895132e-05, "loss": 1.0086, "step": 8729 }, { "epoch": 1.6050919903363687, "grad_norm": 0.6231483221054077, "learning_rate": 1.7226572892102912e-05, "loss": 0.9308, "step": 8730 }, { "epoch": 1.6052778293997398, "grad_norm": 0.688547670841217, "learning_rate": 1.721016597448274e-05, "loss": 0.9231, "step": 8731 }, { "epoch": 1.605463668463111, "grad_norm": 0.8977246880531311, "learning_rate": 1.719376613805602e-05, "loss": 1.176, "step": 8732 }, { "epoch": 1.605649507526482, "grad_norm": 0.6980913281440735, "learning_rate": 1.7177373384225436e-05, "loss": 0.8452, "step": 8733 }, { "epoch": 1.6058353465898532, "grad_norm": 0.9629896879196167, "learning_rate": 1.7160987714393105e-05, "loss": 0.7985, "step": 8734 }, { 
"epoch": 1.6060211856532243, "grad_norm": 0.7529435157775879, "learning_rate": 1.7144609129960553e-05, "loss": 1.2252, "step": 8735 }, { "epoch": 1.6062070247165954, "grad_norm": 0.663002610206604, "learning_rate": 1.7128237632328636e-05, "loss": 0.8357, "step": 8736 }, { "epoch": 1.6063928637799667, "grad_norm": 0.729487419128418, "learning_rate": 1.711187322289769e-05, "loss": 1.0419, "step": 8737 }, { "epoch": 1.6065787028433376, "grad_norm": 0.8559988737106323, "learning_rate": 1.7095515903067348e-05, "loss": 1.0194, "step": 8738 }, { "epoch": 1.606764541906709, "grad_norm": 0.8755013942718506, "learning_rate": 1.707916567423674e-05, "loss": 1.1847, "step": 8739 }, { "epoch": 1.6069503809700798, "grad_norm": 0.6425202488899231, "learning_rate": 1.706282253780428e-05, "loss": 0.9878, "step": 8740 }, { "epoch": 1.6071362200334511, "grad_norm": 0.6389315724372864, "learning_rate": 1.7046486495167857e-05, "loss": 0.9392, "step": 8741 }, { "epoch": 1.607322059096822, "grad_norm": 0.7063748240470886, "learning_rate": 1.7030157547724745e-05, "loss": 1.0674, "step": 8742 }, { "epoch": 1.6075078981601933, "grad_norm": 0.7330775260925293, "learning_rate": 1.7013835696871572e-05, "loss": 0.8779, "step": 8743 }, { "epoch": 1.6076937372235642, "grad_norm": 0.7162792682647705, "learning_rate": 1.6997520944004375e-05, "loss": 1.028, "step": 8744 }, { "epoch": 1.6078795762869356, "grad_norm": 0.7317261695861816, "learning_rate": 1.6981213290518614e-05, "loss": 1.0238, "step": 8745 }, { "epoch": 1.6080654153503067, "grad_norm": 0.6344571113586426, "learning_rate": 1.6964912737809123e-05, "loss": 0.8266, "step": 8746 }, { "epoch": 1.6082512544136778, "grad_norm": 0.6402305960655212, "learning_rate": 1.6948619287270097e-05, "loss": 0.9155, "step": 8747 }, { "epoch": 1.6084370934770489, "grad_norm": 0.7457547187805176, "learning_rate": 1.693233294029515e-05, "loss": 1.1292, "step": 8748 }, { "epoch": 1.60862293254042, "grad_norm": 0.6838110089302063, "learning_rate": 
1.6916053698277323e-05, "loss": 0.6868, "step": 8749 }, { "epoch": 1.608808771603791, "grad_norm": 1.871200442314148, "learning_rate": 1.6899781562608962e-05, "loss": 1.2426, "step": 8750 }, { "epoch": 1.6089946106671622, "grad_norm": 0.7761055827140808, "learning_rate": 1.688351653468192e-05, "loss": 1.0335, "step": 8751 }, { "epoch": 1.6091804497305333, "grad_norm": 0.5921274423599243, "learning_rate": 1.6867258615887328e-05, "loss": 0.7222, "step": 8752 }, { "epoch": 1.6093662887939044, "grad_norm": 0.6779144406318665, "learning_rate": 1.6851007807615792e-05, "loss": 0.873, "step": 8753 }, { "epoch": 1.6095521278572757, "grad_norm": 0.6916807889938354, "learning_rate": 1.683476411125725e-05, "loss": 1.0378, "step": 8754 }, { "epoch": 1.6097379669206466, "grad_norm": 0.6583566069602966, "learning_rate": 1.6818527528201078e-05, "loss": 0.9381, "step": 8755 }, { "epoch": 1.609923805984018, "grad_norm": 0.6733114719390869, "learning_rate": 1.6802298059836052e-05, "loss": 0.7155, "step": 8756 }, { "epoch": 1.6101096450473888, "grad_norm": 0.5884139537811279, "learning_rate": 1.6786075707550253e-05, "loss": 0.8258, "step": 8757 }, { "epoch": 1.6102954841107602, "grad_norm": 0.6669282913208008, "learning_rate": 1.6769860472731257e-05, "loss": 0.8458, "step": 8758 }, { "epoch": 1.610481323174131, "grad_norm": 0.7951620221138, "learning_rate": 1.6753652356765968e-05, "loss": 1.1442, "step": 8759 }, { "epoch": 1.6106671622375024, "grad_norm": 0.6817410588264465, "learning_rate": 1.6737451361040746e-05, "loss": 0.9404, "step": 8760 }, { "epoch": 1.6108530013008733, "grad_norm": 0.7657042741775513, "learning_rate": 1.6721257486941234e-05, "loss": 0.9618, "step": 8761 }, { "epoch": 1.6110388403642446, "grad_norm": 0.7671672105789185, "learning_rate": 1.670507073585257e-05, "loss": 0.88, "step": 8762 }, { "epoch": 1.6112246794276157, "grad_norm": 0.7470976114273071, "learning_rate": 1.6688891109159244e-05, "loss": 1.0605, "step": 8763 }, { "epoch": 1.6114105184909868, 
"grad_norm": 0.7541417479515076, "learning_rate": 1.6672718608245086e-05, "loss": 1.0119, "step": 8764 }, { "epoch": 1.611596357554358, "grad_norm": 0.69011390209198, "learning_rate": 1.6656553234493445e-05, "loss": 0.8798, "step": 8765 }, { "epoch": 1.611782196617729, "grad_norm": 0.7282363176345825, "learning_rate": 1.664039498928689e-05, "loss": 1.0463, "step": 8766 }, { "epoch": 1.6119680356811001, "grad_norm": 0.6672340035438538, "learning_rate": 1.662424387400755e-05, "loss": 1.0736, "step": 8767 }, { "epoch": 1.6121538747444712, "grad_norm": 0.9124537706375122, "learning_rate": 1.6608099890036812e-05, "loss": 0.8491, "step": 8768 }, { "epoch": 1.6123397138078424, "grad_norm": 0.7714374661445618, "learning_rate": 1.659196303875551e-05, "loss": 0.8433, "step": 8769 }, { "epoch": 1.6125255528712135, "grad_norm": 0.6858008503913879, "learning_rate": 1.657583332154391e-05, "loss": 1.0529, "step": 8770 }, { "epoch": 1.6127113919345848, "grad_norm": 0.5979937314987183, "learning_rate": 1.6559710739781565e-05, "loss": 0.6754, "step": 8771 }, { "epoch": 1.6128972309979557, "grad_norm": 0.6006745100021362, "learning_rate": 1.6543595294847525e-05, "loss": 0.8297, "step": 8772 }, { "epoch": 1.613083070061327, "grad_norm": 0.7025749683380127, "learning_rate": 1.652748698812012e-05, "loss": 0.7941, "step": 8773 }, { "epoch": 1.613268909124698, "grad_norm": 0.7484373450279236, "learning_rate": 1.6511385820977165e-05, "loss": 0.8617, "step": 8774 }, { "epoch": 1.6134547481880692, "grad_norm": 0.6823762059211731, "learning_rate": 1.6495291794795832e-05, "loss": 0.8227, "step": 8775 }, { "epoch": 1.61364058725144, "grad_norm": 0.6824286580085754, "learning_rate": 1.6479204910952663e-05, "loss": 0.979, "step": 8776 }, { "epoch": 1.6138264263148114, "grad_norm": 0.7116637229919434, "learning_rate": 1.6463125170823635e-05, "loss": 0.7134, "step": 8777 }, { "epoch": 1.6140122653781825, "grad_norm": 0.716037929058075, "learning_rate": 1.6447052575784028e-05, "loss": 0.9194, 
"step": 8778 }, { "epoch": 1.6141981044415536, "grad_norm": 0.6647517681121826, "learning_rate": 1.6430987127208618e-05, "loss": 0.8919, "step": 8779 }, { "epoch": 1.6143839435049248, "grad_norm": 0.8217048645019531, "learning_rate": 1.6414928826471488e-05, "loss": 0.9364, "step": 8780 }, { "epoch": 1.6145697825682959, "grad_norm": 0.8008784055709839, "learning_rate": 1.6398877674946156e-05, "loss": 0.9451, "step": 8781 }, { "epoch": 1.614755621631667, "grad_norm": 0.7590863108634949, "learning_rate": 1.6382833674005484e-05, "loss": 0.7621, "step": 8782 }, { "epoch": 1.614941460695038, "grad_norm": 0.6634281277656555, "learning_rate": 1.636679682502176e-05, "loss": 1.0038, "step": 8783 }, { "epoch": 1.6151272997584092, "grad_norm": 0.767975926399231, "learning_rate": 1.6350767129366694e-05, "loss": 0.8915, "step": 8784 }, { "epoch": 1.6153131388217803, "grad_norm": 0.6990622282028198, "learning_rate": 1.6334744588411276e-05, "loss": 1.0527, "step": 8785 }, { "epoch": 1.6154989778851516, "grad_norm": 0.5986868739128113, "learning_rate": 1.6318729203525994e-05, "loss": 0.9237, "step": 8786 }, { "epoch": 1.6156848169485225, "grad_norm": 0.7767715454101562, "learning_rate": 1.6302720976080653e-05, "loss": 1.0121, "step": 8787 }, { "epoch": 1.6158706560118938, "grad_norm": 0.7090464234352112, "learning_rate": 1.628671990744447e-05, "loss": 0.89, "step": 8788 }, { "epoch": 1.6160564950752647, "grad_norm": 0.7036499977111816, "learning_rate": 1.627072599898607e-05, "loss": 1.0243, "step": 8789 }, { "epoch": 1.616242334138636, "grad_norm": 0.7550042867660522, "learning_rate": 1.6254739252073436e-05, "loss": 0.9792, "step": 8790 }, { "epoch": 1.616428173202007, "grad_norm": 0.8685239553451538, "learning_rate": 1.6238759668073966e-05, "loss": 0.902, "step": 8791 }, { "epoch": 1.6166140122653783, "grad_norm": 0.741919755935669, "learning_rate": 1.6222787248354398e-05, "loss": 0.8697, "step": 8792 }, { "epoch": 1.6167998513287491, "grad_norm": 0.6833041906356812, 
"learning_rate": 1.620682199428093e-05, "loss": 1.2719, "step": 8793 }, { "epoch": 1.6169856903921205, "grad_norm": 0.6062202453613281, "learning_rate": 1.6190863907219046e-05, "loss": 0.8007, "step": 8794 }, { "epoch": 1.6171715294554916, "grad_norm": 0.741337239742279, "learning_rate": 1.617491298853373e-05, "loss": 0.9497, "step": 8795 }, { "epoch": 1.6173573685188627, "grad_norm": 0.683061957359314, "learning_rate": 1.615896923958926e-05, "loss": 0.8604, "step": 8796 }, { "epoch": 1.6175432075822338, "grad_norm": 0.7003892064094543, "learning_rate": 1.6143032661749347e-05, "loss": 1.0212, "step": 8797 }, { "epoch": 1.617729046645605, "grad_norm": 0.6905108690261841, "learning_rate": 1.612710325637712e-05, "loss": 1.0571, "step": 8798 }, { "epoch": 1.617914885708976, "grad_norm": 0.5632252097129822, "learning_rate": 1.6111181024835e-05, "loss": 0.9349, "step": 8799 }, { "epoch": 1.6181007247723471, "grad_norm": 0.6871153116226196, "learning_rate": 1.6095265968484898e-05, "loss": 0.9384, "step": 8800 }, { "epoch": 1.6182865638357182, "grad_norm": 0.8183594346046448, "learning_rate": 1.607935808868801e-05, "loss": 0.8518, "step": 8801 }, { "epoch": 1.6184724028990893, "grad_norm": 0.8559496402740479, "learning_rate": 1.6063457386805004e-05, "loss": 1.1082, "step": 8802 }, { "epoch": 1.6186582419624607, "grad_norm": 0.7051379680633545, "learning_rate": 1.6047563864195902e-05, "loss": 0.8697, "step": 8803 }, { "epoch": 1.6188440810258315, "grad_norm": 0.6523978114128113, "learning_rate": 1.6031677522220113e-05, "loss": 1.1111, "step": 8804 }, { "epoch": 1.6190299200892029, "grad_norm": 0.6645488739013672, "learning_rate": 1.6015798362236433e-05, "loss": 0.9398, "step": 8805 }, { "epoch": 1.6192157591525738, "grad_norm": 0.7267053723335266, "learning_rate": 1.5999926385603025e-05, "loss": 0.8864, "step": 8806 }, { "epoch": 1.619401598215945, "grad_norm": 0.7057207226753235, "learning_rate": 1.598406159367748e-05, "loss": 0.8944, "step": 8807 }, { "epoch": 
1.619587437279316, "grad_norm": 0.7058221101760864, "learning_rate": 1.59682039878167e-05, "loss": 0.7088, "step": 8808 }, { "epoch": 1.6197732763426873, "grad_norm": 0.6227111220359802, "learning_rate": 1.5952353569377077e-05, "loss": 0.9605, "step": 8809 }, { "epoch": 1.6199591154060582, "grad_norm": 0.7384781241416931, "learning_rate": 1.5936510339714273e-05, "loss": 1.1215, "step": 8810 }, { "epoch": 1.6201449544694295, "grad_norm": 0.5976701378822327, "learning_rate": 1.5920674300183435e-05, "loss": 0.6481, "step": 8811 }, { "epoch": 1.6203307935328006, "grad_norm": 0.6821479201316833, "learning_rate": 1.5904845452139062e-05, "loss": 0.9243, "step": 8812 }, { "epoch": 1.6205166325961717, "grad_norm": 0.7828944325447083, "learning_rate": 1.5889023796934986e-05, "loss": 1.0344, "step": 8813 }, { "epoch": 1.6207024716595428, "grad_norm": 0.6303045153617859, "learning_rate": 1.5873209335924522e-05, "loss": 0.8991, "step": 8814 }, { "epoch": 1.620888310722914, "grad_norm": 0.8067341446876526, "learning_rate": 1.585740207046026e-05, "loss": 0.961, "step": 8815 }, { "epoch": 1.621074149786285, "grad_norm": 0.7798840403556824, "learning_rate": 1.584160200189425e-05, "loss": 0.9915, "step": 8816 }, { "epoch": 1.6212599888496562, "grad_norm": 0.6932112574577332, "learning_rate": 1.582580913157794e-05, "loss": 1.0362, "step": 8817 }, { "epoch": 1.6214458279130273, "grad_norm": 0.7943875193595886, "learning_rate": 1.5810023460862077e-05, "loss": 1.2271, "step": 8818 }, { "epoch": 1.6216316669763984, "grad_norm": 0.7108856439590454, "learning_rate": 1.5794244991096862e-05, "loss": 0.959, "step": 8819 }, { "epoch": 1.6218175060397697, "grad_norm": 0.683498203754425, "learning_rate": 1.577847372363187e-05, "loss": 0.8458, "step": 8820 }, { "epoch": 1.6220033451031406, "grad_norm": 0.8767088055610657, "learning_rate": 1.5762709659816066e-05, "loss": 0.8359, "step": 8821 }, { "epoch": 1.622189184166512, "grad_norm": 0.6798695921897888, "learning_rate": 1.574695280099774e-05, 
"loss": 0.8094, "step": 8822 }, { "epoch": 1.6223750232298828, "grad_norm": 0.5580926537513733, "learning_rate": 1.5731203148524663e-05, "loss": 0.6944, "step": 8823 }, { "epoch": 1.6225608622932541, "grad_norm": 0.7183531522750854, "learning_rate": 1.571546070374389e-05, "loss": 0.9789, "step": 8824 }, { "epoch": 1.622746701356625, "grad_norm": 0.5126298666000366, "learning_rate": 1.5699725468001912e-05, "loss": 0.5138, "step": 8825 }, { "epoch": 1.6229325404199964, "grad_norm": 0.6757166981697083, "learning_rate": 1.568399744264465e-05, "loss": 0.8028, "step": 8826 }, { "epoch": 1.6231183794833675, "grad_norm": 0.6396664381027222, "learning_rate": 1.5668276629017286e-05, "loss": 1.0848, "step": 8827 }, { "epoch": 1.6233042185467386, "grad_norm": 0.7553821206092834, "learning_rate": 1.5652563028464517e-05, "loss": 0.8485, "step": 8828 }, { "epoch": 1.6234900576101097, "grad_norm": 0.7239199876785278, "learning_rate": 1.56368566423303e-05, "loss": 0.9049, "step": 8829 }, { "epoch": 1.6236758966734808, "grad_norm": 0.6508596539497375, "learning_rate": 1.5621157471958093e-05, "loss": 0.8735, "step": 8830 }, { "epoch": 1.6238617357368519, "grad_norm": 0.6568943858146667, "learning_rate": 1.5605465518690633e-05, "loss": 0.9732, "step": 8831 }, { "epoch": 1.624047574800223, "grad_norm": 0.6617598533630371, "learning_rate": 1.558978078387011e-05, "loss": 0.8283, "step": 8832 }, { "epoch": 1.624233413863594, "grad_norm": 0.7476336359977722, "learning_rate": 1.5574103268838058e-05, "loss": 0.8835, "step": 8833 }, { "epoch": 1.6244192529269652, "grad_norm": 0.744114339351654, "learning_rate": 1.555843297493543e-05, "loss": 0.9546, "step": 8834 }, { "epoch": 1.6246050919903363, "grad_norm": 0.5931903123855591, "learning_rate": 1.5542769903502562e-05, "loss": 0.7503, "step": 8835 }, { "epoch": 1.6247909310537074, "grad_norm": 0.7275744080543518, "learning_rate": 1.552711405587909e-05, "loss": 1.0665, "step": 8836 }, { "epoch": 1.6249767701170788, "grad_norm": 
0.8302237391471863, "learning_rate": 1.551146543340415e-05, "loss": 0.8374, "step": 8837 }, { "epoch": 1.6251626091804496, "grad_norm": 0.7517722249031067, "learning_rate": 1.549582403741615e-05, "loss": 1.0305, "step": 8838 }, { "epoch": 1.625348448243821, "grad_norm": 0.7293110489845276, "learning_rate": 1.548018986925296e-05, "loss": 0.8774, "step": 8839 }, { "epoch": 1.6255342873071918, "grad_norm": 0.7916002869606018, "learning_rate": 1.5464562930251814e-05, "loss": 1.0857, "step": 8840 }, { "epoch": 1.6257201263705632, "grad_norm": 0.7529808282852173, "learning_rate": 1.544894322174929e-05, "loss": 0.9024, "step": 8841 }, { "epoch": 1.625905965433934, "grad_norm": 0.6958862543106079, "learning_rate": 1.543333074508142e-05, "loss": 0.8539, "step": 8842 }, { "epoch": 1.6260918044973054, "grad_norm": 0.7076324820518494, "learning_rate": 1.5417725501583514e-05, "loss": 0.9758, "step": 8843 }, { "epoch": 1.6262776435606765, "grad_norm": 0.6581704020500183, "learning_rate": 1.540212749259038e-05, "loss": 0.9227, "step": 8844 }, { "epoch": 1.6264634826240476, "grad_norm": 0.9186629056930542, "learning_rate": 1.5386536719436095e-05, "loss": 1.1539, "step": 8845 }, { "epoch": 1.6266493216874187, "grad_norm": 0.6017464995384216, "learning_rate": 1.5370953183454194e-05, "loss": 0.8596, "step": 8846 }, { "epoch": 1.6268351607507898, "grad_norm": 0.8916236758232117, "learning_rate": 1.535537688597758e-05, "loss": 1.0041, "step": 8847 }, { "epoch": 1.627020999814161, "grad_norm": 0.7704942226409912, "learning_rate": 1.5339807828338524e-05, "loss": 0.9189, "step": 8848 }, { "epoch": 1.627206838877532, "grad_norm": 0.7622827887535095, "learning_rate": 1.5324246011868694e-05, "loss": 0.9924, "step": 8849 }, { "epoch": 1.6273926779409031, "grad_norm": 0.6575504541397095, "learning_rate": 1.5308691437899093e-05, "loss": 0.8895, "step": 8850 }, { "epoch": 1.6275785170042743, "grad_norm": 0.7674857974052429, "learning_rate": 1.5293144107760183e-05, "loss": 0.9631, "step": 8851 }, 
{ "epoch": 1.6277643560676456, "grad_norm": 0.7199091911315918, "learning_rate": 1.5277604022781712e-05, "loss": 1.0325, "step": 8852 }, { "epoch": 1.6279501951310165, "grad_norm": 0.7748641967773438, "learning_rate": 1.526207118429287e-05, "loss": 1.1489, "step": 8853 }, { "epoch": 1.6281360341943878, "grad_norm": 0.8894672989845276, "learning_rate": 1.5246545593622242e-05, "loss": 0.9587, "step": 8854 }, { "epoch": 1.6283218732577587, "grad_norm": 0.6617799997329712, "learning_rate": 1.523102725209773e-05, "loss": 1.0071, "step": 8855 }, { "epoch": 1.62850771232113, "grad_norm": 0.6675786375999451, "learning_rate": 1.5215516161046683e-05, "loss": 1.0015, "step": 8856 }, { "epoch": 1.628693551384501, "grad_norm": 0.6378850340843201, "learning_rate": 1.5200012321795765e-05, "loss": 0.8807, "step": 8857 }, { "epoch": 1.6288793904478722, "grad_norm": 0.8859041929244995, "learning_rate": 1.518451573567109e-05, "loss": 1.2239, "step": 8858 }, { "epoch": 1.629065229511243, "grad_norm": 0.8571634888648987, "learning_rate": 1.5169026403998076e-05, "loss": 1.0145, "step": 8859 }, { "epoch": 1.6292510685746144, "grad_norm": 1.0119373798370361, "learning_rate": 1.5153544328101577e-05, "loss": 1.3056, "step": 8860 }, { "epoch": 1.6294369076379855, "grad_norm": 0.6872910261154175, "learning_rate": 1.5138069509305807e-05, "loss": 0.6696, "step": 8861 }, { "epoch": 1.6296227467013567, "grad_norm": 0.6504276990890503, "learning_rate": 1.512260194893439e-05, "loss": 0.9549, "step": 8862 }, { "epoch": 1.6298085857647278, "grad_norm": 0.6661650538444519, "learning_rate": 1.5107141648310241e-05, "loss": 0.8873, "step": 8863 }, { "epoch": 1.6299944248280989, "grad_norm": 0.8729071617126465, "learning_rate": 1.5091688608755749e-05, "loss": 0.847, "step": 8864 }, { "epoch": 1.63018026389147, "grad_norm": 0.8262352347373962, "learning_rate": 1.5076242831592668e-05, "loss": 0.8209, "step": 8865 }, { "epoch": 1.630366102954841, "grad_norm": 0.6832601428031921, "learning_rate": 
1.5060804318142052e-05, "loss": 0.8971, "step": 8866 }, { "epoch": 1.6305519420182122, "grad_norm": 0.7103747725486755, "learning_rate": 1.5045373069724423e-05, "loss": 0.8639, "step": 8867 }, { "epoch": 1.6307377810815833, "grad_norm": 0.7560636401176453, "learning_rate": 1.5029949087659667e-05, "loss": 0.941, "step": 8868 }, { "epoch": 1.6309236201449546, "grad_norm": 0.7987247109413147, "learning_rate": 1.5014532373266987e-05, "loss": 0.76, "step": 8869 }, { "epoch": 1.6311094592083255, "grad_norm": 0.7744994759559631, "learning_rate": 1.4999122927865061e-05, "loss": 0.7802, "step": 8870 }, { "epoch": 1.6312952982716968, "grad_norm": 0.7106496095657349, "learning_rate": 1.4983720752771834e-05, "loss": 0.8834, "step": 8871 }, { "epoch": 1.6314811373350677, "grad_norm": 0.7209110260009766, "learning_rate": 1.4968325849304743e-05, "loss": 1.0118, "step": 8872 }, { "epoch": 1.631666976398439, "grad_norm": 0.6772007346153259, "learning_rate": 1.4952938218780488e-05, "loss": 0.6785, "step": 8873 }, { "epoch": 1.63185281546181, "grad_norm": 0.6663495302200317, "learning_rate": 1.4937557862515239e-05, "loss": 1.0211, "step": 8874 }, { "epoch": 1.6320386545251813, "grad_norm": 0.6624771356582642, "learning_rate": 1.4922184781824545e-05, "loss": 0.9342, "step": 8875 }, { "epoch": 1.6322244935885521, "grad_norm": 0.656817615032196, "learning_rate": 1.490681897802323e-05, "loss": 1.0656, "step": 8876 }, { "epoch": 1.6324103326519235, "grad_norm": 0.7161831259727478, "learning_rate": 1.4891460452425598e-05, "loss": 1.0012, "step": 8877 }, { "epoch": 1.6325961717152946, "grad_norm": 0.6671724915504456, "learning_rate": 1.4876109206345301e-05, "loss": 1.1334, "step": 8878 }, { "epoch": 1.6327820107786657, "grad_norm": 1.1625293493270874, "learning_rate": 1.4860765241095387e-05, "loss": 0.7399, "step": 8879 }, { "epoch": 1.6329678498420368, "grad_norm": 0.7702340483665466, "learning_rate": 1.4845428557988206e-05, "loss": 0.823, "step": 8880 }, { "epoch": 1.633153688905408, 
"grad_norm": 0.7329745888710022, "learning_rate": 1.4830099158335563e-05, "loss": 1.1374, "step": 8881 }, { "epoch": 1.633339527968779, "grad_norm": 0.6118117570877075, "learning_rate": 1.4814777043448636e-05, "loss": 0.6717, "step": 8882 }, { "epoch": 1.6335253670321501, "grad_norm": 0.7152222990989685, "learning_rate": 1.4799462214637916e-05, "loss": 0.9746, "step": 8883 }, { "epoch": 1.6337112060955212, "grad_norm": 0.674939751625061, "learning_rate": 1.4784154673213357e-05, "loss": 0.9757, "step": 8884 }, { "epoch": 1.6338970451588923, "grad_norm": 0.7836318016052246, "learning_rate": 1.4768854420484214e-05, "loss": 1.039, "step": 8885 }, { "epoch": 1.6340828842222637, "grad_norm": 0.7620517611503601, "learning_rate": 1.4753561457759168e-05, "loss": 0.9907, "step": 8886 }, { "epoch": 1.6342687232856346, "grad_norm": 0.7664940357208252, "learning_rate": 1.4738275786346234e-05, "loss": 0.8479, "step": 8887 }, { "epoch": 1.6344545623490059, "grad_norm": 0.8321072459220886, "learning_rate": 1.4722997407552852e-05, "loss": 1.0961, "step": 8888 }, { "epoch": 1.6346404014123768, "grad_norm": 0.7598660588264465, "learning_rate": 1.4707726322685822e-05, "loss": 1.2227, "step": 8889 }, { "epoch": 1.634826240475748, "grad_norm": 0.7904730439186096, "learning_rate": 1.4692462533051288e-05, "loss": 0.7229, "step": 8890 }, { "epoch": 1.635012079539119, "grad_norm": 0.8430718183517456, "learning_rate": 1.4677206039954805e-05, "loss": 0.8934, "step": 8891 }, { "epoch": 1.6351979186024903, "grad_norm": 0.6781235337257385, "learning_rate": 1.4661956844701297e-05, "loss": 0.7438, "step": 8892 }, { "epoch": 1.6353837576658614, "grad_norm": 0.7655029892921448, "learning_rate": 1.4646714948595076e-05, "loss": 0.9364, "step": 8893 }, { "epoch": 1.6355695967292325, "grad_norm": 0.809462308883667, "learning_rate": 1.4631480352939787e-05, "loss": 0.8887, "step": 8894 }, { "epoch": 1.6357554357926036, "grad_norm": 0.6403674483299255, "learning_rate": 1.4616253059038477e-05, "loss": 
1.1736, "step": 8895 }, { "epoch": 1.6359412748559747, "grad_norm": 0.7233485579490662, "learning_rate": 1.4601033068193615e-05, "loss": 0.8797, "step": 8896 }, { "epoch": 1.6361271139193458, "grad_norm": 1.15740966796875, "learning_rate": 1.4585820381706927e-05, "loss": 1.3284, "step": 8897 }, { "epoch": 1.636312952982717, "grad_norm": 0.6786386966705322, "learning_rate": 1.457061500087966e-05, "loss": 0.7029, "step": 8898 }, { "epoch": 1.636498792046088, "grad_norm": 0.8133037090301514, "learning_rate": 1.455541692701231e-05, "loss": 1.1436, "step": 8899 }, { "epoch": 1.6366846311094592, "grad_norm": 0.6569185256958008, "learning_rate": 1.4540226161404835e-05, "loss": 0.8784, "step": 8900 }, { "epoch": 1.6368704701728305, "grad_norm": 0.6427562832832336, "learning_rate": 1.4525042705356506e-05, "loss": 0.9798, "step": 8901 }, { "epoch": 1.6370563092362014, "grad_norm": 0.616054117679596, "learning_rate": 1.4509866560166007e-05, "loss": 0.9621, "step": 8902 }, { "epoch": 1.6372421482995727, "grad_norm": 0.6826965808868408, "learning_rate": 1.44946977271314e-05, "loss": 0.9178, "step": 8903 }, { "epoch": 1.6374279873629436, "grad_norm": 0.8229233026504517, "learning_rate": 1.4479536207550093e-05, "loss": 1.1039, "step": 8904 }, { "epoch": 1.637613826426315, "grad_norm": 0.7446808815002441, "learning_rate": 1.446438200271888e-05, "loss": 1.1094, "step": 8905 }, { "epoch": 1.6377996654896858, "grad_norm": 0.7734174132347107, "learning_rate": 1.4449235113933934e-05, "loss": 0.8696, "step": 8906 }, { "epoch": 1.6379855045530571, "grad_norm": 0.6430695652961731, "learning_rate": 1.4434095542490844e-05, "loss": 1.0186, "step": 8907 }, { "epoch": 1.638171343616428, "grad_norm": 0.7133775353431702, "learning_rate": 1.4418963289684462e-05, "loss": 1.1213, "step": 8908 }, { "epoch": 1.6383571826797994, "grad_norm": 0.5039517283439636, "learning_rate": 1.4403838356809119e-05, "loss": 0.6748, "step": 8909 }, { "epoch": 1.6385430217431705, "grad_norm": 0.6650997996330261, 
"learning_rate": 1.4388720745158502e-05, "loss": 0.96, "step": 8910 }, { "epoch": 1.6387288608065416, "grad_norm": 0.6760223507881165, "learning_rate": 1.437361045602561e-05, "loss": 0.9497, "step": 8911 }, { "epoch": 1.6389146998699127, "grad_norm": 0.7307868599891663, "learning_rate": 1.4358507490702899e-05, "loss": 1.1226, "step": 8912 }, { "epoch": 1.6391005389332838, "grad_norm": 0.7235276103019714, "learning_rate": 1.4343411850482114e-05, "loss": 0.9746, "step": 8913 }, { "epoch": 1.6392863779966549, "grad_norm": 0.716784656047821, "learning_rate": 1.4328323536654442e-05, "loss": 1.049, "step": 8914 }, { "epoch": 1.639472217060026, "grad_norm": 0.8350142240524292, "learning_rate": 1.4313242550510442e-05, "loss": 0.9188, "step": 8915 }, { "epoch": 1.639658056123397, "grad_norm": 0.7000654339790344, "learning_rate": 1.429816889333997e-05, "loss": 0.9902, "step": 8916 }, { "epoch": 1.6398438951867682, "grad_norm": 0.6660363674163818, "learning_rate": 1.4283102566432371e-05, "loss": 0.9285, "step": 8917 }, { "epoch": 1.6400297342501395, "grad_norm": 0.6869511008262634, "learning_rate": 1.4268043571076239e-05, "loss": 1.1581, "step": 8918 }, { "epoch": 1.6402155733135104, "grad_norm": 0.7386857271194458, "learning_rate": 1.4252991908559665e-05, "loss": 1.1359, "step": 8919 }, { "epoch": 1.6404014123768818, "grad_norm": 0.7131685018539429, "learning_rate": 1.4237947580169986e-05, "loss": 0.9634, "step": 8920 }, { "epoch": 1.6405872514402526, "grad_norm": 0.6731940507888794, "learning_rate": 1.4222910587194005e-05, "loss": 0.9535, "step": 8921 }, { "epoch": 1.640773090503624, "grad_norm": 0.7020549774169922, "learning_rate": 1.4207880930917871e-05, "loss": 0.9597, "step": 8922 }, { "epoch": 1.6409589295669949, "grad_norm": 0.6156516075134277, "learning_rate": 1.4192858612627102e-05, "loss": 1.0362, "step": 8923 }, { "epoch": 1.6411447686303662, "grad_norm": 0.7188947200775146, "learning_rate": 1.4177843633606614e-05, "loss": 1.0978, "step": 8924 }, { "epoch": 
1.641330607693737, "grad_norm": 0.6286084055900574, "learning_rate": 1.4162835995140622e-05, "loss": 0.998, "step": 8925 }, { "epoch": 1.6415164467571084, "grad_norm": 0.6593037843704224, "learning_rate": 1.4147835698512812e-05, "loss": 0.9243, "step": 8926 }, { "epoch": 1.6417022858204795, "grad_norm": 0.6840078234672546, "learning_rate": 1.4132842745006148e-05, "loss": 0.8513, "step": 8927 }, { "epoch": 1.6418881248838506, "grad_norm": 0.7872484922409058, "learning_rate": 1.4117857135903013e-05, "loss": 0.8972, "step": 8928 }, { "epoch": 1.6420739639472217, "grad_norm": 0.7854396104812622, "learning_rate": 1.4102878872485203e-05, "loss": 0.9316, "step": 8929 }, { "epoch": 1.6422598030105928, "grad_norm": 0.7035794854164124, "learning_rate": 1.4087907956033785e-05, "loss": 1.0664, "step": 8930 }, { "epoch": 1.642445642073964, "grad_norm": 0.7821934819221497, "learning_rate": 1.4072944387829313e-05, "loss": 0.9837, "step": 8931 }, { "epoch": 1.642631481137335, "grad_norm": 0.7449780106544495, "learning_rate": 1.4057988169151592e-05, "loss": 1.0677, "step": 8932 }, { "epoch": 1.6428173202007061, "grad_norm": 0.7577154636383057, "learning_rate": 1.4043039301279903e-05, "loss": 1.1414, "step": 8933 }, { "epoch": 1.6430031592640773, "grad_norm": 0.6866757273674011, "learning_rate": 1.4028097785492823e-05, "loss": 0.9267, "step": 8934 }, { "epoch": 1.6431889983274486, "grad_norm": 0.7813276648521423, "learning_rate": 1.4013163623068348e-05, "loss": 0.8713, "step": 8935 }, { "epoch": 1.6433748373908195, "grad_norm": 0.5878521800041199, "learning_rate": 1.3998236815283838e-05, "loss": 0.7959, "step": 8936 }, { "epoch": 1.6435606764541908, "grad_norm": 0.6542325615882874, "learning_rate": 1.3983317363416005e-05, "loss": 0.8847, "step": 8937 }, { "epoch": 1.6437465155175617, "grad_norm": 0.7386040687561035, "learning_rate": 1.3968405268740959e-05, "loss": 0.7502, "step": 8938 }, { "epoch": 1.643932354580933, "grad_norm": 0.657832145690918, "learning_rate": 
1.3953500532534137e-05, "loss": 0.9111, "step": 8939 }, { "epoch": 1.644118193644304, "grad_norm": 0.8460830450057983, "learning_rate": 1.3938603156070395e-05, "loss": 1.0726, "step": 8940 }, { "epoch": 1.6443040327076752, "grad_norm": 0.6688886880874634, "learning_rate": 1.3923713140623918e-05, "loss": 0.6267, "step": 8941 }, { "epoch": 1.644489871771046, "grad_norm": 0.7037612199783325, "learning_rate": 1.3908830487468283e-05, "loss": 0.9604, "step": 8942 }, { "epoch": 1.6446757108344174, "grad_norm": 0.7139345407485962, "learning_rate": 1.3893955197876474e-05, "loss": 0.6997, "step": 8943 }, { "epoch": 1.6448615498977885, "grad_norm": 0.7229803800582886, "learning_rate": 1.387908727312074e-05, "loss": 0.9263, "step": 8944 }, { "epoch": 1.6450473889611597, "grad_norm": 0.6166016459465027, "learning_rate": 1.386422671447284e-05, "loss": 0.794, "step": 8945 }, { "epoch": 1.6452332280245308, "grad_norm": 0.6664713621139526, "learning_rate": 1.3849373523203769e-05, "loss": 0.8972, "step": 8946 }, { "epoch": 1.6454190670879019, "grad_norm": 0.7948108911514282, "learning_rate": 1.3834527700583988e-05, "loss": 0.8877, "step": 8947 }, { "epoch": 1.645604906151273, "grad_norm": 1.114028811454773, "learning_rate": 1.3819689247883271e-05, "loss": 0.9342, "step": 8948 }, { "epoch": 1.645790745214644, "grad_norm": 0.7399374842643738, "learning_rate": 1.3804858166370782e-05, "loss": 0.9073, "step": 8949 }, { "epoch": 1.6459765842780152, "grad_norm": 0.7469755411148071, "learning_rate": 1.3790034457315082e-05, "loss": 1.078, "step": 8950 }, { "epoch": 1.6461624233413863, "grad_norm": 0.6429402828216553, "learning_rate": 1.3775218121984046e-05, "loss": 0.8276, "step": 8951 }, { "epoch": 1.6463482624047576, "grad_norm": 0.6140831112861633, "learning_rate": 1.3760409161644982e-05, "loss": 0.952, "step": 8952 }, { "epoch": 1.6465341014681285, "grad_norm": 0.6000019907951355, "learning_rate": 1.3745607577564501e-05, "loss": 0.6117, "step": 8953 }, { "epoch": 1.6467199405314998, 
"grad_norm": 0.6426273584365845, "learning_rate": 1.3730813371008633e-05, "loss": 1.0371, "step": 8954 }, { "epoch": 1.6469057795948707, "grad_norm": 0.7724543809890747, "learning_rate": 1.3716026543242732e-05, "loss": 0.9657, "step": 8955 }, { "epoch": 1.647091618658242, "grad_norm": 0.6861767768859863, "learning_rate": 1.3701247095531566e-05, "loss": 0.9033, "step": 8956 }, { "epoch": 1.647277457721613, "grad_norm": 0.7680606842041016, "learning_rate": 1.3686475029139267e-05, "loss": 0.8494, "step": 8957 }, { "epoch": 1.6474632967849843, "grad_norm": 0.7061701416969299, "learning_rate": 1.3671710345329292e-05, "loss": 0.9876, "step": 8958 }, { "epoch": 1.6476491358483554, "grad_norm": 0.6801935434341431, "learning_rate": 1.365695304536454e-05, "loss": 0.8746, "step": 8959 }, { "epoch": 1.6478349749117265, "grad_norm": 0.6824638247489929, "learning_rate": 1.364220313050717e-05, "loss": 0.7854, "step": 8960 }, { "epoch": 1.6480208139750976, "grad_norm": 0.6700693368911743, "learning_rate": 1.3627460602018838e-05, "loss": 0.8267, "step": 8961 }, { "epoch": 1.6482066530384687, "grad_norm": 0.7284310460090637, "learning_rate": 1.3612725461160459e-05, "loss": 1.0115, "step": 8962 }, { "epoch": 1.6483924921018398, "grad_norm": 0.6551657915115356, "learning_rate": 1.3597997709192378e-05, "loss": 1.084, "step": 8963 }, { "epoch": 1.648578331165211, "grad_norm": 0.7529725432395935, "learning_rate": 1.3583277347374301e-05, "loss": 0.8888, "step": 8964 }, { "epoch": 1.648764170228582, "grad_norm": 0.848233699798584, "learning_rate": 1.3568564376965277e-05, "loss": 0.8482, "step": 8965 }, { "epoch": 1.6489500092919531, "grad_norm": 0.6673817038536072, "learning_rate": 1.355385879922374e-05, "loss": 0.8099, "step": 8966 }, { "epoch": 1.6491358483553245, "grad_norm": 0.7535316348075867, "learning_rate": 1.3539160615407486e-05, "loss": 1.0279, "step": 8967 }, { "epoch": 1.6493216874186953, "grad_norm": 0.5885194540023804, "learning_rate": 1.3524469826773723e-05, "loss": 0.869, 
"step": 8968 }, { "epoch": 1.6495075264820667, "grad_norm": 0.7876520752906799, "learning_rate": 1.3509786434578931e-05, "loss": 1.0675, "step": 8969 }, { "epoch": 1.6496933655454376, "grad_norm": 0.6128770709037781, "learning_rate": 1.349511044007904e-05, "loss": 0.8816, "step": 8970 }, { "epoch": 1.6498792046088089, "grad_norm": 0.6121591925621033, "learning_rate": 1.3480441844529324e-05, "loss": 0.716, "step": 8971 }, { "epoch": 1.6500650436721798, "grad_norm": 0.7618534564971924, "learning_rate": 1.34657806491844e-05, "loss": 0.9837, "step": 8972 }, { "epoch": 1.650250882735551, "grad_norm": 0.7061711549758911, "learning_rate": 1.345112685529829e-05, "loss": 0.9989, "step": 8973 }, { "epoch": 1.650436721798922, "grad_norm": 0.6352358460426331, "learning_rate": 1.3436480464124346e-05, "loss": 0.9719, "step": 8974 }, { "epoch": 1.6506225608622933, "grad_norm": 0.6654130220413208, "learning_rate": 1.3421841476915343e-05, "loss": 1.0551, "step": 8975 }, { "epoch": 1.6508083999256644, "grad_norm": 0.7092660665512085, "learning_rate": 1.3407209894923323e-05, "loss": 1.1038, "step": 8976 }, { "epoch": 1.6509942389890355, "grad_norm": 0.7314668297767639, "learning_rate": 1.3392585719399797e-05, "loss": 0.9794, "step": 8977 }, { "epoch": 1.6511800780524066, "grad_norm": 0.739293098449707, "learning_rate": 1.3377968951595621e-05, "loss": 1.0016, "step": 8978 }, { "epoch": 1.6513659171157777, "grad_norm": 0.6878502368927002, "learning_rate": 1.336335959276096e-05, "loss": 0.9889, "step": 8979 }, { "epoch": 1.6515517561791488, "grad_norm": 0.7072255611419678, "learning_rate": 1.3348757644145392e-05, "loss": 0.8155, "step": 8980 }, { "epoch": 1.65173759524252, "grad_norm": 0.7043284177780151, "learning_rate": 1.3334163106997865e-05, "loss": 1.0011, "step": 8981 }, { "epoch": 1.651923434305891, "grad_norm": 0.8223550915718079, "learning_rate": 1.331957598256669e-05, "loss": 0.8955, "step": 8982 }, { "epoch": 1.6521092733692622, "grad_norm": 0.6612815856933594, 
"learning_rate": 1.3304996272099512e-05, "loss": 0.8959, "step": 8983 }, { "epoch": 1.6522951124326335, "grad_norm": 0.7884023785591125, "learning_rate": 1.3290423976843369e-05, "loss": 0.8742, "step": 8984 }, { "epoch": 1.6524809514960044, "grad_norm": 0.837375819683075, "learning_rate": 1.3275859098044684e-05, "loss": 0.9496, "step": 8985 }, { "epoch": 1.6526667905593757, "grad_norm": 0.6119824051856995, "learning_rate": 1.3261301636949197e-05, "loss": 0.7175, "step": 8986 }, { "epoch": 1.6528526296227466, "grad_norm": 0.7192531824111938, "learning_rate": 1.3246751594802064e-05, "loss": 0.9797, "step": 8987 }, { "epoch": 1.653038468686118, "grad_norm": 0.9993611574172974, "learning_rate": 1.3232208972847737e-05, "loss": 1.2924, "step": 8988 }, { "epoch": 1.6532243077494888, "grad_norm": 0.6902458667755127, "learning_rate": 1.321767377233013e-05, "loss": 0.8782, "step": 8989 }, { "epoch": 1.6534101468128601, "grad_norm": 0.6710980534553528, "learning_rate": 1.320314599449244e-05, "loss": 0.9699, "step": 8990 }, { "epoch": 1.653595985876231, "grad_norm": 0.6646018028259277, "learning_rate": 1.3188625640577246e-05, "loss": 1.1329, "step": 8991 }, { "epoch": 1.6537818249396024, "grad_norm": 0.6741959452629089, "learning_rate": 1.3174112711826559e-05, "loss": 0.8457, "step": 8992 }, { "epoch": 1.6539676640029735, "grad_norm": 0.8614079356193542, "learning_rate": 1.3159607209481639e-05, "loss": 1.0179, "step": 8993 }, { "epoch": 1.6541535030663446, "grad_norm": 0.7563652992248535, "learning_rate": 1.3145109134783207e-05, "loss": 1.1809, "step": 8994 }, { "epoch": 1.6543393421297157, "grad_norm": 0.7481111884117126, "learning_rate": 1.3130618488971302e-05, "loss": 0.9286, "step": 8995 }, { "epoch": 1.6545251811930868, "grad_norm": 0.8062092065811157, "learning_rate": 1.3116135273285369e-05, "loss": 0.8791, "step": 8996 }, { "epoch": 1.6547110202564579, "grad_norm": 0.7598450183868408, "learning_rate": 1.3101659488964146e-05, "loss": 0.9577, "step": 8997 }, { "epoch": 
1.654896859319829, "grad_norm": 0.7781131267547607, "learning_rate": 1.3087191137245802e-05, "loss": 0.972, "step": 8998 }, { "epoch": 1.6550826983832, "grad_norm": 0.6933823823928833, "learning_rate": 1.3072730219367869e-05, "loss": 0.8878, "step": 8999 }, { "epoch": 1.6552685374465712, "grad_norm": 0.6313965320587158, "learning_rate": 1.3058276736567165e-05, "loss": 0.8923, "step": 9000 }, { "epoch": 1.6554543765099425, "grad_norm": 0.7283336520195007, "learning_rate": 1.3043830690079984e-05, "loss": 0.885, "step": 9001 }, { "epoch": 1.6556402155733134, "grad_norm": 0.6406028270721436, "learning_rate": 1.3029392081141878e-05, "loss": 1.0113, "step": 9002 }, { "epoch": 1.6558260546366848, "grad_norm": 0.8105238080024719, "learning_rate": 1.301496091098786e-05, "loss": 1.1788, "step": 9003 }, { "epoch": 1.6560118937000556, "grad_norm": 0.6627447605133057, "learning_rate": 1.3000537180852212e-05, "loss": 0.8417, "step": 9004 }, { "epoch": 1.656197732763427, "grad_norm": 0.6995481848716736, "learning_rate": 1.2986120891968645e-05, "loss": 0.9624, "step": 9005 }, { "epoch": 1.6563835718267979, "grad_norm": 0.8995388150215149, "learning_rate": 1.297171204557025e-05, "loss": 1.0961, "step": 9006 }, { "epoch": 1.6565694108901692, "grad_norm": 0.6590665578842163, "learning_rate": 1.2957310642889387e-05, "loss": 0.9818, "step": 9007 }, { "epoch": 1.6567552499535403, "grad_norm": 0.6268938183784485, "learning_rate": 1.2942916685157868e-05, "loss": 0.8111, "step": 9008 }, { "epoch": 1.6569410890169114, "grad_norm": 0.7919785976409912, "learning_rate": 1.2928530173606867e-05, "loss": 0.9393, "step": 9009 }, { "epoch": 1.6571269280802825, "grad_norm": 0.7027867436408997, "learning_rate": 1.2914151109466843e-05, "loss": 0.6645, "step": 9010 }, { "epoch": 1.6573127671436536, "grad_norm": 0.6374213695526123, "learning_rate": 1.289977949396769e-05, "loss": 0.8168, "step": 9011 }, { "epoch": 1.6574986062070247, "grad_norm": 0.7896588444709778, "learning_rate": 
1.2885415328338645e-05, "loss": 0.9752, "step": 9012 }, { "epoch": 1.6576844452703958, "grad_norm": 0.7199686765670776, "learning_rate": 1.2871058613808329e-05, "loss": 0.8953, "step": 9013 }, { "epoch": 1.657870284333767, "grad_norm": 0.8703217506408691, "learning_rate": 1.2856709351604657e-05, "loss": 0.7636, "step": 9014 }, { "epoch": 1.658056123397138, "grad_norm": 0.6465247869491577, "learning_rate": 1.2842367542955003e-05, "loss": 0.8154, "step": 9015 }, { "epoch": 1.6582419624605091, "grad_norm": 0.6402195692062378, "learning_rate": 1.2828033189085997e-05, "loss": 0.7662, "step": 9016 }, { "epoch": 1.6584278015238803, "grad_norm": 0.7437441945075989, "learning_rate": 1.2813706291223748e-05, "loss": 0.9204, "step": 9017 }, { "epoch": 1.6586136405872516, "grad_norm": 0.7095872759819031, "learning_rate": 1.2799386850593597e-05, "loss": 0.9336, "step": 9018 }, { "epoch": 1.6587994796506225, "grad_norm": 0.7508102655410767, "learning_rate": 1.2785074868420366e-05, "loss": 1.1321, "step": 9019 }, { "epoch": 1.6589853187139938, "grad_norm": 0.6868159770965576, "learning_rate": 1.2770770345928196e-05, "loss": 0.9345, "step": 9020 }, { "epoch": 1.6591711577773647, "grad_norm": 0.6406965851783752, "learning_rate": 1.2756473284340543e-05, "loss": 1.02, "step": 9021 }, { "epoch": 1.659356996840736, "grad_norm": 0.6748953461647034, "learning_rate": 1.2742183684880304e-05, "loss": 0.959, "step": 9022 }, { "epoch": 1.659542835904107, "grad_norm": 0.7843619585037231, "learning_rate": 1.2727901548769672e-05, "loss": 0.8909, "step": 9023 }, { "epoch": 1.6597286749674782, "grad_norm": 0.9022567272186279, "learning_rate": 1.2713626877230233e-05, "loss": 0.9762, "step": 9024 }, { "epoch": 1.6599145140308493, "grad_norm": 0.6865164041519165, "learning_rate": 1.2699359671482947e-05, "loss": 0.9494, "step": 9025 }, { "epoch": 1.6601003530942204, "grad_norm": 0.8706424236297607, "learning_rate": 1.2685099932748101e-05, "loss": 1.1028, "step": 9026 }, { "epoch": 1.6602861921575915, 
"grad_norm": 0.576554536819458, "learning_rate": 1.26708476622454e-05, "loss": 0.6907, "step": 9027 }, { "epoch": 1.6604720312209627, "grad_norm": 0.6635376811027527, "learning_rate": 1.265660286119381e-05, "loss": 0.9428, "step": 9028 }, { "epoch": 1.6606578702843338, "grad_norm": 0.8346258401870728, "learning_rate": 1.2642365530811772e-05, "loss": 0.976, "step": 9029 }, { "epoch": 1.6608437093477049, "grad_norm": 0.779428243637085, "learning_rate": 1.2628135672317009e-05, "loss": 0.887, "step": 9030 }, { "epoch": 1.661029548411076, "grad_norm": 0.7409526109695435, "learning_rate": 1.2613913286926648e-05, "loss": 0.9256, "step": 9031 }, { "epoch": 1.661215387474447, "grad_norm": 0.7076467871665955, "learning_rate": 1.2599698375857127e-05, "loss": 0.9611, "step": 9032 }, { "epoch": 1.6614012265378184, "grad_norm": 0.796972393989563, "learning_rate": 1.2585490940324297e-05, "loss": 1.0653, "step": 9033 }, { "epoch": 1.6615870656011893, "grad_norm": 0.7058619260787964, "learning_rate": 1.2571290981543382e-05, "loss": 1.1194, "step": 9034 }, { "epoch": 1.6617729046645606, "grad_norm": 0.6679715514183044, "learning_rate": 1.2557098500728892e-05, "loss": 0.7929, "step": 9035 }, { "epoch": 1.6619587437279315, "grad_norm": 0.6468355655670166, "learning_rate": 1.2542913499094766e-05, "loss": 1.1127, "step": 9036 }, { "epoch": 1.6621445827913028, "grad_norm": 0.854033350944519, "learning_rate": 1.2528735977854255e-05, "loss": 1.2172, "step": 9037 }, { "epoch": 1.6623304218546737, "grad_norm": 0.7941082715988159, "learning_rate": 1.2514565938220003e-05, "loss": 0.7856, "step": 9038 }, { "epoch": 1.662516260918045, "grad_norm": 1.2629947662353516, "learning_rate": 1.2500403381404003e-05, "loss": 1.1651, "step": 9039 }, { "epoch": 1.662702099981416, "grad_norm": 0.6334301829338074, "learning_rate": 1.248624830861762e-05, "loss": 1.0095, "step": 9040 }, { "epoch": 1.6628879390447873, "grad_norm": 0.8614044785499573, "learning_rate": 1.2472100721071578e-05, "loss": 1.084, 
"step": 9041 }, { "epoch": 1.6630737781081584, "grad_norm": 0.6715635061264038, "learning_rate": 1.2457960619975917e-05, "loss": 0.8696, "step": 9042 }, { "epoch": 1.6632596171715295, "grad_norm": 0.6359160542488098, "learning_rate": 1.2443828006540103e-05, "loss": 1.0494, "step": 9043 }, { "epoch": 1.6634454562349006, "grad_norm": 0.8149633407592773, "learning_rate": 1.2429702881972905e-05, "loss": 1.1387, "step": 9044 }, { "epoch": 1.6636312952982717, "grad_norm": 0.7104032635688782, "learning_rate": 1.2415585247482498e-05, "loss": 1.1578, "step": 9045 }, { "epoch": 1.6638171343616428, "grad_norm": 0.8964525461196899, "learning_rate": 1.240147510427635e-05, "loss": 1.0038, "step": 9046 }, { "epoch": 1.664002973425014, "grad_norm": 0.6363485455513, "learning_rate": 1.2387372453561364e-05, "loss": 0.9393, "step": 9047 }, { "epoch": 1.664188812488385, "grad_norm": 0.6944891810417175, "learning_rate": 1.2373277296543795e-05, "loss": 0.8978, "step": 9048 }, { "epoch": 1.6643746515517561, "grad_norm": 0.766639769077301, "learning_rate": 1.235918963442918e-05, "loss": 0.9532, "step": 9049 }, { "epoch": 1.6645604906151275, "grad_norm": 0.8323293328285217, "learning_rate": 1.2345109468422523e-05, "loss": 0.8493, "step": 9050 }, { "epoch": 1.6647463296784983, "grad_norm": 0.7760543823242188, "learning_rate": 1.233103679972807e-05, "loss": 0.7718, "step": 9051 }, { "epoch": 1.6649321687418697, "grad_norm": 0.7688063979148865, "learning_rate": 1.2316971629549511e-05, "loss": 1.2442, "step": 9052 }, { "epoch": 1.6651180078052406, "grad_norm": 0.783732533454895, "learning_rate": 1.230291395908989e-05, "loss": 1.0332, "step": 9053 }, { "epoch": 1.6653038468686119, "grad_norm": 0.7472023367881775, "learning_rate": 1.2288863789551586e-05, "loss": 0.9761, "step": 9054 }, { "epoch": 1.6654896859319828, "grad_norm": 0.6275432705879211, "learning_rate": 1.2274821122136316e-05, "loss": 0.8392, "step": 9055 }, { "epoch": 1.665675524995354, "grad_norm": 0.7407889366149902, 
"learning_rate": 1.2260785958045196e-05, "loss": 1.0087, "step": 9056 }, { "epoch": 1.665861364058725, "grad_norm": 0.6514436602592468, "learning_rate": 1.22467582984787e-05, "loss": 0.8457, "step": 9057 }, { "epoch": 1.6660472031220963, "grad_norm": 0.666793167591095, "learning_rate": 1.2232738144636613e-05, "loss": 0.8491, "step": 9058 }, { "epoch": 1.6662330421854674, "grad_norm": 0.708659827709198, "learning_rate": 1.221872549771813e-05, "loss": 1.1277, "step": 9059 }, { "epoch": 1.6664188812488385, "grad_norm": 0.7047358155250549, "learning_rate": 1.2204720358921761e-05, "loss": 1.0066, "step": 9060 }, { "epoch": 1.6666047203122096, "grad_norm": 0.7291968464851379, "learning_rate": 1.2190722729445415e-05, "loss": 1.1258, "step": 9061 }, { "epoch": 1.6667905593755807, "grad_norm": 0.7509036064147949, "learning_rate": 1.2176732610486353e-05, "loss": 0.7721, "step": 9062 }, { "epoch": 1.6669763984389518, "grad_norm": 0.6720606684684753, "learning_rate": 1.216275000324114e-05, "loss": 1.0531, "step": 9063 }, { "epoch": 1.667162237502323, "grad_norm": 0.6274673938751221, "learning_rate": 1.214877490890578e-05, "loss": 0.8732, "step": 9064 }, { "epoch": 1.667348076565694, "grad_norm": 0.7732565402984619, "learning_rate": 1.2134807328675557e-05, "loss": 1.1978, "step": 9065 }, { "epoch": 1.6675339156290652, "grad_norm": 0.6812934279441833, "learning_rate": 1.2120847263745183e-05, "loss": 0.9908, "step": 9066 }, { "epoch": 1.6677197546924365, "grad_norm": 0.7481977939605713, "learning_rate": 1.2106894715308647e-05, "loss": 0.9184, "step": 9067 }, { "epoch": 1.6679055937558074, "grad_norm": 0.6654353141784668, "learning_rate": 1.2092949684559373e-05, "loss": 0.9962, "step": 9068 }, { "epoch": 1.6680914328191787, "grad_norm": 0.8279258608818054, "learning_rate": 1.2079012172690108e-05, "loss": 0.9949, "step": 9069 }, { "epoch": 1.6682772718825496, "grad_norm": 0.6551133990287781, "learning_rate": 1.206508218089295e-05, "loss": 0.7166, "step": 9070 }, { "epoch": 
1.668463110945921, "grad_norm": 0.6765016913414001, "learning_rate": 1.2051159710359384e-05, "loss": 0.8067, "step": 9071 }, { "epoch": 1.6686489500092918, "grad_norm": 0.8082672953605652, "learning_rate": 1.2037244762280197e-05, "loss": 0.932, "step": 9072 }, { "epoch": 1.6688347890726631, "grad_norm": 0.7176616191864014, "learning_rate": 1.2023337337845596e-05, "loss": 0.8466, "step": 9073 }, { "epoch": 1.6690206281360342, "grad_norm": 0.7027639150619507, "learning_rate": 1.2009437438245074e-05, "loss": 0.8016, "step": 9074 }, { "epoch": 1.6692064671994054, "grad_norm": 0.5039098858833313, "learning_rate": 1.1995545064667535e-05, "loss": 0.6857, "step": 9075 }, { "epoch": 1.6693923062627765, "grad_norm": 0.7166251540184021, "learning_rate": 1.1981660218301261e-05, "loss": 0.9291, "step": 9076 }, { "epoch": 1.6695781453261476, "grad_norm": 0.6661301851272583, "learning_rate": 1.196778290033379e-05, "loss": 0.9587, "step": 9077 }, { "epoch": 1.6697639843895187, "grad_norm": 0.8017847537994385, "learning_rate": 1.1953913111952141e-05, "loss": 1.0492, "step": 9078 }, { "epoch": 1.6699498234528898, "grad_norm": 0.6611772179603577, "learning_rate": 1.1940050854342566e-05, "loss": 0.7787, "step": 9079 }, { "epoch": 1.6701356625162609, "grad_norm": 0.7013022899627686, "learning_rate": 1.1926196128690792e-05, "loss": 1.048, "step": 9080 }, { "epoch": 1.670321501579632, "grad_norm": 0.7118967175483704, "learning_rate": 1.1912348936181794e-05, "loss": 1.0242, "step": 9081 }, { "epoch": 1.6705073406430033, "grad_norm": 1.904453158378601, "learning_rate": 1.189850927799997e-05, "loss": 1.505, "step": 9082 }, { "epoch": 1.6706931797063742, "grad_norm": 0.6158105731010437, "learning_rate": 1.188467715532906e-05, "loss": 1.0896, "step": 9083 }, { "epoch": 1.6708790187697455, "grad_norm": 0.6351505517959595, "learning_rate": 1.1870852569352153e-05, "loss": 0.8779, "step": 9084 }, { "epoch": 1.6710648578331164, "grad_norm": 0.8218066692352295, "learning_rate": 
1.1857035521251725e-05, "loss": 1.052, "step": 9085 }, { "epoch": 1.6712506968964878, "grad_norm": 0.7728452682495117, "learning_rate": 1.1843226012209529e-05, "loss": 0.9847, "step": 9086 }, { "epoch": 1.6714365359598586, "grad_norm": 0.7268964052200317, "learning_rate": 1.1829424043406755e-05, "loss": 0.7735, "step": 9087 }, { "epoch": 1.67162237502323, "grad_norm": 0.6986159682273865, "learning_rate": 1.1815629616023893e-05, "loss": 0.8668, "step": 9088 }, { "epoch": 1.6718082140866009, "grad_norm": 0.6351314783096313, "learning_rate": 1.180184273124082e-05, "loss": 0.7405, "step": 9089 }, { "epoch": 1.6719940531499722, "grad_norm": 0.6102209091186523, "learning_rate": 1.1788063390236782e-05, "loss": 0.9222, "step": 9090 }, { "epoch": 1.6721798922133433, "grad_norm": 0.7918252348899841, "learning_rate": 1.1774291594190311e-05, "loss": 0.7727, "step": 9091 }, { "epoch": 1.6723657312767144, "grad_norm": 0.8020595908164978, "learning_rate": 1.1760527344279382e-05, "loss": 0.9602, "step": 9092 }, { "epoch": 1.6725515703400855, "grad_norm": 0.6822522878646851, "learning_rate": 1.1746770641681238e-05, "loss": 0.9458, "step": 9093 }, { "epoch": 1.6727374094034566, "grad_norm": 0.6773095726966858, "learning_rate": 1.173302148757256e-05, "loss": 0.8478, "step": 9094 }, { "epoch": 1.6729232484668277, "grad_norm": 0.6347144246101379, "learning_rate": 1.1719279883129308e-05, "loss": 0.8156, "step": 9095 }, { "epoch": 1.6731090875301988, "grad_norm": 0.7613920569419861, "learning_rate": 1.1705545829526843e-05, "loss": 1.0156, "step": 9096 }, { "epoch": 1.67329492659357, "grad_norm": 0.6074971556663513, "learning_rate": 1.1691819327939879e-05, "loss": 0.8857, "step": 9097 }, { "epoch": 1.673480765656941, "grad_norm": 0.5004672408103943, "learning_rate": 1.1678100379542468e-05, "loss": 0.5623, "step": 9098 }, { "epoch": 1.6736666047203124, "grad_norm": 0.7880104184150696, "learning_rate": 1.1664388985508035e-05, "loss": 1.0569, "step": 9099 }, { "epoch": 1.6738524437836833, 
"grad_norm": 0.6186123490333557, "learning_rate": 1.1650685147009311e-05, "loss": 0.8835, "step": 9100 }, { "epoch": 1.6740382828470546, "grad_norm": 0.6890327334403992, "learning_rate": 1.1636988865218446e-05, "loss": 0.99, "step": 9101 }, { "epoch": 1.6742241219104255, "grad_norm": 0.677280604839325, "learning_rate": 1.1623300141306892e-05, "loss": 1.0252, "step": 9102 }, { "epoch": 1.6744099609737968, "grad_norm": 0.6223238110542297, "learning_rate": 1.160961897644548e-05, "loss": 1.0804, "step": 9103 }, { "epoch": 1.6745958000371677, "grad_norm": 0.6181697845458984, "learning_rate": 1.1595945371804418e-05, "loss": 0.822, "step": 9104 }, { "epoch": 1.674781639100539, "grad_norm": 0.7645830512046814, "learning_rate": 1.1582279328553191e-05, "loss": 0.8882, "step": 9105 }, { "epoch": 1.67496747816391, "grad_norm": 0.7457630038261414, "learning_rate": 1.1568620847860733e-05, "loss": 0.716, "step": 9106 }, { "epoch": 1.6751533172272812, "grad_norm": 0.6209438443183899, "learning_rate": 1.1554969930895232e-05, "loss": 0.8928, "step": 9107 }, { "epoch": 1.6753391562906523, "grad_norm": 0.7704802751541138, "learning_rate": 1.154132657882433e-05, "loss": 0.845, "step": 9108 }, { "epoch": 1.6755249953540234, "grad_norm": 0.9186254739761353, "learning_rate": 1.1527690792814926e-05, "loss": 0.8236, "step": 9109 }, { "epoch": 1.6757108344173945, "grad_norm": 0.5914714932441711, "learning_rate": 1.1514062574033357e-05, "loss": 0.7576, "step": 9110 }, { "epoch": 1.6758966734807657, "grad_norm": 0.772611141204834, "learning_rate": 1.1500441923645266e-05, "loss": 1.0004, "step": 9111 }, { "epoch": 1.6760825125441368, "grad_norm": 0.6939578652381897, "learning_rate": 1.1486828842815633e-05, "loss": 0.9088, "step": 9112 }, { "epoch": 1.6762683516075079, "grad_norm": 0.989822268486023, "learning_rate": 1.1473223332708838e-05, "loss": 0.9906, "step": 9113 }, { "epoch": 1.676454190670879, "grad_norm": 0.8016864061355591, "learning_rate": 1.1459625394488571e-05, "loss": 0.9294, 
"step": 9114 }, { "epoch": 1.67664002973425, "grad_norm": 0.8364852666854858, "learning_rate": 1.1446035029317936e-05, "loss": 1.044, "step": 9115 }, { "epoch": 1.6768258687976214, "grad_norm": 0.7144214510917664, "learning_rate": 1.1432452238359293e-05, "loss": 1.0231, "step": 9116 }, { "epoch": 1.6770117078609923, "grad_norm": 0.6551642417907715, "learning_rate": 1.1418877022774421e-05, "loss": 0.9845, "step": 9117 }, { "epoch": 1.6771975469243636, "grad_norm": 0.7367211580276489, "learning_rate": 1.1405309383724472e-05, "loss": 1.0699, "step": 9118 }, { "epoch": 1.6773833859877345, "grad_norm": 0.676918625831604, "learning_rate": 1.1391749322369871e-05, "loss": 0.9828, "step": 9119 }, { "epoch": 1.6775692250511058, "grad_norm": 0.7982406616210938, "learning_rate": 1.137819683987048e-05, "loss": 1.1131, "step": 9120 }, { "epoch": 1.6777550641144767, "grad_norm": 0.8516080379486084, "learning_rate": 1.1364651937385429e-05, "loss": 1.1943, "step": 9121 }, { "epoch": 1.677940903177848, "grad_norm": 0.7958751320838928, "learning_rate": 1.1351114616073277e-05, "loss": 1.1384, "step": 9122 }, { "epoch": 1.678126742241219, "grad_norm": 0.8014817237854004, "learning_rate": 1.1337584877091877e-05, "loss": 0.7247, "step": 9123 }, { "epoch": 1.6783125813045903, "grad_norm": 0.6827468872070312, "learning_rate": 1.132406272159846e-05, "loss": 0.7683, "step": 9124 }, { "epoch": 1.6784984203679614, "grad_norm": 0.6997562646865845, "learning_rate": 1.1310548150749634e-05, "loss": 0.6016, "step": 9125 }, { "epoch": 1.6786842594313325, "grad_norm": 0.7690675854682922, "learning_rate": 1.1297041165701294e-05, "loss": 0.9419, "step": 9126 }, { "epoch": 1.6788700984947036, "grad_norm": 0.6297846436500549, "learning_rate": 1.128354176760873e-05, "loss": 0.767, "step": 9127 }, { "epoch": 1.6790559375580747, "grad_norm": 0.7651461362838745, "learning_rate": 1.1270049957626571e-05, "loss": 0.8214, "step": 9128 }, { "epoch": 1.6792417766214458, "grad_norm": 0.6811181902885437, 
"learning_rate": 1.1256565736908841e-05, "loss": 0.6638, "step": 9129 }, { "epoch": 1.679427615684817, "grad_norm": 0.8382944464683533, "learning_rate": 1.1243089106608828e-05, "loss": 0.8391, "step": 9130 }, { "epoch": 1.679613454748188, "grad_norm": 0.6987102031707764, "learning_rate": 1.1229620067879221e-05, "loss": 0.9216, "step": 9131 }, { "epoch": 1.6797992938115591, "grad_norm": 0.7947177886962891, "learning_rate": 1.1216158621872098e-05, "loss": 1.0208, "step": 9132 }, { "epoch": 1.6799851328749305, "grad_norm": 0.6815338134765625, "learning_rate": 1.1202704769738792e-05, "loss": 0.7046, "step": 9133 }, { "epoch": 1.6801709719383013, "grad_norm": 0.6513886451721191, "learning_rate": 1.1189258512630074e-05, "loss": 1.1137, "step": 9134 }, { "epoch": 1.6803568110016727, "grad_norm": 0.5718039274215698, "learning_rate": 1.1175819851696012e-05, "loss": 0.7564, "step": 9135 }, { "epoch": 1.6805426500650436, "grad_norm": 0.7113762497901917, "learning_rate": 1.1162388788086065e-05, "loss": 1.0238, "step": 9136 }, { "epoch": 1.6807284891284149, "grad_norm": 0.6775386333465576, "learning_rate": 1.1148965322948989e-05, "loss": 0.9812, "step": 9137 }, { "epoch": 1.6809143281917858, "grad_norm": 0.8017896413803101, "learning_rate": 1.1135549457432948e-05, "loss": 0.8781, "step": 9138 }, { "epoch": 1.681100167255157, "grad_norm": 0.6371380686759949, "learning_rate": 1.1122141192685442e-05, "loss": 0.9729, "step": 9139 }, { "epoch": 1.6812860063185282, "grad_norm": 0.7196660041809082, "learning_rate": 1.1108740529853268e-05, "loss": 0.8344, "step": 9140 }, { "epoch": 1.6814718453818993, "grad_norm": 0.6226027607917786, "learning_rate": 1.1095347470082629e-05, "loss": 0.8421, "step": 9141 }, { "epoch": 1.6816576844452704, "grad_norm": 0.6938750743865967, "learning_rate": 1.1081962014519076e-05, "loss": 0.7527, "step": 9142 }, { "epoch": 1.6818435235086415, "grad_norm": 0.9057941436767578, "learning_rate": 1.1068584164307504e-05, "loss": 1.1757, "step": 9143 }, { "epoch": 
1.6820293625720126, "grad_norm": 0.6205626726150513, "learning_rate": 1.1055213920592122e-05, "loss": 1.0075, "step": 9144 }, { "epoch": 1.6822152016353837, "grad_norm": 0.742459237575531, "learning_rate": 1.1041851284516525e-05, "loss": 0.8579, "step": 9145 }, { "epoch": 1.6824010406987548, "grad_norm": 0.6259186863899231, "learning_rate": 1.1028496257223676e-05, "loss": 0.8867, "step": 9146 }, { "epoch": 1.682586879762126, "grad_norm": 0.6648344993591309, "learning_rate": 1.1015148839855805e-05, "loss": 0.8293, "step": 9147 }, { "epoch": 1.6827727188254973, "grad_norm": 0.609562337398529, "learning_rate": 1.10018090335546e-05, "loss": 0.9139, "step": 9148 }, { "epoch": 1.6829585578888682, "grad_norm": 0.6610622406005859, "learning_rate": 1.0988476839461004e-05, "loss": 0.9723, "step": 9149 }, { "epoch": 1.6831443969522395, "grad_norm": 0.825636088848114, "learning_rate": 1.0975152258715382e-05, "loss": 0.9169, "step": 9150 }, { "epoch": 1.6833302360156104, "grad_norm": 0.6621133685112, "learning_rate": 1.0961835292457368e-05, "loss": 0.9953, "step": 9151 }, { "epoch": 1.6835160750789817, "grad_norm": 0.7959614396095276, "learning_rate": 1.0948525941826026e-05, "loss": 0.9995, "step": 9152 }, { "epoch": 1.6837019141423526, "grad_norm": 0.7150267362594604, "learning_rate": 1.0935224207959738e-05, "loss": 1.0144, "step": 9153 }, { "epoch": 1.683887753205724, "grad_norm": 0.7913087606430054, "learning_rate": 1.0921930091996203e-05, "loss": 1.1451, "step": 9154 }, { "epoch": 1.6840735922690948, "grad_norm": 0.6784157156944275, "learning_rate": 1.090864359507251e-05, "loss": 1.0383, "step": 9155 }, { "epoch": 1.6842594313324661, "grad_norm": 0.9016114473342896, "learning_rate": 1.0895364718325096e-05, "loss": 1.107, "step": 9156 }, { "epoch": 1.6844452703958372, "grad_norm": 0.6144252419471741, "learning_rate": 1.0882093462889709e-05, "loss": 0.9491, "step": 9157 }, { "epoch": 1.6846311094592084, "grad_norm": 0.7540306448936462, "learning_rate": 1.0868829829901473e-05, 
"loss": 1.031, "step": 9158 }, { "epoch": 1.6848169485225795, "grad_norm": 0.665734589099884, "learning_rate": 1.0855573820494857e-05, "loss": 1.0527, "step": 9159 }, { "epoch": 1.6850027875859506, "grad_norm": 0.8586148619651794, "learning_rate": 1.084232543580369e-05, "loss": 1.0122, "step": 9160 }, { "epoch": 1.6851886266493217, "grad_norm": 0.6013672947883606, "learning_rate": 1.082908467696112e-05, "loss": 0.8935, "step": 9161 }, { "epoch": 1.6853744657126928, "grad_norm": 0.7050352692604065, "learning_rate": 1.0815851545099676e-05, "loss": 0.8401, "step": 9162 }, { "epoch": 1.685560304776064, "grad_norm": 0.7062950730323792, "learning_rate": 1.0802626041351183e-05, "loss": 1.0707, "step": 9163 }, { "epoch": 1.685746143839435, "grad_norm": 0.8225484490394592, "learning_rate": 1.0789408166846882e-05, "loss": 0.9885, "step": 9164 }, { "epoch": 1.6859319829028063, "grad_norm": 0.8079144358634949, "learning_rate": 1.077619792271729e-05, "loss": 1.1399, "step": 9165 }, { "epoch": 1.6861178219661772, "grad_norm": 0.6949532628059387, "learning_rate": 1.0762995310092328e-05, "loss": 0.8332, "step": 9166 }, { "epoch": 1.6863036610295485, "grad_norm": 0.7433616518974304, "learning_rate": 1.0749800330101257e-05, "loss": 1.0674, "step": 9167 }, { "epoch": 1.6864895000929194, "grad_norm": 0.7382586598396301, "learning_rate": 1.073661298387265e-05, "loss": 0.7192, "step": 9168 }, { "epoch": 1.6866753391562908, "grad_norm": 0.8430142402648926, "learning_rate": 1.0723433272534467e-05, "loss": 0.9512, "step": 9169 }, { "epoch": 1.6868611782196616, "grad_norm": 0.6655109524726868, "learning_rate": 1.0710261197213978e-05, "loss": 0.8721, "step": 9170 }, { "epoch": 1.687047017283033, "grad_norm": 0.6514587998390198, "learning_rate": 1.0697096759037829e-05, "loss": 0.8485, "step": 9171 }, { "epoch": 1.6872328563464039, "grad_norm": 0.734404444694519, "learning_rate": 1.0683939959131994e-05, "loss": 1.2359, "step": 9172 }, { "epoch": 1.6874186954097752, "grad_norm": 
0.5610777735710144, "learning_rate": 1.0670790798621822e-05, "loss": 0.5728, "step": 9173 }, { "epoch": 1.6876045344731463, "grad_norm": 0.7563809752464294, "learning_rate": 1.0657649278631998e-05, "loss": 1.2102, "step": 9174 }, { "epoch": 1.6877903735365174, "grad_norm": 0.8163540363311768, "learning_rate": 1.0644515400286504e-05, "loss": 1.0889, "step": 9175 }, { "epoch": 1.6879762125998885, "grad_norm": 0.9675668478012085, "learning_rate": 1.063138916470875e-05, "loss": 0.7862, "step": 9176 }, { "epoch": 1.6881620516632596, "grad_norm": 0.6208799481391907, "learning_rate": 1.0618270573021405e-05, "loss": 0.892, "step": 9177 }, { "epoch": 1.6883478907266307, "grad_norm": 0.6580058932304382, "learning_rate": 1.0605159626346584e-05, "loss": 0.75, "step": 9178 }, { "epoch": 1.6885337297900018, "grad_norm": 0.7946237921714783, "learning_rate": 1.0592056325805644e-05, "loss": 0.9343, "step": 9179 }, { "epoch": 1.688719568853373, "grad_norm": 0.6942155957221985, "learning_rate": 1.0578960672519366e-05, "loss": 0.8665, "step": 9180 }, { "epoch": 1.688905407916744, "grad_norm": 0.693755567073822, "learning_rate": 1.0565872667607867e-05, "loss": 0.7981, "step": 9181 }, { "epoch": 1.6890912469801154, "grad_norm": 0.9935961961746216, "learning_rate": 1.0552792312190552e-05, "loss": 1.0529, "step": 9182 }, { "epoch": 1.6892770860434863, "grad_norm": 0.7871524095535278, "learning_rate": 1.0539719607386244e-05, "loss": 1.0497, "step": 9183 }, { "epoch": 1.6894629251068576, "grad_norm": 0.7142606377601624, "learning_rate": 1.0526654554313053e-05, "loss": 0.9732, "step": 9184 }, { "epoch": 1.6896487641702285, "grad_norm": 0.784253716468811, "learning_rate": 1.0513597154088472e-05, "loss": 1.0093, "step": 9185 }, { "epoch": 1.6898346032335998, "grad_norm": 0.6851341128349304, "learning_rate": 1.0500547407829341e-05, "loss": 0.9728, "step": 9186 }, { "epoch": 1.6900204422969707, "grad_norm": 0.961861789226532, "learning_rate": 1.0487505316651813e-05, "loss": 0.9666, "step": 9187 
}, { "epoch": 1.690206281360342, "grad_norm": 0.630499541759491, "learning_rate": 1.0474470881671439e-05, "loss": 0.9504, "step": 9188 }, { "epoch": 1.6903921204237131, "grad_norm": 0.7499755024909973, "learning_rate": 1.0461444104003049e-05, "loss": 0.9772, "step": 9189 }, { "epoch": 1.6905779594870842, "grad_norm": 0.6846833229064941, "learning_rate": 1.0448424984760874e-05, "loss": 0.9679, "step": 9190 }, { "epoch": 1.6907637985504553, "grad_norm": 0.6661667823791504, "learning_rate": 1.0435413525058435e-05, "loss": 0.9704, "step": 9191 }, { "epoch": 1.6909496376138264, "grad_norm": 0.6650241613388062, "learning_rate": 1.0422409726008675e-05, "loss": 1.0795, "step": 9192 }, { "epoch": 1.6911354766771975, "grad_norm": 0.7499749660491943, "learning_rate": 1.0409413588723804e-05, "loss": 0.7986, "step": 9193 }, { "epoch": 1.6913213157405687, "grad_norm": 0.8803886771202087, "learning_rate": 1.0396425114315412e-05, "loss": 0.9701, "step": 9194 }, { "epoch": 1.6915071548039398, "grad_norm": 0.692290186882019, "learning_rate": 1.0383444303894452e-05, "loss": 0.8647, "step": 9195 }, { "epoch": 1.6916929938673109, "grad_norm": 0.7082966566085815, "learning_rate": 1.0370471158571172e-05, "loss": 0.916, "step": 9196 }, { "epoch": 1.691878832930682, "grad_norm": 0.7729305028915405, "learning_rate": 1.0357505679455226e-05, "loss": 1.0942, "step": 9197 }, { "epoch": 1.692064671994053, "grad_norm": 0.7437100410461426, "learning_rate": 1.0344547867655553e-05, "loss": 0.821, "step": 9198 }, { "epoch": 1.6922505110574244, "grad_norm": 0.6189757585525513, "learning_rate": 1.0331597724280461e-05, "loss": 0.8888, "step": 9199 }, { "epoch": 1.6924363501207953, "grad_norm": 0.6114462614059448, "learning_rate": 1.0318655250437614e-05, "loss": 0.8383, "step": 9200 }, { "epoch": 1.6926221891841666, "grad_norm": 0.6665554642677307, "learning_rate": 1.0305720447234035e-05, "loss": 0.9874, "step": 9201 }, { "epoch": 1.6928080282475375, "grad_norm": 0.7060142159461975, "learning_rate": 
1.0292793315776027e-05, "loss": 0.9165, "step": 9202 }, { "epoch": 1.6929938673109088, "grad_norm": 0.7609959244728088, "learning_rate": 1.0279873857169276e-05, "loss": 0.9382, "step": 9203 }, { "epoch": 1.6931797063742797, "grad_norm": 0.7125695943832397, "learning_rate": 1.0266962072518849e-05, "loss": 1.0135, "step": 9204 }, { "epoch": 1.693365545437651, "grad_norm": 0.7978722453117371, "learning_rate": 1.0254057962929087e-05, "loss": 0.9271, "step": 9205 }, { "epoch": 1.6935513845010222, "grad_norm": 1.0889793634414673, "learning_rate": 1.0241161529503717e-05, "loss": 1.3928, "step": 9206 }, { "epoch": 1.6937372235643933, "grad_norm": 0.6475967764854431, "learning_rate": 1.0228272773345782e-05, "loss": 0.971, "step": 9207 }, { "epoch": 1.6939230626277644, "grad_norm": 0.7573670148849487, "learning_rate": 1.0215391695557707e-05, "loss": 1.1833, "step": 9208 }, { "epoch": 1.6941089016911355, "grad_norm": 0.7922182083129883, "learning_rate": 1.0202518297241237e-05, "loss": 1.004, "step": 9209 }, { "epoch": 1.6942947407545066, "grad_norm": 0.7527633309364319, "learning_rate": 1.0189652579497444e-05, "loss": 0.9222, "step": 9210 }, { "epoch": 1.6944805798178777, "grad_norm": 0.6256292462348938, "learning_rate": 1.0176794543426782e-05, "loss": 0.9044, "step": 9211 }, { "epoch": 1.6946664188812488, "grad_norm": 0.8159219622612, "learning_rate": 1.0163944190129005e-05, "loss": 0.9947, "step": 9212 }, { "epoch": 1.69485225794462, "grad_norm": 0.6840503811836243, "learning_rate": 1.0151101520703265e-05, "loss": 1.0449, "step": 9213 }, { "epoch": 1.6950380970079912, "grad_norm": 0.6659444570541382, "learning_rate": 1.0138266536247976e-05, "loss": 1.0373, "step": 9214 }, { "epoch": 1.6952239360713621, "grad_norm": 0.7139298915863037, "learning_rate": 1.0125439237860968e-05, "loss": 0.9433, "step": 9215 }, { "epoch": 1.6954097751347335, "grad_norm": 0.6589419841766357, "learning_rate": 1.0112619626639386e-05, "loss": 0.8481, "step": 9216 }, { "epoch": 1.6955956141981043, 
"grad_norm": 0.6323763132095337, "learning_rate": 1.009980770367972e-05, "loss": 0.8495, "step": 9217 }, { "epoch": 1.6957814532614757, "grad_norm": 0.920706033706665, "learning_rate": 1.0087003470077815e-05, "loss": 1.1459, "step": 9218 }, { "epoch": 1.6959672923248466, "grad_norm": 0.7430810332298279, "learning_rate": 1.0074206926928819e-05, "loss": 0.9022, "step": 9219 }, { "epoch": 1.6961531313882179, "grad_norm": 0.724772572517395, "learning_rate": 1.0061418075327278e-05, "loss": 0.6118, "step": 9220 }, { "epoch": 1.6963389704515888, "grad_norm": 0.7099050879478455, "learning_rate": 1.0048636916367004e-05, "loss": 0.8845, "step": 9221 }, { "epoch": 1.69652480951496, "grad_norm": 0.7855052947998047, "learning_rate": 1.0035863451141236e-05, "loss": 0.9102, "step": 9222 }, { "epoch": 1.6967106485783312, "grad_norm": 0.7682072520256042, "learning_rate": 1.0023097680742522e-05, "loss": 0.8534, "step": 9223 }, { "epoch": 1.6968964876417023, "grad_norm": 0.7189264297485352, "learning_rate": 1.0010339606262709e-05, "loss": 1.0848, "step": 9224 }, { "epoch": 1.6970823267050734, "grad_norm": 1.1096469163894653, "learning_rate": 9.997589228793058e-06, "loss": 1.1886, "step": 9225 }, { "epoch": 1.6972681657684445, "grad_norm": 0.7639279961585999, "learning_rate": 9.98484654942411e-06, "loss": 0.7868, "step": 9226 }, { "epoch": 1.6974540048318156, "grad_norm": 0.5577638745307922, "learning_rate": 9.972111569245791e-06, "loss": 0.7305, "step": 9227 }, { "epoch": 1.6976398438951867, "grad_norm": 0.6836424469947815, "learning_rate": 9.95938428934734e-06, "loss": 0.773, "step": 9228 }, { "epoch": 1.6978256829585578, "grad_norm": 0.7009959816932678, "learning_rate": 9.946664710817345e-06, "loss": 0.9731, "step": 9229 }, { "epoch": 1.698011522021929, "grad_norm": 0.7553769946098328, "learning_rate": 9.933952834743753e-06, "loss": 1.0267, "step": 9230 }, { "epoch": 1.6981973610853003, "grad_norm": 0.8114926815032959, "learning_rate": 9.921248662213833e-06, "loss": 1.0416, "step": 
9231 }, { "epoch": 1.6983832001486712, "grad_norm": 0.7146091461181641, "learning_rate": 9.908552194314214e-06, "loss": 1.08, "step": 9232 }, { "epoch": 1.6985690392120425, "grad_norm": 0.7939289212226868, "learning_rate": 9.895863432130826e-06, "loss": 0.9978, "step": 9233 }, { "epoch": 1.6987548782754134, "grad_norm": 0.7090169787406921, "learning_rate": 9.883182376749001e-06, "loss": 0.9042, "step": 9234 }, { "epoch": 1.6989407173387847, "grad_norm": 0.736428439617157, "learning_rate": 9.870509029253327e-06, "loss": 1.0081, "step": 9235 }, { "epoch": 1.6991265564021556, "grad_norm": 0.6867282390594482, "learning_rate": 9.857843390727816e-06, "loss": 0.9255, "step": 9236 }, { "epoch": 1.699312395465527, "grad_norm": 0.6552194356918335, "learning_rate": 9.8451854622558e-06, "loss": 0.9218, "step": 9237 }, { "epoch": 1.6994982345288978, "grad_norm": 1.0081390142440796, "learning_rate": 9.832535244919906e-06, "loss": 0.9419, "step": 9238 }, { "epoch": 1.6996840735922691, "grad_norm": 0.8012736439704895, "learning_rate": 9.819892739802173e-06, "loss": 1.052, "step": 9239 }, { "epoch": 1.6998699126556402, "grad_norm": 0.6890170574188232, "learning_rate": 9.807257947983895e-06, "loss": 0.9941, "step": 9240 }, { "epoch": 1.7000557517190114, "grad_norm": 0.7510611414909363, "learning_rate": 9.794630870545796e-06, "loss": 1.0552, "step": 9241 }, { "epoch": 1.7002415907823825, "grad_norm": 0.833249032497406, "learning_rate": 9.782011508567857e-06, "loss": 1.1007, "step": 9242 }, { "epoch": 1.7004274298457536, "grad_norm": 1.0734540224075317, "learning_rate": 9.769399863129459e-06, "loss": 1.3107, "step": 9243 }, { "epoch": 1.7006132689091247, "grad_norm": 0.6381735801696777, "learning_rate": 9.756795935309316e-06, "loss": 1.0896, "step": 9244 }, { "epoch": 1.7007991079724958, "grad_norm": 0.6753941178321838, "learning_rate": 9.744199726185443e-06, "loss": 1.0672, "step": 9245 }, { "epoch": 1.700984947035867, "grad_norm": 0.825974702835083, "learning_rate": 
9.731611236835259e-06, "loss": 0.9139, "step": 9246 }, { "epoch": 1.701170786099238, "grad_norm": 0.700402021408081, "learning_rate": 9.719030468335443e-06, "loss": 0.7945, "step": 9247 }, { "epoch": 1.7013566251626093, "grad_norm": 0.7434213757514954, "learning_rate": 9.706457421762106e-06, "loss": 1.1943, "step": 9248 }, { "epoch": 1.7015424642259802, "grad_norm": 0.5882927775382996, "learning_rate": 9.693892098190582e-06, "loss": 0.8897, "step": 9249 }, { "epoch": 1.7017283032893515, "grad_norm": 0.627251148223877, "learning_rate": 9.681334498695648e-06, "loss": 0.9317, "step": 9250 }, { "epoch": 1.7019141423527224, "grad_norm": 0.74644935131073, "learning_rate": 9.66878462435139e-06, "loss": 1.091, "step": 9251 }, { "epoch": 1.7020999814160938, "grad_norm": 0.7172256708145142, "learning_rate": 9.656242476231204e-06, "loss": 0.7462, "step": 9252 }, { "epoch": 1.7022858204794646, "grad_norm": 0.761581540107727, "learning_rate": 9.643708055407873e-06, "loss": 0.8465, "step": 9253 }, { "epoch": 1.702471659542836, "grad_norm": 0.7488076090812683, "learning_rate": 9.631181362953467e-06, "loss": 0.7814, "step": 9254 }, { "epoch": 1.702657498606207, "grad_norm": 0.803229033946991, "learning_rate": 9.618662399939448e-06, "loss": 0.9648, "step": 9255 }, { "epoch": 1.7028433376695782, "grad_norm": 0.7162587642669678, "learning_rate": 9.60615116743655e-06, "loss": 0.8486, "step": 9256 }, { "epoch": 1.7030291767329493, "grad_norm": 0.8364415764808655, "learning_rate": 9.593647666514904e-06, "loss": 0.9365, "step": 9257 }, { "epoch": 1.7032150157963204, "grad_norm": 0.6324708461761475, "learning_rate": 9.581151898244e-06, "loss": 0.9032, "step": 9258 }, { "epoch": 1.7034008548596915, "grad_norm": 0.6390442252159119, "learning_rate": 9.568663863692562e-06, "loss": 0.877, "step": 9259 }, { "epoch": 1.7035866939230626, "grad_norm": 0.6136648058891296, "learning_rate": 9.556183563928766e-06, "loss": 0.941, "step": 9260 }, { "epoch": 1.7037725329864337, "grad_norm": 
0.6558448672294617, "learning_rate": 9.54371100002005e-06, "loss": 0.9333, "step": 9261 }, { "epoch": 1.7039583720498048, "grad_norm": 0.8663216829299927, "learning_rate": 9.53124617303326e-06, "loss": 1.1077, "step": 9262 }, { "epoch": 1.7041442111131762, "grad_norm": 0.9772462844848633, "learning_rate": 9.518789084034495e-06, "loss": 1.0044, "step": 9263 }, { "epoch": 1.704330050176547, "grad_norm": 0.7501927614212036, "learning_rate": 9.506339734089253e-06, "loss": 0.8734, "step": 9264 }, { "epoch": 1.7045158892399184, "grad_norm": 0.8446945548057556, "learning_rate": 9.49389812426238e-06, "loss": 1.0656, "step": 9265 }, { "epoch": 1.7047017283032893, "grad_norm": 0.7121186852455139, "learning_rate": 9.48146425561799e-06, "loss": 1.0447, "step": 9266 }, { "epoch": 1.7048875673666606, "grad_norm": 0.7923415303230286, "learning_rate": 9.46903812921962e-06, "loss": 0.7783, "step": 9267 }, { "epoch": 1.7050734064300315, "grad_norm": 0.7992509603500366, "learning_rate": 9.456619746130058e-06, "loss": 0.854, "step": 9268 }, { "epoch": 1.7052592454934028, "grad_norm": 0.8346595764160156, "learning_rate": 9.444209107411518e-06, "loss": 1.1072, "step": 9269 }, { "epoch": 1.7054450845567737, "grad_norm": 0.6362949013710022, "learning_rate": 9.431806214125472e-06, "loss": 0.688, "step": 9270 }, { "epoch": 1.705630923620145, "grad_norm": 0.9164164662361145, "learning_rate": 9.419411067332773e-06, "loss": 1.0372, "step": 9271 }, { "epoch": 1.7058167626835161, "grad_norm": 0.6323965191841125, "learning_rate": 9.40702366809365e-06, "loss": 0.7713, "step": 9272 }, { "epoch": 1.7060026017468872, "grad_norm": 0.7026657462120056, "learning_rate": 9.39464401746757e-06, "loss": 1.0552, "step": 9273 }, { "epoch": 1.7061884408102583, "grad_norm": 0.8523253202438354, "learning_rate": 9.382272116513402e-06, "loss": 0.9925, "step": 9274 }, { "epoch": 1.7063742798736294, "grad_norm": 0.7092315554618835, "learning_rate": 9.36990796628936e-06, "loss": 0.9838, "step": 9275 }, { "epoch": 
1.7065601189370005, "grad_norm": 0.7713611721992493, "learning_rate": 9.357551567852974e-06, "loss": 0.6708, "step": 9276 }, { "epoch": 1.7067459580003717, "grad_norm": 0.6664693355560303, "learning_rate": 9.345202922261098e-06, "loss": 1.0176, "step": 9277 }, { "epoch": 1.7069317970637428, "grad_norm": 0.7604504823684692, "learning_rate": 9.332862030569944e-06, "loss": 1.0009, "step": 9278 }, { "epoch": 1.7071176361271139, "grad_norm": 0.7931390404701233, "learning_rate": 9.320528893835078e-06, "loss": 0.9902, "step": 9279 }, { "epoch": 1.7073034751904852, "grad_norm": 0.7187214493751526, "learning_rate": 9.308203513111336e-06, "loss": 0.9574, "step": 9280 }, { "epoch": 1.707489314253856, "grad_norm": 0.7779387831687927, "learning_rate": 9.295885889452982e-06, "loss": 1.0485, "step": 9281 }, { "epoch": 1.7076751533172274, "grad_norm": 0.7711448073387146, "learning_rate": 9.283576023913532e-06, "loss": 1.0164, "step": 9282 }, { "epoch": 1.7078609923805983, "grad_norm": 0.6007513403892517, "learning_rate": 9.271273917545897e-06, "loss": 0.8938, "step": 9283 }, { "epoch": 1.7080468314439696, "grad_norm": 0.8609693646430969, "learning_rate": 9.258979571402282e-06, "loss": 1.0425, "step": 9284 }, { "epoch": 1.7082326705073405, "grad_norm": 0.6705703139305115, "learning_rate": 9.246692986534255e-06, "loss": 1.1867, "step": 9285 }, { "epoch": 1.7084185095707118, "grad_norm": 0.8482201099395752, "learning_rate": 9.234414163992744e-06, "loss": 1.0239, "step": 9286 }, { "epoch": 1.7086043486340827, "grad_norm": 0.7028303146362305, "learning_rate": 9.22214310482794e-06, "loss": 1.054, "step": 9287 }, { "epoch": 1.708790187697454, "grad_norm": 0.6620766520500183, "learning_rate": 9.20987981008944e-06, "loss": 0.7547, "step": 9288 }, { "epoch": 1.7089760267608252, "grad_norm": 0.685680627822876, "learning_rate": 9.197624280826134e-06, "loss": 0.9087, "step": 9289 }, { "epoch": 1.7091618658241963, "grad_norm": 0.8493560552597046, "learning_rate": 9.185376518086297e-06, "loss": 
1.0277, "step": 9290 }, { "epoch": 1.7093477048875674, "grad_norm": 0.7470026016235352, "learning_rate": 9.173136522917457e-06, "loss": 0.867, "step": 9291 }, { "epoch": 1.7095335439509385, "grad_norm": 0.6782554388046265, "learning_rate": 9.160904296366557e-06, "loss": 0.9899, "step": 9292 }, { "epoch": 1.7097193830143096, "grad_norm": 0.7193986177444458, "learning_rate": 9.148679839479856e-06, "loss": 0.7549, "step": 9293 }, { "epoch": 1.7099052220776807, "grad_norm": 1.4221397638320923, "learning_rate": 9.13646315330291e-06, "loss": 0.9677, "step": 9294 }, { "epoch": 1.7100910611410518, "grad_norm": 0.7279139161109924, "learning_rate": 9.12425423888067e-06, "loss": 0.7372, "step": 9295 }, { "epoch": 1.710276900204423, "grad_norm": 2.2297890186309814, "learning_rate": 9.112053097257357e-06, "loss": 1.1687, "step": 9296 }, { "epoch": 1.7104627392677942, "grad_norm": 0.7337605953216553, "learning_rate": 9.0998597294766e-06, "loss": 0.9823, "step": 9297 }, { "epoch": 1.7106485783311651, "grad_norm": 0.7120336890220642, "learning_rate": 9.087674136581271e-06, "loss": 0.7, "step": 9298 }, { "epoch": 1.7108344173945365, "grad_norm": 0.6502007842063904, "learning_rate": 9.075496319613663e-06, "loss": 0.9573, "step": 9299 }, { "epoch": 1.7110202564579073, "grad_norm": 0.7360041737556458, "learning_rate": 9.063326279615391e-06, "loss": 0.9621, "step": 9300 }, { "epoch": 1.7112060955212787, "grad_norm": 0.6855154037475586, "learning_rate": 9.051164017627346e-06, "loss": 0.8322, "step": 9301 }, { "epoch": 1.7113919345846496, "grad_norm": 0.8629727959632874, "learning_rate": 9.039009534689813e-06, "loss": 0.9323, "step": 9302 }, { "epoch": 1.7115777736480209, "grad_norm": 0.7231772541999817, "learning_rate": 9.026862831842398e-06, "loss": 0.7956, "step": 9303 }, { "epoch": 1.7117636127113918, "grad_norm": 0.6572983860969543, "learning_rate": 9.014723910124001e-06, "loss": 0.8395, "step": 9304 }, { "epoch": 1.711949451774763, "grad_norm": 0.8665408492088318, "learning_rate": 
9.002592770572915e-06, "loss": 1.0307, "step": 9305 }, { "epoch": 1.7121352908381342, "grad_norm": 0.8959839940071106, "learning_rate": 8.990469414226743e-06, "loss": 0.8463, "step": 9306 }, { "epoch": 1.7123211299015053, "grad_norm": 0.6686840057373047, "learning_rate": 8.978353842122445e-06, "loss": 0.8437, "step": 9307 }, { "epoch": 1.7125069689648764, "grad_norm": 0.655849277973175, "learning_rate": 8.966246055296235e-06, "loss": 1.0538, "step": 9308 }, { "epoch": 1.7126928080282475, "grad_norm": 0.6408183574676514, "learning_rate": 8.954146054783774e-06, "loss": 0.8483, "step": 9309 }, { "epoch": 1.7128786470916186, "grad_norm": 0.7304595112800598, "learning_rate": 8.942053841619968e-06, "loss": 0.8701, "step": 9310 }, { "epoch": 1.7130644861549897, "grad_norm": 0.7849457263946533, "learning_rate": 8.9299694168391e-06, "loss": 0.9448, "step": 9311 }, { "epoch": 1.7132503252183608, "grad_norm": 0.7002692818641663, "learning_rate": 8.917892781474768e-06, "loss": 1.1745, "step": 9312 }, { "epoch": 1.713436164281732, "grad_norm": 0.6553815007209778, "learning_rate": 8.90582393655992e-06, "loss": 1.0872, "step": 9313 }, { "epoch": 1.7136220033451033, "grad_norm": 0.885659396648407, "learning_rate": 8.893762883126855e-06, "loss": 0.8797, "step": 9314 }, { "epoch": 1.7138078424084742, "grad_norm": 0.6541244983673096, "learning_rate": 8.881709622207125e-06, "loss": 0.9569, "step": 9315 }, { "epoch": 1.7139936814718455, "grad_norm": 0.7587239742279053, "learning_rate": 8.869664154831737e-06, "loss": 0.9818, "step": 9316 }, { "epoch": 1.7141795205352164, "grad_norm": 0.8520368337631226, "learning_rate": 8.857626482030901e-06, "loss": 0.8596, "step": 9317 }, { "epoch": 1.7143653595985877, "grad_norm": 0.6669599413871765, "learning_rate": 8.84559660483426e-06, "loss": 0.9869, "step": 9318 }, { "epoch": 1.7145511986619586, "grad_norm": 0.6341535449028015, "learning_rate": 8.83357452427076e-06, "loss": 1.0055, "step": 9319 }, { "epoch": 1.71473703772533, "grad_norm": 
0.8398535251617432, "learning_rate": 8.821560241368655e-06, "loss": 0.9017, "step": 9320 }, { "epoch": 1.714922876788701, "grad_norm": 1.0017404556274414, "learning_rate": 8.809553757155597e-06, "loss": 0.8948, "step": 9321 }, { "epoch": 1.7151087158520721, "grad_norm": 0.7218863368034363, "learning_rate": 8.79755507265847e-06, "loss": 0.8497, "step": 9322 }, { "epoch": 1.7152945549154432, "grad_norm": 0.7040542364120483, "learning_rate": 8.785564188903583e-06, "loss": 1.0778, "step": 9323 }, { "epoch": 1.7154803939788144, "grad_norm": 0.609380841255188, "learning_rate": 8.773581106916517e-06, "loss": 0.7377, "step": 9324 }, { "epoch": 1.7156662330421855, "grad_norm": 0.6335259675979614, "learning_rate": 8.76160582772223e-06, "loss": 0.8535, "step": 9325 }, { "epoch": 1.7158520721055566, "grad_norm": 0.6981773376464844, "learning_rate": 8.749638352345002e-06, "loss": 0.8354, "step": 9326 }, { "epoch": 1.7160379111689277, "grad_norm": 0.7336592078208923, "learning_rate": 8.737678681808414e-06, "loss": 0.9337, "step": 9327 }, { "epoch": 1.7162237502322988, "grad_norm": 0.7219322919845581, "learning_rate": 8.725726817135427e-06, "loss": 0.9018, "step": 9328 }, { "epoch": 1.7164095892956701, "grad_norm": 0.7079262137413025, "learning_rate": 8.713782759348266e-06, "loss": 0.8605, "step": 9329 }, { "epoch": 1.716595428359041, "grad_norm": 0.6500135660171509, "learning_rate": 8.701846509468592e-06, "loss": 0.798, "step": 9330 }, { "epoch": 1.7167812674224123, "grad_norm": 0.8792296051979065, "learning_rate": 8.689918068517278e-06, "loss": 1.222, "step": 9331 }, { "epoch": 1.7169671064857832, "grad_norm": 0.720497727394104, "learning_rate": 8.677997437514629e-06, "loss": 0.8851, "step": 9332 }, { "epoch": 1.7171529455491545, "grad_norm": 0.6603461503982544, "learning_rate": 8.66608461748023e-06, "loss": 0.8619, "step": 9333 }, { "epoch": 1.7173387846125254, "grad_norm": 0.731478214263916, "learning_rate": 8.654179609433011e-06, "loss": 0.9271, "step": 9334 }, { "epoch": 
1.7175246236758968, "grad_norm": 1.2854375839233398, "learning_rate": 8.642282414391257e-06, "loss": 1.5186, "step": 9335 }, { "epoch": 1.7177104627392676, "grad_norm": 0.7765483260154724, "learning_rate": 8.630393033372507e-06, "loss": 0.7269, "step": 9336 }, { "epoch": 1.717896301802639, "grad_norm": 0.6998369097709656, "learning_rate": 8.618511467393741e-06, "loss": 1.0292, "step": 9337 }, { "epoch": 1.71808214086601, "grad_norm": 0.7568623423576355, "learning_rate": 8.606637717471178e-06, "loss": 1.1241, "step": 9338 }, { "epoch": 1.7182679799293812, "grad_norm": 0.8563328981399536, "learning_rate": 8.594771784620414e-06, "loss": 0.9679, "step": 9339 }, { "epoch": 1.7184538189927523, "grad_norm": 0.6528421640396118, "learning_rate": 8.582913669856385e-06, "loss": 0.8435, "step": 9340 }, { "epoch": 1.7186396580561234, "grad_norm": 0.7107359170913696, "learning_rate": 8.571063374193322e-06, "loss": 0.9204, "step": 9341 }, { "epoch": 1.7188254971194945, "grad_norm": 0.725544273853302, "learning_rate": 8.559220898644827e-06, "loss": 0.9239, "step": 9342 }, { "epoch": 1.7190113361828656, "grad_norm": 0.7376357913017273, "learning_rate": 8.547386244223777e-06, "loss": 1.0024, "step": 9343 }, { "epoch": 1.7191971752462367, "grad_norm": 0.8160426616668701, "learning_rate": 8.53555941194245e-06, "loss": 0.9461, "step": 9344 }, { "epoch": 1.7193830143096078, "grad_norm": 0.6708709597587585, "learning_rate": 8.523740402812397e-06, "loss": 0.7973, "step": 9345 }, { "epoch": 1.7195688533729792, "grad_norm": 0.8642181158065796, "learning_rate": 8.511929217844528e-06, "loss": 0.9434, "step": 9346 }, { "epoch": 1.71975469243635, "grad_norm": 0.6233113408088684, "learning_rate": 8.50012585804908e-06, "loss": 0.8648, "step": 9347 }, { "epoch": 1.7199405314997214, "grad_norm": 0.6707333922386169, "learning_rate": 8.488330324435655e-06, "loss": 0.9194, "step": 9348 }, { "epoch": 1.7201263705630923, "grad_norm": 0.6645792722702026, "learning_rate": 8.476542618013095e-06, "loss": 
0.8238, "step": 9349 }, { "epoch": 1.7203122096264636, "grad_norm": 0.6607288718223572, "learning_rate": 8.464762739789655e-06, "loss": 0.7656, "step": 9350 }, { "epoch": 1.7204980486898345, "grad_norm": 0.7014643549919128, "learning_rate": 8.45299069077291e-06, "loss": 1.0564, "step": 9351 }, { "epoch": 1.7206838877532058, "grad_norm": 0.7516298890113831, "learning_rate": 8.44122647196971e-06, "loss": 1.0525, "step": 9352 }, { "epoch": 1.7208697268165767, "grad_norm": 0.8982193470001221, "learning_rate": 8.429470084386293e-06, "loss": 1.1706, "step": 9353 }, { "epoch": 1.721055565879948, "grad_norm": 0.7839781045913696, "learning_rate": 8.417721529028234e-06, "loss": 1.0219, "step": 9354 }, { "epoch": 1.7212414049433191, "grad_norm": 0.8507671356201172, "learning_rate": 8.405980806900371e-06, "loss": 0.9497, "step": 9355 }, { "epoch": 1.7214272440066902, "grad_norm": 1.1558749675750732, "learning_rate": 8.394247919006936e-06, "loss": 1.34, "step": 9356 }, { "epoch": 1.7216130830700613, "grad_norm": 0.9968247413635254, "learning_rate": 8.38252286635145e-06, "loss": 1.3205, "step": 9357 }, { "epoch": 1.7217989221334324, "grad_norm": 0.938660740852356, "learning_rate": 8.370805649936818e-06, "loss": 1.102, "step": 9358 }, { "epoch": 1.7219847611968035, "grad_norm": 0.6969743967056274, "learning_rate": 8.359096270765198e-06, "loss": 0.8952, "step": 9359 }, { "epoch": 1.7221706002601747, "grad_norm": 0.7880121469497681, "learning_rate": 8.347394729838132e-06, "loss": 1.1787, "step": 9360 }, { "epoch": 1.7223564393235458, "grad_norm": 0.7151280045509338, "learning_rate": 8.335701028156495e-06, "loss": 0.9412, "step": 9361 }, { "epoch": 1.7225422783869169, "grad_norm": 0.6870322823524475, "learning_rate": 8.324015166720456e-06, "loss": 0.9034, "step": 9362 }, { "epoch": 1.7227281174502882, "grad_norm": 0.7795487642288208, "learning_rate": 8.312337146529525e-06, "loss": 1.3137, "step": 9363 }, { "epoch": 1.722913956513659, "grad_norm": 0.7532526254653931, "learning_rate": 
8.300666968582572e-06, "loss": 0.8896, "step": 9364 }, { "epoch": 1.7230997955770304, "grad_norm": 0.77845299243927, "learning_rate": 8.289004633877773e-06, "loss": 1.2301, "step": 9365 }, { "epoch": 1.7232856346404013, "grad_norm": 0.7793643474578857, "learning_rate": 8.27735014341261e-06, "loss": 1.0974, "step": 9366 }, { "epoch": 1.7234714737037726, "grad_norm": 0.6873371005058289, "learning_rate": 8.265703498183918e-06, "loss": 0.8727, "step": 9367 }, { "epoch": 1.7236573127671435, "grad_norm": 0.7508918046951294, "learning_rate": 8.25406469918789e-06, "loss": 1.0924, "step": 9368 }, { "epoch": 1.7238431518305148, "grad_norm": 0.8036147356033325, "learning_rate": 8.242433747419987e-06, "loss": 1.0435, "step": 9369 }, { "epoch": 1.724028990893886, "grad_norm": 0.7148988842964172, "learning_rate": 8.230810643875053e-06, "loss": 1.0826, "step": 9370 }, { "epoch": 1.724214829957257, "grad_norm": 0.8428753018379211, "learning_rate": 8.219195389547207e-06, "loss": 0.9271, "step": 9371 }, { "epoch": 1.7244006690206282, "grad_norm": 0.65247642993927, "learning_rate": 8.207587985429954e-06, "loss": 0.8675, "step": 9372 }, { "epoch": 1.7245865080839993, "grad_norm": 0.9677809476852417, "learning_rate": 8.195988432516078e-06, "loss": 0.9372, "step": 9373 }, { "epoch": 1.7247723471473704, "grad_norm": 0.7008064389228821, "learning_rate": 8.184396731797728e-06, "loss": 0.7741, "step": 9374 }, { "epoch": 1.7249581862107415, "grad_norm": 0.6639923453330994, "learning_rate": 8.172812884266378e-06, "loss": 0.813, "step": 9375 }, { "epoch": 1.7251440252741126, "grad_norm": 0.7582129240036011, "learning_rate": 8.161236890912794e-06, "loss": 0.8838, "step": 9376 }, { "epoch": 1.7253298643374837, "grad_norm": 0.6991216540336609, "learning_rate": 8.149668752727113e-06, "loss": 0.9785, "step": 9377 }, { "epoch": 1.7255157034008548, "grad_norm": 0.6826270222663879, "learning_rate": 8.138108470698768e-06, "loss": 0.9951, "step": 9378 }, { "epoch": 1.725701542464226, "grad_norm": 
0.6590842604637146, "learning_rate": 8.126556045816569e-06, "loss": 0.8817, "step": 9379 }, { "epoch": 1.7258873815275972, "grad_norm": 0.7262789011001587, "learning_rate": 8.115011479068568e-06, "loss": 1.0486, "step": 9380 }, { "epoch": 1.7260732205909681, "grad_norm": 0.6248814463615417, "learning_rate": 8.103474771442243e-06, "loss": 0.9154, "step": 9381 }, { "epoch": 1.7262590596543395, "grad_norm": 0.7736847400665283, "learning_rate": 8.091945923924338e-06, "loss": 0.9123, "step": 9382 }, { "epoch": 1.7264448987177103, "grad_norm": 0.6092911958694458, "learning_rate": 8.08042493750093e-06, "loss": 1.0612, "step": 9383 }, { "epoch": 1.7266307377810817, "grad_norm": 0.6939818263053894, "learning_rate": 8.068911813157454e-06, "loss": 1.0582, "step": 9384 }, { "epoch": 1.7268165768444526, "grad_norm": 0.7486193776130676, "learning_rate": 8.057406551878621e-06, "loss": 1.0236, "step": 9385 }, { "epoch": 1.7270024159078239, "grad_norm": 0.6597424149513245, "learning_rate": 8.045909154648534e-06, "loss": 0.9871, "step": 9386 }, { "epoch": 1.727188254971195, "grad_norm": 2.9619107246398926, "learning_rate": 8.034419622450561e-06, "loss": 1.189, "step": 9387 }, { "epoch": 1.727374094034566, "grad_norm": 0.838079571723938, "learning_rate": 8.02293795626744e-06, "loss": 0.8031, "step": 9388 }, { "epoch": 1.7275599330979372, "grad_norm": 0.6807136535644531, "learning_rate": 8.01146415708125e-06, "loss": 0.9412, "step": 9389 }, { "epoch": 1.7277457721613083, "grad_norm": 0.641676664352417, "learning_rate": 7.999998225873317e-06, "loss": 0.7303, "step": 9390 }, { "epoch": 1.7279316112246794, "grad_norm": 0.7298771142959595, "learning_rate": 7.988540163624381e-06, "loss": 0.8572, "step": 9391 }, { "epoch": 1.7281174502880505, "grad_norm": 0.8123074173927307, "learning_rate": 7.977089971314466e-06, "loss": 0.9141, "step": 9392 }, { "epoch": 1.7283032893514216, "grad_norm": 0.6712712049484253, "learning_rate": 7.965647649922947e-06, "loss": 0.8206, "step": 9393 }, { "epoch": 
1.7284891284147927, "grad_norm": 0.7178130745887756, "learning_rate": 7.954213200428485e-06, "loss": 0.9454, "step": 9394 }, { "epoch": 1.728674967478164, "grad_norm": 0.6644273996353149, "learning_rate": 7.942786623809106e-06, "loss": 1.0341, "step": 9395 }, { "epoch": 1.728860806541535, "grad_norm": 0.7427142262458801, "learning_rate": 7.931367921042166e-06, "loss": 0.9111, "step": 9396 }, { "epoch": 1.7290466456049063, "grad_norm": 0.7232275009155273, "learning_rate": 7.91995709310429e-06, "loss": 0.8069, "step": 9397 }, { "epoch": 1.7292324846682772, "grad_norm": 0.6869901418685913, "learning_rate": 7.908554140971514e-06, "loss": 0.8375, "step": 9398 }, { "epoch": 1.7294183237316485, "grad_norm": 0.8033495545387268, "learning_rate": 7.89715906561912e-06, "loss": 0.9688, "step": 9399 }, { "epoch": 1.7296041627950194, "grad_norm": 0.8616623878479004, "learning_rate": 7.885771868021796e-06, "loss": 1.1344, "step": 9400 }, { "epoch": 1.7297900018583907, "grad_norm": 0.6528728008270264, "learning_rate": 7.874392549153464e-06, "loss": 0.9157, "step": 9401 }, { "epoch": 1.7299758409217616, "grad_norm": 0.7244764566421509, "learning_rate": 7.863021109987457e-06, "loss": 0.571, "step": 9402 }, { "epoch": 1.730161679985133, "grad_norm": 0.8218297362327576, "learning_rate": 7.851657551496395e-06, "loss": 1.0371, "step": 9403 }, { "epoch": 1.730347519048504, "grad_norm": 0.6427557468414307, "learning_rate": 7.840301874652211e-06, "loss": 1.0065, "step": 9404 }, { "epoch": 1.7305333581118751, "grad_norm": 0.6553307175636292, "learning_rate": 7.828954080426198e-06, "loss": 0.8694, "step": 9405 }, { "epoch": 1.7307191971752462, "grad_norm": 1.0670928955078125, "learning_rate": 7.817614169788945e-06, "loss": 1.3094, "step": 9406 }, { "epoch": 1.7309050362386174, "grad_norm": 0.6958805918693542, "learning_rate": 7.806282143710375e-06, "loss": 0.9212, "step": 9407 }, { "epoch": 1.7310908753019885, "grad_norm": 0.6197575926780701, "learning_rate": 7.794958003159757e-06, "loss": 
0.7294, "step": 9408 }, { "epoch": 1.7312767143653596, "grad_norm": 0.698625922203064, "learning_rate": 7.783641749105664e-06, "loss": 1.0314, "step": 9409 }, { "epoch": 1.7314625534287307, "grad_norm": 0.6479657888412476, "learning_rate": 7.772333382516017e-06, "loss": 0.971, "step": 9410 }, { "epoch": 1.7316483924921018, "grad_norm": 0.7665457129478455, "learning_rate": 7.761032904358e-06, "loss": 1.1693, "step": 9411 }, { "epoch": 1.7318342315554731, "grad_norm": 0.7469654083251953, "learning_rate": 7.749740315598209e-06, "loss": 0.9795, "step": 9412 }, { "epoch": 1.732020070618844, "grad_norm": 0.6912213563919067, "learning_rate": 7.738455617202501e-06, "loss": 0.9352, "step": 9413 }, { "epoch": 1.7322059096822153, "grad_norm": 0.6912152171134949, "learning_rate": 7.727178810136093e-06, "loss": 1.0428, "step": 9414 }, { "epoch": 1.7323917487455862, "grad_norm": 0.9005116820335388, "learning_rate": 7.715909895363505e-06, "loss": 1.0998, "step": 9415 }, { "epoch": 1.7325775878089575, "grad_norm": 0.6274826526641846, "learning_rate": 7.704648873848586e-06, "loss": 1.1338, "step": 9416 }, { "epoch": 1.7327634268723284, "grad_norm": 0.8490490913391113, "learning_rate": 7.693395746554544e-06, "loss": 0.9429, "step": 9417 }, { "epoch": 1.7329492659356998, "grad_norm": 1.9930272102355957, "learning_rate": 7.682150514443853e-06, "loss": 1.04, "step": 9418 }, { "epoch": 1.7331351049990706, "grad_norm": 0.6957984566688538, "learning_rate": 7.670913178478367e-06, "loss": 0.9502, "step": 9419 }, { "epoch": 1.733320944062442, "grad_norm": 0.7939738631248474, "learning_rate": 7.659683739619206e-06, "loss": 1.1698, "step": 9420 }, { "epoch": 1.733506783125813, "grad_norm": 0.6218949556350708, "learning_rate": 7.648462198826879e-06, "loss": 0.952, "step": 9421 }, { "epoch": 1.7336926221891842, "grad_norm": 0.6665553450584412, "learning_rate": 7.63724855706116e-06, "loss": 0.916, "step": 9422 }, { "epoch": 1.7336926221891842, "eval_loss": 1.0072797536849976, "eval_runtime": 
23.2133, "eval_samples_per_second": 47.042, "eval_steps_per_second": 23.521, "step": 9422 }, { "epoch": 1.7338784612525553, "grad_norm": 0.7004141211509705, "learning_rate": 7.626042815281209e-06, "loss": 0.8868, "step": 9423 }, { "epoch": 1.7340643003159264, "grad_norm": 0.7327351570129395, "learning_rate": 7.6148449744454786e-06, "loss": 0.9734, "step": 9424 }, { "epoch": 1.7342501393792975, "grad_norm": 0.6216403841972351, "learning_rate": 7.603655035511703e-06, "loss": 0.9558, "step": 9425 }, { "epoch": 1.7344359784426686, "grad_norm": 0.6237961649894714, "learning_rate": 7.592472999437028e-06, "loss": 0.8549, "step": 9426 }, { "epoch": 1.7346218175060397, "grad_norm": 0.7453010082244873, "learning_rate": 7.581298867177844e-06, "loss": 0.9747, "step": 9427 }, { "epoch": 1.7348076565694108, "grad_norm": 0.6449342966079712, "learning_rate": 7.5701326396899194e-06, "loss": 0.8981, "step": 9428 }, { "epoch": 1.7349934956327822, "grad_norm": 0.7956316471099854, "learning_rate": 7.558974317928313e-06, "loss": 0.8466, "step": 9429 }, { "epoch": 1.735179334696153, "grad_norm": 0.7923126220703125, "learning_rate": 7.547823902847406e-06, "loss": 1.1053, "step": 9430 }, { "epoch": 1.7353651737595244, "grad_norm": 0.7033422589302063, "learning_rate": 7.536681395400968e-06, "loss": 0.8778, "step": 9431 }, { "epoch": 1.7355510128228953, "grad_norm": 0.6055691242218018, "learning_rate": 7.525546796541994e-06, "loss": 0.6723, "step": 9432 }, { "epoch": 1.7357368518862666, "grad_norm": 0.6363259553909302, "learning_rate": 7.5144201072228745e-06, "loss": 0.859, "step": 9433 }, { "epoch": 1.7359226909496375, "grad_norm": 0.6942827105522156, "learning_rate": 7.503301328395273e-06, "loss": 1.1544, "step": 9434 }, { "epoch": 1.7361085300130088, "grad_norm": 0.6959713101387024, "learning_rate": 7.492190461010229e-06, "loss": 0.8484, "step": 9435 }, { "epoch": 1.73629436907638, "grad_norm": 0.7058919072151184, "learning_rate": 7.481087506018058e-06, "loss": 0.9904, "step": 9436 }, { 
"epoch": 1.736480208139751, "grad_norm": 0.7573116421699524, "learning_rate": 7.469992464368447e-06, "loss": 0.8349, "step": 9437 }, { "epoch": 1.7366660472031221, "grad_norm": 0.6595671772956848, "learning_rate": 7.458905337010369e-06, "loss": 0.8104, "step": 9438 }, { "epoch": 1.7368518862664932, "grad_norm": 0.7107803821563721, "learning_rate": 7.447826124892121e-06, "loss": 0.8094, "step": 9439 }, { "epoch": 1.7370377253298643, "grad_norm": 0.7347820997238159, "learning_rate": 7.436754828961346e-06, "loss": 0.8601, "step": 9440 }, { "epoch": 1.7372235643932354, "grad_norm": 0.6467185020446777, "learning_rate": 7.425691450164962e-06, "loss": 0.9238, "step": 9441 }, { "epoch": 1.7374094034566065, "grad_norm": 4.698081016540527, "learning_rate": 7.414635989449293e-06, "loss": 1.2099, "step": 9442 }, { "epoch": 1.7375952425199777, "grad_norm": 0.6623215079307556, "learning_rate": 7.40358844775989e-06, "loss": 0.8568, "step": 9443 }, { "epoch": 1.737781081583349, "grad_norm": 0.6400848031044006, "learning_rate": 7.392548826041701e-06, "loss": 1.0292, "step": 9444 }, { "epoch": 1.7379669206467199, "grad_norm": 0.7742132544517517, "learning_rate": 7.381517125238968e-06, "loss": 1.0895, "step": 9445 }, { "epoch": 1.7381527597100912, "grad_norm": 0.6758570671081543, "learning_rate": 7.370493346295249e-06, "loss": 0.8089, "step": 9446 }, { "epoch": 1.738338598773462, "grad_norm": 0.7959402203559875, "learning_rate": 7.359477490153432e-06, "loss": 1.1993, "step": 9447 }, { "epoch": 1.7385244378368334, "grad_norm": 0.7615758776664734, "learning_rate": 7.3484695577557216e-06, "loss": 1.0477, "step": 9448 }, { "epoch": 1.7387102769002043, "grad_norm": 0.8708173036575317, "learning_rate": 7.337469550043652e-06, "loss": 1.5399, "step": 9449 }, { "epoch": 1.7388961159635756, "grad_norm": 0.6313830614089966, "learning_rate": 7.326477467958104e-06, "loss": 0.8139, "step": 9450 }, { "epoch": 1.7390819550269465, "grad_norm": 0.6188602447509766, "learning_rate": 
7.3154933124392145e-06, "loss": 0.7236, "step": 9451 }, { "epoch": 1.7392677940903178, "grad_norm": 0.6811180114746094, "learning_rate": 7.304517084426488e-06, "loss": 1.0196, "step": 9452 }, { "epoch": 1.739453633153689, "grad_norm": 0.7616562247276306, "learning_rate": 7.2935487848587725e-06, "loss": 1.1128, "step": 9453 }, { "epoch": 1.73963947221706, "grad_norm": 0.7264105677604675, "learning_rate": 7.282588414674196e-06, "loss": 1.0773, "step": 9454 }, { "epoch": 1.7398253112804312, "grad_norm": 0.7808012366294861, "learning_rate": 7.27163597481022e-06, "loss": 1.0436, "step": 9455 }, { "epoch": 1.7400111503438023, "grad_norm": 0.7362692356109619, "learning_rate": 7.260691466203639e-06, "loss": 1.033, "step": 9456 }, { "epoch": 1.7401969894071734, "grad_norm": 0.8960447907447815, "learning_rate": 7.249754889790539e-06, "loss": 0.893, "step": 9457 }, { "epoch": 1.7403828284705445, "grad_norm": 0.7096341252326965, "learning_rate": 7.238826246506359e-06, "loss": 0.9255, "step": 9458 }, { "epoch": 1.7405686675339156, "grad_norm": 0.6668227314949036, "learning_rate": 7.227905537285873e-06, "loss": 1.0548, "step": 9459 }, { "epoch": 1.7407545065972867, "grad_norm": 0.567503035068512, "learning_rate": 7.216992763063113e-06, "loss": 0.8955, "step": 9460 }, { "epoch": 1.740940345660658, "grad_norm": 0.698553204536438, "learning_rate": 7.206087924771521e-06, "loss": 1.1543, "step": 9461 }, { "epoch": 1.741126184724029, "grad_norm": 0.6601186394691467, "learning_rate": 7.19519102334375e-06, "loss": 1.0774, "step": 9462 }, { "epoch": 1.7413120237874002, "grad_norm": 0.7196704149246216, "learning_rate": 7.184302059711901e-06, "loss": 0.832, "step": 9463 }, { "epoch": 1.7414978628507711, "grad_norm": 0.6979762315750122, "learning_rate": 7.17342103480727e-06, "loss": 0.8598, "step": 9464 }, { "epoch": 1.7416837019141425, "grad_norm": 0.7031317353248596, "learning_rate": 7.162547949560561e-06, "loss": 0.9583, "step": 9465 }, { "epoch": 1.7418695409775133, "grad_norm": 
0.7397237420082092, "learning_rate": 7.151682804901782e-06, "loss": 0.9233, "step": 9466 }, { "epoch": 1.7420553800408847, "grad_norm": 0.6605537533760071, "learning_rate": 7.140825601760237e-06, "loss": 0.8789, "step": 9467 }, { "epoch": 1.7422412191042556, "grad_norm": 0.7009168863296509, "learning_rate": 7.129976341064592e-06, "loss": 0.9078, "step": 9468 }, { "epoch": 1.7424270581676269, "grad_norm": 0.7297293543815613, "learning_rate": 7.119135023742773e-06, "loss": 0.8503, "step": 9469 }, { "epoch": 1.742612897230998, "grad_norm": 0.6891294717788696, "learning_rate": 7.108301650722104e-06, "loss": 0.9163, "step": 9470 }, { "epoch": 1.742798736294369, "grad_norm": 0.749011754989624, "learning_rate": 7.097476222929134e-06, "loss": 0.9002, "step": 9471 }, { "epoch": 1.7429845753577402, "grad_norm": 0.7376906871795654, "learning_rate": 7.0866587412898085e-06, "loss": 0.8309, "step": 9472 }, { "epoch": 1.7431704144211113, "grad_norm": 0.6205510497093201, "learning_rate": 7.07584920672939e-06, "loss": 0.901, "step": 9473 }, { "epoch": 1.7433562534844824, "grad_norm": 0.8700895309448242, "learning_rate": 7.065047620172416e-06, "loss": 0.7299, "step": 9474 }, { "epoch": 1.7435420925478535, "grad_norm": 1.9028913974761963, "learning_rate": 7.054253982542791e-06, "loss": 1.5785, "step": 9475 }, { "epoch": 1.7437279316112246, "grad_norm": 0.7316854000091553, "learning_rate": 7.043468294763689e-06, "loss": 0.8147, "step": 9476 }, { "epoch": 1.7439137706745957, "grad_norm": 0.561488687992096, "learning_rate": 7.03269055775766e-06, "loss": 0.8449, "step": 9477 }, { "epoch": 1.744099609737967, "grad_norm": 0.73995041847229, "learning_rate": 7.021920772446522e-06, "loss": 0.9434, "step": 9478 }, { "epoch": 1.744285448801338, "grad_norm": 0.6605014801025391, "learning_rate": 7.011158939751461e-06, "loss": 0.8919, "step": 9479 }, { "epoch": 1.7444712878647093, "grad_norm": 0.6619118452072144, "learning_rate": 7.000405060592941e-06, "loss": 0.9865, "step": 9480 }, { "epoch": 
1.7446571269280802, "grad_norm": 0.7531654834747314, "learning_rate": 6.98965913589078e-06, "loss": 0.8969, "step": 9481 }, { "epoch": 1.7448429659914515, "grad_norm": 0.6228139400482178, "learning_rate": 6.978921166564112e-06, "loss": 0.7863, "step": 9482 }, { "epoch": 1.7450288050548224, "grad_norm": 0.7277988791465759, "learning_rate": 6.968191153531334e-06, "loss": 1.0369, "step": 9483 }, { "epoch": 1.7452146441181937, "grad_norm": 0.6712504029273987, "learning_rate": 6.9574690977102675e-06, "loss": 1.0008, "step": 9484 }, { "epoch": 1.7454004831815646, "grad_norm": 0.7697523832321167, "learning_rate": 6.946755000017935e-06, "loss": 0.9245, "step": 9485 }, { "epoch": 1.745586322244936, "grad_norm": 0.5483777523040771, "learning_rate": 6.9360488613707585e-06, "loss": 0.5716, "step": 9486 }, { "epoch": 1.745772161308307, "grad_norm": 0.725399911403656, "learning_rate": 6.925350682684484e-06, "loss": 0.9928, "step": 9487 }, { "epoch": 1.7459580003716781, "grad_norm": 0.7769641876220703, "learning_rate": 6.914660464874112e-06, "loss": 0.9754, "step": 9488 }, { "epoch": 1.7461438394350492, "grad_norm": 0.5875201225280762, "learning_rate": 6.903978208854023e-06, "loss": 0.6502, "step": 9489 }, { "epoch": 1.7463296784984204, "grad_norm": 0.6539204120635986, "learning_rate": 6.893303915537874e-06, "loss": 0.9873, "step": 9490 }, { "epoch": 1.7465155175617915, "grad_norm": 0.6799665689468384, "learning_rate": 6.882637585838681e-06, "loss": 0.9155, "step": 9491 }, { "epoch": 1.7467013566251626, "grad_norm": 0.7157257199287415, "learning_rate": 6.871979220668734e-06, "loss": 1.1283, "step": 9492 }, { "epoch": 1.7468871956885337, "grad_norm": 0.7215269804000854, "learning_rate": 6.861328820939683e-06, "loss": 0.8819, "step": 9493 }, { "epoch": 1.7470730347519048, "grad_norm": 0.7681776285171509, "learning_rate": 6.8506863875624775e-06, "loss": 1.0661, "step": 9494 }, { "epoch": 1.7472588738152761, "grad_norm": 0.7559865713119507, "learning_rate": 6.84005192144741e-06, 
"loss": 0.821, "step": 9495 }, { "epoch": 1.747444712878647, "grad_norm": 0.8904591798782349, "learning_rate": 6.829425423504021e-06, "loss": 0.9272, "step": 9496 }, { "epoch": 1.7476305519420183, "grad_norm": 0.6811634302139282, "learning_rate": 6.81880689464125e-06, "loss": 0.6446, "step": 9497 }, { "epoch": 1.7478163910053892, "grad_norm": 0.6657458543777466, "learning_rate": 6.808196335767325e-06, "loss": 0.9042, "step": 9498 }, { "epoch": 1.7480022300687605, "grad_norm": 0.6674712300300598, "learning_rate": 6.797593747789765e-06, "loss": 0.9185, "step": 9499 }, { "epoch": 1.7481880691321314, "grad_norm": 0.5958244800567627, "learning_rate": 6.786999131615457e-06, "loss": 0.8436, "step": 9500 }, { "epoch": 1.7483739081955028, "grad_norm": 0.7511781454086304, "learning_rate": 6.776412488150585e-06, "loss": 1.1611, "step": 9501 }, { "epoch": 1.7485597472588739, "grad_norm": 0.6515342593193054, "learning_rate": 6.7658338183006155e-06, "loss": 1.1498, "step": 9502 }, { "epoch": 1.748745586322245, "grad_norm": 0.7776544094085693, "learning_rate": 6.755263122970412e-06, "loss": 0.8307, "step": 9503 }, { "epoch": 1.748931425385616, "grad_norm": 0.5926681160926819, "learning_rate": 6.7447004030640525e-06, "loss": 0.9145, "step": 9504 }, { "epoch": 1.7491172644489872, "grad_norm": 0.707554042339325, "learning_rate": 6.734145659485047e-06, "loss": 1.0083, "step": 9505 }, { "epoch": 1.7493031035123583, "grad_norm": 0.6766769289970398, "learning_rate": 6.723598893136119e-06, "loss": 1.0921, "step": 9506 }, { "epoch": 1.7494889425757294, "grad_norm": 0.6683719158172607, "learning_rate": 6.7130601049193664e-06, "loss": 0.9509, "step": 9507 }, { "epoch": 1.7496747816391005, "grad_norm": 0.6655029058456421, "learning_rate": 6.7025292957362265e-06, "loss": 0.6811, "step": 9508 }, { "epoch": 1.7498606207024716, "grad_norm": 0.6449025869369507, "learning_rate": 6.692006466487377e-06, "loss": 0.8224, "step": 9509 }, { "epoch": 1.750046459765843, "grad_norm": 0.588196873664856, 
"learning_rate": 6.681491618072877e-06, "loss": 0.6702, "step": 9510 }, { "epoch": 1.7502322988292138, "grad_norm": 0.7115856409072876, "learning_rate": 6.670984751392095e-06, "loss": 1.03, "step": 9511 }, { "epoch": 1.7504181378925852, "grad_norm": 0.6933878064155579, "learning_rate": 6.660485867343713e-06, "loss": 0.7458, "step": 9512 }, { "epoch": 1.750603976955956, "grad_norm": 0.8777282238006592, "learning_rate": 6.649994966825679e-06, "loss": 0.8825, "step": 9513 }, { "epoch": 1.7507898160193274, "grad_norm": 0.951866090297699, "learning_rate": 6.6395120507353415e-06, "loss": 0.9348, "step": 9514 }, { "epoch": 1.7509756550826983, "grad_norm": 0.6491940021514893, "learning_rate": 6.6290371199693395e-06, "loss": 0.9793, "step": 9515 }, { "epoch": 1.7511614941460696, "grad_norm": 0.6472504734992981, "learning_rate": 6.618570175423566e-06, "loss": 0.8149, "step": 9516 }, { "epoch": 1.7513473332094405, "grad_norm": 0.7329473495483398, "learning_rate": 6.608111217993329e-06, "loss": 0.9426, "step": 9517 }, { "epoch": 1.7515331722728118, "grad_norm": 0.6990534067153931, "learning_rate": 6.597660248573179e-06, "loss": 0.9525, "step": 9518 }, { "epoch": 1.751719011336183, "grad_norm": 0.7106050848960876, "learning_rate": 6.587217268057022e-06, "loss": 1.1132, "step": 9519 }, { "epoch": 1.751904850399554, "grad_norm": 0.7301816344261169, "learning_rate": 6.576782277338056e-06, "loss": 1.0255, "step": 9520 }, { "epoch": 1.7520906894629251, "grad_norm": 0.8306465148925781, "learning_rate": 6.566355277308811e-06, "loss": 0.9773, "step": 9521 }, { "epoch": 1.7522765285262962, "grad_norm": 0.6874080300331116, "learning_rate": 6.555936268861151e-06, "loss": 1.1446, "step": 9522 }, { "epoch": 1.7524623675896673, "grad_norm": 0.5944158434867859, "learning_rate": 6.545525252886198e-06, "loss": 0.8901, "step": 9523 }, { "epoch": 1.7526482066530384, "grad_norm": 0.7529304027557373, "learning_rate": 6.53512223027446e-06, "loss": 1.0603, "step": 9524 }, { "epoch": 
1.7528340457164096, "grad_norm": 0.6817454695701599, "learning_rate": 6.524727201915726e-06, "loss": 1.0123, "step": 9525 }, { "epoch": 1.7530198847797807, "grad_norm": 0.6408984065055847, "learning_rate": 6.514340168699107e-06, "loss": 0.8478, "step": 9526 }, { "epoch": 1.753205723843152, "grad_norm": 0.8707150816917419, "learning_rate": 6.5039611315130144e-06, "loss": 1.3053, "step": 9527 }, { "epoch": 1.7533915629065229, "grad_norm": 0.6480703353881836, "learning_rate": 6.493590091245194e-06, "loss": 1.0935, "step": 9528 }, { "epoch": 1.7535774019698942, "grad_norm": 0.6530660390853882, "learning_rate": 6.483227048782725e-06, "loss": 0.8194, "step": 9529 }, { "epoch": 1.753763241033265, "grad_norm": 1.7306596040725708, "learning_rate": 6.472872005011954e-06, "loss": 1.2105, "step": 9530 }, { "epoch": 1.7539490800966364, "grad_norm": 0.7162639498710632, "learning_rate": 6.462524960818583e-06, "loss": 1.0676, "step": 9531 }, { "epoch": 1.7541349191600073, "grad_norm": 0.6799525022506714, "learning_rate": 6.4521859170876055e-06, "loss": 0.9085, "step": 9532 }, { "epoch": 1.7543207582233786, "grad_norm": 0.8963913321495056, "learning_rate": 6.441854874703357e-06, "loss": 1.1685, "step": 9533 }, { "epoch": 1.7545065972867495, "grad_norm": 0.7606403231620789, "learning_rate": 6.4315318345494536e-06, "loss": 1.0158, "step": 9534 }, { "epoch": 1.7546924363501208, "grad_norm": 0.7327620983123779, "learning_rate": 6.421216797508866e-06, "loss": 0.9107, "step": 9535 }, { "epoch": 1.754878275413492, "grad_norm": 0.7143133878707886, "learning_rate": 6.4109097644638684e-06, "loss": 0.9722, "step": 9536 }, { "epoch": 1.755064114476863, "grad_norm": 0.8020583391189575, "learning_rate": 6.4006107362960195e-06, "loss": 1.2301, "step": 9537 }, { "epoch": 1.7552499535402342, "grad_norm": 0.6440826058387756, "learning_rate": 6.390319713886217e-06, "loss": 1.0175, "step": 9538 }, { "epoch": 1.7554357926036053, "grad_norm": 0.6304287910461426, "learning_rate": 6.3800366981147e-06, 
"loss": 0.9864, "step": 9539 }, { "epoch": 1.7556216316669764, "grad_norm": 0.7574819922447205, "learning_rate": 6.369761689860998e-06, "loss": 0.9718, "step": 9540 }, { "epoch": 1.7558074707303475, "grad_norm": 0.6932011246681213, "learning_rate": 6.35949469000392e-06, "loss": 0.96, "step": 9541 }, { "epoch": 1.7559933097937186, "grad_norm": 0.6913305521011353, "learning_rate": 6.3492356994216404e-06, "loss": 1.1127, "step": 9542 }, { "epoch": 1.7561791488570897, "grad_norm": 0.766208291053772, "learning_rate": 6.338984718991658e-06, "loss": 1.0068, "step": 9543 }, { "epoch": 1.756364987920461, "grad_norm": 0.6946398615837097, "learning_rate": 6.328741749590727e-06, "loss": 1.0393, "step": 9544 }, { "epoch": 1.756550826983832, "grad_norm": 0.6677290797233582, "learning_rate": 6.31850679209498e-06, "loss": 0.7805, "step": 9545 }, { "epoch": 1.7567366660472032, "grad_norm": 0.6138538718223572, "learning_rate": 6.308279847379806e-06, "loss": 0.8485, "step": 9546 }, { "epoch": 1.7569225051105741, "grad_norm": 0.733422577381134, "learning_rate": 6.298060916319959e-06, "loss": 0.9421, "step": 9547 }, { "epoch": 1.7571083441739455, "grad_norm": 0.6412607431411743, "learning_rate": 6.2878499997894635e-06, "loss": 0.9485, "step": 9548 }, { "epoch": 1.7572941832373163, "grad_norm": 0.6196644902229309, "learning_rate": 6.277647098661699e-06, "loss": 1.0333, "step": 9549 }, { "epoch": 1.7574800223006877, "grad_norm": 0.734294056892395, "learning_rate": 6.267452213809355e-06, "loss": 0.9258, "step": 9550 }, { "epoch": 1.7576658613640588, "grad_norm": 0.6560178995132446, "learning_rate": 6.2572653461043905e-06, "loss": 0.8529, "step": 9551 }, { "epoch": 1.7578517004274299, "grad_norm": 0.6958460211753845, "learning_rate": 6.247086496418142e-06, "loss": 1.0363, "step": 9552 }, { "epoch": 1.758037539490801, "grad_norm": 0.629332959651947, "learning_rate": 6.236915665621201e-06, "loss": 0.8877, "step": 9553 }, { "epoch": 1.758223378554172, "grad_norm": 0.8279165625572205, 
"learning_rate": 6.226752854583506e-06, "loss": 1.0771, "step": 9554 }, { "epoch": 1.7584092176175432, "grad_norm": 0.6355517506599426, "learning_rate": 6.216598064174306e-06, "loss": 0.8392, "step": 9555 }, { "epoch": 1.7585950566809143, "grad_norm": 0.7394723892211914, "learning_rate": 6.206451295262173e-06, "loss": 0.9233, "step": 9556 }, { "epoch": 1.7587808957442854, "grad_norm": 0.7708925604820251, "learning_rate": 6.196312548714989e-06, "loss": 1.1978, "step": 9557 }, { "epoch": 1.7589667348076565, "grad_norm": 0.7235886454582214, "learning_rate": 6.186181825399906e-06, "loss": 1.0615, "step": 9558 }, { "epoch": 1.7591525738710276, "grad_norm": 0.7010600566864014, "learning_rate": 6.176059126183475e-06, "loss": 0.8606, "step": 9559 }, { "epoch": 1.7593384129343987, "grad_norm": 0.7215830683708191, "learning_rate": 6.165944451931471e-06, "loss": 0.9703, "step": 9560 }, { "epoch": 1.75952425199777, "grad_norm": 0.6933256387710571, "learning_rate": 6.155837803509046e-06, "loss": 0.9035, "step": 9561 }, { "epoch": 1.759710091061141, "grad_norm": 0.823757529258728, "learning_rate": 6.14573918178063e-06, "loss": 0.9078, "step": 9562 }, { "epoch": 1.7598959301245123, "grad_norm": 0.5969639420509338, "learning_rate": 6.135648587609988e-06, "loss": 0.845, "step": 9563 }, { "epoch": 1.7600817691878832, "grad_norm": 0.6157955527305603, "learning_rate": 6.1255660218602074e-06, "loss": 1.0596, "step": 9564 }, { "epoch": 1.7602676082512545, "grad_norm": 0.723203718662262, "learning_rate": 6.115491485393632e-06, "loss": 0.9388, "step": 9565 }, { "epoch": 1.7604534473146254, "grad_norm": 0.8322511911392212, "learning_rate": 6.1054249790720055e-06, "loss": 0.9494, "step": 9566 }, { "epoch": 1.7606392863779967, "grad_norm": 0.6444146037101746, "learning_rate": 6.095366503756295e-06, "loss": 0.8682, "step": 9567 }, { "epoch": 1.7608251254413678, "grad_norm": 0.8847242593765259, "learning_rate": 6.085316060306834e-06, "loss": 1.0478, "step": 9568 }, { "epoch": 
1.761010964504739, "grad_norm": 0.6584118008613586, "learning_rate": 6.075273649583268e-06, "loss": 0.7521, "step": 9569 }, { "epoch": 1.76119680356811, "grad_norm": 0.6482993364334106, "learning_rate": 6.065239272444556e-06, "loss": 1.0018, "step": 9570 }, { "epoch": 1.7613826426314811, "grad_norm": 0.6682214736938477, "learning_rate": 6.055212929748955e-06, "loss": 0.9601, "step": 9571 }, { "epoch": 1.7615684816948523, "grad_norm": 0.6621394157409668, "learning_rate": 6.045194622354022e-06, "loss": 0.8458, "step": 9572 }, { "epoch": 1.7617543207582234, "grad_norm": 0.7191212177276611, "learning_rate": 6.035184351116663e-06, "loss": 0.9726, "step": 9573 }, { "epoch": 1.7619401598215945, "grad_norm": 0.6445138454437256, "learning_rate": 6.025182116893069e-06, "loss": 0.9465, "step": 9574 }, { "epoch": 1.7621259988849656, "grad_norm": 0.6561879515647888, "learning_rate": 6.015187920538767e-06, "loss": 0.8458, "step": 9575 }, { "epoch": 1.762311837948337, "grad_norm": 0.7514671683311462, "learning_rate": 6.005201762908552e-06, "loss": 1.0257, "step": 9576 }, { "epoch": 1.7624976770117078, "grad_norm": 0.6483860015869141, "learning_rate": 5.9952236448565735e-06, "loss": 0.9824, "step": 9577 }, { "epoch": 1.7626835160750791, "grad_norm": 0.6980741024017334, "learning_rate": 5.985253567236304e-06, "loss": 1.1466, "step": 9578 }, { "epoch": 1.76286935513845, "grad_norm": 0.6964401602745056, "learning_rate": 5.975291530900484e-06, "loss": 0.801, "step": 9579 }, { "epoch": 1.7630551942018213, "grad_norm": 0.6684138774871826, "learning_rate": 5.9653375367011985e-06, "loss": 0.7537, "step": 9580 }, { "epoch": 1.7632410332651922, "grad_norm": 0.7552790641784668, "learning_rate": 5.955391585489822e-06, "loss": 0.9026, "step": 9581 }, { "epoch": 1.7634268723285635, "grad_norm": 0.6640274524688721, "learning_rate": 5.945453678117063e-06, "loss": 1.0204, "step": 9582 }, { "epoch": 1.7636127113919344, "grad_norm": 0.6475071310997009, "learning_rate": 5.935523815432919e-06, "loss": 
0.8581, "step": 9583 }, { "epoch": 1.7637985504553058, "grad_norm": 0.7806620001792908, "learning_rate": 5.925601998286734e-06, "loss": 1.1294, "step": 9584 }, { "epoch": 1.7639843895186769, "grad_norm": 0.7511613965034485, "learning_rate": 5.915688227527139e-06, "loss": 1.1108, "step": 9585 }, { "epoch": 1.764170228582048, "grad_norm": 0.6426988840103149, "learning_rate": 5.905782504002055e-06, "loss": 0.6887, "step": 9586 }, { "epoch": 1.764356067645419, "grad_norm": 0.8487985730171204, "learning_rate": 5.8958848285587844e-06, "loss": 0.9581, "step": 9587 }, { "epoch": 1.7645419067087902, "grad_norm": 0.7105994820594788, "learning_rate": 5.885995202043848e-06, "loss": 0.9261, "step": 9588 }, { "epoch": 1.7647277457721613, "grad_norm": 0.8497649431228638, "learning_rate": 5.876113625303159e-06, "loss": 1.0863, "step": 9589 }, { "epoch": 1.7649135848355324, "grad_norm": 0.6333727240562439, "learning_rate": 5.866240099181886e-06, "loss": 0.8614, "step": 9590 }, { "epoch": 1.7650994238989035, "grad_norm": 1.783128261566162, "learning_rate": 5.856374624524552e-06, "loss": 1.5696, "step": 9591 }, { "epoch": 1.7652852629622746, "grad_norm": 0.6523680090904236, "learning_rate": 5.846517202174984e-06, "loss": 0.9532, "step": 9592 }, { "epoch": 1.765471102025646, "grad_norm": 1.0413211584091187, "learning_rate": 5.836667832976272e-06, "loss": 0.8771, "step": 9593 }, { "epoch": 1.7656569410890168, "grad_norm": 0.6950120329856873, "learning_rate": 5.8268265177709e-06, "loss": 0.7809, "step": 9594 }, { "epoch": 1.7658427801523882, "grad_norm": 0.7298243641853333, "learning_rate": 5.81699325740056e-06, "loss": 0.9626, "step": 9595 }, { "epoch": 1.766028619215759, "grad_norm": 0.7584399580955505, "learning_rate": 5.8071680527063575e-06, "loss": 1.0026, "step": 9596 }, { "epoch": 1.7662144582791304, "grad_norm": 0.6953753232955933, "learning_rate": 5.797350904528653e-06, "loss": 0.9389, "step": 9597 }, { "epoch": 1.7664002973425013, "grad_norm": 0.6708144545555115, 
"learning_rate": 5.78754181370712e-06, "loss": 1.1463, "step": 9598 }, { "epoch": 1.7665861364058726, "grad_norm": 0.6775829195976257, "learning_rate": 5.777740781080743e-06, "loss": 1.1084, "step": 9599 }, { "epoch": 1.7667719754692435, "grad_norm": 0.837083637714386, "learning_rate": 5.76794780748785e-06, "loss": 0.9402, "step": 9600 }, { "epoch": 1.7669578145326148, "grad_norm": 0.7145852446556091, "learning_rate": 5.75816289376605e-06, "loss": 1.1851, "step": 9601 }, { "epoch": 1.767143653595986, "grad_norm": 0.7356708645820618, "learning_rate": 5.748386040752252e-06, "loss": 0.9147, "step": 9602 }, { "epoch": 1.767329492659357, "grad_norm": 0.8315185904502869, "learning_rate": 5.738617249282707e-06, "loss": 1.1358, "step": 9603 }, { "epoch": 1.7675153317227281, "grad_norm": 0.6617395877838135, "learning_rate": 5.728856520192949e-06, "loss": 1.068, "step": 9604 }, { "epoch": 1.7677011707860992, "grad_norm": 0.8139854669570923, "learning_rate": 5.719103854317831e-06, "loss": 1.0914, "step": 9605 }, { "epoch": 1.7678870098494703, "grad_norm": 0.6905813813209534, "learning_rate": 5.709359252491553e-06, "loss": 0.9102, "step": 9606 }, { "epoch": 1.7680728489128414, "grad_norm": 0.7273473143577576, "learning_rate": 5.699622715547548e-06, "loss": 1.16, "step": 9607 }, { "epoch": 1.7682586879762126, "grad_norm": 0.7004194259643555, "learning_rate": 5.689894244318628e-06, "loss": 1.0497, "step": 9608 }, { "epoch": 1.7684445270395837, "grad_norm": 0.6936884522438049, "learning_rate": 5.680173839636882e-06, "loss": 0.9429, "step": 9609 }, { "epoch": 1.768630366102955, "grad_norm": 0.916488766670227, "learning_rate": 5.670461502333724e-06, "loss": 0.9076, "step": 9610 }, { "epoch": 1.7688162051663259, "grad_norm": 0.7607235908508301, "learning_rate": 5.6607572332398554e-06, "loss": 1.1614, "step": 9611 }, { "epoch": 1.7690020442296972, "grad_norm": 0.7022486925125122, "learning_rate": 5.651061033185323e-06, "loss": 0.8073, "step": 9612 }, { "epoch": 1.769187883293068, 
"grad_norm": 0.7221197485923767, "learning_rate": 5.641372902999442e-06, "loss": 0.8381, "step": 9613 }, { "epoch": 1.7693737223564394, "grad_norm": 0.8069917559623718, "learning_rate": 5.631692843510883e-06, "loss": 1.2511, "step": 9614 }, { "epoch": 1.7695595614198103, "grad_norm": 0.7064328193664551, "learning_rate": 5.622020855547605e-06, "loss": 1.1579, "step": 9615 }, { "epoch": 1.7697454004831816, "grad_norm": 0.8496193885803223, "learning_rate": 5.612356939936836e-06, "loss": 0.8325, "step": 9616 }, { "epoch": 1.7699312395465527, "grad_norm": 0.6425907015800476, "learning_rate": 5.602701097505203e-06, "loss": 1.0847, "step": 9617 }, { "epoch": 1.7701170786099238, "grad_norm": 0.7497144341468811, "learning_rate": 5.593053329078546e-06, "loss": 0.6886, "step": 9618 }, { "epoch": 1.770302917673295, "grad_norm": 0.6739358305931091, "learning_rate": 5.583413635482082e-06, "loss": 0.9889, "step": 9619 }, { "epoch": 1.770488756736666, "grad_norm": 0.5923432111740112, "learning_rate": 5.573782017540319e-06, "loss": 0.8585, "step": 9620 }, { "epoch": 1.7706745958000372, "grad_norm": 0.7298094630241394, "learning_rate": 5.564158476077053e-06, "loss": 0.8495, "step": 9621 }, { "epoch": 1.7708604348634083, "grad_norm": 0.6663339138031006, "learning_rate": 5.554543011915425e-06, "loss": 0.828, "step": 9622 }, { "epoch": 1.7710462739267794, "grad_norm": 0.7471387982368469, "learning_rate": 5.544935625877834e-06, "loss": 1.0049, "step": 9623 }, { "epoch": 1.7712321129901505, "grad_norm": 0.6680027842521667, "learning_rate": 5.535336318786077e-06, "loss": 1.1446, "step": 9624 }, { "epoch": 1.7714179520535218, "grad_norm": 0.8819547295570374, "learning_rate": 5.525745091461143e-06, "loss": 1.2994, "step": 9625 }, { "epoch": 1.7716037911168927, "grad_norm": 0.7097487449645996, "learning_rate": 5.516161944723419e-06, "loss": 0.9665, "step": 9626 }, { "epoch": 1.771789630180264, "grad_norm": 0.8968320488929749, "learning_rate": 5.506586879392572e-06, "loss": 1.1205, "step": 
9627 }, { "epoch": 1.771975469243635, "grad_norm": 0.7016741633415222, "learning_rate": 5.49701989628757e-06, "loss": 1.1043, "step": 9628 }, { "epoch": 1.7721613083070062, "grad_norm": 0.7549055814743042, "learning_rate": 5.4874609962267236e-06, "loss": 0.809, "step": 9629 }, { "epoch": 1.7723471473703771, "grad_norm": 0.6078971028327942, "learning_rate": 5.47791018002759e-06, "loss": 0.7538, "step": 9630 }, { "epoch": 1.7725329864337485, "grad_norm": 1.518277883529663, "learning_rate": 5.468367448507106e-06, "loss": 1.4723, "step": 9631 }, { "epoch": 1.7727188254971193, "grad_norm": 0.8478793501853943, "learning_rate": 5.4588328024814505e-06, "loss": 0.928, "step": 9632 }, { "epoch": 1.7729046645604907, "grad_norm": 0.874586284160614, "learning_rate": 5.44930624276615e-06, "loss": 0.8689, "step": 9633 }, { "epoch": 1.7730905036238618, "grad_norm": 0.7951120138168335, "learning_rate": 5.439787770176053e-06, "loss": 0.8361, "step": 9634 }, { "epoch": 1.7732763426872329, "grad_norm": 0.7073873281478882, "learning_rate": 5.430277385525273e-06, "loss": 1.2187, "step": 9635 }, { "epoch": 1.773462181750604, "grad_norm": 0.6185064315795898, "learning_rate": 5.420775089627272e-06, "loss": 0.8071, "step": 9636 }, { "epoch": 1.773648020813975, "grad_norm": 0.7715813517570496, "learning_rate": 5.411280883294767e-06, "loss": 0.9601, "step": 9637 }, { "epoch": 1.7738338598773462, "grad_norm": 0.6617690324783325, "learning_rate": 5.4017947673398625e-06, "loss": 1.1743, "step": 9638 }, { "epoch": 1.7740196989407173, "grad_norm": 0.7935189008712769, "learning_rate": 5.3923167425738885e-06, "loss": 1.0538, "step": 9639 }, { "epoch": 1.7742055380040884, "grad_norm": 0.7197004556655884, "learning_rate": 5.38284680980754e-06, "loss": 1.0795, "step": 9640 }, { "epoch": 1.7743913770674595, "grad_norm": 0.6420508027076721, "learning_rate": 5.373384969850792e-06, "loss": 1.0606, "step": 9641 }, { "epoch": 1.7745772161308309, "grad_norm": 0.9372122883796692, "learning_rate": 
5.363931223512964e-06, "loss": 0.8252, "step": 9642 }, { "epoch": 1.7747630551942017, "grad_norm": 0.6339817643165588, "learning_rate": 5.354485571602619e-06, "loss": 0.7044, "step": 9643 }, { "epoch": 1.774948894257573, "grad_norm": 0.6962215900421143, "learning_rate": 5.3450480149276785e-06, "loss": 0.9662, "step": 9644 }, { "epoch": 1.775134733320944, "grad_norm": 0.7110217213630676, "learning_rate": 5.335618554295374e-06, "loss": 0.9173, "step": 9645 }, { "epoch": 1.7753205723843153, "grad_norm": 0.7352878451347351, "learning_rate": 5.326197190512194e-06, "loss": 1.0549, "step": 9646 }, { "epoch": 1.7755064114476862, "grad_norm": 0.6918430328369141, "learning_rate": 5.316783924383983e-06, "loss": 0.8215, "step": 9647 }, { "epoch": 1.7756922505110575, "grad_norm": 0.7566372156143188, "learning_rate": 5.307378756715898e-06, "loss": 1.1456, "step": 9648 }, { "epoch": 1.7758780895744284, "grad_norm": 0.5607185363769531, "learning_rate": 5.297981688312348e-06, "loss": 0.5717, "step": 9649 }, { "epoch": 1.7760639286377997, "grad_norm": 0.788888156414032, "learning_rate": 5.288592719977114e-06, "loss": 0.8542, "step": 9650 }, { "epoch": 1.7762497677011708, "grad_norm": 0.648224949836731, "learning_rate": 5.279211852513233e-06, "loss": 0.5628, "step": 9651 }, { "epoch": 1.776435606764542, "grad_norm": 0.6493148803710938, "learning_rate": 5.269839086723094e-06, "loss": 0.8436, "step": 9652 }, { "epoch": 1.776621445827913, "grad_norm": 0.7417361736297607, "learning_rate": 5.260474423408346e-06, "loss": 0.9803, "step": 9653 }, { "epoch": 1.7768072848912841, "grad_norm": 0.8001540303230286, "learning_rate": 5.251117863369981e-06, "loss": 0.8131, "step": 9654 }, { "epoch": 1.7769931239546553, "grad_norm": 0.5939586162567139, "learning_rate": 5.241769407408293e-06, "loss": 1.0488, "step": 9655 }, { "epoch": 1.7771789630180264, "grad_norm": 0.7578622698783875, "learning_rate": 5.232429056322852e-06, "loss": 0.971, "step": 9656 }, { "epoch": 1.7773648020813975, "grad_norm": 
0.7863082885742188, "learning_rate": 5.223096810912576e-06, "loss": 0.9085, "step": 9657 }, { "epoch": 1.7775506411447686, "grad_norm": 0.8486718535423279, "learning_rate": 5.21377267197567e-06, "loss": 0.8571, "step": 9658 }, { "epoch": 1.77773648020814, "grad_norm": 0.97194504737854, "learning_rate": 5.204456640309663e-06, "loss": 0.6344, "step": 9659 }, { "epoch": 1.7779223192715108, "grad_norm": 0.7148550748825073, "learning_rate": 5.19514871671134e-06, "loss": 1.2341, "step": 9660 }, { "epoch": 1.7781081583348821, "grad_norm": 0.9144877195358276, "learning_rate": 5.185848901976853e-06, "loss": 1.1233, "step": 9661 }, { "epoch": 1.778293997398253, "grad_norm": 0.7061643004417419, "learning_rate": 5.1765571969016436e-06, "loss": 0.8772, "step": 9662 }, { "epoch": 1.7784798364616243, "grad_norm": 0.5985503196716309, "learning_rate": 5.167273602280421e-06, "loss": 1.1218, "step": 9663 }, { "epoch": 1.7786656755249952, "grad_norm": 0.7457888126373291, "learning_rate": 5.15799811890727e-06, "loss": 1.3084, "step": 9664 }, { "epoch": 1.7788515145883665, "grad_norm": 0.7234838008880615, "learning_rate": 5.148730747575492e-06, "loss": 0.8696, "step": 9665 }, { "epoch": 1.7790373536517374, "grad_norm": 0.7006774544715881, "learning_rate": 5.1394714890777965e-06, "loss": 0.8938, "step": 9666 }, { "epoch": 1.7792231927151088, "grad_norm": 0.6671582460403442, "learning_rate": 5.130220344206116e-06, "loss": 0.7923, "step": 9667 }, { "epoch": 1.7794090317784799, "grad_norm": 0.7441495060920715, "learning_rate": 5.120977313751718e-06, "loss": 0.8206, "step": 9668 }, { "epoch": 1.779594870841851, "grad_norm": 0.7187232971191406, "learning_rate": 5.111742398505215e-06, "loss": 0.7165, "step": 9669 }, { "epoch": 1.779780709905222, "grad_norm": 0.6727659702301025, "learning_rate": 5.102515599256441e-06, "loss": 0.7269, "step": 9670 }, { "epoch": 1.7799665489685932, "grad_norm": 0.6532821655273438, "learning_rate": 5.0932969167946095e-06, "loss": 0.8986, "step": 9671 }, { "epoch": 
1.7801523880319643, "grad_norm": 0.7378084659576416, "learning_rate": 5.084086351908224e-06, "loss": 1.0243, "step": 9672 }, { "epoch": 1.7803382270953354, "grad_norm": 0.7145583033561707, "learning_rate": 5.074883905385075e-06, "loss": 1.0555, "step": 9673 }, { "epoch": 1.7805240661587065, "grad_norm": 0.7014021873474121, "learning_rate": 5.065689578012267e-06, "loss": 0.9485, "step": 9674 }, { "epoch": 1.7807099052220776, "grad_norm": 0.6306982636451721, "learning_rate": 5.056503370576193e-06, "loss": 1.0009, "step": 9675 }, { "epoch": 1.780895744285449, "grad_norm": 0.7286544442176819, "learning_rate": 5.0473252838626025e-06, "loss": 1.0114, "step": 9676 }, { "epoch": 1.7810815833488198, "grad_norm": 0.7763290405273438, "learning_rate": 5.038155318656501e-06, "loss": 0.9, "step": 9677 }, { "epoch": 1.7812674224121912, "grad_norm": 0.6970409154891968, "learning_rate": 5.028993475742217e-06, "loss": 0.8089, "step": 9678 }, { "epoch": 1.781453261475562, "grad_norm": 0.6255539059638977, "learning_rate": 5.019839755903366e-06, "loss": 0.9643, "step": 9679 }, { "epoch": 1.7816391005389334, "grad_norm": 0.7349768877029419, "learning_rate": 5.0106941599229125e-06, "loss": 0.9415, "step": 9680 }, { "epoch": 1.7818249396023043, "grad_norm": 0.6985937356948853, "learning_rate": 5.0015566885830756e-06, "loss": 0.8242, "step": 9681 }, { "epoch": 1.7820107786656756, "grad_norm": 0.7383229732513428, "learning_rate": 4.992427342665406e-06, "loss": 1.0944, "step": 9682 }, { "epoch": 1.7821966177290467, "grad_norm": 0.7543425559997559, "learning_rate": 4.983306122950793e-06, "loss": 1.0926, "step": 9683 }, { "epoch": 1.7823824567924178, "grad_norm": 0.699912965297699, "learning_rate": 4.974193030219343e-06, "loss": 0.7168, "step": 9684 }, { "epoch": 1.782568295855789, "grad_norm": 0.6963754296302795, "learning_rate": 4.965088065250545e-06, "loss": 0.9581, "step": 9685 }, { "epoch": 1.78275413491916, "grad_norm": 0.8270363211631775, "learning_rate": 4.955991228823154e-06, "loss": 
0.9795, "step": 9686 }, { "epoch": 1.7829399739825311, "grad_norm": 0.6962025761604309, "learning_rate": 4.946902521715269e-06, "loss": 0.9428, "step": 9687 }, { "epoch": 1.7831258130459022, "grad_norm": 0.6201028823852539, "learning_rate": 4.9378219447042354e-06, "loss": 0.7519, "step": 9688 }, { "epoch": 1.7833116521092733, "grad_norm": 0.7408379316329956, "learning_rate": 4.928749498566743e-06, "loss": 1.0062, "step": 9689 }, { "epoch": 1.7834974911726444, "grad_norm": 0.6847137212753296, "learning_rate": 4.919685184078804e-06, "loss": 1.0721, "step": 9690 }, { "epoch": 1.7836833302360158, "grad_norm": 0.7941872477531433, "learning_rate": 4.910629002015665e-06, "loss": 0.7529, "step": 9691 }, { "epoch": 1.7838691692993867, "grad_norm": 0.7792496085166931, "learning_rate": 4.901580953151963e-06, "loss": 0.9921, "step": 9692 }, { "epoch": 1.784055008362758, "grad_norm": 0.5640096068382263, "learning_rate": 4.8925410382615555e-06, "loss": 0.7529, "step": 9693 }, { "epoch": 1.7842408474261289, "grad_norm": 0.8288540244102478, "learning_rate": 4.88350925811768e-06, "loss": 1.0118, "step": 9694 }, { "epoch": 1.7844266864895002, "grad_norm": 0.7848095297813416, "learning_rate": 4.87448561349283e-06, "loss": 1.1714, "step": 9695 }, { "epoch": 1.784612525552871, "grad_norm": 0.715578019618988, "learning_rate": 4.865470105158809e-06, "loss": 0.9999, "step": 9696 }, { "epoch": 1.7847983646162424, "grad_norm": 0.6751718521118164, "learning_rate": 4.856462733886757e-06, "loss": 0.8101, "step": 9697 }, { "epoch": 1.7849842036796133, "grad_norm": 0.7342599630355835, "learning_rate": 4.847463500447069e-06, "loss": 0.9857, "step": 9698 }, { "epoch": 1.7851700427429846, "grad_norm": 0.7690227627754211, "learning_rate": 4.838472405609495e-06, "loss": 1.0378, "step": 9699 }, { "epoch": 1.7853558818063557, "grad_norm": 0.6772790551185608, "learning_rate": 4.829489450143021e-06, "loss": 1.0879, "step": 9700 }, { "epoch": 1.7855417208697268, "grad_norm": 0.8678627014160156, 
"learning_rate": 4.82051463481602e-06, "loss": 1.079, "step": 9701 }, { "epoch": 1.785727559933098, "grad_norm": 0.6281694769859314, "learning_rate": 4.811547960396101e-06, "loss": 0.8617, "step": 9702 }, { "epoch": 1.785913398996469, "grad_norm": 0.7710700035095215, "learning_rate": 4.8025894276502285e-06, "loss": 0.7237, "step": 9703 }, { "epoch": 1.7860992380598402, "grad_norm": 0.7536906003952026, "learning_rate": 4.7936390373446325e-06, "loss": 0.6335, "step": 9704 }, { "epoch": 1.7862850771232113, "grad_norm": 0.93522709608078, "learning_rate": 4.784696790244847e-06, "loss": 0.979, "step": 9705 }, { "epoch": 1.7864709161865824, "grad_norm": 0.6946282386779785, "learning_rate": 4.775762687115748e-06, "loss": 0.9663, "step": 9706 }, { "epoch": 1.7866567552499535, "grad_norm": 0.6784142255783081, "learning_rate": 4.766836728721458e-06, "loss": 0.7839, "step": 9707 }, { "epoch": 1.7868425943133248, "grad_norm": 0.7739217281341553, "learning_rate": 4.757918915825455e-06, "loss": 0.9678, "step": 9708 }, { "epoch": 1.7870284333766957, "grad_norm": 0.6688413023948669, "learning_rate": 4.749009249190495e-06, "loss": 0.9067, "step": 9709 }, { "epoch": 1.787214272440067, "grad_norm": 0.7805230617523193, "learning_rate": 4.7401077295786244e-06, "loss": 0.8597, "step": 9710 }, { "epoch": 1.787400111503438, "grad_norm": 0.7463875412940979, "learning_rate": 4.731214357751245e-06, "loss": 1.0316, "step": 9711 }, { "epoch": 1.7875859505668092, "grad_norm": 0.7301071286201477, "learning_rate": 4.722329134468983e-06, "loss": 0.9194, "step": 9712 }, { "epoch": 1.7877717896301801, "grad_norm": 0.702477216720581, "learning_rate": 4.713452060491852e-06, "loss": 1.176, "step": 9713 }, { "epoch": 1.7879576286935515, "grad_norm": 0.6391510963439941, "learning_rate": 4.704583136579088e-06, "loss": 0.8577, "step": 9714 }, { "epoch": 1.7881434677569223, "grad_norm": 0.71745765209198, "learning_rate": 4.695722363489297e-06, "loss": 0.9414, "step": 9715 }, { "epoch": 1.7883293068202937, 
"grad_norm": 0.7245863676071167, "learning_rate": 4.6868697419803485e-06, "loss": 0.9282, "step": 9716 }, { "epoch": 1.7885151458836648, "grad_norm": 0.6648089289665222, "learning_rate": 4.678025272809427e-06, "loss": 0.9492, "step": 9717 }, { "epoch": 1.7887009849470359, "grad_norm": 0.7349907159805298, "learning_rate": 4.6691889567330395e-06, "loss": 1.0022, "step": 9718 }, { "epoch": 1.788886824010407, "grad_norm": 0.713378369808197, "learning_rate": 4.660360794506946e-06, "loss": 1.0444, "step": 9719 }, { "epoch": 1.789072663073778, "grad_norm": 0.8368687629699707, "learning_rate": 4.651540786886255e-06, "loss": 1.0523, "step": 9720 }, { "epoch": 1.7892585021371492, "grad_norm": 0.6994075775146484, "learning_rate": 4.6427289346253515e-06, "loss": 1.0559, "step": 9721 }, { "epoch": 1.7894443412005203, "grad_norm": 0.8998470306396484, "learning_rate": 4.633925238477943e-06, "loss": 1.0073, "step": 9722 }, { "epoch": 1.7896301802638914, "grad_norm": 0.7161475419998169, "learning_rate": 4.625129699197006e-06, "loss": 0.8975, "step": 9723 }, { "epoch": 1.7898160193272625, "grad_norm": 0.8403297662734985, "learning_rate": 4.616342317534861e-06, "loss": 0.8977, "step": 9724 }, { "epoch": 1.7900018583906339, "grad_norm": 0.6233807802200317, "learning_rate": 4.607563094243128e-06, "loss": 0.8097, "step": 9725 }, { "epoch": 1.7901876974540047, "grad_norm": 0.6614106297492981, "learning_rate": 4.598792030072674e-06, "loss": 1.1261, "step": 9726 }, { "epoch": 1.790373536517376, "grad_norm": 0.7223928570747375, "learning_rate": 4.590029125773742e-06, "loss": 0.748, "step": 9727 }, { "epoch": 1.790559375580747, "grad_norm": 0.6953086853027344, "learning_rate": 4.581274382095812e-06, "loss": 0.7081, "step": 9728 }, { "epoch": 1.7907452146441183, "grad_norm": 0.7670634984970093, "learning_rate": 4.572527799787718e-06, "loss": 0.954, "step": 9729 }, { "epoch": 1.7909310537074892, "grad_norm": 0.6398134827613831, "learning_rate": 4.5637893795975605e-06, "loss": 0.9984, "step": 
9730 }, { "epoch": 1.7911168927708605, "grad_norm": 0.5937197208404541, "learning_rate": 4.5550591222727645e-06, "loss": 0.8642, "step": 9731 }, { "epoch": 1.7913027318342316, "grad_norm": 1.3300977945327759, "learning_rate": 4.546337028560066e-06, "loss": 1.1381, "step": 9732 }, { "epoch": 1.7914885708976027, "grad_norm": 0.7279658317565918, "learning_rate": 4.537623099205435e-06, "loss": 0.819, "step": 9733 }, { "epoch": 1.7916744099609738, "grad_norm": 0.7543433308601379, "learning_rate": 4.5289173349542414e-06, "loss": 0.7391, "step": 9734 }, { "epoch": 1.791860249024345, "grad_norm": 0.678759753704071, "learning_rate": 4.520219736551079e-06, "loss": 1.0428, "step": 9735 }, { "epoch": 1.792046088087716, "grad_norm": 0.6893246173858643, "learning_rate": 4.5115303047398745e-06, "loss": 0.808, "step": 9736 }, { "epoch": 1.7922319271510871, "grad_norm": 0.6843549013137817, "learning_rate": 4.502849040263879e-06, "loss": 0.9483, "step": 9737 }, { "epoch": 1.7924177662144583, "grad_norm": 0.7032724618911743, "learning_rate": 4.494175943865597e-06, "loss": 1.0616, "step": 9738 }, { "epoch": 1.7926036052778294, "grad_norm": 0.6670497059822083, "learning_rate": 4.4855110162868585e-06, "loss": 0.9504, "step": 9739 }, { "epoch": 1.7927894443412005, "grad_norm": 0.6596825122833252, "learning_rate": 4.4768542582687925e-06, "loss": 0.992, "step": 9740 }, { "epoch": 1.7929752834045716, "grad_norm": 0.7123975157737732, "learning_rate": 4.468205670551851e-06, "loss": 0.8786, "step": 9741 }, { "epoch": 1.793161122467943, "grad_norm": 0.6945950984954834, "learning_rate": 4.45956525387573e-06, "loss": 0.967, "step": 9742 }, { "epoch": 1.7933469615313138, "grad_norm": 1.4450714588165283, "learning_rate": 4.4509330089794855e-06, "loss": 1.2641, "step": 9743 }, { "epoch": 1.7935328005946851, "grad_norm": 0.7171867489814758, "learning_rate": 4.442308936601458e-06, "loss": 0.9805, "step": 9744 }, { "epoch": 1.793718639658056, "grad_norm": 0.737531840801239, "learning_rate": 
4.4336930374792695e-06, "loss": 0.9138, "step": 9745 }, { "epoch": 1.7939044787214273, "grad_norm": 0.7215433120727539, "learning_rate": 4.425085312349853e-06, "loss": 0.9392, "step": 9746 }, { "epoch": 1.7940903177847982, "grad_norm": 0.752029299736023, "learning_rate": 4.416485761949462e-06, "loss": 0.997, "step": 9747 }, { "epoch": 1.7942761568481695, "grad_norm": 0.8215382695198059, "learning_rate": 4.407894387013634e-06, "loss": 0.8914, "step": 9748 }, { "epoch": 1.7944619959115407, "grad_norm": 0.7880039215087891, "learning_rate": 4.399311188277189e-06, "loss": 0.9539, "step": 9749 }, { "epoch": 1.7946478349749118, "grad_norm": 1.2010185718536377, "learning_rate": 4.390736166474285e-06, "loss": 1.1393, "step": 9750 }, { "epoch": 1.7948336740382829, "grad_norm": 0.5865445137023926, "learning_rate": 4.382169322338359e-06, "loss": 0.8907, "step": 9751 }, { "epoch": 1.795019513101654, "grad_norm": 0.7465519309043884, "learning_rate": 4.373610656602134e-06, "loss": 0.8727, "step": 9752 }, { "epoch": 1.795205352165025, "grad_norm": 0.5926641821861267, "learning_rate": 4.365060169997692e-06, "loss": 1.0208, "step": 9753 }, { "epoch": 1.7953911912283962, "grad_norm": 0.6432677507400513, "learning_rate": 4.356517863256326e-06, "loss": 0.9708, "step": 9754 }, { "epoch": 1.7955770302917673, "grad_norm": 0.7736765742301941, "learning_rate": 4.347983737108708e-06, "loss": 1.0175, "step": 9755 }, { "epoch": 1.7957628693551384, "grad_norm": 0.8660035729408264, "learning_rate": 4.339457792284762e-06, "loss": 0.8954, "step": 9756 }, { "epoch": 1.7959487084185097, "grad_norm": 0.7590531706809998, "learning_rate": 4.330940029513741e-06, "loss": 0.9476, "step": 9757 }, { "epoch": 1.7961345474818806, "grad_norm": 0.7696019411087036, "learning_rate": 4.322430449524206e-06, "loss": 1.1094, "step": 9758 }, { "epoch": 1.796320386545252, "grad_norm": 0.6668081879615784, "learning_rate": 4.313929053043963e-06, "loss": 0.8396, "step": 9759 }, { "epoch": 1.7965062256086228, "grad_norm": 
0.8675522804260254, "learning_rate": 4.305435840800176e-06, "loss": 0.8775, "step": 9760 }, { "epoch": 1.7966920646719942, "grad_norm": 0.8108303546905518, "learning_rate": 4.2969508135192844e-06, "loss": 0.8486, "step": 9761 }, { "epoch": 1.796877903735365, "grad_norm": 0.638525664806366, "learning_rate": 4.288473971927043e-06, "loss": 0.7874, "step": 9762 }, { "epoch": 1.7970637427987364, "grad_norm": 0.6676344871520996, "learning_rate": 4.2800053167484696e-06, "loss": 0.5854, "step": 9763 }, { "epoch": 1.7972495818621073, "grad_norm": 0.6011619567871094, "learning_rate": 4.27154484870792e-06, "loss": 0.8661, "step": 9764 }, { "epoch": 1.7974354209254786, "grad_norm": 1.2263431549072266, "learning_rate": 4.263092568529048e-06, "loss": 1.3577, "step": 9765 }, { "epoch": 1.7976212599888497, "grad_norm": 0.7756538391113281, "learning_rate": 4.254648476934775e-06, "loss": 0.9552, "step": 9766 }, { "epoch": 1.7978070990522208, "grad_norm": 1.5570555925369263, "learning_rate": 4.246212574647357e-06, "loss": 1.2383, "step": 9767 }, { "epoch": 1.797992938115592, "grad_norm": 0.8159297108650208, "learning_rate": 4.237784862388316e-06, "loss": 1.101, "step": 9768 }, { "epoch": 1.798178777178963, "grad_norm": 0.6934140920639038, "learning_rate": 4.229365340878522e-06, "loss": 1.1076, "step": 9769 }, { "epoch": 1.7983646162423341, "grad_norm": 0.7720450758934021, "learning_rate": 4.2209540108380855e-06, "loss": 0.9178, "step": 9770 }, { "epoch": 1.7985504553057052, "grad_norm": 1.274947166442871, "learning_rate": 4.2125508729864535e-06, "loss": 1.5571, "step": 9771 }, { "epoch": 1.7987362943690763, "grad_norm": 1.9692296981811523, "learning_rate": 4.204155928042386e-06, "loss": 1.3165, "step": 9772 }, { "epoch": 1.7989221334324474, "grad_norm": 0.726324200630188, "learning_rate": 4.195769176723885e-06, "loss": 1.0163, "step": 9773 }, { "epoch": 1.7991079724958188, "grad_norm": 1.0148183107376099, "learning_rate": 4.18739061974831e-06, "loss": 0.9745, "step": 9774 }, { 
"epoch": 1.7992938115591897, "grad_norm": 0.6964415907859802, "learning_rate": 4.1790202578323e-06, "loss": 1.0187, "step": 9775 }, { "epoch": 1.799479650622561, "grad_norm": 0.852936863899231, "learning_rate": 4.170658091691792e-06, "loss": 0.9487, "step": 9776 }, { "epoch": 1.7996654896859319, "grad_norm": 0.7964418530464172, "learning_rate": 4.162304122041994e-06, "loss": 1.15, "step": 9777 }, { "epoch": 1.7998513287493032, "grad_norm": 0.8701530694961548, "learning_rate": 4.153958349597464e-06, "loss": 1.078, "step": 9778 }, { "epoch": 1.800037167812674, "grad_norm": 1.0016974210739136, "learning_rate": 4.1456207750720345e-06, "loss": 0.9747, "step": 9779 }, { "epoch": 1.8002230068760454, "grad_norm": 0.7461830377578735, "learning_rate": 4.137291399178822e-06, "loss": 1.0308, "step": 9780 }, { "epoch": 1.8004088459394163, "grad_norm": 0.6315507292747498, "learning_rate": 4.128970222630268e-06, "loss": 1.0419, "step": 9781 }, { "epoch": 1.8005946850027876, "grad_norm": 0.7336019277572632, "learning_rate": 4.120657246138093e-06, "loss": 0.7364, "step": 9782 }, { "epoch": 1.8007805240661587, "grad_norm": 0.8575476408004761, "learning_rate": 4.112352470413328e-06, "loss": 1.1506, "step": 9783 }, { "epoch": 1.8009663631295298, "grad_norm": 0.6964014768600464, "learning_rate": 4.104055896166292e-06, "loss": 1.0312, "step": 9784 }, { "epoch": 1.801152202192901, "grad_norm": 0.6191890239715576, "learning_rate": 4.095767524106609e-06, "loss": 0.8681, "step": 9785 }, { "epoch": 1.801338041256272, "grad_norm": 0.74542635679245, "learning_rate": 4.08748735494322e-06, "loss": 0.783, "step": 9786 }, { "epoch": 1.8015238803196432, "grad_norm": 0.7950938940048218, "learning_rate": 4.079215389384316e-06, "loss": 1.082, "step": 9787 }, { "epoch": 1.8017097193830143, "grad_norm": 0.6605604887008667, "learning_rate": 4.070951628137443e-06, "loss": 1.0312, "step": 9788 }, { "epoch": 1.8018955584463854, "grad_norm": 0.6296193599700928, "learning_rate": 4.062696071909411e-06, "loss": 
0.5754, "step": 9789 }, { "epoch": 1.8020813975097565, "grad_norm": 0.6792443990707397, "learning_rate": 4.054448721406312e-06, "loss": 0.6941, "step": 9790 }, { "epoch": 1.8022672365731278, "grad_norm": 0.7101089954376221, "learning_rate": 4.046209577333593e-06, "loss": 0.8607, "step": 9791 }, { "epoch": 1.8024530756364987, "grad_norm": 0.6400145888328552, "learning_rate": 4.037978640395945e-06, "loss": 0.7641, "step": 9792 }, { "epoch": 1.80263891469987, "grad_norm": 0.6590439677238464, "learning_rate": 4.029755911297395e-06, "loss": 1.0334, "step": 9793 }, { "epoch": 1.802824753763241, "grad_norm": 0.8242725133895874, "learning_rate": 4.021541390741235e-06, "loss": 0.9977, "step": 9794 }, { "epoch": 1.8030105928266122, "grad_norm": 0.7872090935707092, "learning_rate": 4.013335079430092e-06, "loss": 0.8787, "step": 9795 }, { "epoch": 1.8031964318899831, "grad_norm": 0.8022633194923401, "learning_rate": 4.005136978065837e-06, "loss": 1.0688, "step": 9796 }, { "epoch": 1.8033822709533545, "grad_norm": 0.7343721389770508, "learning_rate": 3.996947087349701e-06, "loss": 1.0336, "step": 9797 }, { "epoch": 1.8035681100167256, "grad_norm": 0.684739351272583, "learning_rate": 3.9887654079821536e-06, "loss": 1.0992, "step": 9798 }, { "epoch": 1.8037539490800967, "grad_norm": 0.6524844169616699, "learning_rate": 3.980591940663015e-06, "loss": 0.9778, "step": 9799 }, { "epoch": 1.8039397881434678, "grad_norm": 0.7692164778709412, "learning_rate": 3.9724266860913795e-06, "loss": 0.7201, "step": 9800 }, { "epoch": 1.8041256272068389, "grad_norm": 0.7136658430099487, "learning_rate": 3.964269644965623e-06, "loss": 0.9806, "step": 9801 }, { "epoch": 1.80431146627021, "grad_norm": 0.6302587985992432, "learning_rate": 3.956120817983455e-06, "loss": 0.9819, "step": 9802 }, { "epoch": 1.804497305333581, "grad_norm": 0.8235353827476501, "learning_rate": 3.947980205841839e-06, "loss": 0.9021, "step": 9803 }, { "epoch": 1.8046831443969522, "grad_norm": 0.6519436836242676, 
"learning_rate": 3.9398478092370736e-06, "loss": 0.95, "step": 9804 }, { "epoch": 1.8048689834603233, "grad_norm": 0.6274929046630859, "learning_rate": 3.9317236288647365e-06, "loss": 1.0314, "step": 9805 }, { "epoch": 1.8050548225236946, "grad_norm": 0.9879053235054016, "learning_rate": 3.923607665419715e-06, "loss": 0.9081, "step": 9806 }, { "epoch": 1.8052406615870655, "grad_norm": 0.7791699767112732, "learning_rate": 3.915499919596189e-06, "loss": 0.7968, "step": 9807 }, { "epoch": 1.8054265006504369, "grad_norm": 0.7316449284553528, "learning_rate": 3.907400392087602e-06, "loss": 0.9871, "step": 9808 }, { "epoch": 1.8056123397138077, "grad_norm": 0.7337790131568909, "learning_rate": 3.899309083586755e-06, "loss": 1.1018, "step": 9809 }, { "epoch": 1.805798178777179, "grad_norm": 0.766534149646759, "learning_rate": 3.891225994785697e-06, "loss": 1.0126, "step": 9810 }, { "epoch": 1.80598401784055, "grad_norm": 0.6639066338539124, "learning_rate": 3.883151126375806e-06, "loss": 0.9444, "step": 9811 }, { "epoch": 1.8061698569039213, "grad_norm": 0.7706999182701111, "learning_rate": 3.87508447904773e-06, "loss": 1.094, "step": 9812 }, { "epoch": 1.8063556959672922, "grad_norm": 0.5971929430961609, "learning_rate": 3.867026053491418e-06, "loss": 0.8445, "step": 9813 }, { "epoch": 1.8065415350306635, "grad_norm": 0.7434088587760925, "learning_rate": 3.8589758503961625e-06, "loss": 0.7663, "step": 9814 }, { "epoch": 1.8067273740940346, "grad_norm": 0.9167700409889221, "learning_rate": 3.850933870450479e-06, "loss": 1.1226, "step": 9815 }, { "epoch": 1.8069132131574057, "grad_norm": 0.6300912499427795, "learning_rate": 3.8429001143422296e-06, "loss": 0.8259, "step": 9816 }, { "epoch": 1.8070990522207768, "grad_norm": 0.6689139008522034, "learning_rate": 3.834874582758552e-06, "loss": 1.0018, "step": 9817 }, { "epoch": 1.807284891284148, "grad_norm": 0.7586479187011719, "learning_rate": 3.826857276385886e-06, "loss": 1.0716, "step": 9818 }, { "epoch": 
1.807470730347519, "grad_norm": 0.7082575559616089, "learning_rate": 3.8188481959099745e-06, "loss": 1.0471, "step": 9819 }, { "epoch": 1.8076565694108901, "grad_norm": 0.7369147539138794, "learning_rate": 3.810847342015855e-06, "loss": 0.769, "step": 9820 }, { "epoch": 1.8078424084742613, "grad_norm": 0.686251699924469, "learning_rate": 3.8028547153878714e-06, "loss": 0.736, "step": 9821 }, { "epoch": 1.8080282475376324, "grad_norm": 0.7864113450050354, "learning_rate": 3.79487031670962e-06, "loss": 0.7086, "step": 9822 }, { "epoch": 1.8082140866010037, "grad_norm": 0.6438609957695007, "learning_rate": 3.786894146664044e-06, "loss": 0.7527, "step": 9823 }, { "epoch": 1.8083999256643746, "grad_norm": 0.6810690760612488, "learning_rate": 3.778926205933342e-06, "loss": 1.1615, "step": 9824 }, { "epoch": 1.808585764727746, "grad_norm": 0.653982400894165, "learning_rate": 3.7709664951990575e-06, "loss": 0.7498, "step": 9825 }, { "epoch": 1.8087716037911168, "grad_norm": 0.7026686072349548, "learning_rate": 3.7630150151419684e-06, "loss": 1.0246, "step": 9826 }, { "epoch": 1.8089574428544881, "grad_norm": 0.7423397302627563, "learning_rate": 3.7550717664422085e-06, "loss": 1.0985, "step": 9827 }, { "epoch": 1.809143281917859, "grad_norm": 0.5977106094360352, "learning_rate": 3.7471367497791897e-06, "loss": 0.8143, "step": 9828 }, { "epoch": 1.8093291209812303, "grad_norm": 0.6815504431724548, "learning_rate": 3.7392099658315693e-06, "loss": 1.1765, "step": 9829 }, { "epoch": 1.8095149600446012, "grad_norm": 0.8144717216491699, "learning_rate": 3.7312914152773936e-06, "loss": 1.1033, "step": 9830 }, { "epoch": 1.8097007991079725, "grad_norm": 0.6317072510719299, "learning_rate": 3.72338109879391e-06, "loss": 0.7315, "step": 9831 }, { "epoch": 1.8098866381713437, "grad_norm": 0.69041907787323, "learning_rate": 3.7154790170577103e-06, "loss": 0.8003, "step": 9832 }, { "epoch": 1.8100724772347148, "grad_norm": 0.7883708477020264, "learning_rate": 3.7075851707446983e-06, 
"loss": 0.7498, "step": 9833 }, { "epoch": 1.8102583162980859, "grad_norm": 0.6976882815361023, "learning_rate": 3.6996995605300567e-06, "loss": 0.9999, "step": 9834 }, { "epoch": 1.810444155361457, "grad_norm": 0.7721940279006958, "learning_rate": 3.6918221870882342e-06, "loss": 0.801, "step": 9835 }, { "epoch": 1.810629994424828, "grad_norm": 0.6680358052253723, "learning_rate": 3.6839530510930032e-06, "loss": 0.8228, "step": 9836 }, { "epoch": 1.8108158334881992, "grad_norm": 0.7693148255348206, "learning_rate": 3.6760921532174474e-06, "loss": 0.9878, "step": 9837 }, { "epoch": 1.8110016725515703, "grad_norm": 0.7048506140708923, "learning_rate": 3.668239494133896e-06, "loss": 0.9883, "step": 9838 }, { "epoch": 1.8111875116149414, "grad_norm": 0.6518003940582275, "learning_rate": 3.6603950745140447e-06, "loss": 0.9176, "step": 9839 }, { "epoch": 1.8113733506783127, "grad_norm": 0.6439449787139893, "learning_rate": 3.6525588950288013e-06, "loss": 0.7398, "step": 9840 }, { "epoch": 1.8115591897416836, "grad_norm": 1.1814019680023193, "learning_rate": 3.64473095634843e-06, "loss": 1.2524, "step": 9841 }, { "epoch": 1.811745028805055, "grad_norm": 0.6845316290855408, "learning_rate": 3.636911259142484e-06, "loss": 0.9213, "step": 9842 }, { "epoch": 1.8119308678684258, "grad_norm": 0.9367323517799377, "learning_rate": 3.6290998040797717e-06, "loss": 1.0116, "step": 9843 }, { "epoch": 1.8121167069317972, "grad_norm": 0.7118209004402161, "learning_rate": 3.621296591828449e-06, "loss": 1.0242, "step": 9844 }, { "epoch": 1.812302545995168, "grad_norm": 0.7774336934089661, "learning_rate": 3.613501623055926e-06, "loss": 0.734, "step": 9845 }, { "epoch": 1.8124883850585394, "grad_norm": 0.6717697978019714, "learning_rate": 3.605714898428936e-06, "loss": 1.1021, "step": 9846 }, { "epoch": 1.8126742241219103, "grad_norm": 0.7385362386703491, "learning_rate": 3.597936418613468e-06, "loss": 0.8702, "step": 9847 }, { "epoch": 1.8128600631852816, "grad_norm": 0.7260059714317322, 
"learning_rate": 3.5901661842748568e-06, "loss": 0.9295, "step": 9848 }, { "epoch": 1.8130459022486527, "grad_norm": 0.6407851576805115, "learning_rate": 3.5824041960777044e-06, "loss": 0.8742, "step": 9849 }, { "epoch": 1.8132317413120238, "grad_norm": 0.7481780052185059, "learning_rate": 3.5746504546859018e-06, "loss": 0.9837, "step": 9850 }, { "epoch": 1.813417580375395, "grad_norm": 0.694119930267334, "learning_rate": 3.5669049607626514e-06, "loss": 1.2178, "step": 9851 }, { "epoch": 1.813603419438766, "grad_norm": 0.8012346029281616, "learning_rate": 3.5591677149704462e-06, "loss": 1.0699, "step": 9852 }, { "epoch": 1.8137892585021371, "grad_norm": 0.7269610166549683, "learning_rate": 3.551438717971056e-06, "loss": 0.9231, "step": 9853 }, { "epoch": 1.8139750975655082, "grad_norm": 0.7896559834480286, "learning_rate": 3.543717970425564e-06, "loss": 0.9874, "step": 9854 }, { "epoch": 1.8141609366288793, "grad_norm": 0.6833961606025696, "learning_rate": 3.5360054729943413e-06, "loss": 1.008, "step": 9855 }, { "epoch": 1.8143467756922504, "grad_norm": 0.6516629457473755, "learning_rate": 3.528301226337072e-06, "loss": 0.9614, "step": 9856 }, { "epoch": 1.8145326147556218, "grad_norm": 0.8365222215652466, "learning_rate": 3.5206052311126836e-06, "loss": 1.0754, "step": 9857 }, { "epoch": 1.8147184538189927, "grad_norm": 0.7373721599578857, "learning_rate": 3.5129174879794613e-06, "loss": 1.0566, "step": 9858 }, { "epoch": 1.814904292882364, "grad_norm": 0.7686201333999634, "learning_rate": 3.5052379975949343e-06, "loss": 1.1492, "step": 9859 }, { "epoch": 1.8150901319457349, "grad_norm": 0.7509794235229492, "learning_rate": 3.497566760615967e-06, "loss": 1.1087, "step": 9860 }, { "epoch": 1.8152759710091062, "grad_norm": 1.1377304792404175, "learning_rate": 3.4899037776986665e-06, "loss": 1.2943, "step": 9861 }, { "epoch": 1.815461810072477, "grad_norm": 0.789901077747345, "learning_rate": 3.482249049498487e-06, "loss": 1.0101, "step": 9862 }, { "epoch": 
1.8156476491358484, "grad_norm": 0.6904792785644531, "learning_rate": 3.4746025766701494e-06, "loss": 0.7317, "step": 9863 }, { "epoch": 1.8158334881992195, "grad_norm": 0.7771925926208496, "learning_rate": 3.4669643598676637e-06, "loss": 0.9515, "step": 9864 }, { "epoch": 1.8160193272625906, "grad_norm": 0.8037922978401184, "learning_rate": 3.459334399744374e-06, "loss": 0.9012, "step": 9865 }, { "epoch": 1.8162051663259617, "grad_norm": 0.8191158175468445, "learning_rate": 3.4517126969528467e-06, "loss": 1.1604, "step": 9866 }, { "epoch": 1.8163910053893328, "grad_norm": 0.6841832399368286, "learning_rate": 3.4440992521450057e-06, "loss": 1.0259, "step": 9867 }, { "epoch": 1.816576844452704, "grad_norm": 0.6980661749839783, "learning_rate": 3.43649406597204e-06, "loss": 0.7243, "step": 9868 }, { "epoch": 1.816762683516075, "grad_norm": 0.7261941432952881, "learning_rate": 3.4288971390844303e-06, "loss": 1.0282, "step": 9869 }, { "epoch": 1.8169485225794462, "grad_norm": 0.7119371891021729, "learning_rate": 3.4213084721319897e-06, "loss": 1.15, "step": 9870 }, { "epoch": 1.8171343616428173, "grad_norm": 0.7503767013549805, "learning_rate": 3.413728065763744e-06, "loss": 0.8418, "step": 9871 }, { "epoch": 1.8173202007061886, "grad_norm": 0.6611210703849792, "learning_rate": 3.406155920628107e-06, "loss": 0.8486, "step": 9872 }, { "epoch": 1.8175060397695595, "grad_norm": 0.689490020275116, "learning_rate": 3.3985920373727053e-06, "loss": 0.8954, "step": 9873 }, { "epoch": 1.8176918788329308, "grad_norm": 0.6549000144004822, "learning_rate": 3.391036416644522e-06, "loss": 0.9076, "step": 9874 }, { "epoch": 1.8178777178963017, "grad_norm": 0.8170062899589539, "learning_rate": 3.383489059089784e-06, "loss": 1.0304, "step": 9875 }, { "epoch": 1.818063556959673, "grad_norm": 0.572019636631012, "learning_rate": 3.3759499653540416e-06, "loss": 0.7253, "step": 9876 }, { "epoch": 1.818249396023044, "grad_norm": 0.8576362729072571, "learning_rate": 3.368419136082135e-06, 
"loss": 0.9564, "step": 9877 }, { "epoch": 1.8184352350864152, "grad_norm": 0.7417992353439331, "learning_rate": 3.360896571918182e-06, "loss": 1.0617, "step": 9878 }, { "epoch": 1.8186210741497861, "grad_norm": 0.6697123050689697, "learning_rate": 3.353382273505623e-06, "loss": 0.7978, "step": 9879 }, { "epoch": 1.8188069132131575, "grad_norm": 0.6738744378089905, "learning_rate": 3.345876241487145e-06, "loss": 0.8131, "step": 9880 }, { "epoch": 1.8189927522765286, "grad_norm": 0.6736224889755249, "learning_rate": 3.338378476504789e-06, "loss": 0.966, "step": 9881 }, { "epoch": 1.8191785913398997, "grad_norm": 0.656453549861908, "learning_rate": 3.330888979199809e-06, "loss": 0.8023, "step": 9882 }, { "epoch": 1.8193644304032708, "grad_norm": 0.5946424603462219, "learning_rate": 3.3234077502128367e-06, "loss": 0.7506, "step": 9883 }, { "epoch": 1.819550269466642, "grad_norm": 0.813317596912384, "learning_rate": 3.31593479018375e-06, "loss": 1.0371, "step": 9884 }, { "epoch": 1.819736108530013, "grad_norm": 0.6244372129440308, "learning_rate": 3.308470099751715e-06, "loss": 0.8708, "step": 9885 }, { "epoch": 1.819921947593384, "grad_norm": 0.8620980978012085, "learning_rate": 3.3010136795552204e-06, "loss": 1.0695, "step": 9886 }, { "epoch": 1.8201077866567552, "grad_norm": 0.7761400938034058, "learning_rate": 3.2935655302320123e-06, "loss": 1.0793, "step": 9887 }, { "epoch": 1.8202936257201263, "grad_norm": 0.6685424447059631, "learning_rate": 3.2861256524191586e-06, "loss": 0.9213, "step": 9888 }, { "epoch": 1.8204794647834976, "grad_norm": 0.6758712530136108, "learning_rate": 3.2786940467529948e-06, "loss": 0.9071, "step": 9889 }, { "epoch": 1.8206653038468685, "grad_norm": 0.6849507689476013, "learning_rate": 3.271270713869179e-06, "loss": 0.9324, "step": 9890 }, { "epoch": 1.8208511429102399, "grad_norm": 0.8326185345649719, "learning_rate": 3.2638556544026368e-06, "loss": 1.1884, "step": 9891 }, { "epoch": 1.8210369819736107, "grad_norm": 0.6872798800468445, 
"learning_rate": 3.256448868987594e-06, "loss": 1.0787, "step": 9892 }, { "epoch": 1.821222821036982, "grad_norm": 0.6884906888008118, "learning_rate": 3.249050358257566e-06, "loss": 0.8761, "step": 9893 }, { "epoch": 1.821408660100353, "grad_norm": 0.8486963510513306, "learning_rate": 3.241660122845358e-06, "loss": 1.1537, "step": 9894 }, { "epoch": 1.8215944991637243, "grad_norm": 0.6911030411720276, "learning_rate": 3.2342781633831085e-06, "loss": 1.03, "step": 9895 }, { "epoch": 1.8217803382270952, "grad_norm": 0.7132589817047119, "learning_rate": 3.2269044805021577e-06, "loss": 0.7436, "step": 9896 }, { "epoch": 1.8219661772904665, "grad_norm": 0.6569371223449707, "learning_rate": 3.2195390748332334e-06, "loss": 0.9463, "step": 9897 }, { "epoch": 1.8221520163538376, "grad_norm": 0.6883752942085266, "learning_rate": 3.2121819470063097e-06, "loss": 0.9598, "step": 9898 }, { "epoch": 1.8223378554172087, "grad_norm": 0.7526514530181885, "learning_rate": 3.204833097650639e-06, "loss": 1.0459, "step": 9899 }, { "epoch": 1.8225236944805798, "grad_norm": 0.7786809206008911, "learning_rate": 3.1974925273948075e-06, "loss": 0.9149, "step": 9900 }, { "epoch": 1.822709533543951, "grad_norm": 0.6994304060935974, "learning_rate": 3.190160236866646e-06, "loss": 1.1353, "step": 9901 }, { "epoch": 1.822895372607322, "grad_norm": 0.6753819584846497, "learning_rate": 3.182836226693331e-06, "loss": 0.6098, "step": 9902 }, { "epoch": 1.8230812116706931, "grad_norm": 0.6040153503417969, "learning_rate": 3.175520497501261e-06, "loss": 1.0412, "step": 9903 }, { "epoch": 1.8232670507340643, "grad_norm": 0.7435270547866821, "learning_rate": 3.168213049916191e-06, "loss": 0.9567, "step": 9904 }, { "epoch": 1.8234528897974354, "grad_norm": 0.6494448184967041, "learning_rate": 3.1609138845631546e-06, "loss": 0.9767, "step": 9905 }, { "epoch": 1.8236387288608067, "grad_norm": 0.7443634271621704, "learning_rate": 3.1536230020664417e-06, "loss": 1.1566, "step": 9906 }, { "epoch": 
1.8238245679241776, "grad_norm": 0.7014687657356262, "learning_rate": 3.146340403049652e-06, "loss": 0.9532, "step": 9907 }, { "epoch": 1.824010406987549, "grad_norm": 0.6410042643547058, "learning_rate": 3.1390660881357116e-06, "loss": 0.7681, "step": 9908 }, { "epoch": 1.8241962460509198, "grad_norm": 0.6442815065383911, "learning_rate": 3.131800057946799e-06, "loss": 0.8326, "step": 9909 }, { "epoch": 1.8243820851142911, "grad_norm": 0.801694929599762, "learning_rate": 3.1245423131043726e-06, "loss": 1.0109, "step": 9910 }, { "epoch": 1.824567924177662, "grad_norm": 0.6718851327896118, "learning_rate": 3.1172928542292146e-06, "loss": 0.7381, "step": 9911 }, { "epoch": 1.8247537632410333, "grad_norm": 0.6806093454360962, "learning_rate": 3.110051681941406e-06, "loss": 1.0502, "step": 9912 }, { "epoch": 1.8249396023044044, "grad_norm": 0.654414176940918, "learning_rate": 3.102818796860274e-06, "loss": 0.9833, "step": 9913 }, { "epoch": 1.8251254413677755, "grad_norm": 0.8000136017799377, "learning_rate": 3.09559419960449e-06, "loss": 0.9632, "step": 9914 }, { "epoch": 1.8253112804311467, "grad_norm": 0.7904422283172607, "learning_rate": 3.088377890791949e-06, "loss": 1.1118, "step": 9915 }, { "epoch": 1.8254971194945178, "grad_norm": 0.8170812726020813, "learning_rate": 3.081169871039913e-06, "loss": 0.862, "step": 9916 }, { "epoch": 1.8256829585578889, "grad_norm": 0.7572363615036011, "learning_rate": 3.073970140964877e-06, "loss": 0.9306, "step": 9917 }, { "epoch": 1.82586879762126, "grad_norm": 0.7189555764198303, "learning_rate": 3.066778701182671e-06, "loss": 1.0578, "step": 9918 }, { "epoch": 1.826054636684631, "grad_norm": 0.7715607285499573, "learning_rate": 3.059595552308381e-06, "loss": 0.9815, "step": 9919 }, { "epoch": 1.8262404757480022, "grad_norm": 0.5737166404724121, "learning_rate": 3.0524206949563926e-06, "loss": 0.7259, "step": 9920 }, { "epoch": 1.8264263148113733, "grad_norm": 0.7895978093147278, "learning_rate": 3.0452541297403934e-06, 
"loss": 1.0323, "step": 9921 }, { "epoch": 1.8266121538747444, "grad_norm": 0.8224542140960693, "learning_rate": 3.038095857273349e-06, "loss": 1.0439, "step": 9922 }, { "epoch": 1.8267979929381157, "grad_norm": 0.7269798517227173, "learning_rate": 3.030945878167546e-06, "loss": 0.983, "step": 9923 }, { "epoch": 1.8269838320014866, "grad_norm": 0.6962180137634277, "learning_rate": 3.0238041930345073e-06, "loss": 0.9832, "step": 9924 }, { "epoch": 1.827169671064858, "grad_norm": 0.8408883810043335, "learning_rate": 3.0166708024850886e-06, "loss": 1.1521, "step": 9925 }, { "epoch": 1.8273555101282288, "grad_norm": 0.6679197549819946, "learning_rate": 3.009545707129435e-06, "loss": 0.7788, "step": 9926 }, { "epoch": 1.8275413491916002, "grad_norm": 0.6073801517486572, "learning_rate": 3.0024289075769596e-06, "loss": 0.9097, "step": 9927 }, { "epoch": 1.827727188254971, "grad_norm": 0.6985334157943726, "learning_rate": 2.9953204044363747e-06, "loss": 0.8337, "step": 9928 }, { "epoch": 1.8279130273183424, "grad_norm": 0.6692867279052734, "learning_rate": 2.988220198315683e-06, "loss": 0.8872, "step": 9929 }, { "epoch": 1.8280988663817135, "grad_norm": 0.6330978274345398, "learning_rate": 2.9811282898222104e-06, "loss": 0.9072, "step": 9930 }, { "epoch": 1.8282847054450846, "grad_norm": 0.7436206340789795, "learning_rate": 2.974044679562493e-06, "loss": 1.1705, "step": 9931 }, { "epoch": 1.8284705445084557, "grad_norm": 0.6733962893486023, "learning_rate": 2.966969368142447e-06, "loss": 0.7876, "step": 9932 }, { "epoch": 1.8286563835718268, "grad_norm": 0.7703623175621033, "learning_rate": 2.959902356167232e-06, "loss": 0.9789, "step": 9933 }, { "epoch": 1.828842222635198, "grad_norm": 0.6624138355255127, "learning_rate": 2.9528436442412988e-06, "loss": 0.9694, "step": 9934 }, { "epoch": 1.829028061698569, "grad_norm": 0.666533887386322, "learning_rate": 2.945793232968386e-06, "loss": 1.1035, "step": 9935 }, { "epoch": 1.8292139007619401, "grad_norm": 0.7200380563735962, 
"learning_rate": 2.938751122951544e-06, "loss": 0.9698, "step": 9936 }, { "epoch": 1.8293997398253112, "grad_norm": 0.7723873853683472, "learning_rate": 2.931717314793092e-06, "loss": 0.7781, "step": 9937 }, { "epoch": 1.8295855788886826, "grad_norm": 0.7704063653945923, "learning_rate": 2.9246918090946597e-06, "loss": 1.1694, "step": 9938 }, { "epoch": 1.8297714179520534, "grad_norm": 0.6921645998954773, "learning_rate": 2.9176746064571327e-06, "loss": 1.096, "step": 9939 }, { "epoch": 1.8299572570154248, "grad_norm": 0.6649049520492554, "learning_rate": 2.9106657074807307e-06, "loss": 0.5551, "step": 9940 }, { "epoch": 1.8301430960787957, "grad_norm": 0.7728575468063354, "learning_rate": 2.9036651127649194e-06, "loss": 0.9851, "step": 9941 }, { "epoch": 1.830328935142167, "grad_norm": 0.6400600671768188, "learning_rate": 2.896672822908497e-06, "loss": 0.8924, "step": 9942 }, { "epoch": 1.8305147742055379, "grad_norm": 0.8135813474655151, "learning_rate": 2.8896888385094967e-06, "loss": 1.0684, "step": 9943 }, { "epoch": 1.8307006132689092, "grad_norm": 0.6735711693763733, "learning_rate": 2.8827131601653178e-06, "loss": 1.0137, "step": 9944 }, { "epoch": 1.83088645233228, "grad_norm": 0.6968306303024292, "learning_rate": 2.8757457884725613e-06, "loss": 1.0254, "step": 9945 }, { "epoch": 1.8310722913956514, "grad_norm": 0.7960559725761414, "learning_rate": 2.8687867240271725e-06, "loss": 0.8744, "step": 9946 }, { "epoch": 1.8312581304590225, "grad_norm": 0.8474821448326111, "learning_rate": 2.861835967424409e-06, "loss": 1.1025, "step": 9947 }, { "epoch": 1.8314439695223936, "grad_norm": 0.6721470355987549, "learning_rate": 2.8548935192587388e-06, "loss": 0.8398, "step": 9948 }, { "epoch": 1.8316298085857647, "grad_norm": 0.9784024357795715, "learning_rate": 2.847959380123999e-06, "loss": 0.7279, "step": 9949 }, { "epoch": 1.8318156476491358, "grad_norm": 0.6895228624343872, "learning_rate": 2.841033550613259e-06, "loss": 1.0313, "step": 9950 }, { "epoch": 
1.832001486712507, "grad_norm": 0.6543493270874023, "learning_rate": 2.8341160313189006e-06, "loss": 0.5196, "step": 9951 }, { "epoch": 1.832187325775878, "grad_norm": 0.6853628158569336, "learning_rate": 2.8272068228325955e-06, "loss": 0.9682, "step": 9952 }, { "epoch": 1.8323731648392492, "grad_norm": 0.8264276385307312, "learning_rate": 2.8203059257453256e-06, "loss": 1.2192, "step": 9953 }, { "epoch": 1.8325590039026203, "grad_norm": 0.7728658318519592, "learning_rate": 2.8134133406473197e-06, "loss": 1.0588, "step": 9954 }, { "epoch": 1.8327448429659916, "grad_norm": 0.8136929273605347, "learning_rate": 2.8065290681281164e-06, "loss": 0.8082, "step": 9955 }, { "epoch": 1.8329306820293625, "grad_norm": 0.7946676015853882, "learning_rate": 2.799653108776545e-06, "loss": 0.8515, "step": 9956 }, { "epoch": 1.8331165210927338, "grad_norm": 0.5548973083496094, "learning_rate": 2.7927854631807247e-06, "loss": 0.6857, "step": 9957 }, { "epoch": 1.8333023601561047, "grad_norm": 0.7363007664680481, "learning_rate": 2.7859261319280515e-06, "loss": 0.8401, "step": 9958 }, { "epoch": 1.833488199219476, "grad_norm": 0.6322947144508362, "learning_rate": 2.779075115605223e-06, "loss": 0.8207, "step": 9959 }, { "epoch": 1.833674038282847, "grad_norm": 0.7248316407203674, "learning_rate": 2.772232414798226e-06, "loss": 1.0542, "step": 9960 }, { "epoch": 1.8338598773462182, "grad_norm": 0.8802450299263, "learning_rate": 2.7653980300923254e-06, "loss": 1.0695, "step": 9961 }, { "epoch": 1.8340457164095891, "grad_norm": 0.7538386583328247, "learning_rate": 2.7585719620720765e-06, "loss": 0.9221, "step": 9962 }, { "epoch": 1.8342315554729605, "grad_norm": 0.7812655568122864, "learning_rate": 2.7517542113213445e-06, "loss": 0.9761, "step": 9963 }, { "epoch": 1.8344173945363316, "grad_norm": 0.6933215260505676, "learning_rate": 2.744944778423253e-06, "loss": 0.8904, "step": 9964 }, { "epoch": 1.8346032335997027, "grad_norm": 0.6729377508163452, "learning_rate": 
2.7381436639602243e-06, "loss": 0.9443, "step": 9965 }, { "epoch": 1.8347890726630738, "grad_norm": 0.7871366739273071, "learning_rate": 2.731350868513982e-06, "loss": 1.0416, "step": 9966 }, { "epoch": 1.834974911726445, "grad_norm": 0.679682731628418, "learning_rate": 2.7245663926655174e-06, "loss": 1.0339, "step": 9967 }, { "epoch": 1.835160750789816, "grad_norm": 0.7425009608268738, "learning_rate": 2.7177902369951436e-06, "loss": 0.9609, "step": 9968 }, { "epoch": 1.835346589853187, "grad_norm": 0.8477035760879517, "learning_rate": 2.7110224020824194e-06, "loss": 1.0835, "step": 9969 }, { "epoch": 1.8355324289165582, "grad_norm": 0.8120266795158386, "learning_rate": 2.704262888506226e-06, "loss": 0.875, "step": 9970 }, { "epoch": 1.8357182679799293, "grad_norm": 0.7462162375450134, "learning_rate": 2.697511696844701e-06, "loss": 0.9639, "step": 9971 }, { "epoch": 1.8359041070433006, "grad_norm": 0.777911365032196, "learning_rate": 2.6907688276753053e-06, "loss": 0.9999, "step": 9972 }, { "epoch": 1.8360899461066715, "grad_norm": 0.6171411871910095, "learning_rate": 2.6840342815747544e-06, "loss": 0.7703, "step": 9973 }, { "epoch": 1.8362757851700429, "grad_norm": 1.0107492208480835, "learning_rate": 2.677308059119088e-06, "loss": 1.3989, "step": 9974 }, { "epoch": 1.8364616242334137, "grad_norm": 0.7087421417236328, "learning_rate": 2.6705901608836017e-06, "loss": 0.9501, "step": 9975 }, { "epoch": 1.836647463296785, "grad_norm": 0.7753424048423767, "learning_rate": 2.6638805874428907e-06, "loss": 0.803, "step": 9976 }, { "epoch": 1.836833302360156, "grad_norm": 1.1736202239990234, "learning_rate": 2.657179339370863e-06, "loss": 0.6303, "step": 9977 }, { "epoch": 1.8370191414235273, "grad_norm": 0.7461515665054321, "learning_rate": 2.6504864172406497e-06, "loss": 0.9204, "step": 9978 }, { "epoch": 1.8372049804868984, "grad_norm": 0.664656937122345, "learning_rate": 2.6438018216247475e-06, "loss": 0.8246, "step": 9979 }, { "epoch": 1.8373908195502695, 
"grad_norm": 0.7617626786231995, "learning_rate": 2.637125553094877e-06, "loss": 1.0333, "step": 9980 }, { "epoch": 1.8375766586136406, "grad_norm": 0.7563158869743347, "learning_rate": 2.6304576122221035e-06, "loss": 0.9878, "step": 9981 }, { "epoch": 1.8377624976770117, "grad_norm": 0.6394928693771362, "learning_rate": 2.6237979995767157e-06, "loss": 1.0379, "step": 9982 }, { "epoch": 1.8379483367403828, "grad_norm": 0.7006260752677917, "learning_rate": 2.6171467157283467e-06, "loss": 1.0053, "step": 9983 }, { "epoch": 1.838134175803754, "grad_norm": 0.8882328271865845, "learning_rate": 2.6105037612459083e-06, "loss": 1.1598, "step": 9984 }, { "epoch": 1.838320014867125, "grad_norm": 1.0692800283432007, "learning_rate": 2.6038691366975566e-06, "loss": 0.8886, "step": 9985 }, { "epoch": 1.8385058539304961, "grad_norm": 0.5800960063934326, "learning_rate": 2.5972428426507823e-06, "loss": 0.9888, "step": 9986 }, { "epoch": 1.8386916929938675, "grad_norm": 0.7504416108131409, "learning_rate": 2.590624879672332e-06, "loss": 0.9861, "step": 9987 }, { "epoch": 1.8388775320572384, "grad_norm": 1.5852302312850952, "learning_rate": 2.5840152483282752e-06, "loss": 1.3195, "step": 9988 }, { "epoch": 1.8390633711206097, "grad_norm": 0.7464103102684021, "learning_rate": 2.5774139491839376e-06, "loss": 0.9331, "step": 9989 }, { "epoch": 1.8392492101839806, "grad_norm": 0.6646602153778076, "learning_rate": 2.5708209828039343e-06, "loss": 0.9741, "step": 9990 }, { "epoch": 1.839435049247352, "grad_norm": 0.6763034462928772, "learning_rate": 2.5642363497522025e-06, "loss": 0.9705, "step": 9991 }, { "epoch": 1.8396208883107228, "grad_norm": 0.6655982136726379, "learning_rate": 2.5576600505919035e-06, "loss": 0.8737, "step": 9992 }, { "epoch": 1.8398067273740941, "grad_norm": 0.6391465663909912, "learning_rate": 2.5510920858855535e-06, "loss": 0.7289, "step": 9993 }, { "epoch": 1.839992566437465, "grad_norm": 0.7988792061805725, "learning_rate": 2.5445324561949035e-06, "loss": 
0.8365, "step": 9994 }, { "epoch": 1.8401784055008363, "grad_norm": 0.7054262161254883, "learning_rate": 2.537981162081027e-06, "loss": 0.8371, "step": 9995 }, { "epoch": 1.8403642445642074, "grad_norm": 0.6635208129882812, "learning_rate": 2.531438204104253e-06, "loss": 0.8408, "step": 9996 }, { "epoch": 1.8405500836275785, "grad_norm": 0.7707086801528931, "learning_rate": 2.524903582824234e-06, "loss": 0.9308, "step": 9997 }, { "epoch": 1.8407359226909497, "grad_norm": 0.6752921938896179, "learning_rate": 2.51837729879989e-06, "loss": 1.0843, "step": 9998 }, { "epoch": 1.8409217617543208, "grad_norm": 0.6770755052566528, "learning_rate": 2.5118593525894184e-06, "loss": 0.7495, "step": 9999 }, { "epoch": 1.8411076008176919, "grad_norm": 0.5796447992324829, "learning_rate": 2.5053497447503183e-06, "loss": 0.6934, "step": 10000 }, { "epoch": 1.841293439881063, "grad_norm": 0.6556915640830994, "learning_rate": 2.498848475839355e-06, "loss": 0.9931, "step": 10001 }, { "epoch": 1.841479278944434, "grad_norm": 0.734487771987915, "learning_rate": 2.492355546412617e-06, "loss": 0.8293, "step": 10002 }, { "epoch": 1.8416651180078052, "grad_norm": 1.2783101797103882, "learning_rate": 2.4858709570254603e-06, "loss": 1.2197, "step": 10003 }, { "epoch": 1.8418509570711765, "grad_norm": 0.8434581756591797, "learning_rate": 2.4793947082324964e-06, "loss": 0.8842, "step": 10004 }, { "epoch": 1.8420367961345474, "grad_norm": 0.8138322234153748, "learning_rate": 2.4729268005876938e-06, "loss": 1.1372, "step": 10005 }, { "epoch": 1.8422226351979187, "grad_norm": 0.69370436668396, "learning_rate": 2.466467234644232e-06, "loss": 0.9559, "step": 10006 }, { "epoch": 1.8424084742612896, "grad_norm": 0.7971329689025879, "learning_rate": 2.4600160109546354e-06, "loss": 0.9585, "step": 10007 }, { "epoch": 1.842594313324661, "grad_norm": 0.8003334999084473, "learning_rate": 2.453573130070674e-06, "loss": 0.7161, "step": 10008 }, { "epoch": 1.8427801523880318, "grad_norm": 0.6565207839012146, 
"learning_rate": 2.4471385925434297e-06, "loss": 0.916, "step": 10009 }, { "epoch": 1.8429659914514032, "grad_norm": 0.6413012742996216, "learning_rate": 2.4407123989232507e-06, "loss": 0.9176, "step": 10010 }, { "epoch": 1.843151830514774, "grad_norm": 0.587217390537262, "learning_rate": 2.4342945497598078e-06, "loss": 0.7268, "step": 10011 }, { "epoch": 1.8433376695781454, "grad_norm": 0.9451626539230347, "learning_rate": 2.4278850456020184e-06, "loss": 1.0721, "step": 10012 }, { "epoch": 1.8435235086415165, "grad_norm": 0.8284534215927124, "learning_rate": 2.4214838869980994e-06, "loss": 0.8944, "step": 10013 }, { "epoch": 1.8437093477048876, "grad_norm": 0.8125157952308655, "learning_rate": 2.415091074495557e-06, "loss": 1.2481, "step": 10014 }, { "epoch": 1.8438951867682587, "grad_norm": 1.239067792892456, "learning_rate": 2.4087066086411867e-06, "loss": 1.1899, "step": 10015 }, { "epoch": 1.8440810258316298, "grad_norm": 0.6294758915901184, "learning_rate": 2.402330489981053e-06, "loss": 0.7415, "step": 10016 }, { "epoch": 1.844266864895001, "grad_norm": 0.6115023493766785, "learning_rate": 2.3959627190605404e-06, "loss": 0.8013, "step": 10017 }, { "epoch": 1.844452703958372, "grad_norm": 0.6057900786399841, "learning_rate": 2.389603296424281e-06, "loss": 0.7154, "step": 10018 }, { "epoch": 1.8446385430217431, "grad_norm": 0.6940528154373169, "learning_rate": 2.383252222616228e-06, "loss": 0.759, "step": 10019 }, { "epoch": 1.8448243820851142, "grad_norm": 1.202257513999939, "learning_rate": 2.3769094981795693e-06, "loss": 1.3982, "step": 10020 }, { "epoch": 1.8450102211484856, "grad_norm": 0.6999146342277527, "learning_rate": 2.3705751236568485e-06, "loss": 0.8744, "step": 10021 }, { "epoch": 1.8451960602118564, "grad_norm": 0.7464430332183838, "learning_rate": 2.3642490995898327e-06, "loss": 1.4102, "step": 10022 }, { "epoch": 1.8453818992752278, "grad_norm": 0.7458065748214722, "learning_rate": 2.3579314265196104e-06, "loss": 0.9248, "step": 10023 }, { 
"epoch": 1.8455677383385987, "grad_norm": 0.5382136702537537, "learning_rate": 2.3516221049865505e-06, "loss": 0.5963, "step": 10024 }, { "epoch": 1.84575357740197, "grad_norm": 0.7321708798408508, "learning_rate": 2.3453211355302874e-06, "loss": 0.8908, "step": 10025 }, { "epoch": 1.8459394164653409, "grad_norm": 0.6686316132545471, "learning_rate": 2.3390285186897896e-06, "loss": 0.8337, "step": 10026 }, { "epoch": 1.8461252555287122, "grad_norm": 0.7071788311004639, "learning_rate": 2.3327442550032385e-06, "loss": 0.938, "step": 10027 }, { "epoch": 1.846311094592083, "grad_norm": 0.6526303291320801, "learning_rate": 2.3264683450081594e-06, "loss": 0.7184, "step": 10028 }, { "epoch": 1.8464969336554544, "grad_norm": 0.736538827419281, "learning_rate": 2.3202007892413447e-06, "loss": 0.8133, "step": 10029 }, { "epoch": 1.8466827727188255, "grad_norm": 0.7265169024467468, "learning_rate": 2.313941588238866e-06, "loss": 0.9624, "step": 10030 }, { "epoch": 1.8468686117821966, "grad_norm": 0.8249513506889343, "learning_rate": 2.3076907425361062e-06, "loss": 1.13, "step": 10031 }, { "epoch": 1.8470544508455677, "grad_norm": 0.6018231511116028, "learning_rate": 2.3014482526676816e-06, "loss": 0.678, "step": 10032 }, { "epoch": 1.8472402899089388, "grad_norm": 0.6337421536445618, "learning_rate": 2.2952141191675547e-06, "loss": 1.1592, "step": 10033 }, { "epoch": 1.84742612897231, "grad_norm": 0.7989595532417297, "learning_rate": 2.2889883425689206e-06, "loss": 0.9037, "step": 10034 }, { "epoch": 1.847611968035681, "grad_norm": 0.6029747128486633, "learning_rate": 2.2827709234043092e-06, "loss": 0.7932, "step": 10035 }, { "epoch": 1.8477978070990522, "grad_norm": 0.6751794815063477, "learning_rate": 2.276561862205473e-06, "loss": 0.8837, "step": 10036 }, { "epoch": 1.8479836461624233, "grad_norm": 0.7136726379394531, "learning_rate": 2.27036115950352e-06, "loss": 0.8024, "step": 10037 }, { "epoch": 1.8481694852257946, "grad_norm": 0.6392678618431091, "learning_rate": 
2.2641688158288043e-06, "loss": 0.7938, "step": 10038 }, { "epoch": 1.8483553242891655, "grad_norm": 0.6552032232284546, "learning_rate": 2.2579848317109575e-06, "loss": 0.8467, "step": 10039 }, { "epoch": 1.8485411633525368, "grad_norm": 0.6480730772018433, "learning_rate": 2.2518092076789234e-06, "loss": 0.9873, "step": 10040 }, { "epoch": 1.8487270024159077, "grad_norm": 0.6986692547798157, "learning_rate": 2.2456419442609012e-06, "loss": 0.9678, "step": 10041 }, { "epoch": 1.848912841479279, "grad_norm": 0.7117049098014832, "learning_rate": 2.2394830419844027e-06, "loss": 0.9666, "step": 10042 }, { "epoch": 1.84909868054265, "grad_norm": 0.6958870887756348, "learning_rate": 2.233332501376206e-06, "loss": 0.9391, "step": 10043 }, { "epoch": 1.8492845196060212, "grad_norm": 0.6736997961997986, "learning_rate": 2.22719032296238e-06, "loss": 0.7636, "step": 10044 }, { "epoch": 1.8494703586693924, "grad_norm": 0.5893682241439819, "learning_rate": 2.221056507268293e-06, "loss": 0.6087, "step": 10045 }, { "epoch": 1.8496561977327635, "grad_norm": 0.7475652694702148, "learning_rate": 2.2149310548185586e-06, "loss": 1.0171, "step": 10046 }, { "epoch": 1.8498420367961346, "grad_norm": 0.580328643321991, "learning_rate": 2.2088139661371244e-06, "loss": 0.9603, "step": 10047 }, { "epoch": 1.8500278758595057, "grad_norm": 0.8390524983406067, "learning_rate": 2.2027052417471715e-06, "loss": 1.0896, "step": 10048 }, { "epoch": 1.8502137149228768, "grad_norm": 0.7339285612106323, "learning_rate": 2.196604882171216e-06, "loss": 1.2479, "step": 10049 }, { "epoch": 1.850399553986248, "grad_norm": 0.6563944816589355, "learning_rate": 2.190512887931018e-06, "loss": 0.908, "step": 10050 }, { "epoch": 1.850585393049619, "grad_norm": 0.6774222254753113, "learning_rate": 2.184429259547649e-06, "loss": 0.7921, "step": 10051 }, { "epoch": 1.85077123211299, "grad_norm": 0.7838877439498901, "learning_rate": 2.1783539975414714e-06, "loss": 0.8798, "step": 10052 }, { "epoch": 
1.8509570711763614, "grad_norm": 0.6545433402061462, "learning_rate": 2.172287102432069e-06, "loss": 0.9829, "step": 10053 }, { "epoch": 1.8511429102397323, "grad_norm": 0.7077997326850891, "learning_rate": 2.1662285747383936e-06, "loss": 1.0176, "step": 10054 }, { "epoch": 1.8513287493031036, "grad_norm": 0.6887453198432922, "learning_rate": 2.160178414978642e-06, "loss": 0.826, "step": 10055 }, { "epoch": 1.8515145883664745, "grad_norm": 0.7139452695846558, "learning_rate": 2.1541366236702887e-06, "loss": 0.8449, "step": 10056 }, { "epoch": 1.8517004274298459, "grad_norm": 0.8103654980659485, "learning_rate": 2.1481032013300984e-06, "loss": 1.1509, "step": 10057 }, { "epoch": 1.8518862664932167, "grad_norm": 0.634610116481781, "learning_rate": 2.1420781484741247e-06, "loss": 0.9645, "step": 10058 }, { "epoch": 1.852072105556588, "grad_norm": 0.7717024683952332, "learning_rate": 2.1360614656177115e-06, "loss": 0.841, "step": 10059 }, { "epoch": 1.852257944619959, "grad_norm": 0.6939625144004822, "learning_rate": 2.1300531532754686e-06, "loss": 0.8912, "step": 10060 }, { "epoch": 1.8524437836833303, "grad_norm": 0.5884217023849487, "learning_rate": 2.1240532119613078e-06, "loss": 0.7692, "step": 10061 }, { "epoch": 1.8526296227467014, "grad_norm": 0.5974510312080383, "learning_rate": 2.118061642188418e-06, "loss": 0.7782, "step": 10062 }, { "epoch": 1.8528154618100725, "grad_norm": 0.5739233493804932, "learning_rate": 2.1120784444692675e-06, "loss": 0.7937, "step": 10063 }, { "epoch": 1.8530013008734436, "grad_norm": 0.7020597457885742, "learning_rate": 2.1061036193156025e-06, "loss": 0.9993, "step": 10064 }, { "epoch": 1.8531871399368147, "grad_norm": 0.9987313151359558, "learning_rate": 2.1001371672384697e-06, "loss": 1.2808, "step": 10065 }, { "epoch": 1.8533729790001858, "grad_norm": 0.7735684514045715, "learning_rate": 2.0941790887482047e-06, "loss": 1.062, "step": 10066 }, { "epoch": 1.853558818063557, "grad_norm": 0.6970610618591309, "learning_rate": 
2.0882293843543897e-06, "loss": 0.7506, "step": 10067 }, { "epoch": 1.853744657126928, "grad_norm": 0.6600317358970642, "learning_rate": 2.0822880545659396e-06, "loss": 0.8805, "step": 10068 }, { "epoch": 1.8539304961902991, "grad_norm": 0.7148259878158569, "learning_rate": 2.0763550998910143e-06, "loss": 1.0151, "step": 10069 }, { "epoch": 1.8541163352536705, "grad_norm": 0.720138430595398, "learning_rate": 2.0704305208370857e-06, "loss": 1.0692, "step": 10070 }, { "epoch": 1.8543021743170414, "grad_norm": 0.9224472045898438, "learning_rate": 2.064514317910893e-06, "loss": 1.1559, "step": 10071 }, { "epoch": 1.8544880133804127, "grad_norm": 0.9005986452102661, "learning_rate": 2.058606491618442e-06, "loss": 1.0002, "step": 10072 }, { "epoch": 1.8546738524437836, "grad_norm": 0.6716923713684082, "learning_rate": 2.0527070424650742e-06, "loss": 0.8971, "step": 10073 }, { "epoch": 1.854859691507155, "grad_norm": 0.8251639604568481, "learning_rate": 2.0468159709553513e-06, "loss": 0.8688, "step": 10074 }, { "epoch": 1.8550455305705258, "grad_norm": 0.7305351495742798, "learning_rate": 2.040933277593171e-06, "loss": 0.9406, "step": 10075 }, { "epoch": 1.8552313696338971, "grad_norm": 0.8185998797416687, "learning_rate": 2.035058962881675e-06, "loss": 0.9181, "step": 10076 }, { "epoch": 1.855417208697268, "grad_norm": 0.5533907413482666, "learning_rate": 2.0291930273233394e-06, "loss": 0.7119, "step": 10077 }, { "epoch": 1.8556030477606393, "grad_norm": 0.7022377848625183, "learning_rate": 2.0233354714198406e-06, "loss": 1.1283, "step": 10078 }, { "epoch": 1.8557888868240104, "grad_norm": 0.7600376605987549, "learning_rate": 2.017486295672222e-06, "loss": 1.0736, "step": 10079 }, { "epoch": 1.8559747258873815, "grad_norm": 0.7274320721626282, "learning_rate": 2.011645500580772e-06, "loss": 1.1651, "step": 10080 }, { "epoch": 1.8561605649507527, "grad_norm": 0.8164176940917969, "learning_rate": 2.0058130866450695e-06, "loss": 1.2344, "step": 10081 }, { "epoch": 
1.8563464040141238, "grad_norm": 0.8189756870269775, "learning_rate": 1.9999890543639487e-06, "loss": 1.0574, "step": 10082 }, { "epoch": 1.8565322430774949, "grad_norm": 0.5922825932502747, "learning_rate": 1.9941734042355886e-06, "loss": 0.6764, "step": 10083 }, { "epoch": 1.856718082140866, "grad_norm": 0.7183148860931396, "learning_rate": 1.9883661367573803e-06, "loss": 1.0216, "step": 10084 }, { "epoch": 1.856903921204237, "grad_norm": 0.6784137487411499, "learning_rate": 1.9825672524260598e-06, "loss": 0.8048, "step": 10085 }, { "epoch": 1.8570897602676082, "grad_norm": 0.6123964786529541, "learning_rate": 1.9767767517375966e-06, "loss": 1.0757, "step": 10086 }, { "epoch": 1.8572755993309795, "grad_norm": 0.9567644000053406, "learning_rate": 1.9709946351872845e-06, "loss": 0.8913, "step": 10087 }, { "epoch": 1.8574614383943504, "grad_norm": 0.6350728869438171, "learning_rate": 1.96522090326966e-06, "loss": 0.8999, "step": 10088 }, { "epoch": 1.8576472774577217, "grad_norm": 0.6837267279624939, "learning_rate": 1.9594555564785844e-06, "loss": 0.8036, "step": 10089 }, { "epoch": 1.8578331165210926, "grad_norm": 0.7758045196533203, "learning_rate": 1.9536985953071516e-06, "loss": 1.1897, "step": 10090 }, { "epoch": 1.858018955584464, "grad_norm": 0.7234291434288025, "learning_rate": 1.947950020247802e-06, "loss": 0.9989, "step": 10091 }, { "epoch": 1.8582047946478348, "grad_norm": 0.7876737117767334, "learning_rate": 1.9422098317921854e-06, "loss": 0.9718, "step": 10092 }, { "epoch": 1.8583906337112062, "grad_norm": 0.8611382842063904, "learning_rate": 1.936478030431299e-06, "loss": 0.927, "step": 10093 }, { "epoch": 1.8585764727745773, "grad_norm": 0.6509695649147034, "learning_rate": 1.9307546166554056e-06, "loss": 1.0372, "step": 10094 }, { "epoch": 1.8587623118379484, "grad_norm": 0.7223415970802307, "learning_rate": 1.925039590954014e-06, "loss": 0.988, "step": 10095 }, { "epoch": 1.8589481509013195, "grad_norm": 0.6797177195549011, "learning_rate": 
1.9193329538159553e-06, "loss": 0.8761, "step": 10096 }, { "epoch": 1.8591339899646906, "grad_norm": 0.5940880179405212, "learning_rate": 1.913634705729328e-06, "loss": 0.7282, "step": 10097 }, { "epoch": 1.8593198290280617, "grad_norm": 0.7168911695480347, "learning_rate": 1.9079448471815087e-06, "loss": 1.055, "step": 10098 }, { "epoch": 1.8595056680914328, "grad_norm": 0.5971741080284119, "learning_rate": 1.9022633786591747e-06, "loss": 0.9821, "step": 10099 }, { "epoch": 1.859691507154804, "grad_norm": 0.7200136780738831, "learning_rate": 1.896590300648271e-06, "loss": 1.1599, "step": 10100 }, { "epoch": 1.859877346218175, "grad_norm": 0.676017701625824, "learning_rate": 1.8909256136340425e-06, "loss": 1.0941, "step": 10101 }, { "epoch": 1.8600631852815461, "grad_norm": 0.7950423359870911, "learning_rate": 1.8852693181009795e-06, "loss": 1.0326, "step": 10102 }, { "epoch": 1.8602490243449172, "grad_norm": 0.6871570348739624, "learning_rate": 1.8796214145328838e-06, "loss": 0.9171, "step": 10103 }, { "epoch": 1.8604348634082886, "grad_norm": 0.7822986841201782, "learning_rate": 1.8739819034128359e-06, "loss": 1.051, "step": 10104 }, { "epoch": 1.8606207024716594, "grad_norm": 0.6993398666381836, "learning_rate": 1.868350785223194e-06, "loss": 0.9718, "step": 10105 }, { "epoch": 1.8608065415350308, "grad_norm": 1.4518990516662598, "learning_rate": 1.8627280604456066e-06, "loss": 0.7944, "step": 10106 }, { "epoch": 1.8609923805984017, "grad_norm": 0.7145583629608154, "learning_rate": 1.8571137295609775e-06, "loss": 0.8091, "step": 10107 }, { "epoch": 1.861178219661773, "grad_norm": 0.6150624752044678, "learning_rate": 1.8515077930495338e-06, "loss": 0.9405, "step": 10108 }, { "epoch": 1.8613640587251439, "grad_norm": 0.9233468770980835, "learning_rate": 1.8459102513907477e-06, "loss": 0.9038, "step": 10109 }, { "epoch": 1.8615498977885152, "grad_norm": 0.6704568862915039, "learning_rate": 1.840321105063414e-06, "loss": 0.8806, "step": 10110 }, { "epoch": 
1.8617357368518863, "grad_norm": 0.9922787547111511, "learning_rate": 1.83474035454555e-06, "loss": 0.9688, "step": 10111 }, { "epoch": 1.8619215759152574, "grad_norm": 0.7160792946815491, "learning_rate": 1.8291680003145073e-06, "loss": 0.9976, "step": 10112 }, { "epoch": 1.8621074149786285, "grad_norm": 0.6046891212463379, "learning_rate": 1.8236040428469049e-06, "loss": 0.8351, "step": 10113 }, { "epoch": 1.8622932540419996, "grad_norm": 0.640130877494812, "learning_rate": 1.8180484826186283e-06, "loss": 0.8284, "step": 10114 }, { "epoch": 1.8624790931053707, "grad_norm": 0.7319105267524719, "learning_rate": 1.8125013201048757e-06, "loss": 1.0137, "step": 10115 }, { "epoch": 1.8626649321687418, "grad_norm": 0.6713131666183472, "learning_rate": 1.8069625557800896e-06, "loss": 0.758, "step": 10116 }, { "epoch": 1.862850771232113, "grad_norm": 0.5534182190895081, "learning_rate": 1.8014321901180354e-06, "loss": 0.772, "step": 10117 }, { "epoch": 1.863036610295484, "grad_norm": 0.7840726375579834, "learning_rate": 1.7959102235917014e-06, "loss": 1.123, "step": 10118 }, { "epoch": 1.8632224493588554, "grad_norm": 0.8231391310691833, "learning_rate": 1.790396656673421e-06, "loss": 0.977, "step": 10119 }, { "epoch": 1.8634082884222263, "grad_norm": 0.6878448128700256, "learning_rate": 1.7848914898347723e-06, "loss": 0.9395, "step": 10120 }, { "epoch": 1.8635941274855976, "grad_norm": 0.6510032415390015, "learning_rate": 1.7793947235466235e-06, "loss": 1.0007, "step": 10121 }, { "epoch": 1.8637799665489685, "grad_norm": 0.845956027507782, "learning_rate": 1.7739063582791315e-06, "loss": 1.0199, "step": 10122 }, { "epoch": 1.8639658056123398, "grad_norm": 0.837614119052887, "learning_rate": 1.768426394501721e-06, "loss": 0.9004, "step": 10123 }, { "epoch": 1.8641516446757107, "grad_norm": 0.6333421468734741, "learning_rate": 1.7629548326831058e-06, "loss": 0.8635, "step": 10124 }, { "epoch": 1.864337483739082, "grad_norm": 0.7224404215812683, "learning_rate": 
1.7574916732912782e-06, "loss": 0.9852, "step": 10125 }, { "epoch": 1.864523322802453, "grad_norm": 0.7380586266517639, "learning_rate": 1.7520369167935202e-06, "loss": 0.9746, "step": 10126 }, { "epoch": 1.8647091618658242, "grad_norm": 0.8007980585098267, "learning_rate": 1.7465905636563806e-06, "loss": 0.8674, "step": 10127 }, { "epoch": 1.8648950009291954, "grad_norm": 0.6576514840126038, "learning_rate": 1.74115261434572e-06, "loss": 1.1316, "step": 10128 }, { "epoch": 1.8650808399925665, "grad_norm": 0.6236616969108582, "learning_rate": 1.7357230693266336e-06, "loss": 0.9738, "step": 10129 }, { "epoch": 1.8652666790559376, "grad_norm": 0.7905383706092834, "learning_rate": 1.730301929063527e-06, "loss": 1.2255, "step": 10130 }, { "epoch": 1.8654525181193087, "grad_norm": 0.6649918556213379, "learning_rate": 1.724889194020085e-06, "loss": 1.1122, "step": 10131 }, { "epoch": 1.8656383571826798, "grad_norm": 1.586954116821289, "learning_rate": 1.7194848646592709e-06, "loss": 1.3224, "step": 10132 }, { "epoch": 1.865824196246051, "grad_norm": 0.6258449554443359, "learning_rate": 1.714088941443337e-06, "loss": 0.938, "step": 10133 }, { "epoch": 1.866010035309422, "grad_norm": 0.873955488204956, "learning_rate": 1.7087014248337918e-06, "loss": 0.9731, "step": 10134 }, { "epoch": 1.866195874372793, "grad_norm": 0.7023456692695618, "learning_rate": 1.7033223152914446e-06, "loss": 0.937, "step": 10135 }, { "epoch": 1.8663817134361644, "grad_norm": 0.9610904455184937, "learning_rate": 1.697951613276405e-06, "loss": 1.1295, "step": 10136 }, { "epoch": 1.8665675524995353, "grad_norm": 0.6218596696853638, "learning_rate": 1.6925893192480058e-06, "loss": 0.8515, "step": 10137 }, { "epoch": 1.8667533915629066, "grad_norm": 0.7436656951904297, "learning_rate": 1.6872354336649243e-06, "loss": 0.9952, "step": 10138 }, { "epoch": 1.8669392306262775, "grad_norm": 0.6895730495452881, "learning_rate": 1.681889956985061e-06, "loss": 0.6323, "step": 10139 }, { "epoch": 
1.8671250696896489, "grad_norm": 0.6715397238731384, "learning_rate": 1.6765528896656612e-06, "loss": 0.9762, "step": 10140 }, { "epoch": 1.8673109087530197, "grad_norm": 0.8727614283561707, "learning_rate": 1.6712242321631822e-06, "loss": 0.9921, "step": 10141 }, { "epoch": 1.867496747816391, "grad_norm": 0.6065548658370972, "learning_rate": 1.6659039849334147e-06, "loss": 0.8066, "step": 10142 }, { "epoch": 1.867682586879762, "grad_norm": 0.7665624618530273, "learning_rate": 1.6605921484314169e-06, "loss": 0.7634, "step": 10143 }, { "epoch": 1.8678684259431333, "grad_norm": 0.6623754501342773, "learning_rate": 1.6552887231115033e-06, "loss": 1.0655, "step": 10144 }, { "epoch": 1.8680542650065044, "grad_norm": 0.6355829238891602, "learning_rate": 1.6499937094273e-06, "loss": 0.7108, "step": 10145 }, { "epoch": 1.8682401040698755, "grad_norm": 0.7509677410125732, "learning_rate": 1.644707107831689e-06, "loss": 1.041, "step": 10146 }, { "epoch": 1.8684259431332466, "grad_norm": 0.8060480952262878, "learning_rate": 1.639428918776864e-06, "loss": 0.7989, "step": 10147 }, { "epoch": 1.8686117821966177, "grad_norm": 0.8885887861251831, "learning_rate": 1.6341591427142643e-06, "loss": 1.0574, "step": 10148 }, { "epoch": 1.8687976212599888, "grad_norm": 0.7218427062034607, "learning_rate": 1.6288977800946292e-06, "loss": 1.2323, "step": 10149 }, { "epoch": 1.86898346032336, "grad_norm": 0.668627142906189, "learning_rate": 1.6236448313679764e-06, "loss": 0.8974, "step": 10150 }, { "epoch": 1.869169299386731, "grad_norm": 0.6256469488143921, "learning_rate": 1.618400296983602e-06, "loss": 0.972, "step": 10151 }, { "epoch": 1.8693551384501021, "grad_norm": 0.679287314414978, "learning_rate": 1.6131641773900807e-06, "loss": 0.7333, "step": 10152 }, { "epoch": 1.8695409775134735, "grad_norm": 0.6528913974761963, "learning_rate": 1.6079364730352653e-06, "loss": 0.9249, "step": 10153 }, { "epoch": 1.8697268165768444, "grad_norm": 0.7103220224380493, "learning_rate": 
1.6027171843662981e-06, "loss": 1.0212, "step": 10154 }, { "epoch": 1.8699126556402157, "grad_norm": 0.7032877206802368, "learning_rate": 1.5975063118295996e-06, "loss": 0.9606, "step": 10155 }, { "epoch": 1.8700984947035866, "grad_norm": 0.6555808782577515, "learning_rate": 1.592303855870847e-06, "loss": 0.894, "step": 10156 }, { "epoch": 1.870284333766958, "grad_norm": 0.6237680315971375, "learning_rate": 1.5871098169350395e-06, "loss": 0.9904, "step": 10157 }, { "epoch": 1.8704701728303288, "grad_norm": 0.7430229187011719, "learning_rate": 1.5819241954664331e-06, "loss": 0.9049, "step": 10158 }, { "epoch": 1.8706560118937001, "grad_norm": 0.695476770401001, "learning_rate": 1.576746991908562e-06, "loss": 1.1796, "step": 10159 }, { "epoch": 1.8708418509570712, "grad_norm": 0.6034365296363831, "learning_rate": 1.5715782067042385e-06, "loss": 0.8092, "step": 10160 }, { "epoch": 1.8710276900204423, "grad_norm": 0.7450648546218872, "learning_rate": 1.5664178402955532e-06, "loss": 0.9053, "step": 10161 }, { "epoch": 1.8712135290838134, "grad_norm": 0.6403368711471558, "learning_rate": 1.5612658931239089e-06, "loss": 1.0559, "step": 10162 }, { "epoch": 1.8713993681471845, "grad_norm": 0.6581562757492065, "learning_rate": 1.5561223656299307e-06, "loss": 0.9156, "step": 10163 }, { "epoch": 1.8715852072105557, "grad_norm": 0.8338717818260193, "learning_rate": 1.550987258253589e-06, "loss": 0.9417, "step": 10164 }, { "epoch": 1.8717710462739268, "grad_norm": 0.6185001730918884, "learning_rate": 1.5458605714340657e-06, "loss": 0.5992, "step": 10165 }, { "epoch": 1.8719568853372979, "grad_norm": 0.7387946248054504, "learning_rate": 1.540742305609899e-06, "loss": 0.9353, "step": 10166 }, { "epoch": 1.872142724400669, "grad_norm": 0.762082576751709, "learning_rate": 1.5356324612188166e-06, "loss": 1.1015, "step": 10167 }, { "epoch": 1.8723285634640403, "grad_norm": 0.7067580819129944, "learning_rate": 1.5305310386979132e-06, "loss": 1.0188, "step": 10168 }, { "epoch": 
1.8725144025274112, "grad_norm": 0.7750047445297241, "learning_rate": 1.5254380384835065e-06, "loss": 0.9803, "step": 10169 }, { "epoch": 1.8727002415907825, "grad_norm": 0.7149769067764282, "learning_rate": 1.5203534610112147e-06, "loss": 1.1311, "step": 10170 }, { "epoch": 1.8728860806541534, "grad_norm": 0.8082043528556824, "learning_rate": 1.5152773067159342e-06, "loss": 0.9219, "step": 10171 }, { "epoch": 1.8730719197175247, "grad_norm": 0.6081134676933289, "learning_rate": 1.5102095760318291e-06, "loss": 0.9132, "step": 10172 }, { "epoch": 1.8732577587808956, "grad_norm": 0.6561916470527649, "learning_rate": 1.5051502693923858e-06, "loss": 1.0227, "step": 10173 }, { "epoch": 1.873443597844267, "grad_norm": 0.7679175734519958, "learning_rate": 1.5000993872302916e-06, "loss": 0.789, "step": 10174 }, { "epoch": 1.8736294369076378, "grad_norm": 0.6609779596328735, "learning_rate": 1.4950569299776008e-06, "loss": 0.979, "step": 10175 }, { "epoch": 1.8738152759710092, "grad_norm": 0.6817535161972046, "learning_rate": 1.4900228980655794e-06, "loss": 0.8825, "step": 10176 }, { "epoch": 1.8740011150343803, "grad_norm": 0.7659928202629089, "learning_rate": 1.4849972919248168e-06, "loss": 1.1114, "step": 10177 }, { "epoch": 1.8741869540977514, "grad_norm": 0.7996124029159546, "learning_rate": 1.4799801119851465e-06, "loss": 1.3768, "step": 10178 }, { "epoch": 1.8743727931611225, "grad_norm": 0.7944355607032776, "learning_rate": 1.474971358675703e-06, "loss": 0.8329, "step": 10179 }, { "epoch": 1.8745586322244936, "grad_norm": 0.6426302194595337, "learning_rate": 1.4699710324249105e-06, "loss": 0.9689, "step": 10180 }, { "epoch": 1.8747444712878647, "grad_norm": 0.7049875855445862, "learning_rate": 1.4649791336604491e-06, "loss": 0.8881, "step": 10181 }, { "epoch": 1.8749303103512358, "grad_norm": 0.7229358553886414, "learning_rate": 1.459995662809266e-06, "loss": 0.7566, "step": 10182 }, { "epoch": 1.875116149414607, "grad_norm": 0.5669488310813904, "learning_rate": 
1.4550206202976536e-06, "loss": 0.8043, "step": 10183 }, { "epoch": 1.875301988477978, "grad_norm": 0.9962398409843445, "learning_rate": 1.4500540065510825e-06, "loss": 1.1309, "step": 10184 }, { "epoch": 1.8754878275413494, "grad_norm": 0.7182316184043884, "learning_rate": 1.4450958219944132e-06, "loss": 0.9019, "step": 10185 }, { "epoch": 1.8756736666047202, "grad_norm": 0.7526625990867615, "learning_rate": 1.4401460670516842e-06, "loss": 1.0304, "step": 10186 }, { "epoch": 1.8758595056680916, "grad_norm": 0.7577337026596069, "learning_rate": 1.4352047421462789e-06, "loss": 0.9453, "step": 10187 }, { "epoch": 1.8760453447314624, "grad_norm": 0.6579355597496033, "learning_rate": 1.4302718477008481e-06, "loss": 0.9844, "step": 10188 }, { "epoch": 1.8762311837948338, "grad_norm": 0.8321214318275452, "learning_rate": 1.4253473841372988e-06, "loss": 0.9747, "step": 10189 }, { "epoch": 1.8764170228582047, "grad_norm": 0.7083122730255127, "learning_rate": 1.4204313518768387e-06, "loss": 0.8278, "step": 10190 }, { "epoch": 1.876602861921576, "grad_norm": 0.7506430745124817, "learning_rate": 1.4155237513399312e-06, "loss": 0.9235, "step": 10191 }, { "epoch": 1.8767887009849469, "grad_norm": 0.6202881336212158, "learning_rate": 1.4106245829463627e-06, "loss": 0.7342, "step": 10192 }, { "epoch": 1.8769745400483182, "grad_norm": 0.7362320423126221, "learning_rate": 1.4057338471151427e-06, "loss": 1.0821, "step": 10193 }, { "epoch": 1.8771603791116893, "grad_norm": 0.6716258525848389, "learning_rate": 1.4008515442646141e-06, "loss": 0.8926, "step": 10194 }, { "epoch": 1.8773462181750604, "grad_norm": 0.727524995803833, "learning_rate": 1.395977674812332e-06, "loss": 0.9095, "step": 10195 }, { "epoch": 1.8775320572384315, "grad_norm": 0.6297793984413147, "learning_rate": 1.3911122391751962e-06, "loss": 0.9232, "step": 10196 }, { "epoch": 1.8777178963018026, "grad_norm": 0.7759661674499512, "learning_rate": 1.3862552377693627e-06, "loss": 0.8585, "step": 10197 }, { "epoch": 
1.8779037353651737, "grad_norm": 0.7787834405899048, "learning_rate": 1.3814066710102436e-06, "loss": 1.0092, "step": 10198 }, { "epoch": 1.8780895744285449, "grad_norm": 0.6704865097999573, "learning_rate": 1.3765665393125515e-06, "loss": 0.9321, "step": 10199 }, { "epoch": 1.878275413491916, "grad_norm": 0.8808336853981018, "learning_rate": 1.3717348430902776e-06, "loss": 0.8639, "step": 10200 }, { "epoch": 1.878461252555287, "grad_norm": 0.7640100717544556, "learning_rate": 1.3669115827566915e-06, "loss": 1.0329, "step": 10201 }, { "epoch": 1.8786470916186584, "grad_norm": 0.7684122323989868, "learning_rate": 1.3620967587243294e-06, "loss": 1.0354, "step": 10202 }, { "epoch": 1.8788329306820293, "grad_norm": 0.7215694785118103, "learning_rate": 1.3572903714050067e-06, "loss": 1.1981, "step": 10203 }, { "epoch": 1.8790187697454006, "grad_norm": 0.6940367817878723, "learning_rate": 1.35249242120985e-06, "loss": 0.9857, "step": 10204 }, { "epoch": 1.8792046088087715, "grad_norm": 0.6102963089942932, "learning_rate": 1.3477029085492087e-06, "loss": 0.9679, "step": 10205 }, { "epoch": 1.8793904478721428, "grad_norm": 0.7456520795822144, "learning_rate": 1.3429218338327666e-06, "loss": 0.5382, "step": 10206 }, { "epoch": 1.8795762869355137, "grad_norm": 0.7495307922363281, "learning_rate": 1.3381491974694405e-06, "loss": 1.1192, "step": 10207 }, { "epoch": 1.879762125998885, "grad_norm": 0.7574533224105835, "learning_rate": 1.3333849998674487e-06, "loss": 1.0553, "step": 10208 }, { "epoch": 1.879947965062256, "grad_norm": 0.6794142723083496, "learning_rate": 1.328629241434276e-06, "loss": 0.9301, "step": 10209 }, { "epoch": 1.8801338041256273, "grad_norm": 0.7537904381752014, "learning_rate": 1.3238819225767085e-06, "loss": 0.8018, "step": 10210 }, { "epoch": 1.8803196431889984, "grad_norm": 0.8281949758529663, "learning_rate": 1.319143043700799e-06, "loss": 0.9391, "step": 10211 }, { "epoch": 1.8805054822523695, "grad_norm": 0.7075900435447693, "learning_rate": 
1.3144126052118454e-06, "loss": 0.95, "step": 10212 }, { "epoch": 1.8806913213157406, "grad_norm": 0.7371687293052673, "learning_rate": 1.3096906075144798e-06, "loss": 0.973, "step": 10213 }, { "epoch": 1.8808771603791117, "grad_norm": 0.7950626611709595, "learning_rate": 1.3049770510125681e-06, "loss": 1.0202, "step": 10214 }, { "epoch": 1.8810629994424828, "grad_norm": 0.6449244618415833, "learning_rate": 1.3002719361092763e-06, "loss": 0.9921, "step": 10215 }, { "epoch": 1.881248838505854, "grad_norm": 0.7738901972770691, "learning_rate": 1.2955752632070384e-06, "loss": 1.0622, "step": 10216 }, { "epoch": 1.881434677569225, "grad_norm": 0.7098965644836426, "learning_rate": 1.2908870327075884e-06, "loss": 0.8252, "step": 10217 }, { "epoch": 1.881620516632596, "grad_norm": 0.707387387752533, "learning_rate": 1.2862072450119057e-06, "loss": 1.0608, "step": 10218 }, { "epoch": 1.8818063556959674, "grad_norm": 0.6619812846183777, "learning_rate": 1.2815359005202588e-06, "loss": 0.9372, "step": 10219 }, { "epoch": 1.8819921947593383, "grad_norm": 0.7271058559417725, "learning_rate": 1.2768729996322171e-06, "loss": 0.8793, "step": 10220 }, { "epoch": 1.8821780338227097, "grad_norm": 0.6920179128646851, "learning_rate": 1.272218542746595e-06, "loss": 0.8097, "step": 10221 }, { "epoch": 1.8823638728860805, "grad_norm": 0.7369257807731628, "learning_rate": 1.267572530261496e-06, "loss": 1.0333, "step": 10222 }, { "epoch": 1.8825497119494519, "grad_norm": 0.7454902529716492, "learning_rate": 1.2629349625743025e-06, "loss": 0.9823, "step": 10223 }, { "epoch": 1.8827355510128228, "grad_norm": 0.639519989490509, "learning_rate": 1.258305840081686e-06, "loss": 0.8006, "step": 10224 }, { "epoch": 1.882921390076194, "grad_norm": 0.7165234684944153, "learning_rate": 1.2536851631795854e-06, "loss": 0.9338, "step": 10225 }, { "epoch": 1.8831072291395652, "grad_norm": 0.602321445941925, "learning_rate": 1.2490729322631955e-06, "loss": 0.7861, "step": 10226 }, { "epoch": 
1.8832930682029363, "grad_norm": 0.8231889605522156, "learning_rate": 1.2444691477270453e-06, "loss": 1.0127, "step": 10227 }, { "epoch": 1.8834789072663074, "grad_norm": 0.7236772179603577, "learning_rate": 1.2398738099648755e-06, "loss": 1.0205, "step": 10228 }, { "epoch": 1.8836647463296785, "grad_norm": 0.686307966709137, "learning_rate": 1.235286919369749e-06, "loss": 0.9445, "step": 10229 }, { "epoch": 1.8838505853930496, "grad_norm": 0.8536903262138367, "learning_rate": 1.2307084763339971e-06, "loss": 1.0062, "step": 10230 }, { "epoch": 1.8840364244564207, "grad_norm": 0.778286337852478, "learning_rate": 1.2261384812492061e-06, "loss": 1.0921, "step": 10231 }, { "epoch": 1.8842222635197918, "grad_norm": 0.740349531173706, "learning_rate": 1.2215769345062744e-06, "loss": 1.0735, "step": 10232 }, { "epoch": 1.884408102583163, "grad_norm": 0.671109676361084, "learning_rate": 1.2170238364953567e-06, "loss": 0.8181, "step": 10233 }, { "epoch": 1.8845939416465343, "grad_norm": 0.7762554287910461, "learning_rate": 1.212479187605897e-06, "loss": 0.8365, "step": 10234 }, { "epoch": 1.8847797807099052, "grad_norm": 0.699923574924469, "learning_rate": 1.2079429882265847e-06, "loss": 0.8449, "step": 10235 }, { "epoch": 1.8849656197732765, "grad_norm": 0.7666675448417664, "learning_rate": 1.2034152387454424e-06, "loss": 0.9722, "step": 10236 }, { "epoch": 1.8851514588366474, "grad_norm": 0.6274758577346802, "learning_rate": 1.1988959395497045e-06, "loss": 0.8508, "step": 10237 }, { "epoch": 1.8853372979000187, "grad_norm": 0.716448962688446, "learning_rate": 1.1943850910259401e-06, "loss": 0.9753, "step": 10238 }, { "epoch": 1.8855231369633896, "grad_norm": 0.739844560623169, "learning_rate": 1.1898826935599737e-06, "loss": 0.9353, "step": 10239 }, { "epoch": 1.885708976026761, "grad_norm": 0.66637122631073, "learning_rate": 1.1853887475368974e-06, "loss": 0.7805, "step": 10240 }, { "epoch": 1.8858948150901318, "grad_norm": 0.8586833477020264, "learning_rate": 
1.1809032533410814e-06, "loss": 0.8905, "step": 10241 }, { "epoch": 1.8860806541535031, "grad_norm": 0.7887431383132935, "learning_rate": 1.1764262113561964e-06, "loss": 0.9023, "step": 10242 }, { "epoch": 1.8862664932168742, "grad_norm": 0.676100492477417, "learning_rate": 1.1719576219651585e-06, "loss": 0.8355, "step": 10243 }, { "epoch": 1.8864523322802453, "grad_norm": 0.8734824657440186, "learning_rate": 1.167497485550173e-06, "loss": 0.9403, "step": 10244 }, { "epoch": 1.8866381713436164, "grad_norm": 0.7353190779685974, "learning_rate": 1.1630458024927348e-06, "loss": 0.9986, "step": 10245 }, { "epoch": 1.8868240104069876, "grad_norm": 0.7474417090415955, "learning_rate": 1.158602573173606e-06, "loss": 0.9251, "step": 10246 }, { "epoch": 1.8870098494703587, "grad_norm": 0.6567177176475525, "learning_rate": 1.1541677979728161e-06, "loss": 0.78, "step": 10247 }, { "epoch": 1.8871956885337298, "grad_norm": 0.6639741659164429, "learning_rate": 1.1497414772697056e-06, "loss": 1.071, "step": 10248 }, { "epoch": 1.8873815275971009, "grad_norm": 0.656317949295044, "learning_rate": 1.145323611442839e-06, "loss": 0.7967, "step": 10249 }, { "epoch": 1.887567366660472, "grad_norm": 0.6598522067070007, "learning_rate": 1.1409142008701023e-06, "loss": 0.9534, "step": 10250 }, { "epoch": 1.8877532057238433, "grad_norm": 0.9313237071037292, "learning_rate": 1.1365132459286275e-06, "loss": 0.8162, "step": 10251 }, { "epoch": 1.8879390447872142, "grad_norm": 0.739664614200592, "learning_rate": 1.132120746994847e-06, "loss": 0.8651, "step": 10252 }, { "epoch": 1.8881248838505855, "grad_norm": 0.7578397989273071, "learning_rate": 1.127736704444471e-06, "loss": 0.8895, "step": 10253 }, { "epoch": 1.8883107229139564, "grad_norm": 0.7636153101921082, "learning_rate": 1.123361118652455e-06, "loss": 1.0874, "step": 10254 }, { "epoch": 1.8884965619773277, "grad_norm": 0.687780499458313, "learning_rate": 1.1189939899930778e-06, "loss": 0.7707, "step": 10255 }, { "epoch": 
1.8886824010406986, "grad_norm": 0.6946448087692261, "learning_rate": 1.1146353188398407e-06, "loss": 0.9095, "step": 10256 }, { "epoch": 1.88886824010407, "grad_norm": 0.6907515525817871, "learning_rate": 1.1102851055655784e-06, "loss": 0.9238, "step": 10257 }, { "epoch": 1.8890540791674408, "grad_norm": 0.7136023044586182, "learning_rate": 1.1059433505423488e-06, "loss": 0.9172, "step": 10258 }, { "epoch": 1.8892399182308122, "grad_norm": 0.630096971988678, "learning_rate": 1.101610054141522e-06, "loss": 0.9887, "step": 10259 }, { "epoch": 1.8894257572941833, "grad_norm": 0.6554904580116272, "learning_rate": 1.0972852167337345e-06, "loss": 1.0012, "step": 10260 }, { "epoch": 1.8896115963575544, "grad_norm": 0.5938538908958435, "learning_rate": 1.0929688386889014e-06, "loss": 0.961, "step": 10261 }, { "epoch": 1.8897974354209255, "grad_norm": 0.6572749018669128, "learning_rate": 1.0886609203762054e-06, "loss": 0.9109, "step": 10262 }, { "epoch": 1.8899832744842966, "grad_norm": 0.7717788219451904, "learning_rate": 1.0843614621641185e-06, "loss": 1.2538, "step": 10263 }, { "epoch": 1.8901691135476677, "grad_norm": 0.8543664216995239, "learning_rate": 1.0800704644203908e-06, "loss": 0.8828, "step": 10264 }, { "epoch": 1.8903549526110388, "grad_norm": 0.6619085669517517, "learning_rate": 1.0757879275120175e-06, "loss": 0.8544, "step": 10265 }, { "epoch": 1.89054079167441, "grad_norm": 0.6682559251785278, "learning_rate": 1.071513851805317e-06, "loss": 0.7595, "step": 10266 }, { "epoch": 1.890726630737781, "grad_norm": 0.6358002424240112, "learning_rate": 1.067248237665841e-06, "loss": 0.7431, "step": 10267 }, { "epoch": 1.8909124698011524, "grad_norm": 0.6468208432197571, "learning_rate": 1.0629910854584534e-06, "loss": 0.7797, "step": 10268 }, { "epoch": 1.8910983088645232, "grad_norm": 0.7336880564689636, "learning_rate": 1.058742395547263e-06, "loss": 0.9174, "step": 10269 }, { "epoch": 1.8912841479278946, "grad_norm": 0.9050149917602539, "learning_rate": 
1.0545021682956791e-06, "loss": 0.8339, "step": 10270 }, { "epoch": 1.8914699869912655, "grad_norm": 0.6562024354934692, "learning_rate": 1.0502704040663669e-06, "loss": 0.9984, "step": 10271 }, { "epoch": 1.8916558260546368, "grad_norm": 0.6586121916770935, "learning_rate": 1.0460471032212925e-06, "loss": 0.8628, "step": 10272 }, { "epoch": 1.8918416651180077, "grad_norm": 0.7983189821243286, "learning_rate": 1.0418322661216673e-06, "loss": 1.1281, "step": 10273 }, { "epoch": 1.892027504181379, "grad_norm": 0.6309989094734192, "learning_rate": 1.0376258931280136e-06, "loss": 0.8158, "step": 10274 }, { "epoch": 1.89221334324475, "grad_norm": 0.8750993013381958, "learning_rate": 1.0334279846001106e-06, "loss": 1.1648, "step": 10275 }, { "epoch": 1.8923991823081212, "grad_norm": 0.7219237089157104, "learning_rate": 1.029238540896993e-06, "loss": 0.9757, "step": 10276 }, { "epoch": 1.8925850213714923, "grad_norm": 0.7669157981872559, "learning_rate": 1.0250575623770075e-06, "loss": 0.9233, "step": 10277 }, { "epoch": 1.8927708604348634, "grad_norm": 0.7733719348907471, "learning_rate": 1.0208850493977685e-06, "loss": 0.849, "step": 10278 }, { "epoch": 1.8929566994982345, "grad_norm": 0.7132909297943115, "learning_rate": 1.0167210023161566e-06, "loss": 1.1725, "step": 10279 }, { "epoch": 1.8931425385616056, "grad_norm": 0.7796614766120911, "learning_rate": 1.0125654214883207e-06, "loss": 1.1139, "step": 10280 }, { "epoch": 1.8933283776249767, "grad_norm": 0.7235081791877747, "learning_rate": 1.0084183072697095e-06, "loss": 0.8145, "step": 10281 }, { "epoch": 1.8935142166883479, "grad_norm": 0.7745675444602966, "learning_rate": 1.0042796600150172e-06, "loss": 1.0418, "step": 10282 }, { "epoch": 1.893700055751719, "grad_norm": 0.6154912710189819, "learning_rate": 1.0001494800782606e-06, "loss": 0.9604, "step": 10283 }, { "epoch": 1.89388589481509, "grad_norm": 0.7308815717697144, "learning_rate": 9.960277678126795e-07, "loss": 0.9988, "step": 10284 }, { "epoch": 
1.8940717338784614, "grad_norm": 0.7246875762939453, "learning_rate": 9.919145235708139e-07, "loss": 1.0016, "step": 10285 }, { "epoch": 1.8942575729418323, "grad_norm": 0.7031250596046448, "learning_rate": 9.878097477044934e-07, "loss": 0.9951, "step": 10286 }, { "epoch": 1.8944434120052036, "grad_norm": 0.6622384786605835, "learning_rate": 9.83713440564782e-07, "loss": 1.0912, "step": 10287 }, { "epoch": 1.8946292510685745, "grad_norm": 0.6151398420333862, "learning_rate": 9.796256025020767e-07, "loss": 0.9752, "step": 10288 }, { "epoch": 1.8948150901319458, "grad_norm": 0.6820587515830994, "learning_rate": 9.755462338659981e-07, "loss": 0.9326, "step": 10289 }, { "epoch": 1.8950009291953167, "grad_norm": 0.5994402766227722, "learning_rate": 9.714753350054783e-07, "loss": 0.7013, "step": 10290 }, { "epoch": 1.895186768258688, "grad_norm": 0.7843385338783264, "learning_rate": 9.674129062686944e-07, "loss": 1.1573, "step": 10291 }, { "epoch": 1.8953726073220591, "grad_norm": 0.7633580565452576, "learning_rate": 9.633589480031236e-07, "loss": 0.7809, "step": 10292 }, { "epoch": 1.8955584463854303, "grad_norm": 0.7467591762542725, "learning_rate": 9.593134605555109e-07, "loss": 0.868, "step": 10293 }, { "epoch": 1.8957442854488014, "grad_norm": 0.9395126104354858, "learning_rate": 9.552764442718687e-07, "loss": 0.9111, "step": 10294 }, { "epoch": 1.8959301245121725, "grad_norm": 1.2283921241760254, "learning_rate": 9.512478994974982e-07, "loss": 1.4538, "step": 10295 }, { "epoch": 1.8961159635755436, "grad_norm": 0.685003399848938, "learning_rate": 9.472278265769685e-07, "loss": 1.1466, "step": 10296 }, { "epoch": 1.8963018026389147, "grad_norm": 0.7409753799438477, "learning_rate": 9.432162258541267e-07, "loss": 0.9852, "step": 10297 }, { "epoch": 1.8964876417022858, "grad_norm": 0.7501311302185059, "learning_rate": 9.39213097672087e-07, "loss": 1.0421, "step": 10298 }, { "epoch": 1.896673480765657, "grad_norm": 0.6653595566749573, "learning_rate": 
9.352184423732535e-07, "loss": 1.1197, "step": 10299 }, { "epoch": 1.8968593198290282, "grad_norm": 0.6663728356361389, "learning_rate": 9.312322602992863e-07, "loss": 0.8264, "step": 10300 }, { "epoch": 1.897045158892399, "grad_norm": 0.697401225566864, "learning_rate": 9.272545517911457e-07, "loss": 1.1152, "step": 10301 }, { "epoch": 1.8972309979557704, "grad_norm": 0.8602698445320129, "learning_rate": 9.232853171890488e-07, "loss": 0.7819, "step": 10302 }, { "epoch": 1.8974168370191413, "grad_norm": 0.8105998635292053, "learning_rate": 9.193245568324904e-07, "loss": 0.9317, "step": 10303 }, { "epoch": 1.8976026760825127, "grad_norm": 0.700174868106842, "learning_rate": 9.15372271060233e-07, "loss": 0.9864, "step": 10304 }, { "epoch": 1.8977885151458835, "grad_norm": 0.6726325154304504, "learning_rate": 9.114284602103507e-07, "loss": 0.726, "step": 10305 }, { "epoch": 1.8979743542092549, "grad_norm": 0.7302104830741882, "learning_rate": 9.074931246201402e-07, "loss": 1.1052, "step": 10306 }, { "epoch": 1.8981601932726258, "grad_norm": 0.5432365536689758, "learning_rate": 9.03566264626221e-07, "loss": 0.6084, "step": 10307 }, { "epoch": 1.898346032335997, "grad_norm": 0.6986281871795654, "learning_rate": 8.996478805644471e-07, "loss": 0.8696, "step": 10308 }, { "epoch": 1.8985318713993682, "grad_norm": 0.9530007243156433, "learning_rate": 8.957379727699722e-07, "loss": 1.023, "step": 10309 }, { "epoch": 1.8987177104627393, "grad_norm": 0.6925898194313049, "learning_rate": 8.91836541577229e-07, "loss": 1.1046, "step": 10310 }, { "epoch": 1.8989035495261104, "grad_norm": 0.8129331469535828, "learning_rate": 8.879435873199061e-07, "loss": 0.9929, "step": 10311 }, { "epoch": 1.8990893885894815, "grad_norm": 0.7528035044670105, "learning_rate": 8.840591103309703e-07, "loss": 1.167, "step": 10312 }, { "epoch": 1.8992752276528526, "grad_norm": 0.7556763887405396, "learning_rate": 8.801831109426783e-07, "loss": 1.006, "step": 10313 }, { "epoch": 1.8994610667162237, 
"grad_norm": 0.6110503077507019, "learning_rate": 8.763155894865538e-07, "loss": 0.7358, "step": 10314 }, { "epoch": 1.8996469057795948, "grad_norm": 0.8103258609771729, "learning_rate": 8.724565462933876e-07, "loss": 1.0752, "step": 10315 }, { "epoch": 1.899832744842966, "grad_norm": 0.7095519304275513, "learning_rate": 8.686059816932602e-07, "loss": 1.0222, "step": 10316 }, { "epoch": 1.9000185839063373, "grad_norm": 0.705930233001709, "learning_rate": 8.64763896015508e-07, "loss": 0.9961, "step": 10317 }, { "epoch": 1.9002044229697082, "grad_norm": 0.667270302772522, "learning_rate": 8.609302895887683e-07, "loss": 0.8224, "step": 10318 }, { "epoch": 1.9003902620330795, "grad_norm": 0.6715710163116455, "learning_rate": 8.571051627409232e-07, "loss": 0.9866, "step": 10319 }, { "epoch": 1.9005761010964504, "grad_norm": 0.6669120788574219, "learning_rate": 8.532885157991555e-07, "loss": 0.8995, "step": 10320 }, { "epoch": 1.9007619401598217, "grad_norm": 0.7272221446037292, "learning_rate": 8.494803490898928e-07, "loss": 1.0071, "step": 10321 }, { "epoch": 1.9009477792231926, "grad_norm": 0.7126994132995605, "learning_rate": 8.456806629388747e-07, "loss": 0.8916, "step": 10322 }, { "epoch": 1.901133618286564, "grad_norm": 0.6916284561157227, "learning_rate": 8.418894576710967e-07, "loss": 1.0381, "step": 10323 }, { "epoch": 1.9013194573499348, "grad_norm": 0.7423785328865051, "learning_rate": 8.381067336108106e-07, "loss": 0.9574, "step": 10324 }, { "epoch": 1.9015052964133061, "grad_norm": 0.8324457406997681, "learning_rate": 8.343324910815798e-07, "loss": 0.7473, "step": 10325 }, { "epoch": 1.9016911354766772, "grad_norm": 0.6474454998970032, "learning_rate": 8.305667304062237e-07, "loss": 0.8059, "step": 10326 }, { "epoch": 1.9018769745400483, "grad_norm": 0.6828518509864807, "learning_rate": 8.268094519068182e-07, "loss": 1.0419, "step": 10327 }, { "epoch": 1.9020628136034194, "grad_norm": 0.7035902142524719, "learning_rate": 8.230606559047394e-07, "loss": 
0.8835, "step": 10328 }, { "epoch": 1.9022486526667906, "grad_norm": 0.6972468495368958, "learning_rate": 8.19320342720642e-07, "loss": 1.0148, "step": 10329 }, { "epoch": 1.9024344917301617, "grad_norm": 0.8360022902488708, "learning_rate": 8.155885126744256e-07, "loss": 1.222, "step": 10330 }, { "epoch": 1.9026203307935328, "grad_norm": 0.7290945649147034, "learning_rate": 8.118651660852905e-07, "loss": 0.9121, "step": 10331 }, { "epoch": 1.9028061698569039, "grad_norm": 0.7698082327842712, "learning_rate": 8.081503032717041e-07, "loss": 0.9743, "step": 10332 }, { "epoch": 1.902992008920275, "grad_norm": 0.6450352072715759, "learning_rate": 8.044439245514013e-07, "loss": 0.8837, "step": 10333 }, { "epoch": 1.9031778479836463, "grad_norm": 0.7240906953811646, "learning_rate": 8.00746030241395e-07, "loss": 0.8972, "step": 10334 }, { "epoch": 1.9033636870470172, "grad_norm": 0.6440247297286987, "learning_rate": 7.970566206579877e-07, "loss": 0.8675, "step": 10335 }, { "epoch": 1.9035495261103885, "grad_norm": 0.780107855796814, "learning_rate": 7.933756961167271e-07, "loss": 1.0229, "step": 10336 }, { "epoch": 1.9037353651737594, "grad_norm": 0.65886390209198, "learning_rate": 7.897032569324614e-07, "loss": 1.0398, "step": 10337 }, { "epoch": 1.9039212042371307, "grad_norm": 0.6943807601928711, "learning_rate": 7.860393034192948e-07, "loss": 0.8795, "step": 10338 }, { "epoch": 1.9041070433005016, "grad_norm": 0.7381815314292908, "learning_rate": 7.82383835890621e-07, "loss": 0.8438, "step": 10339 }, { "epoch": 1.904292882363873, "grad_norm": 0.7263152003288269, "learning_rate": 7.787368546590901e-07, "loss": 0.8213, "step": 10340 }, { "epoch": 1.904478721427244, "grad_norm": 1.1103852987289429, "learning_rate": 7.750983600366524e-07, "loss": 0.8701, "step": 10341 }, { "epoch": 1.9046645604906152, "grad_norm": 0.752496600151062, "learning_rate": 7.714683523344924e-07, "loss": 1.1805, "step": 10342 }, { "epoch": 1.9048503995539863, "grad_norm": 0.8111420273780823, 
"learning_rate": 7.678468318631061e-07, "loss": 1.162, "step": 10343 }, { "epoch": 1.9050362386173574, "grad_norm": 0.6829381585121155, "learning_rate": 7.642337989322568e-07, "loss": 1.0697, "step": 10344 }, { "epoch": 1.9052220776807285, "grad_norm": 0.7154696583747864, "learning_rate": 7.60629253850953e-07, "loss": 0.9418, "step": 10345 }, { "epoch": 1.9054079167440996, "grad_norm": 0.7428658604621887, "learning_rate": 7.570331969275257e-07, "loss": 0.8949, "step": 10346 }, { "epoch": 1.9055937558074707, "grad_norm": 0.7169175148010254, "learning_rate": 7.534456284695401e-07, "loss": 1.0155, "step": 10347 }, { "epoch": 1.9057795948708418, "grad_norm": 0.5764283537864685, "learning_rate": 7.498665487838508e-07, "loss": 0.6472, "step": 10348 }, { "epoch": 1.9059654339342131, "grad_norm": 0.657071590423584, "learning_rate": 7.462959581765794e-07, "loss": 1.0341, "step": 10349 }, { "epoch": 1.906151272997584, "grad_norm": 0.7069972157478333, "learning_rate": 7.427338569531261e-07, "loss": 0.8275, "step": 10350 }, { "epoch": 1.9063371120609554, "grad_norm": 0.7730807065963745, "learning_rate": 7.391802454181807e-07, "loss": 0.9245, "step": 10351 }, { "epoch": 1.9065229511243262, "grad_norm": 0.6439422965049744, "learning_rate": 7.356351238756665e-07, "loss": 1.1224, "step": 10352 }, { "epoch": 1.9067087901876976, "grad_norm": 0.7238260507583618, "learning_rate": 7.320984926288299e-07, "loss": 1.0158, "step": 10353 }, { "epoch": 1.9068946292510685, "grad_norm": 0.6328291893005371, "learning_rate": 7.285703519801512e-07, "loss": 1.0939, "step": 10354 }, { "epoch": 1.9070804683144398, "grad_norm": 0.7213243842124939, "learning_rate": 7.250507022314113e-07, "loss": 1.0515, "step": 10355 }, { "epoch": 1.9072663073778107, "grad_norm": 0.6419424414634705, "learning_rate": 7.215395436836358e-07, "loss": 0.7971, "step": 10356 }, { "epoch": 1.907452146441182, "grad_norm": 0.6070999503135681, "learning_rate": 7.180368766371515e-07, "loss": 0.8184, "step": 10357 }, { "epoch": 
1.907637985504553, "grad_norm": 0.6605267524719238, "learning_rate": 7.145427013915629e-07, "loss": 0.8258, "step": 10358 }, { "epoch": 1.9078238245679242, "grad_norm": 0.761379599571228, "learning_rate": 7.110570182457088e-07, "loss": 0.9409, "step": 10359 }, { "epoch": 1.9080096636312953, "grad_norm": 0.7137780785560608, "learning_rate": 7.075798274977396e-07, "loss": 0.8647, "step": 10360 }, { "epoch": 1.9081955026946664, "grad_norm": 0.800717294216156, "learning_rate": 7.04111129445073e-07, "loss": 0.9653, "step": 10361 }, { "epoch": 1.9083813417580375, "grad_norm": 0.8079236149787903, "learning_rate": 7.006509243843829e-07, "loss": 1.1265, "step": 10362 }, { "epoch": 1.9085671808214086, "grad_norm": 0.8685330748558044, "learning_rate": 6.971992126116322e-07, "loss": 1.1229, "step": 10363 }, { "epoch": 1.9087530198847797, "grad_norm": 0.6397227048873901, "learning_rate": 6.937559944220628e-07, "loss": 0.9112, "step": 10364 }, { "epoch": 1.9089388589481509, "grad_norm": 0.6759904623031616, "learning_rate": 6.903212701101725e-07, "loss": 0.8865, "step": 10365 }, { "epoch": 1.9091246980115222, "grad_norm": 0.6716403961181641, "learning_rate": 6.86895039969726e-07, "loss": 1.143, "step": 10366 }, { "epoch": 1.909310537074893, "grad_norm": 0.6793673038482666, "learning_rate": 6.834773042938114e-07, "loss": 0.8889, "step": 10367 }, { "epoch": 1.9094963761382644, "grad_norm": 0.7783335447311401, "learning_rate": 6.800680633747169e-07, "loss": 1.1913, "step": 10368 }, { "epoch": 1.9096822152016353, "grad_norm": 0.7612161636352539, "learning_rate": 6.76667317504065e-07, "loss": 0.9476, "step": 10369 }, { "epoch": 1.9098680542650066, "grad_norm": 0.7343637347221375, "learning_rate": 6.732750669727228e-07, "loss": 1.0106, "step": 10370 }, { "epoch": 1.9100538933283775, "grad_norm": 1.6431206464767456, "learning_rate": 6.698913120708361e-07, "loss": 1.6607, "step": 10371 }, { "epoch": 1.9102397323917488, "grad_norm": 0.6783691644668579, "learning_rate": 
6.665160530878401e-07, "loss": 0.8022, "step": 10372 }, { "epoch": 1.9104255714551197, "grad_norm": 2.257593870162964, "learning_rate": 6.631492903123926e-07, "loss": 1.3108, "step": 10373 }, { "epoch": 1.910611410518491, "grad_norm": 0.7017894983291626, "learning_rate": 6.597910240324967e-07, "loss": 1.1359, "step": 10374 }, { "epoch": 1.9107972495818621, "grad_norm": 0.6423133611679077, "learning_rate": 6.564412545353671e-07, "loss": 1.0166, "step": 10375 }, { "epoch": 1.9109830886452333, "grad_norm": 0.6490014791488647, "learning_rate": 6.530999821075301e-07, "loss": 0.9365, "step": 10376 }, { "epoch": 1.9111689277086044, "grad_norm": 0.7735306620597839, "learning_rate": 6.497672070347793e-07, "loss": 1.0894, "step": 10377 }, { "epoch": 1.9113547667719755, "grad_norm": 0.7858856916427612, "learning_rate": 6.464429296021534e-07, "loss": 1.1268, "step": 10378 }, { "epoch": 1.9115406058353466, "grad_norm": 0.655606746673584, "learning_rate": 6.431271500939917e-07, "loss": 1.1135, "step": 10379 }, { "epoch": 1.9117264448987177, "grad_norm": 0.8696974515914917, "learning_rate": 6.398198687939005e-07, "loss": 1.0957, "step": 10380 }, { "epoch": 1.9119122839620888, "grad_norm": 0.7006300687789917, "learning_rate": 6.365210859847759e-07, "loss": 0.9136, "step": 10381 }, { "epoch": 1.91209812302546, "grad_norm": 0.687654435634613, "learning_rate": 6.332308019487476e-07, "loss": 0.6121, "step": 10382 }, { "epoch": 1.9122839620888312, "grad_norm": 0.6635216474533081, "learning_rate": 6.299490169672462e-07, "loss": 0.9353, "step": 10383 }, { "epoch": 1.912469801152202, "grad_norm": 0.6450825333595276, "learning_rate": 6.266757313209693e-07, "loss": 0.8709, "step": 10384 }, { "epoch": 1.9126556402155734, "grad_norm": 0.6535255312919617, "learning_rate": 6.234109452899039e-07, "loss": 0.9271, "step": 10385 }, { "epoch": 1.9128414792789443, "grad_norm": 0.6958978176116943, "learning_rate": 6.201546591532714e-07, "loss": 0.6958, "step": 10386 }, { "epoch": 1.9130273183423157, 
"grad_norm": 0.6085829138755798, "learning_rate": 6.169068731896044e-07, "loss": 0.5802, "step": 10387 }, { "epoch": 1.9132131574056865, "grad_norm": 0.7713790535926819, "learning_rate": 6.136675876766806e-07, "loss": 1.1302, "step": 10388 }, { "epoch": 1.9133989964690579, "grad_norm": 0.6537017822265625, "learning_rate": 6.104368028915785e-07, "loss": 0.8586, "step": 10389 }, { "epoch": 1.9135848355324288, "grad_norm": 0.6550865769386292, "learning_rate": 6.072145191106216e-07, "loss": 0.8799, "step": 10390 }, { "epoch": 1.9137706745958, "grad_norm": 0.7534873485565186, "learning_rate": 6.040007366094225e-07, "loss": 0.998, "step": 10391 }, { "epoch": 1.9139565136591712, "grad_norm": 0.756750226020813, "learning_rate": 6.007954556628615e-07, "loss": 1.0923, "step": 10392 }, { "epoch": 1.9141423527225423, "grad_norm": 0.8632221221923828, "learning_rate": 5.975986765450969e-07, "loss": 1.1357, "step": 10393 }, { "epoch": 1.9143281917859134, "grad_norm": 0.73847496509552, "learning_rate": 5.944103995295547e-07, "loss": 0.9223, "step": 10394 }, { "epoch": 1.9145140308492845, "grad_norm": 0.6389723420143127, "learning_rate": 5.912306248889388e-07, "loss": 0.9463, "step": 10395 }, { "epoch": 1.9146998699126556, "grad_norm": 0.6227095723152161, "learning_rate": 5.880593528952094e-07, "loss": 0.8493, "step": 10396 }, { "epoch": 1.9148857089760267, "grad_norm": 0.6578342914581299, "learning_rate": 5.848965838196274e-07, "loss": 0.9054, "step": 10397 }, { "epoch": 1.9150715480393978, "grad_norm": 0.8469536900520325, "learning_rate": 5.817423179327098e-07, "loss": 1.0138, "step": 10398 }, { "epoch": 1.915257387102769, "grad_norm": 0.6048383712768555, "learning_rate": 5.785965555042295e-07, "loss": 0.85, "step": 10399 }, { "epoch": 1.9154432261661403, "grad_norm": 1.3355565071105957, "learning_rate": 5.754592968032713e-07, "loss": 1.393, "step": 10400 }, { "epoch": 1.9156290652295112, "grad_norm": 1.1654173135757446, "learning_rate": 5.72330542098165e-07, "loss": 1.057, 
"step": 10401 }, { "epoch": 1.9158149042928825, "grad_norm": 1.7299017906188965, "learning_rate": 5.69210291656519e-07, "loss": 1.3309, "step": 10402 }, { "epoch": 1.9160007433562534, "grad_norm": 0.7381700873374939, "learning_rate": 5.660985457452195e-07, "loss": 0.9086, "step": 10403 }, { "epoch": 1.9161865824196247, "grad_norm": 0.9093272686004639, "learning_rate": 5.629953046304093e-07, "loss": 0.8974, "step": 10404 }, { "epoch": 1.9163724214829956, "grad_norm": 0.7489191293716431, "learning_rate": 5.599005685775205e-07, "loss": 0.9781, "step": 10405 }, { "epoch": 1.916558260546367, "grad_norm": 0.7251028418540955, "learning_rate": 5.568143378512525e-07, "loss": 1.0954, "step": 10406 }, { "epoch": 1.916744099609738, "grad_norm": 0.6643712520599365, "learning_rate": 5.53736612715583e-07, "loss": 0.8945, "step": 10407 }, { "epoch": 1.9169299386731091, "grad_norm": 0.7415669560432434, "learning_rate": 5.50667393433757e-07, "loss": 1.152, "step": 10408 }, { "epoch": 1.9171157777364802, "grad_norm": 0.6884954571723938, "learning_rate": 5.476066802682866e-07, "loss": 1.1658, "step": 10409 }, { "epoch": 1.9173016167998513, "grad_norm": 0.6170604825019836, "learning_rate": 5.445544734809626e-07, "loss": 0.9808, "step": 10410 }, { "epoch": 1.9174874558632224, "grad_norm": 0.6698222160339355, "learning_rate": 5.415107733328428e-07, "loss": 1.1359, "step": 10411 }, { "epoch": 1.9176732949265936, "grad_norm": 0.741590142250061, "learning_rate": 5.384755800842633e-07, "loss": 0.7777, "step": 10412 }, { "epoch": 1.9178591339899647, "grad_norm": 0.6918681263923645, "learning_rate": 5.354488939948277e-07, "loss": 0.9641, "step": 10413 }, { "epoch": 1.9180449730533358, "grad_norm": 0.6736308336257935, "learning_rate": 5.324307153234287e-07, "loss": 0.8739, "step": 10414 }, { "epoch": 1.918230812116707, "grad_norm": 0.7565740346908569, "learning_rate": 5.294210443282044e-07, "loss": 0.6995, "step": 10415 }, { "epoch": 1.918416651180078, "grad_norm": 0.7019480466842651, 
"learning_rate": 5.264198812665822e-07, "loss": 0.6801, "step": 10416 }, { "epoch": 1.9186024902434493, "grad_norm": 0.7393890619277954, "learning_rate": 5.234272263952566e-07, "loss": 1.0164, "step": 10417 }, { "epoch": 1.9187883293068202, "grad_norm": 0.93512362241745, "learning_rate": 5.204430799702009e-07, "loss": 1.0921, "step": 10418 }, { "epoch": 1.9189741683701915, "grad_norm": 0.7040289640426636, "learning_rate": 5.174674422466552e-07, "loss": 0.8494, "step": 10419 }, { "epoch": 1.9191600074335624, "grad_norm": 0.6942346096038818, "learning_rate": 5.14500313479116e-07, "loss": 1.0021, "step": 10420 }, { "epoch": 1.9193458464969337, "grad_norm": 0.6451189517974854, "learning_rate": 5.115416939213913e-07, "loss": 0.8916, "step": 10421 }, { "epoch": 1.9195316855603046, "grad_norm": 1.1839951276779175, "learning_rate": 5.085915838265343e-07, "loss": 1.3439, "step": 10422 }, { "epoch": 1.919717524623676, "grad_norm": 0.7084904313087463, "learning_rate": 5.056499834468543e-07, "loss": 0.9234, "step": 10423 }, { "epoch": 1.919903363687047, "grad_norm": 0.6282612681388855, "learning_rate": 5.027168930339721e-07, "loss": 0.7671, "step": 10424 }, { "epoch": 1.9200892027504182, "grad_norm": 0.7924585938453674, "learning_rate": 4.99792312838765e-07, "loss": 0.8562, "step": 10425 }, { "epoch": 1.9202750418137893, "grad_norm": 0.746904730796814, "learning_rate": 4.968762431113549e-07, "loss": 1.0235, "step": 10426 }, { "epoch": 1.9204608808771604, "grad_norm": 0.7356643080711365, "learning_rate": 4.93968684101187e-07, "loss": 1.0711, "step": 10427 }, { "epoch": 1.9206467199405315, "grad_norm": 0.7611639499664307, "learning_rate": 4.910696360569289e-07, "loss": 0.9852, "step": 10428 }, { "epoch": 1.9208325590039026, "grad_norm": 0.7935344576835632, "learning_rate": 4.881790992265601e-07, "loss": 0.9326, "step": 10429 }, { "epoch": 1.9210183980672737, "grad_norm": 0.6445058584213257, "learning_rate": 4.852970738573048e-07, "loss": 0.8003, "step": 10430 }, { "epoch": 
1.9212042371306448, "grad_norm": 0.6539177894592285, "learning_rate": 4.824235601956661e-07, "loss": 0.797, "step": 10431 }, { "epoch": 1.9213900761940161, "grad_norm": 0.7449811100959778, "learning_rate": 4.795585584874252e-07, "loss": 0.9627, "step": 10432 }, { "epoch": 1.921575915257387, "grad_norm": 0.7212844491004944, "learning_rate": 4.7670206897763025e-07, "loss": 0.9828, "step": 10433 }, { "epoch": 1.9217617543207584, "grad_norm": 0.6266747713088989, "learning_rate": 4.7385409191060826e-07, "loss": 0.5924, "step": 10434 }, { "epoch": 1.9219475933841292, "grad_norm": 0.6358219385147095, "learning_rate": 4.710146275299643e-07, "loss": 0.8659, "step": 10435 }, { "epoch": 1.9221334324475006, "grad_norm": 0.7808411121368408, "learning_rate": 4.681836760785374e-07, "loss": 0.9741, "step": 10436 }, { "epoch": 1.9223192715108715, "grad_norm": 0.9361091256141663, "learning_rate": 4.653612377984673e-07, "loss": 1.1164, "step": 10437 }, { "epoch": 1.9225051105742428, "grad_norm": 0.6802321076393127, "learning_rate": 4.6254731293118304e-07, "loss": 0.9483, "step": 10438 }, { "epoch": 1.9226909496376137, "grad_norm": 0.722303032875061, "learning_rate": 4.5974190171735874e-07, "loss": 1.0042, "step": 10439 }, { "epoch": 1.922876788700985, "grad_norm": 0.6165829300880432, "learning_rate": 4.5694500439694696e-07, "loss": 0.9669, "step": 10440 }, { "epoch": 1.923062627764356, "grad_norm": 0.73440021276474, "learning_rate": 4.5415662120915637e-07, "loss": 0.9426, "step": 10441 }, { "epoch": 1.9232484668277272, "grad_norm": 0.7627027630805969, "learning_rate": 4.513767523925072e-07, "loss": 0.9235, "step": 10442 }, { "epoch": 1.9234343058910983, "grad_norm": 0.6763946413993835, "learning_rate": 4.486053981847538e-07, "loss": 0.863, "step": 10443 }, { "epoch": 1.9236201449544694, "grad_norm": 0.7742003202438354, "learning_rate": 4.458425588229509e-07, "loss": 0.8294, "step": 10444 }, { "epoch": 1.9238059840178405, "grad_norm": 0.860358476638794, "learning_rate": 
4.4308823454338734e-07, "loss": 0.9598, "step": 10445 }, { "epoch": 1.9239918230812116, "grad_norm": 0.8054762482643127, "learning_rate": 4.403424255816635e-07, "loss": 1.0493, "step": 10446 }, { "epoch": 1.9241776621445827, "grad_norm": 0.7892675399780273, "learning_rate": 4.376051321726138e-07, "loss": 0.9371, "step": 10447 }, { "epoch": 1.9243635012079539, "grad_norm": 0.9061946868896484, "learning_rate": 4.3487635455039533e-07, "loss": 0.9521, "step": 10448 }, { "epoch": 1.9245493402713252, "grad_norm": 0.6500113606452942, "learning_rate": 4.321560929483881e-07, "loss": 1.1488, "step": 10449 }, { "epoch": 1.924735179334696, "grad_norm": 0.6235157251358032, "learning_rate": 4.294443475992616e-07, "loss": 0.8004, "step": 10450 }, { "epoch": 1.9249210183980674, "grad_norm": 0.753731369972229, "learning_rate": 4.2674111873496345e-07, "loss": 1.1275, "step": 10451 }, { "epoch": 1.9251068574614383, "grad_norm": 0.7848681211471558, "learning_rate": 4.240464065866978e-07, "loss": 0.9641, "step": 10452 }, { "epoch": 1.9252926965248096, "grad_norm": 0.7322390079498291, "learning_rate": 4.21360211384958e-07, "loss": 0.9026, "step": 10453 }, { "epoch": 1.9254785355881805, "grad_norm": 0.8562449812889099, "learning_rate": 4.1868253335949345e-07, "loss": 1.0697, "step": 10454 }, { "epoch": 1.9256643746515518, "grad_norm": 0.6389977335929871, "learning_rate": 4.160133727393434e-07, "loss": 1.0883, "step": 10455 }, { "epoch": 1.925850213714923, "grad_norm": 0.7520100474357605, "learning_rate": 4.133527297527917e-07, "loss": 0.8914, "step": 10456 }, { "epoch": 1.926036052778294, "grad_norm": 0.6367651224136353, "learning_rate": 4.107006046274231e-07, "loss": 0.8606, "step": 10457 }, { "epoch": 1.9262218918416651, "grad_norm": 0.7314348220825195, "learning_rate": 4.080569975900672e-07, "loss": 1.2232, "step": 10458 }, { "epoch": 1.9264077309050363, "grad_norm": 0.571670413017273, "learning_rate": 4.0542190886684317e-07, "loss": 0.9089, "step": 10459 }, { "epoch": 
1.9265935699684074, "grad_norm": 0.6684804558753967, "learning_rate": 4.0279533868314844e-07, "loss": 0.7138, "step": 10460 }, { "epoch": 1.9267794090317785, "grad_norm": 0.6759762763977051, "learning_rate": 4.0017728726360336e-07, "loss": 1.0231, "step": 10461 }, { "epoch": 1.9269652480951496, "grad_norm": 1.505415439605713, "learning_rate": 3.975677548321732e-07, "loss": 1.5421, "step": 10462 }, { "epoch": 1.9271510871585207, "grad_norm": 0.7095223665237427, "learning_rate": 3.949667416120462e-07, "loss": 0.8065, "step": 10463 }, { "epoch": 1.9273369262218918, "grad_norm": 0.6022650599479675, "learning_rate": 3.9237424782567757e-07, "loss": 0.8043, "step": 10464 }, { "epoch": 1.927522765285263, "grad_norm": 0.6729022264480591, "learning_rate": 3.8979027369482333e-07, "loss": 0.668, "step": 10465 }, { "epoch": 1.9277086043486342, "grad_norm": 0.635015606880188, "learning_rate": 3.8721481944048455e-07, "loss": 0.8435, "step": 10466 }, { "epoch": 1.927894443412005, "grad_norm": 0.8524276614189148, "learning_rate": 3.846478852829738e-07, "loss": 1.204, "step": 10467 }, { "epoch": 1.9280802824753764, "grad_norm": 1.4829657077789307, "learning_rate": 3.820894714418044e-07, "loss": 1.5791, "step": 10468 }, { "epoch": 1.9282661215387473, "grad_norm": 0.7150884866714478, "learning_rate": 3.795395781358346e-07, "loss": 1.0709, "step": 10469 }, { "epoch": 1.9284519606021187, "grad_norm": 0.8258856534957886, "learning_rate": 3.7699820558315667e-07, "loss": 0.7993, "step": 10470 }, { "epoch": 1.9286377996654895, "grad_norm": 0.7824717164039612, "learning_rate": 3.7446535400114114e-07, "loss": 1.0424, "step": 10471 }, { "epoch": 1.9288236387288609, "grad_norm": 0.6867206692695618, "learning_rate": 3.719410236064147e-07, "loss": 0.978, "step": 10472 }, { "epoch": 1.929009477792232, "grad_norm": 0.8157885670661926, "learning_rate": 3.6942521461489356e-07, "loss": 1.032, "step": 10473 }, { "epoch": 1.929195316855603, "grad_norm": 0.7037714123725891, "learning_rate": 
3.669179272417722e-07, "loss": 0.89, "step": 10474 }, { "epoch": 1.9293811559189742, "grad_norm": 0.7749755382537842, "learning_rate": 3.6441916170149024e-07, "loss": 0.9459, "step": 10475 }, { "epoch": 1.9295669949823453, "grad_norm": 0.6908509135246277, "learning_rate": 3.6192891820777676e-07, "loss": 0.9343, "step": 10476 }, { "epoch": 1.9297528340457164, "grad_norm": 0.711149275302887, "learning_rate": 3.5944719697363907e-07, "loss": 0.9214, "step": 10477 }, { "epoch": 1.9299386731090875, "grad_norm": 0.7808820009231567, "learning_rate": 3.569739982113185e-07, "loss": 1.0175, "step": 10478 }, { "epoch": 1.9301245121724586, "grad_norm": 0.7938927412033081, "learning_rate": 3.5450932213237917e-07, "loss": 1.0029, "step": 10479 }, { "epoch": 1.9303103512358297, "grad_norm": 0.7212532758712769, "learning_rate": 3.520531689476192e-07, "loss": 0.9368, "step": 10480 }, { "epoch": 1.930496190299201, "grad_norm": 0.7534226179122925, "learning_rate": 3.496055388671149e-07, "loss": 0.9919, "step": 10481 }, { "epoch": 1.930682029362572, "grad_norm": 0.6845839619636536, "learning_rate": 3.47166432100221e-07, "loss": 0.8314, "step": 10482 }, { "epoch": 1.9308678684259433, "grad_norm": 0.7597885131835938, "learning_rate": 3.4473584885557075e-07, "loss": 1.0507, "step": 10483 }, { "epoch": 1.9310537074893142, "grad_norm": 0.6701219081878662, "learning_rate": 3.423137893410422e-07, "loss": 0.8769, "step": 10484 }, { "epoch": 1.9312395465526855, "grad_norm": 0.8467074036598206, "learning_rate": 3.399002537637919e-07, "loss": 0.788, "step": 10485 }, { "epoch": 1.9314253856160564, "grad_norm": 0.823009729385376, "learning_rate": 3.3749524233028795e-07, "loss": 0.9358, "step": 10486 }, { "epoch": 1.9316112246794277, "grad_norm": 0.6456196904182434, "learning_rate": 3.3509875524619924e-07, "loss": 0.843, "step": 10487 }, { "epoch": 1.9317970637427986, "grad_norm": 0.7399866580963135, "learning_rate": 3.327107927165174e-07, "loss": 0.9594, "step": 10488 }, { "epoch": 
1.93198290280617, "grad_norm": 0.6855292916297913, "learning_rate": 3.3033135494550117e-07, "loss": 0.8529, "step": 10489 }, { "epoch": 1.932168741869541, "grad_norm": 0.8369126915931702, "learning_rate": 3.2796044213665447e-07, "loss": 0.7957, "step": 10490 }, { "epoch": 1.9323545809329121, "grad_norm": 0.6412609219551086, "learning_rate": 3.2559805449277056e-07, "loss": 0.8879, "step": 10491 }, { "epoch": 1.9325404199962832, "grad_norm": 0.908142626285553, "learning_rate": 3.2324419221591017e-07, "loss": 1.1453, "step": 10492 }, { "epoch": 1.9327262590596543, "grad_norm": 0.6138076186180115, "learning_rate": 3.2089885550740106e-07, "loss": 0.9155, "step": 10493 }, { "epoch": 1.9329120981230254, "grad_norm": 0.7081475853919983, "learning_rate": 3.185620445678494e-07, "loss": 0.8805, "step": 10494 }, { "epoch": 1.9330979371863966, "grad_norm": 0.7727792263031006, "learning_rate": 3.1623375959711767e-07, "loss": 1.0354, "step": 10495 }, { "epoch": 1.9332837762497677, "grad_norm": 0.7606066465377808, "learning_rate": 3.139140007943686e-07, "loss": 1.1053, "step": 10496 }, { "epoch": 1.9334696153131388, "grad_norm": 0.6166344881057739, "learning_rate": 3.1160276835799916e-07, "loss": 0.8652, "step": 10497 }, { "epoch": 1.93365545437651, "grad_norm": 0.6693359613418579, "learning_rate": 3.0930006248570675e-07, "loss": 0.9455, "step": 10498 }, { "epoch": 1.933841293439881, "grad_norm": 0.9655212163925171, "learning_rate": 3.070058833744338e-07, "loss": 1.1453, "step": 10499 }, { "epoch": 1.9340271325032523, "grad_norm": 0.767994225025177, "learning_rate": 3.0472023122041225e-07, "loss": 0.768, "step": 10500 }, { "epoch": 1.9342129715666232, "grad_norm": 0.8077344298362732, "learning_rate": 3.024431062191413e-07, "loss": 1.139, "step": 10501 }, { "epoch": 1.9343988106299945, "grad_norm": 0.7413500547409058, "learning_rate": 3.001745085653873e-07, "loss": 1.0268, "step": 10502 }, { "epoch": 1.9345846496933654, "grad_norm": 1.358109474182129, "learning_rate": 
2.9791443845319514e-07, "loss": 1.4329, "step": 10503 }, { "epoch": 1.9347704887567367, "grad_norm": 0.674161970615387, "learning_rate": 2.956628960758545e-07, "loss": 0.8224, "step": 10504 }, { "epoch": 1.9349563278201076, "grad_norm": 0.9352040886878967, "learning_rate": 2.934198816259559e-07, "loss": 0.8062, "step": 10505 }, { "epoch": 1.935142166883479, "grad_norm": 0.8096712231636047, "learning_rate": 2.9118539529534586e-07, "loss": 0.9979, "step": 10506 }, { "epoch": 1.93532800594685, "grad_norm": 0.708527684211731, "learning_rate": 2.889594372751603e-07, "loss": 1.0437, "step": 10507 }, { "epoch": 1.9355138450102212, "grad_norm": 0.7208974361419678, "learning_rate": 2.867420077557692e-07, "loss": 1.1676, "step": 10508 }, { "epoch": 1.9356996840735923, "grad_norm": 0.8035686016082764, "learning_rate": 2.845331069268431e-07, "loss": 1.0673, "step": 10509 }, { "epoch": 1.9358855231369634, "grad_norm": 0.7524265050888062, "learning_rate": 2.823327349773197e-07, "loss": 0.8933, "step": 10510 }, { "epoch": 1.9360713622003345, "grad_norm": 0.5901182293891907, "learning_rate": 2.8014089209538186e-07, "loss": 0.9226, "step": 10511 }, { "epoch": 1.9362572012637056, "grad_norm": 0.8229266405105591, "learning_rate": 2.779575784685351e-07, "loss": 1.1362, "step": 10512 }, { "epoch": 1.9364430403270767, "grad_norm": 0.8021316528320312, "learning_rate": 2.7578279428349676e-07, "loss": 1.1076, "step": 10513 }, { "epoch": 1.9366288793904478, "grad_norm": 0.6480070948600769, "learning_rate": 2.7361653972628467e-07, "loss": 0.9693, "step": 10514 }, { "epoch": 1.9368147184538191, "grad_norm": 0.6520128846168518, "learning_rate": 2.7145881498218395e-07, "loss": 0.9093, "step": 10515 }, { "epoch": 1.93700055751719, "grad_norm": 0.7292173504829407, "learning_rate": 2.693096202357692e-07, "loss": 1.0411, "step": 10516 }, { "epoch": 1.9371863965805614, "grad_norm": 0.8277835249900818, "learning_rate": 2.6716895567083786e-07, "loss": 0.9591, "step": 10517 }, { "epoch": 
1.9373722356439322, "grad_norm": 0.7037623524665833, "learning_rate": 2.650368214704879e-07, "loss": 0.8856, "step": 10518 }, { "epoch": 1.9375580747073036, "grad_norm": 0.9497572779655457, "learning_rate": 2.629132178171068e-07, "loss": 1.115, "step": 10519 }, { "epoch": 1.9377439137706745, "grad_norm": 0.686296284198761, "learning_rate": 2.607981448923158e-07, "loss": 0.9223, "step": 10520 }, { "epoch": 1.9379297528340458, "grad_norm": 0.7315409779548645, "learning_rate": 2.586916028770259e-07, "loss": 0.7553, "step": 10521 }, { "epoch": 1.938115591897417, "grad_norm": 0.7983646392822266, "learning_rate": 2.56593591951404e-07, "loss": 0.9392, "step": 10522 }, { "epoch": 1.938301430960788, "grad_norm": 0.6945982575416565, "learning_rate": 2.5450411229491765e-07, "loss": 0.5498, "step": 10523 }, { "epoch": 1.938487270024159, "grad_norm": 0.5849335789680481, "learning_rate": 2.524231640862684e-07, "loss": 0.8544, "step": 10524 }, { "epoch": 1.9386731090875302, "grad_norm": 0.568228542804718, "learning_rate": 2.5035074750344724e-07, "loss": 0.6332, "step": 10525 }, { "epoch": 1.9388589481509013, "grad_norm": 0.7203419804573059, "learning_rate": 2.4828686272371226e-07, "loss": 1.0255, "step": 10526 }, { "epoch": 1.9390447872142724, "grad_norm": 0.741192102432251, "learning_rate": 2.46231509923589e-07, "loss": 0.8389, "step": 10527 }, { "epoch": 1.9392306262776435, "grad_norm": 0.6687784194946289, "learning_rate": 2.441846892788924e-07, "loss": 0.9354, "step": 10528 }, { "epoch": 1.9394164653410146, "grad_norm": 0.639785885810852, "learning_rate": 2.421464009646712e-07, "loss": 0.9714, "step": 10529 }, { "epoch": 1.9396023044043857, "grad_norm": 0.8041900992393494, "learning_rate": 2.401166451552861e-07, "loss": 0.8889, "step": 10530 }, { "epoch": 1.9397881434677569, "grad_norm": 0.8194205164909363, "learning_rate": 2.3809542202432034e-07, "loss": 1.0706, "step": 10531 }, { "epoch": 1.9399739825311282, "grad_norm": 0.6742299199104309, "learning_rate": 
2.36082731744669e-07, "loss": 1.085, "step": 10532 }, { "epoch": 1.940159821594499, "grad_norm": 0.9145841598510742, "learning_rate": 2.3407857448848324e-07, "loss": 1.016, "step": 10533 }, { "epoch": 1.9403456606578704, "grad_norm": 0.7550665140151978, "learning_rate": 2.3208295042718154e-07, "loss": 1.0904, "step": 10534 }, { "epoch": 1.9405314997212413, "grad_norm": 0.7545983791351318, "learning_rate": 2.3009585973144955e-07, "loss": 0.8752, "step": 10535 }, { "epoch": 1.9407173387846126, "grad_norm": 0.8406251072883606, "learning_rate": 2.281173025712513e-07, "loss": 0.9596, "step": 10536 }, { "epoch": 1.9409031778479835, "grad_norm": 0.6592369079589844, "learning_rate": 2.2614727911581812e-07, "loss": 0.928, "step": 10537 }, { "epoch": 1.9410890169113548, "grad_norm": 0.7806558012962341, "learning_rate": 2.2418578953363745e-07, "loss": 1.0567, "step": 10538 }, { "epoch": 1.941274855974726, "grad_norm": 0.6726809740066528, "learning_rate": 2.2223283399249728e-07, "loss": 1.0418, "step": 10539 }, { "epoch": 1.941460695038097, "grad_norm": 0.7796017527580261, "learning_rate": 2.2028841265943067e-07, "loss": 1.0132, "step": 10540 }, { "epoch": 1.9416465341014681, "grad_norm": 0.5700896382331848, "learning_rate": 2.1835252570074905e-07, "loss": 0.7862, "step": 10541 }, { "epoch": 1.9418323731648393, "grad_norm": 0.7132280468940735, "learning_rate": 2.1642517328203105e-07, "loss": 0.9926, "step": 10542 }, { "epoch": 1.9420182122282104, "grad_norm": 0.7122735977172852, "learning_rate": 2.145063555681226e-07, "loss": 0.771, "step": 10543 }, { "epoch": 1.9422040512915815, "grad_norm": 0.6890891194343567, "learning_rate": 2.1259607272315907e-07, "loss": 0.9131, "step": 10544 }, { "epoch": 1.9423898903549526, "grad_norm": 0.7969406843185425, "learning_rate": 2.1069432491052088e-07, "loss": 0.7925, "step": 10545 }, { "epoch": 1.9425757294183237, "grad_norm": 0.6591358780860901, "learning_rate": 2.088011122928668e-07, "loss": 0.957, "step": 10546 }, { "epoch": 
1.942761568481695, "grad_norm": 0.7907562851905823, "learning_rate": 2.0691643503213398e-07, "loss": 0.983, "step": 10547 }, { "epoch": 1.942947407545066, "grad_norm": 0.5965955853462219, "learning_rate": 2.050402932895157e-07, "loss": 0.7991, "step": 10548 }, { "epoch": 1.9431332466084372, "grad_norm": 0.6739276051521301, "learning_rate": 2.0317268722549464e-07, "loss": 0.943, "step": 10549 }, { "epoch": 1.943319085671808, "grad_norm": 0.6748188138008118, "learning_rate": 2.0131361699979868e-07, "loss": 0.9612, "step": 10550 }, { "epoch": 1.9435049247351794, "grad_norm": 0.7416154146194458, "learning_rate": 1.99463082771445e-07, "loss": 0.8746, "step": 10551 }, { "epoch": 1.9436907637985503, "grad_norm": 0.7586497068405151, "learning_rate": 1.97621084698707e-07, "loss": 0.8507, "step": 10552 }, { "epoch": 1.9438766028619217, "grad_norm": 0.788551926612854, "learning_rate": 1.9578762293913644e-07, "loss": 0.9915, "step": 10553 }, { "epoch": 1.9440624419252925, "grad_norm": 0.7596179246902466, "learning_rate": 1.9396269764956343e-07, "loss": 0.8605, "step": 10554 }, { "epoch": 1.9442482809886639, "grad_norm": 0.7364116311073303, "learning_rate": 1.9214630898606312e-07, "loss": 0.8314, "step": 10555 }, { "epoch": 1.944434120052035, "grad_norm": 2.9015426635742188, "learning_rate": 1.9033845710400012e-07, "loss": 1.5259, "step": 10556 }, { "epoch": 1.944619959115406, "grad_norm": 0.6181079149246216, "learning_rate": 1.885391421580174e-07, "loss": 0.7021, "step": 10557 }, { "epoch": 1.9448057981787772, "grad_norm": 0.670671284198761, "learning_rate": 1.8674836430199183e-07, "loss": 1.0022, "step": 10558 }, { "epoch": 1.9449916372421483, "grad_norm": 0.7670441269874573, "learning_rate": 1.8496612368911203e-07, "loss": 0.8757, "step": 10559 }, { "epoch": 1.9451774763055194, "grad_norm": 0.7375046014785767, "learning_rate": 1.831924204718005e-07, "loss": 0.8354, "step": 10560 }, { "epoch": 1.9453633153688905, "grad_norm": 0.7295529842376709, "learning_rate": 
1.8142725480178037e-07, "loss": 0.9892, "step": 10561 }, { "epoch": 1.9455491544322616, "grad_norm": 0.7388021945953369, "learning_rate": 1.7967062683001967e-07, "loss": 1.0299, "step": 10562 }, { "epoch": 1.9457349934956327, "grad_norm": 0.5903481245040894, "learning_rate": 1.7792253670676496e-07, "loss": 0.9985, "step": 10563 }, { "epoch": 1.945920832559004, "grad_norm": 0.5798445343971252, "learning_rate": 1.7618298458155213e-07, "loss": 0.5386, "step": 10564 }, { "epoch": 1.946106671622375, "grad_norm": 0.6595876812934875, "learning_rate": 1.744519706031511e-07, "loss": 0.8605, "step": 10565 }, { "epoch": 1.9462925106857463, "grad_norm": 0.5831649303436279, "learning_rate": 1.727294949196212e-07, "loss": 0.8318, "step": 10566 }, { "epoch": 1.9464783497491172, "grad_norm": 0.7746573686599731, "learning_rate": 1.7101555767828904e-07, "loss": 1.048, "step": 10567 }, { "epoch": 1.9466641888124885, "grad_norm": 0.7500365376472473, "learning_rate": 1.6931015902575954e-07, "loss": 1.1274, "step": 10568 }, { "epoch": 1.9468500278758594, "grad_norm": 0.7787057757377625, "learning_rate": 1.676132991079049e-07, "loss": 0.918, "step": 10569 }, { "epoch": 1.9470358669392307, "grad_norm": 0.6510689854621887, "learning_rate": 1.6592497806983133e-07, "loss": 0.7985, "step": 10570 }, { "epoch": 1.9472217060026016, "grad_norm": 0.704802393913269, "learning_rate": 1.6424519605597876e-07, "loss": 0.8693, "step": 10571 }, { "epoch": 1.947407545065973, "grad_norm": 0.7769961953163147, "learning_rate": 1.6257395320999903e-07, "loss": 0.9472, "step": 10572 }, { "epoch": 1.947593384129344, "grad_norm": 0.7937042713165283, "learning_rate": 1.6091124967485548e-07, "loss": 0.7564, "step": 10573 }, { "epoch": 1.9477792231927151, "grad_norm": 0.7213950157165527, "learning_rate": 1.5925708559274556e-07, "loss": 1.1245, "step": 10574 }, { "epoch": 1.9479650622560862, "grad_norm": 0.7393320798873901, "learning_rate": 1.576114611051671e-07, "loss": 1.0531, "step": 10575 }, { "epoch": 
1.9481509013194573, "grad_norm": 0.6105191111564636, "learning_rate": 1.5597437635286316e-07, "loss": 0.8292, "step": 10576 }, { "epoch": 1.9483367403828284, "grad_norm": 0.7339911460876465, "learning_rate": 1.5434583147586612e-07, "loss": 0.9979, "step": 10577 }, { "epoch": 1.9485225794461996, "grad_norm": 0.7356317043304443, "learning_rate": 1.5272582661346458e-07, "loss": 0.9202, "step": 10578 }, { "epoch": 1.9487084185095707, "grad_norm": 0.6233305335044861, "learning_rate": 1.5111436190422546e-07, "loss": 0.7786, "step": 10579 }, { "epoch": 1.9488942575729418, "grad_norm": 0.6189817786216736, "learning_rate": 1.4951143748597186e-07, "loss": 0.7014, "step": 10580 }, { "epoch": 1.949080096636313, "grad_norm": 0.7941526174545288, "learning_rate": 1.479170534958052e-07, "loss": 0.9507, "step": 10581 }, { "epoch": 1.949265935699684, "grad_norm": 0.6824877262115479, "learning_rate": 1.4633121007011642e-07, "loss": 1.1483, "step": 10582 }, { "epoch": 1.9494517747630553, "grad_norm": 0.8881486654281616, "learning_rate": 1.4475390734451922e-07, "loss": 0.9177, "step": 10583 }, { "epoch": 1.9496376138264262, "grad_norm": 0.6779103875160217, "learning_rate": 1.4318514545393903e-07, "loss": 1.0484, "step": 10584 }, { "epoch": 1.9498234528897975, "grad_norm": 0.66915363073349, "learning_rate": 1.4162492453254628e-07, "loss": 0.6762, "step": 10585 }, { "epoch": 1.9500092919531684, "grad_norm": 0.6980171799659729, "learning_rate": 1.400732447138009e-07, "loss": 0.8928, "step": 10586 }, { "epoch": 1.9501951310165397, "grad_norm": 0.7655068039894104, "learning_rate": 1.3853010613041896e-07, "loss": 0.9839, "step": 10587 }, { "epoch": 1.9503809700799108, "grad_norm": 0.7359232902526855, "learning_rate": 1.3699550891438372e-07, "loss": 1.0138, "step": 10588 }, { "epoch": 1.950566809143282, "grad_norm": 0.6629403829574585, "learning_rate": 1.3546945319695693e-07, "loss": 0.8499, "step": 10589 }, { "epoch": 1.950752648206653, "grad_norm": 0.8161008954048157, "learning_rate": 
1.3395193910866744e-07, "loss": 1.0079, "step": 10590 }, { "epoch": 1.9509384872700242, "grad_norm": 0.640409529209137, "learning_rate": 1.3244296677930035e-07, "loss": 0.8732, "step": 10591 }, { "epoch": 1.9511243263333953, "grad_norm": 0.6880654692649841, "learning_rate": 1.309425363379302e-07, "loss": 1.0767, "step": 10592 }, { "epoch": 1.9513101653967664, "grad_norm": 0.8354215621948242, "learning_rate": 1.2945064791288764e-07, "loss": 1.0845, "step": 10593 }, { "epoch": 1.9514960044601375, "grad_norm": 0.6961938738822937, "learning_rate": 1.279673016317817e-07, "loss": 0.9278, "step": 10594 }, { "epoch": 1.9516818435235086, "grad_norm": 1.2774786949157715, "learning_rate": 1.2649249762148874e-07, "loss": 1.3529, "step": 10595 }, { "epoch": 1.95186768258688, "grad_norm": 0.7045060992240906, "learning_rate": 1.250262360081411e-07, "loss": 0.9794, "step": 10596 }, { "epoch": 1.9520535216502508, "grad_norm": 0.7024220824241638, "learning_rate": 1.2356851691716077e-07, "loss": 1.1276, "step": 10597 }, { "epoch": 1.9522393607136221, "grad_norm": 0.6957845091819763, "learning_rate": 1.221193404732257e-07, "loss": 0.8501, "step": 10598 }, { "epoch": 1.952425199776993, "grad_norm": 0.7334973216056824, "learning_rate": 1.2067870680029236e-07, "loss": 1.0198, "step": 10599 }, { "epoch": 1.9526110388403644, "grad_norm": 0.8500604033470154, "learning_rate": 1.1924661602157329e-07, "loss": 0.8819, "step": 10600 }, { "epoch": 1.9527968779037352, "grad_norm": 0.7799627780914307, "learning_rate": 1.1782306825955935e-07, "loss": 1.0159, "step": 10601 }, { "epoch": 1.9529827169671066, "grad_norm": 0.6954518556594849, "learning_rate": 1.1640806363601986e-07, "loss": 0.9773, "step": 10602 }, { "epoch": 1.9531685560304775, "grad_norm": 0.6808642745018005, "learning_rate": 1.150016022719691e-07, "loss": 0.9727, "step": 10603 }, { "epoch": 1.9533543950938488, "grad_norm": 0.7429071068763733, "learning_rate": 1.1360368428771084e-07, "loss": 0.8981, "step": 10604 }, { "epoch": 
1.95354023415722, "grad_norm": 0.7698872089385986, "learning_rate": 1.122143098028161e-07, "loss": 0.8609, "step": 10605 }, { "epoch": 1.953726073220591, "grad_norm": 0.6500043272972107, "learning_rate": 1.1083347893611206e-07, "loss": 0.7626, "step": 10606 }, { "epoch": 1.953911912283962, "grad_norm": 0.7074955105781555, "learning_rate": 1.0946119180571535e-07, "loss": 1.18, "step": 10607 }, { "epoch": 1.9540977513473332, "grad_norm": 0.7872495055198669, "learning_rate": 1.0809744852898762e-07, "loss": 1.2688, "step": 10608 }, { "epoch": 1.9542835904107043, "grad_norm": 0.7019426822662354, "learning_rate": 1.0674224922258003e-07, "loss": 0.9837, "step": 10609 }, { "epoch": 1.9544694294740754, "grad_norm": 0.6763778328895569, "learning_rate": 1.0539559400242205e-07, "loss": 0.8063, "step": 10610 }, { "epoch": 1.9546552685374465, "grad_norm": 1.0150338411331177, "learning_rate": 1.0405748298366602e-07, "loss": 0.9567, "step": 10611 }, { "epoch": 1.9548411076008176, "grad_norm": 0.8957339525222778, "learning_rate": 1.0272791628077594e-07, "loss": 1.0751, "step": 10612 }, { "epoch": 1.955026946664189, "grad_norm": 0.6664177775382996, "learning_rate": 1.0140689400747194e-07, "loss": 1.121, "step": 10613 }, { "epoch": 1.9552127857275599, "grad_norm": 0.6629258394241333, "learning_rate": 1.0009441627675254e-07, "loss": 0.7483, "step": 10614 }, { "epoch": 1.9553986247909312, "grad_norm": 0.8004807233810425, "learning_rate": 9.879048320086126e-08, "loss": 0.9074, "step": 10615 }, { "epoch": 1.955584463854302, "grad_norm": 0.5369228720664978, "learning_rate": 9.749509489134223e-08, "loss": 0.4898, "step": 10616 }, { "epoch": 1.9557703029176734, "grad_norm": 0.7646516561508179, "learning_rate": 9.62082514589846e-08, "loss": 0.8515, "step": 10617 }, { "epoch": 1.9559561419810443, "grad_norm": 0.7015865445137024, "learning_rate": 9.492995301385588e-08, "loss": 0.9903, "step": 10618 }, { "epoch": 1.9561419810444156, "grad_norm": 1.3769493103027344, "learning_rate": 
9.366019966529083e-08, "loss": 1.3636, "step": 10619 }, { "epoch": 1.9563278201077865, "grad_norm": 0.6428963541984558, "learning_rate": 9.239899152189146e-08, "loss": 0.9718, "step": 10620 }, { "epoch": 1.9565136591711578, "grad_norm": 0.6724200248718262, "learning_rate": 9.114632869153816e-08, "loss": 1.0401, "step": 10621 }, { "epoch": 1.956699498234529, "grad_norm": 0.8284958600997925, "learning_rate": 8.990221128137854e-08, "loss": 1.0784, "step": 10622 }, { "epoch": 1.9568853372979, "grad_norm": 0.8442558646202087, "learning_rate": 8.866663939780529e-08, "loss": 0.9158, "step": 10623 }, { "epoch": 1.9570711763612711, "grad_norm": 0.8485099673271179, "learning_rate": 8.743961314651161e-08, "loss": 1.0308, "step": 10624 }, { "epoch": 1.9572570154246423, "grad_norm": 0.7097102999687195, "learning_rate": 8.622113263245801e-08, "loss": 0.8732, "step": 10625 }, { "epoch": 1.9574428544880134, "grad_norm": 0.7192796468734741, "learning_rate": 8.501119795983892e-08, "loss": 0.9586, "step": 10626 }, { "epoch": 1.9576286935513845, "grad_norm": 0.7567042112350464, "learning_rate": 8.380980923217152e-08, "loss": 1.0679, "step": 10627 }, { "epoch": 1.9578145326147556, "grad_norm": 0.7260935306549072, "learning_rate": 8.261696655218476e-08, "loss": 0.9328, "step": 10628 }, { "epoch": 1.9580003716781267, "grad_norm": 0.6080493330955505, "learning_rate": 8.143267002193034e-08, "loss": 0.8633, "step": 10629 }, { "epoch": 1.958186210741498, "grad_norm": 0.6445833444595337, "learning_rate": 8.025691974268279e-08, "loss": 0.9214, "step": 10630 }, { "epoch": 1.958372049804869, "grad_norm": 0.640957236289978, "learning_rate": 7.908971581501723e-08, "loss": 0.8789, "step": 10631 }, { "epoch": 1.9585578888682402, "grad_norm": 0.7331262230873108, "learning_rate": 7.793105833876491e-08, "loss": 1.0869, "step": 10632 }, { "epoch": 1.9587437279316111, "grad_norm": 0.9871970415115356, "learning_rate": 7.678094741303543e-08, "loss": 0.8037, "step": 10633 }, { "epoch": 1.9589295669949824, 
"grad_norm": 0.6546254754066467, "learning_rate": 7.563938313619457e-08, "loss": 0.9437, "step": 10634 }, { "epoch": 1.9591154060583533, "grad_norm": 0.6918784976005554, "learning_rate": 7.450636560587531e-08, "loss": 0.8262, "step": 10635 }, { "epoch": 1.9593012451217247, "grad_norm": 0.6856305599212646, "learning_rate": 7.338189491900016e-08, "loss": 0.7126, "step": 10636 }, { "epoch": 1.9594870841850958, "grad_norm": 0.6383512616157532, "learning_rate": 7.226597117173661e-08, "loss": 0.8585, "step": 10637 }, { "epoch": 1.9596729232484669, "grad_norm": 1.4464577436447144, "learning_rate": 7.115859445954165e-08, "loss": 1.3592, "step": 10638 }, { "epoch": 1.959858762311838, "grad_norm": 0.7710501551628113, "learning_rate": 7.005976487711729e-08, "loss": 1.1809, "step": 10639 }, { "epoch": 1.960044601375209, "grad_norm": 0.6691771149635315, "learning_rate": 6.896948251846613e-08, "loss": 0.9255, "step": 10640 }, { "epoch": 1.9602304404385802, "grad_norm": 0.7149869203567505, "learning_rate": 6.78877474768358e-08, "loss": 0.9247, "step": 10641 }, { "epoch": 1.9604162795019513, "grad_norm": 0.8322938084602356, "learning_rate": 6.681455984474116e-08, "loss": 1.0775, "step": 10642 }, { "epoch": 1.9606021185653224, "grad_norm": 0.7335876822471619, "learning_rate": 6.574991971398658e-08, "loss": 0.7026, "step": 10643 }, { "epoch": 1.9607879576286935, "grad_norm": 0.7936712503433228, "learning_rate": 6.469382717563255e-08, "loss": 0.8784, "step": 10644 }, { "epoch": 1.9609737966920646, "grad_norm": 0.7369674444198608, "learning_rate": 6.36462823199957e-08, "loss": 1.0884, "step": 10645 }, { "epoch": 1.9611596357554357, "grad_norm": 0.8778194189071655, "learning_rate": 6.260728523668213e-08, "loss": 0.8975, "step": 10646 }, { "epoch": 1.961345474818807, "grad_norm": 0.6566877961158752, "learning_rate": 6.15768360145652e-08, "loss": 0.8484, "step": 10647 }, { "epoch": 1.961531313882178, "grad_norm": 0.690839409828186, "learning_rate": 6.055493474177443e-08, "loss": 0.9666, 
"step": 10648 }, { "epoch": 1.9617171529455493, "grad_norm": 0.7134891748428345, "learning_rate": 5.9541581505717646e-08, "loss": 0.722, "step": 10649 }, { "epoch": 1.9619029920089202, "grad_norm": 0.7135552763938904, "learning_rate": 5.853677639306998e-08, "loss": 0.8176, "step": 10650 }, { "epoch": 1.9620888310722915, "grad_norm": 0.7070262432098389, "learning_rate": 5.7540519489773794e-08, "loss": 0.8338, "step": 10651 }, { "epoch": 1.9622746701356624, "grad_norm": 0.7059808373451233, "learning_rate": 5.65528108810498e-08, "loss": 0.7998, "step": 10652 }, { "epoch": 1.9624605091990337, "grad_norm": 0.6993285417556763, "learning_rate": 5.557365065135267e-08, "loss": 0.9615, "step": 10653 }, { "epoch": 1.9626463482624048, "grad_norm": 0.782971203327179, "learning_rate": 5.4603038884459834e-08, "loss": 0.9966, "step": 10654 }, { "epoch": 1.962832187325776, "grad_norm": 0.9900389313697815, "learning_rate": 5.3640975663382664e-08, "loss": 1.1481, "step": 10655 }, { "epoch": 1.963018026389147, "grad_norm": 0.7023151516914368, "learning_rate": 5.268746107039979e-08, "loss": 1.0139, "step": 10656 }, { "epoch": 1.9632038654525181, "grad_norm": 0.6240903735160828, "learning_rate": 5.17424951870793e-08, "loss": 1.055, "step": 10657 }, { "epoch": 1.9633897045158892, "grad_norm": 0.7413076162338257, "learning_rate": 5.0806078094234325e-08, "loss": 0.8608, "step": 10658 }, { "epoch": 1.9635755435792603, "grad_norm": 0.6608368158340454, "learning_rate": 4.987820987195635e-08, "loss": 0.9219, "step": 10659 }, { "epoch": 1.9637613826426314, "grad_norm": 1.0145299434661865, "learning_rate": 4.895889059962633e-08, "loss": 1.0964, "step": 10660 }, { "epoch": 1.9639472217060026, "grad_norm": 0.6843851208686829, "learning_rate": 4.804812035585915e-08, "loss": 0.9641, "step": 10661 }, { "epoch": 1.9641330607693739, "grad_norm": 0.665683925151825, "learning_rate": 4.714589921857027e-08, "loss": 0.8979, "step": 10662 }, { "epoch": 1.9643188998327448, "grad_norm": 1.0688481330871582, 
"learning_rate": 4.625222726490908e-08, "loss": 1.2587, "step": 10663 }, { "epoch": 1.964504738896116, "grad_norm": 0.6237012147903442, "learning_rate": 4.5367104571325535e-08, "loss": 0.6693, "step": 10664 }, { "epoch": 1.964690577959487, "grad_norm": 0.6066529750823975, "learning_rate": 4.4490531213525754e-08, "loss": 0.7855, "step": 10665 }, { "epoch": 1.9648764170228583, "grad_norm": 0.761724054813385, "learning_rate": 4.3622507266483095e-08, "loss": 0.7746, "step": 10666 }, { "epoch": 1.9650622560862292, "grad_norm": 0.7549482583999634, "learning_rate": 4.2763032804449267e-08, "loss": 1.0191, "step": 10667 }, { "epoch": 1.9652480951496005, "grad_norm": 0.8386363387107849, "learning_rate": 4.191210790092104e-08, "loss": 0.8702, "step": 10668 }, { "epoch": 1.9654339342129714, "grad_norm": 0.7027541995048523, "learning_rate": 4.106973262868463e-08, "loss": 1.0075, "step": 10669 }, { "epoch": 1.9656197732763427, "grad_norm": 0.6551834940910339, "learning_rate": 4.023590705979352e-08, "loss": 0.8276, "step": 10670 }, { "epoch": 1.9658056123397138, "grad_norm": 0.6328686475753784, "learning_rate": 3.941063126557954e-08, "loss": 0.9006, "step": 10671 }, { "epoch": 1.965991451403085, "grad_norm": 0.6727689504623413, "learning_rate": 3.859390531660845e-08, "loss": 0.9283, "step": 10672 }, { "epoch": 1.966177290466456, "grad_norm": 0.6957010626792908, "learning_rate": 3.7785729282746594e-08, "loss": 0.9045, "step": 10673 }, { "epoch": 1.9663631295298272, "grad_norm": 0.6626486778259277, "learning_rate": 3.6986103233116465e-08, "loss": 0.9211, "step": 10674 }, { "epoch": 1.9665489685931983, "grad_norm": 0.6777610182762146, "learning_rate": 3.619502723610779e-08, "loss": 0.9478, "step": 10675 }, { "epoch": 1.9667348076565694, "grad_norm": 0.6472916603088379, "learning_rate": 3.5412501359399776e-08, "loss": 0.9344, "step": 10676 }, { "epoch": 1.9669206467199405, "grad_norm": 0.7133098840713501, "learning_rate": 3.4638525669905554e-08, "loss": 1.0665, "step": 10677 }, { 
"epoch": 1.9671064857833116, "grad_norm": 0.703316867351532, "learning_rate": 3.387310023383883e-08, "loss": 0.9378, "step": 10678 }, { "epoch": 1.967292324846683, "grad_norm": 0.777379035949707, "learning_rate": 3.311622511665835e-08, "loss": 0.8881, "step": 10679 }, { "epoch": 1.9674781639100538, "grad_norm": 0.8485547304153442, "learning_rate": 3.2367900383101227e-08, "loss": 1.0767, "step": 10680 }, { "epoch": 1.9676640029734251, "grad_norm": 0.7185204029083252, "learning_rate": 3.162812609717181e-08, "loss": 0.8991, "step": 10681 }, { "epoch": 1.967849842036796, "grad_norm": 0.774558424949646, "learning_rate": 3.089690232215281e-08, "loss": 0.7413, "step": 10682 }, { "epoch": 1.9680356811001674, "grad_norm": 0.6125521659851074, "learning_rate": 3.01742291205831e-08, "loss": 0.8836, "step": 10683 }, { "epoch": 1.9682215201635382, "grad_norm": 0.6851577162742615, "learning_rate": 2.9460106554279886e-08, "loss": 0.7599, "step": 10684 }, { "epoch": 1.9684073592269096, "grad_norm": 0.8051230311393738, "learning_rate": 2.8754534684316547e-08, "loss": 1.0482, "step": 10685 }, { "epoch": 1.9685931982902805, "grad_norm": 0.737567126750946, "learning_rate": 2.8057513571044803e-08, "loss": 0.9012, "step": 10686 }, { "epoch": 1.9687790373536518, "grad_norm": 0.7131343483924866, "learning_rate": 2.7369043274072524e-08, "loss": 1.1263, "step": 10687 }, { "epoch": 1.968964876417023, "grad_norm": 0.74207603931427, "learning_rate": 2.668912385230815e-08, "loss": 0.8822, "step": 10688 }, { "epoch": 1.969150715480394, "grad_norm": 0.6574434638023376, "learning_rate": 2.6017755363882955e-08, "loss": 0.6997, "step": 10689 }, { "epoch": 1.969336554543765, "grad_norm": 0.8549362421035767, "learning_rate": 2.535493786623988e-08, "loss": 1.0949, "step": 10690 }, { "epoch": 1.9695223936071362, "grad_norm": 0.9490185976028442, "learning_rate": 2.470067141605581e-08, "loss": 0.9981, "step": 10691 }, { "epoch": 1.9697082326705073, "grad_norm": 0.8517225384712219, "learning_rate": 
2.4054956069308188e-08, "loss": 1.2965, "step": 10692 }, { "epoch": 1.9698940717338784, "grad_norm": 0.638239324092865, "learning_rate": 2.3417791881208405e-08, "loss": 0.7109, "step": 10693 }, { "epoch": 1.9700799107972495, "grad_norm": 0.6345565319061279, "learning_rate": 2.2789178906257313e-08, "loss": 0.9631, "step": 10694 }, { "epoch": 1.9702657498606206, "grad_norm": 0.7187051177024841, "learning_rate": 2.2169117198234114e-08, "loss": 0.9652, "step": 10695 }, { "epoch": 1.970451588923992, "grad_norm": 0.6116509437561035, "learning_rate": 2.1557606810174157e-08, "loss": 0.9015, "step": 10696 }, { "epoch": 1.9706374279873629, "grad_norm": 0.8735626339912415, "learning_rate": 2.0954647794357852e-08, "loss": 0.9417, "step": 10697 }, { "epoch": 1.9708232670507342, "grad_norm": 0.6325920224189758, "learning_rate": 2.0360240202388357e-08, "loss": 1.0079, "step": 10698 }, { "epoch": 1.971009106114105, "grad_norm": 0.687492847442627, "learning_rate": 1.9774384085080587e-08, "loss": 1.0389, "step": 10699 }, { "epoch": 1.9711949451774764, "grad_norm": 0.971679151058197, "learning_rate": 1.919707949256111e-08, "loss": 1.0921, "step": 10700 }, { "epoch": 1.9713807842408473, "grad_norm": 0.7148098945617676, "learning_rate": 1.862832647420154e-08, "loss": 0.7552, "step": 10701 }, { "epoch": 1.9715666233042186, "grad_norm": 0.9046821594238281, "learning_rate": 1.8068125078651853e-08, "loss": 0.9696, "step": 10702 }, { "epoch": 1.9717524623675897, "grad_norm": 0.7230017781257629, "learning_rate": 1.7516475353829275e-08, "loss": 0.9414, "step": 10703 }, { "epoch": 1.9719383014309608, "grad_norm": 0.7333557605743408, "learning_rate": 1.697337734689608e-08, "loss": 0.874, "step": 10704 }, { "epoch": 1.972124140494332, "grad_norm": 0.6274997591972351, "learning_rate": 1.6438831104337305e-08, "loss": 0.6356, "step": 10705 }, { "epoch": 1.972309979557703, "grad_norm": 0.7487014532089233, "learning_rate": 1.5912836671849728e-08, "loss": 1.1066, "step": 10706 }, { "epoch": 
1.9724958186210741, "grad_norm": 0.7841073274612427, "learning_rate": 1.5395394094441794e-08, "loss": 1.0279, "step": 10707 }, { "epoch": 1.9726816576844453, "grad_norm": 0.6704834699630737, "learning_rate": 1.488650341635589e-08, "loss": 0.7724, "step": 10708 }, { "epoch": 1.9728674967478164, "grad_norm": 0.7334095239639282, "learning_rate": 1.4386164681112757e-08, "loss": 1.0464, "step": 10709 }, { "epoch": 1.9730533358111875, "grad_norm": 0.6330432891845703, "learning_rate": 1.3894377931533698e-08, "loss": 0.8495, "step": 10710 }, { "epoch": 1.9732391748745586, "grad_norm": 0.7090950608253479, "learning_rate": 1.3411143209651755e-08, "loss": 1.1233, "step": 10711 }, { "epoch": 1.9734250139379297, "grad_norm": 0.6479860544204712, "learning_rate": 1.2936460556822737e-08, "loss": 0.878, "step": 10712 }, { "epoch": 1.973610853001301, "grad_norm": 0.7400798201560974, "learning_rate": 1.2470330013647502e-08, "loss": 0.8981, "step": 10713 }, { "epoch": 1.973796692064672, "grad_norm": 0.7641975283622742, "learning_rate": 1.2012751619971952e-08, "loss": 1.0427, "step": 10714 }, { "epoch": 1.9739825311280432, "grad_norm": 0.7831242680549622, "learning_rate": 1.1563725414953652e-08, "loss": 1.0748, "step": 10715 }, { "epoch": 1.9741683701914141, "grad_norm": 0.6629639863967896, "learning_rate": 1.1123251436995218e-08, "loss": 0.7366, "step": 10716 }, { "epoch": 1.9743542092547854, "grad_norm": 0.6962116956710815, "learning_rate": 1.0691329723766519e-08, "loss": 1.065, "step": 10717 }, { "epoch": 1.9745400483181563, "grad_norm": 0.6824777722358704, "learning_rate": 1.0267960312215774e-08, "loss": 1.0652, "step": 10718 }, { "epoch": 1.9747258873815277, "grad_norm": 0.7710967659950256, "learning_rate": 9.853143238547358e-09, "loss": 1.0013, "step": 10719 }, { "epoch": 1.9749117264448988, "grad_norm": 0.8690904378890991, "learning_rate": 9.4468785382551e-09, "loss": 1.057, "step": 10720 }, { "epoch": 1.9750975655082699, "grad_norm": 0.8146247863769531, "learning_rate": 
9.049166246077878e-09, "loss": 1.0114, "step": 10721 }, { "epoch": 1.975283404571641, "grad_norm": 0.6535385251045227, "learning_rate": 8.660006396032928e-09, "loss": 0.9387, "step": 10722 }, { "epoch": 1.975469243635012, "grad_norm": 0.5985758304595947, "learning_rate": 8.279399021404732e-09, "loss": 0.8463, "step": 10723 }, { "epoch": 1.9756550826983832, "grad_norm": 0.9042931199073792, "learning_rate": 7.907344154756135e-09, "loss": 0.66, "step": 10724 }, { "epoch": 1.9758409217617543, "grad_norm": 0.7004178762435913, "learning_rate": 7.543841827895025e-09, "loss": 0.8676, "step": 10725 }, { "epoch": 1.9760267608251254, "grad_norm": 2.0802855491638184, "learning_rate": 7.188892071929854e-09, "loss": 1.6243, "step": 10726 }, { "epoch": 1.9762125998884965, "grad_norm": 0.8206648826599121, "learning_rate": 6.8424949172030124e-09, "loss": 1.0054, "step": 10727 }, { "epoch": 1.9763984389518678, "grad_norm": 0.6644618511199951, "learning_rate": 6.504650393357459e-09, "loss": 1.0766, "step": 10728 }, { "epoch": 1.9765842780152387, "grad_norm": 0.5854812860488892, "learning_rate": 6.1753585292811945e-09, "loss": 1.0447, "step": 10729 }, { "epoch": 1.97677011707861, "grad_norm": 0.6894762516021729, "learning_rate": 5.854619353140578e-09, "loss": 0.7763, "step": 10730 }, { "epoch": 1.976955956141981, "grad_norm": 0.6703664064407349, "learning_rate": 5.542432892369221e-09, "loss": 0.9355, "step": 10731 }, { "epoch": 1.9771417952053523, "grad_norm": 0.720122218132019, "learning_rate": 5.2387991736790876e-09, "loss": 1.1274, "step": 10732 }, { "epoch": 1.9773276342687232, "grad_norm": 0.7998895049095154, "learning_rate": 4.943718223016092e-09, "loss": 1.2719, "step": 10733 }, { "epoch": 1.9775134733320945, "grad_norm": 0.7410762310028076, "learning_rate": 4.657190065648909e-09, "loss": 1.0627, "step": 10734 }, { "epoch": 1.9776993123954654, "grad_norm": 0.842653751373291, "learning_rate": 4.379214726069058e-09, "loss": 0.9684, "step": 10735 }, { "epoch": 1.9778851514588367, 
"grad_norm": 0.6367177963256836, "learning_rate": 4.1097922280464165e-09, "loss": 0.9476, "step": 10736 }, { "epoch": 1.9780709905222078, "grad_norm": 0.6847108006477356, "learning_rate": 3.848922594640314e-09, "loss": 0.9157, "step": 10737 }, { "epoch": 1.978256829585579, "grad_norm": 0.7617043852806091, "learning_rate": 3.59660584814403e-09, "loss": 0.9627, "step": 10738 }, { "epoch": 1.97844266864895, "grad_norm": 0.6675279140472412, "learning_rate": 3.352842010162505e-09, "loss": 0.9792, "step": 10739 }, { "epoch": 1.9786285077123211, "grad_norm": 0.7527198195457458, "learning_rate": 3.117631101534624e-09, "loss": 0.942, "step": 10740 }, { "epoch": 1.9788143467756922, "grad_norm": 0.8000282049179077, "learning_rate": 2.8909731423665264e-09, "loss": 0.9577, "step": 10741 }, { "epoch": 1.9790001858390633, "grad_norm": 0.6555179953575134, "learning_rate": 2.672868152064911e-09, "loss": 0.9841, "step": 10742 }, { "epoch": 1.9791860249024344, "grad_norm": 0.753463089466095, "learning_rate": 2.4633161492704227e-09, "loss": 0.9208, "step": 10743 }, { "epoch": 1.9793718639658056, "grad_norm": 0.8542004227638245, "learning_rate": 2.2623171519131625e-09, "loss": 1.1224, "step": 10744 }, { "epoch": 1.9795577030291769, "grad_norm": 0.7783091068267822, "learning_rate": 2.0698711771793835e-09, "loss": 0.7907, "step": 10745 }, { "epoch": 1.9797435420925478, "grad_norm": 0.6484434604644775, "learning_rate": 1.8859782415447947e-09, "loss": 1.1133, "step": 10746 }, { "epoch": 1.979929381155919, "grad_norm": 1.1288589239120483, "learning_rate": 1.7106383607190523e-09, "loss": 1.3652, "step": 10747 }, { "epoch": 1.98011522021929, "grad_norm": 0.7378808856010437, "learning_rate": 1.5438515497123718e-09, "loss": 0.7895, "step": 10748 }, { "epoch": 1.9803010592826613, "grad_norm": 0.706842839717865, "learning_rate": 1.3856178227800165e-09, "loss": 0.8849, "step": 10749 }, { "epoch": 1.9804868983460322, "grad_norm": 0.6956677436828613, "learning_rate": 1.2359371934556052e-09, "loss": 
1.0329, "step": 10750 }, { "epoch": 1.9806727374094035, "grad_norm": 0.7249075174331665, "learning_rate": 1.0948096745622138e-09, "loss": 0.9499, "step": 10751 }, { "epoch": 1.9808585764727744, "grad_norm": 0.7836699485778809, "learning_rate": 9.622352781457622e-10, "loss": 1.2175, "step": 10752 }, { "epoch": 1.9810444155361457, "grad_norm": 0.7368754148483276, "learning_rate": 8.382140155527296e-10, "loss": 1.0044, "step": 10753 }, { "epoch": 1.9812302545995168, "grad_norm": 0.6586326360702515, "learning_rate": 7.227458973968482e-10, "loss": 1.1634, "step": 10754 }, { "epoch": 1.981416093662888, "grad_norm": 0.7742688655853271, "learning_rate": 6.158309335591028e-10, "loss": 0.8664, "step": 10755 }, { "epoch": 1.981601932726259, "grad_norm": 0.703048050403595, "learning_rate": 5.174691331655268e-10, "loss": 0.8754, "step": 10756 }, { "epoch": 1.9817877717896302, "grad_norm": 0.6949231028556824, "learning_rate": 4.276605046427129e-10, "loss": 1.0135, "step": 10757 }, { "epoch": 1.9819736108530013, "grad_norm": 0.8362816572189331, "learning_rate": 3.464050556734044e-10, "loss": 0.9027, "step": 10758 }, { "epoch": 1.9821594499163724, "grad_norm": 0.6021760106086731, "learning_rate": 2.737027932075975e-10, "loss": 0.9221, "step": 10759 }, { "epoch": 1.9823452889797435, "grad_norm": 0.7097657322883606, "learning_rate": 2.0955372345143886e-10, "loss": 0.945, "step": 10760 }, { "epoch": 1.9825311280431146, "grad_norm": 0.713502824306488, "learning_rate": 1.5395785190053247e-10, "loss": 0.7171, "step": 10761 }, { "epoch": 1.982716967106486, "grad_norm": 0.7117783427238464, "learning_rate": 1.0691518331773509e-10, "loss": 0.8693, "step": 10762 }, { "epoch": 2.0001858390633713, "grad_norm": 0.6326022744178772, "learning_rate": 0.00010020453062911643, "loss": 0.9353, "step": 10763 }, { "epoch": 2.000371678126742, "grad_norm": 0.615979790687561, "learning_rate": 0.0001001899213167062, "loss": 0.941, "step": 10764 }, { "epoch": 2.0005575171901135, "grad_norm": 
0.6112083196640015, "learning_rate": 0.00010017531200024244, "loss": 0.8806, "step": 10765 }, { "epoch": 2.0007433562534844, "grad_norm": 0.7036938071250916, "learning_rate": 0.00010016070268003695, "loss": 0.7532, "step": 10766 }, { "epoch": 2.0009291953168558, "grad_norm": 0.6403826475143433, "learning_rate": 0.00010014609335640152, "loss": 0.6336, "step": 10767 }, { "epoch": 2.0011150343802266, "grad_norm": 0.6601222157478333, "learning_rate": 0.00010013148402964798, "loss": 0.8791, "step": 10768 }, { "epoch": 2.0011150343802266, "eval_loss": 1.0075899362564087, "eval_runtime": 23.187, "eval_samples_per_second": 47.095, "eval_steps_per_second": 23.548, "step": 10768 }, { "epoch": 2.001300873443598, "grad_norm": 0.7029190063476562, "learning_rate": 0.00010011687470008817, "loss": 0.8273, "step": 10769 }, { "epoch": 2.001486712506969, "grad_norm": 0.630573570728302, "learning_rate": 0.00010010226536803385, "loss": 0.9447, "step": 10770 }, { "epoch": 2.00167255157034, "grad_norm": 0.6786049008369446, "learning_rate": 0.00010008765603379685, "loss": 0.9115, "step": 10771 }, { "epoch": 2.001858390633711, "grad_norm": 0.680065929889679, "learning_rate": 0.00010007304669768897, "loss": 0.8726, "step": 10772 }, { "epoch": 2.0020442296970824, "grad_norm": 0.6689335107803345, "learning_rate": 0.00010005843736002206, "loss": 0.9695, "step": 10773 }, { "epoch": 2.0022300687604533, "grad_norm": 0.7676763534545898, "learning_rate": 0.00010004382802110787, "loss": 1.04, "step": 10774 }, { "epoch": 2.0024159078238246, "grad_norm": 0.7226296663284302, "learning_rate": 0.00010002921868125827, "loss": 0.6713, "step": 10775 }, { "epoch": 2.0026017468871955, "grad_norm": 0.6010726094245911, "learning_rate": 0.00010001460934078506, "loss": 0.6302, "step": 10776 }, { "epoch": 2.002787585950567, "grad_norm": 0.8062921762466431, "learning_rate": 0.0001, "loss": 0.9336, "step": 10777 }, { "epoch": 2.0029734250139377, "grad_norm": 0.8343218564987183, "learning_rate": 
9.998539065921496e-05, "loss": 1.0856, "step": 10778 }, { "epoch": 2.003159264077309, "grad_norm": 0.6121909618377686, "learning_rate": 9.997078131874175e-05, "loss": 0.9131, "step": 10779 }, { "epoch": 2.0033451031406804, "grad_norm": 0.7555389404296875, "learning_rate": 9.995617197889215e-05, "loss": 1.0778, "step": 10780 }, { "epoch": 2.0035309422040513, "grad_norm": 0.6045559048652649, "learning_rate": 9.9941562639978e-05, "loss": 0.7465, "step": 10781 }, { "epoch": 2.0037167812674226, "grad_norm": 0.7431402802467346, "learning_rate": 9.992695330231105e-05, "loss": 0.9095, "step": 10782 }, { "epoch": 2.0039026203307935, "grad_norm": 0.7544237375259399, "learning_rate": 9.991234396620316e-05, "loss": 0.9414, "step": 10783 }, { "epoch": 2.004088459394165, "grad_norm": 0.6956236362457275, "learning_rate": 9.989773463196618e-05, "loss": 1.1008, "step": 10784 }, { "epoch": 2.0042742984575357, "grad_norm": 0.7116724252700806, "learning_rate": 9.988312529991185e-05, "loss": 0.8229, "step": 10785 }, { "epoch": 2.004460137520907, "grad_norm": 0.7476662993431091, "learning_rate": 9.986851597035203e-05, "loss": 0.7782, "step": 10786 }, { "epoch": 2.004645976584278, "grad_norm": 0.7127363681793213, "learning_rate": 9.985390664359849e-05, "loss": 1.1201, "step": 10787 }, { "epoch": 2.0048318156476492, "grad_norm": 0.6719833612442017, "learning_rate": 9.983929731996306e-05, "loss": 0.7797, "step": 10788 }, { "epoch": 2.00501765471102, "grad_norm": 0.7116879820823669, "learning_rate": 9.98246879997576e-05, "loss": 0.7709, "step": 10789 }, { "epoch": 2.0052034937743914, "grad_norm": 0.7485224008560181, "learning_rate": 9.981007868329382e-05, "loss": 1.1297, "step": 10790 }, { "epoch": 2.0053893328377623, "grad_norm": 0.7865563631057739, "learning_rate": 9.979546937088362e-05, "loss": 1.0207, "step": 10791 }, { "epoch": 2.0055751719011337, "grad_norm": 0.65020352602005, "learning_rate": 9.978086006283875e-05, "loss": 0.6663, "step": 10792 }, { "epoch": 2.0057610109645045, 
"grad_norm": 0.6295920014381409, "learning_rate": 9.976625075947103e-05, "loss": 0.6556, "step": 10793 }, { "epoch": 2.005946850027876, "grad_norm": 0.8350384831428528, "learning_rate": 9.975164146109232e-05, "loss": 0.9925, "step": 10794 }, { "epoch": 2.006132689091247, "grad_norm": 0.7139639854431152, "learning_rate": 9.973703216801437e-05, "loss": 0.7801, "step": 10795 }, { "epoch": 2.006318528154618, "grad_norm": 0.8106564283370972, "learning_rate": 9.972242288054906e-05, "loss": 0.962, "step": 10796 }, { "epoch": 2.0065043672179894, "grad_norm": 0.7551612257957458, "learning_rate": 9.970781359900812e-05, "loss": 0.8711, "step": 10797 }, { "epoch": 2.0066902062813603, "grad_norm": 0.6887959837913513, "learning_rate": 9.96932043237034e-05, "loss": 0.8263, "step": 10798 }, { "epoch": 2.0068760453447316, "grad_norm": 0.7183730006217957, "learning_rate": 9.967859505494673e-05, "loss": 0.7888, "step": 10799 }, { "epoch": 2.0070618844081025, "grad_norm": 0.7318005561828613, "learning_rate": 9.966398579304989e-05, "loss": 0.9993, "step": 10800 }, { "epoch": 2.007247723471474, "grad_norm": 0.9438154101371765, "learning_rate": 9.964937653832468e-05, "loss": 1.1902, "step": 10801 }, { "epoch": 2.0074335625348447, "grad_norm": 0.8429304361343384, "learning_rate": 9.963476729108294e-05, "loss": 0.9782, "step": 10802 }, { "epoch": 2.007619401598216, "grad_norm": 0.7255978584289551, "learning_rate": 9.962015805163647e-05, "loss": 0.7803, "step": 10803 }, { "epoch": 2.007805240661587, "grad_norm": 0.7246456742286682, "learning_rate": 9.960554882029705e-05, "loss": 0.8352, "step": 10804 }, { "epoch": 2.0079910797249583, "grad_norm": 0.6792916059494019, "learning_rate": 9.959093959737652e-05, "loss": 0.795, "step": 10805 }, { "epoch": 2.008176918788329, "grad_norm": 0.8336760997772217, "learning_rate": 9.95763303831867e-05, "loss": 0.9368, "step": 10806 }, { "epoch": 2.0083627578517005, "grad_norm": 0.7357673645019531, "learning_rate": 9.956172117803937e-05, "loss": 1.0218, 
"step": 10807 }, { "epoch": 2.0085485969150714, "grad_norm": 0.9112713932991028, "learning_rate": 9.954711198224634e-05, "loss": 1.193, "step": 10808 }, { "epoch": 2.0087344359784427, "grad_norm": 0.8156577348709106, "learning_rate": 9.953250279611946e-05, "loss": 0.9637, "step": 10809 }, { "epoch": 2.0089202750418136, "grad_norm": 0.6362747550010681, "learning_rate": 9.951789361997048e-05, "loss": 0.717, "step": 10810 }, { "epoch": 2.009106114105185, "grad_norm": 0.6513655781745911, "learning_rate": 9.950328445411125e-05, "loss": 0.8537, "step": 10811 }, { "epoch": 2.0092919531685562, "grad_norm": 0.65449059009552, "learning_rate": 9.948867529885355e-05, "loss": 0.9545, "step": 10812 }, { "epoch": 2.009477792231927, "grad_norm": 0.8565689921379089, "learning_rate": 9.94740661545092e-05, "loss": 1.092, "step": 10813 }, { "epoch": 2.0096636312952985, "grad_norm": 0.6790142059326172, "learning_rate": 9.945945702138998e-05, "loss": 0.9912, "step": 10814 }, { "epoch": 2.0098494703586693, "grad_norm": 0.6726483106613159, "learning_rate": 9.944484789980773e-05, "loss": 0.7884, "step": 10815 }, { "epoch": 2.0100353094220407, "grad_norm": 0.7265700697898865, "learning_rate": 9.943023879007428e-05, "loss": 0.8606, "step": 10816 }, { "epoch": 2.0102211484854116, "grad_norm": 0.7459362149238586, "learning_rate": 9.941562969250138e-05, "loss": 0.8296, "step": 10817 }, { "epoch": 2.010406987548783, "grad_norm": 0.6986656188964844, "learning_rate": 9.940102060740087e-05, "loss": 0.8882, "step": 10818 }, { "epoch": 2.0105928266121538, "grad_norm": 0.8597057461738586, "learning_rate": 9.938641153508456e-05, "loss": 1.0597, "step": 10819 }, { "epoch": 2.010778665675525, "grad_norm": 0.6738445162773132, "learning_rate": 9.937180247586422e-05, "loss": 0.7268, "step": 10820 }, { "epoch": 2.010964504738896, "grad_norm": 0.6111264228820801, "learning_rate": 9.935719343005171e-05, "loss": 0.6557, "step": 10821 }, { "epoch": 2.0111503438022673, "grad_norm": 0.7664129734039307, 
"learning_rate": 9.934258439795879e-05, "loss": 0.8225, "step": 10822 }, { "epoch": 2.011336182865638, "grad_norm": 0.7452059984207153, "learning_rate": 9.932797537989727e-05, "loss": 0.8836, "step": 10823 }, { "epoch": 2.0115220219290095, "grad_norm": 0.6665897965431213, "learning_rate": 9.931336637617896e-05, "loss": 0.8776, "step": 10824 }, { "epoch": 2.0117078609923804, "grad_norm": 0.7272871732711792, "learning_rate": 9.929875738711564e-05, "loss": 0.7879, "step": 10825 }, { "epoch": 2.0118937000557517, "grad_norm": 0.6861128807067871, "learning_rate": 9.928414841301919e-05, "loss": 0.9416, "step": 10826 }, { "epoch": 2.0120795391191226, "grad_norm": 0.7545861005783081, "learning_rate": 9.926953945420132e-05, "loss": 1.0873, "step": 10827 }, { "epoch": 2.012265378182494, "grad_norm": 0.7821633219718933, "learning_rate": 9.925493051097389e-05, "loss": 0.9366, "step": 10828 }, { "epoch": 2.0124512172458653, "grad_norm": 0.6889685392379761, "learning_rate": 9.924032158364873e-05, "loss": 0.8754, "step": 10829 }, { "epoch": 2.012637056309236, "grad_norm": 1.621429443359375, "learning_rate": 9.922571267253755e-05, "loss": 1.0026, "step": 10830 }, { "epoch": 2.0128228953726075, "grad_norm": 0.7398176193237305, "learning_rate": 9.921110377795225e-05, "loss": 0.8988, "step": 10831 }, { "epoch": 2.0130087344359784, "grad_norm": 0.6716428995132446, "learning_rate": 9.919649490020455e-05, "loss": 0.9864, "step": 10832 }, { "epoch": 2.0131945734993497, "grad_norm": 0.7361804246902466, "learning_rate": 9.918188603960632e-05, "loss": 1.0162, "step": 10833 }, { "epoch": 2.0133804125627206, "grad_norm": 0.8350973725318909, "learning_rate": 9.91672771964693e-05, "loss": 0.9043, "step": 10834 }, { "epoch": 2.013566251626092, "grad_norm": 0.869459867477417, "learning_rate": 9.915266837110529e-05, "loss": 0.9351, "step": 10835 }, { "epoch": 2.013752090689463, "grad_norm": 0.6775286793708801, "learning_rate": 9.913805956382617e-05, "loss": 0.7345, "step": 10836 }, { "epoch": 
2.013937929752834, "grad_norm": 0.7324315309524536, "learning_rate": 9.912345077494368e-05, "loss": 0.8199, "step": 10837 }, { "epoch": 2.014123768816205, "grad_norm": 0.8208001852035522, "learning_rate": 9.91088420047696e-05, "loss": 1.1714, "step": 10838 }, { "epoch": 2.0143096078795764, "grad_norm": 0.8033028244972229, "learning_rate": 9.909423325361581e-05, "loss": 0.9778, "step": 10839 }, { "epoch": 2.0144954469429472, "grad_norm": 0.7496377825737, "learning_rate": 9.907962452179403e-05, "loss": 0.718, "step": 10840 }, { "epoch": 2.0146812860063186, "grad_norm": 0.7925049662590027, "learning_rate": 9.90650158096161e-05, "loss": 1.1434, "step": 10841 }, { "epoch": 2.0148671250696895, "grad_norm": 0.8028472065925598, "learning_rate": 9.905040711739377e-05, "loss": 0.8863, "step": 10842 }, { "epoch": 2.015052964133061, "grad_norm": 0.8392669558525085, "learning_rate": 9.90357984454389e-05, "loss": 1.0166, "step": 10843 }, { "epoch": 2.0152388031964317, "grad_norm": 0.7445780634880066, "learning_rate": 9.902118979406324e-05, "loss": 0.8275, "step": 10844 }, { "epoch": 2.015424642259803, "grad_norm": 0.638634204864502, "learning_rate": 9.900658116357863e-05, "loss": 0.9946, "step": 10845 }, { "epoch": 2.0156104813231743, "grad_norm": 0.776951014995575, "learning_rate": 9.899197255429678e-05, "loss": 0.7251, "step": 10846 }, { "epoch": 2.015796320386545, "grad_norm": 0.7218950986862183, "learning_rate": 9.897736396652959e-05, "loss": 0.875, "step": 10847 }, { "epoch": 2.0159821594499165, "grad_norm": 0.7573806047439575, "learning_rate": 9.896275540058881e-05, "loss": 1.0281, "step": 10848 }, { "epoch": 2.0161679985132874, "grad_norm": 0.7256792783737183, "learning_rate": 9.894814685678624e-05, "loss": 0.8202, "step": 10849 }, { "epoch": 2.0163538375766588, "grad_norm": 0.7149482369422913, "learning_rate": 9.893353833543365e-05, "loss": 0.7834, "step": 10850 }, { "epoch": 2.0165396766400296, "grad_norm": 0.6294644474983215, "learning_rate": 9.891892983684289e-05, 
"loss": 0.7256, "step": 10851 }, { "epoch": 2.016725515703401, "grad_norm": 0.7436536550521851, "learning_rate": 9.89043213613257e-05, "loss": 0.8414, "step": 10852 }, { "epoch": 2.016911354766772, "grad_norm": 0.708697497844696, "learning_rate": 9.888971290919389e-05, "loss": 0.9798, "step": 10853 }, { "epoch": 2.017097193830143, "grad_norm": 0.6918801665306091, "learning_rate": 9.887510448075924e-05, "loss": 0.7837, "step": 10854 }, { "epoch": 2.017283032893514, "grad_norm": 0.7205170392990112, "learning_rate": 9.886049607633357e-05, "loss": 0.6071, "step": 10855 }, { "epoch": 2.0174688719568854, "grad_norm": 0.735811710357666, "learning_rate": 9.884588769622862e-05, "loss": 0.9584, "step": 10856 }, { "epoch": 2.0176547110202563, "grad_norm": 0.6744986772537231, "learning_rate": 9.883127934075624e-05, "loss": 0.7276, "step": 10857 }, { "epoch": 2.0178405500836276, "grad_norm": 0.8203528523445129, "learning_rate": 9.88166710102282e-05, "loss": 0.8217, "step": 10858 }, { "epoch": 2.0180263891469985, "grad_norm": 0.718228816986084, "learning_rate": 9.88020627049563e-05, "loss": 0.905, "step": 10859 }, { "epoch": 2.01821222821037, "grad_norm": 1.0625849962234497, "learning_rate": 9.87874544252523e-05, "loss": 1.0942, "step": 10860 }, { "epoch": 2.018398067273741, "grad_norm": 0.6921961903572083, "learning_rate": 9.877284617142802e-05, "loss": 0.8311, "step": 10861 }, { "epoch": 2.018583906337112, "grad_norm": 0.7733319997787476, "learning_rate": 9.875823794379522e-05, "loss": 0.9363, "step": 10862 }, { "epoch": 2.0187697454004834, "grad_norm": 0.8737978935241699, "learning_rate": 9.874362974266572e-05, "loss": 0.7882, "step": 10863 }, { "epoch": 2.0189555844638543, "grad_norm": 0.6428980231285095, "learning_rate": 9.872902156835125e-05, "loss": 0.8382, "step": 10864 }, { "epoch": 2.0191414235272256, "grad_norm": 0.8537091612815857, "learning_rate": 9.871441342116368e-05, "loss": 1.1804, "step": 10865 }, { "epoch": 2.0193272625905965, "grad_norm": 0.8355739116668701, 
"learning_rate": 9.86998053014147e-05, "loss": 1.0267, "step": 10866 }, { "epoch": 2.019513101653968, "grad_norm": 0.6342544555664062, "learning_rate": 9.868519720941615e-05, "loss": 0.5791, "step": 10867 }, { "epoch": 2.0196989407173387, "grad_norm": 0.7603575587272644, "learning_rate": 9.867058914547981e-05, "loss": 1.0268, "step": 10868 }, { "epoch": 2.01988477978071, "grad_norm": 0.7468525767326355, "learning_rate": 9.86559811099175e-05, "loss": 1.032, "step": 10869 }, { "epoch": 2.020070618844081, "grad_norm": 0.6898447275161743, "learning_rate": 9.864137310304094e-05, "loss": 0.757, "step": 10870 }, { "epoch": 2.0202564579074522, "grad_norm": 0.5959223508834839, "learning_rate": 9.862676512516196e-05, "loss": 0.7033, "step": 10871 }, { "epoch": 2.020442296970823, "grad_norm": 0.7550606727600098, "learning_rate": 9.86121571765923e-05, "loss": 0.8592, "step": 10872 }, { "epoch": 2.0206281360341944, "grad_norm": 0.7787352800369263, "learning_rate": 9.859754925764378e-05, "loss": 1.0283, "step": 10873 }, { "epoch": 2.0208139750975653, "grad_norm": 0.848624587059021, "learning_rate": 9.858294136862815e-05, "loss": 0.8106, "step": 10874 }, { "epoch": 2.0209998141609367, "grad_norm": 0.6989394426345825, "learning_rate": 9.856833350985722e-05, "loss": 0.8153, "step": 10875 }, { "epoch": 2.0211856532243075, "grad_norm": 0.7675639986991882, "learning_rate": 9.855372568164274e-05, "loss": 0.9524, "step": 10876 }, { "epoch": 2.021371492287679, "grad_norm": 0.9018722176551819, "learning_rate": 9.853911788429648e-05, "loss": 1.0343, "step": 10877 }, { "epoch": 2.02155733135105, "grad_norm": 0.7960389852523804, "learning_rate": 9.852451011813027e-05, "loss": 0.8321, "step": 10878 }, { "epoch": 2.021743170414421, "grad_norm": 0.7187342643737793, "learning_rate": 9.850990238345587e-05, "loss": 0.9727, "step": 10879 }, { "epoch": 2.0219290094777924, "grad_norm": 0.7807068228721619, "learning_rate": 9.849529468058503e-05, "loss": 1.0024, "step": 10880 }, { "epoch": 
2.0221148485411633, "grad_norm": 0.6860795021057129, "learning_rate": 9.848068700982955e-05, "loss": 0.9321, "step": 10881 }, { "epoch": 2.0223006876045346, "grad_norm": 1.0080658197402954, "learning_rate": 9.84660793715012e-05, "loss": 0.9219, "step": 10882 }, { "epoch": 2.0224865266679055, "grad_norm": 2.826295852661133, "learning_rate": 9.845147176591176e-05, "loss": 1.5738, "step": 10883 }, { "epoch": 2.022672365731277, "grad_norm": 0.5995877981185913, "learning_rate": 9.843686419337298e-05, "loss": 0.4572, "step": 10884 }, { "epoch": 2.0228582047946477, "grad_norm": 0.7833349108695984, "learning_rate": 9.842225665419668e-05, "loss": 0.9323, "step": 10885 }, { "epoch": 2.023044043858019, "grad_norm": 0.827555775642395, "learning_rate": 9.840764914869456e-05, "loss": 0.9816, "step": 10886 }, { "epoch": 2.02322988292139, "grad_norm": 0.7901204824447632, "learning_rate": 9.839304167717849e-05, "loss": 0.7856, "step": 10887 }, { "epoch": 2.0234157219847613, "grad_norm": 0.8344346284866333, "learning_rate": 9.837843423996011e-05, "loss": 1.0287, "step": 10888 }, { "epoch": 2.023601561048132, "grad_norm": 0.7958934307098389, "learning_rate": 9.836382683735132e-05, "loss": 0.9216, "step": 10889 }, { "epoch": 2.0237874001115035, "grad_norm": 0.7749890089035034, "learning_rate": 9.834921946966383e-05, "loss": 0.9817, "step": 10890 }, { "epoch": 2.0239732391748744, "grad_norm": 0.6827292442321777, "learning_rate": 9.833461213720944e-05, "loss": 1.0178, "step": 10891 }, { "epoch": 2.0241590782382457, "grad_norm": 0.8337804675102234, "learning_rate": 9.832000484029987e-05, "loss": 0.8763, "step": 10892 }, { "epoch": 2.0243449173016166, "grad_norm": 0.8186933398246765, "learning_rate": 9.830539757924694e-05, "loss": 0.9928, "step": 10893 }, { "epoch": 2.024530756364988, "grad_norm": 0.91987144947052, "learning_rate": 9.829079035436236e-05, "loss": 1.1445, "step": 10894 }, { "epoch": 2.0247165954283592, "grad_norm": 0.7778676748275757, "learning_rate": 9.827618316595795e-05, 
"loss": 0.9972, "step": 10895 }, { "epoch": 2.02490243449173, "grad_norm": 0.8444634079933167, "learning_rate": 9.826157601434543e-05, "loss": 0.7407, "step": 10896 }, { "epoch": 2.0250882735551015, "grad_norm": 0.8590813279151917, "learning_rate": 9.824696889983662e-05, "loss": 1.0552, "step": 10897 }, { "epoch": 2.0252741126184723, "grad_norm": 0.765149712562561, "learning_rate": 9.823236182274318e-05, "loss": 0.9949, "step": 10898 }, { "epoch": 2.0254599516818437, "grad_norm": 0.8012186288833618, "learning_rate": 9.8217754783377e-05, "loss": 1.0422, "step": 10899 }, { "epoch": 2.0256457907452146, "grad_norm": 0.9006714820861816, "learning_rate": 9.820314778204978e-05, "loss": 1.1215, "step": 10900 }, { "epoch": 2.025831629808586, "grad_norm": 0.9209079146385193, "learning_rate": 9.81885408190733e-05, "loss": 1.1306, "step": 10901 }, { "epoch": 2.0260174688719568, "grad_norm": 0.6512848734855652, "learning_rate": 9.817393389475928e-05, "loss": 0.7758, "step": 10902 }, { "epoch": 2.026203307935328, "grad_norm": 0.8860459327697754, "learning_rate": 9.815932700941954e-05, "loss": 0.9835, "step": 10903 }, { "epoch": 2.026389146998699, "grad_norm": 0.7635270953178406, "learning_rate": 9.814472016336578e-05, "loss": 0.8204, "step": 10904 }, { "epoch": 2.0265749860620703, "grad_norm": 0.7491157054901123, "learning_rate": 9.813011335690981e-05, "loss": 0.7987, "step": 10905 }, { "epoch": 2.026760825125441, "grad_norm": 0.6436562538146973, "learning_rate": 9.811550659036333e-05, "loss": 0.9272, "step": 10906 }, { "epoch": 2.0269466641888125, "grad_norm": 0.804854691028595, "learning_rate": 9.810089986403816e-05, "loss": 0.9982, "step": 10907 }, { "epoch": 2.0271325032521834, "grad_norm": 0.6464484333992004, "learning_rate": 9.808629317824599e-05, "loss": 0.7298, "step": 10908 }, { "epoch": 2.0273183423155547, "grad_norm": 0.7032682299613953, "learning_rate": 9.807168653329858e-05, "loss": 0.8478, "step": 10909 }, { "epoch": 2.0275041813789256, "grad_norm": 
0.7089880108833313, "learning_rate": 9.805707992950776e-05, "loss": 0.8488, "step": 10910 }, { "epoch": 2.027690020442297, "grad_norm": 1.3038439750671387, "learning_rate": 9.804247336718523e-05, "loss": 0.7484, "step": 10911 }, { "epoch": 2.0278758595056683, "grad_norm": 0.7218604683876038, "learning_rate": 9.802786684664274e-05, "loss": 0.7819, "step": 10912 }, { "epoch": 2.028061698569039, "grad_norm": 0.9532296061515808, "learning_rate": 9.801326036819208e-05, "loss": 1.137, "step": 10913 }, { "epoch": 2.0282475376324105, "grad_norm": 0.6444713473320007, "learning_rate": 9.799865393214492e-05, "loss": 0.838, "step": 10914 }, { "epoch": 2.0284333766957814, "grad_norm": 0.8020101189613342, "learning_rate": 9.79840475388131e-05, "loss": 0.8373, "step": 10915 }, { "epoch": 2.0286192157591527, "grad_norm": 0.8423665165901184, "learning_rate": 9.796944118850829e-05, "loss": 0.7276, "step": 10916 }, { "epoch": 2.0288050548225236, "grad_norm": 0.8215285539627075, "learning_rate": 9.79548348815423e-05, "loss": 0.9521, "step": 10917 }, { "epoch": 2.028990893885895, "grad_norm": 0.7488442659378052, "learning_rate": 9.794022861822684e-05, "loss": 0.8316, "step": 10918 }, { "epoch": 2.029176732949266, "grad_norm": 0.6915283799171448, "learning_rate": 9.792562239887363e-05, "loss": 0.5913, "step": 10919 }, { "epoch": 2.029362572012637, "grad_norm": 0.8294501304626465, "learning_rate": 9.791101622379448e-05, "loss": 1.0033, "step": 10920 }, { "epoch": 2.029548411076008, "grad_norm": 0.7450535297393799, "learning_rate": 9.789641009330111e-05, "loss": 0.8725, "step": 10921 }, { "epoch": 2.0297342501393794, "grad_norm": 0.7984308004379272, "learning_rate": 9.788180400770524e-05, "loss": 1.1632, "step": 10922 }, { "epoch": 2.0299200892027502, "grad_norm": 0.7514494061470032, "learning_rate": 9.786719796731866e-05, "loss": 1.0312, "step": 10923 }, { "epoch": 2.0301059282661216, "grad_norm": 0.7737922668457031, "learning_rate": 9.785259197245306e-05, "loss": 0.8648, "step": 10924 
}, { "epoch": 2.0302917673294925, "grad_norm": 0.8706908822059631, "learning_rate": 9.783798602342021e-05, "loss": 1.1641, "step": 10925 }, { "epoch": 2.030477606392864, "grad_norm": 1.0741982460021973, "learning_rate": 9.782338012053183e-05, "loss": 0.8855, "step": 10926 }, { "epoch": 2.030663445456235, "grad_norm": 1.0242948532104492, "learning_rate": 9.780877426409966e-05, "loss": 1.0979, "step": 10927 }, { "epoch": 2.030849284519606, "grad_norm": 0.718450665473938, "learning_rate": 9.779416845443545e-05, "loss": 0.9463, "step": 10928 }, { "epoch": 2.0310351235829773, "grad_norm": 0.7844233512878418, "learning_rate": 9.777956269185092e-05, "loss": 0.859, "step": 10929 }, { "epoch": 2.031220962646348, "grad_norm": 0.820450484752655, "learning_rate": 9.77649569766578e-05, "loss": 0.8462, "step": 10930 }, { "epoch": 2.0314068017097195, "grad_norm": 0.7150939106941223, "learning_rate": 9.775035130916786e-05, "loss": 0.8532, "step": 10931 }, { "epoch": 2.0315926407730904, "grad_norm": 0.9015347957611084, "learning_rate": 9.77357456896928e-05, "loss": 0.8909, "step": 10932 }, { "epoch": 2.0317784798364618, "grad_norm": 0.7773807644844055, "learning_rate": 9.772114011854438e-05, "loss": 0.7619, "step": 10933 }, { "epoch": 2.0319643188998326, "grad_norm": 0.7356576323509216, "learning_rate": 9.77065345960343e-05, "loss": 1.0733, "step": 10934 }, { "epoch": 2.032150157963204, "grad_norm": 0.7767458558082581, "learning_rate": 9.769192912247434e-05, "loss": 0.9063, "step": 10935 }, { "epoch": 2.032335997026575, "grad_norm": 0.7118449211120605, "learning_rate": 9.767732369817615e-05, "loss": 0.8264, "step": 10936 }, { "epoch": 2.032521836089946, "grad_norm": 0.7526161670684814, "learning_rate": 9.766271832345152e-05, "loss": 0.836, "step": 10937 }, { "epoch": 2.032707675153317, "grad_norm": 0.7727689146995544, "learning_rate": 9.764811299861215e-05, "loss": 1.0169, "step": 10938 }, { "epoch": 2.0328935142166884, "grad_norm": 0.6880142092704773, "learning_rate": 
9.763350772396975e-05, "loss": 0.8605, "step": 10939 }, { "epoch": 2.0330793532800593, "grad_norm": 0.7179078459739685, "learning_rate": 9.761890249983605e-05, "loss": 0.9297, "step": 10940 }, { "epoch": 2.0332651923434306, "grad_norm": 0.6645086407661438, "learning_rate": 9.760429732652284e-05, "loss": 0.941, "step": 10941 }, { "epoch": 2.0334510314068015, "grad_norm": 0.8322639465332031, "learning_rate": 9.758969220434178e-05, "loss": 0.9808, "step": 10942 }, { "epoch": 2.033636870470173, "grad_norm": 0.6776291728019714, "learning_rate": 9.757508713360461e-05, "loss": 0.7789, "step": 10943 }, { "epoch": 2.033822709533544, "grad_norm": 0.6935237646102905, "learning_rate": 9.756048211462303e-05, "loss": 0.8107, "step": 10944 }, { "epoch": 2.034008548596915, "grad_norm": 0.8299887180328369, "learning_rate": 9.754587714770878e-05, "loss": 0.8408, "step": 10945 }, { "epoch": 2.0341943876602864, "grad_norm": 0.652435839176178, "learning_rate": 9.753127223317357e-05, "loss": 0.961, "step": 10946 }, { "epoch": 2.0343802267236573, "grad_norm": 0.8443545699119568, "learning_rate": 9.751666737132913e-05, "loss": 1.1084, "step": 10947 }, { "epoch": 2.0345660657870286, "grad_norm": 0.6825294494628906, "learning_rate": 9.750206256248715e-05, "loss": 0.7776, "step": 10948 }, { "epoch": 2.0347519048503995, "grad_norm": 0.9838349223136902, "learning_rate": 9.748745780695934e-05, "loss": 0.9621, "step": 10949 }, { "epoch": 2.034937743913771, "grad_norm": 0.6445854902267456, "learning_rate": 9.747285310505747e-05, "loss": 0.8213, "step": 10950 }, { "epoch": 2.0351235829771417, "grad_norm": 0.8864513635635376, "learning_rate": 9.745824845709316e-05, "loss": 1.0444, "step": 10951 }, { "epoch": 2.035309422040513, "grad_norm": 0.5171553492546082, "learning_rate": 9.74436438633782e-05, "loss": 0.5602, "step": 10952 }, { "epoch": 2.035495261103884, "grad_norm": 0.6739189028739929, "learning_rate": 9.74290393242243e-05, "loss": 0.803, "step": 10953 }, { "epoch": 2.0356811001672552, 
"grad_norm": 0.7470421195030212, "learning_rate": 9.741443483994312e-05, "loss": 0.7937, "step": 10954 }, { "epoch": 2.035866939230626, "grad_norm": 0.7404161095619202, "learning_rate": 9.739983041084642e-05, "loss": 1.0081, "step": 10955 }, { "epoch": 2.0360527782939974, "grad_norm": 0.6297615766525269, "learning_rate": 9.738522603724584e-05, "loss": 0.6757, "step": 10956 }, { "epoch": 2.0362386173573683, "grad_norm": 0.6591044068336487, "learning_rate": 9.737062171945315e-05, "loss": 0.6592, "step": 10957 }, { "epoch": 2.0364244564207397, "grad_norm": 0.7246942520141602, "learning_rate": 9.735601745778001e-05, "loss": 0.8395, "step": 10958 }, { "epoch": 2.0366102954841105, "grad_norm": 0.8555113673210144, "learning_rate": 9.734141325253813e-05, "loss": 1.0245, "step": 10959 }, { "epoch": 2.036796134547482, "grad_norm": 0.8811112642288208, "learning_rate": 9.732680910403926e-05, "loss": 0.9338, "step": 10960 }, { "epoch": 2.036981973610853, "grad_norm": 0.7802111506462097, "learning_rate": 9.731220501259501e-05, "loss": 1.1103, "step": 10961 }, { "epoch": 2.037167812674224, "grad_norm": 0.8129925727844238, "learning_rate": 9.729760097851716e-05, "loss": 0.9776, "step": 10962 }, { "epoch": 2.0373536517375954, "grad_norm": 0.7644342184066772, "learning_rate": 9.728299700211739e-05, "loss": 0.835, "step": 10963 }, { "epoch": 2.0375394908009663, "grad_norm": 0.7132039070129395, "learning_rate": 9.726839308370738e-05, "loss": 0.9298, "step": 10964 }, { "epoch": 2.0377253298643376, "grad_norm": 0.7377307415008545, "learning_rate": 9.725378922359884e-05, "loss": 0.9102, "step": 10965 }, { "epoch": 2.0379111689277085, "grad_norm": 0.852078914642334, "learning_rate": 9.723918542210343e-05, "loss": 1.0642, "step": 10966 }, { "epoch": 2.03809700799108, "grad_norm": 0.6840757727622986, "learning_rate": 9.72245816795329e-05, "loss": 0.8916, "step": 10967 }, { "epoch": 2.0382828470544507, "grad_norm": 0.6280407905578613, "learning_rate": 9.72099779961989e-05, "loss": 0.9485, 
"step": 10968 }, { "epoch": 2.038468686117822, "grad_norm": 0.7283921241760254, "learning_rate": 9.719537437241312e-05, "loss": 0.9284, "step": 10969 }, { "epoch": 2.038654525181193, "grad_norm": 0.8217094540596008, "learning_rate": 9.718077080848727e-05, "loss": 0.7122, "step": 10970 }, { "epoch": 2.0388403642445643, "grad_norm": 0.7073930501937866, "learning_rate": 9.716616730473304e-05, "loss": 0.7266, "step": 10971 }, { "epoch": 2.039026203307935, "grad_norm": 0.6238510608673096, "learning_rate": 9.715156386146206e-05, "loss": 0.7414, "step": 10972 }, { "epoch": 2.0392120423713065, "grad_norm": 0.7381787300109863, "learning_rate": 9.71369604789861e-05, "loss": 0.9392, "step": 10973 }, { "epoch": 2.0393978814346774, "grad_norm": 0.8915388584136963, "learning_rate": 9.712235715761679e-05, "loss": 0.8417, "step": 10974 }, { "epoch": 2.0395837204980487, "grad_norm": 0.7346033453941345, "learning_rate": 9.710775389766587e-05, "loss": 1.0207, "step": 10975 }, { "epoch": 2.03976955956142, "grad_norm": 0.9085640907287598, "learning_rate": 9.709315069944493e-05, "loss": 0.9353, "step": 10976 }, { "epoch": 2.039955398624791, "grad_norm": 0.7272212505340576, "learning_rate": 9.707854756326575e-05, "loss": 1.0059, "step": 10977 }, { "epoch": 2.0401412376881622, "grad_norm": 0.8539851307868958, "learning_rate": 9.706394448943991e-05, "loss": 0.9126, "step": 10978 }, { "epoch": 2.040327076751533, "grad_norm": 0.696914792060852, "learning_rate": 9.704934147827914e-05, "loss": 0.7602, "step": 10979 }, { "epoch": 2.0405129158149045, "grad_norm": 0.858230710029602, "learning_rate": 9.703473853009514e-05, "loss": 1.1857, "step": 10980 }, { "epoch": 2.0406987548782753, "grad_norm": 0.8481776118278503, "learning_rate": 9.702013564519954e-05, "loss": 0.7737, "step": 10981 }, { "epoch": 2.0408845939416467, "grad_norm": 0.7733168601989746, "learning_rate": 9.700553282390401e-05, "loss": 0.9721, "step": 10982 }, { "epoch": 2.0410704330050176, "grad_norm": 0.8131083250045776, 
"learning_rate": 9.699093006652028e-05, "loss": 0.813, "step": 10983 }, { "epoch": 2.041256272068389, "grad_norm": 0.8533946871757507, "learning_rate": 9.697632737335996e-05, "loss": 1.1391, "step": 10984 }, { "epoch": 2.0414421111317598, "grad_norm": 0.7576166391372681, "learning_rate": 9.696172474473479e-05, "loss": 0.7313, "step": 10985 }, { "epoch": 2.041627950195131, "grad_norm": 0.664013147354126, "learning_rate": 9.694712218095634e-05, "loss": 0.9421, "step": 10986 }, { "epoch": 2.041813789258502, "grad_norm": 0.7320838570594788, "learning_rate": 9.693251968233635e-05, "loss": 0.7804, "step": 10987 }, { "epoch": 2.0419996283218733, "grad_norm": 0.8353871703147888, "learning_rate": 9.691791724918646e-05, "loss": 0.9285, "step": 10988 }, { "epoch": 2.042185467385244, "grad_norm": 0.7641454935073853, "learning_rate": 9.690331488181834e-05, "loss": 0.9363, "step": 10989 }, { "epoch": 2.0423713064486155, "grad_norm": 0.8495318293571472, "learning_rate": 9.688871258054366e-05, "loss": 0.9949, "step": 10990 }, { "epoch": 2.0425571455119864, "grad_norm": 0.8309895396232605, "learning_rate": 9.687411034567404e-05, "loss": 0.7591, "step": 10991 }, { "epoch": 2.0427429845753577, "grad_norm": 0.7717214822769165, "learning_rate": 9.68595081775212e-05, "loss": 1.1372, "step": 10992 }, { "epoch": 2.042928823638729, "grad_norm": 0.756056010723114, "learning_rate": 9.684490607639673e-05, "loss": 0.8602, "step": 10993 }, { "epoch": 2.0431146627021, "grad_norm": 0.7627288699150085, "learning_rate": 9.683030404261234e-05, "loss": 1.0895, "step": 10994 }, { "epoch": 2.0433005017654713, "grad_norm": 0.7354733943939209, "learning_rate": 9.68157020764797e-05, "loss": 0.9717, "step": 10995 }, { "epoch": 2.043486340828842, "grad_norm": 0.8014447689056396, "learning_rate": 9.68011001783104e-05, "loss": 1.1493, "step": 10996 }, { "epoch": 2.0436721798922135, "grad_norm": 0.6164006590843201, "learning_rate": 9.678649834841617e-05, "loss": 0.8292, "step": 10997 }, { "epoch": 
2.0438580189555844, "grad_norm": 0.9240174889564514, "learning_rate": 9.677189658710858e-05, "loss": 0.8912, "step": 10998 }, { "epoch": 2.0440438580189557, "grad_norm": 0.7147821187973022, "learning_rate": 9.675729489469933e-05, "loss": 0.7899, "step": 10999 }, { "epoch": 2.0442296970823266, "grad_norm": 0.8751394152641296, "learning_rate": 9.674269327150007e-05, "loss": 1.1711, "step": 11000 }, { "epoch": 2.044415536145698, "grad_norm": 0.8109802603721619, "learning_rate": 9.67280917178224e-05, "loss": 0.8241, "step": 11001 }, { "epoch": 2.044601375209069, "grad_norm": 0.7171081900596619, "learning_rate": 9.671349023397803e-05, "loss": 0.8985, "step": 11002 }, { "epoch": 2.04478721427244, "grad_norm": 0.6551255583763123, "learning_rate": 9.669888882027853e-05, "loss": 0.8839, "step": 11003 }, { "epoch": 2.044973053335811, "grad_norm": 0.8627475500106812, "learning_rate": 9.66842874770356e-05, "loss": 0.9054, "step": 11004 }, { "epoch": 2.0451588923991824, "grad_norm": 0.8003649115562439, "learning_rate": 9.666968620456088e-05, "loss": 0.8746, "step": 11005 }, { "epoch": 2.0453447314625532, "grad_norm": 0.8886460065841675, "learning_rate": 9.6655085003166e-05, "loss": 1.2039, "step": 11006 }, { "epoch": 2.0455305705259246, "grad_norm": 0.8739449381828308, "learning_rate": 9.664048387316258e-05, "loss": 0.93, "step": 11007 }, { "epoch": 2.0457164095892955, "grad_norm": 0.8335513472557068, "learning_rate": 9.662588281486226e-05, "loss": 0.8358, "step": 11008 }, { "epoch": 2.045902248652667, "grad_norm": 0.7394416928291321, "learning_rate": 9.661128182857667e-05, "loss": 0.8069, "step": 11009 }, { "epoch": 2.046088087716038, "grad_norm": 0.7699256539344788, "learning_rate": 9.659668091461749e-05, "loss": 1.0213, "step": 11010 }, { "epoch": 2.046273926779409, "grad_norm": 0.739697277545929, "learning_rate": 9.658208007329628e-05, "loss": 0.9363, "step": 11011 }, { "epoch": 2.0464597658427803, "grad_norm": 0.7034657597541809, "learning_rate": 9.656747930492474e-05, 
"loss": 0.9542, "step": 11012 }, { "epoch": 2.046645604906151, "grad_norm": 0.633341908454895, "learning_rate": 9.655287860981444e-05, "loss": 0.8322, "step": 11013 }, { "epoch": 2.0468314439695225, "grad_norm": 0.7756519913673401, "learning_rate": 9.653827798827699e-05, "loss": 0.9888, "step": 11014 }, { "epoch": 2.0470172830328934, "grad_norm": 0.7440750002861023, "learning_rate": 9.652367744062412e-05, "loss": 0.9726, "step": 11015 }, { "epoch": 2.0472031220962648, "grad_norm": 0.7013724446296692, "learning_rate": 9.650907696716736e-05, "loss": 1.0856, "step": 11016 }, { "epoch": 2.0473889611596356, "grad_norm": 0.8220937252044678, "learning_rate": 9.649447656821839e-05, "loss": 1.0899, "step": 11017 }, { "epoch": 2.047574800223007, "grad_norm": 0.7048841118812561, "learning_rate": 9.647987624408877e-05, "loss": 0.8282, "step": 11018 }, { "epoch": 2.047760639286378, "grad_norm": 0.7343522906303406, "learning_rate": 9.646527599509015e-05, "loss": 0.8971, "step": 11019 }, { "epoch": 2.047946478349749, "grad_norm": 0.6640952825546265, "learning_rate": 9.645067582153418e-05, "loss": 0.7383, "step": 11020 }, { "epoch": 2.04813231741312, "grad_norm": 0.864271879196167, "learning_rate": 9.643607572373242e-05, "loss": 0.8369, "step": 11021 }, { "epoch": 2.0483181564764914, "grad_norm": 1.0475118160247803, "learning_rate": 9.642147570199652e-05, "loss": 0.8565, "step": 11022 }, { "epoch": 2.0485039955398623, "grad_norm": 0.6887284517288208, "learning_rate": 9.640687575663808e-05, "loss": 0.8654, "step": 11023 }, { "epoch": 2.0486898346032336, "grad_norm": 0.7000967264175415, "learning_rate": 9.639227588796866e-05, "loss": 0.8847, "step": 11024 }, { "epoch": 2.0488756736666045, "grad_norm": 0.832517147064209, "learning_rate": 9.637767609629999e-05, "loss": 1.0639, "step": 11025 }, { "epoch": 2.049061512729976, "grad_norm": 0.7449747920036316, "learning_rate": 9.636307638194357e-05, "loss": 0.9122, "step": 11026 }, { "epoch": 2.049247351793347, "grad_norm": 
0.6630158424377441, "learning_rate": 9.634847674521106e-05, "loss": 0.8657, "step": 11027 }, { "epoch": 2.049433190856718, "grad_norm": 0.8289523720741272, "learning_rate": 9.633387718641404e-05, "loss": 0.9851, "step": 11028 }, { "epoch": 2.0496190299200894, "grad_norm": 0.7368113994598389, "learning_rate": 9.631927770586412e-05, "loss": 0.9755, "step": 11029 }, { "epoch": 2.0498048689834603, "grad_norm": 0.7921997904777527, "learning_rate": 9.630467830387291e-05, "loss": 1.1219, "step": 11030 }, { "epoch": 2.0499907080468316, "grad_norm": 0.9077422618865967, "learning_rate": 9.6290078980752e-05, "loss": 1.1224, "step": 11031 }, { "epoch": 2.0501765471102025, "grad_norm": 0.7921608090400696, "learning_rate": 9.627547973681299e-05, "loss": 0.8099, "step": 11032 }, { "epoch": 2.050362386173574, "grad_norm": 0.6978192925453186, "learning_rate": 9.626088057236745e-05, "loss": 0.9829, "step": 11033 }, { "epoch": 2.0505482252369447, "grad_norm": 0.6289072632789612, "learning_rate": 9.624628148772703e-05, "loss": 0.6646, "step": 11034 }, { "epoch": 2.050734064300316, "grad_norm": 0.6852868795394897, "learning_rate": 9.623168248320325e-05, "loss": 0.7644, "step": 11035 }, { "epoch": 2.050919903363687, "grad_norm": null, "learning_rate": 9.623168248320325e-05, "loss": 1.7865, "step": 11036 }, { "epoch": 2.0511057424270582, "grad_norm": 0.774402379989624, "learning_rate": 9.621708355910776e-05, "loss": 0.8294, "step": 11037 }, { "epoch": 2.051291581490429, "grad_norm": 0.7371591329574585, "learning_rate": 9.620248471575214e-05, "loss": 0.7436, "step": 11038 }, { "epoch": 2.0514774205538004, "grad_norm": 0.7850064039230347, "learning_rate": 9.618788595344795e-05, "loss": 0.8809, "step": 11039 }, { "epoch": 2.0516632596171713, "grad_norm": 0.865877628326416, "learning_rate": 9.61732872725068e-05, "loss": 0.78, "step": 11040 }, { "epoch": 2.0518490986805427, "grad_norm": 0.663486659526825, "learning_rate": 9.615868867324028e-05, "loss": 0.7357, "step": 11041 }, { "epoch": 
2.0520349377439135, "grad_norm": 0.7983995676040649, "learning_rate": 9.614409015595995e-05, "loss": 0.9285, "step": 11042 }, { "epoch": 2.052220776807285, "grad_norm": 0.7850358486175537, "learning_rate": 9.61294917209774e-05, "loss": 0.8746, "step": 11043 }, { "epoch": 2.052406615870656, "grad_norm": 0.866909384727478, "learning_rate": 9.61148933686042e-05, "loss": 1.0918, "step": 11044 }, { "epoch": 2.052592454934027, "grad_norm": 0.8137707710266113, "learning_rate": 9.610029509915195e-05, "loss": 1.0047, "step": 11045 }, { "epoch": 2.0527782939973984, "grad_norm": 0.6748443245887756, "learning_rate": 9.608569691293216e-05, "loss": 0.7952, "step": 11046 }, { "epoch": 2.0529641330607693, "grad_norm": 0.8559642434120178, "learning_rate": 9.607109881025648e-05, "loss": 1.0124, "step": 11047 }, { "epoch": 2.0531499721241406, "grad_norm": 0.8332653641700745, "learning_rate": 9.605650079143648e-05, "loss": 0.9663, "step": 11048 }, { "epoch": 2.0533358111875115, "grad_norm": 0.7888073921203613, "learning_rate": 9.604190285678368e-05, "loss": 0.7784, "step": 11049 }, { "epoch": 2.053521650250883, "grad_norm": 0.7318856120109558, "learning_rate": 9.602730500660967e-05, "loss": 0.6779, "step": 11050 }, { "epoch": 2.0537074893142537, "grad_norm": 0.8940858840942383, "learning_rate": 9.601270724122603e-05, "loss": 0.6288, "step": 11051 }, { "epoch": 2.053893328377625, "grad_norm": 0.8630791306495667, "learning_rate": 9.59981095609443e-05, "loss": 0.9083, "step": 11052 }, { "epoch": 2.054079167440996, "grad_norm": 0.7749754190444946, "learning_rate": 9.598351196607607e-05, "loss": 0.9646, "step": 11053 }, { "epoch": 2.0542650065043673, "grad_norm": 0.8278696537017822, "learning_rate": 9.596891445693287e-05, "loss": 0.7448, "step": 11054 }, { "epoch": 2.054450845567738, "grad_norm": 0.7356969714164734, "learning_rate": 9.595431703382628e-05, "loss": 1.0189, "step": 11055 }, { "epoch": 2.0546366846311095, "grad_norm": 0.8219599723815918, "learning_rate": 9.593971969706786e-05, 
"loss": 0.8709, "step": 11056 }, { "epoch": 2.0548225236944804, "grad_norm": 0.9181432127952576, "learning_rate": 9.59251224469691e-05, "loss": 1.0932, "step": 11057 }, { "epoch": 2.0550083627578517, "grad_norm": 0.6893907189369202, "learning_rate": 9.591052528384167e-05, "loss": 0.9475, "step": 11058 }, { "epoch": 2.055194201821223, "grad_norm": 0.7080320715904236, "learning_rate": 9.589592820799704e-05, "loss": 0.8971, "step": 11059 }, { "epoch": 2.055380040884594, "grad_norm": 0.5905109643936157, "learning_rate": 9.588133121974676e-05, "loss": 0.3737, "step": 11060 }, { "epoch": 2.0555658799479652, "grad_norm": 0.7928087711334229, "learning_rate": 9.586673431940242e-05, "loss": 0.7819, "step": 11061 }, { "epoch": 2.055751719011336, "grad_norm": 0.9699975848197937, "learning_rate": 9.585213750727553e-05, "loss": 0.8992, "step": 11062 }, { "epoch": 2.0559375580747075, "grad_norm": 1.4009530544281006, "learning_rate": 9.583754078367768e-05, "loss": 1.2486, "step": 11063 }, { "epoch": 2.0561233971380783, "grad_norm": 0.7332651615142822, "learning_rate": 9.582294414892036e-05, "loss": 1.0257, "step": 11064 }, { "epoch": 2.0563092362014497, "grad_norm": 0.9999315142631531, "learning_rate": 9.580834760331514e-05, "loss": 0.7446, "step": 11065 }, { "epoch": 2.0564950752648206, "grad_norm": 0.8058012127876282, "learning_rate": 9.579375114717351e-05, "loss": 1.0821, "step": 11066 }, { "epoch": 2.056680914328192, "grad_norm": 0.8406280279159546, "learning_rate": 9.577915478080705e-05, "loss": 0.9408, "step": 11067 }, { "epoch": 2.0568667533915628, "grad_norm": 0.6476280093193054, "learning_rate": 9.576455850452731e-05, "loss": 0.5505, "step": 11068 }, { "epoch": 2.057052592454934, "grad_norm": 0.9520523548126221, "learning_rate": 9.57499623186458e-05, "loss": 0.98, "step": 11069 }, { "epoch": 2.057238431518305, "grad_norm": 0.7537355422973633, "learning_rate": 9.573536622347404e-05, "loss": 0.9191, "step": 11070 }, { "epoch": 2.0574242705816763, "grad_norm": 
0.8616262078285217, "learning_rate": 9.572077021932358e-05, "loss": 0.8966, "step": 11071 }, { "epoch": 2.057610109645047, "grad_norm": 0.7534152865409851, "learning_rate": 9.570617430650592e-05, "loss": 0.7814, "step": 11072 }, { "epoch": 2.0577959487084185, "grad_norm": 0.6804052591323853, "learning_rate": 9.569157848533262e-05, "loss": 0.8825, "step": 11073 }, { "epoch": 2.0579817877717894, "grad_norm": 0.7233737707138062, "learning_rate": 9.567698275611517e-05, "loss": 0.9317, "step": 11074 }, { "epoch": 2.0581676268351607, "grad_norm": 0.7929511666297913, "learning_rate": 9.566238711916513e-05, "loss": 0.8986, "step": 11075 }, { "epoch": 2.058353465898532, "grad_norm": 0.7047044634819031, "learning_rate": 9.564779157479397e-05, "loss": 1.0629, "step": 11076 }, { "epoch": 2.058539304961903, "grad_norm": 0.8283855319023132, "learning_rate": 9.563319612331324e-05, "loss": 1.1195, "step": 11077 }, { "epoch": 2.0587251440252743, "grad_norm": 0.8268432021141052, "learning_rate": 9.561860076503442e-05, "loss": 0.8749, "step": 11078 }, { "epoch": 2.058910983088645, "grad_norm": 0.7072595953941345, "learning_rate": 9.560400550026905e-05, "loss": 0.9702, "step": 11079 }, { "epoch": 2.0590968221520165, "grad_norm": 0.9471309185028076, "learning_rate": 9.558941032932864e-05, "loss": 0.9403, "step": 11080 }, { "epoch": 2.0592826612153874, "grad_norm": 0.8507811427116394, "learning_rate": 9.557481525252474e-05, "loss": 0.8969, "step": 11081 }, { "epoch": 2.0594685002787587, "grad_norm": 0.8363974690437317, "learning_rate": 9.556022027016878e-05, "loss": 1.0464, "step": 11082 }, { "epoch": 2.0596543393421296, "grad_norm": 0.9051037430763245, "learning_rate": 9.554562538257231e-05, "loss": 0.7263, "step": 11083 }, { "epoch": 2.059840178405501, "grad_norm": 1.8966175317764282, "learning_rate": 9.553103059004682e-05, "loss": 1.4728, "step": 11084 }, { "epoch": 2.060026017468872, "grad_norm": 0.829832136631012, "learning_rate": 9.551643589290383e-05, "loss": 0.9924, "step": 
11085 }, { "epoch": 2.060211856532243, "grad_norm": 0.6673998236656189, "learning_rate": 9.550184129145479e-05, "loss": 0.8441, "step": 11086 }, { "epoch": 2.060397695595614, "grad_norm": 0.7418094873428345, "learning_rate": 9.548724678601125e-05, "loss": 1.1444, "step": 11087 }, { "epoch": 2.0605835346589854, "grad_norm": 0.8789692521095276, "learning_rate": 9.547265237688465e-05, "loss": 1.012, "step": 11088 }, { "epoch": 2.0607693737223562, "grad_norm": 0.7394304871559143, "learning_rate": 9.545805806438653e-05, "loss": 0.8909, "step": 11089 }, { "epoch": 2.0609552127857276, "grad_norm": 0.9010868072509766, "learning_rate": 9.544346384882838e-05, "loss": 1.042, "step": 11090 }, { "epoch": 2.0611410518490985, "grad_norm": 0.7490463852882385, "learning_rate": 9.542886973052169e-05, "loss": 0.796, "step": 11091 }, { "epoch": 2.06132689091247, "grad_norm": 0.6933886408805847, "learning_rate": 9.54142757097779e-05, "loss": 0.6588, "step": 11092 }, { "epoch": 2.061512729975841, "grad_norm": 0.7842268347740173, "learning_rate": 9.539968178690856e-05, "loss": 0.8955, "step": 11093 }, { "epoch": 2.061698569039212, "grad_norm": 0.7727258801460266, "learning_rate": 9.538508796222508e-05, "loss": 0.8295, "step": 11094 }, { "epoch": 2.0618844081025833, "grad_norm": 0.7276997566223145, "learning_rate": 9.537049423603901e-05, "loss": 1.0724, "step": 11095 }, { "epoch": 2.062070247165954, "grad_norm": 0.6727124452590942, "learning_rate": 9.535590060866177e-05, "loss": 1.0115, "step": 11096 }, { "epoch": 2.0622560862293255, "grad_norm": 0.956548273563385, "learning_rate": 9.53413070804049e-05, "loss": 1.1568, "step": 11097 }, { "epoch": 2.0624419252926964, "grad_norm": 0.7817228436470032, "learning_rate": 9.53267136515798e-05, "loss": 0.8645, "step": 11098 }, { "epoch": 2.0626277643560678, "grad_norm": 0.6998949646949768, "learning_rate": 9.531212032249795e-05, "loss": 0.7166, "step": 11099 }, { "epoch": 2.0628136034194386, "grad_norm": 0.8000270128250122, "learning_rate": 
9.529752709347087e-05, "loss": 0.7585, "step": 11100 }, { "epoch": 2.06299944248281, "grad_norm": 0.801515519618988, "learning_rate": 9.528293396481005e-05, "loss": 0.7732, "step": 11101 }, { "epoch": 2.063185281546181, "grad_norm": 0.68977290391922, "learning_rate": 9.526834093682685e-05, "loss": 0.7811, "step": 11102 }, { "epoch": 2.063371120609552, "grad_norm": 0.7006689310073853, "learning_rate": 9.525374800983284e-05, "loss": 0.8202, "step": 11103 }, { "epoch": 2.063556959672923, "grad_norm": 0.7314426302909851, "learning_rate": 9.523915518413941e-05, "loss": 0.9052, "step": 11104 }, { "epoch": 2.0637427987362944, "grad_norm": 0.7322511672973633, "learning_rate": 9.522456246005806e-05, "loss": 0.9001, "step": 11105 }, { "epoch": 2.0639286377996653, "grad_norm": 0.7833496332168579, "learning_rate": 9.52099698379002e-05, "loss": 0.8426, "step": 11106 }, { "epoch": 2.0641144768630366, "grad_norm": 0.888405978679657, "learning_rate": 9.519537731797736e-05, "loss": 0.9917, "step": 11107 }, { "epoch": 2.064300315926408, "grad_norm": 0.7309452295303345, "learning_rate": 9.518078490060089e-05, "loss": 0.7145, "step": 11108 }, { "epoch": 2.064486154989779, "grad_norm": 0.690657377243042, "learning_rate": 9.51661925860823e-05, "loss": 0.7821, "step": 11109 }, { "epoch": 2.06467199405315, "grad_norm": 0.7759823203086853, "learning_rate": 9.515160037473304e-05, "loss": 0.9312, "step": 11110 }, { "epoch": 2.064857833116521, "grad_norm": 0.9517083764076233, "learning_rate": 9.513700826686458e-05, "loss": 1.0287, "step": 11111 }, { "epoch": 2.0650436721798924, "grad_norm": 0.9148969054222107, "learning_rate": 9.512241626278832e-05, "loss": 0.9626, "step": 11112 }, { "epoch": 2.0652295112432633, "grad_norm": 1.4553234577178955, "learning_rate": 9.510782436281572e-05, "loss": 1.398, "step": 11113 }, { "epoch": 2.0654153503066346, "grad_norm": 0.7028954029083252, "learning_rate": 9.509323256725821e-05, "loss": 0.8219, "step": 11114 }, { "epoch": 2.0656011893700055, "grad_norm": 
0.7765356302261353, "learning_rate": 9.507864087642724e-05, "loss": 0.9148, "step": 11115 }, { "epoch": 2.065787028433377, "grad_norm": 0.7603330612182617, "learning_rate": 9.506404929063422e-05, "loss": 1.0486, "step": 11116 }, { "epoch": 2.0659728674967477, "grad_norm": 0.8426289558410645, "learning_rate": 9.50494578101906e-05, "loss": 1.0035, "step": 11117 }, { "epoch": 2.066158706560119, "grad_norm": 0.6924977898597717, "learning_rate": 9.503486643540781e-05, "loss": 0.7859, "step": 11118 }, { "epoch": 2.06634454562349, "grad_norm": 0.7882113456726074, "learning_rate": 9.502027516659728e-05, "loss": 0.8989, "step": 11119 }, { "epoch": 2.0665303846868612, "grad_norm": 0.6670895218849182, "learning_rate": 9.500568400407037e-05, "loss": 0.8255, "step": 11120 }, { "epoch": 2.066716223750232, "grad_norm": 0.7035497426986694, "learning_rate": 9.499109294813863e-05, "loss": 0.7877, "step": 11121 }, { "epoch": 2.0669020628136034, "grad_norm": 0.7977902889251709, "learning_rate": 9.497650199911341e-05, "loss": 0.9922, "step": 11122 }, { "epoch": 2.0670879018769743, "grad_norm": 0.6651736497879028, "learning_rate": 9.496191115730614e-05, "loss": 0.8098, "step": 11123 }, { "epoch": 2.0672737409403457, "grad_norm": 0.9503287076950073, "learning_rate": 9.494732042302821e-05, "loss": 0.91, "step": 11124 }, { "epoch": 2.067459580003717, "grad_norm": 0.6866540908813477, "learning_rate": 9.493272979659108e-05, "loss": 0.5869, "step": 11125 }, { "epoch": 2.067645419067088, "grad_norm": 0.8645631670951843, "learning_rate": 9.491813927830611e-05, "loss": 0.8996, "step": 11126 }, { "epoch": 2.067831258130459, "grad_norm": 0.7809982895851135, "learning_rate": 9.490354886848476e-05, "loss": 0.8874, "step": 11127 }, { "epoch": 2.06801709719383, "grad_norm": 0.7561979293823242, "learning_rate": 9.488895856743839e-05, "loss": 0.8648, "step": 11128 }, { "epoch": 2.0682029362572014, "grad_norm": 0.8173332810401917, "learning_rate": 9.487436837547846e-05, "loss": 1.2458, "step": 11129 }, { 
"epoch": 2.0683887753205723, "grad_norm": 0.8098888397216797, "learning_rate": 9.485977829291627e-05, "loss": 0.9267, "step": 11130 }, { "epoch": 2.0685746143839436, "grad_norm": 0.8903951048851013, "learning_rate": 9.484518832006335e-05, "loss": 0.8468, "step": 11131 }, { "epoch": 2.0687604534473145, "grad_norm": 0.7828354835510254, "learning_rate": 9.483059845723102e-05, "loss": 0.8767, "step": 11132 }, { "epoch": 2.068946292510686, "grad_norm": 0.7997280955314636, "learning_rate": 9.481600870473072e-05, "loss": 1.0065, "step": 11133 }, { "epoch": 2.0691321315740567, "grad_norm": 0.8885520100593567, "learning_rate": 9.48014190628738e-05, "loss": 1.0292, "step": 11134 }, { "epoch": 2.069317970637428, "grad_norm": 0.6353083848953247, "learning_rate": 9.478682953197169e-05, "loss": 0.7653, "step": 11135 }, { "epoch": 2.069503809700799, "grad_norm": 0.7411783933639526, "learning_rate": 9.477224011233573e-05, "loss": 0.7281, "step": 11136 }, { "epoch": 2.0696896487641703, "grad_norm": 0.6759365200996399, "learning_rate": 9.475765080427736e-05, "loss": 0.8458, "step": 11137 }, { "epoch": 2.069875487827541, "grad_norm": 0.7651349306106567, "learning_rate": 9.474306160810791e-05, "loss": 0.9969, "step": 11138 }, { "epoch": 2.0700613268909125, "grad_norm": 0.6856313347816467, "learning_rate": 9.472847252413881e-05, "loss": 0.8236, "step": 11139 }, { "epoch": 2.0702471659542834, "grad_norm": 0.7677355408668518, "learning_rate": 9.47138835526814e-05, "loss": 0.888, "step": 11140 }, { "epoch": 2.0704330050176547, "grad_norm": 0.9559133052825928, "learning_rate": 9.469929469404706e-05, "loss": 0.9105, "step": 11141 }, { "epoch": 2.070618844081026, "grad_norm": 0.7924931645393372, "learning_rate": 9.468470594854718e-05, "loss": 1.0257, "step": 11142 }, { "epoch": 2.070804683144397, "grad_norm": 0.6944732666015625, "learning_rate": 9.467011731649316e-05, "loss": 0.8962, "step": 11143 }, { "epoch": 2.0709905222077682, "grad_norm": 0.9529390335083008, "learning_rate": 
9.465552879819633e-05, "loss": 0.8416, "step": 11144 }, { "epoch": 2.071176361271139, "grad_norm": 0.760055422782898, "learning_rate": 9.464094039396807e-05, "loss": 0.8923, "step": 11145 }, { "epoch": 2.0713622003345105, "grad_norm": 0.7828572988510132, "learning_rate": 9.462635210411974e-05, "loss": 0.9872, "step": 11146 }, { "epoch": 2.0715480393978813, "grad_norm": 0.746248185634613, "learning_rate": 9.461176392896271e-05, "loss": 0.9141, "step": 11147 }, { "epoch": 2.0717338784612527, "grad_norm": 0.8269466161727905, "learning_rate": 9.459717586880832e-05, "loss": 0.78, "step": 11148 }, { "epoch": 2.0719197175246236, "grad_norm": 0.7321226596832275, "learning_rate": 9.458258792396796e-05, "loss": 1.0112, "step": 11149 }, { "epoch": 2.072105556587995, "grad_norm": 0.762174129486084, "learning_rate": 9.456800009475293e-05, "loss": 1.0043, "step": 11150 }, { "epoch": 2.0722913956513658, "grad_norm": 0.737138569355011, "learning_rate": 9.45534123814746e-05, "loss": 0.7323, "step": 11151 }, { "epoch": 2.072477234714737, "grad_norm": 0.8031544089317322, "learning_rate": 9.453882478444436e-05, "loss": 1.0911, "step": 11152 }, { "epoch": 2.072663073778108, "grad_norm": 0.9331284761428833, "learning_rate": 9.452423730397357e-05, "loss": 1.1431, "step": 11153 }, { "epoch": 2.0728489128414793, "grad_norm": 0.6606505513191223, "learning_rate": 9.450964994037349e-05, "loss": 0.8519, "step": 11154 }, { "epoch": 2.07303475190485, "grad_norm": 0.7464000582695007, "learning_rate": 9.449506269395553e-05, "loss": 0.7497, "step": 11155 }, { "epoch": 2.0732205909682215, "grad_norm": 0.7550594806671143, "learning_rate": 9.448047556503102e-05, "loss": 0.9044, "step": 11156 }, { "epoch": 2.073406430031593, "grad_norm": 0.7202317118644714, "learning_rate": 9.446588855391129e-05, "loss": 0.6572, "step": 11157 }, { "epoch": 2.0735922690949637, "grad_norm": 0.751534104347229, "learning_rate": 9.445130166090765e-05, "loss": 1.0057, "step": 11158 }, { "epoch": 2.073778108158335, 
"grad_norm": 0.7617736458778381, "learning_rate": 9.443671488633146e-05, "loss": 0.8516, "step": 11159 }, { "epoch": 2.073963947221706, "grad_norm": 0.8996736407279968, "learning_rate": 9.442212823049404e-05, "loss": 0.8691, "step": 11160 }, { "epoch": 2.0741497862850773, "grad_norm": 1.1209017038345337, "learning_rate": 9.440754169370673e-05, "loss": 1.1305, "step": 11161 }, { "epoch": 2.074335625348448, "grad_norm": 0.9391574859619141, "learning_rate": 9.439295527628081e-05, "loss": 1.184, "step": 11162 }, { "epoch": 2.0745214644118195, "grad_norm": 0.8048052787780762, "learning_rate": 9.437836897852769e-05, "loss": 0.7383, "step": 11163 }, { "epoch": 2.0747073034751904, "grad_norm": 0.7512253522872925, "learning_rate": 9.43637828007586e-05, "loss": 0.8006, "step": 11164 }, { "epoch": 2.0748931425385617, "grad_norm": 0.7666751742362976, "learning_rate": 9.434919674328493e-05, "loss": 0.6582, "step": 11165 }, { "epoch": 2.0750789816019326, "grad_norm": 0.7968409061431885, "learning_rate": 9.433461080641793e-05, "loss": 0.8598, "step": 11166 }, { "epoch": 2.075264820665304, "grad_norm": 0.8389952182769775, "learning_rate": 9.432002499046897e-05, "loss": 0.7596, "step": 11167 }, { "epoch": 2.075450659728675, "grad_norm": 0.8382617235183716, "learning_rate": 9.43054392957493e-05, "loss": 0.9456, "step": 11168 }, { "epoch": 2.075636498792046, "grad_norm": 0.7901649475097656, "learning_rate": 9.429085372257027e-05, "loss": 0.7259, "step": 11169 }, { "epoch": 2.075822337855417, "grad_norm": 0.6912295818328857, "learning_rate": 9.427626827124317e-05, "loss": 0.742, "step": 11170 }, { "epoch": 2.0760081769187884, "grad_norm": 0.7385955452919006, "learning_rate": 9.426168294207928e-05, "loss": 0.7213, "step": 11171 }, { "epoch": 2.0761940159821592, "grad_norm": 0.7250804901123047, "learning_rate": 9.42470977353899e-05, "loss": 0.8584, "step": 11172 }, { "epoch": 2.0763798550455306, "grad_norm": 0.8665145635604858, "learning_rate": 9.423251265148641e-05, "loss": 0.972, 
"step": 11173 }, { "epoch": 2.076565694108902, "grad_norm": 0.9986973404884338, "learning_rate": 9.421792769068e-05, "loss": 1.2396, "step": 11174 }, { "epoch": 2.076751533172273, "grad_norm": 0.737506628036499, "learning_rate": 9.420334285328203e-05, "loss": 1.1461, "step": 11175 }, { "epoch": 2.076937372235644, "grad_norm": 0.8702325224876404, "learning_rate": 9.418875813960372e-05, "loss": 0.9769, "step": 11176 }, { "epoch": 2.077123211299015, "grad_norm": 1.0117233991622925, "learning_rate": 9.417417354995645e-05, "loss": 1.0833, "step": 11177 }, { "epoch": 2.0773090503623863, "grad_norm": 0.8385712504386902, "learning_rate": 9.41595890846514e-05, "loss": 0.8416, "step": 11178 }, { "epoch": 2.077494889425757, "grad_norm": 0.8949857950210571, "learning_rate": 9.414500474399993e-05, "loss": 0.8885, "step": 11179 }, { "epoch": 2.0776807284891285, "grad_norm": 0.7184500098228455, "learning_rate": 9.413042052831327e-05, "loss": 0.946, "step": 11180 }, { "epoch": 2.0778665675524994, "grad_norm": 0.6816924214363098, "learning_rate": 9.41158364379027e-05, "loss": 0.7566, "step": 11181 }, { "epoch": 2.0780524066158708, "grad_norm": 0.7060255408287048, "learning_rate": 9.410125247307952e-05, "loss": 0.6378, "step": 11182 }, { "epoch": 2.0782382456792416, "grad_norm": 0.7339649796485901, "learning_rate": 9.408666863415497e-05, "loss": 1.0362, "step": 11183 }, { "epoch": 2.078424084742613, "grad_norm": 0.815864622592926, "learning_rate": 9.407208492144031e-05, "loss": 0.9541, "step": 11184 }, { "epoch": 2.078609923805984, "grad_norm": 0.746654212474823, "learning_rate": 9.405750133524687e-05, "loss": 0.9796, "step": 11185 }, { "epoch": 2.078795762869355, "grad_norm": 0.7695572972297668, "learning_rate": 9.404291787588584e-05, "loss": 0.8375, "step": 11186 }, { "epoch": 2.078981601932726, "grad_norm": 0.7486796379089355, "learning_rate": 9.402833454366854e-05, "loss": 0.5969, "step": 11187 }, { "epoch": 2.0791674409960974, "grad_norm": 0.6751930713653564, "learning_rate": 
9.401375133890615e-05, "loss": 0.9364, "step": 11188 }, { "epoch": 2.0793532800594683, "grad_norm": 0.9874372482299805, "learning_rate": 9.399916826191001e-05, "loss": 1.09, "step": 11189 }, { "epoch": 2.0795391191228396, "grad_norm": 0.7594035267829895, "learning_rate": 9.398458531299128e-05, "loss": 1.1015, "step": 11190 }, { "epoch": 2.079724958186211, "grad_norm": 6.832021713256836, "learning_rate": 9.397000249246126e-05, "loss": 1.9706, "step": 11191 }, { "epoch": 2.079910797249582, "grad_norm": 0.893206775188446, "learning_rate": 9.395541980063122e-05, "loss": 1.2385, "step": 11192 }, { "epoch": 2.080096636312953, "grad_norm": 0.7244065999984741, "learning_rate": 9.394083723781232e-05, "loss": 0.8494, "step": 11193 }, { "epoch": 2.080282475376324, "grad_norm": 0.8250991702079773, "learning_rate": 9.392625480431587e-05, "loss": 1.0322, "step": 11194 }, { "epoch": 2.0804683144396954, "grad_norm": 0.64857017993927, "learning_rate": 9.391167250045311e-05, "loss": 0.828, "step": 11195 }, { "epoch": 2.0806541535030663, "grad_norm": 0.7038189172744751, "learning_rate": 9.389709032653526e-05, "loss": 0.9225, "step": 11196 }, { "epoch": 2.0808399925664376, "grad_norm": 0.7618677020072937, "learning_rate": 9.388250828287353e-05, "loss": 0.9054, "step": 11197 }, { "epoch": 2.0810258316298085, "grad_norm": 0.9729681015014648, "learning_rate": 9.386792636977915e-05, "loss": 0.9982, "step": 11198 }, { "epoch": 2.08121167069318, "grad_norm": 0.7468367218971252, "learning_rate": 9.385334458756338e-05, "loss": 0.7856, "step": 11199 }, { "epoch": 2.0813975097565507, "grad_norm": 0.9042201638221741, "learning_rate": 9.383876293653741e-05, "loss": 0.7959, "step": 11200 }, { "epoch": 2.081583348819922, "grad_norm": 0.8591930270195007, "learning_rate": 9.382418141701246e-05, "loss": 0.9576, "step": 11201 }, { "epoch": 2.081769187883293, "grad_norm": 0.8453245162963867, "learning_rate": 9.380960002929979e-05, "loss": 1.1545, "step": 11202 }, { "epoch": 2.0819550269466642, 
"grad_norm": 0.9259535670280457, "learning_rate": 9.379501877371055e-05, "loss": 0.8565, "step": 11203 }, { "epoch": 2.082140866010035, "grad_norm": 0.7732197046279907, "learning_rate": 9.378043765055597e-05, "loss": 0.6425, "step": 11204 }, { "epoch": 2.0823267050734064, "grad_norm": 0.7024487257003784, "learning_rate": 9.376585666014733e-05, "loss": 0.8923, "step": 11205 }, { "epoch": 2.0825125441367778, "grad_norm": 0.739058256149292, "learning_rate": 9.375127580279574e-05, "loss": 0.8377, "step": 11206 }, { "epoch": 2.0826983832001487, "grad_norm": 0.7641466856002808, "learning_rate": 9.373669507881249e-05, "loss": 0.953, "step": 11207 }, { "epoch": 2.08288422226352, "grad_norm": 0.6840345859527588, "learning_rate": 9.372211448850869e-05, "loss": 1.0087, "step": 11208 }, { "epoch": 2.083070061326891, "grad_norm": 0.8499522805213928, "learning_rate": 9.370753403219562e-05, "loss": 0.9861, "step": 11209 }, { "epoch": 2.083255900390262, "grad_norm": 0.8159723281860352, "learning_rate": 9.369295371018442e-05, "loss": 1.0673, "step": 11210 }, { "epoch": 2.083441739453633, "grad_norm": 0.6602420806884766, "learning_rate": 9.367837352278629e-05, "loss": 0.703, "step": 11211 }, { "epoch": 2.0836275785170044, "grad_norm": 0.8844058513641357, "learning_rate": 9.366379347031245e-05, "loss": 0.9038, "step": 11212 }, { "epoch": 2.0838134175803753, "grad_norm": 0.7467980980873108, "learning_rate": 9.364921355307404e-05, "loss": 0.6782, "step": 11213 }, { "epoch": 2.0839992566437466, "grad_norm": 0.6584466099739075, "learning_rate": 9.363463377138225e-05, "loss": 0.8225, "step": 11214 }, { "epoch": 2.0841850957071175, "grad_norm": 0.7740834951400757, "learning_rate": 9.362005412554832e-05, "loss": 0.8503, "step": 11215 }, { "epoch": 2.084370934770489, "grad_norm": 0.7651553153991699, "learning_rate": 9.360547461588339e-05, "loss": 1.0257, "step": 11216 }, { "epoch": 2.0845567738338597, "grad_norm": 0.8948690295219421, "learning_rate": 9.359089524269862e-05, "loss": 1.1734, 
"step": 11217 }, { "epoch": 2.084742612897231, "grad_norm": 0.8161410689353943, "learning_rate": 9.357631600630518e-05, "loss": 1.2433, "step": 11218 }, { "epoch": 2.084928451960602, "grad_norm": 0.7995330095291138, "learning_rate": 9.356173690701427e-05, "loss": 1.157, "step": 11219 }, { "epoch": 2.0851142910239733, "grad_norm": 0.7776280045509338, "learning_rate": 9.354715794513702e-05, "loss": 0.8992, "step": 11220 }, { "epoch": 2.085300130087344, "grad_norm": 0.8435629606246948, "learning_rate": 9.353257912098462e-05, "loss": 0.7887, "step": 11221 }, { "epoch": 2.0854859691507155, "grad_norm": 0.7178513407707214, "learning_rate": 9.351800043486823e-05, "loss": 0.9152, "step": 11222 }, { "epoch": 2.0856718082140864, "grad_norm": 0.8578248620033264, "learning_rate": 9.350342188709898e-05, "loss": 0.9122, "step": 11223 }, { "epoch": 2.0858576472774577, "grad_norm": 0.7572755217552185, "learning_rate": 9.348884347798805e-05, "loss": 0.8292, "step": 11224 }, { "epoch": 2.086043486340829, "grad_norm": 0.8700441718101501, "learning_rate": 9.347426520784656e-05, "loss": 1.1506, "step": 11225 }, { "epoch": 2.0862293254042, "grad_norm": 0.8453944325447083, "learning_rate": 9.345968707698569e-05, "loss": 1.0707, "step": 11226 }, { "epoch": 2.0864151644675712, "grad_norm": 0.7037955522537231, "learning_rate": 9.34451090857166e-05, "loss": 0.8334, "step": 11227 }, { "epoch": 2.086601003530942, "grad_norm": 0.8542226552963257, "learning_rate": 9.343053123435038e-05, "loss": 0.7795, "step": 11228 }, { "epoch": 2.0867868425943135, "grad_norm": 0.895807147026062, "learning_rate": 9.341595352319822e-05, "loss": 0.7925, "step": 11229 }, { "epoch": 2.0869726816576843, "grad_norm": 0.8979935050010681, "learning_rate": 9.340137595257122e-05, "loss": 1.0552, "step": 11230 }, { "epoch": 2.0871585207210557, "grad_norm": 0.7536588311195374, "learning_rate": 9.338679852278052e-05, "loss": 0.9038, "step": 11231 }, { "epoch": 2.0873443597844266, "grad_norm": 0.7497516870498657, 
"learning_rate": 9.337222123413729e-05, "loss": 0.8808, "step": 11232 }, { "epoch": 2.087530198847798, "grad_norm": 0.810566782951355, "learning_rate": 9.335764408695259e-05, "loss": 0.9248, "step": 11233 }, { "epoch": 2.0877160379111688, "grad_norm": 0.7648537158966064, "learning_rate": 9.334306708153761e-05, "loss": 0.984, "step": 11234 }, { "epoch": 2.08790187697454, "grad_norm": 0.9524139165878296, "learning_rate": 9.332849021820339e-05, "loss": 0.8884, "step": 11235 }, { "epoch": 2.088087716037911, "grad_norm": 0.6822572350502014, "learning_rate": 9.331391349726112e-05, "loss": 0.869, "step": 11236 }, { "epoch": 2.0882735551012823, "grad_norm": 0.8357948064804077, "learning_rate": 9.329933691902191e-05, "loss": 1.0129, "step": 11237 }, { "epoch": 2.088459394164653, "grad_norm": 0.8944813013076782, "learning_rate": 9.328476048379684e-05, "loss": 0.9413, "step": 11238 }, { "epoch": 2.0886452332280245, "grad_norm": 0.9181443452835083, "learning_rate": 9.327018419189705e-05, "loss": 1.0597, "step": 11239 }, { "epoch": 2.088831072291396, "grad_norm": 0.8279556632041931, "learning_rate": 9.32556080436336e-05, "loss": 0.8327, "step": 11240 }, { "epoch": 2.0890169113547667, "grad_norm": 0.7568429708480835, "learning_rate": 9.324103203931763e-05, "loss": 0.7845, "step": 11241 }, { "epoch": 2.089202750418138, "grad_norm": 0.8147261738777161, "learning_rate": 9.322645617926026e-05, "loss": 0.8945, "step": 11242 }, { "epoch": 2.089388589481509, "grad_norm": 0.7661022543907166, "learning_rate": 9.321188046377254e-05, "loss": 0.8606, "step": 11243 }, { "epoch": 2.0895744285448803, "grad_norm": 0.7411051392555237, "learning_rate": 9.319730489316559e-05, "loss": 0.7371, "step": 11244 }, { "epoch": 2.089760267608251, "grad_norm": 0.9050106406211853, "learning_rate": 9.318272946775049e-05, "loss": 0.9799, "step": 11245 }, { "epoch": 2.0899461066716225, "grad_norm": 0.7047238349914551, "learning_rate": 9.316815418783829e-05, "loss": 0.9825, "step": 11246 }, { "epoch": 
2.0901319457349934, "grad_norm": 0.8626230955123901, "learning_rate": 9.315357905374017e-05, "loss": 1.0136, "step": 11247 }, { "epoch": 2.0903177847983647, "grad_norm": 0.7386853098869324, "learning_rate": 9.313900406576712e-05, "loss": 0.8804, "step": 11248 }, { "epoch": 2.0905036238617356, "grad_norm": 0.7710316777229309, "learning_rate": 9.31244292242303e-05, "loss": 0.7352, "step": 11249 }, { "epoch": 2.090689462925107, "grad_norm": 0.7478527426719666, "learning_rate": 9.31098545294407e-05, "loss": 0.9791, "step": 11250 }, { "epoch": 2.090875301988478, "grad_norm": 0.7317497730255127, "learning_rate": 9.309527998170944e-05, "loss": 0.935, "step": 11251 }, { "epoch": 2.091061141051849, "grad_norm": 0.8391066789627075, "learning_rate": 9.308070558134759e-05, "loss": 0.9382, "step": 11252 }, { "epoch": 2.09124698011522, "grad_norm": 0.7186779975891113, "learning_rate": 9.30661313286662e-05, "loss": 0.9443, "step": 11253 }, { "epoch": 2.0914328191785914, "grad_norm": 0.6773986220359802, "learning_rate": 9.305155722397634e-05, "loss": 0.8425, "step": 11254 }, { "epoch": 2.0916186582419622, "grad_norm": 0.7712244391441345, "learning_rate": 9.303698326758905e-05, "loss": 1.0549, "step": 11255 }, { "epoch": 2.0918044973053336, "grad_norm": 0.7316671013832092, "learning_rate": 9.30224094598154e-05, "loss": 0.9495, "step": 11256 }, { "epoch": 2.091990336368705, "grad_norm": 0.7815166115760803, "learning_rate": 9.300783580096647e-05, "loss": 1.0787, "step": 11257 }, { "epoch": 2.092176175432076, "grad_norm": 0.7134209275245667, "learning_rate": 9.299326229135326e-05, "loss": 0.7159, "step": 11258 }, { "epoch": 2.092362014495447, "grad_norm": 0.8529147505760193, "learning_rate": 9.297868893128689e-05, "loss": 0.8936, "step": 11259 }, { "epoch": 2.092547853558818, "grad_norm": 0.8664206266403198, "learning_rate": 9.296411572107831e-05, "loss": 0.953, "step": 11260 }, { "epoch": 2.0927336926221893, "grad_norm": 0.7702050805091858, "learning_rate": 9.294954266103862e-05, 
"loss": 0.8955, "step": 11261 }, { "epoch": 2.09291953168556, "grad_norm": 0.7006514072418213, "learning_rate": 9.293496975147885e-05, "loss": 0.8528, "step": 11262 }, { "epoch": 2.0931053707489315, "grad_norm": 0.7274358868598938, "learning_rate": 9.292039699271003e-05, "loss": 0.8714, "step": 11263 }, { "epoch": 2.0932912098123024, "grad_norm": 0.717570960521698, "learning_rate": 9.290582438504319e-05, "loss": 1.0414, "step": 11264 }, { "epoch": 2.0934770488756738, "grad_norm": 0.7226032018661499, "learning_rate": 9.289125192878933e-05, "loss": 0.9059, "step": 11265 }, { "epoch": 2.0936628879390446, "grad_norm": 0.7209280729293823, "learning_rate": 9.287667962425952e-05, "loss": 0.9195, "step": 11266 }, { "epoch": 2.093848727002416, "grad_norm": 0.7089038491249084, "learning_rate": 9.286210747176473e-05, "loss": 0.8997, "step": 11267 }, { "epoch": 2.094034566065787, "grad_norm": 0.8167850971221924, "learning_rate": 9.284753547161601e-05, "loss": 0.9131, "step": 11268 }, { "epoch": 2.094220405129158, "grad_norm": 0.6800757050514221, "learning_rate": 9.28329636241244e-05, "loss": 0.7618, "step": 11269 }, { "epoch": 2.094406244192529, "grad_norm": 0.8122108578681946, "learning_rate": 9.281839192960087e-05, "loss": 0.791, "step": 11270 }, { "epoch": 2.0945920832559004, "grad_norm": 0.7946428656578064, "learning_rate": 9.280382038835644e-05, "loss": 0.8722, "step": 11271 }, { "epoch": 2.0947779223192713, "grad_norm": 0.8282971382141113, "learning_rate": 9.278924900070212e-05, "loss": 0.959, "step": 11272 }, { "epoch": 2.0949637613826426, "grad_norm": 0.7812013030052185, "learning_rate": 9.277467776694889e-05, "loss": 0.9049, "step": 11273 }, { "epoch": 2.095149600446014, "grad_norm": 0.7257639169692993, "learning_rate": 9.27601066874078e-05, "loss": 0.9538, "step": 11274 }, { "epoch": 2.095335439509385, "grad_norm": 0.7436121702194214, "learning_rate": 9.274553576238977e-05, "loss": 1.0246, "step": 11275 }, { "epoch": 2.095521278572756, "grad_norm": 
0.7253244519233704, "learning_rate": 9.273096499220585e-05, "loss": 0.7533, "step": 11276 }, { "epoch": 2.095707117636127, "grad_norm": 1.0669963359832764, "learning_rate": 9.271639437716698e-05, "loss": 1.0058, "step": 11277 }, { "epoch": 2.0958929566994984, "grad_norm": 0.8048694729804993, "learning_rate": 9.270182391758419e-05, "loss": 0.958, "step": 11278 }, { "epoch": 2.0960787957628693, "grad_norm": 0.7488383054733276, "learning_rate": 9.268725361376847e-05, "loss": 0.835, "step": 11279 }, { "epoch": 2.0962646348262406, "grad_norm": 0.8129644393920898, "learning_rate": 9.267268346603074e-05, "loss": 0.8263, "step": 11280 }, { "epoch": 2.0964504738896115, "grad_norm": 0.8920313119888306, "learning_rate": 9.265811347468201e-05, "loss": 1.0749, "step": 11281 }, { "epoch": 2.096636312952983, "grad_norm": 0.7410893440246582, "learning_rate": 9.264354364003327e-05, "loss": 0.8763, "step": 11282 }, { "epoch": 2.0968221520163537, "grad_norm": 0.7437509298324585, "learning_rate": 9.262897396239544e-05, "loss": 1.0144, "step": 11283 }, { "epoch": 2.097007991079725, "grad_norm": 0.8066478967666626, "learning_rate": 9.261440444207955e-05, "loss": 0.9315, "step": 11284 }, { "epoch": 2.097193830143096, "grad_norm": 0.8381285667419434, "learning_rate": 9.259983507939649e-05, "loss": 0.9115, "step": 11285 }, { "epoch": 2.0973796692064672, "grad_norm": 0.8904730081558228, "learning_rate": 9.258526587465727e-05, "loss": 0.8368, "step": 11286 }, { "epoch": 2.097565508269838, "grad_norm": 0.895017683506012, "learning_rate": 9.25706968281728e-05, "loss": 1.0122, "step": 11287 }, { "epoch": 2.0977513473332094, "grad_norm": 0.8561277389526367, "learning_rate": 9.255612794025405e-05, "loss": 0.8293, "step": 11288 }, { "epoch": 2.0979371863965808, "grad_norm": 0.9393472671508789, "learning_rate": 9.254155921121201e-05, "loss": 0.7817, "step": 11289 }, { "epoch": 2.0981230254599517, "grad_norm": 0.7481895089149475, "learning_rate": 9.252699064135758e-05, "loss": 0.9282, "step": 11290 
}, { "epoch": 2.098308864523323, "grad_norm": 0.7104239463806152, "learning_rate": 9.251242223100169e-05, "loss": 0.8032, "step": 11291 }, { "epoch": 2.098494703586694, "grad_norm": 0.8819231986999512, "learning_rate": 9.249785398045534e-05, "loss": 0.9462, "step": 11292 }, { "epoch": 2.098680542650065, "grad_norm": 0.7585166096687317, "learning_rate": 9.24832858900294e-05, "loss": 0.8988, "step": 11293 }, { "epoch": 2.098866381713436, "grad_norm": 0.7590588927268982, "learning_rate": 9.246871796003482e-05, "loss": 1.0465, "step": 11294 }, { "epoch": 2.0990522207768074, "grad_norm": 0.6837874054908752, "learning_rate": 9.245415019078253e-05, "loss": 0.8946, "step": 11295 }, { "epoch": 2.0992380598401783, "grad_norm": 0.9009872674942017, "learning_rate": 9.243958258258347e-05, "loss": 1.042, "step": 11296 }, { "epoch": 2.0994238989035496, "grad_norm": 0.7737190127372742, "learning_rate": 9.242501513574853e-05, "loss": 0.8884, "step": 11297 }, { "epoch": 2.0996097379669205, "grad_norm": 0.9149159789085388, "learning_rate": 9.241044785058862e-05, "loss": 1.0078, "step": 11298 }, { "epoch": 2.099795577030292, "grad_norm": 0.7528471350669861, "learning_rate": 9.239588072741472e-05, "loss": 1.0133, "step": 11299 }, { "epoch": 2.0999814160936627, "grad_norm": 0.9609968066215515, "learning_rate": 9.238131376653766e-05, "loss": 1.2004, "step": 11300 }, { "epoch": 2.100167255157034, "grad_norm": 0.9802888035774231, "learning_rate": 9.23667469682684e-05, "loss": 0.8518, "step": 11301 }, { "epoch": 2.100353094220405, "grad_norm": 0.7507895231246948, "learning_rate": 9.235218033291784e-05, "loss": 0.9317, "step": 11302 }, { "epoch": 2.1005389332837763, "grad_norm": 0.764111340045929, "learning_rate": 9.233761386079685e-05, "loss": 0.9227, "step": 11303 }, { "epoch": 2.100724772347147, "grad_norm": 0.5499566793441772, "learning_rate": 9.232304755221637e-05, "loss": 0.4425, "step": 11304 }, { "epoch": 2.1009106114105185, "grad_norm": 0.6732088923454285, "learning_rate": 
9.230848140748724e-05, "loss": 0.8442, "step": 11305 }, { "epoch": 2.10109645047389, "grad_norm": 0.8917862772941589, "learning_rate": 9.229391542692039e-05, "loss": 1.037, "step": 11306 }, { "epoch": 2.1012822895372607, "grad_norm": 0.7546885013580322, "learning_rate": 9.227934961082667e-05, "loss": 1.1376, "step": 11307 }, { "epoch": 2.101468128600632, "grad_norm": 0.9907255172729492, "learning_rate": 9.226478395951701e-05, "loss": 0.9869, "step": 11308 }, { "epoch": 2.101653967664003, "grad_norm": 0.7944941520690918, "learning_rate": 9.22502184733022e-05, "loss": 1.002, "step": 11309 }, { "epoch": 2.1018398067273742, "grad_norm": 0.8978254795074463, "learning_rate": 9.223565315249325e-05, "loss": 0.9644, "step": 11310 }, { "epoch": 2.102025645790745, "grad_norm": 0.7816819548606873, "learning_rate": 9.222108799740091e-05, "loss": 0.8697, "step": 11311 }, { "epoch": 2.1022114848541165, "grad_norm": 0.7533339262008667, "learning_rate": 9.220652300833615e-05, "loss": 0.9458, "step": 11312 }, { "epoch": 2.1023973239174873, "grad_norm": 0.7051428556442261, "learning_rate": 9.219195818560974e-05, "loss": 0.8269, "step": 11313 }, { "epoch": 2.1025831629808587, "grad_norm": 0.7213872671127319, "learning_rate": 9.21773935295326e-05, "loss": 0.8971, "step": 11314 }, { "epoch": 2.1027690020442296, "grad_norm": 1.5018362998962402, "learning_rate": 9.216282904041557e-05, "loss": 0.8616, "step": 11315 }, { "epoch": 2.102954841107601, "grad_norm": 0.6886687278747559, "learning_rate": 9.21482647185695e-05, "loss": 0.888, "step": 11316 }, { "epoch": 2.1031406801709718, "grad_norm": 0.9783439040184021, "learning_rate": 9.213370056430526e-05, "loss": 0.8997, "step": 11317 }, { "epoch": 2.103326519234343, "grad_norm": 0.761658251285553, "learning_rate": 9.211913657793369e-05, "loss": 1.1431, "step": 11318 }, { "epoch": 2.103512358297714, "grad_norm": 0.7639985680580139, "learning_rate": 9.210457275976554e-05, "loss": 0.8484, "step": 11319 }, { "epoch": 2.1036981973610853, 
"grad_norm": 0.7667630910873413, "learning_rate": 9.209000911011182e-05, "loss": 0.838, "step": 11320 }, { "epoch": 2.103884036424456, "grad_norm": 0.8004581332206726, "learning_rate": 9.207544562928325e-05, "loss": 0.815, "step": 11321 }, { "epoch": 2.1040698754878275, "grad_norm": 0.8453937768936157, "learning_rate": 9.206088231759073e-05, "loss": 0.8801, "step": 11322 }, { "epoch": 2.104255714551199, "grad_norm": 0.7058165669441223, "learning_rate": 9.204631917534501e-05, "loss": 0.7879, "step": 11323 }, { "epoch": 2.1044415536145697, "grad_norm": 0.7437944412231445, "learning_rate": 9.2031756202857e-05, "loss": 0.8152, "step": 11324 }, { "epoch": 2.104627392677941, "grad_norm": 0.9589625597000122, "learning_rate": 9.201719340043745e-05, "loss": 0.8667, "step": 11325 }, { "epoch": 2.104813231741312, "grad_norm": 0.7726141810417175, "learning_rate": 9.200263076839724e-05, "loss": 0.756, "step": 11326 }, { "epoch": 2.1049990708046833, "grad_norm": 0.7532119154930115, "learning_rate": 9.198806830704711e-05, "loss": 0.8386, "step": 11327 }, { "epoch": 2.105184909868054, "grad_norm": 2.2922170162200928, "learning_rate": 9.197350601669795e-05, "loss": 1.3785, "step": 11328 }, { "epoch": 2.1053707489314255, "grad_norm": 0.7087710499763489, "learning_rate": 9.19589438976605e-05, "loss": 0.8279, "step": 11329 }, { "epoch": 2.1055565879947964, "grad_norm": 0.8486080169677734, "learning_rate": 9.194438195024557e-05, "loss": 0.9904, "step": 11330 }, { "epoch": 2.1057424270581677, "grad_norm": 0.7624375224113464, "learning_rate": 9.192982017476401e-05, "loss": 0.9955, "step": 11331 }, { "epoch": 2.1059282661215386, "grad_norm": 0.8084495663642883, "learning_rate": 9.191525857152662e-05, "loss": 0.9191, "step": 11332 }, { "epoch": 2.10611410518491, "grad_norm": 0.6322254538536072, "learning_rate": 9.190069714084413e-05, "loss": 0.7792, "step": 11333 }, { "epoch": 2.106299944248281, "grad_norm": 0.8094192743301392, "learning_rate": 9.188613588302739e-05, "loss": 0.8863, 
"step": 11334 }, { "epoch": 2.106485783311652, "grad_norm": 0.7641638517379761, "learning_rate": 9.187157479838713e-05, "loss": 0.6735, "step": 11335 }, { "epoch": 2.106671622375023, "grad_norm": 0.765892505645752, "learning_rate": 9.185701388723416e-05, "loss": 0.8556, "step": 11336 }, { "epoch": 2.1068574614383944, "grad_norm": 0.8530939817428589, "learning_rate": 9.184245314987925e-05, "loss": 0.8466, "step": 11337 }, { "epoch": 2.1070433005017657, "grad_norm": 0.7106696367263794, "learning_rate": 9.182789258663321e-05, "loss": 0.9071, "step": 11338 }, { "epoch": 2.1072291395651366, "grad_norm": 0.9117116928100586, "learning_rate": 9.181333219780675e-05, "loss": 1.0345, "step": 11339 }, { "epoch": 2.107414978628508, "grad_norm": 0.7420015335083008, "learning_rate": 9.179877198371063e-05, "loss": 0.639, "step": 11340 }, { "epoch": 2.107600817691879, "grad_norm": 0.8371978402137756, "learning_rate": 9.178421194465569e-05, "loss": 0.9271, "step": 11341 }, { "epoch": 2.10778665675525, "grad_norm": 0.684158444404602, "learning_rate": 9.176965208095265e-05, "loss": 0.8825, "step": 11342 }, { "epoch": 2.107972495818621, "grad_norm": 0.6936484575271606, "learning_rate": 9.175509239291225e-05, "loss": 0.8442, "step": 11343 }, { "epoch": 2.1081583348819923, "grad_norm": 0.8441296815872192, "learning_rate": 9.174053288084527e-05, "loss": 0.7672, "step": 11344 }, { "epoch": 2.108344173945363, "grad_norm": 0.954045832157135, "learning_rate": 9.172597354506244e-05, "loss": 0.9207, "step": 11345 }, { "epoch": 2.1085300130087345, "grad_norm": 0.7127572298049927, "learning_rate": 9.171141438587451e-05, "loss": 0.7678, "step": 11346 }, { "epoch": 2.1087158520721054, "grad_norm": 0.6595544815063477, "learning_rate": 9.16968554035922e-05, "loss": 0.7733, "step": 11347 }, { "epoch": 2.1089016911354768, "grad_norm": 0.7821077704429626, "learning_rate": 9.168229659852629e-05, "loss": 0.8252, "step": 11348 }, { "epoch": 2.1090875301988476, "grad_norm": 0.8677456378936768, 
"learning_rate": 9.166773797098745e-05, "loss": 1.0355, "step": 11349 }, { "epoch": 2.109273369262219, "grad_norm": 0.7058128118515015, "learning_rate": 9.165317952128647e-05, "loss": 0.7044, "step": 11350 }, { "epoch": 2.10945920832559, "grad_norm": 0.7050992846488953, "learning_rate": 9.163862124973398e-05, "loss": 0.9244, "step": 11351 }, { "epoch": 2.109645047388961, "grad_norm": 0.7171057462692261, "learning_rate": 9.162406315664085e-05, "loss": 0.7257, "step": 11352 }, { "epoch": 2.109830886452332, "grad_norm": 0.8750724196434021, "learning_rate": 9.160950524231768e-05, "loss": 0.9358, "step": 11353 }, { "epoch": 2.1100167255157034, "grad_norm": 0.6821286678314209, "learning_rate": 9.159494750707526e-05, "loss": 0.8675, "step": 11354 }, { "epoch": 2.1102025645790747, "grad_norm": 0.7473365068435669, "learning_rate": 9.158038995122424e-05, "loss": 0.753, "step": 11355 }, { "epoch": 2.1103884036424456, "grad_norm": 0.7012847065925598, "learning_rate": 9.156583257507535e-05, "loss": 0.9714, "step": 11356 }, { "epoch": 2.110574242705817, "grad_norm": 1.2019333839416504, "learning_rate": 9.155127537893929e-05, "loss": 1.156, "step": 11357 }, { "epoch": 2.110760081769188, "grad_norm": 1.0187358856201172, "learning_rate": 9.153671836312675e-05, "loss": 0.9881, "step": 11358 }, { "epoch": 2.110945920832559, "grad_norm": 0.7755599617958069, "learning_rate": 9.152216152794844e-05, "loss": 0.9068, "step": 11359 }, { "epoch": 2.11113175989593, "grad_norm": 0.7259166836738586, "learning_rate": 9.150760487371506e-05, "loss": 0.969, "step": 11360 }, { "epoch": 2.1113175989593014, "grad_norm": 0.6794169545173645, "learning_rate": 9.149304840073721e-05, "loss": 0.8936, "step": 11361 }, { "epoch": 2.1115034380226723, "grad_norm": 0.8772898316383362, "learning_rate": 9.147849210932571e-05, "loss": 0.887, "step": 11362 }, { "epoch": 2.1116892770860436, "grad_norm": 0.860237181186676, "learning_rate": 9.146393599979114e-05, "loss": 0.9114, "step": 11363 }, { "epoch": 
2.1118751161494145, "grad_norm": 0.6794553995132446, "learning_rate": 9.144938007244426e-05, "loss": 0.8011, "step": 11364 }, { "epoch": 2.112060955212786, "grad_norm": 0.7240217924118042, "learning_rate": 9.143482432759565e-05, "loss": 0.6517, "step": 11365 }, { "epoch": 2.1122467942761567, "grad_norm": 0.7571320533752441, "learning_rate": 9.142026876555602e-05, "loss": 1.0215, "step": 11366 }, { "epoch": 2.112432633339528, "grad_norm": 0.7616672515869141, "learning_rate": 9.140571338663604e-05, "loss": 0.959, "step": 11367 }, { "epoch": 2.112618472402899, "grad_norm": 0.7437677383422852, "learning_rate": 9.139115819114636e-05, "loss": 0.809, "step": 11368 }, { "epoch": 2.1128043114662702, "grad_norm": 0.8062520027160645, "learning_rate": 9.137660317939761e-05, "loss": 1.0213, "step": 11369 }, { "epoch": 2.112990150529641, "grad_norm": 0.8090141415596008, "learning_rate": 9.13620483517005e-05, "loss": 1.038, "step": 11370 }, { "epoch": 2.1131759895930124, "grad_norm": 0.8203197717666626, "learning_rate": 9.134749370836563e-05, "loss": 0.9441, "step": 11371 }, { "epoch": 2.113361828656384, "grad_norm": 0.8296011686325073, "learning_rate": 9.133293924970362e-05, "loss": 0.9218, "step": 11372 }, { "epoch": 2.1135476677197547, "grad_norm": 0.7578732967376709, "learning_rate": 9.131838497602517e-05, "loss": 0.9063, "step": 11373 }, { "epoch": 2.113733506783126, "grad_norm": 1.003522276878357, "learning_rate": 9.130383088764093e-05, "loss": 0.8269, "step": 11374 }, { "epoch": 2.113919345846497, "grad_norm": 0.7684604525566101, "learning_rate": 9.128927698486147e-05, "loss": 0.9037, "step": 11375 }, { "epoch": 2.114105184909868, "grad_norm": 0.8644822239875793, "learning_rate": 9.127472326799746e-05, "loss": 1.0589, "step": 11376 }, { "epoch": 2.114291023973239, "grad_norm": 0.7624018788337708, "learning_rate": 9.12601697373595e-05, "loss": 1.0758, "step": 11377 }, { "epoch": 2.1144768630366104, "grad_norm": 0.7607561349868774, "learning_rate": 9.124561639325822e-05, 
"loss": 1.087, "step": 11378 }, { "epoch": 2.1146627020999813, "grad_norm": 0.7460099458694458, "learning_rate": 9.123106323600424e-05, "loss": 1.0461, "step": 11379 }, { "epoch": 2.1148485411633526, "grad_norm": 0.7677852511405945, "learning_rate": 9.121651026590816e-05, "loss": 1.0069, "step": 11380 }, { "epoch": 2.1150343802267235, "grad_norm": 0.7719981074333191, "learning_rate": 9.120195748328059e-05, "loss": 0.9425, "step": 11381 }, { "epoch": 2.115220219290095, "grad_norm": 1.7005661725997925, "learning_rate": 9.118740488843211e-05, "loss": 1.5553, "step": 11382 }, { "epoch": 2.1154060583534657, "grad_norm": 0.7657036781311035, "learning_rate": 9.117285248167337e-05, "loss": 0.9203, "step": 11383 }, { "epoch": 2.115591897416837, "grad_norm": 0.8619498610496521, "learning_rate": 9.115830026331498e-05, "loss": 1.0513, "step": 11384 }, { "epoch": 2.115777736480208, "grad_norm": 0.8409205079078674, "learning_rate": 9.114374823366747e-05, "loss": 1.0542, "step": 11385 }, { "epoch": 2.1159635755435793, "grad_norm": 0.7962708473205566, "learning_rate": 9.112919639304148e-05, "loss": 0.5205, "step": 11386 }, { "epoch": 2.1161494146069506, "grad_norm": 0.6844388842582703, "learning_rate": 9.111464474174755e-05, "loss": 0.7801, "step": 11387 }, { "epoch": 2.1163352536703215, "grad_norm": 0.9194725155830383, "learning_rate": 9.11000932800963e-05, "loss": 1.1574, "step": 11388 }, { "epoch": 2.116521092733693, "grad_norm": 0.8045440316200256, "learning_rate": 9.108554200839826e-05, "loss": 0.7407, "step": 11389 }, { "epoch": 2.1167069317970637, "grad_norm": 0.6948972344398499, "learning_rate": 9.107099092696405e-05, "loss": 0.8281, "step": 11390 }, { "epoch": 2.116892770860435, "grad_norm": 0.8075729608535767, "learning_rate": 9.10564400361042e-05, "loss": 1.0748, "step": 11391 }, { "epoch": 2.117078609923806, "grad_norm": 0.8684819340705872, "learning_rate": 9.104188933612929e-05, "loss": 0.8818, "step": 11392 }, { "epoch": 2.1172644489871772, "grad_norm": 
0.7302981019020081, "learning_rate": 9.102733882734984e-05, "loss": 1.0826, "step": 11393 }, { "epoch": 2.117450288050548, "grad_norm": 0.7284327149391174, "learning_rate": 9.10127885100765e-05, "loss": 0.6441, "step": 11394 }, { "epoch": 2.1176361271139195, "grad_norm": 0.8038931488990784, "learning_rate": 9.099823838461975e-05, "loss": 0.9264, "step": 11395 }, { "epoch": 2.1178219661772903, "grad_norm": 0.7592558264732361, "learning_rate": 9.098368845129018e-05, "loss": 1.1874, "step": 11396 }, { "epoch": 2.1180078052406617, "grad_norm": 0.7145334482192993, "learning_rate": 9.096913871039827e-05, "loss": 0.9178, "step": 11397 }, { "epoch": 2.1181936443040326, "grad_norm": 0.8046518564224243, "learning_rate": 9.095458916225464e-05, "loss": 1.0908, "step": 11398 }, { "epoch": 2.118379483367404, "grad_norm": 0.7807143330574036, "learning_rate": 9.094003980716975e-05, "loss": 0.9177, "step": 11399 }, { "epoch": 2.1185653224307748, "grad_norm": 0.7822641134262085, "learning_rate": 9.092549064545417e-05, "loss": 0.9126, "step": 11400 }, { "epoch": 2.118751161494146, "grad_norm": 0.7133731245994568, "learning_rate": 9.091094167741841e-05, "loss": 0.823, "step": 11401 }, { "epoch": 2.118937000557517, "grad_norm": 0.9369903206825256, "learning_rate": 9.0896392903373e-05, "loss": 0.728, "step": 11402 }, { "epoch": 2.1191228396208883, "grad_norm": 0.7016496062278748, "learning_rate": 9.088184432362844e-05, "loss": 1.0928, "step": 11403 }, { "epoch": 2.119308678684259, "grad_norm": 0.6865460276603699, "learning_rate": 9.086729593849532e-05, "loss": 0.749, "step": 11404 }, { "epoch": 2.1194945177476305, "grad_norm": 0.808489978313446, "learning_rate": 9.085274774828404e-05, "loss": 0.8495, "step": 11405 }, { "epoch": 2.119680356811002, "grad_norm": 0.7700434923171997, "learning_rate": 9.083819975330522e-05, "loss": 0.8739, "step": 11406 }, { "epoch": 2.1198661958743727, "grad_norm": 0.7213497161865234, "learning_rate": 9.082365195386928e-05, "loss": 0.9642, "step": 11407 }, { 
"epoch": 2.120052034937744, "grad_norm": 0.8258712887763977, "learning_rate": 9.080910435028674e-05, "loss": 0.9336, "step": 11408 }, { "epoch": 2.120237874001115, "grad_norm": 0.6518661975860596, "learning_rate": 9.07945569428681e-05, "loss": 0.6723, "step": 11409 }, { "epoch": 2.1204237130644863, "grad_norm": 0.7806102633476257, "learning_rate": 9.078000973192384e-05, "loss": 0.8546, "step": 11410 }, { "epoch": 2.120609552127857, "grad_norm": 0.7937559485435486, "learning_rate": 9.076546271776444e-05, "loss": 1.052, "step": 11411 }, { "epoch": 2.1207953911912285, "grad_norm": 0.8053615093231201, "learning_rate": 9.075091590070041e-05, "loss": 0.8929, "step": 11412 }, { "epoch": 2.1209812302545994, "grad_norm": 0.8069961667060852, "learning_rate": 9.073636928104221e-05, "loss": 0.8843, "step": 11413 }, { "epoch": 2.1211670693179707, "grad_norm": 0.7899468541145325, "learning_rate": 9.072182285910028e-05, "loss": 0.8003, "step": 11414 }, { "epoch": 2.1213529083813416, "grad_norm": 0.7648258805274963, "learning_rate": 9.070727663518512e-05, "loss": 1.1836, "step": 11415 }, { "epoch": 2.121538747444713, "grad_norm": 0.7307213544845581, "learning_rate": 9.069273060960723e-05, "loss": 0.9682, "step": 11416 }, { "epoch": 2.121724586508084, "grad_norm": 0.8227604627609253, "learning_rate": 9.0678184782677e-05, "loss": 0.7594, "step": 11417 }, { "epoch": 2.121910425571455, "grad_norm": 0.9569815993309021, "learning_rate": 9.066363915470495e-05, "loss": 1.0488, "step": 11418 }, { "epoch": 2.122096264634826, "grad_norm": 0.7983200550079346, "learning_rate": 9.064909372600148e-05, "loss": 0.8448, "step": 11419 }, { "epoch": 2.1222821036981974, "grad_norm": 0.905168890953064, "learning_rate": 9.063454849687707e-05, "loss": 0.9164, "step": 11420 }, { "epoch": 2.1224679427615687, "grad_norm": 1.0643441677093506, "learning_rate": 9.062000346764214e-05, "loss": 0.8715, "step": 11421 }, { "epoch": 2.1226537818249396, "grad_norm": 0.8178336024284363, "learning_rate": 
9.060545863860713e-05, "loss": 1.1267, "step": 11422 }, { "epoch": 2.122839620888311, "grad_norm": 0.7979642748832703, "learning_rate": 9.059091401008252e-05, "loss": 1.002, "step": 11423 }, { "epoch": 2.123025459951682, "grad_norm": 0.7845909595489502, "learning_rate": 9.057636958237864e-05, "loss": 0.864, "step": 11424 }, { "epoch": 2.123211299015053, "grad_norm": 0.7184850573539734, "learning_rate": 9.056182535580601e-05, "loss": 0.9398, "step": 11425 }, { "epoch": 2.123397138078424, "grad_norm": 0.734954297542572, "learning_rate": 9.054728133067505e-05, "loss": 0.8397, "step": 11426 }, { "epoch": 2.1235829771417953, "grad_norm": 0.8179150819778442, "learning_rate": 9.053273750729611e-05, "loss": 0.7084, "step": 11427 }, { "epoch": 2.123768816205166, "grad_norm": 1.0683249235153198, "learning_rate": 9.051819388597969e-05, "loss": 1.2287, "step": 11428 }, { "epoch": 2.1239546552685376, "grad_norm": 0.9108082056045532, "learning_rate": 9.050365046703611e-05, "loss": 1.1409, "step": 11429 }, { "epoch": 2.1241404943319084, "grad_norm": 0.9073103666305542, "learning_rate": 9.048910725077584e-05, "loss": 0.7285, "step": 11430 }, { "epoch": 2.1243263333952798, "grad_norm": 0.8368109464645386, "learning_rate": 9.047456423750923e-05, "loss": 1.0234, "step": 11431 }, { "epoch": 2.1245121724586506, "grad_norm": 0.8634905815124512, "learning_rate": 9.04600214275467e-05, "loss": 0.977, "step": 11432 }, { "epoch": 2.124698011522022, "grad_norm": 0.7714248895645142, "learning_rate": 9.044547882119867e-05, "loss": 0.9311, "step": 11433 }, { "epoch": 2.124883850585393, "grad_norm": 0.6275684833526611, "learning_rate": 9.043093641877547e-05, "loss": 0.7582, "step": 11434 }, { "epoch": 2.125069689648764, "grad_norm": 0.7640289068222046, "learning_rate": 9.041639422058749e-05, "loss": 0.9363, "step": 11435 }, { "epoch": 2.1252555287121355, "grad_norm": 0.9009206295013428, "learning_rate": 9.040185222694517e-05, "loss": 1.1269, "step": 11436 }, { "epoch": 2.1254413677755064, 
"grad_norm": 0.8125781416893005, "learning_rate": 9.038731043815882e-05, "loss": 0.8613, "step": 11437 }, { "epoch": 2.1256272068388777, "grad_norm": 0.8105628490447998, "learning_rate": 9.037276885453887e-05, "loss": 0.9688, "step": 11438 }, { "epoch": 2.1258130459022486, "grad_norm": 0.8331906199455261, "learning_rate": 9.035822747639561e-05, "loss": 0.7517, "step": 11439 }, { "epoch": 2.12599888496562, "grad_norm": 0.7814489006996155, "learning_rate": 9.034368630403946e-05, "loss": 1.1615, "step": 11440 }, { "epoch": 2.126184724028991, "grad_norm": 0.9096186757087708, "learning_rate": 9.032914533778075e-05, "loss": 1.0034, "step": 11441 }, { "epoch": 2.126370563092362, "grad_norm": 2.2994253635406494, "learning_rate": 9.031460457792982e-05, "loss": 1.5315, "step": 11442 }, { "epoch": 2.126556402155733, "grad_norm": 0.7273047566413879, "learning_rate": 9.030006402479706e-05, "loss": 0.7698, "step": 11443 }, { "epoch": 2.1267422412191044, "grad_norm": 0.9945558905601501, "learning_rate": 9.028552367869277e-05, "loss": 0.7612, "step": 11444 }, { "epoch": 2.1269280802824753, "grad_norm": 0.8034189939498901, "learning_rate": 9.027098353992729e-05, "loss": 0.8135, "step": 11445 }, { "epoch": 2.1271139193458466, "grad_norm": 0.8340957760810852, "learning_rate": 9.0256443608811e-05, "loss": 0.8855, "step": 11446 }, { "epoch": 2.1272997584092175, "grad_norm": 0.9442667365074158, "learning_rate": 9.024190388565418e-05, "loss": 1.0958, "step": 11447 }, { "epoch": 2.127485597472589, "grad_norm": 0.9203138947486877, "learning_rate": 9.02273643707672e-05, "loss": 1.161, "step": 11448 }, { "epoch": 2.1276714365359597, "grad_norm": 0.6896242499351501, "learning_rate": 9.021282506446035e-05, "loss": 0.8245, "step": 11449 }, { "epoch": 2.127857275599331, "grad_norm": 0.7537946701049805, "learning_rate": 9.019828596704394e-05, "loss": 0.7225, "step": 11450 }, { "epoch": 2.128043114662702, "grad_norm": 0.9126191139221191, "learning_rate": 9.018374707882831e-05, "loss": 1.0069, 
"step": 11451 }, { "epoch": 2.1282289537260732, "grad_norm": 0.7200766205787659, "learning_rate": 9.016920840012375e-05, "loss": 0.8518, "step": 11452 }, { "epoch": 2.128414792789444, "grad_norm": 0.8545762896537781, "learning_rate": 9.015466993124057e-05, "loss": 0.8795, "step": 11453 }, { "epoch": 2.1286006318528155, "grad_norm": 0.6515056490898132, "learning_rate": 9.014013167248906e-05, "loss": 0.7824, "step": 11454 }, { "epoch": 2.128786470916187, "grad_norm": 0.7449262142181396, "learning_rate": 9.012559362417954e-05, "loss": 1.0393, "step": 11455 }, { "epoch": 2.1289723099795577, "grad_norm": 0.8179948329925537, "learning_rate": 9.011105578662222e-05, "loss": 1.0184, "step": 11456 }, { "epoch": 2.129158149042929, "grad_norm": 0.653978168964386, "learning_rate": 9.009651816012748e-05, "loss": 0.7403, "step": 11457 }, { "epoch": 2.1293439881063, "grad_norm": 0.7826240658760071, "learning_rate": 9.008198074500557e-05, "loss": 0.8306, "step": 11458 }, { "epoch": 2.129529827169671, "grad_norm": 0.8257392644882202, "learning_rate": 9.006744354156676e-05, "loss": 0.8867, "step": 11459 }, { "epoch": 2.129715666233042, "grad_norm": 0.8371952176094055, "learning_rate": 9.00529065501213e-05, "loss": 0.9432, "step": 11460 }, { "epoch": 2.1299015052964134, "grad_norm": 0.7463301420211792, "learning_rate": 9.003836977097952e-05, "loss": 0.7961, "step": 11461 }, { "epoch": 2.1300873443597843, "grad_norm": 0.7014195919036865, "learning_rate": 9.002383320445163e-05, "loss": 0.9354, "step": 11462 }, { "epoch": 2.1302731834231556, "grad_norm": 0.7329978346824646, "learning_rate": 9.00092968508479e-05, "loss": 0.8252, "step": 11463 }, { "epoch": 2.1304590224865265, "grad_norm": 0.7446506023406982, "learning_rate": 8.999476071047857e-05, "loss": 0.8878, "step": 11464 }, { "epoch": 2.130644861549898, "grad_norm": 0.8863813877105713, "learning_rate": 8.998022478365392e-05, "loss": 0.8377, "step": 11465 }, { "epoch": 2.1308307006132687, "grad_norm": 0.7592625617980957, 
"learning_rate": 8.996568907068415e-05, "loss": 0.9176, "step": 11466 }, { "epoch": 2.13101653967664, "grad_norm": 0.8758193254470825, "learning_rate": 8.995115357187953e-05, "loss": 1.2571, "step": 11467 }, { "epoch": 2.131202378740011, "grad_norm": 0.7715368866920471, "learning_rate": 8.993661828755032e-05, "loss": 0.8266, "step": 11468 }, { "epoch": 2.1313882178033823, "grad_norm": 0.6676689982414246, "learning_rate": 8.99220832180067e-05, "loss": 0.7584, "step": 11469 }, { "epoch": 2.1315740568667536, "grad_norm": 0.9252825379371643, "learning_rate": 8.990754836355893e-05, "loss": 0.8657, "step": 11470 }, { "epoch": 2.1317598959301245, "grad_norm": 0.876563310623169, "learning_rate": 8.989301372451722e-05, "loss": 0.9531, "step": 11471 }, { "epoch": 2.131945734993496, "grad_norm": 0.7079026699066162, "learning_rate": 8.987847930119179e-05, "loss": 0.9201, "step": 11472 }, { "epoch": 2.1321315740568667, "grad_norm": 0.7698578238487244, "learning_rate": 8.986394509389287e-05, "loss": 0.7739, "step": 11473 }, { "epoch": 2.132317413120238, "grad_norm": 0.9522943496704102, "learning_rate": 8.984941110293061e-05, "loss": 0.9106, "step": 11474 }, { "epoch": 2.132503252183609, "grad_norm": 0.8080538511276245, "learning_rate": 8.983487732861528e-05, "loss": 1.0263, "step": 11475 }, { "epoch": 2.1326890912469803, "grad_norm": 0.8311910033226013, "learning_rate": 8.982034377125703e-05, "loss": 0.8961, "step": 11476 }, { "epoch": 2.132874930310351, "grad_norm": 0.7102888822555542, "learning_rate": 8.980581043116605e-05, "loss": 0.8197, "step": 11477 }, { "epoch": 2.1330607693737225, "grad_norm": 0.8097278475761414, "learning_rate": 8.97912773086526e-05, "loss": 0.9794, "step": 11478 }, { "epoch": 2.1332466084370934, "grad_norm": 0.7649460434913635, "learning_rate": 8.977674440402679e-05, "loss": 0.8794, "step": 11479 }, { "epoch": 2.1334324475004647, "grad_norm": 0.730686366558075, "learning_rate": 8.976221171759883e-05, "loss": 1.0425, "step": 11480 }, { "epoch": 
2.1336182865638356, "grad_norm": 0.7751401662826538, "learning_rate": 8.97476792496789e-05, "loss": 0.971, "step": 11481 }, { "epoch": 2.133804125627207, "grad_norm": 0.7385703921318054, "learning_rate": 8.973314700057717e-05, "loss": 0.7994, "step": 11482 }, { "epoch": 2.133989964690578, "grad_norm": 0.777972400188446, "learning_rate": 8.97186149706038e-05, "loss": 1.1205, "step": 11483 }, { "epoch": 2.134175803753949, "grad_norm": 0.7550100684165955, "learning_rate": 8.970408316006893e-05, "loss": 1.0834, "step": 11484 }, { "epoch": 2.13436164281732, "grad_norm": 0.6919218301773071, "learning_rate": 8.968955156928276e-05, "loss": 1.0027, "step": 11485 }, { "epoch": 2.1345474818806913, "grad_norm": 0.8418641090393066, "learning_rate": 8.967502019855539e-05, "loss": 0.8558, "step": 11486 }, { "epoch": 2.134733320944062, "grad_norm": 0.8520260453224182, "learning_rate": 8.966048904819698e-05, "loss": 0.7657, "step": 11487 }, { "epoch": 2.1349191600074335, "grad_norm": 0.7584142088890076, "learning_rate": 8.964595811851774e-05, "loss": 0.9419, "step": 11488 }, { "epoch": 2.135104999070805, "grad_norm": 0.7584882378578186, "learning_rate": 8.963142740982773e-05, "loss": 0.806, "step": 11489 }, { "epoch": 2.1352908381341758, "grad_norm": 0.7900401949882507, "learning_rate": 8.96168969224371e-05, "loss": 1.0048, "step": 11490 }, { "epoch": 2.135476677197547, "grad_norm": 0.7943800091743469, "learning_rate": 8.9602366656656e-05, "loss": 1.0302, "step": 11491 }, { "epoch": 2.135662516260918, "grad_norm": 0.7397233843803406, "learning_rate": 8.958783661279454e-05, "loss": 0.6135, "step": 11492 }, { "epoch": 2.1358483553242893, "grad_norm": 0.7478030323982239, "learning_rate": 8.957330679116286e-05, "loss": 0.8393, "step": 11493 }, { "epoch": 2.13603419438766, "grad_norm": 0.7902356386184692, "learning_rate": 8.955877719207102e-05, "loss": 0.8388, "step": 11494 }, { "epoch": 2.1362200334510315, "grad_norm": 0.8481513261795044, "learning_rate": 8.95442478158292e-05, "loss": 
1.074, "step": 11495 }, { "epoch": 2.1364058725144024, "grad_norm": 0.8785560727119446, "learning_rate": 8.952971866274742e-05, "loss": 0.6792, "step": 11496 }, { "epoch": 2.1365917115777737, "grad_norm": 0.7510362267494202, "learning_rate": 8.951518973313587e-05, "loss": 0.8089, "step": 11497 }, { "epoch": 2.1367775506411446, "grad_norm": 0.7218495011329651, "learning_rate": 8.950066102730456e-05, "loss": 0.85, "step": 11498 }, { "epoch": 2.136963389704516, "grad_norm": 0.857275664806366, "learning_rate": 8.948613254556364e-05, "loss": 1.0204, "step": 11499 }, { "epoch": 2.137149228767887, "grad_norm": 0.7544958591461182, "learning_rate": 8.947160428822318e-05, "loss": 0.7236, "step": 11500 }, { "epoch": 2.137335067831258, "grad_norm": 0.7592241168022156, "learning_rate": 8.945707625559328e-05, "loss": 0.8866, "step": 11501 }, { "epoch": 2.137520906894629, "grad_norm": 0.860626220703125, "learning_rate": 8.944254844798397e-05, "loss": 1.198, "step": 11502 }, { "epoch": 2.1377067459580004, "grad_norm": 0.819098174571991, "learning_rate": 8.942802086570536e-05, "loss": 0.8842, "step": 11503 }, { "epoch": 2.1378925850213717, "grad_norm": 0.7637498378753662, "learning_rate": 8.94134935090675e-05, "loss": 0.7908, "step": 11504 }, { "epoch": 2.1380784240847426, "grad_norm": 0.9611912369728088, "learning_rate": 8.939896637838045e-05, "loss": 0.9452, "step": 11505 }, { "epoch": 2.138264263148114, "grad_norm": 0.7857555747032166, "learning_rate": 8.938443947395427e-05, "loss": 1.004, "step": 11506 }, { "epoch": 2.138450102211485, "grad_norm": 0.7934672236442566, "learning_rate": 8.936991279609903e-05, "loss": 0.9511, "step": 11507 }, { "epoch": 2.138635941274856, "grad_norm": 0.868075430393219, "learning_rate": 8.935538634512472e-05, "loss": 1.0373, "step": 11508 }, { "epoch": 2.138821780338227, "grad_norm": 0.6859819889068604, "learning_rate": 8.934086012134143e-05, "loss": 0.8933, "step": 11509 }, { "epoch": 2.1390076194015983, "grad_norm": 0.7351570129394531, 
"learning_rate": 8.93263341250592e-05, "loss": 1.0215, "step": 11510 }, { "epoch": 2.139193458464969, "grad_norm": 0.7704660892486572, "learning_rate": 8.931180835658807e-05, "loss": 0.8473, "step": 11511 }, { "epoch": 2.1393792975283406, "grad_norm": 0.8092166781425476, "learning_rate": 8.929728281623802e-05, "loss": 0.6342, "step": 11512 }, { "epoch": 2.1395651365917114, "grad_norm": 0.7497720122337341, "learning_rate": 8.928275750431912e-05, "loss": 0.9206, "step": 11513 }, { "epoch": 2.1397509756550828, "grad_norm": 0.875147819519043, "learning_rate": 8.926823242114136e-05, "loss": 0.9346, "step": 11514 }, { "epoch": 2.1399368147184537, "grad_norm": 0.724993884563446, "learning_rate": 8.92537075670148e-05, "loss": 0.8848, "step": 11515 }, { "epoch": 2.140122653781825, "grad_norm": 0.9631668925285339, "learning_rate": 8.923918294224935e-05, "loss": 0.8877, "step": 11516 }, { "epoch": 2.140308492845196, "grad_norm": 0.8639618158340454, "learning_rate": 8.922465854715512e-05, "loss": 0.7687, "step": 11517 }, { "epoch": 2.140494331908567, "grad_norm": 0.845238208770752, "learning_rate": 8.921013438204204e-05, "loss": 0.9183, "step": 11518 }, { "epoch": 2.1406801709719385, "grad_norm": 0.9043660759925842, "learning_rate": 8.91956104472201e-05, "loss": 0.9652, "step": 11519 }, { "epoch": 2.1408660100353094, "grad_norm": 0.6800969243049622, "learning_rate": 8.918108674299934e-05, "loss": 0.8477, "step": 11520 }, { "epoch": 2.1410518490986807, "grad_norm": 0.8428097367286682, "learning_rate": 8.916656326968973e-05, "loss": 1.132, "step": 11521 }, { "epoch": 2.1412376881620516, "grad_norm": 0.6179491877555847, "learning_rate": 8.915204002760122e-05, "loss": 0.6547, "step": 11522 }, { "epoch": 2.141423527225423, "grad_norm": 0.738825261592865, "learning_rate": 8.913751701704383e-05, "loss": 0.9981, "step": 11523 }, { "epoch": 2.141609366288794, "grad_norm": 0.7048845291137695, "learning_rate": 8.912299423832746e-05, "loss": 0.7569, "step": 11524 }, { "epoch": 
2.141795205352165, "grad_norm": 0.790368914604187, "learning_rate": 8.910847169176213e-05, "loss": 0.7607, "step": 11525 }, { "epoch": 2.141981044415536, "grad_norm": 0.9649472832679749, "learning_rate": 8.909394937765778e-05, "loss": 0.8596, "step": 11526 }, { "epoch": 2.1421668834789074, "grad_norm": 0.7964535355567932, "learning_rate": 8.907942729632439e-05, "loss": 0.8862, "step": 11527 }, { "epoch": 2.1423527225422783, "grad_norm": 0.8461882472038269, "learning_rate": 8.906490544807184e-05, "loss": 0.842, "step": 11528 }, { "epoch": 2.1425385616056496, "grad_norm": 0.9009879231452942, "learning_rate": 8.90503838332101e-05, "loss": 0.8015, "step": 11529 }, { "epoch": 2.1427244006690205, "grad_norm": 0.6781339049339294, "learning_rate": 8.903586245204917e-05, "loss": 0.9277, "step": 11530 }, { "epoch": 2.142910239732392, "grad_norm": 0.8017725944519043, "learning_rate": 8.902134130489895e-05, "loss": 0.8699, "step": 11531 }, { "epoch": 2.1430960787957627, "grad_norm": 0.863451361656189, "learning_rate": 8.900682039206932e-05, "loss": 0.8769, "step": 11532 }, { "epoch": 2.143281917859134, "grad_norm": 0.692290723323822, "learning_rate": 8.899229971387029e-05, "loss": 0.8921, "step": 11533 }, { "epoch": 2.143467756922505, "grad_norm": 1.104231834411621, "learning_rate": 8.897777927061169e-05, "loss": 1.2087, "step": 11534 }, { "epoch": 2.1436535959858762, "grad_norm": 0.6479054689407349, "learning_rate": 8.896325906260352e-05, "loss": 0.5637, "step": 11535 }, { "epoch": 2.143839435049247, "grad_norm": 0.854199230670929, "learning_rate": 8.89487390901556e-05, "loss": 1.0063, "step": 11536 }, { "epoch": 2.1440252741126185, "grad_norm": 0.896887481212616, "learning_rate": 8.893421935357793e-05, "loss": 1.0112, "step": 11537 }, { "epoch": 2.14421111317599, "grad_norm": 0.8437681794166565, "learning_rate": 8.891969985318031e-05, "loss": 1.0294, "step": 11538 }, { "epoch": 2.1443969522393607, "grad_norm": 0.767591118812561, "learning_rate": 8.890518058927272e-05, 
"loss": 0.972, "step": 11539 }, { "epoch": 2.144582791302732, "grad_norm": 0.8373448848724365, "learning_rate": 8.889066156216495e-05, "loss": 1.0073, "step": 11540 }, { "epoch": 2.144768630366103, "grad_norm": 0.7533522844314575, "learning_rate": 8.8876142772167e-05, "loss": 0.7912, "step": 11541 }, { "epoch": 2.144954469429474, "grad_norm": 0.9369121789932251, "learning_rate": 8.886162421958867e-05, "loss": 0.8993, "step": 11542 }, { "epoch": 2.145140308492845, "grad_norm": 0.7979742884635925, "learning_rate": 8.884710590473988e-05, "loss": 0.8112, "step": 11543 }, { "epoch": 2.1453261475562164, "grad_norm": 0.9218032360076904, "learning_rate": 8.883258782793045e-05, "loss": 0.9041, "step": 11544 }, { "epoch": 2.1455119866195873, "grad_norm": 0.894201397895813, "learning_rate": 8.88180699894703e-05, "loss": 0.81, "step": 11545 }, { "epoch": 2.1456978256829586, "grad_norm": 0.8262656331062317, "learning_rate": 8.880355238966923e-05, "loss": 1.0715, "step": 11546 }, { "epoch": 2.1458836647463295, "grad_norm": 1.118515133857727, "learning_rate": 8.878903502883714e-05, "loss": 0.7873, "step": 11547 }, { "epoch": 2.146069503809701, "grad_norm": 0.6342157125473022, "learning_rate": 8.877451790728383e-05, "loss": 0.7688, "step": 11548 }, { "epoch": 2.1462553428730717, "grad_norm": 0.8411121368408203, "learning_rate": 8.87600010253192e-05, "loss": 0.8673, "step": 11549 }, { "epoch": 2.146441181936443, "grad_norm": 0.7336524128913879, "learning_rate": 8.874548438325301e-05, "loss": 0.6193, "step": 11550 }, { "epoch": 2.146627020999814, "grad_norm": 0.8948276042938232, "learning_rate": 8.873096798139518e-05, "loss": 0.9816, "step": 11551 }, { "epoch": 2.1468128600631853, "grad_norm": 0.7167133688926697, "learning_rate": 8.87164518200555e-05, "loss": 0.9412, "step": 11552 }, { "epoch": 2.1469986991265566, "grad_norm": 0.7607763409614563, "learning_rate": 8.870193589954381e-05, "loss": 0.8501, "step": 11553 }, { "epoch": 2.1471845381899275, "grad_norm": 0.8126594424247742, 
"learning_rate": 8.868742022016989e-05, "loss": 0.8756, "step": 11554 }, { "epoch": 2.147370377253299, "grad_norm": 0.8860375285148621, "learning_rate": 8.867290478224359e-05, "loss": 0.7168, "step": 11555 }, { "epoch": 2.1475562163166697, "grad_norm": 0.901677131652832, "learning_rate": 8.865838958607467e-05, "loss": 0.8255, "step": 11556 }, { "epoch": 2.147742055380041, "grad_norm": 0.7999879121780396, "learning_rate": 8.864387463197299e-05, "loss": 0.8414, "step": 11557 }, { "epoch": 2.147927894443412, "grad_norm": 0.7665722370147705, "learning_rate": 8.862935992024829e-05, "loss": 0.9855, "step": 11558 }, { "epoch": 2.1481137335067833, "grad_norm": 0.7725609540939331, "learning_rate": 8.861484545121041e-05, "loss": 0.8761, "step": 11559 }, { "epoch": 2.148299572570154, "grad_norm": 0.6915417909622192, "learning_rate": 8.86003312251691e-05, "loss": 0.7849, "step": 11560 }, { "epoch": 2.1484854116335255, "grad_norm": 0.7465013265609741, "learning_rate": 8.858581724243415e-05, "loss": 1.0637, "step": 11561 }, { "epoch": 2.1486712506968964, "grad_norm": 0.766620397567749, "learning_rate": 8.857130350331535e-05, "loss": 0.839, "step": 11562 }, { "epoch": 2.1488570897602677, "grad_norm": 0.7727715969085693, "learning_rate": 8.855679000812249e-05, "loss": 0.9321, "step": 11563 }, { "epoch": 2.1490429288236386, "grad_norm": 0.8486520051956177, "learning_rate": 8.854227675716529e-05, "loss": 0.8663, "step": 11564 }, { "epoch": 2.14922876788701, "grad_norm": 0.7445151805877686, "learning_rate": 8.852776375075355e-05, "loss": 0.761, "step": 11565 }, { "epoch": 2.149414606950381, "grad_norm": 0.9629629850387573, "learning_rate": 8.851325098919698e-05, "loss": 1.2405, "step": 11566 }, { "epoch": 2.149600446013752, "grad_norm": 0.9040130972862244, "learning_rate": 8.849873847280537e-05, "loss": 0.9975, "step": 11567 }, { "epoch": 2.1497862850771234, "grad_norm": 0.8581045866012573, "learning_rate": 8.848422620188844e-05, "loss": 0.9953, "step": 11568 }, { "epoch": 
2.1499721241404943, "grad_norm": 0.6524201035499573, "learning_rate": 8.846971417675596e-05, "loss": 0.6089, "step": 11569 }, { "epoch": 2.1501579632038657, "grad_norm": 0.9643622636795044, "learning_rate": 8.845520239771762e-05, "loss": 1.1088, "step": 11570 }, { "epoch": 2.1503438022672365, "grad_norm": 0.8635513782501221, "learning_rate": 8.844069086508316e-05, "loss": 0.9776, "step": 11571 }, { "epoch": 2.150529641330608, "grad_norm": 0.6922736763954163, "learning_rate": 8.842617957916234e-05, "loss": 0.8603, "step": 11572 }, { "epoch": 2.1507154803939788, "grad_norm": 0.8901512026786804, "learning_rate": 8.841166854026487e-05, "loss": 1.153, "step": 11573 }, { "epoch": 2.15090131945735, "grad_norm": 0.8341671228408813, "learning_rate": 8.839715774870042e-05, "loss": 1.0115, "step": 11574 }, { "epoch": 2.151087158520721, "grad_norm": 0.7496020197868347, "learning_rate": 8.838264720477875e-05, "loss": 1.0316, "step": 11575 }, { "epoch": 2.1512729975840923, "grad_norm": 0.7692998051643372, "learning_rate": 8.836813690880953e-05, "loss": 0.9599, "step": 11576 }, { "epoch": 2.151458836647463, "grad_norm": 0.7583287358283997, "learning_rate": 8.835362686110248e-05, "loss": 0.8289, "step": 11577 }, { "epoch": 2.1516446757108345, "grad_norm": 0.7585510015487671, "learning_rate": 8.833911706196725e-05, "loss": 0.8314, "step": 11578 }, { "epoch": 2.1518305147742054, "grad_norm": 0.8208407759666443, "learning_rate": 8.83246075117136e-05, "loss": 0.8846, "step": 11579 }, { "epoch": 2.1520163538375767, "grad_norm": 0.6847831010818481, "learning_rate": 8.831009821065113e-05, "loss": 0.884, "step": 11580 }, { "epoch": 2.1522021929009476, "grad_norm": 0.8710030913352966, "learning_rate": 8.829558915908956e-05, "loss": 1.1368, "step": 11581 }, { "epoch": 2.152388031964319, "grad_norm": 0.7906744480133057, "learning_rate": 8.82810803573385e-05, "loss": 0.8698, "step": 11582 }, { "epoch": 2.15257387102769, "grad_norm": 0.667944610118866, "learning_rate": 8.826657180570775e-05, 
"loss": 0.6606, "step": 11583 }, { "epoch": 2.152759710091061, "grad_norm": 0.7219695448875427, "learning_rate": 8.825206350450685e-05, "loss": 0.7216, "step": 11584 }, { "epoch": 2.152945549154432, "grad_norm": 0.7834694981575012, "learning_rate": 8.823755545404553e-05, "loss": 0.9002, "step": 11585 }, { "epoch": 2.1531313882178034, "grad_norm": 0.7436776757240295, "learning_rate": 8.822304765463337e-05, "loss": 0.7823, "step": 11586 }, { "epoch": 2.1533172272811747, "grad_norm": 0.7721044421195984, "learning_rate": 8.820854010658007e-05, "loss": 0.7665, "step": 11587 }, { "epoch": 2.1535030663445456, "grad_norm": 0.899067759513855, "learning_rate": 8.819403281019522e-05, "loss": 1.1261, "step": 11588 }, { "epoch": 2.153688905407917, "grad_norm": 0.7515740394592285, "learning_rate": 8.817952576578851e-05, "loss": 1.1386, "step": 11589 }, { "epoch": 2.153874744471288, "grad_norm": 0.7487910985946655, "learning_rate": 8.816501897366953e-05, "loss": 0.8617, "step": 11590 }, { "epoch": 2.154060583534659, "grad_norm": 0.7791520953178406, "learning_rate": 8.815051243414792e-05, "loss": 0.7485, "step": 11591 }, { "epoch": 2.15424642259803, "grad_norm": 0.7022900581359863, "learning_rate": 8.813600614753322e-05, "loss": 0.8004, "step": 11592 }, { "epoch": 2.1544322616614013, "grad_norm": 0.7686887383460999, "learning_rate": 8.812150011413517e-05, "loss": 0.9525, "step": 11593 }, { "epoch": 2.1546181007247722, "grad_norm": 0.8644038438796997, "learning_rate": 8.81069943342633e-05, "loss": 0.9486, "step": 11594 }, { "epoch": 2.1548039397881436, "grad_norm": 1.0755417346954346, "learning_rate": 8.809248880822725e-05, "loss": 1.0545, "step": 11595 }, { "epoch": 2.1549897788515144, "grad_norm": 0.9233502149581909, "learning_rate": 8.807798353633657e-05, "loss": 0.9696, "step": 11596 }, { "epoch": 2.1551756179148858, "grad_norm": 0.885118305683136, "learning_rate": 8.806347851890088e-05, "loss": 0.9493, "step": 11597 }, { "epoch": 2.1553614569782567, "grad_norm": 
0.7692207098007202, "learning_rate": 8.804897375622975e-05, "loss": 1.1325, "step": 11598 }, { "epoch": 2.155547296041628, "grad_norm": 0.8364658951759338, "learning_rate": 8.80344692486328e-05, "loss": 0.8861, "step": 11599 }, { "epoch": 2.155733135104999, "grad_norm": 0.6783618330955505, "learning_rate": 8.801996499641951e-05, "loss": 0.7697, "step": 11600 }, { "epoch": 2.15591897416837, "grad_norm": 0.8993368148803711, "learning_rate": 8.800546099989953e-05, "loss": 0.7748, "step": 11601 }, { "epoch": 2.1561048132317415, "grad_norm": 0.9230395555496216, "learning_rate": 8.799095725938243e-05, "loss": 1.2362, "step": 11602 }, { "epoch": 2.1562906522951124, "grad_norm": 0.8871796727180481, "learning_rate": 8.79764537751777e-05, "loss": 0.9197, "step": 11603 }, { "epoch": 2.1564764913584837, "grad_norm": 0.6526246070861816, "learning_rate": 8.796195054759493e-05, "loss": 0.5867, "step": 11604 }, { "epoch": 2.1566623304218546, "grad_norm": 0.8340665102005005, "learning_rate": 8.79474475769437e-05, "loss": 1.0749, "step": 11605 }, { "epoch": 2.156848169485226, "grad_norm": 0.8240909576416016, "learning_rate": 8.793294486353351e-05, "loss": 0.8532, "step": 11606 }, { "epoch": 2.157034008548597, "grad_norm": 0.7156884074211121, "learning_rate": 8.791844240767391e-05, "loss": 0.8128, "step": 11607 }, { "epoch": 2.157219847611968, "grad_norm": 0.7586245536804199, "learning_rate": 8.790394020967441e-05, "loss": 0.8983, "step": 11608 }, { "epoch": 2.157405686675339, "grad_norm": 0.7270494103431702, "learning_rate": 8.788943826984457e-05, "loss": 0.8398, "step": 11609 }, { "epoch": 2.1575915257387104, "grad_norm": 0.9726952314376831, "learning_rate": 8.787493658849386e-05, "loss": 0.958, "step": 11610 }, { "epoch": 2.1577773648020813, "grad_norm": 0.7318455576896667, "learning_rate": 8.786043516593183e-05, "loss": 0.7498, "step": 11611 }, { "epoch": 2.1579632038654526, "grad_norm": 0.9342050552368164, "learning_rate": 8.784593400246799e-05, "loss": 0.9865, "step": 11612 }, 
{ "epoch": 2.1581490429288235, "grad_norm": 0.735383152961731, "learning_rate": 8.78314330984118e-05, "loss": 0.795, "step": 11613 }, { "epoch": 2.158334881992195, "grad_norm": 0.8356888890266418, "learning_rate": 8.781693245407279e-05, "loss": 1.1769, "step": 11614 }, { "epoch": 2.1585207210555657, "grad_norm": 0.790277898311615, "learning_rate": 8.78024320697605e-05, "loss": 0.7551, "step": 11615 }, { "epoch": 2.158706560118937, "grad_norm": 0.8385562896728516, "learning_rate": 8.778793194578433e-05, "loss": 1.0738, "step": 11616 }, { "epoch": 2.1588923991823084, "grad_norm": 0.7916375398635864, "learning_rate": 8.777343208245382e-05, "loss": 0.9502, "step": 11617 }, { "epoch": 2.1590782382456792, "grad_norm": 0.7239101529121399, "learning_rate": 8.775893248007839e-05, "loss": 0.9905, "step": 11618 }, { "epoch": 2.1592640773090506, "grad_norm": 0.6555399894714355, "learning_rate": 8.774443313896757e-05, "loss": 0.7378, "step": 11619 }, { "epoch": 2.1594499163724215, "grad_norm": 0.8354867696762085, "learning_rate": 8.772993405943077e-05, "loss": 0.9485, "step": 11620 }, { "epoch": 2.159635755435793, "grad_norm": 0.7708397507667542, "learning_rate": 8.771543524177747e-05, "loss": 0.9601, "step": 11621 }, { "epoch": 2.1598215944991637, "grad_norm": 0.7791001200675964, "learning_rate": 8.770093668631714e-05, "loss": 0.7209, "step": 11622 }, { "epoch": 2.160007433562535, "grad_norm": 0.7207884788513184, "learning_rate": 8.76864383933592e-05, "loss": 0.8228, "step": 11623 }, { "epoch": 2.160193272625906, "grad_norm": 1.162050724029541, "learning_rate": 8.767194036321307e-05, "loss": 0.888, "step": 11624 }, { "epoch": 2.160379111689277, "grad_norm": 0.6758953332901001, "learning_rate": 8.765744259618826e-05, "loss": 0.8817, "step": 11625 }, { "epoch": 2.160564950752648, "grad_norm": 0.9050666689872742, "learning_rate": 8.764294509259414e-05, "loss": 0.8469, "step": 11626 }, { "epoch": 2.1607507898160194, "grad_norm": 0.8308858871459961, "learning_rate": 
8.762844785274017e-05, "loss": 0.7436, "step": 11627 }, { "epoch": 2.1609366288793903, "grad_norm": 0.7234518527984619, "learning_rate": 8.761395087693573e-05, "loss": 0.9538, "step": 11628 }, { "epoch": 2.1611224679427616, "grad_norm": 0.6110588908195496, "learning_rate": 8.759945416549028e-05, "loss": 0.6267, "step": 11629 }, { "epoch": 2.1613083070061325, "grad_norm": 0.7216711640357971, "learning_rate": 8.758495771871318e-05, "loss": 0.7065, "step": 11630 }, { "epoch": 2.161494146069504, "grad_norm": 1.0335111618041992, "learning_rate": 8.757046153691383e-05, "loss": 0.8654, "step": 11631 }, { "epoch": 2.1616799851328747, "grad_norm": 0.7840709090232849, "learning_rate": 8.75559656204017e-05, "loss": 0.8205, "step": 11632 }, { "epoch": 2.161865824196246, "grad_norm": 0.8022106289863586, "learning_rate": 8.754146996948609e-05, "loss": 0.7341, "step": 11633 }, { "epoch": 2.162051663259617, "grad_norm": 0.7449596524238586, "learning_rate": 8.752697458447641e-05, "loss": 0.7446, "step": 11634 }, { "epoch": 2.1622375023229883, "grad_norm": 0.6985468864440918, "learning_rate": 8.75124794656821e-05, "loss": 0.9942, "step": 11635 }, { "epoch": 2.1624233413863596, "grad_norm": 0.740747332572937, "learning_rate": 8.749798461341246e-05, "loss": 0.8125, "step": 11636 }, { "epoch": 2.1626091804497305, "grad_norm": 0.6656926274299622, "learning_rate": 8.748349002797688e-05, "loss": 0.8239, "step": 11637 }, { "epoch": 2.162795019513102, "grad_norm": 0.7795909643173218, "learning_rate": 8.746899570968474e-05, "loss": 0.843, "step": 11638 }, { "epoch": 2.1629808585764727, "grad_norm": 0.8801382184028625, "learning_rate": 8.74545016588454e-05, "loss": 0.7779, "step": 11639 }, { "epoch": 2.163166697639844, "grad_norm": 0.7868742346763611, "learning_rate": 8.744000787576816e-05, "loss": 0.8968, "step": 11640 }, { "epoch": 2.163352536703215, "grad_norm": 0.8084865212440491, "learning_rate": 8.742551436076241e-05, "loss": 0.8565, "step": 11641 }, { "epoch": 2.1635383757665863, 
"grad_norm": 0.7272623777389526, "learning_rate": 8.741102111413748e-05, "loss": 0.8322, "step": 11642 }, { "epoch": 2.163724214829957, "grad_norm": 0.7027956247329712, "learning_rate": 8.739652813620269e-05, "loss": 0.8649, "step": 11643 }, { "epoch": 2.1639100538933285, "grad_norm": 0.879515528678894, "learning_rate": 8.73820354272674e-05, "loss": 1.1277, "step": 11644 }, { "epoch": 2.1640958929566994, "grad_norm": 0.8235723972320557, "learning_rate": 8.736754298764087e-05, "loss": 0.9575, "step": 11645 }, { "epoch": 2.1642817320200707, "grad_norm": 0.8570378422737122, "learning_rate": 8.735305081763248e-05, "loss": 1.0092, "step": 11646 }, { "epoch": 2.1644675710834416, "grad_norm": 0.8653255701065063, "learning_rate": 8.733855891755153e-05, "loss": 0.8659, "step": 11647 }, { "epoch": 2.164653410146813, "grad_norm": 0.6897451877593994, "learning_rate": 8.73240672877073e-05, "loss": 1.0281, "step": 11648 }, { "epoch": 2.164839249210184, "grad_norm": 0.8468342423439026, "learning_rate": 8.730957592840912e-05, "loss": 0.9191, "step": 11649 }, { "epoch": 2.165025088273555, "grad_norm": 0.8642510771751404, "learning_rate": 8.729508483996624e-05, "loss": 0.9842, "step": 11650 }, { "epoch": 2.1652109273369264, "grad_norm": 0.8552669286727905, "learning_rate": 8.728059402268796e-05, "loss": 0.9799, "step": 11651 }, { "epoch": 2.1653967664002973, "grad_norm": 0.7763219475746155, "learning_rate": 8.726610347688361e-05, "loss": 1.0787, "step": 11652 }, { "epoch": 2.1655826054636687, "grad_norm": 1.0020298957824707, "learning_rate": 8.72516132028624e-05, "loss": 0.8941, "step": 11653 }, { "epoch": 2.1657684445270395, "grad_norm": 1.0689773559570312, "learning_rate": 8.723712320093367e-05, "loss": 0.9945, "step": 11654 }, { "epoch": 2.165954283590411, "grad_norm": 0.9250777363777161, "learning_rate": 8.722263347140658e-05, "loss": 0.8263, "step": 11655 }, { "epoch": 2.1661401226537818, "grad_norm": 0.8608018159866333, "learning_rate": 8.720814401459048e-05, "loss": 0.8399, 
"step": 11656 }, { "epoch": 2.166325961717153, "grad_norm": 0.9532923698425293, "learning_rate": 8.719365483079461e-05, "loss": 0.917, "step": 11657 }, { "epoch": 2.166511800780524, "grad_norm": 0.8235387802124023, "learning_rate": 8.717916592032818e-05, "loss": 0.8097, "step": 11658 }, { "epoch": 2.1666976398438953, "grad_norm": 0.7674156427383423, "learning_rate": 8.716467728350049e-05, "loss": 0.7582, "step": 11659 }, { "epoch": 2.166883478907266, "grad_norm": 0.7772092819213867, "learning_rate": 8.715018892062069e-05, "loss": 0.9003, "step": 11660 }, { "epoch": 2.1670693179706375, "grad_norm": 0.7887098789215088, "learning_rate": 8.713570083199806e-05, "loss": 0.9104, "step": 11661 }, { "epoch": 2.1672551570340084, "grad_norm": 0.8114263415336609, "learning_rate": 8.712121301794186e-05, "loss": 0.8308, "step": 11662 }, { "epoch": 2.1674409960973797, "grad_norm": 0.7965273261070251, "learning_rate": 8.710672547876124e-05, "loss": 0.7873, "step": 11663 }, { "epoch": 2.1676268351607506, "grad_norm": 0.800265908241272, "learning_rate": 8.709223821476546e-05, "loss": 0.9765, "step": 11664 }, { "epoch": 2.167812674224122, "grad_norm": 0.7654651403427124, "learning_rate": 8.707775122626367e-05, "loss": 0.9037, "step": 11665 }, { "epoch": 2.167998513287493, "grad_norm": 0.9573057293891907, "learning_rate": 8.706326451356511e-05, "loss": 0.8409, "step": 11666 }, { "epoch": 2.168184352350864, "grad_norm": 0.9364465475082397, "learning_rate": 8.704877807697897e-05, "loss": 0.9115, "step": 11667 }, { "epoch": 2.168370191414235, "grad_norm": 0.8293829560279846, "learning_rate": 8.703429191681445e-05, "loss": 0.8936, "step": 11668 }, { "epoch": 2.1685560304776064, "grad_norm": 0.8080455660820007, "learning_rate": 8.701980603338073e-05, "loss": 0.8978, "step": 11669 }, { "epoch": 2.1687418695409777, "grad_norm": 1.0206828117370605, "learning_rate": 8.700532042698696e-05, "loss": 0.9648, "step": 11670 }, { "epoch": 2.1689277086043486, "grad_norm": 0.8141977190971375, 
"learning_rate": 8.699083509794231e-05, "loss": 0.9385, "step": 11671 }, { "epoch": 2.16911354766772, "grad_norm": 0.8308578729629517, "learning_rate": 8.697635004655599e-05, "loss": 1.0552, "step": 11672 }, { "epoch": 2.169299386731091, "grad_norm": 0.8184531331062317, "learning_rate": 8.696186527313711e-05, "loss": 1.0682, "step": 11673 }, { "epoch": 2.169485225794462, "grad_norm": 0.7495091557502747, "learning_rate": 8.694738077799488e-05, "loss": 0.9417, "step": 11674 }, { "epoch": 2.169671064857833, "grad_norm": 0.9168591499328613, "learning_rate": 8.693289656143835e-05, "loss": 0.9295, "step": 11675 }, { "epoch": 2.1698569039212043, "grad_norm": 0.6868381500244141, "learning_rate": 8.691841262377671e-05, "loss": 0.6573, "step": 11676 }, { "epoch": 2.1700427429845752, "grad_norm": 0.7707226276397705, "learning_rate": 8.690392896531916e-05, "loss": 0.9796, "step": 11677 }, { "epoch": 2.1702285820479466, "grad_norm": 1.0843822956085205, "learning_rate": 8.688944558637474e-05, "loss": 0.939, "step": 11678 }, { "epoch": 2.1704144211113174, "grad_norm": 0.8778970241546631, "learning_rate": 8.687496248725263e-05, "loss": 1.0477, "step": 11679 }, { "epoch": 2.1706002601746888, "grad_norm": 0.8354715704917908, "learning_rate": 8.68604796682619e-05, "loss": 0.9234, "step": 11680 }, { "epoch": 2.1707860992380597, "grad_norm": 0.8660401105880737, "learning_rate": 8.684599712971168e-05, "loss": 0.8527, "step": 11681 }, { "epoch": 2.170971938301431, "grad_norm": 0.772363007068634, "learning_rate": 8.68315148719111e-05, "loss": 0.9042, "step": 11682 }, { "epoch": 2.171157777364802, "grad_norm": 0.7308756709098816, "learning_rate": 8.681703289516922e-05, "loss": 1.0071, "step": 11683 }, { "epoch": 2.171343616428173, "grad_norm": 0.7892624139785767, "learning_rate": 8.680255119979518e-05, "loss": 0.8131, "step": 11684 }, { "epoch": 2.1715294554915445, "grad_norm": 1.0934081077575684, "learning_rate": 8.678806978609801e-05, "loss": 1.3285, "step": 11685 }, { "epoch": 
2.1717152945549154, "grad_norm": 0.7644238471984863, "learning_rate": 8.677358865438682e-05, "loss": 0.8815, "step": 11686 }, { "epoch": 2.1719011336182867, "grad_norm": 0.8351262807846069, "learning_rate": 8.675910780497068e-05, "loss": 0.9617, "step": 11687 }, { "epoch": 2.1720869726816576, "grad_norm": 0.8664665818214417, "learning_rate": 8.674462723815865e-05, "loss": 1.0228, "step": 11688 }, { "epoch": 2.172272811745029, "grad_norm": 0.82393479347229, "learning_rate": 8.673014695425983e-05, "loss": 1.0277, "step": 11689 }, { "epoch": 2.1724586508084, "grad_norm": 0.7539329528808594, "learning_rate": 8.671566695358324e-05, "loss": 0.8537, "step": 11690 }, { "epoch": 2.172644489871771, "grad_norm": 0.9268426299095154, "learning_rate": 8.670118723643792e-05, "loss": 0.8671, "step": 11691 }, { "epoch": 2.172830328935142, "grad_norm": 0.8412514328956604, "learning_rate": 8.668670780313299e-05, "loss": 0.9128, "step": 11692 }, { "epoch": 2.1730161679985134, "grad_norm": 0.7689191102981567, "learning_rate": 8.66722286539774e-05, "loss": 0.8982, "step": 11693 }, { "epoch": 2.1732020070618843, "grad_norm": 0.8509365320205688, "learning_rate": 8.665774978928023e-05, "loss": 0.7322, "step": 11694 }, { "epoch": 2.1733878461252556, "grad_norm": 0.6554549932479858, "learning_rate": 8.664327120935047e-05, "loss": 0.8867, "step": 11695 }, { "epoch": 2.1735736851886265, "grad_norm": 0.8374941349029541, "learning_rate": 8.662879291449722e-05, "loss": 0.7539, "step": 11696 }, { "epoch": 2.173759524251998, "grad_norm": 0.8129752278327942, "learning_rate": 8.661431490502936e-05, "loss": 0.8304, "step": 11697 }, { "epoch": 2.1739453633153687, "grad_norm": 0.7586342096328735, "learning_rate": 8.659983718125602e-05, "loss": 0.8621, "step": 11698 }, { "epoch": 2.17413120237874, "grad_norm": 0.7914800047874451, "learning_rate": 8.658535974348617e-05, "loss": 0.8289, "step": 11699 }, { "epoch": 2.1743170414421114, "grad_norm": 1.0254422426223755, "learning_rate": 8.657088259202876e-05, 
"loss": 1.0067, "step": 11700 }, { "epoch": 2.1745028805054822, "grad_norm": 0.8690272569656372, "learning_rate": 8.655640572719285e-05, "loss": 1.1076, "step": 11701 }, { "epoch": 2.1746887195688536, "grad_norm": 0.9043143391609192, "learning_rate": 8.654192914928739e-05, "loss": 0.8491, "step": 11702 }, { "epoch": 2.1748745586322245, "grad_norm": 0.7068991661071777, "learning_rate": 8.652745285862134e-05, "loss": 0.8558, "step": 11703 }, { "epoch": 2.175060397695596, "grad_norm": 1.0964959859848022, "learning_rate": 8.651297685550371e-05, "loss": 1.084, "step": 11704 }, { "epoch": 2.1752462367589667, "grad_norm": 0.906252920627594, "learning_rate": 8.649850114024342e-05, "loss": 0.8628, "step": 11705 }, { "epoch": 2.175432075822338, "grad_norm": 0.9718541502952576, "learning_rate": 8.648402571314949e-05, "loss": 0.903, "step": 11706 }, { "epoch": 2.175617914885709, "grad_norm": 0.7787262201309204, "learning_rate": 8.64695505745308e-05, "loss": 1.0466, "step": 11707 }, { "epoch": 2.17580375394908, "grad_norm": 0.8103688955307007, "learning_rate": 8.645507572469633e-05, "loss": 1.0094, "step": 11708 }, { "epoch": 2.175989593012451, "grad_norm": 0.668194591999054, "learning_rate": 8.644060116395505e-05, "loss": 1.0853, "step": 11709 }, { "epoch": 2.1761754320758224, "grad_norm": 0.8936452269554138, "learning_rate": 8.642612689261584e-05, "loss": 1.0407, "step": 11710 }, { "epoch": 2.1763612711391933, "grad_norm": 0.8345995545387268, "learning_rate": 8.641165291098767e-05, "loss": 0.9775, "step": 11711 }, { "epoch": 2.1765471102025646, "grad_norm": 0.6872609853744507, "learning_rate": 8.639717921937947e-05, "loss": 0.763, "step": 11712 }, { "epoch": 2.1767329492659355, "grad_norm": 0.7668678760528564, "learning_rate": 8.638270581810013e-05, "loss": 0.8762, "step": 11713 }, { "epoch": 2.176918788329307, "grad_norm": 0.6786547303199768, "learning_rate": 8.636823270745858e-05, "loss": 0.5868, "step": 11714 }, { "epoch": 2.1771046273926777, "grad_norm": 
0.7277447581291199, "learning_rate": 8.635375988776368e-05, "loss": 0.7244, "step": 11715 }, { "epoch": 2.177290466456049, "grad_norm": 0.7102178931236267, "learning_rate": 8.633928735932437e-05, "loss": 1.1194, "step": 11716 }, { "epoch": 2.17747630551942, "grad_norm": 0.8310472965240479, "learning_rate": 8.632481512244951e-05, "loss": 1.1853, "step": 11717 }, { "epoch": 2.1776621445827913, "grad_norm": 0.7447524070739746, "learning_rate": 8.6310343177448e-05, "loss": 0.7727, "step": 11718 }, { "epoch": 2.1778479836461626, "grad_norm": 0.7981902360916138, "learning_rate": 8.629587152462876e-05, "loss": 0.9509, "step": 11719 }, { "epoch": 2.1780338227095335, "grad_norm": 1.008462905883789, "learning_rate": 8.628140016430059e-05, "loss": 0.9314, "step": 11720 }, { "epoch": 2.178219661772905, "grad_norm": 0.8005197048187256, "learning_rate": 8.626692909677241e-05, "loss": 0.7536, "step": 11721 }, { "epoch": 2.1784055008362757, "grad_norm": 0.9703549146652222, "learning_rate": 8.625245832235307e-05, "loss": 1.0033, "step": 11722 }, { "epoch": 2.178591339899647, "grad_norm": 0.788756251335144, "learning_rate": 8.62379878413514e-05, "loss": 0.9943, "step": 11723 }, { "epoch": 2.178777178963018, "grad_norm": 1.0320338010787964, "learning_rate": 8.622351765407628e-05, "loss": 0.9527, "step": 11724 }, { "epoch": 2.1789630180263893, "grad_norm": 0.9259473085403442, "learning_rate": 8.620904776083652e-05, "loss": 1.1026, "step": 11725 }, { "epoch": 2.17914885708976, "grad_norm": 0.7380974888801575, "learning_rate": 8.619457816194099e-05, "loss": 0.823, "step": 11726 }, { "epoch": 2.1793346961531315, "grad_norm": 0.9026634693145752, "learning_rate": 8.618010885769845e-05, "loss": 0.7946, "step": 11727 }, { "epoch": 2.1795205352165024, "grad_norm": 0.82573002576828, "learning_rate": 8.616563984841783e-05, "loss": 1.0932, "step": 11728 }, { "epoch": 2.1797063742798737, "grad_norm": 0.8773999214172363, "learning_rate": 8.615117113440783e-05, "loss": 0.9676, "step": 11729 }, { 
"epoch": 2.1798922133432446, "grad_norm": 0.9673879742622375, "learning_rate": 8.613670271597733e-05, "loss": 0.9729, "step": 11730 }, { "epoch": 2.180078052406616, "grad_norm": 0.774804413318634, "learning_rate": 8.612223459343513e-05, "loss": 1.0304, "step": 11731 }, { "epoch": 2.180263891469987, "grad_norm": 0.8201828002929688, "learning_rate": 8.610776676709004e-05, "loss": 0.987, "step": 11732 }, { "epoch": 2.180449730533358, "grad_norm": 0.7700895667076111, "learning_rate": 8.60932992372508e-05, "loss": 0.9923, "step": 11733 }, { "epoch": 2.1806355695967294, "grad_norm": 0.7555466890335083, "learning_rate": 8.607883200422624e-05, "loss": 0.8199, "step": 11734 }, { "epoch": 2.1808214086601003, "grad_norm": 0.8858521580696106, "learning_rate": 8.606436506832512e-05, "loss": 0.9766, "step": 11735 }, { "epoch": 2.1810072477234717, "grad_norm": 0.6815047264099121, "learning_rate": 8.604989842985622e-05, "loss": 0.8501, "step": 11736 }, { "epoch": 2.1811930867868425, "grad_norm": 0.7316385507583618, "learning_rate": 8.603543208912827e-05, "loss": 0.8795, "step": 11737 }, { "epoch": 2.181378925850214, "grad_norm": 0.8119329214096069, "learning_rate": 8.602096604645009e-05, "loss": 1.1118, "step": 11738 }, { "epoch": 2.1815647649135848, "grad_norm": 0.8801705837249756, "learning_rate": 8.600650030213036e-05, "loss": 0.891, "step": 11739 }, { "epoch": 2.181750603976956, "grad_norm": 0.9159716963768005, "learning_rate": 8.599203485647788e-05, "loss": 0.8022, "step": 11740 }, { "epoch": 2.181936443040327, "grad_norm": 0.810187578201294, "learning_rate": 8.597756970980138e-05, "loss": 0.8708, "step": 11741 }, { "epoch": 2.1821222821036983, "grad_norm": 0.743595540523529, "learning_rate": 8.59631048624096e-05, "loss": 0.8445, "step": 11742 }, { "epoch": 2.182308121167069, "grad_norm": 0.8353040218353271, "learning_rate": 8.594864031461127e-05, "loss": 1.0315, "step": 11743 }, { "epoch": 2.1824939602304405, "grad_norm": 0.7551995515823364, "learning_rate": 
8.593417606671509e-05, "loss": 1.0222, "step": 11744 }, { "epoch": 2.1826797992938114, "grad_norm": 0.7523811459541321, "learning_rate": 8.591971211902978e-05, "loss": 1.0328, "step": 11745 }, { "epoch": 2.1828656383571827, "grad_norm": 0.8597369194030762, "learning_rate": 8.590524847186407e-05, "loss": 0.689, "step": 11746 }, { "epoch": 2.1830514774205536, "grad_norm": 0.9100225567817688, "learning_rate": 8.589078512552662e-05, "loss": 0.8001, "step": 11747 }, { "epoch": 2.183237316483925, "grad_norm": 0.8944543600082397, "learning_rate": 8.587632208032617e-05, "loss": 1.2127, "step": 11748 }, { "epoch": 2.1834231555472963, "grad_norm": 0.7308274507522583, "learning_rate": 8.586185933657135e-05, "loss": 0.947, "step": 11749 }, { "epoch": 2.183608994610667, "grad_norm": 0.7620413303375244, "learning_rate": 8.584739689457089e-05, "loss": 0.7689, "step": 11750 }, { "epoch": 2.1837948336740385, "grad_norm": 0.7631767392158508, "learning_rate": 8.583293475463345e-05, "loss": 1.1856, "step": 11751 }, { "epoch": 2.1839806727374094, "grad_norm": 0.8975901007652283, "learning_rate": 8.581847291706775e-05, "loss": 0.9497, "step": 11752 }, { "epoch": 2.1841665118007807, "grad_norm": 0.8111382722854614, "learning_rate": 8.580401138218238e-05, "loss": 0.7698, "step": 11753 }, { "epoch": 2.1843523508641516, "grad_norm": 0.9419172406196594, "learning_rate": 8.578955015028605e-05, "loss": 0.7565, "step": 11754 }, { "epoch": 2.184538189927523, "grad_norm": 0.8097640872001648, "learning_rate": 8.577508922168735e-05, "loss": 1.0693, "step": 11755 }, { "epoch": 2.184724028990894, "grad_norm": 0.6869693994522095, "learning_rate": 8.5760628596695e-05, "loss": 0.8488, "step": 11756 }, { "epoch": 2.184909868054265, "grad_norm": 0.7901400327682495, "learning_rate": 8.574616827561756e-05, "loss": 0.96, "step": 11757 }, { "epoch": 2.185095707117636, "grad_norm": 0.8940836191177368, "learning_rate": 8.573170825876373e-05, "loss": 0.9835, "step": 11758 }, { "epoch": 2.1852815461810073, 
"grad_norm": 0.8824992179870605, "learning_rate": 8.571724854644207e-05, "loss": 1.007, "step": 11759 }, { "epoch": 2.1854673852443782, "grad_norm": 1.7196069955825806, "learning_rate": 8.570278913896122e-05, "loss": 1.3019, "step": 11760 }, { "epoch": 2.1856532243077496, "grad_norm": 0.7538241744041443, "learning_rate": 8.568833003662982e-05, "loss": 0.8869, "step": 11761 }, { "epoch": 2.1858390633711204, "grad_norm": 0.8255589604377747, "learning_rate": 8.567387123975648e-05, "loss": 0.9831, "step": 11762 }, { "epoch": 2.1860249024344918, "grad_norm": 0.8417258262634277, "learning_rate": 8.565941274864975e-05, "loss": 0.9011, "step": 11763 }, { "epoch": 2.1862107414978627, "grad_norm": 0.7021176218986511, "learning_rate": 8.564495456361825e-05, "loss": 0.8793, "step": 11764 }, { "epoch": 2.186396580561234, "grad_norm": 0.9574293494224548, "learning_rate": 8.563049668497057e-05, "loss": 0.9614, "step": 11765 }, { "epoch": 2.186582419624605, "grad_norm": 0.6656575202941895, "learning_rate": 8.561603911301528e-05, "loss": 0.8067, "step": 11766 }, { "epoch": 2.186768258687976, "grad_norm": 0.7005726099014282, "learning_rate": 8.560158184806093e-05, "loss": 0.8465, "step": 11767 }, { "epoch": 2.1869540977513475, "grad_norm": 1.0161550045013428, "learning_rate": 8.558712489041613e-05, "loss": 0.7472, "step": 11768 }, { "epoch": 2.1871399368147184, "grad_norm": 0.953964352607727, "learning_rate": 8.557266824038939e-05, "loss": 1.036, "step": 11769 }, { "epoch": 2.1873257758780897, "grad_norm": 0.7425580620765686, "learning_rate": 8.555821189828932e-05, "loss": 0.8413, "step": 11770 }, { "epoch": 2.1875116149414606, "grad_norm": 0.7482516765594482, "learning_rate": 8.554375586442436e-05, "loss": 0.9099, "step": 11771 }, { "epoch": 2.187697454004832, "grad_norm": 0.6497737765312195, "learning_rate": 8.55293001391032e-05, "loss": 0.7929, "step": 11772 }, { "epoch": 2.187883293068203, "grad_norm": 0.8159965872764587, "learning_rate": 8.551484472263426e-05, "loss": 0.9298, 
"step": 11773 }, { "epoch": 2.188069132131574, "grad_norm": 1.116498351097107, "learning_rate": 8.550038961532613e-05, "loss": 1.0621, "step": 11774 }, { "epoch": 2.188254971194945, "grad_norm": 0.7577797174453735, "learning_rate": 8.548593481748728e-05, "loss": 0.9997, "step": 11775 }, { "epoch": 2.1884408102583164, "grad_norm": 0.7481876015663147, "learning_rate": 8.547148032942627e-05, "loss": 0.6183, "step": 11776 }, { "epoch": 2.1886266493216873, "grad_norm": 0.8192367553710938, "learning_rate": 8.545702615145155e-05, "loss": 0.7456, "step": 11777 }, { "epoch": 2.1888124883850586, "grad_norm": 0.7634150385856628, "learning_rate": 8.544257228387167e-05, "loss": 0.8861, "step": 11778 }, { "epoch": 2.1889983274484295, "grad_norm": 0.9598495364189148, "learning_rate": 8.54281187269951e-05, "loss": 0.8434, "step": 11779 }, { "epoch": 2.189184166511801, "grad_norm": 0.8291441798210144, "learning_rate": 8.541366548113033e-05, "loss": 0.7626, "step": 11780 }, { "epoch": 2.1893700055751717, "grad_norm": 0.9502770304679871, "learning_rate": 8.539921254658579e-05, "loss": 0.9791, "step": 11781 }, { "epoch": 2.189555844638543, "grad_norm": 0.715624213218689, "learning_rate": 8.538475992367006e-05, "loss": 0.8788, "step": 11782 }, { "epoch": 2.1897416837019144, "grad_norm": 0.7813842296600342, "learning_rate": 8.537030761269151e-05, "loss": 1.1406, "step": 11783 }, { "epoch": 2.1899275227652852, "grad_norm": 0.7807679772377014, "learning_rate": 8.53558556139587e-05, "loss": 1.0152, "step": 11784 }, { "epoch": 2.1901133618286566, "grad_norm": 0.8997833728790283, "learning_rate": 8.534140392777996e-05, "loss": 1.0855, "step": 11785 }, { "epoch": 2.1902992008920275, "grad_norm": 0.7192310690879822, "learning_rate": 8.532695255446383e-05, "loss": 0.806, "step": 11786 }, { "epoch": 2.190485039955399, "grad_norm": 0.674193799495697, "learning_rate": 8.531250149431871e-05, "loss": 0.7599, "step": 11787 }, { "epoch": 2.1906708790187697, "grad_norm": 0.7934219241142273, 
"learning_rate": 8.529805074765306e-05, "loss": 0.8466, "step": 11788 }, { "epoch": 2.190856718082141, "grad_norm": 0.7016252875328064, "learning_rate": 8.528360031477526e-05, "loss": 0.6994, "step": 11789 }, { "epoch": 2.191042557145512, "grad_norm": 0.9743391275405884, "learning_rate": 8.526915019599377e-05, "loss": 1.0064, "step": 11790 }, { "epoch": 2.191228396208883, "grad_norm": 0.7312706708908081, "learning_rate": 8.525470039161698e-05, "loss": 0.7806, "step": 11791 }, { "epoch": 2.191414235272254, "grad_norm": 0.7978386878967285, "learning_rate": 8.524025090195328e-05, "loss": 0.8825, "step": 11792 }, { "epoch": 2.1916000743356254, "grad_norm": 0.7949557900428772, "learning_rate": 8.522580172731112e-05, "loss": 1.1221, "step": 11793 }, { "epoch": 2.1917859133989963, "grad_norm": 0.7279219031333923, "learning_rate": 8.52113528679989e-05, "loss": 0.8345, "step": 11794 }, { "epoch": 2.1919717524623676, "grad_norm": 0.8030551075935364, "learning_rate": 8.519690432432494e-05, "loss": 0.9106, "step": 11795 }, { "epoch": 2.1921575915257385, "grad_norm": 0.8001941442489624, "learning_rate": 8.518245609659768e-05, "loss": 0.8612, "step": 11796 }, { "epoch": 2.19234343058911, "grad_norm": 2.0981667041778564, "learning_rate": 8.516800818512545e-05, "loss": 1.374, "step": 11797 }, { "epoch": 2.192529269652481, "grad_norm": 0.8190371990203857, "learning_rate": 8.515356059021664e-05, "loss": 1.0411, "step": 11798 }, { "epoch": 2.192715108715852, "grad_norm": 0.86910080909729, "learning_rate": 8.513911331217958e-05, "loss": 1.0112, "step": 11799 }, { "epoch": 2.1929009477792234, "grad_norm": 0.9143921136856079, "learning_rate": 8.512466635132268e-05, "loss": 0.9417, "step": 11800 }, { "epoch": 2.1930867868425943, "grad_norm": 0.6698209047317505, "learning_rate": 8.511021970795423e-05, "loss": 0.8751, "step": 11801 }, { "epoch": 2.1932726259059656, "grad_norm": 0.8188903331756592, "learning_rate": 8.509577338238255e-05, "loss": 0.8438, "step": 11802 }, { "epoch": 
2.1934584649693365, "grad_norm": 0.7005814909934998, "learning_rate": 8.508132737491604e-05, "loss": 0.8891, "step": 11803 }, { "epoch": 2.193644304032708, "grad_norm": 0.7396237850189209, "learning_rate": 8.506688168586302e-05, "loss": 0.9306, "step": 11804 }, { "epoch": 2.1938301430960787, "grad_norm": 0.9246975183486938, "learning_rate": 8.505243631553176e-05, "loss": 1.0293, "step": 11805 }, { "epoch": 2.19401598215945, "grad_norm": 0.7621631622314453, "learning_rate": 8.503799126423062e-05, "loss": 0.8078, "step": 11806 }, { "epoch": 2.194201821222821, "grad_norm": 0.7225520014762878, "learning_rate": 8.502354653226785e-05, "loss": 0.8706, "step": 11807 }, { "epoch": 2.1943876602861923, "grad_norm": 0.7985264658927917, "learning_rate": 8.500910211995182e-05, "loss": 0.9305, "step": 11808 }, { "epoch": 2.194573499349563, "grad_norm": 0.877153217792511, "learning_rate": 8.499465802759073e-05, "loss": 1.0499, "step": 11809 }, { "epoch": 2.1947593384129345, "grad_norm": 0.7561744451522827, "learning_rate": 8.498021425549296e-05, "loss": 0.8551, "step": 11810 }, { "epoch": 2.1949451774763054, "grad_norm": 0.6741968393325806, "learning_rate": 8.49657708039667e-05, "loss": 0.798, "step": 11811 }, { "epoch": 2.1951310165396767, "grad_norm": 0.8386529088020325, "learning_rate": 8.495132767332027e-05, "loss": 0.9944, "step": 11812 }, { "epoch": 2.1953168556030476, "grad_norm": 0.8982092142105103, "learning_rate": 8.493688486386193e-05, "loss": 1.129, "step": 11813 }, { "epoch": 2.195502694666419, "grad_norm": 1.0913642644882202, "learning_rate": 8.492244237589995e-05, "loss": 1.0555, "step": 11814 }, { "epoch": 2.19568853372979, "grad_norm": 0.7127829194068909, "learning_rate": 8.490800020974255e-05, "loss": 1.0229, "step": 11815 }, { "epoch": 2.195874372793161, "grad_norm": 0.8113505840301514, "learning_rate": 8.4893558365698e-05, "loss": 0.9756, "step": 11816 }, { "epoch": 2.1960602118565324, "grad_norm": 0.8548054695129395, "learning_rate": 8.487911684407451e-05, 
"loss": 0.799, "step": 11817 }, { "epoch": 2.1962460509199033, "grad_norm": 0.7963377833366394, "learning_rate": 8.486467564518034e-05, "loss": 0.7255, "step": 11818 }, { "epoch": 2.1964318899832747, "grad_norm": 1.93429696559906, "learning_rate": 8.485023476932367e-05, "loss": 1.2792, "step": 11819 }, { "epoch": 2.1966177290466455, "grad_norm": 0.8027183413505554, "learning_rate": 8.483579421681276e-05, "loss": 0.7442, "step": 11820 }, { "epoch": 2.196803568110017, "grad_norm": 0.8489840030670166, "learning_rate": 8.482135398795579e-05, "loss": 0.8223, "step": 11821 }, { "epoch": 2.1969894071733878, "grad_norm": 0.9878379106521606, "learning_rate": 8.480691408306097e-05, "loss": 0.8635, "step": 11822 }, { "epoch": 2.197175246236759, "grad_norm": 0.7123323678970337, "learning_rate": 8.479247450243646e-05, "loss": 0.9169, "step": 11823 }, { "epoch": 2.19736108530013, "grad_norm": 0.7710812091827393, "learning_rate": 8.477803524639054e-05, "loss": 0.896, "step": 11824 }, { "epoch": 2.1975469243635013, "grad_norm": 0.7759889364242554, "learning_rate": 8.476359631523133e-05, "loss": 0.9463, "step": 11825 }, { "epoch": 2.197732763426872, "grad_norm": 0.7710733413696289, "learning_rate": 8.474915770926699e-05, "loss": 0.7537, "step": 11826 }, { "epoch": 2.1979186024902435, "grad_norm": 1.081716775894165, "learning_rate": 8.473471942880572e-05, "loss": 0.786, "step": 11827 }, { "epoch": 2.1981044415536144, "grad_norm": 0.698901891708374, "learning_rate": 8.472028147415567e-05, "loss": 0.7743, "step": 11828 }, { "epoch": 2.1982902806169857, "grad_norm": 0.8639124631881714, "learning_rate": 8.470584384562499e-05, "loss": 0.786, "step": 11829 }, { "epoch": 2.1984761196803566, "grad_norm": 0.8688544631004333, "learning_rate": 8.469140654352183e-05, "loss": 0.9876, "step": 11830 }, { "epoch": 2.198661958743728, "grad_norm": 0.8796912431716919, "learning_rate": 8.467696956815432e-05, "loss": 0.6324, "step": 11831 }, { "epoch": 2.1988477978070993, "grad_norm": 
0.7779879570007324, "learning_rate": 8.466253291983059e-05, "loss": 1.011, "step": 11832 }, { "epoch": 2.19903363687047, "grad_norm": 0.8026193976402283, "learning_rate": 8.46480965988588e-05, "loss": 0.7093, "step": 11833 }, { "epoch": 2.1992194759338415, "grad_norm": 1.2052098512649536, "learning_rate": 8.463366060554698e-05, "loss": 1.4435, "step": 11834 }, { "epoch": 2.1994053149972124, "grad_norm": 0.7946503758430481, "learning_rate": 8.461922494020336e-05, "loss": 0.8539, "step": 11835 }, { "epoch": 2.1995911540605837, "grad_norm": 0.721773087978363, "learning_rate": 8.460478960313599e-05, "loss": 0.6973, "step": 11836 }, { "epoch": 2.1997769931239546, "grad_norm": 0.7838326096534729, "learning_rate": 8.459035459465294e-05, "loss": 0.8626, "step": 11837 }, { "epoch": 2.199962832187326, "grad_norm": 0.6588775515556335, "learning_rate": 8.457591991506236e-05, "loss": 0.6709, "step": 11838 }, { "epoch": 2.200148671250697, "grad_norm": 1.0023447275161743, "learning_rate": 8.456148556467228e-05, "loss": 0.8973, "step": 11839 }, { "epoch": 2.200334510314068, "grad_norm": 0.9219651222229004, "learning_rate": 8.454705154379082e-05, "loss": 0.9682, "step": 11840 }, { "epoch": 2.200520349377439, "grad_norm": 1.0128252506256104, "learning_rate": 8.4532617852726e-05, "loss": 0.8887, "step": 11841 }, { "epoch": 2.2007061884408103, "grad_norm": 0.8471603989601135, "learning_rate": 8.451818449178591e-05, "loss": 0.8781, "step": 11842 }, { "epoch": 2.2008920275041812, "grad_norm": 0.6617082357406616, "learning_rate": 8.450375146127862e-05, "loss": 0.6619, "step": 11843 }, { "epoch": 2.2010778665675526, "grad_norm": 0.9384303092956543, "learning_rate": 8.448931876151212e-05, "loss": 1.0131, "step": 11844 }, { "epoch": 2.2012637056309234, "grad_norm": 0.716192901134491, "learning_rate": 8.447488639279452e-05, "loss": 0.702, "step": 11845 }, { "epoch": 2.2014495446942948, "grad_norm": 0.7688436508178711, "learning_rate": 8.446045435543387e-05, "loss": 0.8177, "step": 11846 }, { 
"epoch": 2.2016353837576657, "grad_norm": 0.7544126510620117, "learning_rate": 8.44460226497381e-05, "loss": 0.9157, "step": 11847 }, { "epoch": 2.201821222821037, "grad_norm": 0.7286648154258728, "learning_rate": 8.443159127601532e-05, "loss": 0.6314, "step": 11848 }, { "epoch": 2.202007061884408, "grad_norm": 0.8427985906600952, "learning_rate": 8.44171602345735e-05, "loss": 0.6621, "step": 11849 }, { "epoch": 2.202192900947779, "grad_norm": 0.7273639440536499, "learning_rate": 8.440272952572064e-05, "loss": 0.9898, "step": 11850 }, { "epoch": 2.2023787400111505, "grad_norm": 0.7268412709236145, "learning_rate": 8.438829914976477e-05, "loss": 0.8822, "step": 11851 }, { "epoch": 2.2025645790745214, "grad_norm": 0.6997602581977844, "learning_rate": 8.437386910701384e-05, "loss": 0.6818, "step": 11852 }, { "epoch": 2.2027504181378927, "grad_norm": 0.9023566842079163, "learning_rate": 8.435943939777588e-05, "loss": 0.6727, "step": 11853 }, { "epoch": 2.2029362572012636, "grad_norm": 0.72446209192276, "learning_rate": 8.434501002235883e-05, "loss": 0.8095, "step": 11854 }, { "epoch": 2.203122096264635, "grad_norm": 0.7783926725387573, "learning_rate": 8.433058098107065e-05, "loss": 0.8789, "step": 11855 }, { "epoch": 2.203307935328006, "grad_norm": 0.8605186939239502, "learning_rate": 8.431615227421937e-05, "loss": 0.9955, "step": 11856 }, { "epoch": 2.203493774391377, "grad_norm": 0.8575723171234131, "learning_rate": 8.43017239021129e-05, "loss": 0.7633, "step": 11857 }, { "epoch": 2.203679613454748, "grad_norm": 0.8678473830223083, "learning_rate": 8.42872958650592e-05, "loss": 0.9923, "step": 11858 }, { "epoch": 2.2038654525181194, "grad_norm": 0.9308450818061829, "learning_rate": 8.42728681633662e-05, "loss": 1.1472, "step": 11859 }, { "epoch": 2.2040512915814903, "grad_norm": 0.7468540668487549, "learning_rate": 8.425844079734184e-05, "loss": 0.7736, "step": 11860 }, { "epoch": 2.2042371306448616, "grad_norm": 0.7660921216011047, "learning_rate": 
8.424401376729404e-05, "loss": 0.7215, "step": 11861 }, { "epoch": 2.2044229697082325, "grad_norm": 0.8048068284988403, "learning_rate": 8.422958707353072e-05, "loss": 0.8567, "step": 11862 }, { "epoch": 2.204608808771604, "grad_norm": 0.927282452583313, "learning_rate": 8.421516071635984e-05, "loss": 1.0192, "step": 11863 }, { "epoch": 2.2047946478349747, "grad_norm": 0.8508273959159851, "learning_rate": 8.420073469608922e-05, "loss": 1.0097, "step": 11864 }, { "epoch": 2.204980486898346, "grad_norm": 0.8396299481391907, "learning_rate": 8.418630901302679e-05, "loss": 0.8578, "step": 11865 }, { "epoch": 2.2051663259617174, "grad_norm": 0.9400133490562439, "learning_rate": 8.417188366748052e-05, "loss": 0.9306, "step": 11866 }, { "epoch": 2.2053521650250882, "grad_norm": 0.7686029672622681, "learning_rate": 8.415745865975819e-05, "loss": 0.8702, "step": 11867 }, { "epoch": 2.2055380040884596, "grad_norm": 0.9369570016860962, "learning_rate": 8.414303399016775e-05, "loss": 0.656, "step": 11868 }, { "epoch": 2.2057238431518305, "grad_norm": 0.7464059591293335, "learning_rate": 8.4128609659017e-05, "loss": 0.9271, "step": 11869 }, { "epoch": 2.205909682215202, "grad_norm": 0.8918207883834839, "learning_rate": 8.411418566661388e-05, "loss": 0.9687, "step": 11870 }, { "epoch": 2.2060955212785727, "grad_norm": 0.7785696983337402, "learning_rate": 8.409976201326618e-05, "loss": 0.9655, "step": 11871 }, { "epoch": 2.206281360341944, "grad_norm": 0.6803451776504517, "learning_rate": 8.408533869928177e-05, "loss": 0.6949, "step": 11872 }, { "epoch": 2.206467199405315, "grad_norm": 0.7219688296318054, "learning_rate": 8.407091572496851e-05, "loss": 0.9155, "step": 11873 }, { "epoch": 2.206653038468686, "grad_norm": 0.7794553637504578, "learning_rate": 8.40564930906342e-05, "loss": 1.0356, "step": 11874 }, { "epoch": 2.206838877532057, "grad_norm": 1.1876341104507446, "learning_rate": 8.404207079658671e-05, "loss": 1.0767, "step": 11875 }, { "epoch": 2.2070247165954284, 
"grad_norm": 0.7047664523124695, "learning_rate": 8.40276488431338e-05, "loss": 0.8224, "step": 11876 }, { "epoch": 2.2072105556587993, "grad_norm": 1.0113282203674316, "learning_rate": 8.401322723058332e-05, "loss": 0.8664, "step": 11877 }, { "epoch": 2.2073963947221706, "grad_norm": 0.7891967296600342, "learning_rate": 8.399880595924312e-05, "loss": 0.7399, "step": 11878 }, { "epoch": 2.2075822337855415, "grad_norm": 0.7502462267875671, "learning_rate": 8.39843850294209e-05, "loss": 0.9752, "step": 11879 }, { "epoch": 2.207768072848913, "grad_norm": 0.700694739818573, "learning_rate": 8.396996444142452e-05, "loss": 0.6745, "step": 11880 }, { "epoch": 2.207953911912284, "grad_norm": 0.8352717757225037, "learning_rate": 8.395554419556173e-05, "loss": 0.9155, "step": 11881 }, { "epoch": 2.208139750975655, "grad_norm": 0.8278815746307373, "learning_rate": 8.39411242921403e-05, "loss": 0.9406, "step": 11882 }, { "epoch": 2.2083255900390264, "grad_norm": 0.7651520371437073, "learning_rate": 8.392670473146806e-05, "loss": 0.9727, "step": 11883 }, { "epoch": 2.2085114291023973, "grad_norm": 0.8754087686538696, "learning_rate": 8.391228551385268e-05, "loss": 0.9176, "step": 11884 }, { "epoch": 2.2086972681657686, "grad_norm": 0.8060709238052368, "learning_rate": 8.3897866639602e-05, "loss": 0.8943, "step": 11885 }, { "epoch": 2.2088831072291395, "grad_norm": 0.9044803977012634, "learning_rate": 8.388344810902367e-05, "loss": 0.8597, "step": 11886 }, { "epoch": 2.209068946292511, "grad_norm": 0.6913041472434998, "learning_rate": 8.386902992242549e-05, "loss": 0.9207, "step": 11887 }, { "epoch": 2.2092547853558817, "grad_norm": 1.0214216709136963, "learning_rate": 8.385461208011523e-05, "loss": 1.1811, "step": 11888 }, { "epoch": 2.209440624419253, "grad_norm": 0.8380892872810364, "learning_rate": 8.384019458240054e-05, "loss": 0.9377, "step": 11889 }, { "epoch": 2.209626463482624, "grad_norm": 0.898802638053894, "learning_rate": 8.382577742958917e-05, "loss": 0.7937, 
"step": 11890 }, { "epoch": 2.2098123025459953, "grad_norm": 0.8506227731704712, "learning_rate": 8.381136062198881e-05, "loss": 1.0291, "step": 11891 }, { "epoch": 2.209998141609366, "grad_norm": 0.7427432537078857, "learning_rate": 8.37969441599072e-05, "loss": 0.6678, "step": 11892 }, { "epoch": 2.2101839806727375, "grad_norm": 0.7741548418998718, "learning_rate": 8.378252804365202e-05, "loss": 0.9471, "step": 11893 }, { "epoch": 2.2103698197361084, "grad_norm": 0.7084181904792786, "learning_rate": 8.376811227353091e-05, "loss": 0.9541, "step": 11894 }, { "epoch": 2.2105556587994797, "grad_norm": 0.7543783187866211, "learning_rate": 8.375369684985164e-05, "loss": 0.8969, "step": 11895 }, { "epoch": 2.2107414978628506, "grad_norm": 0.9766399264335632, "learning_rate": 8.373928177292179e-05, "loss": 0.8219, "step": 11896 }, { "epoch": 2.210927336926222, "grad_norm": 1.0114517211914062, "learning_rate": 8.372486704304905e-05, "loss": 1.0136, "step": 11897 }, { "epoch": 2.211113175989593, "grad_norm": 0.6716600656509399, "learning_rate": 8.371045266054114e-05, "loss": 0.6813, "step": 11898 }, { "epoch": 2.211299015052964, "grad_norm": 0.7623451352119446, "learning_rate": 8.369603862570563e-05, "loss": 0.9137, "step": 11899 }, { "epoch": 2.2114848541163354, "grad_norm": 0.8574510812759399, "learning_rate": 8.368162493885022e-05, "loss": 0.7934, "step": 11900 }, { "epoch": 2.2116706931797063, "grad_norm": 0.8110435605049133, "learning_rate": 8.366721160028251e-05, "loss": 0.7885, "step": 11901 }, { "epoch": 2.2118565322430777, "grad_norm": 0.8771747946739197, "learning_rate": 8.365279861031014e-05, "loss": 0.9261, "step": 11902 }, { "epoch": 2.2120423713064485, "grad_norm": 0.9254234433174133, "learning_rate": 8.363838596924075e-05, "loss": 1.0125, "step": 11903 }, { "epoch": 2.21222821036982, "grad_norm": 0.7689984440803528, "learning_rate": 8.362397367738191e-05, "loss": 1.0572, "step": 11904 }, { "epoch": 2.2124140494331908, "grad_norm": 0.7987253665924072, 
"learning_rate": 8.360956173504127e-05, "loss": 0.9679, "step": 11905 }, { "epoch": 2.212599888496562, "grad_norm": 0.8854193687438965, "learning_rate": 8.359515014252638e-05, "loss": 0.9194, "step": 11906 }, { "epoch": 2.212785727559933, "grad_norm": 0.7638492584228516, "learning_rate": 8.358073890014486e-05, "loss": 0.9595, "step": 11907 }, { "epoch": 2.2129715666233043, "grad_norm": 0.866855263710022, "learning_rate": 8.356632800820432e-05, "loss": 0.9976, "step": 11908 }, { "epoch": 2.213157405686675, "grad_norm": 0.9702017307281494, "learning_rate": 8.355191746701227e-05, "loss": 0.9449, "step": 11909 }, { "epoch": 2.2133432447500465, "grad_norm": 0.7269165515899658, "learning_rate": 8.353750727687637e-05, "loss": 0.7667, "step": 11910 }, { "epoch": 2.2135290838134174, "grad_norm": 0.781199038028717, "learning_rate": 8.352309743810409e-05, "loss": 0.8012, "step": 11911 }, { "epoch": 2.2137149228767887, "grad_norm": 0.8521917462348938, "learning_rate": 8.350868795100301e-05, "loss": 0.9337, "step": 11912 }, { "epoch": 2.2139007619401596, "grad_norm": 0.9267933368682861, "learning_rate": 8.349427881588071e-05, "loss": 0.8166, "step": 11913 }, { "epoch": 2.214086601003531, "grad_norm": 0.865480363368988, "learning_rate": 8.347987003304469e-05, "loss": 1.0724, "step": 11914 }, { "epoch": 2.2142724400669023, "grad_norm": 0.6637668609619141, "learning_rate": 8.34654616028025e-05, "loss": 0.7964, "step": 11915 }, { "epoch": 2.214458279130273, "grad_norm": 0.838745653629303, "learning_rate": 8.345105352546166e-05, "loss": 0.9489, "step": 11916 }, { "epoch": 2.2146441181936445, "grad_norm": 0.9586402177810669, "learning_rate": 8.34366458013297e-05, "loss": 0.8424, "step": 11917 }, { "epoch": 2.2148299572570154, "grad_norm": 0.7502764463424683, "learning_rate": 8.342223843071406e-05, "loss": 1.0838, "step": 11918 }, { "epoch": 2.2150157963203867, "grad_norm": 0.6431674957275391, "learning_rate": 8.340783141392231e-05, "loss": 0.6327, "step": 11919 }, { "epoch": 
2.2152016353837576, "grad_norm": 0.7192468047142029, "learning_rate": 8.339342475126195e-05, "loss": 0.8298, "step": 11920 }, { "epoch": 2.215387474447129, "grad_norm": 0.7980626821517944, "learning_rate": 8.337901844304042e-05, "loss": 1.006, "step": 11921 }, { "epoch": 2.2155733135105, "grad_norm": 0.7548335194587708, "learning_rate": 8.336461248956522e-05, "loss": 0.6331, "step": 11922 }, { "epoch": 2.215759152573871, "grad_norm": 1.0166386365890503, "learning_rate": 8.335020689114384e-05, "loss": 0.9778, "step": 11923 }, { "epoch": 2.215944991637242, "grad_norm": 0.7799232602119446, "learning_rate": 8.333580164808372e-05, "loss": 0.7998, "step": 11924 }, { "epoch": 2.2161308307006133, "grad_norm": 0.838235080242157, "learning_rate": 8.33213967606923e-05, "loss": 0.865, "step": 11925 }, { "epoch": 2.2163166697639842, "grad_norm": 0.9255481958389282, "learning_rate": 8.330699222927705e-05, "loss": 1.018, "step": 11926 }, { "epoch": 2.2165025088273556, "grad_norm": 0.8905596733093262, "learning_rate": 8.329258805414542e-05, "loss": 1.0686, "step": 11927 }, { "epoch": 2.2166883478907264, "grad_norm": 0.836381733417511, "learning_rate": 8.327818423560478e-05, "loss": 1.023, "step": 11928 }, { "epoch": 2.2168741869540978, "grad_norm": 1.0391117334365845, "learning_rate": 8.326378077396262e-05, "loss": 1.118, "step": 11929 }, { "epoch": 2.217060026017469, "grad_norm": 0.7728176712989807, "learning_rate": 8.324937766952638e-05, "loss": 0.9211, "step": 11930 }, { "epoch": 2.21724586508084, "grad_norm": 0.9985128045082092, "learning_rate": 8.323497492260338e-05, "loss": 1.2685, "step": 11931 }, { "epoch": 2.2174317041442113, "grad_norm": 0.7964622378349304, "learning_rate": 8.322057253350108e-05, "loss": 0.9126, "step": 11932 }, { "epoch": 2.217617543207582, "grad_norm": 0.8635339736938477, "learning_rate": 8.32061705025269e-05, "loss": 0.9854, "step": 11933 }, { "epoch": 2.2178033822709535, "grad_norm": 0.8185827136039734, "learning_rate": 8.319176882998816e-05, "loss": 
1.0503, "step": 11934 }, { "epoch": 2.2179892213343244, "grad_norm": 0.8222116827964783, "learning_rate": 8.317736751619229e-05, "loss": 0.948, "step": 11935 }, { "epoch": 2.2181750603976957, "grad_norm": 0.7733019590377808, "learning_rate": 8.316296656144662e-05, "loss": 1.0669, "step": 11936 }, { "epoch": 2.2183608994610666, "grad_norm": 0.7140665650367737, "learning_rate": 8.314856596605856e-05, "loss": 0.8587, "step": 11937 }, { "epoch": 2.218546738524438, "grad_norm": 0.7716907858848572, "learning_rate": 8.313416573033541e-05, "loss": 0.9409, "step": 11938 }, { "epoch": 2.218732577587809, "grad_norm": 0.9706321358680725, "learning_rate": 8.311976585458453e-05, "loss": 0.8897, "step": 11939 }, { "epoch": 2.21891841665118, "grad_norm": 0.6067124009132385, "learning_rate": 8.310536633911334e-05, "loss": 0.6413, "step": 11940 }, { "epoch": 2.219104255714551, "grad_norm": 0.7392117977142334, "learning_rate": 8.309096718422907e-05, "loss": 0.6049, "step": 11941 }, { "epoch": 2.2192900947779224, "grad_norm": 0.7874842286109924, "learning_rate": 8.307656839023909e-05, "loss": 1.0683, "step": 11942 }, { "epoch": 2.2194759338412933, "grad_norm": 0.8187624216079712, "learning_rate": 8.306216995745075e-05, "loss": 0.8404, "step": 11943 }, { "epoch": 2.2196617729046646, "grad_norm": 1.0005868673324585, "learning_rate": 8.30477718861713e-05, "loss": 0.9145, "step": 11944 }, { "epoch": 2.2198476119680355, "grad_norm": 0.7779518961906433, "learning_rate": 8.303337417670811e-05, "loss": 0.8756, "step": 11945 }, { "epoch": 2.220033451031407, "grad_norm": 0.9289587140083313, "learning_rate": 8.301897682936838e-05, "loss": 0.9345, "step": 11946 }, { "epoch": 2.2202192900947777, "grad_norm": 0.8238828182220459, "learning_rate": 8.300457984445948e-05, "loss": 0.8322, "step": 11947 }, { "epoch": 2.220405129158149, "grad_norm": 0.911462664604187, "learning_rate": 8.299018322228864e-05, "loss": 0.9052, "step": 11948 }, { "epoch": 2.2205909682215204, "grad_norm": 0.8210474848747253, 
"learning_rate": 8.297578696316312e-05, "loss": 0.9744, "step": 11949 }, { "epoch": 2.2207768072848912, "grad_norm": 0.7710187435150146, "learning_rate": 8.296139106739026e-05, "loss": 1.0271, "step": 11950 }, { "epoch": 2.2209626463482626, "grad_norm": 0.8088672757148743, "learning_rate": 8.294699553527725e-05, "loss": 1.0682, "step": 11951 }, { "epoch": 2.2211484854116335, "grad_norm": 0.7803736925125122, "learning_rate": 8.293260036713135e-05, "loss": 0.8144, "step": 11952 }, { "epoch": 2.221334324475005, "grad_norm": 0.6293572187423706, "learning_rate": 8.291820556325984e-05, "loss": 0.8965, "step": 11953 }, { "epoch": 2.2215201635383757, "grad_norm": 0.6929018497467041, "learning_rate": 8.290381112396987e-05, "loss": 0.71, "step": 11954 }, { "epoch": 2.221706002601747, "grad_norm": 1.1523988246917725, "learning_rate": 8.288941704956876e-05, "loss": 0.9437, "step": 11955 }, { "epoch": 2.221891841665118, "grad_norm": 0.7967273592948914, "learning_rate": 8.287502334036365e-05, "loss": 1.0549, "step": 11956 }, { "epoch": 2.222077680728489, "grad_norm": 0.8078708052635193, "learning_rate": 8.286062999666182e-05, "loss": 0.8772, "step": 11957 }, { "epoch": 2.22226351979186, "grad_norm": 0.726443350315094, "learning_rate": 8.284623701877037e-05, "loss": 0.9252, "step": 11958 }, { "epoch": 2.2224493588552314, "grad_norm": 0.8753724694252014, "learning_rate": 8.28318444069966e-05, "loss": 0.8713, "step": 11959 }, { "epoch": 2.2226351979186023, "grad_norm": 0.5907177925109863, "learning_rate": 8.28174521616476e-05, "loss": 0.6124, "step": 11960 }, { "epoch": 2.2228210369819736, "grad_norm": 0.9451390504837036, "learning_rate": 8.280306028303063e-05, "loss": 1.117, "step": 11961 }, { "epoch": 2.2230068760453445, "grad_norm": 0.8585532903671265, "learning_rate": 8.278866877145282e-05, "loss": 1.0065, "step": 11962 }, { "epoch": 2.223192715108716, "grad_norm": 0.843278706073761, "learning_rate": 8.277427762722136e-05, "loss": 1.2527, "step": 11963 }, { "epoch": 
2.223378554172087, "grad_norm": 0.831453800201416, "learning_rate": 8.275988685064335e-05, "loss": 0.9158, "step": 11964 }, { "epoch": 2.223564393235458, "grad_norm": 0.757862389087677, "learning_rate": 8.274549644202602e-05, "loss": 0.7903, "step": 11965 }, { "epoch": 2.2237502322988294, "grad_norm": 0.7283931970596313, "learning_rate": 8.273110640167641e-05, "loss": 0.9407, "step": 11966 }, { "epoch": 2.2239360713622003, "grad_norm": 0.6937702894210815, "learning_rate": 8.271671672990173e-05, "loss": 0.8566, "step": 11967 }, { "epoch": 2.2241219104255716, "grad_norm": 0.9456591606140137, "learning_rate": 8.270232742700906e-05, "loss": 1.024, "step": 11968 }, { "epoch": 2.2243077494889425, "grad_norm": 0.9929325580596924, "learning_rate": 8.268793849330553e-05, "loss": 0.7111, "step": 11969 }, { "epoch": 2.224493588552314, "grad_norm": 1.037474274635315, "learning_rate": 8.267354992909822e-05, "loss": 0.7245, "step": 11970 }, { "epoch": 2.2246794276156847, "grad_norm": 0.7578388452529907, "learning_rate": 8.265916173469426e-05, "loss": 0.9581, "step": 11971 }, { "epoch": 2.224865266679056, "grad_norm": 0.9178577661514282, "learning_rate": 8.264477391040075e-05, "loss": 0.9945, "step": 11972 }, { "epoch": 2.225051105742427, "grad_norm": 1.0197036266326904, "learning_rate": 8.263038645652477e-05, "loss": 0.8752, "step": 11973 }, { "epoch": 2.2252369448057983, "grad_norm": 0.8248927593231201, "learning_rate": 8.261599937337336e-05, "loss": 0.8712, "step": 11974 }, { "epoch": 2.225422783869169, "grad_norm": 0.7531763315200806, "learning_rate": 8.260161266125363e-05, "loss": 0.8023, "step": 11975 }, { "epoch": 2.2256086229325405, "grad_norm": 1.0819270610809326, "learning_rate": 8.25872263204726e-05, "loss": 1.0796, "step": 11976 }, { "epoch": 2.2257944619959114, "grad_norm": 2.0839169025421143, "learning_rate": 8.257284035133738e-05, "loss": 1.4436, "step": 11977 }, { "epoch": 2.2259803010592827, "grad_norm": 0.856492817401886, "learning_rate": 8.255845475415494e-05, 
"loss": 0.9693, "step": 11978 }, { "epoch": 2.226166140122654, "grad_norm": 0.6808337569236755, "learning_rate": 8.254406952923238e-05, "loss": 0.698, "step": 11979 }, { "epoch": 2.226351979186025, "grad_norm": 0.8147267699241638, "learning_rate": 8.252968467687667e-05, "loss": 0.9628, "step": 11980 }, { "epoch": 2.2265378182493962, "grad_norm": 0.9136236310005188, "learning_rate": 8.251530019739485e-05, "loss": 0.6957, "step": 11981 }, { "epoch": 2.226723657312767, "grad_norm": 0.802736222743988, "learning_rate": 8.250091609109396e-05, "loss": 0.9213, "step": 11982 }, { "epoch": 2.2269094963761384, "grad_norm": 1.0352932214736938, "learning_rate": 8.248653235828102e-05, "loss": 1.1422, "step": 11983 }, { "epoch": 2.2270953354395093, "grad_norm": 0.8228843808174133, "learning_rate": 8.247214899926294e-05, "loss": 0.8079, "step": 11984 }, { "epoch": 2.2272811745028807, "grad_norm": 0.7414042949676514, "learning_rate": 8.24577660143468e-05, "loss": 0.9815, "step": 11985 }, { "epoch": 2.2274670135662515, "grad_norm": 0.8866422772407532, "learning_rate": 8.244338340383953e-05, "loss": 0.9617, "step": 11986 }, { "epoch": 2.227652852629623, "grad_norm": 0.894111156463623, "learning_rate": 8.242900116804811e-05, "loss": 1.0187, "step": 11987 }, { "epoch": 2.2278386916929938, "grad_norm": 0.8763172626495361, "learning_rate": 8.24146193072795e-05, "loss": 0.7959, "step": 11988 }, { "epoch": 2.228024530756365, "grad_norm": 0.8017054796218872, "learning_rate": 8.240023782184068e-05, "loss": 0.8997, "step": 11989 }, { "epoch": 2.228210369819736, "grad_norm": 0.7158930897712708, "learning_rate": 8.238585671203858e-05, "loss": 0.6309, "step": 11990 }, { "epoch": 2.2283962088831073, "grad_norm": 0.7901464104652405, "learning_rate": 8.237147597818009e-05, "loss": 0.7189, "step": 11991 }, { "epoch": 2.228582047946478, "grad_norm": 0.9525620937347412, "learning_rate": 8.235709562057224e-05, "loss": 0.7027, "step": 11992 }, { "epoch": 2.2287678870098495, "grad_norm": 
0.9726958274841309, "learning_rate": 8.234271563952192e-05, "loss": 0.9302, "step": 11993 }, { "epoch": 2.2289537260732204, "grad_norm": 0.7856528759002686, "learning_rate": 8.2328336035336e-05, "loss": 0.8738, "step": 11994 }, { "epoch": 2.2291395651365917, "grad_norm": 0.8742666840553284, "learning_rate": 8.231395680832145e-05, "loss": 0.8732, "step": 11995 }, { "epoch": 2.2293254041999626, "grad_norm": 0.7303487658500671, "learning_rate": 8.229957795878514e-05, "loss": 0.623, "step": 11996 }, { "epoch": 2.229511243263334, "grad_norm": 0.8090687990188599, "learning_rate": 8.228519948703396e-05, "loss": 0.8562, "step": 11997 }, { "epoch": 2.2296970823267053, "grad_norm": 0.7455593943595886, "learning_rate": 8.227082139337478e-05, "loss": 0.9119, "step": 11998 }, { "epoch": 2.229882921390076, "grad_norm": 0.7629585266113281, "learning_rate": 8.225644367811452e-05, "loss": 0.9862, "step": 11999 }, { "epoch": 2.2300687604534475, "grad_norm": 1.1065359115600586, "learning_rate": 8.224206634156e-05, "loss": 0.9766, "step": 12000 }, { "epoch": 2.2302545995168184, "grad_norm": 0.7593920826911926, "learning_rate": 8.222768938401812e-05, "loss": 0.9066, "step": 12001 }, { "epoch": 2.2304404385801897, "grad_norm": 0.9303902983665466, "learning_rate": 8.221331280579564e-05, "loss": 0.842, "step": 12002 }, { "epoch": 2.2306262776435606, "grad_norm": 0.811173141002655, "learning_rate": 8.219893660719954e-05, "loss": 0.7842, "step": 12003 }, { "epoch": 2.230812116706932, "grad_norm": 0.7823946475982666, "learning_rate": 8.218456078853658e-05, "loss": 0.7958, "step": 12004 }, { "epoch": 2.230997955770303, "grad_norm": 0.9735191464424133, "learning_rate": 8.21701853501136e-05, "loss": 0.8981, "step": 12005 }, { "epoch": 2.231183794833674, "grad_norm": 0.7602159380912781, "learning_rate": 8.215581029223742e-05, "loss": 0.8651, "step": 12006 }, { "epoch": 2.231369633897045, "grad_norm": 0.7160788774490356, "learning_rate": 8.214143561521485e-05, "loss": 0.9801, "step": 12007 }, { 
"epoch": 2.2315554729604163, "grad_norm": 0.8263550400733948, "learning_rate": 8.212706131935268e-05, "loss": 0.9681, "step": 12008 }, { "epoch": 2.2317413120237872, "grad_norm": 1.2644280195236206, "learning_rate": 8.211268740495773e-05, "loss": 1.2091, "step": 12009 }, { "epoch": 2.2319271510871586, "grad_norm": 0.983466386795044, "learning_rate": 8.209831387233676e-05, "loss": 0.8649, "step": 12010 }, { "epoch": 2.2321129901505294, "grad_norm": 0.8372595906257629, "learning_rate": 8.208394072179657e-05, "loss": 0.7551, "step": 12011 }, { "epoch": 2.2322988292139008, "grad_norm": 0.815680205821991, "learning_rate": 8.206956795364386e-05, "loss": 1.0987, "step": 12012 }, { "epoch": 2.232484668277272, "grad_norm": 0.9907569885253906, "learning_rate": 8.20551955681855e-05, "loss": 0.9979, "step": 12013 }, { "epoch": 2.232670507340643, "grad_norm": 0.8694087862968445, "learning_rate": 8.20408235657282e-05, "loss": 1.0583, "step": 12014 }, { "epoch": 2.2328563464040143, "grad_norm": 0.9345015287399292, "learning_rate": 8.20264519465787e-05, "loss": 0.9702, "step": 12015 }, { "epoch": 2.233042185467385, "grad_norm": 0.7867563962936401, "learning_rate": 8.201208071104374e-05, "loss": 0.7381, "step": 12016 }, { "epoch": 2.2332280245307565, "grad_norm": 0.9041784405708313, "learning_rate": 8.199770985943006e-05, "loss": 1.0081, "step": 12017 }, { "epoch": 2.2334138635941274, "grad_norm": 0.842856764793396, "learning_rate": 8.198333939204435e-05, "loss": 1.0738, "step": 12018 }, { "epoch": 2.2335997026574987, "grad_norm": 1.0508366823196411, "learning_rate": 8.196896930919336e-05, "loss": 0.8267, "step": 12019 }, { "epoch": 2.2337855417208696, "grad_norm": 0.8856883645057678, "learning_rate": 8.195459961118377e-05, "loss": 0.9387, "step": 12020 }, { "epoch": 2.233971380784241, "grad_norm": 0.874148428440094, "learning_rate": 8.19402302983223e-05, "loss": 1.0397, "step": 12021 }, { "epoch": 2.234157219847612, "grad_norm": 0.9051022529602051, "learning_rate": 
8.192586137091559e-05, "loss": 0.8769, "step": 12022 }, { "epoch": 2.234343058910983, "grad_norm": 0.9347072839736938, "learning_rate": 8.191149282927034e-05, "loss": 0.9741, "step": 12023 }, { "epoch": 2.234528897974354, "grad_norm": 0.9115281701087952, "learning_rate": 8.189712467369325e-05, "loss": 1.0689, "step": 12024 }, { "epoch": 2.2347147370377254, "grad_norm": 0.8097609877586365, "learning_rate": 8.1882756904491e-05, "loss": 0.8385, "step": 12025 }, { "epoch": 2.2349005761010963, "grad_norm": 0.8436894416809082, "learning_rate": 8.186838952197018e-05, "loss": 0.9244, "step": 12026 }, { "epoch": 2.2350864151644676, "grad_norm": 0.788254976272583, "learning_rate": 8.18540225264375e-05, "loss": 0.9671, "step": 12027 }, { "epoch": 2.2352722542278385, "grad_norm": 0.885251522064209, "learning_rate": 8.183965591819954e-05, "loss": 0.8399, "step": 12028 }, { "epoch": 2.23545809329121, "grad_norm": 0.7178447842597961, "learning_rate": 8.182528969756299e-05, "loss": 0.8732, "step": 12029 }, { "epoch": 2.2356439323545807, "grad_norm": 0.8282201290130615, "learning_rate": 8.181092386483442e-05, "loss": 0.7439, "step": 12030 }, { "epoch": 2.235829771417952, "grad_norm": 0.6753260493278503, "learning_rate": 8.179655842032048e-05, "loss": 0.745, "step": 12031 }, { "epoch": 2.2360156104813234, "grad_norm": 0.7672291994094849, "learning_rate": 8.178219336432775e-05, "loss": 1.0459, "step": 12032 }, { "epoch": 2.2362014495446942, "grad_norm": 0.7237166166305542, "learning_rate": 8.176782869716282e-05, "loss": 0.8665, "step": 12033 }, { "epoch": 2.2363872886080656, "grad_norm": 0.8562415838241577, "learning_rate": 8.17534644191323e-05, "loss": 1.0771, "step": 12034 }, { "epoch": 2.2365731276714365, "grad_norm": 0.869492769241333, "learning_rate": 8.17391005305428e-05, "loss": 1.0328, "step": 12035 }, { "epoch": 2.236758966734808, "grad_norm": 0.7791122794151306, "learning_rate": 8.172473703170085e-05, "loss": 0.9982, "step": 12036 }, { "epoch": 2.2369448057981787, 
"grad_norm": 0.947382926940918, "learning_rate": 8.171037392291305e-05, "loss": 1.0431, "step": 12037 }, { "epoch": 2.23713064486155, "grad_norm": 0.7841407656669617, "learning_rate": 8.16960112044859e-05, "loss": 0.8743, "step": 12038 }, { "epoch": 2.237316483924921, "grad_norm": 0.8942570090293884, "learning_rate": 8.168164887672602e-05, "loss": 1.0514, "step": 12039 }, { "epoch": 2.237502322988292, "grad_norm": 0.8452247977256775, "learning_rate": 8.166728693993987e-05, "loss": 0.7703, "step": 12040 }, { "epoch": 2.237688162051663, "grad_norm": 0.9405089616775513, "learning_rate": 8.165292539443405e-05, "loss": 1.0186, "step": 12041 }, { "epoch": 2.2378740011150344, "grad_norm": 0.8000966310501099, "learning_rate": 8.163856424051502e-05, "loss": 0.9675, "step": 12042 }, { "epoch": 2.2380598401784053, "grad_norm": 0.7403755784034729, "learning_rate": 8.162420347848934e-05, "loss": 0.9268, "step": 12043 }, { "epoch": 2.2382456792417766, "grad_norm": 0.81078040599823, "learning_rate": 8.160984310866348e-05, "loss": 0.8997, "step": 12044 }, { "epoch": 2.2384315183051475, "grad_norm": 0.8180494904518127, "learning_rate": 8.1595483131344e-05, "loss": 0.9303, "step": 12045 }, { "epoch": 2.238617357368519, "grad_norm": 0.8312895894050598, "learning_rate": 8.158112354683732e-05, "loss": 0.9163, "step": 12046 }, { "epoch": 2.23880319643189, "grad_norm": 0.916143536567688, "learning_rate": 8.156676435544997e-05, "loss": 0.9956, "step": 12047 }, { "epoch": 2.238989035495261, "grad_norm": 0.7745480537414551, "learning_rate": 8.15524055574884e-05, "loss": 0.9158, "step": 12048 }, { "epoch": 2.2391748745586324, "grad_norm": 0.6603816747665405, "learning_rate": 8.153804715325908e-05, "loss": 0.6452, "step": 12049 }, { "epoch": 2.2393607136220033, "grad_norm": 0.8387047648429871, "learning_rate": 8.152368914306846e-05, "loss": 1.0962, "step": 12050 }, { "epoch": 2.2395465526853746, "grad_norm": 0.7379850745201111, "learning_rate": 8.150933152722297e-05, "loss": 0.8961, "step": 
12051 }, { "epoch": 2.2397323917487455, "grad_norm": 0.8029947280883789, "learning_rate": 8.149497430602909e-05, "loss": 1.0276, "step": 12052 }, { "epoch": 2.239918230812117, "grad_norm": 0.9665631651878357, "learning_rate": 8.14806174797932e-05, "loss": 1.0227, "step": 12053 }, { "epoch": 2.2401040698754877, "grad_norm": 0.9259684085845947, "learning_rate": 8.146626104882173e-05, "loss": 0.943, "step": 12054 }, { "epoch": 2.240289908938859, "grad_norm": 0.8665300607681274, "learning_rate": 8.145190501342117e-05, "loss": 0.9626, "step": 12055 }, { "epoch": 2.24047574800223, "grad_norm": 0.7480776309967041, "learning_rate": 8.143754937389784e-05, "loss": 0.9297, "step": 12056 }, { "epoch": 2.2406615870656013, "grad_norm": 1.044670581817627, "learning_rate": 8.142319413055818e-05, "loss": 0.9313, "step": 12057 }, { "epoch": 2.240847426128972, "grad_norm": 0.81687331199646, "learning_rate": 8.140883928370855e-05, "loss": 0.8503, "step": 12058 }, { "epoch": 2.2410332651923435, "grad_norm": 0.8070717453956604, "learning_rate": 8.139448483365535e-05, "loss": 0.8644, "step": 12059 }, { "epoch": 2.2412191042557144, "grad_norm": 0.7749050855636597, "learning_rate": 8.138013078070493e-05, "loss": 0.8437, "step": 12060 }, { "epoch": 2.2414049433190857, "grad_norm": 0.8389307856559753, "learning_rate": 8.136577712516369e-05, "loss": 0.5763, "step": 12061 }, { "epoch": 2.241590782382457, "grad_norm": 0.7573163509368896, "learning_rate": 8.135142386733794e-05, "loss": 0.7237, "step": 12062 }, { "epoch": 2.241776621445828, "grad_norm": 0.5418093204498291, "learning_rate": 8.133707100753404e-05, "loss": 0.4378, "step": 12063 }, { "epoch": 2.2419624605091992, "grad_norm": 0.784127414226532, "learning_rate": 8.132271854605835e-05, "loss": 0.963, "step": 12064 }, { "epoch": 2.24214829957257, "grad_norm": 0.7751089930534363, "learning_rate": 8.130836648321713e-05, "loss": 1.0521, "step": 12065 }, { "epoch": 2.2423341386359414, "grad_norm": 0.6985954642295837, "learning_rate": 
8.12940148193168e-05, "loss": 0.7062, "step": 12066 }, { "epoch": 2.2425199776993123, "grad_norm": 0.8636794090270996, "learning_rate": 8.127966355466363e-05, "loss": 0.975, "step": 12067 }, { "epoch": 2.2427058167626837, "grad_norm": 0.9296517968177795, "learning_rate": 8.126531268956389e-05, "loss": 0.992, "step": 12068 }, { "epoch": 2.2428916558260545, "grad_norm": 0.9428527355194092, "learning_rate": 8.125096222432392e-05, "loss": 0.8024, "step": 12069 }, { "epoch": 2.243077494889426, "grad_norm": 0.786514937877655, "learning_rate": 8.123661215924999e-05, "loss": 0.9604, "step": 12070 }, { "epoch": 2.2432633339527968, "grad_norm": 0.8809694051742554, "learning_rate": 8.122226249464836e-05, "loss": 1.2671, "step": 12071 }, { "epoch": 2.243449173016168, "grad_norm": 0.791553258895874, "learning_rate": 8.120791323082531e-05, "loss": 0.9265, "step": 12072 }, { "epoch": 2.243635012079539, "grad_norm": 0.769401490688324, "learning_rate": 8.11935643680871e-05, "loss": 0.9175, "step": 12073 }, { "epoch": 2.2438208511429103, "grad_norm": 0.9083631038665771, "learning_rate": 8.117921590674001e-05, "loss": 1.0466, "step": 12074 }, { "epoch": 2.244006690206281, "grad_norm": 0.8304582238197327, "learning_rate": 8.116486784709022e-05, "loss": 1.1085, "step": 12075 }, { "epoch": 2.2441925292696525, "grad_norm": 1.6944912672042847, "learning_rate": 8.115052018944402e-05, "loss": 1.3513, "step": 12076 }, { "epoch": 2.2443783683330234, "grad_norm": 0.9561991095542908, "learning_rate": 8.113617293410763e-05, "loss": 1.0121, "step": 12077 }, { "epoch": 2.2445642073963947, "grad_norm": 0.8028138875961304, "learning_rate": 8.112182608138725e-05, "loss": 0.9769, "step": 12078 }, { "epoch": 2.2447500464597656, "grad_norm": 1.1675058603286743, "learning_rate": 8.11074796315891e-05, "loss": 1.0301, "step": 12079 }, { "epoch": 2.244935885523137, "grad_norm": 0.7154922485351562, "learning_rate": 8.10931335850194e-05, "loss": 0.9621, "step": 12080 }, { "epoch": 2.2451217245865083, 
"grad_norm": 0.9804053902626038, "learning_rate": 8.10787879419843e-05, "loss": 0.8526, "step": 12081 }, { "epoch": 2.245307563649879, "grad_norm": 0.859544038772583, "learning_rate": 8.106444270278999e-05, "loss": 0.9144, "step": 12082 }, { "epoch": 2.2454934027132505, "grad_norm": 0.6977066397666931, "learning_rate": 8.105009786774264e-05, "loss": 0.5816, "step": 12083 }, { "epoch": 2.2456792417766214, "grad_norm": 1.0285512208938599, "learning_rate": 8.103575343714847e-05, "loss": 0.9562, "step": 12084 }, { "epoch": 2.2458650808399927, "grad_norm": 0.9995384216308594, "learning_rate": 8.102140941131358e-05, "loss": 0.9575, "step": 12085 }, { "epoch": 2.2460509199033636, "grad_norm": 1.0273016691207886, "learning_rate": 8.10070657905441e-05, "loss": 0.895, "step": 12086 }, { "epoch": 2.246236758966735, "grad_norm": 0.8160095810890198, "learning_rate": 8.099272257514627e-05, "loss": 1.02, "step": 12087 }, { "epoch": 2.246422598030106, "grad_norm": 0.8777551651000977, "learning_rate": 8.097837976542612e-05, "loss": 1.1007, "step": 12088 }, { "epoch": 2.246608437093477, "grad_norm": 0.7665696740150452, "learning_rate": 8.096403736168985e-05, "loss": 0.9315, "step": 12089 }, { "epoch": 2.246794276156848, "grad_norm": 0.8144046664237976, "learning_rate": 8.094969536424351e-05, "loss": 0.9009, "step": 12090 }, { "epoch": 2.2469801152202193, "grad_norm": 0.7302137613296509, "learning_rate": 8.093535377339325e-05, "loss": 0.8593, "step": 12091 }, { "epoch": 2.2471659542835902, "grad_norm": 0.6795136332511902, "learning_rate": 8.092101258944512e-05, "loss": 0.9363, "step": 12092 }, { "epoch": 2.2473517933469616, "grad_norm": 0.7593479752540588, "learning_rate": 8.090667181270524e-05, "loss": 0.89, "step": 12093 }, { "epoch": 2.2475376324103324, "grad_norm": 0.832933247089386, "learning_rate": 8.08923314434797e-05, "loss": 0.755, "step": 12094 }, { "epoch": 2.2477234714737038, "grad_norm": 0.8301267623901367, "learning_rate": 8.087799148207455e-05, "loss": 0.9751, "step": 
12095 }, { "epoch": 2.247909310537075, "grad_norm": 1.8187836408615112, "learning_rate": 8.086365192879582e-05, "loss": 1.3092, "step": 12096 }, { "epoch": 2.248095149600446, "grad_norm": 0.767377495765686, "learning_rate": 8.084931278394964e-05, "loss": 0.9277, "step": 12097 }, { "epoch": 2.2482809886638173, "grad_norm": 0.9382076263427734, "learning_rate": 8.083497404784201e-05, "loss": 1.0645, "step": 12098 }, { "epoch": 2.248466827727188, "grad_norm": 0.824059247970581, "learning_rate": 8.082063572077899e-05, "loss": 0.8328, "step": 12099 }, { "epoch": 2.2486526667905595, "grad_norm": 1.642223596572876, "learning_rate": 8.080629780306655e-05, "loss": 1.2618, "step": 12100 }, { "epoch": 2.2488385058539304, "grad_norm": 0.7809373140335083, "learning_rate": 8.079196029501077e-05, "loss": 0.9178, "step": 12101 }, { "epoch": 2.2490243449173017, "grad_norm": 0.7867723703384399, "learning_rate": 8.077762319691763e-05, "loss": 1.1984, "step": 12102 }, { "epoch": 2.2492101839806726, "grad_norm": 0.8529563546180725, "learning_rate": 8.076328650909313e-05, "loss": 0.8242, "step": 12103 }, { "epoch": 2.249396023044044, "grad_norm": 0.5668296813964844, "learning_rate": 8.074895023184328e-05, "loss": 0.4841, "step": 12104 }, { "epoch": 2.249581862107415, "grad_norm": 0.8013913631439209, "learning_rate": 8.073461436547402e-05, "loss": 1.1196, "step": 12105 }, { "epoch": 2.249767701170786, "grad_norm": 0.8628795146942139, "learning_rate": 8.07202789102914e-05, "loss": 0.9863, "step": 12106 }, { "epoch": 2.249953540234157, "grad_norm": 0.9259904623031616, "learning_rate": 8.07059438666013e-05, "loss": 1.2337, "step": 12107 }, { "epoch": 2.2501393792975284, "grad_norm": 1.2544859647750854, "learning_rate": 8.069160923470974e-05, "loss": 0.7573, "step": 12108 }, { "epoch": 2.2503252183608993, "grad_norm": 0.6399009227752686, "learning_rate": 8.067727501492265e-05, "loss": 0.6587, "step": 12109 }, { "epoch": 2.2505110574242706, "grad_norm": 0.7670935988426208, "learning_rate": 
8.066294120754595e-05, "loss": 0.89, "step": 12110 }, { "epoch": 2.250696896487642, "grad_norm": 0.695530891418457, "learning_rate": 8.064860781288561e-05, "loss": 0.7293, "step": 12111 }, { "epoch": 2.250882735551013, "grad_norm": 0.7325321435928345, "learning_rate": 8.063427483124751e-05, "loss": 0.8391, "step": 12112 }, { "epoch": 2.2510685746143837, "grad_norm": 0.9947556257247925, "learning_rate": 8.061994226293757e-05, "loss": 1.0669, "step": 12113 }, { "epoch": 2.251254413677755, "grad_norm": 0.8754369616508484, "learning_rate": 8.060561010826173e-05, "loss": 1.1143, "step": 12114 }, { "epoch": 2.251254413677755, "eval_loss": 1.0257036685943604, "eval_runtime": 23.5312, "eval_samples_per_second": 46.406, "eval_steps_per_second": 23.203, "step": 12114 }, { "epoch": 2.2514402527411264, "grad_norm": 0.8003126978874207, "learning_rate": 8.059127836752585e-05, "loss": 1.0135, "step": 12115 }, { "epoch": 2.2516260918044972, "grad_norm": 0.815754771232605, "learning_rate": 8.057694704103584e-05, "loss": 0.911, "step": 12116 }, { "epoch": 2.2518119308678686, "grad_norm": 0.7302402257919312, "learning_rate": 8.056261612909752e-05, "loss": 0.9386, "step": 12117 }, { "epoch": 2.2519977699312395, "grad_norm": 0.810268223285675, "learning_rate": 8.054828563201682e-05, "loss": 0.6335, "step": 12118 }, { "epoch": 2.252183608994611, "grad_norm": 1.025160551071167, "learning_rate": 8.053395555009961e-05, "loss": 0.9549, "step": 12119 }, { "epoch": 2.2523694480579817, "grad_norm": 1.0502477884292603, "learning_rate": 8.051962588365169e-05, "loss": 0.9445, "step": 12120 }, { "epoch": 2.252555287121353, "grad_norm": 0.6322656273841858, "learning_rate": 8.050529663297895e-05, "loss": 0.6752, "step": 12121 }, { "epoch": 2.252741126184724, "grad_norm": 0.8192923069000244, "learning_rate": 8.049096779838719e-05, "loss": 0.9635, "step": 12122 }, { "epoch": 2.252926965248095, "grad_norm": 0.7621162533760071, "learning_rate": 8.047663938018223e-05, "loss": 0.8157, "step": 12123 }, { 
"epoch": 2.253112804311466, "grad_norm": 0.818742573261261, "learning_rate": 8.046231137866992e-05, "loss": 0.9787, "step": 12124 }, { "epoch": 2.2532986433748374, "grad_norm": 0.7273044586181641, "learning_rate": 8.044798379415603e-05, "loss": 0.737, "step": 12125 }, { "epoch": 2.2534844824382088, "grad_norm": 0.7540285587310791, "learning_rate": 8.043365662694639e-05, "loss": 0.7866, "step": 12126 }, { "epoch": 2.2536703215015796, "grad_norm": 0.7194929122924805, "learning_rate": 8.041932987734674e-05, "loss": 0.8063, "step": 12127 }, { "epoch": 2.2538561605649505, "grad_norm": 0.7326306104660034, "learning_rate": 8.040500354566288e-05, "loss": 0.8546, "step": 12128 }, { "epoch": 2.254041999628322, "grad_norm": 0.7915225625038147, "learning_rate": 8.039067763220064e-05, "loss": 0.979, "step": 12129 }, { "epoch": 2.254227838691693, "grad_norm": 0.671522319316864, "learning_rate": 8.037635213726571e-05, "loss": 0.6696, "step": 12130 }, { "epoch": 2.254413677755064, "grad_norm": 0.8632599711418152, "learning_rate": 8.03620270611639e-05, "loss": 1.0085, "step": 12131 }, { "epoch": 2.2545995168184354, "grad_norm": 0.9222419857978821, "learning_rate": 8.034770240420089e-05, "loss": 0.7619, "step": 12132 }, { "epoch": 2.2547853558818063, "grad_norm": 0.804999828338623, "learning_rate": 8.033337816668244e-05, "loss": 1.0261, "step": 12133 }, { "epoch": 2.2549711949451776, "grad_norm": 0.7770805358886719, "learning_rate": 8.031905434891431e-05, "loss": 0.7145, "step": 12134 }, { "epoch": 2.2551570340085485, "grad_norm": 0.9589446187019348, "learning_rate": 8.030473095120218e-05, "loss": 1.1274, "step": 12135 }, { "epoch": 2.25534287307192, "grad_norm": 0.7501167058944702, "learning_rate": 8.029040797385178e-05, "loss": 0.7728, "step": 12136 }, { "epoch": 2.2555287121352907, "grad_norm": 0.8118953704833984, "learning_rate": 8.027608541716878e-05, "loss": 1.0878, "step": 12137 }, { "epoch": 2.255714551198662, "grad_norm": 0.7665377259254456, "learning_rate": 
8.026176328145887e-05, "loss": 0.9035, "step": 12138 }, { "epoch": 2.255900390262033, "grad_norm": 0.7800799012184143, "learning_rate": 8.02474415670278e-05, "loss": 1.0653, "step": 12139 }, { "epoch": 2.2560862293254043, "grad_norm": 3.0759878158569336, "learning_rate": 8.023312027418116e-05, "loss": 0.9193, "step": 12140 }, { "epoch": 2.256272068388775, "grad_norm": 0.7537910342216492, "learning_rate": 8.021879940322467e-05, "loss": 1.0038, "step": 12141 }, { "epoch": 2.2564579074521465, "grad_norm": 0.6949830651283264, "learning_rate": 8.020447895446395e-05, "loss": 0.9341, "step": 12142 }, { "epoch": 2.2566437465155174, "grad_norm": 0.7067804336547852, "learning_rate": 8.019015892820464e-05, "loss": 0.8213, "step": 12143 }, { "epoch": 2.2568295855788887, "grad_norm": 1.0574980974197388, "learning_rate": 8.017583932475242e-05, "loss": 1.038, "step": 12144 }, { "epoch": 2.25701542464226, "grad_norm": 0.8189867734909058, "learning_rate": 8.016152014441287e-05, "loss": 0.9441, "step": 12145 }, { "epoch": 2.257201263705631, "grad_norm": 0.790252685546875, "learning_rate": 8.014720138749166e-05, "loss": 1.026, "step": 12146 }, { "epoch": 2.2573871027690022, "grad_norm": 1.8028156757354736, "learning_rate": 8.013288305429433e-05, "loss": 1.297, "step": 12147 }, { "epoch": 2.257572941832373, "grad_norm": 0.8649793863296509, "learning_rate": 8.011856514512655e-05, "loss": 1.0208, "step": 12148 }, { "epoch": 2.2577587808957444, "grad_norm": 0.9388594627380371, "learning_rate": 8.010424766029383e-05, "loss": 0.8088, "step": 12149 }, { "epoch": 2.2579446199591153, "grad_norm": 0.8206169009208679, "learning_rate": 8.008993060010183e-05, "loss": 1.0357, "step": 12150 }, { "epoch": 2.2581304590224867, "grad_norm": 0.7279551029205322, "learning_rate": 8.00756139648561e-05, "loss": 0.7968, "step": 12151 }, { "epoch": 2.2583162980858575, "grad_norm": 0.8391129970550537, "learning_rate": 8.00612977548622e-05, "loss": 0.7751, "step": 12152 }, { "epoch": 2.258502137149229, 
"grad_norm": 0.907163143157959, "learning_rate": 8.004698197042567e-05, "loss": 0.8934, "step": 12153 }, { "epoch": 2.2586879762125998, "grad_norm": 0.7368512153625488, "learning_rate": 8.003266661185209e-05, "loss": 0.8552, "step": 12154 }, { "epoch": 2.258873815275971, "grad_norm": 0.7704903483390808, "learning_rate": 8.001835167944695e-05, "loss": 0.8249, "step": 12155 }, { "epoch": 2.259059654339342, "grad_norm": 0.6785067915916443, "learning_rate": 8.000403717351583e-05, "loss": 0.9551, "step": 12156 }, { "epoch": 2.2592454934027133, "grad_norm": 0.799774706363678, "learning_rate": 7.99897230943642e-05, "loss": 0.776, "step": 12157 }, { "epoch": 2.259431332466084, "grad_norm": 0.7252311110496521, "learning_rate": 7.99754094422976e-05, "loss": 0.8019, "step": 12158 }, { "epoch": 2.2596171715294555, "grad_norm": 0.7100863456726074, "learning_rate": 7.99610962176215e-05, "loss": 0.8433, "step": 12159 }, { "epoch": 2.259803010592827, "grad_norm": 0.7952927350997925, "learning_rate": 7.994678342064144e-05, "loss": 1.2275, "step": 12160 }, { "epoch": 2.2599888496561977, "grad_norm": 0.7643435001373291, "learning_rate": 7.993247105166288e-05, "loss": 0.9501, "step": 12161 }, { "epoch": 2.2601746887195686, "grad_norm": 0.7532644271850586, "learning_rate": 7.991815911099126e-05, "loss": 0.8428, "step": 12162 }, { "epoch": 2.26036052778294, "grad_norm": 0.8832160234451294, "learning_rate": 7.990384759893209e-05, "loss": 1.0004, "step": 12163 }, { "epoch": 2.2605463668463113, "grad_norm": 0.7995880842208862, "learning_rate": 7.988953651579081e-05, "loss": 0.8839, "step": 12164 }, { "epoch": 2.260732205909682, "grad_norm": 0.9771887063980103, "learning_rate": 7.987522586187285e-05, "loss": 1.157, "step": 12165 }, { "epoch": 2.2609180449730535, "grad_norm": 0.743340790271759, "learning_rate": 7.986091563748369e-05, "loss": 0.7109, "step": 12166 }, { "epoch": 2.2611038840364244, "grad_norm": 0.7299709320068359, "learning_rate": 7.98466058429287e-05, "loss": 0.7499, "step": 
12167 }, { "epoch": 2.2612897230997957, "grad_norm": 0.9508938789367676, "learning_rate": 7.983229647851333e-05, "loss": 1.0407, "step": 12168 }, { "epoch": 2.2614755621631666, "grad_norm": 0.8278660178184509, "learning_rate": 7.981798754454298e-05, "loss": 0.9318, "step": 12169 }, { "epoch": 2.261661401226538, "grad_norm": 1.05748450756073, "learning_rate": 7.980367904132302e-05, "loss": 1.0355, "step": 12170 }, { "epoch": 2.261847240289909, "grad_norm": 0.7254590392112732, "learning_rate": 7.978937096915893e-05, "loss": 0.8755, "step": 12171 }, { "epoch": 2.26203307935328, "grad_norm": 0.8196592926979065, "learning_rate": 7.977506332835599e-05, "loss": 1.0426, "step": 12172 }, { "epoch": 2.262218918416651, "grad_norm": 1.0252938270568848, "learning_rate": 7.976075611921963e-05, "loss": 1.1224, "step": 12173 }, { "epoch": 2.2624047574800223, "grad_norm": 0.7816571593284607, "learning_rate": 7.974644934205521e-05, "loss": 0.7767, "step": 12174 }, { "epoch": 2.2625905965433932, "grad_norm": 0.7756083607673645, "learning_rate": 7.973214299716806e-05, "loss": 0.9505, "step": 12175 }, { "epoch": 2.2627764356067646, "grad_norm": 0.6845076084136963, "learning_rate": 7.971783708486355e-05, "loss": 0.8793, "step": 12176 }, { "epoch": 2.2629622746701354, "grad_norm": 0.8913975358009338, "learning_rate": 7.970353160544699e-05, "loss": 0.9497, "step": 12177 }, { "epoch": 2.2631481137335068, "grad_norm": 0.718268871307373, "learning_rate": 7.968922655922374e-05, "loss": 1.0769, "step": 12178 }, { "epoch": 2.263333952796878, "grad_norm": 0.7820630073547363, "learning_rate": 7.967492194649907e-05, "loss": 0.8911, "step": 12179 }, { "epoch": 2.263519791860249, "grad_norm": 0.7482863664627075, "learning_rate": 7.966061776757828e-05, "loss": 1.0707, "step": 12180 }, { "epoch": 2.2637056309236203, "grad_norm": 0.9600237607955933, "learning_rate": 7.964631402276675e-05, "loss": 1.035, "step": 12181 }, { "epoch": 2.263891469986991, "grad_norm": 0.7653817534446716, "learning_rate": 
7.963201071236971e-05, "loss": 0.8424, "step": 12182 }, { "epoch": 2.2640773090503625, "grad_norm": 1.5581787824630737, "learning_rate": 7.961770783669244e-05, "loss": 1.3041, "step": 12183 }, { "epoch": 2.2642631481137334, "grad_norm": 0.693136990070343, "learning_rate": 7.960340539604023e-05, "loss": 0.9574, "step": 12184 }, { "epoch": 2.2644489871771047, "grad_norm": 0.7554642558097839, "learning_rate": 7.958910339071831e-05, "loss": 0.7301, "step": 12185 }, { "epoch": 2.2646348262404756, "grad_norm": 0.9046366810798645, "learning_rate": 7.957480182103198e-05, "loss": 0.9854, "step": 12186 }, { "epoch": 2.264820665303847, "grad_norm": 0.791377067565918, "learning_rate": 7.956050068728643e-05, "loss": 0.7111, "step": 12187 }, { "epoch": 2.265006504367218, "grad_norm": 0.8362942337989807, "learning_rate": 7.954619998978694e-05, "loss": 1.0304, "step": 12188 }, { "epoch": 2.265192343430589, "grad_norm": 0.7706637978553772, "learning_rate": 7.95318997288387e-05, "loss": 0.8028, "step": 12189 }, { "epoch": 2.26537818249396, "grad_norm": 0.8431205749511719, "learning_rate": 7.951759990474694e-05, "loss": 0.9803, "step": 12190 }, { "epoch": 2.2655640215573314, "grad_norm": 0.7928107976913452, "learning_rate": 7.950330051781682e-05, "loss": 1.0411, "step": 12191 }, { "epoch": 2.2657498606207023, "grad_norm": 0.7658857107162476, "learning_rate": 7.948900156835361e-05, "loss": 0.6491, "step": 12192 }, { "epoch": 2.2659356996840736, "grad_norm": 0.9177638292312622, "learning_rate": 7.947470305666245e-05, "loss": 0.8517, "step": 12193 }, { "epoch": 2.266121538747445, "grad_norm": 0.6885226368904114, "learning_rate": 7.946040498304856e-05, "loss": 0.7414, "step": 12194 }, { "epoch": 2.266307377810816, "grad_norm": 0.8419578075408936, "learning_rate": 7.944610734781706e-05, "loss": 0.816, "step": 12195 }, { "epoch": 2.266493216874187, "grad_norm": 0.8276805281639099, "learning_rate": 7.943181015127314e-05, "loss": 0.9183, "step": 12196 }, { "epoch": 2.266679055937558, 
"grad_norm": 0.7674410343170166, "learning_rate": 7.941751339372192e-05, "loss": 1.0516, "step": 12197 }, { "epoch": 2.2668648950009294, "grad_norm": 0.9558389186859131, "learning_rate": 7.940321707546857e-05, "loss": 1.1491, "step": 12198 }, { "epoch": 2.2670507340643002, "grad_norm": 0.8432926535606384, "learning_rate": 7.93889211968182e-05, "loss": 0.9333, "step": 12199 }, { "epoch": 2.2672365731276716, "grad_norm": 0.7996000051498413, "learning_rate": 7.937462575807593e-05, "loss": 0.8055, "step": 12200 }, { "epoch": 2.2674224121910425, "grad_norm": 0.7567355036735535, "learning_rate": 7.936033075954687e-05, "loss": 1.0259, "step": 12201 }, { "epoch": 2.267608251254414, "grad_norm": 1.0439573526382446, "learning_rate": 7.934603620153614e-05, "loss": 1.1349, "step": 12202 }, { "epoch": 2.2677940903177847, "grad_norm": 1.0130488872528076, "learning_rate": 7.933174208434882e-05, "loss": 1.0292, "step": 12203 }, { "epoch": 2.267979929381156, "grad_norm": 0.7859103083610535, "learning_rate": 7.931744840829e-05, "loss": 0.9531, "step": 12204 }, { "epoch": 2.268165768444527, "grad_norm": 0.7681362628936768, "learning_rate": 7.930315517366475e-05, "loss": 0.8845, "step": 12205 }, { "epoch": 2.268351607507898, "grad_norm": 0.7114256024360657, "learning_rate": 7.928886238077816e-05, "loss": 0.9216, "step": 12206 }, { "epoch": 2.268537446571269, "grad_norm": 0.935510516166687, "learning_rate": 7.927457002993524e-05, "loss": 0.881, "step": 12207 }, { "epoch": 2.2687232856346404, "grad_norm": 0.7839011549949646, "learning_rate": 7.926027812144106e-05, "loss": 0.9851, "step": 12208 }, { "epoch": 2.2689091246980118, "grad_norm": 0.8583346605300903, "learning_rate": 7.924598665560065e-05, "loss": 1.053, "step": 12209 }, { "epoch": 2.2690949637613826, "grad_norm": 0.811122477054596, "learning_rate": 7.923169563271904e-05, "loss": 0.8436, "step": 12210 }, { "epoch": 2.2692808028247535, "grad_norm": 0.7919806838035583, "learning_rate": 7.921740505310125e-05, "loss": 0.7359, 
"step": 12211 }, { "epoch": 2.269466641888125, "grad_norm": 0.686648428440094, "learning_rate": 7.920311491705225e-05, "loss": 0.7526, "step": 12212 }, { "epoch": 2.269652480951496, "grad_norm": 0.8840171098709106, "learning_rate": 7.91888252248771e-05, "loss": 1.2036, "step": 12213 }, { "epoch": 2.269838320014867, "grad_norm": 0.8836767673492432, "learning_rate": 7.917453597688078e-05, "loss": 0.876, "step": 12214 }, { "epoch": 2.2700241590782384, "grad_norm": 0.8597715497016907, "learning_rate": 7.916024717336823e-05, "loss": 0.6955, "step": 12215 }, { "epoch": 2.2702099981416093, "grad_norm": 1.0432536602020264, "learning_rate": 7.914595881464448e-05, "loss": 0.9834, "step": 12216 }, { "epoch": 2.2703958372049806, "grad_norm": 0.930818498134613, "learning_rate": 7.913167090101442e-05, "loss": 0.9366, "step": 12217 }, { "epoch": 2.2705816762683515, "grad_norm": 0.8075294494628906, "learning_rate": 7.911738343278304e-05, "loss": 1.0056, "step": 12218 }, { "epoch": 2.270767515331723, "grad_norm": 0.7768161296844482, "learning_rate": 7.910309641025526e-05, "loss": 0.9891, "step": 12219 }, { "epoch": 2.2709533543950937, "grad_norm": 0.797738254070282, "learning_rate": 7.908880983373604e-05, "loss": 1.0823, "step": 12220 }, { "epoch": 2.271139193458465, "grad_norm": 0.7448236346244812, "learning_rate": 7.907452370353028e-05, "loss": 0.7997, "step": 12221 }, { "epoch": 2.271325032521836, "grad_norm": 0.9213894605636597, "learning_rate": 7.906023801994289e-05, "loss": 0.8196, "step": 12222 }, { "epoch": 2.2715108715852073, "grad_norm": 0.8545947074890137, "learning_rate": 7.904595278327879e-05, "loss": 0.9532, "step": 12223 }, { "epoch": 2.271696710648578, "grad_norm": 0.9589551687240601, "learning_rate": 7.90316679938429e-05, "loss": 0.9656, "step": 12224 }, { "epoch": 2.2718825497119495, "grad_norm": 0.8454769849777222, "learning_rate": 7.901738365194004e-05, "loss": 0.7572, "step": 12225 }, { "epoch": 2.2720683887753204, "grad_norm": 0.8186737895011902, 
"learning_rate": 7.900309975787515e-05, "loss": 1.0801, "step": 12226 }, { "epoch": 2.2722542278386917, "grad_norm": 0.7751138210296631, "learning_rate": 7.898881631195303e-05, "loss": 0.8403, "step": 12227 }, { "epoch": 2.272440066902063, "grad_norm": 0.7126848101615906, "learning_rate": 7.897453331447862e-05, "loss": 0.8724, "step": 12228 }, { "epoch": 2.272625905965434, "grad_norm": 0.8206993341445923, "learning_rate": 7.896025076575667e-05, "loss": 0.7512, "step": 12229 }, { "epoch": 2.2728117450288052, "grad_norm": 0.7583099007606506, "learning_rate": 7.894596866609208e-05, "loss": 0.8338, "step": 12230 }, { "epoch": 2.272997584092176, "grad_norm": 0.8556515574455261, "learning_rate": 7.893168701578966e-05, "loss": 1.1067, "step": 12231 }, { "epoch": 2.2731834231555474, "grad_norm": 0.9765307307243347, "learning_rate": 7.891740581515422e-05, "loss": 0.9025, "step": 12232 }, { "epoch": 2.2733692622189183, "grad_norm": 0.7528972029685974, "learning_rate": 7.890312506449053e-05, "loss": 1.1078, "step": 12233 }, { "epoch": 2.2735551012822897, "grad_norm": 0.8102426528930664, "learning_rate": 7.888884476410348e-05, "loss": 0.8538, "step": 12234 }, { "epoch": 2.2737409403456605, "grad_norm": 1.0095503330230713, "learning_rate": 7.88745649142978e-05, "loss": 0.9943, "step": 12235 }, { "epoch": 2.273926779409032, "grad_norm": 0.7305726408958435, "learning_rate": 7.88602855153783e-05, "loss": 0.8046, "step": 12236 }, { "epoch": 2.2741126184724028, "grad_norm": 0.6932523846626282, "learning_rate": 7.884600656764971e-05, "loss": 0.9641, "step": 12237 }, { "epoch": 2.274298457535774, "grad_norm": 0.92442786693573, "learning_rate": 7.883172807141683e-05, "loss": 1.1832, "step": 12238 }, { "epoch": 2.274484296599145, "grad_norm": 1.3693289756774902, "learning_rate": 7.881745002698435e-05, "loss": 1.3648, "step": 12239 }, { "epoch": 2.2746701356625163, "grad_norm": 0.7918336391448975, "learning_rate": 7.880317243465709e-05, "loss": 0.9941, "step": 12240 }, { "epoch": 
2.274855974725887, "grad_norm": 0.8530664443969727, "learning_rate": 7.878889529473969e-05, "loss": 0.7045, "step": 12241 }, { "epoch": 2.2750418137892585, "grad_norm": 0.8111917972564697, "learning_rate": 7.877461860753697e-05, "loss": 0.9872, "step": 12242 }, { "epoch": 2.27522765285263, "grad_norm": 0.7255557775497437, "learning_rate": 7.876034237335352e-05, "loss": 0.9302, "step": 12243 }, { "epoch": 2.2754134919160007, "grad_norm": 1.0161799192428589, "learning_rate": 7.874606659249417e-05, "loss": 0.9581, "step": 12244 }, { "epoch": 2.2755993309793716, "grad_norm": 0.7238139510154724, "learning_rate": 7.873179126526355e-05, "loss": 0.9064, "step": 12245 }, { "epoch": 2.275785170042743, "grad_norm": 0.7794341444969177, "learning_rate": 7.871751639196635e-05, "loss": 0.6845, "step": 12246 }, { "epoch": 2.2759710091061143, "grad_norm": 0.8446567058563232, "learning_rate": 7.870324197290723e-05, "loss": 1.1763, "step": 12247 }, { "epoch": 2.276156848169485, "grad_norm": 0.875368595123291, "learning_rate": 7.868896800839087e-05, "loss": 0.84, "step": 12248 }, { "epoch": 2.2763426872328565, "grad_norm": 0.9106482863426208, "learning_rate": 7.867469449872192e-05, "loss": 1.1002, "step": 12249 }, { "epoch": 2.2765285262962274, "grad_norm": 0.9384245276451111, "learning_rate": 7.866042144420502e-05, "loss": 1.0101, "step": 12250 }, { "epoch": 2.2767143653595987, "grad_norm": 0.748377799987793, "learning_rate": 7.864614884514479e-05, "loss": 0.9531, "step": 12251 }, { "epoch": 2.2769002044229696, "grad_norm": 0.7939218878746033, "learning_rate": 7.86318767018459e-05, "loss": 0.8427, "step": 12252 }, { "epoch": 2.277086043486341, "grad_norm": 0.7586506605148315, "learning_rate": 7.861760501461291e-05, "loss": 0.8141, "step": 12253 }, { "epoch": 2.277271882549712, "grad_norm": 0.9688478112220764, "learning_rate": 7.860333378375041e-05, "loss": 0.7653, "step": 12254 }, { "epoch": 2.277457721613083, "grad_norm": 0.7218331098556519, "learning_rate": 7.858906300956305e-05, 
"loss": 0.9217, "step": 12255 }, { "epoch": 2.277643560676454, "grad_norm": 0.9971310496330261, "learning_rate": 7.857479269235544e-05, "loss": 1.1914, "step": 12256 }, { "epoch": 2.2778293997398253, "grad_norm": 0.9666123986244202, "learning_rate": 7.856052283243209e-05, "loss": 0.898, "step": 12257 }, { "epoch": 2.2780152388031967, "grad_norm": 0.6891591548919678, "learning_rate": 7.854625343009759e-05, "loss": 0.8644, "step": 12258 }, { "epoch": 2.2782010778665676, "grad_norm": 0.6942464113235474, "learning_rate": 7.85319844856565e-05, "loss": 0.8322, "step": 12259 }, { "epoch": 2.2783869169299384, "grad_norm": 0.715089738368988, "learning_rate": 7.851771599941336e-05, "loss": 0.733, "step": 12260 }, { "epoch": 2.2785727559933098, "grad_norm": 0.7570328116416931, "learning_rate": 7.85034479716727e-05, "loss": 1.1289, "step": 12261 }, { "epoch": 2.278758595056681, "grad_norm": 1.1029595136642456, "learning_rate": 7.848918040273907e-05, "loss": 1.0443, "step": 12262 }, { "epoch": 2.278944434120052, "grad_norm": 0.7635982036590576, "learning_rate": 7.847491329291694e-05, "loss": 1.0012, "step": 12263 }, { "epoch": 2.2791302731834233, "grad_norm": 0.8406252264976501, "learning_rate": 7.846064664251082e-05, "loss": 1.0415, "step": 12264 }, { "epoch": 2.279316112246794, "grad_norm": 1.0755679607391357, "learning_rate": 7.844638045182528e-05, "loss": 1.1597, "step": 12265 }, { "epoch": 2.2795019513101655, "grad_norm": 0.808347225189209, "learning_rate": 7.843211472116476e-05, "loss": 0.906, "step": 12266 }, { "epoch": 2.2796877903735364, "grad_norm": 0.9315565228462219, "learning_rate": 7.841784945083373e-05, "loss": 0.7202, "step": 12267 }, { "epoch": 2.2798736294369077, "grad_norm": 0.6871508955955505, "learning_rate": 7.840358464113669e-05, "loss": 0.8551, "step": 12268 }, { "epoch": 2.2800594685002786, "grad_norm": 0.7517482042312622, "learning_rate": 7.838932029237805e-05, "loss": 1.0232, "step": 12269 }, { "epoch": 2.28024530756365, "grad_norm": 
0.8469917178153992, "learning_rate": 7.837505640486232e-05, "loss": 0.9563, "step": 12270 }, { "epoch": 2.280431146627021, "grad_norm": 0.7821633815765381, "learning_rate": 7.836079297889385e-05, "loss": 0.8611, "step": 12271 }, { "epoch": 2.280616985690392, "grad_norm": 0.9212172031402588, "learning_rate": 7.834653001477717e-05, "loss": 0.9973, "step": 12272 }, { "epoch": 2.280802824753763, "grad_norm": 0.7685173153877258, "learning_rate": 7.833226751281661e-05, "loss": 1.1228, "step": 12273 }, { "epoch": 2.2809886638171344, "grad_norm": 0.8296868205070496, "learning_rate": 7.831800547331662e-05, "loss": 1.0455, "step": 12274 }, { "epoch": 2.2811745028805053, "grad_norm": 0.7512198686599731, "learning_rate": 7.830374389658157e-05, "loss": 0.8252, "step": 12275 }, { "epoch": 2.2813603419438766, "grad_norm": 0.7423361539840698, "learning_rate": 7.828948278291592e-05, "loss": 0.8986, "step": 12276 }, { "epoch": 2.281546181007248, "grad_norm": null, "learning_rate": 7.828948278291592e-05, "loss": 1.8963, "step": 12277 }, { "epoch": 2.281732020070619, "grad_norm": 0.7230324149131775, "learning_rate": 7.827522213262399e-05, "loss": 0.8992, "step": 12278 }, { "epoch": 2.28191785913399, "grad_norm": 0.8100959062576294, "learning_rate": 7.826096194601017e-05, "loss": 0.7857, "step": 12279 }, { "epoch": 2.282103698197361, "grad_norm": 0.8222119212150574, "learning_rate": 7.82467022233788e-05, "loss": 0.9166, "step": 12280 }, { "epoch": 2.2822895372607324, "grad_norm": 0.7102981805801392, "learning_rate": 7.823244296503425e-05, "loss": 0.9988, "step": 12281 }, { "epoch": 2.2824753763241032, "grad_norm": 0.8669292330741882, "learning_rate": 7.821818417128084e-05, "loss": 0.9622, "step": 12282 }, { "epoch": 2.2826612153874746, "grad_norm": 0.7663910984992981, "learning_rate": 7.820392584242293e-05, "loss": 0.9651, "step": 12283 }, { "epoch": 2.2828470544508455, "grad_norm": 1.1469777822494507, "learning_rate": 7.81896679787648e-05, "loss": 1.0575, "step": 12284 }, {
"epoch": 2.283032893514217, "grad_norm": 0.7489597201347351, "learning_rate": 7.817541058061077e-05, "loss": 0.857, "step": 12285 }, { "epoch": 2.2832187325775877, "grad_norm": 0.8339454531669617, "learning_rate": 7.816115364826513e-05, "loss": 0.8734, "step": 12286 }, { "epoch": 2.283404571640959, "grad_norm": 0.6416560411453247, "learning_rate": 7.814689718203223e-05, "loss": 0.6614, "step": 12287 }, { "epoch": 2.28359041070433, "grad_norm": 0.7036505937576294, "learning_rate": 7.813264118221629e-05, "loss": 0.8057, "step": 12288 }, { "epoch": 2.283776249767701, "grad_norm": 0.7501203417778015, "learning_rate": 7.811838564912163e-05, "loss": 1.0344, "step": 12289 }, { "epoch": 2.283962088831072, "grad_norm": 0.7263859510421753, "learning_rate": 7.810413058305244e-05, "loss": 1.0007, "step": 12290 }, { "epoch": 2.2841479278944434, "grad_norm": 0.6111660003662109, "learning_rate": 7.808987598431303e-05, "loss": 0.4128, "step": 12291 }, { "epoch": 2.2843337669578148, "grad_norm": 0.762751042842865, "learning_rate": 7.807562185320761e-05, "loss": 1.1027, "step": 12292 }, { "epoch": 2.2845196060211856, "grad_norm": 0.8497124910354614, "learning_rate": 7.806136819004042e-05, "loss": 0.9507, "step": 12293 }, { "epoch": 2.2847054450845565, "grad_norm": 0.8142184019088745, "learning_rate": 7.804711499511568e-05, "loss": 0.7366, "step": 12294 }, { "epoch": 2.284891284147928, "grad_norm": 0.953407347202301, "learning_rate": 7.803286226873759e-05, "loss": 0.8777, "step": 12295 }, { "epoch": 2.285077123211299, "grad_norm": 0.8734500408172607, "learning_rate": 7.801861001121037e-05, "loss": 0.9468, "step": 12296 }, { "epoch": 2.28526296227467, "grad_norm": 0.7192512154579163, "learning_rate": 7.800435822283816e-05, "loss": 1.0027, "step": 12297 }, { "epoch": 2.2854488013380414, "grad_norm": 0.8476136922836304, "learning_rate": 7.79901069039252e-05, "loss": 1.0746, "step": 12298 }, { "epoch": 2.2856346404014123, "grad_norm": 0.7300472259521484, "learning_rate": 
7.797585605477566e-05, "loss": 0.9394, "step": 12299 }, { "epoch": 2.2858204794647836, "grad_norm": 0.8123537302017212, "learning_rate": 7.796160567569365e-05, "loss": 0.749, "step": 12300 }, { "epoch": 2.2860063185281545, "grad_norm": 0.8489190340042114, "learning_rate": 7.794735576698338e-05, "loss": 0.9906, "step": 12301 }, { "epoch": 2.286192157591526, "grad_norm": 0.8736398816108704, "learning_rate": 7.793310632894893e-05, "loss": 1.219, "step": 12302 }, { "epoch": 2.2863779966548967, "grad_norm": 0.8092479109764099, "learning_rate": 7.791885736189447e-05, "loss": 1.19, "step": 12303 }, { "epoch": 2.286563835718268, "grad_norm": 0.9571067690849304, "learning_rate": 7.79046088661241e-05, "loss": 1.0245, "step": 12304 }, { "epoch": 2.286749674781639, "grad_norm": 0.7240368127822876, "learning_rate": 7.789036084194193e-05, "loss": 0.9194, "step": 12305 }, { "epoch": 2.2869355138450103, "grad_norm": 0.7796837091445923, "learning_rate": 7.787611328965207e-05, "loss": 0.9709, "step": 12306 }, { "epoch": 2.2871213529083816, "grad_norm": 0.7277730703353882, "learning_rate": 7.78618662095586e-05, "loss": 0.8694, "step": 12307 }, { "epoch": 2.2873071919717525, "grad_norm": 1.0038048028945923, "learning_rate": 7.78476196019656e-05, "loss": 1.2055, "step": 12308 }, { "epoch": 2.2874930310351234, "grad_norm": 0.7962849736213684, "learning_rate": 7.783337346717717e-05, "loss": 0.8344, "step": 12309 }, { "epoch": 2.2876788700984947, "grad_norm": 0.7871695756912231, "learning_rate": 7.781912780549733e-05, "loss": 0.9471, "step": 12310 }, { "epoch": 2.287864709161866, "grad_norm": 0.8273323774337769, "learning_rate": 7.780488261723016e-05, "loss": 0.8913, "step": 12311 }, { "epoch": 2.288050548225237, "grad_norm": 1.6526739597320557, "learning_rate": 7.779063790267965e-05, "loss": 1.4423, "step": 12312 }, { "epoch": 2.2882363872886082, "grad_norm": 0.6899594068527222, "learning_rate": 7.777639366214991e-05, "loss": 0.8627, "step": 12313 }, { "epoch": 2.288422226351979, 
"grad_norm": 0.8037352561950684, "learning_rate": 7.776214989594486e-05, "loss": 0.7692, "step": 12314 }, { "epoch": 2.2886080654153504, "grad_norm": 0.7869132161140442, "learning_rate": 7.774790660436858e-05, "loss": 0.9398, "step": 12315 }, { "epoch": 2.2887939044787213, "grad_norm": 0.7735188603401184, "learning_rate": 7.773366378772506e-05, "loss": 0.7763, "step": 12316 }, { "epoch": 2.2889797435420927, "grad_norm": 0.6020045876502991, "learning_rate": 7.771942144631824e-05, "loss": 0.619, "step": 12317 }, { "epoch": 2.2891655826054635, "grad_norm": 0.6839711666107178, "learning_rate": 7.770517958045213e-05, "loss": 0.8986, "step": 12318 }, { "epoch": 2.289351421668835, "grad_norm": 0.9535199999809265, "learning_rate": 7.769093819043074e-05, "loss": 0.8784, "step": 12319 }, { "epoch": 2.2895372607322058, "grad_norm": 0.8104950189590454, "learning_rate": 7.767669727655798e-05, "loss": 0.8394, "step": 12320 }, { "epoch": 2.289723099795577, "grad_norm": 0.7972855567932129, "learning_rate": 7.766245683913781e-05, "loss": 0.7895, "step": 12321 }, { "epoch": 2.289908938858948, "grad_norm": 0.8431624174118042, "learning_rate": 7.764821687847415e-05, "loss": 0.8213, "step": 12322 }, { "epoch": 2.2900947779223193, "grad_norm": 0.7957172393798828, "learning_rate": 7.763397739487098e-05, "loss": 0.8179, "step": 12323 }, { "epoch": 2.29028061698569, "grad_norm": 0.707563579082489, "learning_rate": 7.761973838863214e-05, "loss": 0.6829, "step": 12324 }, { "epoch": 2.2904664560490615, "grad_norm": 0.6948530077934265, "learning_rate": 7.760549986006159e-05, "loss": 0.649, "step": 12325 }, { "epoch": 2.290652295112433, "grad_norm": 0.7895127534866333, "learning_rate": 7.759126180946324e-05, "loss": 0.7972, "step": 12326 }, { "epoch": 2.2908381341758037, "grad_norm": 0.7186713218688965, "learning_rate": 7.757702423714093e-05, "loss": 0.8915, "step": 12327 }, { "epoch": 2.291023973239175, "grad_norm": 0.7776643633842468, "learning_rate": 7.756278714339852e-05, "loss": 0.7304, 
"step": 12328 }, { "epoch": 2.291209812302546, "grad_norm": 0.893409013748169, "learning_rate": 7.754855052853997e-05, "loss": 0.8193, "step": 12329 }, { "epoch": 2.2913956513659173, "grad_norm": 0.7297701239585876, "learning_rate": 7.753431439286906e-05, "loss": 0.8881, "step": 12330 }, { "epoch": 2.291581490429288, "grad_norm": 0.910232663154602, "learning_rate": 7.752007873668967e-05, "loss": 1.1218, "step": 12331 }, { "epoch": 2.2917673294926595, "grad_norm": 0.8789218664169312, "learning_rate": 7.750584356030562e-05, "loss": 0.9486, "step": 12332 }, { "epoch": 2.2919531685560304, "grad_norm": 0.7561124563217163, "learning_rate": 7.749160886402073e-05, "loss": 1.1138, "step": 12333 }, { "epoch": 2.2921390076194017, "grad_norm": 0.7757100462913513, "learning_rate": 7.747737464813882e-05, "loss": 0.9899, "step": 12334 }, { "epoch": 2.2923248466827726, "grad_norm": 0.8278506994247437, "learning_rate": 7.74631409129637e-05, "loss": 0.5483, "step": 12335 }, { "epoch": 2.292510685746144, "grad_norm": 0.8181744813919067, "learning_rate": 7.744890765879918e-05, "loss": 0.9717, "step": 12336 }, { "epoch": 2.292696524809515, "grad_norm": 0.9865650534629822, "learning_rate": 7.7434674885949e-05, "loss": 1.0432, "step": 12337 }, { "epoch": 2.292882363872886, "grad_norm": 0.6678515076637268, "learning_rate": 7.7420442594717e-05, "loss": 0.6523, "step": 12338 }, { "epoch": 2.293068202936257, "grad_norm": 0.81447434425354, "learning_rate": 7.740621078540684e-05, "loss": 0.9622, "step": 12339 }, { "epoch": 2.2932540419996283, "grad_norm": 0.8288367986679077, "learning_rate": 7.739197945832235e-05, "loss": 0.9989, "step": 12340 }, { "epoch": 2.2934398810629997, "grad_norm": 0.7778246998786926, "learning_rate": 7.737774861376732e-05, "loss": 0.8636, "step": 12341 }, { "epoch": 2.2936257201263706, "grad_norm": 1.3834868669509888, "learning_rate": 7.736351825204537e-05, "loss": 1.2447, "step": 12342 }, { "epoch": 2.2938115591897414, "grad_norm": 0.9833600521087646, 
"learning_rate": 7.734928837346033e-05, "loss": 1.0867, "step": 12343 }, { "epoch": 2.2939973982531128, "grad_norm": 0.941429853439331, "learning_rate": 7.73350589783158e-05, "loss": 0.7508, "step": 12344 }, { "epoch": 2.294183237316484, "grad_norm": 0.9358783960342407, "learning_rate": 7.732083006691558e-05, "loss": 1.2586, "step": 12345 }, { "epoch": 2.294369076379855, "grad_norm": 0.6414960026741028, "learning_rate": 7.730660163956333e-05, "loss": 0.8383, "step": 12346 }, { "epoch": 2.2945549154432263, "grad_norm": 1.0815060138702393, "learning_rate": 7.729237369656269e-05, "loss": 1.139, "step": 12347 }, { "epoch": 2.294740754506597, "grad_norm": 0.8384090065956116, "learning_rate": 7.727814623821739e-05, "loss": 0.8974, "step": 12348 }, { "epoch": 2.2949265935699685, "grad_norm": 0.9051550626754761, "learning_rate": 7.726391926483104e-05, "loss": 1.0349, "step": 12349 }, { "epoch": 2.2951124326333394, "grad_norm": 0.8306860327720642, "learning_rate": 7.724969277670733e-05, "loss": 0.91, "step": 12350 }, { "epoch": 2.2952982716967107, "grad_norm": 0.7919532060623169, "learning_rate": 7.72354667741499e-05, "loss": 0.7608, "step": 12351 }, { "epoch": 2.2954841107600816, "grad_norm": 0.6512516736984253, "learning_rate": 7.722124125746236e-05, "loss": 0.6876, "step": 12352 }, { "epoch": 2.295669949823453, "grad_norm": 0.7694550156593323, "learning_rate": 7.720701622694834e-05, "loss": 0.9659, "step": 12353 }, { "epoch": 2.295855788886824, "grad_norm": 0.9084151983261108, "learning_rate": 7.719279168291144e-05, "loss": 0.9069, "step": 12354 }, { "epoch": 2.296041627950195, "grad_norm": 0.8145677447319031, "learning_rate": 7.717856762565524e-05, "loss": 0.9747, "step": 12355 }, { "epoch": 2.296227467013566, "grad_norm": 0.7414378523826599, "learning_rate": 7.71643440554834e-05, "loss": 0.9986, "step": 12356 }, { "epoch": 2.2964133060769374, "grad_norm": 0.7389624118804932, "learning_rate": 7.715012097269942e-05, "loss": 0.9096, "step": 12357 }, { "epoch": 
2.2965991451403083, "grad_norm": 1.1509134769439697, "learning_rate": 7.713589837760691e-05, "loss": 1.1409, "step": 12358 }, { "epoch": 2.2967849842036796, "grad_norm": 0.8112145066261292, "learning_rate": 7.712167627050938e-05, "loss": 1.1119, "step": 12359 }, { "epoch": 2.296970823267051, "grad_norm": 0.8990470170974731, "learning_rate": 7.710745465171041e-05, "loss": 1.0636, "step": 12360 }, { "epoch": 2.297156662330422, "grad_norm": 0.7694247364997864, "learning_rate": 7.709323352151357e-05, "loss": 0.8278, "step": 12361 }, { "epoch": 2.297342501393793, "grad_norm": 1.0254145860671997, "learning_rate": 7.707901288022232e-05, "loss": 1.1192, "step": 12362 }, { "epoch": 2.297528340457164, "grad_norm": 0.7462365627288818, "learning_rate": 7.706479272814023e-05, "loss": 1.0382, "step": 12363 }, { "epoch": 2.2977141795205354, "grad_norm": 0.9076231122016907, "learning_rate": 7.705057306557076e-05, "loss": 0.9178, "step": 12364 }, { "epoch": 2.2979000185839062, "grad_norm": 0.8459599614143372, "learning_rate": 7.703635389281743e-05, "loss": 0.9922, "step": 12365 }, { "epoch": 2.2980858576472776, "grad_norm": 0.6892786026000977, "learning_rate": 7.702213521018374e-05, "loss": 0.7726, "step": 12366 }, { "epoch": 2.2982716967106485, "grad_norm": 1.467746615409851, "learning_rate": 7.700791701797313e-05, "loss": 1.4936, "step": 12367 }, { "epoch": 2.29845753577402, "grad_norm": 0.7365753054618835, "learning_rate": 7.699369931648909e-05, "loss": 0.9974, "step": 12368 }, { "epoch": 2.2986433748373907, "grad_norm": 0.8061040043830872, "learning_rate": 7.697948210603503e-05, "loss": 0.9074, "step": 12369 }, { "epoch": 2.298829213900762, "grad_norm": 0.7725257277488708, "learning_rate": 7.69652653869144e-05, "loss": 0.9085, "step": 12370 }, { "epoch": 2.299015052964133, "grad_norm": 0.8355392217636108, "learning_rate": 7.695104915943069e-05, "loss": 0.9589, "step": 12371 }, { "epoch": 2.299200892027504, "grad_norm": 0.7052400708198547, "learning_rate": 7.693683342388727e-05, 
"loss": 0.791, "step": 12372 }, { "epoch": 2.299386731090875, "grad_norm": 0.6995886564254761, "learning_rate": 7.692261818058758e-05, "loss": 0.6648, "step": 12373 }, { "epoch": 2.2995725701542464, "grad_norm": 1.0227339267730713, "learning_rate": 7.690840342983498e-05, "loss": 1.1262, "step": 12374 }, { "epoch": 2.2997584092176178, "grad_norm": 0.8556922078132629, "learning_rate": 7.689418917193289e-05, "loss": 0.8543, "step": 12375 }, { "epoch": 2.2999442482809886, "grad_norm": 0.8244157433509827, "learning_rate": 7.68799754071847e-05, "loss": 0.7228, "step": 12376 }, { "epoch": 2.30013008734436, "grad_norm": 0.8170839548110962, "learning_rate": 7.686576213589373e-05, "loss": 1.1313, "step": 12377 }, { "epoch": 2.300315926407731, "grad_norm": 0.8408236503601074, "learning_rate": 7.685154935836339e-05, "loss": 0.9342, "step": 12378 }, { "epoch": 2.300501765471102, "grad_norm": 0.75736004114151, "learning_rate": 7.683733707489699e-05, "loss": 0.7792, "step": 12379 }, { "epoch": 2.300687604534473, "grad_norm": 0.7825633883476257, "learning_rate": 7.68231252857979e-05, "loss": 1.0248, "step": 12380 }, { "epoch": 2.3008734435978444, "grad_norm": 0.7606642842292786, "learning_rate": 7.680891399136938e-05, "loss": 0.9121, "step": 12381 }, { "epoch": 2.3010592826612153, "grad_norm": 0.7969626784324646, "learning_rate": 7.679470319191482e-05, "loss": 1.0324, "step": 12382 }, { "epoch": 2.3012451217245866, "grad_norm": 0.857466459274292, "learning_rate": 7.678049288773752e-05, "loss": 1.1447, "step": 12383 }, { "epoch": 2.3014309607879575, "grad_norm": 0.757928192615509, "learning_rate": 7.676628307914074e-05, "loss": 0.7483, "step": 12384 }, { "epoch": 2.301616799851329, "grad_norm": 0.7650940418243408, "learning_rate": 7.675207376642775e-05, "loss": 0.96, "step": 12385 }, { "epoch": 2.3018026389146997, "grad_norm": 0.8665147423744202, "learning_rate": 7.67378649499019e-05, "loss": 0.8273, "step": 12386 }, { "epoch": 2.301988477978071, "grad_norm": 0.8380240797996521, 
"learning_rate": 7.672365662986636e-05, "loss": 0.763, "step": 12387 }, { "epoch": 2.302174317041442, "grad_norm": 0.7871870398521423, "learning_rate": 7.670944880662446e-05, "loss": 0.8913, "step": 12388 }, { "epoch": 2.3023601561048133, "grad_norm": 0.8932545781135559, "learning_rate": 7.669524148047938e-05, "loss": 0.9449, "step": 12389 }, { "epoch": 2.3025459951681846, "grad_norm": 0.8677648901939392, "learning_rate": 7.668103465173441e-05, "loss": 1.0888, "step": 12390 }, { "epoch": 2.3027318342315555, "grad_norm": 0.8020843267440796, "learning_rate": 7.666682832069269e-05, "loss": 1.0817, "step": 12391 }, { "epoch": 2.3029176732949264, "grad_norm": 0.8647310733795166, "learning_rate": 7.665262248765749e-05, "loss": 0.8104, "step": 12392 }, { "epoch": 2.3031035123582977, "grad_norm": 0.775503396987915, "learning_rate": 7.663841715293202e-05, "loss": 0.9919, "step": 12393 }, { "epoch": 2.303289351421669, "grad_norm": 0.7088800072669983, "learning_rate": 7.662421231681944e-05, "loss": 0.7559, "step": 12394 }, { "epoch": 2.30347519048504, "grad_norm": 0.8725742101669312, "learning_rate": 7.661000797962292e-05, "loss": 1.0528, "step": 12395 }, { "epoch": 2.3036610295484112, "grad_norm": 0.889268159866333, "learning_rate": 7.659580414164567e-05, "loss": 1.1647, "step": 12396 }, { "epoch": 2.303846868611782, "grad_norm": 0.7021028399467468, "learning_rate": 7.658160080319078e-05, "loss": 0.7145, "step": 12397 }, { "epoch": 2.3040327076751534, "grad_norm": 0.835035502910614, "learning_rate": 7.656739796456148e-05, "loss": 0.7709, "step": 12398 }, { "epoch": 2.3042185467385243, "grad_norm": 0.8327295184135437, "learning_rate": 7.655319562606083e-05, "loss": 0.8802, "step": 12399 }, { "epoch": 2.3044043858018957, "grad_norm": 0.7869976162910461, "learning_rate": 7.6538993787992e-05, "loss": 1.0701, "step": 12400 }, { "epoch": 2.3045902248652665, "grad_norm": 0.762888491153717, "learning_rate": 7.652479245065806e-05, "loss": 0.9929, "step": 12401 }, { "epoch": 
2.304776063928638, "grad_norm": 0.8556522727012634, "learning_rate": 7.651059161436211e-05, "loss": 0.9351, "step": 12402 }, { "epoch": 2.3049619029920088, "grad_norm": 0.844419538974762, "learning_rate": 7.649639127940735e-05, "loss": 0.8421, "step": 12403 }, { "epoch": 2.30514774205538, "grad_norm": 3.3211510181427, "learning_rate": 7.648219144609673e-05, "loss": 1.0601, "step": 12404 }, { "epoch": 2.305333581118751, "grad_norm": 0.7639872431755066, "learning_rate": 7.646799211473338e-05, "loss": 0.9178, "step": 12405 }, { "epoch": 2.3055194201821223, "grad_norm": 0.8685746788978577, "learning_rate": 7.645379328562039e-05, "loss": 0.712, "step": 12406 }, { "epoch": 2.305705259245493, "grad_norm": 0.7220452427864075, "learning_rate": 7.643959495906074e-05, "loss": 0.806, "step": 12407 }, { "epoch": 2.3058910983088645, "grad_norm": 0.6752744317054749, "learning_rate": 7.642539713535755e-05, "loss": 0.8161, "step": 12408 }, { "epoch": 2.306076937372236, "grad_norm": 0.7289901971817017, "learning_rate": 7.641119981481377e-05, "loss": 0.8479, "step": 12409 }, { "epoch": 2.3062627764356067, "grad_norm": 0.6853960752487183, "learning_rate": 7.639700299773245e-05, "loss": 0.6681, "step": 12410 }, { "epoch": 2.306448615498978, "grad_norm": 0.7992112636566162, "learning_rate": 7.63828066844166e-05, "loss": 0.8624, "step": 12411 }, { "epoch": 2.306634454562349, "grad_norm": 0.7620840668678284, "learning_rate": 7.636861087516917e-05, "loss": 1.0049, "step": 12412 }, { "epoch": 2.3068202936257203, "grad_norm": 0.7466810345649719, "learning_rate": 7.635441557029325e-05, "loss": 0.8561, "step": 12413 }, { "epoch": 2.307006132689091, "grad_norm": 0.9495031833648682, "learning_rate": 7.634022077009173e-05, "loss": 1.1464, "step": 12414 }, { "epoch": 2.3071919717524625, "grad_norm": 0.8608010411262512, "learning_rate": 7.632602647486759e-05, "loss": 1.1267, "step": 12415 }, { "epoch": 2.3073778108158334, "grad_norm": 0.8829839825630188, "learning_rate": 7.63118326849238e-05, 
"loss": 1.0529, "step": 12416 }, { "epoch": 2.3075636498792047, "grad_norm": 0.6954786777496338, "learning_rate": 7.629763940056328e-05, "loss": 0.8939, "step": 12417 }, { "epoch": 2.3077494889425756, "grad_norm": 0.7862223386764526, "learning_rate": 7.628344662208899e-05, "loss": 0.8202, "step": 12418 }, { "epoch": 2.307935328005947, "grad_norm": 0.6856497526168823, "learning_rate": 7.626925434980382e-05, "loss": 0.7252, "step": 12419 }, { "epoch": 2.308121167069318, "grad_norm": 0.8154091835021973, "learning_rate": 7.625506258401071e-05, "loss": 0.8622, "step": 12420 }, { "epoch": 2.308307006132689, "grad_norm": 0.8779199719429016, "learning_rate": 7.624087132501252e-05, "loss": 1.0541, "step": 12421 }, { "epoch": 2.30849284519606, "grad_norm": 0.810685932636261, "learning_rate": 7.622668057311217e-05, "loss": 0.9654, "step": 12422 }, { "epoch": 2.3086786842594313, "grad_norm": 0.8009283542633057, "learning_rate": 7.621249032861249e-05, "loss": 1.0143, "step": 12423 }, { "epoch": 2.3088645233228027, "grad_norm": 0.7413025498390198, "learning_rate": 7.61983005918164e-05, "loss": 0.9063, "step": 12424 }, { "epoch": 2.3090503623861736, "grad_norm": 0.7271872162818909, "learning_rate": 7.618411136302675e-05, "loss": 0.7407, "step": 12425 }, { "epoch": 2.3092362014495444, "grad_norm": 0.972961962223053, "learning_rate": 7.61699226425464e-05, "loss": 0.9242, "step": 12426 }, { "epoch": 2.3094220405129158, "grad_norm": 0.756356954574585, "learning_rate": 7.615573443067812e-05, "loss": 0.9317, "step": 12427 }, { "epoch": 2.309607879576287, "grad_norm": 0.898516058921814, "learning_rate": 7.61415467277248e-05, "loss": 0.9378, "step": 12428 }, { "epoch": 2.309793718639658, "grad_norm": 1.0278419256210327, "learning_rate": 7.61273595339892e-05, "loss": 0.843, "step": 12429 }, { "epoch": 2.3099795577030293, "grad_norm": 0.9227488040924072, "learning_rate": 7.611317284977415e-05, "loss": 1.0285, "step": 12430 }, { "epoch": 2.3101653967664, "grad_norm": 0.8172624707221985, 
"learning_rate": 7.609898667538243e-05, "loss": 0.9617, "step": 12431 }, { "epoch": 2.3103512358297715, "grad_norm": 0.879382848739624, "learning_rate": 7.608480101111685e-05, "loss": 0.9802, "step": 12432 }, { "epoch": 2.3105370748931424, "grad_norm": 0.7681975364685059, "learning_rate": 7.60706158572801e-05, "loss": 0.9792, "step": 12433 }, { "epoch": 2.3107229139565137, "grad_norm": 0.8928554058074951, "learning_rate": 7.605643121417501e-05, "loss": 0.9939, "step": 12434 }, { "epoch": 2.3109087530198846, "grad_norm": 0.7671574950218201, "learning_rate": 7.60422470821043e-05, "loss": 0.8958, "step": 12435 }, { "epoch": 2.311094592083256, "grad_norm": 0.9336884021759033, "learning_rate": 7.602806346137075e-05, "loss": 0.7966, "step": 12436 }, { "epoch": 2.311280431146627, "grad_norm": 0.8644281625747681, "learning_rate": 7.601388035227702e-05, "loss": 1.1457, "step": 12437 }, { "epoch": 2.311466270209998, "grad_norm": 0.7973626852035522, "learning_rate": 7.599969775512587e-05, "loss": 0.9548, "step": 12438 }, { "epoch": 2.3116521092733695, "grad_norm": 1.0643774271011353, "learning_rate": 7.598551567021997e-05, "loss": 0.9208, "step": 12439 }, { "epoch": 2.3118379483367404, "grad_norm": 0.8160991668701172, "learning_rate": 7.597133409786203e-05, "loss": 0.8189, "step": 12440 }, { "epoch": 2.3120237874001113, "grad_norm": 0.7943512201309204, "learning_rate": 7.595715303835474e-05, "loss": 0.8455, "step": 12441 }, { "epoch": 2.3122096264634826, "grad_norm": 0.7578389048576355, "learning_rate": 7.594297249200076e-05, "loss": 0.933, "step": 12442 }, { "epoch": 2.312395465526854, "grad_norm": 0.7750895023345947, "learning_rate": 7.592879245910273e-05, "loss": 0.8472, "step": 12443 }, { "epoch": 2.312581304590225, "grad_norm": 0.81695556640625, "learning_rate": 7.591461293996329e-05, "loss": 0.9109, "step": 12444 }, { "epoch": 2.312767143653596, "grad_norm": 0.6965035796165466, "learning_rate": 7.590043393488512e-05, "loss": 0.9366, "step": 12445 }, { "epoch": 
2.312952982716967, "grad_norm": 0.7417106032371521, "learning_rate": 7.588625544417087e-05, "loss": 0.7527, "step": 12446 }, { "epoch": 2.3131388217803384, "grad_norm": 0.8987021446228027, "learning_rate": 7.58720774681231e-05, "loss": 0.801, "step": 12447 }, { "epoch": 2.3133246608437092, "grad_norm": 0.8437244296073914, "learning_rate": 7.585790000704445e-05, "loss": 0.8314, "step": 12448 }, { "epoch": 2.3135104999070806, "grad_norm": 0.8858640789985657, "learning_rate": 7.584372306123746e-05, "loss": 0.9603, "step": 12449 }, { "epoch": 2.3136963389704515, "grad_norm": 0.8065061569213867, "learning_rate": 7.582954663100477e-05, "loss": 0.9499, "step": 12450 }, { "epoch": 2.313882178033823, "grad_norm": 0.7675348520278931, "learning_rate": 7.581537071664891e-05, "loss": 0.9969, "step": 12451 }, { "epoch": 2.3140680170971937, "grad_norm": 0.8798878192901611, "learning_rate": 7.58011953184725e-05, "loss": 0.9539, "step": 12452 }, { "epoch": 2.314253856160565, "grad_norm": 0.8737105131149292, "learning_rate": 7.5787020436778e-05, "loss": 0.9449, "step": 12453 }, { "epoch": 2.314439695223936, "grad_norm": 0.7421537041664124, "learning_rate": 7.577284607186798e-05, "loss": 0.9947, "step": 12454 }, { "epoch": 2.314625534287307, "grad_norm": 0.8042177557945251, "learning_rate": 7.575867222404503e-05, "loss": 0.9808, "step": 12455 }, { "epoch": 2.314811373350678, "grad_norm": 0.7402068376541138, "learning_rate": 7.574449889361162e-05, "loss": 0.7764, "step": 12456 }, { "epoch": 2.3149972124140494, "grad_norm": 0.8809178471565247, "learning_rate": 7.573032608087024e-05, "loss": 0.9017, "step": 12457 }, { "epoch": 2.3151830514774208, "grad_norm": 0.9781234264373779, "learning_rate": 7.571615378612342e-05, "loss": 1.0225, "step": 12458 }, { "epoch": 2.3153688905407916, "grad_norm": 0.7336781024932861, "learning_rate": 7.570198200967362e-05, "loss": 0.7536, "step": 12459 }, { "epoch": 2.315554729604163, "grad_norm": 0.8988581895828247, "learning_rate": 7.568781075182332e-05, 
"loss": 1.04, "step": 12460 }, { "epoch": 2.315740568667534, "grad_norm": 0.9049788117408752, "learning_rate": 7.567364001287496e-05, "loss": 0.8962, "step": 12461 }, { "epoch": 2.315926407730905, "grad_norm": 0.9063324928283691, "learning_rate": 7.565946979313105e-05, "loss": 0.8652, "step": 12462 }, { "epoch": 2.316112246794276, "grad_norm": 1.0230926275253296, "learning_rate": 7.564530009289395e-05, "loss": 0.8893, "step": 12463 }, { "epoch": 2.3162980858576474, "grad_norm": 0.8703257441520691, "learning_rate": 7.563113091246614e-05, "loss": 1.0691, "step": 12464 }, { "epoch": 2.3164839249210183, "grad_norm": 0.8154070377349854, "learning_rate": 7.561696225214995e-05, "loss": 0.8211, "step": 12465 }, { "epoch": 2.3166697639843896, "grad_norm": 0.68623948097229, "learning_rate": 7.560279411224795e-05, "loss": 0.5873, "step": 12466 }, { "epoch": 2.3168556030477605, "grad_norm": 0.8234708309173584, "learning_rate": 7.558862649306239e-05, "loss": 0.9179, "step": 12467 }, { "epoch": 2.317041442111132, "grad_norm": 0.8765814900398254, "learning_rate": 7.557445939489576e-05, "loss": 1.0854, "step": 12468 }, { "epoch": 2.3172272811745027, "grad_norm": 0.8760924935340881, "learning_rate": 7.556029281805034e-05, "loss": 1.1933, "step": 12469 }, { "epoch": 2.317413120237874, "grad_norm": 0.7699716687202454, "learning_rate": 7.554612676282854e-05, "loss": 0.854, "step": 12470 }, { "epoch": 2.317598959301245, "grad_norm": 0.7311826348304749, "learning_rate": 7.55319612295327e-05, "loss": 0.9411, "step": 12471 }, { "epoch": 2.3177847983646163, "grad_norm": 0.7913200855255127, "learning_rate": 7.551779621846517e-05, "loss": 1.0598, "step": 12472 }, { "epoch": 2.3179706374279876, "grad_norm": 0.6416594386100769, "learning_rate": 7.550363172992824e-05, "loss": 0.7361, "step": 12473 }, { "epoch": 2.3181564764913585, "grad_norm": 0.8257509469985962, "learning_rate": 7.548946776422428e-05, "loss": 1.0523, "step": 12474 }, { "epoch": 2.3183423155547294, "grad_norm": 
0.9835213422775269, "learning_rate": 7.54753043216555e-05, "loss": 0.8885, "step": 12475 }, { "epoch": 2.3185281546181007, "grad_norm": 1.0134283304214478, "learning_rate": 7.546114140252433e-05, "loss": 0.9571, "step": 12476 }, { "epoch": 2.318713993681472, "grad_norm": 0.855441153049469, "learning_rate": 7.544697900713298e-05, "loss": 0.8827, "step": 12477 }, { "epoch": 2.318899832744843, "grad_norm": 0.8818331956863403, "learning_rate": 7.543281713578374e-05, "loss": 1.0948, "step": 12478 }, { "epoch": 2.3190856718082142, "grad_norm": 1.4963585138320923, "learning_rate": 7.541865578877882e-05, "loss": 1.4983, "step": 12479 }, { "epoch": 2.319271510871585, "grad_norm": 0.7784662246704102, "learning_rate": 7.540449496642056e-05, "loss": 0.9539, "step": 12480 }, { "epoch": 2.3194573499349564, "grad_norm": 0.7249751091003418, "learning_rate": 7.539033466901111e-05, "loss": 0.9508, "step": 12481 }, { "epoch": 2.3196431889983273, "grad_norm": 0.8944977521896362, "learning_rate": 7.537617489685276e-05, "loss": 0.912, "step": 12482 }, { "epoch": 2.3198290280616987, "grad_norm": 0.7689292430877686, "learning_rate": 7.536201565024767e-05, "loss": 0.8769, "step": 12483 }, { "epoch": 2.3200148671250695, "grad_norm": 0.9170524477958679, "learning_rate": 7.53478569294981e-05, "loss": 0.7936, "step": 12484 }, { "epoch": 2.320200706188441, "grad_norm": 0.7663047313690186, "learning_rate": 7.533369873490622e-05, "loss": 0.7094, "step": 12485 }, { "epoch": 2.3203865452518118, "grad_norm": 0.9506276845932007, "learning_rate": 7.531954106677416e-05, "loss": 0.7563, "step": 12486 }, { "epoch": 2.320572384315183, "grad_norm": 0.8071599006652832, "learning_rate": 7.530538392540418e-05, "loss": 0.8353, "step": 12487 }, { "epoch": 2.3207582233785544, "grad_norm": 0.87018883228302, "learning_rate": 7.52912273110984e-05, "loss": 1.0631, "step": 12488 }, { "epoch": 2.3209440624419253, "grad_norm": 0.9310821890830994, "learning_rate": 7.527707122415898e-05, "loss": 0.9587, "step": 12489 }, 
{ "epoch": 2.321129901505296, "grad_norm": 0.8222516775131226, "learning_rate": 7.526291566488805e-05, "loss": 1.0424, "step": 12490 }, { "epoch": 2.3213157405686675, "grad_norm": 0.9288331270217896, "learning_rate": 7.524876063358773e-05, "loss": 0.7647, "step": 12491 }, { "epoch": 2.321501579632039, "grad_norm": 0.8477649688720703, "learning_rate": 7.523460613056015e-05, "loss": 0.8382, "step": 12492 }, { "epoch": 2.3216874186954097, "grad_norm": 0.9424648880958557, "learning_rate": 7.522045215610738e-05, "loss": 1.1071, "step": 12493 }, { "epoch": 2.321873257758781, "grad_norm": 0.7217926979064941, "learning_rate": 7.520629871053157e-05, "loss": 0.7972, "step": 12494 }, { "epoch": 2.322059096822152, "grad_norm": 0.7429036498069763, "learning_rate": 7.519214579413472e-05, "loss": 0.8802, "step": 12495 }, { "epoch": 2.3222449358855233, "grad_norm": 1.660508394241333, "learning_rate": 7.517799340721895e-05, "loss": 1.2279, "step": 12496 }, { "epoch": 2.322430774948894, "grad_norm": 0.7576091885566711, "learning_rate": 7.516384155008633e-05, "loss": 0.8146, "step": 12497 }, { "epoch": 2.3226166140122655, "grad_norm": 0.8176612257957458, "learning_rate": 7.51496902230389e-05, "loss": 0.889, "step": 12498 }, { "epoch": 2.3228024530756364, "grad_norm": 0.8361815810203552, "learning_rate": 7.513553942637868e-05, "loss": 0.8844, "step": 12499 }, { "epoch": 2.3229882921390077, "grad_norm": 0.8504658937454224, "learning_rate": 7.512138916040772e-05, "loss": 0.73, "step": 12500 }, { "epoch": 2.3231741312023786, "grad_norm": 0.8520758152008057, "learning_rate": 7.5107239425428e-05, "loss": 0.8937, "step": 12501 }, { "epoch": 2.32335997026575, "grad_norm": 0.8601714968681335, "learning_rate": 7.509309022174153e-05, "loss": 0.8883, "step": 12502 }, { "epoch": 2.323545809329121, "grad_norm": 0.8504960536956787, "learning_rate": 7.507894154965031e-05, "loss": 0.9509, "step": 12503 }, { "epoch": 2.323731648392492, "grad_norm": 0.7272476553916931, "learning_rate": 
7.506479340945634e-05, "loss": 0.9195, "step": 12504 }, { "epoch": 2.323917487455863, "grad_norm": 0.834908127784729, "learning_rate": 7.505064580146154e-05, "loss": 0.8367, "step": 12505 }, { "epoch": 2.3241033265192343, "grad_norm": 0.7319338917732239, "learning_rate": 7.503649872596789e-05, "loss": 0.8302, "step": 12506 }, { "epoch": 2.3242891655826057, "grad_norm": 0.7511029243469238, "learning_rate": 7.502235218327731e-05, "loss": 0.7277, "step": 12507 }, { "epoch": 2.3244750046459766, "grad_norm": 0.8672081232070923, "learning_rate": 7.500820617369179e-05, "loss": 0.9919, "step": 12508 }, { "epoch": 2.324660843709348, "grad_norm": 0.742408812046051, "learning_rate": 7.499406069751323e-05, "loss": 0.8629, "step": 12509 }, { "epoch": 2.3248466827727188, "grad_norm": 1.0467314720153809, "learning_rate": 7.497991575504353e-05, "loss": 1.0455, "step": 12510 }, { "epoch": 2.32503252183609, "grad_norm": 0.8941596746444702, "learning_rate": 7.496577134658458e-05, "loss": 1.0436, "step": 12511 }, { "epoch": 2.325218360899461, "grad_norm": 0.8611307144165039, "learning_rate": 7.495162747243829e-05, "loss": 0.9668, "step": 12512 }, { "epoch": 2.3254041999628323, "grad_norm": 0.651538074016571, "learning_rate": 7.493748413290653e-05, "loss": 0.8183, "step": 12513 }, { "epoch": 2.325590039026203, "grad_norm": 0.9411389827728271, "learning_rate": 7.492334132829115e-05, "loss": 0.8401, "step": 12514 }, { "epoch": 2.3257758780895745, "grad_norm": 0.7687920928001404, "learning_rate": 7.490919905889403e-05, "loss": 0.9558, "step": 12515 }, { "epoch": 2.3259617171529454, "grad_norm": 0.8409010767936707, "learning_rate": 7.489505732501699e-05, "loss": 1.0783, "step": 12516 }, { "epoch": 2.3261475562163167, "grad_norm": 0.8515502214431763, "learning_rate": 7.488091612696182e-05, "loss": 0.9425, "step": 12517 }, { "epoch": 2.3263333952796876, "grad_norm": 0.7506030797958374, "learning_rate": 7.486677546503044e-05, "loss": 0.8782, "step": 12518 }, { "epoch": 2.326519234343059, 
"grad_norm": 0.6447235345840454, "learning_rate": 7.485263533952461e-05, "loss": 0.7478, "step": 12519 }, { "epoch": 2.32670507340643, "grad_norm": 0.8545085191726685, "learning_rate": 7.483849575074612e-05, "loss": 0.8679, "step": 12520 }, { "epoch": 2.326890912469801, "grad_norm": 0.7526487708091736, "learning_rate": 7.482435669899677e-05, "loss": 0.9467, "step": 12521 }, { "epoch": 2.3270767515331725, "grad_norm": 0.7747771739959717, "learning_rate": 7.481021818457831e-05, "loss": 0.9128, "step": 12522 }, { "epoch": 2.3272625905965434, "grad_norm": 1.074554681777954, "learning_rate": 7.479608020779252e-05, "loss": 0.8633, "step": 12523 }, { "epoch": 2.3274484296599143, "grad_norm": 0.5868602991104126, "learning_rate": 7.478194276894116e-05, "loss": 0.5134, "step": 12524 }, { "epoch": 2.3276342687232856, "grad_norm": 0.8135141730308533, "learning_rate": 7.476780586832594e-05, "loss": 0.8191, "step": 12525 }, { "epoch": 2.327820107786657, "grad_norm": 0.7354833483695984, "learning_rate": 7.475366950624858e-05, "loss": 0.9799, "step": 12526 }, { "epoch": 2.328005946850028, "grad_norm": 0.9085583090782166, "learning_rate": 7.473953368301086e-05, "loss": 1.0275, "step": 12527 }, { "epoch": 2.328191785913399, "grad_norm": 0.8576337695121765, "learning_rate": 7.47253983989144e-05, "loss": 1.0876, "step": 12528 }, { "epoch": 2.32837762497677, "grad_norm": 0.6999117136001587, "learning_rate": 7.471126365426095e-05, "loss": 0.871, "step": 12529 }, { "epoch": 2.3285634640401414, "grad_norm": 0.9206585884094238, "learning_rate": 7.469712944935222e-05, "loss": 0.8432, "step": 12530 }, { "epoch": 2.3287493031035122, "grad_norm": 0.8157764673233032, "learning_rate": 7.46829957844898e-05, "loss": 0.9118, "step": 12531 }, { "epoch": 2.3289351421668836, "grad_norm": 0.9429844617843628, "learning_rate": 7.466886265997543e-05, "loss": 1.0772, "step": 12532 }, { "epoch": 2.3291209812302545, "grad_norm": 0.6998193860054016, "learning_rate": 7.465473007611068e-05, "loss": 0.7093, 
"step": 12533 }, { "epoch": 2.329306820293626, "grad_norm": 1.0119681358337402, "learning_rate": 7.464059803319725e-05, "loss": 0.9456, "step": 12534 }, { "epoch": 2.3294926593569967, "grad_norm": 0.9467625021934509, "learning_rate": 7.462646653153671e-05, "loss": 1.052, "step": 12535 }, { "epoch": 2.329678498420368, "grad_norm": 0.8921098709106445, "learning_rate": 7.46123355714307e-05, "loss": 0.8632, "step": 12536 }, { "epoch": 2.329864337483739, "grad_norm": 0.8844832181930542, "learning_rate": 7.459820515318085e-05, "loss": 0.8857, "step": 12537 }, { "epoch": 2.33005017654711, "grad_norm": 1.073271632194519, "learning_rate": 7.458407527708868e-05, "loss": 0.8916, "step": 12538 }, { "epoch": 2.330236015610481, "grad_norm": 0.814613938331604, "learning_rate": 7.45699459434558e-05, "loss": 0.942, "step": 12539 }, { "epoch": 2.3304218546738524, "grad_norm": 0.8630864024162292, "learning_rate": 7.455581715258385e-05, "loss": 1.1416, "step": 12540 }, { "epoch": 2.3306076937372238, "grad_norm": 0.9294757843017578, "learning_rate": 7.454168890477427e-05, "loss": 1.0254, "step": 12541 }, { "epoch": 2.3307935328005946, "grad_norm": 0.8275464177131653, "learning_rate": 7.452756120032867e-05, "loss": 0.6494, "step": 12542 }, { "epoch": 2.330979371863966, "grad_norm": 0.7378981113433838, "learning_rate": 7.451343403954856e-05, "loss": 0.595, "step": 12543 }, { "epoch": 2.331165210927337, "grad_norm": 0.6890910267829895, "learning_rate": 7.449930742273547e-05, "loss": 0.739, "step": 12544 }, { "epoch": 2.331351049990708, "grad_norm": 0.77701735496521, "learning_rate": 7.448518135019088e-05, "loss": 0.8901, "step": 12545 }, { "epoch": 2.331536889054079, "grad_norm": 0.8840491771697998, "learning_rate": 7.447105582221631e-05, "loss": 0.8107, "step": 12546 }, { "epoch": 2.3317227281174504, "grad_norm": 0.7704744338989258, "learning_rate": 7.445693083911327e-05, "loss": 0.8986, "step": 12547 }, { "epoch": 2.3319085671808213, "grad_norm": 0.7400922775268555, "learning_rate": 
7.444280640118319e-05, "loss": 0.8463, "step": 12548 }, { "epoch": 2.3320944062441926, "grad_norm": 0.7758222818374634, "learning_rate": 7.442868250872753e-05, "loss": 0.7794, "step": 12549 }, { "epoch": 2.3322802453075635, "grad_norm": 0.7377352118492126, "learning_rate": 7.44145591620478e-05, "loss": 0.8337, "step": 12550 }, { "epoch": 2.332466084370935, "grad_norm": 0.9961763620376587, "learning_rate": 7.440043636144536e-05, "loss": 0.8354, "step": 12551 }, { "epoch": 2.3326519234343057, "grad_norm": 0.8004982471466064, "learning_rate": 7.438631410722172e-05, "loss": 0.8029, "step": 12552 }, { "epoch": 2.332837762497677, "grad_norm": 0.8638790845870972, "learning_rate": 7.43721923996782e-05, "loss": 0.7802, "step": 12553 }, { "epoch": 2.333023601561048, "grad_norm": 0.8142632842063904, "learning_rate": 7.435807123911631e-05, "loss": 0.8276, "step": 12554 }, { "epoch": 2.3332094406244193, "grad_norm": 0.8604399561882019, "learning_rate": 7.434395062583734e-05, "loss": 0.8149, "step": 12555 }, { "epoch": 2.3333952796877906, "grad_norm": 0.7470417618751526, "learning_rate": 7.432983056014273e-05, "loss": 0.7667, "step": 12556 }, { "epoch": 2.3335811187511615, "grad_norm": 0.8007526397705078, "learning_rate": 7.431571104233385e-05, "loss": 0.8139, "step": 12557 }, { "epoch": 2.333766957814533, "grad_norm": 0.8846230506896973, "learning_rate": 7.430159207271203e-05, "loss": 0.8382, "step": 12558 }, { "epoch": 2.3339527968779037, "grad_norm": 0.7674434185028076, "learning_rate": 7.42874736515786e-05, "loss": 1.01, "step": 12559 }, { "epoch": 2.334138635941275, "grad_norm": 0.7947673797607422, "learning_rate": 7.427335577923496e-05, "loss": 0.9574, "step": 12560 }, { "epoch": 2.334324475004646, "grad_norm": 0.8595078587532043, "learning_rate": 7.425923845598237e-05, "loss": 0.9556, "step": 12561 }, { "epoch": 2.3345103140680172, "grad_norm": 0.9428784251213074, "learning_rate": 7.424512168212219e-05, "loss": 0.7944, "step": 12562 }, { "epoch": 2.334696153131388, 
"grad_norm": 0.8632462620735168, "learning_rate": 7.423100545795565e-05, "loss": 0.9847, "step": 12563 }, { "epoch": 2.3348819921947594, "grad_norm": 0.90449059009552, "learning_rate": 7.421688978378411e-05, "loss": 0.9834, "step": 12564 }, { "epoch": 2.3350678312581303, "grad_norm": 0.9151463508605957, "learning_rate": 7.42027746599088e-05, "loss": 0.9398, "step": 12565 }, { "epoch": 2.3352536703215017, "grad_norm": 0.9773748517036438, "learning_rate": 7.418866008663099e-05, "loss": 1.0023, "step": 12566 }, { "epoch": 2.3354395093848725, "grad_norm": 0.7529578804969788, "learning_rate": 7.417454606425196e-05, "loss": 0.8994, "step": 12567 }, { "epoch": 2.335625348448244, "grad_norm": 0.9450651407241821, "learning_rate": 7.41604325930729e-05, "loss": 1.1175, "step": 12568 }, { "epoch": 2.3358111875116148, "grad_norm": 0.8100607395172119, "learning_rate": 7.414631967339509e-05, "loss": 0.9399, "step": 12569 }, { "epoch": 2.335997026574986, "grad_norm": 1.0437417030334473, "learning_rate": 7.413220730551966e-05, "loss": 0.9058, "step": 12570 }, { "epoch": 2.3361828656383574, "grad_norm": 0.7519281506538391, "learning_rate": 7.411809548974792e-05, "loss": 0.9539, "step": 12571 }, { "epoch": 2.3363687047017283, "grad_norm": 1.2798535823822021, "learning_rate": 7.410398422638102e-05, "loss": 1.2466, "step": 12572 }, { "epoch": 2.336554543765099, "grad_norm": 0.7466521859169006, "learning_rate": 7.408987351572013e-05, "loss": 1.0244, "step": 12573 }, { "epoch": 2.3367403828284705, "grad_norm": 0.6776727437973022, "learning_rate": 7.407576335806644e-05, "loss": 0.7165, "step": 12574 }, { "epoch": 2.336926221891842, "grad_norm": 0.9132217168807983, "learning_rate": 7.406165375372107e-05, "loss": 1.2458, "step": 12575 }, { "epoch": 2.3371120609552127, "grad_norm": 0.8114913105964661, "learning_rate": 7.40475447029852e-05, "loss": 0.9963, "step": 12576 }, { "epoch": 2.337297900018584, "grad_norm": 1.0077768564224243, "learning_rate": 7.403343620615997e-05, "loss": 0.946, 
"step": 12577 }, { "epoch": 2.337483739081955, "grad_norm": 0.749036431312561, "learning_rate": 7.401932826354646e-05, "loss": 0.676, "step": 12578 }, { "epoch": 2.3376695781453263, "grad_norm": 0.7417488694190979, "learning_rate": 7.400522087544582e-05, "loss": 0.9087, "step": 12579 }, { "epoch": 2.337855417208697, "grad_norm": 0.8185071349143982, "learning_rate": 7.399111404215909e-05, "loss": 0.9881, "step": 12580 }, { "epoch": 2.3380412562720685, "grad_norm": 0.8285645842552185, "learning_rate": 7.397700776398741e-05, "loss": 1.1504, "step": 12581 }, { "epoch": 2.3382270953354394, "grad_norm": 0.7775911092758179, "learning_rate": 7.396290204123188e-05, "loss": 0.97, "step": 12582 }, { "epoch": 2.3384129343988107, "grad_norm": 0.7964914441108704, "learning_rate": 7.39487968741935e-05, "loss": 1.1304, "step": 12583 }, { "epoch": 2.3385987734621816, "grad_norm": 0.8669666051864624, "learning_rate": 7.393469226317335e-05, "loss": 0.8356, "step": 12584 }, { "epoch": 2.338784612525553, "grad_norm": 0.9136351346969604, "learning_rate": 7.392058820847245e-05, "loss": 0.7691, "step": 12585 }, { "epoch": 2.338970451588924, "grad_norm": 0.8523462414741516, "learning_rate": 7.390648471039185e-05, "loss": 0.8044, "step": 12586 }, { "epoch": 2.339156290652295, "grad_norm": 0.9019622206687927, "learning_rate": 7.389238176923258e-05, "loss": 0.8436, "step": 12587 }, { "epoch": 2.339342129715666, "grad_norm": 0.8432104587554932, "learning_rate": 7.387827938529559e-05, "loss": 0.7952, "step": 12588 }, { "epoch": 2.3395279687790373, "grad_norm": 0.714282214641571, "learning_rate": 7.386417755888191e-05, "loss": 0.7451, "step": 12589 }, { "epoch": 2.3397138078424087, "grad_norm": 0.8890557289123535, "learning_rate": 7.38500762902925e-05, "loss": 0.8865, "step": 12590 }, { "epoch": 2.3398996469057796, "grad_norm": 0.6751204133033752, "learning_rate": 7.383597557982833e-05, "loss": 0.8756, "step": 12591 }, { "epoch": 2.340085485969151, "grad_norm": 0.7461575865745544, 
"learning_rate": 7.382187542779038e-05, "loss": 0.8794, "step": 12592 }, { "epoch": 2.3402713250325218, "grad_norm": 0.8565900325775146, "learning_rate": 7.380777583447957e-05, "loss": 1.0726, "step": 12593 }, { "epoch": 2.340457164095893, "grad_norm": 0.7575398087501526, "learning_rate": 7.379367680019685e-05, "loss": 0.9309, "step": 12594 }, { "epoch": 2.340643003159264, "grad_norm": 1.0048532485961914, "learning_rate": 7.377957832524311e-05, "loss": 0.9448, "step": 12595 }, { "epoch": 2.3408288422226353, "grad_norm": 0.8079115152359009, "learning_rate": 7.376548040991927e-05, "loss": 0.8584, "step": 12596 }, { "epoch": 2.341014681286006, "grad_norm": 0.8296436071395874, "learning_rate": 7.375138305452624e-05, "loss": 0.9667, "step": 12597 }, { "epoch": 2.3412005203493775, "grad_norm": 0.8004963994026184, "learning_rate": 7.37372862593649e-05, "loss": 0.9276, "step": 12598 }, { "epoch": 2.3413863594127484, "grad_norm": 0.7355859279632568, "learning_rate": 7.372319002473613e-05, "loss": 0.7174, "step": 12599 }, { "epoch": 2.3415721984761197, "grad_norm": 0.8082651495933533, "learning_rate": 7.370909435094074e-05, "loss": 0.9901, "step": 12600 }, { "epoch": 2.3417580375394906, "grad_norm": 0.893548846244812, "learning_rate": 7.36949992382796e-05, "loss": 1.0624, "step": 12601 }, { "epoch": 2.341943876602862, "grad_norm": 0.8404073119163513, "learning_rate": 7.368090468705361e-05, "loss": 0.8835, "step": 12602 }, { "epoch": 2.342129715666233, "grad_norm": 0.8230233192443848, "learning_rate": 7.366681069756352e-05, "loss": 1.0559, "step": 12603 }, { "epoch": 2.342315554729604, "grad_norm": 1.057384729385376, "learning_rate": 7.365271727011019e-05, "loss": 1.1518, "step": 12604 }, { "epoch": 2.3425013937929755, "grad_norm": 0.9644550681114197, "learning_rate": 7.363862440499436e-05, "loss": 0.8966, "step": 12605 }, { "epoch": 2.3426872328563464, "grad_norm": 0.7263677716255188, "learning_rate": 7.362453210251686e-05, "loss": 0.7729, "step": 12606 }, { "epoch": 
2.3428730719197173, "grad_norm": 0.7990922331809998, "learning_rate": 7.361044036297849e-05, "loss": 0.9411, "step": 12607 }, { "epoch": 2.3430589109830886, "grad_norm": 0.7927680015563965, "learning_rate": 7.359634918667996e-05, "loss": 0.891, "step": 12608 }, { "epoch": 2.34324475004646, "grad_norm": 0.8109414577484131, "learning_rate": 7.358225857392207e-05, "loss": 1.0352, "step": 12609 }, { "epoch": 2.343430589109831, "grad_norm": 0.8968274593353271, "learning_rate": 7.35681685250055e-05, "loss": 0.9016, "step": 12610 }, { "epoch": 2.343616428173202, "grad_norm": 0.8033596873283386, "learning_rate": 7.355407904023103e-05, "loss": 0.8873, "step": 12611 }, { "epoch": 2.343802267236573, "grad_norm": 1.0283641815185547, "learning_rate": 7.353999011989935e-05, "loss": 1.1681, "step": 12612 }, { "epoch": 2.3439881062999444, "grad_norm": 0.7543270587921143, "learning_rate": 7.352590176431118e-05, "loss": 0.6605, "step": 12613 }, { "epoch": 2.3441739453633152, "grad_norm": 0.8932667374610901, "learning_rate": 7.351181397376724e-05, "loss": 0.895, "step": 12614 }, { "epoch": 2.3443597844266866, "grad_norm": 0.8471357822418213, "learning_rate": 7.349772674856812e-05, "loss": 1.0288, "step": 12615 }, { "epoch": 2.3445456234900575, "grad_norm": 0.865290105342865, "learning_rate": 7.348364008901456e-05, "loss": 0.7652, "step": 12616 }, { "epoch": 2.344731462553429, "grad_norm": 0.8844984769821167, "learning_rate": 7.346955399540723e-05, "loss": 0.8963, "step": 12617 }, { "epoch": 2.3449173016167997, "grad_norm": 0.8787597417831421, "learning_rate": 7.34554684680467e-05, "loss": 1.081, "step": 12618 }, { "epoch": 2.345103140680171, "grad_norm": 0.7446838021278381, "learning_rate": 7.344138350723369e-05, "loss": 0.9378, "step": 12619 }, { "epoch": 2.3452889797435423, "grad_norm": 0.8385235667228699, "learning_rate": 7.342729911326873e-05, "loss": 0.9505, "step": 12620 }, { "epoch": 2.345474818806913, "grad_norm": 0.6965145468711853, "learning_rate": 7.34132152864525e-05, 
"loss": 0.7226, "step": 12621 }, { "epoch": 2.345660657870284, "grad_norm": 0.8368469476699829, "learning_rate": 7.339913202708552e-05, "loss": 0.8274, "step": 12622 }, { "epoch": 2.3458464969336554, "grad_norm": 0.7920130491256714, "learning_rate": 7.338504933546844e-05, "loss": 0.8632, "step": 12623 }, { "epoch": 2.3460323359970268, "grad_norm": 0.7846779227256775, "learning_rate": 7.337096721190182e-05, "loss": 0.9491, "step": 12624 }, { "epoch": 2.3462181750603976, "grad_norm": 0.8140245676040649, "learning_rate": 7.335688565668621e-05, "loss": 0.8609, "step": 12625 }, { "epoch": 2.346404014123769, "grad_norm": 0.7807095646858215, "learning_rate": 7.334280467012214e-05, "loss": 0.8984, "step": 12626 }, { "epoch": 2.34658985318714, "grad_norm": 0.8692532181739807, "learning_rate": 7.332872425251018e-05, "loss": 1.0086, "step": 12627 }, { "epoch": 2.346775692250511, "grad_norm": 0.7482457160949707, "learning_rate": 7.331464440415082e-05, "loss": 0.8269, "step": 12628 }, { "epoch": 2.346961531313882, "grad_norm": 0.9138236045837402, "learning_rate": 7.330056512534458e-05, "loss": 1.0416, "step": 12629 }, { "epoch": 2.3471473703772534, "grad_norm": 0.8924627304077148, "learning_rate": 7.328648641639195e-05, "loss": 0.8862, "step": 12630 }, { "epoch": 2.3473332094406243, "grad_norm": 0.8058710098266602, "learning_rate": 7.327240827759345e-05, "loss": 0.7509, "step": 12631 }, { "epoch": 2.3475190485039956, "grad_norm": 0.9041599631309509, "learning_rate": 7.32583307092495e-05, "loss": 0.7448, "step": 12632 }, { "epoch": 2.3477048875673665, "grad_norm": 0.8715543150901794, "learning_rate": 7.324425371166057e-05, "loss": 1.1062, "step": 12633 }, { "epoch": 2.347890726630738, "grad_norm": 0.7531129717826843, "learning_rate": 7.323017728512717e-05, "loss": 0.8701, "step": 12634 }, { "epoch": 2.3480765656941087, "grad_norm": 1.0183310508728027, "learning_rate": 7.32161014299497e-05, "loss": 0.8623, "step": 12635 }, { "epoch": 2.34826240475748, "grad_norm": 
0.7323870658874512, "learning_rate": 7.320202614642856e-05, "loss": 0.973, "step": 12636 }, { "epoch": 2.348448243820851, "grad_norm": 1.0801042318344116, "learning_rate": 7.31879514348642e-05, "loss": 0.9972, "step": 12637 }, { "epoch": 2.3486340828842223, "grad_norm": 0.7815359234809875, "learning_rate": 7.3173877295557e-05, "loss": 0.9521, "step": 12638 }, { "epoch": 2.3488199219475936, "grad_norm": 0.7192123532295227, "learning_rate": 7.315980372880738e-05, "loss": 0.9116, "step": 12639 }, { "epoch": 2.3490057610109645, "grad_norm": 0.7962202429771423, "learning_rate": 7.314573073491566e-05, "loss": 0.6876, "step": 12640 }, { "epoch": 2.349191600074336, "grad_norm": 0.9301812052726746, "learning_rate": 7.313165831418225e-05, "loss": 1.0148, "step": 12641 }, { "epoch": 2.3493774391377067, "grad_norm": 0.751960277557373, "learning_rate": 7.311758646690748e-05, "loss": 0.8908, "step": 12642 }, { "epoch": 2.349563278201078, "grad_norm": 0.9244655966758728, "learning_rate": 7.310351519339165e-05, "loss": 0.9406, "step": 12643 }, { "epoch": 2.349749117264449, "grad_norm": 0.8816783428192139, "learning_rate": 7.308944449393518e-05, "loss": 1.0295, "step": 12644 }, { "epoch": 2.3499349563278202, "grad_norm": 0.8948429226875305, "learning_rate": 7.307537436883833e-05, "loss": 0.8946, "step": 12645 }, { "epoch": 2.350120795391191, "grad_norm": 0.5853556990623474, "learning_rate": 7.306130481840139e-05, "loss": 0.5547, "step": 12646 }, { "epoch": 2.3503066344545624, "grad_norm": 0.7878490686416626, "learning_rate": 7.30472358429247e-05, "loss": 0.9347, "step": 12647 }, { "epoch": 2.3504924735179333, "grad_norm": 0.8565953373908997, "learning_rate": 7.303316744270849e-05, "loss": 0.8626, "step": 12648 }, { "epoch": 2.3506783125813047, "grad_norm": 0.8676490783691406, "learning_rate": 7.301909961805306e-05, "loss": 0.9516, "step": 12649 }, { "epoch": 2.3508641516446755, "grad_norm": 0.8519862294197083, "learning_rate": 7.300503236925864e-05, "loss": 0.9389, "step": 12650 }, 
{ "epoch": 2.351049990708047, "grad_norm": 0.7775579690933228, "learning_rate": 7.29909656966255e-05, "loss": 0.7831, "step": 12651 }, { "epoch": 2.3512358297714178, "grad_norm": 0.8417463302612305, "learning_rate": 7.29768996004538e-05, "loss": 0.918, "step": 12652 }, { "epoch": 2.351421668834789, "grad_norm": 0.7603926062583923, "learning_rate": 7.296283408104385e-05, "loss": 1.0112, "step": 12653 }, { "epoch": 2.3516075078981604, "grad_norm": 0.6868374943733215, "learning_rate": 7.294876913869573e-05, "loss": 0.6459, "step": 12654 }, { "epoch": 2.3517933469615313, "grad_norm": 0.8233698010444641, "learning_rate": 7.293470477370979e-05, "loss": 1.1302, "step": 12655 }, { "epoch": 2.351979186024902, "grad_norm": 0.9082133173942566, "learning_rate": 7.29206409863861e-05, "loss": 1.0395, "step": 12656 }, { "epoch": 2.3521650250882735, "grad_norm": 0.7943674921989441, "learning_rate": 7.290657777702488e-05, "loss": 0.6356, "step": 12657 }, { "epoch": 2.352350864151645, "grad_norm": 0.9202367067337036, "learning_rate": 7.289251514592625e-05, "loss": 1.0519, "step": 12658 }, { "epoch": 2.3525367032150157, "grad_norm": 0.8511589169502258, "learning_rate": 7.287845309339038e-05, "loss": 1.1179, "step": 12659 }, { "epoch": 2.352722542278387, "grad_norm": 0.796493411064148, "learning_rate": 7.286439161971737e-05, "loss": 0.8966, "step": 12660 }, { "epoch": 2.352908381341758, "grad_norm": 0.7692650556564331, "learning_rate": 7.285033072520736e-05, "loss": 0.9399, "step": 12661 }, { "epoch": 2.3530942204051293, "grad_norm": 0.8270449042320251, "learning_rate": 7.283627041016043e-05, "loss": 0.9297, "step": 12662 }, { "epoch": 2.3532800594685, "grad_norm": 1.3305835723876953, "learning_rate": 7.282221067487673e-05, "loss": 0.759, "step": 12663 }, { "epoch": 2.3534658985318715, "grad_norm": 0.8420296311378479, "learning_rate": 7.280815151965624e-05, "loss": 0.9815, "step": 12664 }, { "epoch": 2.3536517375952424, "grad_norm": 0.7621555924415588, "learning_rate": 
7.279409294479915e-05, "loss": 0.8782, "step": 12665 }, { "epoch": 2.3538375766586137, "grad_norm": 0.8693684339523315, "learning_rate": 7.278003495060543e-05, "loss": 0.915, "step": 12666 }, { "epoch": 2.3540234157219846, "grad_norm": 0.8019158840179443, "learning_rate": 7.27659775373752e-05, "loss": 0.7045, "step": 12667 }, { "epoch": 2.354209254785356, "grad_norm": 0.8997321724891663, "learning_rate": 7.27519207054084e-05, "loss": 0.8401, "step": 12668 }, { "epoch": 2.3543950938487272, "grad_norm": 0.7859323024749756, "learning_rate": 7.273786445500513e-05, "loss": 0.7796, "step": 12669 }, { "epoch": 2.354580932912098, "grad_norm": 0.8584409952163696, "learning_rate": 7.272380878646532e-05, "loss": 1.0528, "step": 12670 }, { "epoch": 2.354766771975469, "grad_norm": 0.8414707779884338, "learning_rate": 7.270975370008903e-05, "loss": 1.0077, "step": 12671 }, { "epoch": 2.3549526110388403, "grad_norm": 0.8825092911720276, "learning_rate": 7.26956991961762e-05, "loss": 1.1716, "step": 12672 }, { "epoch": 2.3551384501022117, "grad_norm": 0.8332867622375488, "learning_rate": 7.268164527502683e-05, "loss": 0.8462, "step": 12673 }, { "epoch": 2.3553242891655826, "grad_norm": 0.7624945044517517, "learning_rate": 7.266759193694084e-05, "loss": 0.696, "step": 12674 }, { "epoch": 2.355510128228954, "grad_norm": 0.8629702925682068, "learning_rate": 7.265353918221817e-05, "loss": 1.1784, "step": 12675 }, { "epoch": 2.3556959672923248, "grad_norm": 0.6932694315910339, "learning_rate": 7.263948701115881e-05, "loss": 0.8929, "step": 12676 }, { "epoch": 2.355881806355696, "grad_norm": 0.7592587471008301, "learning_rate": 7.262543542406267e-05, "loss": 0.8846, "step": 12677 }, { "epoch": 2.356067645419067, "grad_norm": 0.7431928515434265, "learning_rate": 7.261138442122962e-05, "loss": 0.8915, "step": 12678 }, { "epoch": 2.3562534844824383, "grad_norm": 0.9140413999557495, "learning_rate": 7.259733400295956e-05, "loss": 1.0119, "step": 12679 }, { "epoch": 2.356439323545809, 
"grad_norm": 0.7461313009262085, "learning_rate": 7.258328416955238e-05, "loss": 0.9977, "step": 12680 }, { "epoch": 2.3566251626091805, "grad_norm": 0.9505020976066589, "learning_rate": 7.256923492130797e-05, "loss": 0.8069, "step": 12681 }, { "epoch": 2.3568110016725514, "grad_norm": 0.8287774920463562, "learning_rate": 7.255518625852615e-05, "loss": 0.9404, "step": 12682 }, { "epoch": 2.3569968407359227, "grad_norm": 0.8033880591392517, "learning_rate": 7.25411381815068e-05, "loss": 0.9273, "step": 12683 }, { "epoch": 2.3571826797992936, "grad_norm": 0.7415333986282349, "learning_rate": 7.252709069054971e-05, "loss": 0.8007, "step": 12684 }, { "epoch": 2.357368518862665, "grad_norm": 0.8875011801719666, "learning_rate": 7.251304378595471e-05, "loss": 0.8221, "step": 12685 }, { "epoch": 2.357554357926036, "grad_norm": 0.8783387541770935, "learning_rate": 7.249899746802163e-05, "loss": 0.9782, "step": 12686 }, { "epoch": 2.357740196989407, "grad_norm": 0.7870514988899231, "learning_rate": 7.248495173705027e-05, "loss": 0.869, "step": 12687 }, { "epoch": 2.3579260360527785, "grad_norm": 0.8032506704330444, "learning_rate": 7.247090659334039e-05, "loss": 1.1387, "step": 12688 }, { "epoch": 2.3581118751161494, "grad_norm": 0.823181688785553, "learning_rate": 7.245686203719179e-05, "loss": 0.8583, "step": 12689 }, { "epoch": 2.3582977141795207, "grad_norm": 0.9091300964355469, "learning_rate": 7.244281806890419e-05, "loss": 0.9424, "step": 12690 }, { "epoch": 2.3584835532428916, "grad_norm": 0.6754640936851501, "learning_rate": 7.242877468877735e-05, "loss": 0.7311, "step": 12691 }, { "epoch": 2.358669392306263, "grad_norm": 0.984846293926239, "learning_rate": 7.241473189711099e-05, "loss": 1.1976, "step": 12692 }, { "epoch": 2.358855231369634, "grad_norm": 0.9455552101135254, "learning_rate": 7.240068969420486e-05, "loss": 0.8349, "step": 12693 }, { "epoch": 2.359041070433005, "grad_norm": 0.8178308010101318, "learning_rate": 7.238664808035863e-05, "loss": 1.069, 
"step": 12694 }, { "epoch": 2.359226909496376, "grad_norm": 0.8013490438461304, "learning_rate": 7.237260705587203e-05, "loss": 0.8185, "step": 12695 }, { "epoch": 2.3594127485597474, "grad_norm": 0.8036800622940063, "learning_rate": 7.235856662104465e-05, "loss": 0.8689, "step": 12696 }, { "epoch": 2.3595985876231182, "grad_norm": 0.8035423755645752, "learning_rate": 7.23445267761763e-05, "loss": 1.0646, "step": 12697 }, { "epoch": 2.3597844266864896, "grad_norm": 0.872822105884552, "learning_rate": 7.233048752156655e-05, "loss": 0.9459, "step": 12698 }, { "epoch": 2.3599702657498605, "grad_norm": 0.7758422493934631, "learning_rate": 7.231644885751507e-05, "loss": 0.8219, "step": 12699 }, { "epoch": 2.360156104813232, "grad_norm": 0.882146418094635, "learning_rate": 7.230241078432148e-05, "loss": 0.8086, "step": 12700 }, { "epoch": 2.3603419438766027, "grad_norm": 0.7079089879989624, "learning_rate": 7.22883733022854e-05, "loss": 0.9594, "step": 12701 }, { "epoch": 2.360527782939974, "grad_norm": 0.8441744446754456, "learning_rate": 7.227433641170643e-05, "loss": 0.9397, "step": 12702 }, { "epoch": 2.3607136220033453, "grad_norm": 0.6709832549095154, "learning_rate": 7.226030011288419e-05, "loss": 0.7539, "step": 12703 }, { "epoch": 2.360899461066716, "grad_norm": 0.8459264636039734, "learning_rate": 7.224626440611821e-05, "loss": 0.9691, "step": 12704 }, { "epoch": 2.361085300130087, "grad_norm": 0.7479331493377686, "learning_rate": 7.223222929170812e-05, "loss": 0.7687, "step": 12705 }, { "epoch": 2.3612711391934584, "grad_norm": 0.9345939755439758, "learning_rate": 7.221819476995337e-05, "loss": 1.0044, "step": 12706 }, { "epoch": 2.3614569782568298, "grad_norm": 0.7808400988578796, "learning_rate": 7.220416084115364e-05, "loss": 0.9883, "step": 12707 }, { "epoch": 2.3616428173202006, "grad_norm": 0.7628604173660278, "learning_rate": 7.219012750560839e-05, "loss": 0.7652, "step": 12708 }, { "epoch": 2.361828656383572, "grad_norm": 0.7046692371368408, 
"learning_rate": 7.217609476361717e-05, "loss": 0.7371, "step": 12709 }, { "epoch": 2.362014495446943, "grad_norm": 0.7415536046028137, "learning_rate": 7.216206261547942e-05, "loss": 0.7985, "step": 12710 }, { "epoch": 2.362200334510314, "grad_norm": 0.7339528799057007, "learning_rate": 7.21480310614947e-05, "loss": 0.8325, "step": 12711 }, { "epoch": 2.362386173573685, "grad_norm": 0.7695592045783997, "learning_rate": 7.213400010196245e-05, "loss": 0.6426, "step": 12712 }, { "epoch": 2.3625720126370564, "grad_norm": 0.9707488417625427, "learning_rate": 7.211996973718216e-05, "loss": 0.9557, "step": 12713 }, { "epoch": 2.3627578517004273, "grad_norm": 0.7279425263404846, "learning_rate": 7.210593996745325e-05, "loss": 0.9252, "step": 12714 }, { "epoch": 2.3629436907637986, "grad_norm": 0.7701457142829895, "learning_rate": 7.209191079307521e-05, "loss": 0.9995, "step": 12715 }, { "epoch": 2.3631295298271695, "grad_norm": 0.706351101398468, "learning_rate": 7.207788221434741e-05, "loss": 0.8566, "step": 12716 }, { "epoch": 2.363315368890541, "grad_norm": 0.9042361974716187, "learning_rate": 7.206385423156929e-05, "loss": 1.0993, "step": 12717 }, { "epoch": 2.3635012079539117, "grad_norm": 0.820608377456665, "learning_rate": 7.204982684504027e-05, "loss": 0.6605, "step": 12718 }, { "epoch": 2.363687047017283, "grad_norm": 0.8894959092140198, "learning_rate": 7.203580005505977e-05, "loss": 0.9441, "step": 12719 }, { "epoch": 2.363872886080654, "grad_norm": 0.7809706926345825, "learning_rate": 7.202177386192707e-05, "loss": 0.8342, "step": 12720 }, { "epoch": 2.3640587251440253, "grad_norm": 0.9790495038032532, "learning_rate": 7.200774826594166e-05, "loss": 0.9595, "step": 12721 }, { "epoch": 2.3642445642073966, "grad_norm": 0.7445716261863708, "learning_rate": 7.19937232674028e-05, "loss": 1.1687, "step": 12722 }, { "epoch": 2.3644304032707675, "grad_norm": 0.80829256772995, "learning_rate": 7.197969886660984e-05, "loss": 0.8791, "step": 12723 }, { "epoch": 
2.364616242334139, "grad_norm": 0.8042446970939636, "learning_rate": 7.196567506386214e-05, "loss": 1.124, "step": 12724 }, { "epoch": 2.3648020813975097, "grad_norm": 0.8191190361976624, "learning_rate": 7.1951651859459e-05, "loss": 1.0811, "step": 12725 }, { "epoch": 2.364987920460881, "grad_norm": 0.8683209419250488, "learning_rate": 7.19376292536997e-05, "loss": 0.7933, "step": 12726 }, { "epoch": 2.365173759524252, "grad_norm": 0.6210349798202515, "learning_rate": 7.192360724688352e-05, "loss": 0.5691, "step": 12727 }, { "epoch": 2.3653595985876232, "grad_norm": 0.7325350642204285, "learning_rate": 7.19095858393098e-05, "loss": 0.6499, "step": 12728 }, { "epoch": 2.365545437650994, "grad_norm": 0.8471894264221191, "learning_rate": 7.189556503127776e-05, "loss": 0.756, "step": 12729 }, { "epoch": 2.3657312767143654, "grad_norm": 1.4356576204299927, "learning_rate": 7.188154482308667e-05, "loss": 1.328, "step": 12730 }, { "epoch": 2.3659171157777363, "grad_norm": 0.7856905460357666, "learning_rate": 7.186752521503575e-05, "loss": 0.8839, "step": 12731 }, { "epoch": 2.3661029548411077, "grad_norm": 0.7553587555885315, "learning_rate": 7.185350620742421e-05, "loss": 0.9472, "step": 12732 }, { "epoch": 2.3662887939044785, "grad_norm": 0.9476107358932495, "learning_rate": 7.18394878005513e-05, "loss": 0.7734, "step": 12733 }, { "epoch": 2.36647463296785, "grad_norm": 0.9446849822998047, "learning_rate": 7.182546999471617e-05, "loss": 1.077, "step": 12734 }, { "epoch": 2.3666604720312208, "grad_norm": 0.7738652229309082, "learning_rate": 7.181145279021806e-05, "loss": 0.8729, "step": 12735 }, { "epoch": 2.366846311094592, "grad_norm": 0.8181040287017822, "learning_rate": 7.179743618735611e-05, "loss": 1.0301, "step": 12736 }, { "epoch": 2.3670321501579634, "grad_norm": 0.89509516954422, "learning_rate": 7.178342018642947e-05, "loss": 1.0886, "step": 12737 }, { "epoch": 2.3672179892213343, "grad_norm": 0.8342881202697754, "learning_rate": 7.176940478773729e-05, 
"loss": 1.0007, "step": 12738 }, { "epoch": 2.3674038282847056, "grad_norm": 0.81234210729599, "learning_rate": 7.175538999157876e-05, "loss": 1.1919, "step": 12739 }, { "epoch": 2.3675896673480765, "grad_norm": 0.806109607219696, "learning_rate": 7.174137579825293e-05, "loss": 0.9236, "step": 12740 }, { "epoch": 2.367775506411448, "grad_norm": 0.8120869398117065, "learning_rate": 7.172736220805898e-05, "loss": 0.8751, "step": 12741 }, { "epoch": 2.3679613454748187, "grad_norm": 0.756284773349762, "learning_rate": 7.171334922129593e-05, "loss": 0.9141, "step": 12742 }, { "epoch": 2.36814718453819, "grad_norm": 0.9308738112449646, "learning_rate": 7.169933683826292e-05, "loss": 0.8915, "step": 12743 }, { "epoch": 2.368333023601561, "grad_norm": 0.8234387636184692, "learning_rate": 7.168532505925899e-05, "loss": 0.8855, "step": 12744 }, { "epoch": 2.3685188626649323, "grad_norm": 0.8203461170196533, "learning_rate": 7.167131388458321e-05, "loss": 0.6825, "step": 12745 }, { "epoch": 2.368704701728303, "grad_norm": 0.9258405566215515, "learning_rate": 7.165730331453462e-05, "loss": 0.9386, "step": 12746 }, { "epoch": 2.3688905407916745, "grad_norm": 1.023603081703186, "learning_rate": 7.164329334941224e-05, "loss": 1.0062, "step": 12747 }, { "epoch": 2.3690763798550454, "grad_norm": 0.7846123576164246, "learning_rate": 7.162928398951508e-05, "loss": 0.9227, "step": 12748 }, { "epoch": 2.3692622189184167, "grad_norm": 0.836129903793335, "learning_rate": 7.161527523514221e-05, "loss": 0.6194, "step": 12749 }, { "epoch": 2.3694480579817876, "grad_norm": 0.8389571905136108, "learning_rate": 7.160126708659256e-05, "loss": 0.8251, "step": 12750 }, { "epoch": 2.369633897045159, "grad_norm": 0.9334648847579956, "learning_rate": 7.158725954416516e-05, "loss": 0.8305, "step": 12751 }, { "epoch": 2.3698197361085303, "grad_norm": 0.7365804314613342, "learning_rate": 7.157325260815892e-05, "loss": 0.6032, "step": 12752 }, { "epoch": 2.370005575171901, "grad_norm": 
1.0147955417633057, "learning_rate": 7.155924627887282e-05, "loss": 0.9548, "step": 12753 }, { "epoch": 2.370191414235272, "grad_norm": 0.7993833422660828, "learning_rate": 7.154524055660581e-05, "loss": 0.7528, "step": 12754 }, { "epoch": 2.3703772532986433, "grad_norm": 0.8479723334312439, "learning_rate": 7.153123544165683e-05, "loss": 0.9525, "step": 12755 }, { "epoch": 2.3705630923620147, "grad_norm": 0.7671613097190857, "learning_rate": 7.151723093432474e-05, "loss": 0.8971, "step": 12756 }, { "epoch": 2.3707489314253856, "grad_norm": 0.9010011553764343, "learning_rate": 7.150322703490849e-05, "loss": 0.9733, "step": 12757 }, { "epoch": 2.370934770488757, "grad_norm": 0.9608262181282043, "learning_rate": 7.148922374370697e-05, "loss": 1.0727, "step": 12758 }, { "epoch": 2.371120609552128, "grad_norm": 0.6904570460319519, "learning_rate": 7.147522106101901e-05, "loss": 0.819, "step": 12759 }, { "epoch": 2.371306448615499, "grad_norm": 0.8582159280776978, "learning_rate": 7.146121898714351e-05, "loss": 0.8103, "step": 12760 }, { "epoch": 2.37149228767887, "grad_norm": 0.8501172065734863, "learning_rate": 7.144721752237935e-05, "loss": 1.0177, "step": 12761 }, { "epoch": 2.3716781267422413, "grad_norm": 0.9760304093360901, "learning_rate": 7.143321666702531e-05, "loss": 0.9418, "step": 12762 }, { "epoch": 2.371863965805612, "grad_norm": 0.557565450668335, "learning_rate": 7.141921642138025e-05, "loss": 0.5414, "step": 12763 }, { "epoch": 2.3720498048689835, "grad_norm": 0.7802746891975403, "learning_rate": 7.140521678574296e-05, "loss": 0.9173, "step": 12764 }, { "epoch": 2.3722356439323544, "grad_norm": 0.8309347033500671, "learning_rate": 7.139121776041226e-05, "loss": 0.8513, "step": 12765 }, { "epoch": 2.3724214829957257, "grad_norm": 0.865175187587738, "learning_rate": 7.137721934568691e-05, "loss": 0.958, "step": 12766 }, { "epoch": 2.3726073220590966, "grad_norm": 1.1070252656936646, "learning_rate": 7.136322154186568e-05, "loss": 0.9722, "step": 12767 }, 
{ "epoch": 2.372793161122468, "grad_norm": 0.9914840459823608, "learning_rate": 7.134922434924737e-05, "loss": 1.0765, "step": 12768 }, { "epoch": 2.372979000185839, "grad_norm": 0.751986563205719, "learning_rate": 7.133522776813066e-05, "loss": 1.1364, "step": 12769 }, { "epoch": 2.37316483924921, "grad_norm": 0.7722371220588684, "learning_rate": 7.132123179881435e-05, "loss": 0.7617, "step": 12770 }, { "epoch": 2.3733506783125815, "grad_norm": 0.8863736391067505, "learning_rate": 7.130723644159715e-05, "loss": 0.6999, "step": 12771 }, { "epoch": 2.3735365173759524, "grad_norm": 0.8677679896354675, "learning_rate": 7.129324169677773e-05, "loss": 1.1685, "step": 12772 }, { "epoch": 2.3737223564393237, "grad_norm": 0.8729515671730042, "learning_rate": 7.12792475646548e-05, "loss": 1.0509, "step": 12773 }, { "epoch": 2.3739081955026946, "grad_norm": 0.849473774433136, "learning_rate": 7.126525404552706e-05, "loss": 0.9959, "step": 12774 }, { "epoch": 2.374094034566066, "grad_norm": 0.9366466999053955, "learning_rate": 7.125126113969316e-05, "loss": 0.8327, "step": 12775 }, { "epoch": 2.374279873629437, "grad_norm": 0.8395025730133057, "learning_rate": 7.123726884745174e-05, "loss": 0.8906, "step": 12776 }, { "epoch": 2.374465712692808, "grad_norm": 0.7203903794288635, "learning_rate": 7.122327716910146e-05, "loss": 0.8016, "step": 12777 }, { "epoch": 2.374651551756179, "grad_norm": 0.8004384636878967, "learning_rate": 7.120928610494097e-05, "loss": 0.9767, "step": 12778 }, { "epoch": 2.3748373908195504, "grad_norm": 0.8686206340789795, "learning_rate": 7.119529565526883e-05, "loss": 1.0413, "step": 12779 }, { "epoch": 2.3750232298829212, "grad_norm": 0.7640944123268127, "learning_rate": 7.118130582038367e-05, "loss": 0.8518, "step": 12780 }, { "epoch": 2.3752090689462926, "grad_norm": 0.7188209891319275, "learning_rate": 7.11673166005841e-05, "loss": 0.8153, "step": 12781 }, { "epoch": 2.3753949080096635, "grad_norm": 0.8678622841835022, "learning_rate": 
7.115332799616868e-05, "loss": 1.1128, "step": 12782 }, { "epoch": 2.375580747073035, "grad_norm": 0.8177844882011414, "learning_rate": 7.113934000743598e-05, "loss": 0.8343, "step": 12783 }, { "epoch": 2.3757665861364057, "grad_norm": 0.8202971816062927, "learning_rate": 7.112535263468452e-05, "loss": 0.9535, "step": 12784 }, { "epoch": 2.375952425199777, "grad_norm": 0.9193758368492126, "learning_rate": 7.111136587821288e-05, "loss": 0.6666, "step": 12785 }, { "epoch": 2.3761382642631483, "grad_norm": 0.7091536521911621, "learning_rate": 7.109737973831954e-05, "loss": 0.9264, "step": 12786 }, { "epoch": 2.376324103326519, "grad_norm": 0.8636837601661682, "learning_rate": 7.108339421530303e-05, "loss": 0.7951, "step": 12787 }, { "epoch": 2.37650994238989, "grad_norm": 0.9927158355712891, "learning_rate": 7.106940930946186e-05, "loss": 1.1423, "step": 12788 }, { "epoch": 2.3766957814532614, "grad_norm": 0.7743449211120605, "learning_rate": 7.105542502109451e-05, "loss": 1.0022, "step": 12789 }, { "epoch": 2.3768816205166328, "grad_norm": 0.8231563568115234, "learning_rate": 7.104144135049939e-05, "loss": 0.9358, "step": 12790 }, { "epoch": 2.3770674595800036, "grad_norm": 0.702055037021637, "learning_rate": 7.102745829797506e-05, "loss": 0.809, "step": 12791 }, { "epoch": 2.377253298643375, "grad_norm": 0.7115417122840881, "learning_rate": 7.101347586381989e-05, "loss": 0.8033, "step": 12792 }, { "epoch": 2.377439137706746, "grad_norm": 0.7683055996894836, "learning_rate": 7.099949404833236e-05, "loss": 0.9871, "step": 12793 }, { "epoch": 2.377624976770117, "grad_norm": 0.7212742567062378, "learning_rate": 7.098551285181084e-05, "loss": 0.7506, "step": 12794 }, { "epoch": 2.377810815833488, "grad_norm": 1.4942293167114258, "learning_rate": 7.097153227455379e-05, "loss": 1.0214, "step": 12795 }, { "epoch": 2.3779966548968594, "grad_norm": 0.8741627335548401, "learning_rate": 7.095755231685953e-05, "loss": 0.8677, "step": 12796 }, { "epoch": 2.3781824939602303, 
"grad_norm": 0.833321750164032, "learning_rate": 7.094357297902649e-05, "loss": 0.8144, "step": 12797 }, { "epoch": 2.3783683330236016, "grad_norm": 0.7516381144523621, "learning_rate": 7.092959426135304e-05, "loss": 1.2043, "step": 12798 }, { "epoch": 2.3785541720869725, "grad_norm": 1.009989619255066, "learning_rate": 7.091561616413749e-05, "loss": 1.1505, "step": 12799 }, { "epoch": 2.378740011150344, "grad_norm": 0.8444727659225464, "learning_rate": 7.090163868767822e-05, "loss": 1.0908, "step": 12800 }, { "epoch": 2.378925850213715, "grad_norm": 0.7950802445411682, "learning_rate": 7.08876618322735e-05, "loss": 0.8567, "step": 12801 }, { "epoch": 2.379111689277086, "grad_norm": 0.7582051753997803, "learning_rate": 7.087368559822169e-05, "loss": 0.9553, "step": 12802 }, { "epoch": 2.379297528340457, "grad_norm": 0.7837277054786682, "learning_rate": 7.085970998582112e-05, "loss": 0.9817, "step": 12803 }, { "epoch": 2.3794833674038283, "grad_norm": 1.0931239128112793, "learning_rate": 7.084573499536999e-05, "loss": 0.8585, "step": 12804 }, { "epoch": 2.3796692064671996, "grad_norm": 0.7584730982780457, "learning_rate": 7.083176062716664e-05, "loss": 0.9564, "step": 12805 }, { "epoch": 2.3798550455305705, "grad_norm": 1.0813666582107544, "learning_rate": 7.08177868815093e-05, "loss": 1.1069, "step": 12806 }, { "epoch": 2.380040884593942, "grad_norm": 0.7736077308654785, "learning_rate": 7.080381375869623e-05, "loss": 1.0861, "step": 12807 }, { "epoch": 2.3802267236573127, "grad_norm": 0.8041399121284485, "learning_rate": 7.078984125902565e-05, "loss": 0.8578, "step": 12808 }, { "epoch": 2.380412562720684, "grad_norm": 0.8879541754722595, "learning_rate": 7.077586938279576e-05, "loss": 0.9983, "step": 12809 }, { "epoch": 2.380598401784055, "grad_norm": 0.8086082935333252, "learning_rate": 7.076189813030481e-05, "loss": 0.9249, "step": 12810 }, { "epoch": 2.3807842408474262, "grad_norm": 0.7989996671676636, "learning_rate": 7.074792750185094e-05, "loss": 0.9374, 
"step": 12811 }, { "epoch": 2.380970079910797, "grad_norm": 0.7615584135055542, "learning_rate": 7.073395749773238e-05, "loss": 0.695, "step": 12812 }, { "epoch": 2.3811559189741685, "grad_norm": 0.9058272838592529, "learning_rate": 7.071998811824727e-05, "loss": 0.9893, "step": 12813 }, { "epoch": 2.3813417580375393, "grad_norm": 0.9969415664672852, "learning_rate": 7.070601936369376e-05, "loss": 1.0421, "step": 12814 }, { "epoch": 2.3815275971009107, "grad_norm": 1.01531982421875, "learning_rate": 7.069205123437001e-05, "loss": 1.0108, "step": 12815 }, { "epoch": 2.3817134361642815, "grad_norm": 0.9354668259620667, "learning_rate": 7.067808373057414e-05, "loss": 0.9271, "step": 12816 }, { "epoch": 2.381899275227653, "grad_norm": 0.8690527081489563, "learning_rate": 7.066411685260424e-05, "loss": 0.8625, "step": 12817 }, { "epoch": 2.3820851142910238, "grad_norm": 0.7677668929100037, "learning_rate": 7.065015060075845e-05, "loss": 1.0756, "step": 12818 }, { "epoch": 2.382270953354395, "grad_norm": 0.6953485608100891, "learning_rate": 7.063618497533479e-05, "loss": 0.8802, "step": 12819 }, { "epoch": 2.3824567924177664, "grad_norm": 0.8176363110542297, "learning_rate": 7.062221997663141e-05, "loss": 1.1312, "step": 12820 }, { "epoch": 2.3826426314811373, "grad_norm": 0.9096832275390625, "learning_rate": 7.060825560494629e-05, "loss": 1.027, "step": 12821 }, { "epoch": 2.3828284705445086, "grad_norm": 0.7821842432022095, "learning_rate": 7.059429186057752e-05, "loss": 1.0861, "step": 12822 }, { "epoch": 2.3830143096078795, "grad_norm": 0.9728854894638062, "learning_rate": 7.058032874382314e-05, "loss": 0.978, "step": 12823 }, { "epoch": 2.383200148671251, "grad_norm": 0.8028355240821838, "learning_rate": 7.056636625498117e-05, "loss": 0.7403, "step": 12824 }, { "epoch": 2.3833859877346217, "grad_norm": 0.8021082878112793, "learning_rate": 7.055240439434958e-05, "loss": 0.9734, "step": 12825 }, { "epoch": 2.383571826797993, "grad_norm": 0.7374281287193298, 
"learning_rate": 7.053844316222639e-05, "loss": 0.8877, "step": 12826 }, { "epoch": 2.383757665861364, "grad_norm": 0.8111301064491272, "learning_rate": 7.052448255890957e-05, "loss": 1.1204, "step": 12827 }, { "epoch": 2.3839435049247353, "grad_norm": 0.7399295568466187, "learning_rate": 7.051052258469713e-05, "loss": 0.9523, "step": 12828 }, { "epoch": 2.384129343988106, "grad_norm": 0.8343197703361511, "learning_rate": 7.049656323988692e-05, "loss": 1.0284, "step": 12829 }, { "epoch": 2.3843151830514775, "grad_norm": 0.87831711769104, "learning_rate": 7.048260452477699e-05, "loss": 0.914, "step": 12830 }, { "epoch": 2.3845010221148484, "grad_norm": 0.7254018783569336, "learning_rate": 7.046864643966517e-05, "loss": 0.7349, "step": 12831 }, { "epoch": 2.3846868611782197, "grad_norm": 0.7538672685623169, "learning_rate": 7.04546889848494e-05, "loss": 0.9437, "step": 12832 }, { "epoch": 2.3848727002415906, "grad_norm": 1.0284329652786255, "learning_rate": 7.044073216062763e-05, "loss": 1.0807, "step": 12833 }, { "epoch": 2.385058539304962, "grad_norm": 0.7986030578613281, "learning_rate": 7.04267759672977e-05, "loss": 0.8098, "step": 12834 }, { "epoch": 2.3852443783683333, "grad_norm": 0.958909273147583, "learning_rate": 7.041282040515748e-05, "loss": 1.0232, "step": 12835 }, { "epoch": 2.385430217431704, "grad_norm": 0.6816046833992004, "learning_rate": 7.039886547450485e-05, "loss": 0.659, "step": 12836 }, { "epoch": 2.385616056495075, "grad_norm": 0.8010440468788147, "learning_rate": 7.038491117563761e-05, "loss": 0.9317, "step": 12837 }, { "epoch": 2.3858018955584464, "grad_norm": 0.8590589761734009, "learning_rate": 7.037095750885366e-05, "loss": 0.8401, "step": 12838 }, { "epoch": 2.3859877346218177, "grad_norm": 0.8526574969291687, "learning_rate": 7.035700447445075e-05, "loss": 0.9058, "step": 12839 }, { "epoch": 2.3861735736851886, "grad_norm": 0.7676329016685486, "learning_rate": 7.034305207272673e-05, "loss": 0.757, "step": 12840 }, { "epoch": 
2.38635941274856, "grad_norm": 0.7403881549835205, "learning_rate": 7.032910030397934e-05, "loss": 0.8974, "step": 12841 }, { "epoch": 2.386545251811931, "grad_norm": 0.7914829850196838, "learning_rate": 7.031514916850641e-05, "loss": 0.9674, "step": 12842 }, { "epoch": 2.386731090875302, "grad_norm": 0.8582102656364441, "learning_rate": 7.030119866660564e-05, "loss": 0.8858, "step": 12843 }, { "epoch": 2.386916929938673, "grad_norm": 0.737777590751648, "learning_rate": 7.028724879857484e-05, "loss": 0.9357, "step": 12844 }, { "epoch": 2.3871027690020443, "grad_norm": 0.7970533967018127, "learning_rate": 7.027329956471174e-05, "loss": 0.759, "step": 12845 }, { "epoch": 2.387288608065415, "grad_norm": 0.8642406463623047, "learning_rate": 7.025935096531407e-05, "loss": 0.9972, "step": 12846 }, { "epoch": 2.3874744471287865, "grad_norm": 0.8043685555458069, "learning_rate": 7.024540300067947e-05, "loss": 0.8481, "step": 12847 }, { "epoch": 2.3876602861921574, "grad_norm": 0.6988628506660461, "learning_rate": 7.023145567110572e-05, "loss": 0.9632, "step": 12848 }, { "epoch": 2.3878461252555288, "grad_norm": 0.7697068452835083, "learning_rate": 7.021750897689043e-05, "loss": 1.008, "step": 12849 }, { "epoch": 2.3880319643189, "grad_norm": 0.7987003326416016, "learning_rate": 7.020356291833134e-05, "loss": 0.806, "step": 12850 }, { "epoch": 2.388217803382271, "grad_norm": 0.7947388291358948, "learning_rate": 7.018961749572604e-05, "loss": 0.9329, "step": 12851 }, { "epoch": 2.388403642445642, "grad_norm": 0.7829242944717407, "learning_rate": 7.017567270937221e-05, "loss": 0.8627, "step": 12852 }, { "epoch": 2.388589481509013, "grad_norm": 0.7357932925224304, "learning_rate": 7.016172855956743e-05, "loss": 0.9987, "step": 12853 }, { "epoch": 2.3887753205723845, "grad_norm": 0.7504284381866455, "learning_rate": 7.014778504660938e-05, "loss": 0.8692, "step": 12854 }, { "epoch": 2.3889611596357554, "grad_norm": 0.7871151566505432, "learning_rate": 7.01338421707956e-05, 
"loss": 0.8557, "step": 12855 }, { "epoch": 2.3891469986991267, "grad_norm": 0.8007663488388062, "learning_rate": 7.011989993242376e-05, "loss": 0.8101, "step": 12856 }, { "epoch": 2.3893328377624976, "grad_norm": 0.7916004061698914, "learning_rate": 7.010595833179134e-05, "loss": 0.8924, "step": 12857 }, { "epoch": 2.389518676825869, "grad_norm": 0.8495494723320007, "learning_rate": 7.009201736919597e-05, "loss": 1.0092, "step": 12858 }, { "epoch": 2.38970451588924, "grad_norm": 0.7786046862602234, "learning_rate": 7.007807704493514e-05, "loss": 0.7221, "step": 12859 }, { "epoch": 2.389890354952611, "grad_norm": 0.9809713959693909, "learning_rate": 7.006413735930642e-05, "loss": 1.0681, "step": 12860 }, { "epoch": 2.390076194015982, "grad_norm": 0.7984169125556946, "learning_rate": 7.005019831260731e-05, "loss": 0.8321, "step": 12861 }, { "epoch": 2.3902620330793534, "grad_norm": 0.82598876953125, "learning_rate": 7.003625990513534e-05, "loss": 1.0514, "step": 12862 }, { "epoch": 2.3904478721427243, "grad_norm": 0.7637234926223755, "learning_rate": 7.002232213718795e-05, "loss": 1.1037, "step": 12863 }, { "epoch": 2.3906337112060956, "grad_norm": 0.7955513596534729, "learning_rate": 7.000838500906263e-05, "loss": 0.9274, "step": 12864 }, { "epoch": 2.3908195502694665, "grad_norm": 0.894230306148529, "learning_rate": 6.99944485210569e-05, "loss": 0.914, "step": 12865 }, { "epoch": 2.391005389332838, "grad_norm": 0.9638434052467346, "learning_rate": 6.998051267346819e-05, "loss": 0.9538, "step": 12866 }, { "epoch": 2.3911912283962087, "grad_norm": 0.7865784764289856, "learning_rate": 6.99665774665939e-05, "loss": 0.8582, "step": 12867 }, { "epoch": 2.39137706745958, "grad_norm": 0.7800189256668091, "learning_rate": 6.99526429007315e-05, "loss": 0.8443, "step": 12868 }, { "epoch": 2.3915629065229513, "grad_norm": 0.9118089079856873, "learning_rate": 6.993870897617834e-05, "loss": 0.9001, "step": 12869 }, { "epoch": 2.3917487455863222, "grad_norm": 0.8228194713592529, 
"learning_rate": 6.992477569323187e-05, "loss": 0.7806, "step": 12870 }, { "epoch": 2.3919345846496936, "grad_norm": 0.7540860772132874, "learning_rate": 6.991084305218943e-05, "loss": 0.9232, "step": 12871 }, { "epoch": 2.3921204237130644, "grad_norm": 0.8330655694007874, "learning_rate": 6.989691105334844e-05, "loss": 0.8367, "step": 12872 }, { "epoch": 2.3923062627764358, "grad_norm": 1.076634407043457, "learning_rate": 6.988297969700621e-05, "loss": 1.0661, "step": 12873 }, { "epoch": 2.3924921018398067, "grad_norm": 0.6492782831192017, "learning_rate": 6.986904898346006e-05, "loss": 0.6156, "step": 12874 }, { "epoch": 2.392677940903178, "grad_norm": 0.7901709675788879, "learning_rate": 6.985511891300737e-05, "loss": 1.0402, "step": 12875 }, { "epoch": 2.392863779966549, "grad_norm": 0.9432883858680725, "learning_rate": 6.984118948594547e-05, "loss": 1.0928, "step": 12876 }, { "epoch": 2.39304961902992, "grad_norm": 0.8093352913856506, "learning_rate": 6.982726070257161e-05, "loss": 1.0158, "step": 12877 }, { "epoch": 2.393235458093291, "grad_norm": 0.8153324723243713, "learning_rate": 6.98133325631831e-05, "loss": 0.8456, "step": 12878 }, { "epoch": 2.3934212971566624, "grad_norm": 0.8256557583808899, "learning_rate": 6.97994050680772e-05, "loss": 1.0952, "step": 12879 }, { "epoch": 2.3936071362200333, "grad_norm": 0.8582428693771362, "learning_rate": 6.978547821755118e-05, "loss": 0.9545, "step": 12880 }, { "epoch": 2.3937929752834046, "grad_norm": 0.7831152677536011, "learning_rate": 6.977155201190226e-05, "loss": 0.8716, "step": 12881 }, { "epoch": 2.3939788143467755, "grad_norm": 0.893572986125946, "learning_rate": 6.975762645142771e-05, "loss": 0.9879, "step": 12882 }, { "epoch": 2.394164653410147, "grad_norm": 0.8478510975837708, "learning_rate": 6.974370153642468e-05, "loss": 0.9668, "step": 12883 }, { "epoch": 2.394350492473518, "grad_norm": 0.831895112991333, "learning_rate": 6.972977726719048e-05, "loss": 0.7504, "step": 12884 }, { "epoch": 
2.394536331536889, "grad_norm": 0.865291953086853, "learning_rate": 6.971585364402216e-05, "loss": 1.0494, "step": 12885 }, { "epoch": 2.39472217060026, "grad_norm": 0.809294581413269, "learning_rate": 6.970193066721701e-05, "loss": 0.9431, "step": 12886 }, { "epoch": 2.3949080096636313, "grad_norm": 0.7376503348350525, "learning_rate": 6.968800833707218e-05, "loss": 0.7132, "step": 12887 }, { "epoch": 2.3950938487270026, "grad_norm": 0.9075321555137634, "learning_rate": 6.967408665388479e-05, "loss": 1.1907, "step": 12888 }, { "epoch": 2.3952796877903735, "grad_norm": 0.8884609937667847, "learning_rate": 6.966016561795196e-05, "loss": 0.8951, "step": 12889 }, { "epoch": 2.395465526853745, "grad_norm": 0.8289088606834412, "learning_rate": 6.964624522957087e-05, "loss": 1.1564, "step": 12890 }, { "epoch": 2.3956513659171157, "grad_norm": 1.029945969581604, "learning_rate": 6.963232548903853e-05, "loss": 0.9681, "step": 12891 }, { "epoch": 2.395837204980487, "grad_norm": 0.8445291519165039, "learning_rate": 6.961840639665214e-05, "loss": 0.7358, "step": 12892 }, { "epoch": 2.396023044043858, "grad_norm": 0.8968092799186707, "learning_rate": 6.960448795270868e-05, "loss": 1.0552, "step": 12893 }, { "epoch": 2.3962088831072292, "grad_norm": 0.8325434327125549, "learning_rate": 6.959057015750532e-05, "loss": 0.7296, "step": 12894 }, { "epoch": 2.3963947221706, "grad_norm": 0.8439461588859558, "learning_rate": 6.957665301133897e-05, "loss": 0.769, "step": 12895 }, { "epoch": 2.3965805612339715, "grad_norm": 1.0753918886184692, "learning_rate": 6.956273651450682e-05, "loss": 0.8952, "step": 12896 }, { "epoch": 2.3967664002973423, "grad_norm": 0.8330729007720947, "learning_rate": 6.95488206673058e-05, "loss": 0.9998, "step": 12897 }, { "epoch": 2.3969522393607137, "grad_norm": 0.824369490146637, "learning_rate": 6.953490547003298e-05, "loss": 1.0072, "step": 12898 }, { "epoch": 2.3971380784240846, "grad_norm": 0.7380400896072388, "learning_rate": 6.95209909229853e-05, 
"loss": 0.8426, "step": 12899 }, { "epoch": 2.397323917487456, "grad_norm": 0.8798367977142334, "learning_rate": 6.950707702645978e-05, "loss": 1.0807, "step": 12900 }, { "epoch": 2.3975097565508268, "grad_norm": 0.8785592913627625, "learning_rate": 6.949316378075334e-05, "loss": 0.7244, "step": 12901 }, { "epoch": 2.397695595614198, "grad_norm": 0.7741357684135437, "learning_rate": 6.9479251186163e-05, "loss": 1.1072, "step": 12902 }, { "epoch": 2.3978814346775694, "grad_norm": 0.8932114243507385, "learning_rate": 6.946533924298566e-05, "loss": 0.9851, "step": 12903 }, { "epoch": 2.3980672737409403, "grad_norm": 0.8525800108909607, "learning_rate": 6.945142795151827e-05, "loss": 0.9463, "step": 12904 }, { "epoch": 2.3982531128043116, "grad_norm": 0.9864116311073303, "learning_rate": 6.943751731205769e-05, "loss": 0.9916, "step": 12905 }, { "epoch": 2.3984389518676825, "grad_norm": 0.8220023512840271, "learning_rate": 6.942360732490084e-05, "loss": 1.0048, "step": 12906 }, { "epoch": 2.398624790931054, "grad_norm": 0.956392228603363, "learning_rate": 6.940969799034465e-05, "loss": 1.0739, "step": 12907 }, { "epoch": 2.3988106299944247, "grad_norm": 0.8336243629455566, "learning_rate": 6.939578930868598e-05, "loss": 1.1104, "step": 12908 }, { "epoch": 2.398996469057796, "grad_norm": 0.7887905240058899, "learning_rate": 6.938188128022164e-05, "loss": 0.9338, "step": 12909 }, { "epoch": 2.399182308121167, "grad_norm": 0.7147557139396667, "learning_rate": 6.936797390524852e-05, "loss": 0.7779, "step": 12910 }, { "epoch": 2.3993681471845383, "grad_norm": 0.7383899688720703, "learning_rate": 6.935406718406342e-05, "loss": 0.8503, "step": 12911 }, { "epoch": 2.399553986247909, "grad_norm": 0.8463869094848633, "learning_rate": 6.934016111696318e-05, "loss": 0.9774, "step": 12912 }, { "epoch": 2.3997398253112805, "grad_norm": 0.769568681716919, "learning_rate": 6.932625570424457e-05, "loss": 0.9911, "step": 12913 }, { "epoch": 2.3999256643746514, "grad_norm": 
0.772621750831604, "learning_rate": 6.93123509462044e-05, "loss": 0.8799, "step": 12914 }, { "epoch": 2.4001115034380227, "grad_norm": 0.7122842073440552, "learning_rate": 6.929844684313945e-05, "loss": 0.7471, "step": 12915 }, { "epoch": 2.4002973425013936, "grad_norm": 0.7505543828010559, "learning_rate": 6.928454339534642e-05, "loss": 1.0051, "step": 12916 }, { "epoch": 2.400483181564765, "grad_norm": 0.9213287830352783, "learning_rate": 6.927064060312212e-05, "loss": 0.9575, "step": 12917 }, { "epoch": 2.4006690206281363, "grad_norm": 1.6840814352035522, "learning_rate": 6.925673846676329e-05, "loss": 1.3871, "step": 12918 }, { "epoch": 2.400854859691507, "grad_norm": 0.835517406463623, "learning_rate": 6.92428369865666e-05, "loss": 0.924, "step": 12919 }, { "epoch": 2.4010406987548785, "grad_norm": 0.7431739568710327, "learning_rate": 6.92289361628288e-05, "loss": 0.8386, "step": 12920 }, { "epoch": 2.4012265378182494, "grad_norm": 0.730377197265625, "learning_rate": 6.921503599584652e-05, "loss": 0.8848, "step": 12921 }, { "epoch": 2.4014123768816207, "grad_norm": 0.7416588664054871, "learning_rate": 6.920113648591649e-05, "loss": 0.733, "step": 12922 }, { "epoch": 2.4015982159449916, "grad_norm": 0.7663400769233704, "learning_rate": 6.918723763333532e-05, "loss": 0.9261, "step": 12923 }, { "epoch": 2.401784055008363, "grad_norm": 0.9053961038589478, "learning_rate": 6.917333943839972e-05, "loss": 0.7423, "step": 12924 }, { "epoch": 2.401969894071734, "grad_norm": 0.9448252320289612, "learning_rate": 6.915944190140628e-05, "loss": 0.7936, "step": 12925 }, { "epoch": 2.402155733135105, "grad_norm": 0.8270347118377686, "learning_rate": 6.914554502265162e-05, "loss": 1.041, "step": 12926 }, { "epoch": 2.402341572198476, "grad_norm": 0.7604723572731018, "learning_rate": 6.913164880243231e-05, "loss": 0.9407, "step": 12927 }, { "epoch": 2.4025274112618473, "grad_norm": 0.8789168000221252, "learning_rate": 6.911775324104504e-05, "loss": 0.9699, "step": 12928 }, { 
"epoch": 2.402713250325218, "grad_norm": 0.777075469493866, "learning_rate": 6.910385833878632e-05, "loss": 0.9532, "step": 12929 }, { "epoch": 2.4028990893885895, "grad_norm": 0.6907746195793152, "learning_rate": 6.908996409595273e-05, "loss": 0.9357, "step": 12930 }, { "epoch": 2.4030849284519604, "grad_norm": 0.7531406283378601, "learning_rate": 6.907607051284079e-05, "loss": 0.7451, "step": 12931 }, { "epoch": 2.4032707675153318, "grad_norm": 0.8290823101997375, "learning_rate": 6.906217758974709e-05, "loss": 0.9023, "step": 12932 }, { "epoch": 2.403456606578703, "grad_norm": 0.8053004145622253, "learning_rate": 6.90482853269681e-05, "loss": 0.9673, "step": 12933 }, { "epoch": 2.403642445642074, "grad_norm": 0.795650839805603, "learning_rate": 6.903439372480037e-05, "loss": 0.7834, "step": 12934 }, { "epoch": 2.403828284705445, "grad_norm": 0.7404899597167969, "learning_rate": 6.902050278354033e-05, "loss": 0.7151, "step": 12935 }, { "epoch": 2.404014123768816, "grad_norm": 0.7955520749092102, "learning_rate": 6.900661250348452e-05, "loss": 0.9404, "step": 12936 }, { "epoch": 2.4041999628321875, "grad_norm": 0.8237014412879944, "learning_rate": 6.89927228849293e-05, "loss": 0.8605, "step": 12937 }, { "epoch": 2.4043858018955584, "grad_norm": 0.8399513363838196, "learning_rate": 6.897883392817128e-05, "loss": 0.7486, "step": 12938 }, { "epoch": 2.4045716409589297, "grad_norm": 0.6245372295379639, "learning_rate": 6.89649456335068e-05, "loss": 0.5818, "step": 12939 }, { "epoch": 2.4047574800223006, "grad_norm": 0.7267877459526062, "learning_rate": 6.895105800123231e-05, "loss": 0.7855, "step": 12940 }, { "epoch": 2.404943319085672, "grad_norm": 0.7312634587287903, "learning_rate": 6.893717103164419e-05, "loss": 0.9549, "step": 12941 }, { "epoch": 2.405129158149043, "grad_norm": 0.7766819596290588, "learning_rate": 6.892328472503887e-05, "loss": 0.784, "step": 12942 }, { "epoch": 2.405314997212414, "grad_norm": 0.738930881023407, "learning_rate": 
6.890939908171268e-05, "loss": 0.7427, "step": 12943 }, { "epoch": 2.405500836275785, "grad_norm": 0.7638840079307556, "learning_rate": 6.889551410196203e-05, "loss": 0.973, "step": 12944 }, { "epoch": 2.4056866753391564, "grad_norm": 0.7316983938217163, "learning_rate": 6.888162978608325e-05, "loss": 0.9748, "step": 12945 }, { "epoch": 2.4058725144025273, "grad_norm": 0.8835756182670593, "learning_rate": 6.886774613437266e-05, "loss": 0.7174, "step": 12946 }, { "epoch": 2.4060583534658986, "grad_norm": 0.7415609955787659, "learning_rate": 6.885386314712663e-05, "loss": 0.8063, "step": 12947 }, { "epoch": 2.4062441925292695, "grad_norm": 0.7552850842475891, "learning_rate": 6.883998082464142e-05, "loss": 0.8563, "step": 12948 }, { "epoch": 2.406430031592641, "grad_norm": 0.7759203314781189, "learning_rate": 6.882609916721335e-05, "loss": 0.7608, "step": 12949 }, { "epoch": 2.4066158706560117, "grad_norm": 0.7742679715156555, "learning_rate": 6.881221817513873e-05, "loss": 1.0519, "step": 12950 }, { "epoch": 2.406801709719383, "grad_norm": 0.8516238331794739, "learning_rate": 6.879833784871374e-05, "loss": 0.9311, "step": 12951 }, { "epoch": 2.4069875487827543, "grad_norm": 0.6422767639160156, "learning_rate": 6.878445818823473e-05, "loss": 0.7316, "step": 12952 }, { "epoch": 2.4071733878461252, "grad_norm": 0.7644221782684326, "learning_rate": 6.877057919399788e-05, "loss": 0.7009, "step": 12953 }, { "epoch": 2.4073592269094966, "grad_norm": 0.7887259721755981, "learning_rate": 6.875670086629942e-05, "loss": 0.9655, "step": 12954 }, { "epoch": 2.4075450659728674, "grad_norm": 0.9073466062545776, "learning_rate": 6.874282320543557e-05, "loss": 1.0732, "step": 12955 }, { "epoch": 2.4077309050362388, "grad_norm": 0.8829473853111267, "learning_rate": 6.87289462117025e-05, "loss": 1.0061, "step": 12956 }, { "epoch": 2.4079167440996097, "grad_norm": 0.7792870998382568, "learning_rate": 6.871506988539644e-05, "loss": 0.7244, "step": 12957 }, { "epoch": 2.408102583162981, 
"grad_norm": 0.9756807088851929, "learning_rate": 6.87011942268135e-05, "loss": 1.0455, "step": 12958 }, { "epoch": 2.408288422226352, "grad_norm": 0.9650059342384338, "learning_rate": 6.868731923624985e-05, "loss": 1.1628, "step": 12959 }, { "epoch": 2.408474261289723, "grad_norm": 0.8020375370979309, "learning_rate": 6.867344491400168e-05, "loss": 1.0403, "step": 12960 }, { "epoch": 2.408660100353094, "grad_norm": 0.9928075075149536, "learning_rate": 6.865957126036502e-05, "loss": 0.9012, "step": 12961 }, { "epoch": 2.4088459394164654, "grad_norm": 0.6184268593788147, "learning_rate": 6.864569827563607e-05, "loss": 0.5636, "step": 12962 }, { "epoch": 2.4090317784798363, "grad_norm": 0.893981397151947, "learning_rate": 6.863182596011087e-05, "loss": 1.0563, "step": 12963 }, { "epoch": 2.4092176175432076, "grad_norm": 0.8809736967086792, "learning_rate": 6.86179543140855e-05, "loss": 0.9638, "step": 12964 }, { "epoch": 2.4094034566065785, "grad_norm": 0.9104132652282715, "learning_rate": 6.860408333785606e-05, "loss": 0.8908, "step": 12965 }, { "epoch": 2.40958929566995, "grad_norm": 0.7690507769584656, "learning_rate": 6.859021303171857e-05, "loss": 0.8789, "step": 12966 }, { "epoch": 2.409775134733321, "grad_norm": 0.7475899457931519, "learning_rate": 6.85763433959691e-05, "loss": 0.9408, "step": 12967 }, { "epoch": 2.409960973796692, "grad_norm": 0.8373811841011047, "learning_rate": 6.856247443090365e-05, "loss": 0.8776, "step": 12968 }, { "epoch": 2.410146812860063, "grad_norm": 0.9541831016540527, "learning_rate": 6.85486061368182e-05, "loss": 0.9184, "step": 12969 }, { "epoch": 2.4103326519234343, "grad_norm": 0.759236752986908, "learning_rate": 6.853473851400882e-05, "loss": 0.7704, "step": 12970 }, { "epoch": 2.4105184909868056, "grad_norm": 0.8918933272361755, "learning_rate": 6.852087156277143e-05, "loss": 0.9457, "step": 12971 }, { "epoch": 2.4107043300501765, "grad_norm": 1.1025477647781372, "learning_rate": 6.850700528340204e-05, "loss": 0.8847, 
"step": 12972 }, { "epoch": 2.410890169113548, "grad_norm": 0.7837464213371277, "learning_rate": 6.849313967619655e-05, "loss": 0.6502, "step": 12973 }, { "epoch": 2.4110760081769187, "grad_norm": 0.7813313007354736, "learning_rate": 6.847927474145095e-05, "loss": 1.0479, "step": 12974 }, { "epoch": 2.41126184724029, "grad_norm": 0.8003170490264893, "learning_rate": 6.846541047946112e-05, "loss": 0.9041, "step": 12975 }, { "epoch": 2.411447686303661, "grad_norm": 0.7718076705932617, "learning_rate": 6.845154689052299e-05, "loss": 0.9193, "step": 12976 }, { "epoch": 2.4116335253670322, "grad_norm": 0.810656726360321, "learning_rate": 6.843768397493246e-05, "loss": 0.9488, "step": 12977 }, { "epoch": 2.411819364430403, "grad_norm": 0.7257324457168579, "learning_rate": 6.84238217329854e-05, "loss": 1.0075, "step": 12978 }, { "epoch": 2.4120052034937745, "grad_norm": 0.770806074142456, "learning_rate": 6.840996016497765e-05, "loss": 0.9499, "step": 12979 }, { "epoch": 2.4121910425571453, "grad_norm": 0.6285768747329712, "learning_rate": 6.839609927120513e-05, "loss": 0.7179, "step": 12980 }, { "epoch": 2.4123768816205167, "grad_norm": 0.9031039476394653, "learning_rate": 6.838223905196362e-05, "loss": 0.9258, "step": 12981 }, { "epoch": 2.412562720683888, "grad_norm": 1.1146539449691772, "learning_rate": 6.836837950754897e-05, "loss": 0.9575, "step": 12982 }, { "epoch": 2.412748559747259, "grad_norm": 0.6355863809585571, "learning_rate": 6.835452063825697e-05, "loss": 0.7045, "step": 12983 }, { "epoch": 2.4129343988106298, "grad_norm": 0.8352602124214172, "learning_rate": 6.834066244438343e-05, "loss": 0.9539, "step": 12984 }, { "epoch": 2.413120237874001, "grad_norm": 0.7984040975570679, "learning_rate": 6.832680492622411e-05, "loss": 0.9267, "step": 12985 }, { "epoch": 2.4133060769373724, "grad_norm": 0.8858015537261963, "learning_rate": 6.831294808407478e-05, "loss": 0.9607, "step": 12986 }, { "epoch": 2.4134919160007433, "grad_norm": 0.818161129951477, 
"learning_rate": 6.829909191823121e-05, "loss": 0.9118, "step": 12987 }, { "epoch": 2.4136777550641146, "grad_norm": 0.9285476207733154, "learning_rate": 6.828523642898911e-05, "loss": 0.8921, "step": 12988 }, { "epoch": 2.4138635941274855, "grad_norm": 0.6942713856697083, "learning_rate": 6.827138161664423e-05, "loss": 0.9212, "step": 12989 }, { "epoch": 2.414049433190857, "grad_norm": 0.8579042553901672, "learning_rate": 6.825752748149224e-05, "loss": 0.8859, "step": 12990 }, { "epoch": 2.4142352722542277, "grad_norm": 0.8150649070739746, "learning_rate": 6.824367402382885e-05, "loss": 0.95, "step": 12991 }, { "epoch": 2.414421111317599, "grad_norm": 0.8967882394790649, "learning_rate": 6.822982124394977e-05, "loss": 0.8815, "step": 12992 }, { "epoch": 2.41460695038097, "grad_norm": 0.6919797658920288, "learning_rate": 6.82159691421506e-05, "loss": 0.7887, "step": 12993 }, { "epoch": 2.4147927894443413, "grad_norm": 0.8287318348884583, "learning_rate": 6.820211771872707e-05, "loss": 0.8545, "step": 12994 }, { "epoch": 2.414978628507712, "grad_norm": 0.7475675940513611, "learning_rate": 6.818826697397474e-05, "loss": 0.9978, "step": 12995 }, { "epoch": 2.4151644675710835, "grad_norm": 0.9870370626449585, "learning_rate": 6.817441690818925e-05, "loss": 0.9869, "step": 12996 }, { "epoch": 2.4153503066344544, "grad_norm": 0.8511314988136292, "learning_rate": 6.816056752166626e-05, "loss": 0.8065, "step": 12997 }, { "epoch": 2.4155361456978257, "grad_norm": 0.8184236884117126, "learning_rate": 6.814671881470128e-05, "loss": 1.0464, "step": 12998 }, { "epoch": 2.4157219847611966, "grad_norm": 0.7355121374130249, "learning_rate": 6.813287078758995e-05, "loss": 0.8323, "step": 12999 }, { "epoch": 2.415907823824568, "grad_norm": 0.859524667263031, "learning_rate": 6.811902344062775e-05, "loss": 0.9679, "step": 13000 }, { "epoch": 2.4160936628879393, "grad_norm": 0.6515125036239624, "learning_rate": 6.810517677411032e-05, "loss": 0.8337, "step": 13001 }, { "epoch": 
2.41627950195131, "grad_norm": 1.004290223121643, "learning_rate": 6.809133078833319e-05, "loss": 1.0798, "step": 13002 }, { "epoch": 2.4164653410146815, "grad_norm": 0.7877489328384399, "learning_rate": 6.807748548359181e-05, "loss": 1.0417, "step": 13003 }, { "epoch": 2.4166511800780524, "grad_norm": 0.6995466351509094, "learning_rate": 6.806364086018174e-05, "loss": 0.9388, "step": 13004 }, { "epoch": 2.4168370191414237, "grad_norm": 0.7970275282859802, "learning_rate": 6.804979691839843e-05, "loss": 0.9569, "step": 13005 }, { "epoch": 2.4170228582047946, "grad_norm": 0.8142886757850647, "learning_rate": 6.803595365853739e-05, "loss": 0.9285, "step": 13006 }, { "epoch": 2.417208697268166, "grad_norm": 2.7952983379364014, "learning_rate": 6.80221110808941e-05, "loss": 1.3252, "step": 13007 }, { "epoch": 2.417394536331537, "grad_norm": 0.7739753723144531, "learning_rate": 6.800826918576392e-05, "loss": 0.856, "step": 13008 }, { "epoch": 2.417580375394908, "grad_norm": 0.6938930749893188, "learning_rate": 6.799442797344236e-05, "loss": 0.8958, "step": 13009 }, { "epoch": 2.417766214458279, "grad_norm": 0.7906118631362915, "learning_rate": 6.79805874442248e-05, "loss": 0.9118, "step": 13010 }, { "epoch": 2.4179520535216503, "grad_norm": 0.9179455637931824, "learning_rate": 6.796674759840662e-05, "loss": 0.9237, "step": 13011 }, { "epoch": 2.418137892585021, "grad_norm": 0.9764026403427124, "learning_rate": 6.79529084362833e-05, "loss": 1.0781, "step": 13012 }, { "epoch": 2.4183237316483925, "grad_norm": 0.7278144955635071, "learning_rate": 6.793906995815013e-05, "loss": 0.7157, "step": 13013 }, { "epoch": 2.4185095707117634, "grad_norm": 0.8127259016036987, "learning_rate": 6.792523216430251e-05, "loss": 0.9512, "step": 13014 }, { "epoch": 2.4186954097751348, "grad_norm": 0.866875171661377, "learning_rate": 6.791139505503574e-05, "loss": 1.025, "step": 13015 }, { "epoch": 2.418881248838506, "grad_norm": 0.7558534145355225, "learning_rate": 6.789755863064519e-05, 
"loss": 1.0285, "step": 13016 }, { "epoch": 2.419067087901877, "grad_norm": 0.8152698278427124, "learning_rate": 6.788372289142618e-05, "loss": 0.8862, "step": 13017 }, { "epoch": 2.419252926965248, "grad_norm": 0.7945411801338196, "learning_rate": 6.786988783767397e-05, "loss": 0.9398, "step": 13018 }, { "epoch": 2.419438766028619, "grad_norm": 0.9634117484092712, "learning_rate": 6.785605346968386e-05, "loss": 0.8342, "step": 13019 }, { "epoch": 2.4196246050919905, "grad_norm": 0.7568840384483337, "learning_rate": 6.784221978775114e-05, "loss": 0.8844, "step": 13020 }, { "epoch": 2.4198104441553614, "grad_norm": 0.9052173495292664, "learning_rate": 6.782838679217102e-05, "loss": 0.8794, "step": 13021 }, { "epoch": 2.4199962832187327, "grad_norm": 0.7473658323287964, "learning_rate": 6.781455448323881e-05, "loss": 0.6503, "step": 13022 }, { "epoch": 2.4201821222821036, "grad_norm": 0.8628986477851868, "learning_rate": 6.78007228612497e-05, "loss": 0.8545, "step": 13023 }, { "epoch": 2.420367961345475, "grad_norm": 0.8227683901786804, "learning_rate": 6.77868919264989e-05, "loss": 1.0456, "step": 13024 }, { "epoch": 2.420553800408846, "grad_norm": 0.7157145738601685, "learning_rate": 6.777306167928161e-05, "loss": 0.6423, "step": 13025 }, { "epoch": 2.420739639472217, "grad_norm": 0.9058414697647095, "learning_rate": 6.775923211989301e-05, "loss": 1.0976, "step": 13026 }, { "epoch": 2.420925478535588, "grad_norm": 0.8798935413360596, "learning_rate": 6.77454032486283e-05, "loss": 1.0868, "step": 13027 }, { "epoch": 2.4211113175989594, "grad_norm": 0.8619986176490784, "learning_rate": 6.773157506578258e-05, "loss": 0.9352, "step": 13028 }, { "epoch": 2.4212971566623303, "grad_norm": 0.8048561811447144, "learning_rate": 6.771774757165103e-05, "loss": 0.9036, "step": 13029 }, { "epoch": 2.4214829957257016, "grad_norm": 0.6234624981880188, "learning_rate": 6.770392076652875e-05, "loss": 0.7566, "step": 13030 }, { "epoch": 2.421668834789073, "grad_norm": 
0.7719530463218689, "learning_rate": 6.769009465071088e-05, "loss": 1.0081, "step": 13031 }, { "epoch": 2.421854673852444, "grad_norm": 0.9368285536766052, "learning_rate": 6.767626922449244e-05, "loss": 0.9753, "step": 13032 }, { "epoch": 2.4220405129158147, "grad_norm": 0.876006007194519, "learning_rate": 6.766244448816859e-05, "loss": 0.9372, "step": 13033 }, { "epoch": 2.422226351979186, "grad_norm": 0.9135903716087341, "learning_rate": 6.764862044203439e-05, "loss": 0.9031, "step": 13034 }, { "epoch": 2.4224121910425573, "grad_norm": 0.8877546787261963, "learning_rate": 6.763479708638485e-05, "loss": 0.9847, "step": 13035 }, { "epoch": 2.4225980301059282, "grad_norm": 0.7598859071731567, "learning_rate": 6.762097442151503e-05, "loss": 0.9687, "step": 13036 }, { "epoch": 2.4227838691692996, "grad_norm": 1.9608190059661865, "learning_rate": 6.760715244771995e-05, "loss": 1.5244, "step": 13037 }, { "epoch": 2.4229697082326704, "grad_norm": 0.7144572734832764, "learning_rate": 6.759333116529461e-05, "loss": 0.8079, "step": 13038 }, { "epoch": 2.4231555472960418, "grad_norm": 0.8622708916664124, "learning_rate": 6.757951057453402e-05, "loss": 0.9636, "step": 13039 }, { "epoch": 2.4233413863594127, "grad_norm": 0.9091222286224365, "learning_rate": 6.756569067573313e-05, "loss": 0.8942, "step": 13040 }, { "epoch": 2.423527225422784, "grad_norm": 0.8898884057998657, "learning_rate": 6.755187146918691e-05, "loss": 0.9219, "step": 13041 }, { "epoch": 2.423713064486155, "grad_norm": 0.8423311114311218, "learning_rate": 6.753805295519028e-05, "loss": 0.9356, "step": 13042 }, { "epoch": 2.423898903549526, "grad_norm": 0.7890492677688599, "learning_rate": 6.752423513403824e-05, "loss": 0.7311, "step": 13043 }, { "epoch": 2.424084742612897, "grad_norm": 0.8877571225166321, "learning_rate": 6.751041800602566e-05, "loss": 0.86, "step": 13044 }, { "epoch": 2.4242705816762684, "grad_norm": 0.7152068018913269, "learning_rate": 6.749660157144746e-05, "loss": 0.9282, "step": 13045 
}, { "epoch": 2.4244564207396393, "grad_norm": 0.7144151329994202, "learning_rate": 6.74827858305985e-05, "loss": 0.7036, "step": 13046 }, { "epoch": 2.4246422598030106, "grad_norm": 0.8931900858879089, "learning_rate": 6.746897078377372e-05, "loss": 1.0599, "step": 13047 }, { "epoch": 2.4248280988663815, "grad_norm": 0.7475230097770691, "learning_rate": 6.745515643126789e-05, "loss": 0.6593, "step": 13048 }, { "epoch": 2.425013937929753, "grad_norm": 0.744490921497345, "learning_rate": 6.744134277337592e-05, "loss": 0.9392, "step": 13049 }, { "epoch": 2.425199776993124, "grad_norm": 0.7806805968284607, "learning_rate": 6.742752981039262e-05, "loss": 1.1194, "step": 13050 }, { "epoch": 2.425385616056495, "grad_norm": 0.787963330745697, "learning_rate": 6.741371754261278e-05, "loss": 0.934, "step": 13051 }, { "epoch": 2.4255714551198664, "grad_norm": 1.080190896987915, "learning_rate": 6.739990597033124e-05, "loss": 0.8801, "step": 13052 }, { "epoch": 2.4257572941832373, "grad_norm": 0.8024685978889465, "learning_rate": 6.738609509384272e-05, "loss": 0.8261, "step": 13053 }, { "epoch": 2.4259431332466086, "grad_norm": 0.7282275557518005, "learning_rate": 6.737228491344207e-05, "loss": 0.9063, "step": 13054 }, { "epoch": 2.4261289723099795, "grad_norm": 0.8026573061943054, "learning_rate": 6.7358475429424e-05, "loss": 0.9185, "step": 13055 }, { "epoch": 2.426314811373351, "grad_norm": 0.8870404362678528, "learning_rate": 6.734466664208324e-05, "loss": 1.0354, "step": 13056 }, { "epoch": 2.4265006504367217, "grad_norm": 0.7788059115409851, "learning_rate": 6.733085855171457e-05, "loss": 0.8882, "step": 13057 }, { "epoch": 2.426686489500093, "grad_norm": 0.8050462007522583, "learning_rate": 6.731705115861262e-05, "loss": 0.8745, "step": 13058 }, { "epoch": 2.426872328563464, "grad_norm": 0.9904711246490479, "learning_rate": 6.730324446307217e-05, "loss": 1.1831, "step": 13059 }, { "epoch": 2.4270581676268352, "grad_norm": 0.8144270181655884, "learning_rate": 
6.728943846538782e-05, "loss": 0.9835, "step": 13060 }, { "epoch": 2.427244006690206, "grad_norm": 0.7658560872077942, "learning_rate": 6.72756331658543e-05, "loss": 0.849, "step": 13061 }, { "epoch": 2.4274298457535775, "grad_norm": 0.7519936561584473, "learning_rate": 6.726182856476621e-05, "loss": 0.8053, "step": 13062 }, { "epoch": 2.4276156848169483, "grad_norm": 0.7167922854423523, "learning_rate": 6.724802466241819e-05, "loss": 0.8097, "step": 13063 }, { "epoch": 2.4278015238803197, "grad_norm": 0.8672042489051819, "learning_rate": 6.723422145910492e-05, "loss": 0.9202, "step": 13064 }, { "epoch": 2.427987362943691, "grad_norm": 0.9349759817123413, "learning_rate": 6.722041895512094e-05, "loss": 1.0381, "step": 13065 }, { "epoch": 2.428173202007062, "grad_norm": 0.8431971073150635, "learning_rate": 6.720661715076087e-05, "loss": 1.1296, "step": 13066 }, { "epoch": 2.4283590410704328, "grad_norm": 0.7165058851242065, "learning_rate": 6.71928160463193e-05, "loss": 0.9111, "step": 13067 }, { "epoch": 2.428544880133804, "grad_norm": 0.9309887290000916, "learning_rate": 6.717901564209075e-05, "loss": 1.0847, "step": 13068 }, { "epoch": 2.4287307191971754, "grad_norm": 0.7920277118682861, "learning_rate": 6.71652159383698e-05, "loss": 1.044, "step": 13069 }, { "epoch": 2.4289165582605463, "grad_norm": 0.9909117221832275, "learning_rate": 6.715141693545098e-05, "loss": 1.2691, "step": 13070 }, { "epoch": 2.4291023973239176, "grad_norm": 1.0344980955123901, "learning_rate": 6.713761863362878e-05, "loss": 0.7816, "step": 13071 }, { "epoch": 2.4292882363872885, "grad_norm": 0.8635357022285461, "learning_rate": 6.712382103319771e-05, "loss": 1.0432, "step": 13072 }, { "epoch": 2.42947407545066, "grad_norm": 0.8340578675270081, "learning_rate": 6.711002413445229e-05, "loss": 0.7909, "step": 13073 }, { "epoch": 2.4296599145140307, "grad_norm": 0.6920415759086609, "learning_rate": 6.709622793768691e-05, "loss": 0.9075, "step": 13074 }, { "epoch": 2.429845753577402, 
"grad_norm": 0.7110925316810608, "learning_rate": 6.708243244319611e-05, "loss": 0.8178, "step": 13075 }, { "epoch": 2.430031592640773, "grad_norm": 0.8715147376060486, "learning_rate": 6.706863765127429e-05, "loss": 0.9181, "step": 13076 }, { "epoch": 2.4302174317041443, "grad_norm": 0.7485066652297974, "learning_rate": 6.705484356221591e-05, "loss": 0.9947, "step": 13077 }, { "epoch": 2.430403270767515, "grad_norm": 0.83921217918396, "learning_rate": 6.704105017631534e-05, "loss": 0.8474, "step": 13078 }, { "epoch": 2.4305891098308865, "grad_norm": 0.8253395557403564, "learning_rate": 6.702725749386703e-05, "loss": 0.9027, "step": 13079 }, { "epoch": 2.4307749488942574, "grad_norm": 0.8697446584701538, "learning_rate": 6.701346551516528e-05, "loss": 1.0753, "step": 13080 }, { "epoch": 2.4309607879576287, "grad_norm": 0.7758882641792297, "learning_rate": 6.699967424050453e-05, "loss": 1.0756, "step": 13081 }, { "epoch": 2.4311466270209996, "grad_norm": 0.7584431767463684, "learning_rate": 6.698588367017909e-05, "loss": 0.9126, "step": 13082 }, { "epoch": 2.431332466084371, "grad_norm": 0.7834359407424927, "learning_rate": 6.697209380448333e-05, "loss": 0.8498, "step": 13083 }, { "epoch": 2.4315183051477423, "grad_norm": 0.6805732250213623, "learning_rate": 6.69583046437115e-05, "loss": 0.7568, "step": 13084 }, { "epoch": 2.431704144211113, "grad_norm": 0.9775819778442383, "learning_rate": 6.694451618815797e-05, "loss": 0.9638, "step": 13085 }, { "epoch": 2.4318899832744845, "grad_norm": 1.0010170936584473, "learning_rate": 6.693072843811703e-05, "loss": 1.1458, "step": 13086 }, { "epoch": 2.4320758223378554, "grad_norm": 0.8021188974380493, "learning_rate": 6.691694139388295e-05, "loss": 1.0033, "step": 13087 }, { "epoch": 2.4322616614012267, "grad_norm": 0.834514856338501, "learning_rate": 6.690315505574997e-05, "loss": 0.8317, "step": 13088 }, { "epoch": 2.4324475004645976, "grad_norm": 0.8626028299331665, "learning_rate": 6.688936942401237e-05, "loss": 0.9276, 
"step": 13089 }, { "epoch": 2.432633339527969, "grad_norm": 0.9956173300743103, "learning_rate": 6.687558449896434e-05, "loss": 0.7991, "step": 13090 }, { "epoch": 2.43281917859134, "grad_norm": 0.7554524540901184, "learning_rate": 6.686180028090014e-05, "loss": 0.8468, "step": 13091 }, { "epoch": 2.433005017654711, "grad_norm": 0.9718710780143738, "learning_rate": 6.684801677011392e-05, "loss": 1.2913, "step": 13092 }, { "epoch": 2.433190856718082, "grad_norm": 0.7081608772277832, "learning_rate": 6.683423396689991e-05, "loss": 0.6683, "step": 13093 }, { "epoch": 2.4333766957814533, "grad_norm": 0.7922672033309937, "learning_rate": 6.682045187155223e-05, "loss": 0.8012, "step": 13094 }, { "epoch": 2.433562534844824, "grad_norm": 0.7638914585113525, "learning_rate": 6.680667048436506e-05, "loss": 0.8774, "step": 13095 }, { "epoch": 2.4337483739081955, "grad_norm": 0.931211531162262, "learning_rate": 6.679288980563256e-05, "loss": 0.9557, "step": 13096 }, { "epoch": 2.4339342129715664, "grad_norm": 0.8707103729248047, "learning_rate": 6.677910983564888e-05, "loss": 0.908, "step": 13097 }, { "epoch": 2.4341200520349378, "grad_norm": 0.8395317196846008, "learning_rate": 6.676533057470806e-05, "loss": 0.9026, "step": 13098 }, { "epoch": 2.434305891098309, "grad_norm": 0.7544784545898438, "learning_rate": 6.675155202310424e-05, "loss": 0.9046, "step": 13099 }, { "epoch": 2.43449173016168, "grad_norm": 0.9859960675239563, "learning_rate": 6.673777418113147e-05, "loss": 0.8512, "step": 13100 }, { "epoch": 2.4346775692250513, "grad_norm": 0.7868154048919678, "learning_rate": 6.672399704908384e-05, "loss": 0.7508, "step": 13101 }, { "epoch": 2.434863408288422, "grad_norm": 0.866687536239624, "learning_rate": 6.671022062725539e-05, "loss": 0.9324, "step": 13102 }, { "epoch": 2.4350492473517935, "grad_norm": 0.9121086597442627, "learning_rate": 6.669644491594015e-05, "loss": 1.1329, "step": 13103 }, { "epoch": 2.4352350864151644, "grad_norm": 0.811203122138977, 
"learning_rate": 6.668266991543212e-05, "loss": 0.8548, "step": 13104 }, { "epoch": 2.4354209254785357, "grad_norm": 0.830508291721344, "learning_rate": 6.666889562602532e-05, "loss": 0.9622, "step": 13105 }, { "epoch": 2.4356067645419066, "grad_norm": 0.9460077881813049, "learning_rate": 6.665512204801376e-05, "loss": 0.9933, "step": 13106 }, { "epoch": 2.435792603605278, "grad_norm": 1.0581727027893066, "learning_rate": 6.664134918169143e-05, "loss": 0.9589, "step": 13107 }, { "epoch": 2.435978442668649, "grad_norm": 0.9143171906471252, "learning_rate": 6.662757702735223e-05, "loss": 0.9227, "step": 13108 }, { "epoch": 2.43616428173202, "grad_norm": 0.8102787733078003, "learning_rate": 6.661380558529013e-05, "loss": 0.8932, "step": 13109 }, { "epoch": 2.436350120795391, "grad_norm": 0.7931554317474365, "learning_rate": 6.660003485579907e-05, "loss": 0.8818, "step": 13110 }, { "epoch": 2.4365359598587624, "grad_norm": 0.886027455329895, "learning_rate": 6.658626483917295e-05, "loss": 0.9567, "step": 13111 }, { "epoch": 2.4367217989221333, "grad_norm": 0.7048546671867371, "learning_rate": 6.657249553570563e-05, "loss": 0.8424, "step": 13112 }, { "epoch": 2.4369076379855046, "grad_norm": 0.8832733035087585, "learning_rate": 6.655872694569107e-05, "loss": 0.966, "step": 13113 }, { "epoch": 2.437093477048876, "grad_norm": 0.87441486120224, "learning_rate": 6.654495906942309e-05, "loss": 0.8732, "step": 13114 }, { "epoch": 2.437279316112247, "grad_norm": 0.8636944890022278, "learning_rate": 6.653119190719554e-05, "loss": 0.9355, "step": 13115 }, { "epoch": 2.4374651551756177, "grad_norm": 0.7891425490379333, "learning_rate": 6.651742545930222e-05, "loss": 0.9178, "step": 13116 }, { "epoch": 2.437650994238989, "grad_norm": 0.9344954490661621, "learning_rate": 6.650365972603705e-05, "loss": 1.0686, "step": 13117 }, { "epoch": 2.4378368333023603, "grad_norm": 1.0192102193832397, "learning_rate": 6.648989470769376e-05, "loss": 1.1044, "step": 13118 }, { "epoch": 
2.4380226723657312, "grad_norm": 0.8289742469787598, "learning_rate": 6.647613040456618e-05, "loss": 1.0682, "step": 13119 }, { "epoch": 2.4382085114291026, "grad_norm": 0.8244996070861816, "learning_rate": 6.646236681694805e-05, "loss": 1.0694, "step": 13120 }, { "epoch": 2.4383943504924734, "grad_norm": 0.857039213180542, "learning_rate": 6.644860394513317e-05, "loss": 0.889, "step": 13121 }, { "epoch": 2.4385801895558448, "grad_norm": 0.971135139465332, "learning_rate": 6.643484178941524e-05, "loss": 1.0055, "step": 13122 }, { "epoch": 2.4387660286192157, "grad_norm": 0.8699045777320862, "learning_rate": 6.642108035008803e-05, "loss": 0.8433, "step": 13123 }, { "epoch": 2.438951867682587, "grad_norm": 1.0612807273864746, "learning_rate": 6.640731962744521e-05, "loss": 0.8707, "step": 13124 }, { "epoch": 2.439137706745958, "grad_norm": 0.8168467283248901, "learning_rate": 6.639355962178053e-05, "loss": 0.8866, "step": 13125 }, { "epoch": 2.439323545809329, "grad_norm": 0.8367739319801331, "learning_rate": 6.63798003333876e-05, "loss": 0.9637, "step": 13126 }, { "epoch": 2.4395093848727, "grad_norm": 0.7842310667037964, "learning_rate": 6.636604176256019e-05, "loss": 0.903, "step": 13127 }, { "epoch": 2.4396952239360714, "grad_norm": 2.4124133586883545, "learning_rate": 6.635228390959187e-05, "loss": 1.5411, "step": 13128 }, { "epoch": 2.4398810629994423, "grad_norm": 0.7578110098838806, "learning_rate": 6.633852677477635e-05, "loss": 0.881, "step": 13129 }, { "epoch": 2.4400669020628136, "grad_norm": 0.8522405624389648, "learning_rate": 6.632477035840718e-05, "loss": 1.0018, "step": 13130 }, { "epoch": 2.4402527411261845, "grad_norm": 0.6682462692260742, "learning_rate": 6.6311014660778e-05, "loss": 0.81, "step": 13131 }, { "epoch": 2.440438580189556, "grad_norm": 1.0970356464385986, "learning_rate": 6.62972596821824e-05, "loss": 0.7741, "step": 13132 }, { "epoch": 2.440624419252927, "grad_norm": 0.9331226944923401, "learning_rate": 6.628350542291397e-05, "loss": 
0.9969, "step": 13133 }, { "epoch": 2.440810258316298, "grad_norm": 0.7452312707901001, "learning_rate": 6.626975188326624e-05, "loss": 0.7214, "step": 13134 }, { "epoch": 2.4409960973796694, "grad_norm": 0.740334689617157, "learning_rate": 6.62559990635328e-05, "loss": 0.8288, "step": 13135 }, { "epoch": 2.4411819364430403, "grad_norm": 0.8428611755371094, "learning_rate": 6.624224696400711e-05, "loss": 0.9081, "step": 13136 }, { "epoch": 2.4413677755064116, "grad_norm": 0.7663208246231079, "learning_rate": 6.622849558498274e-05, "loss": 0.9875, "step": 13137 }, { "epoch": 2.4415536145697825, "grad_norm": 1.5824609994888306, "learning_rate": 6.621474492675318e-05, "loss": 1.1943, "step": 13138 }, { "epoch": 2.441739453633154, "grad_norm": 0.8739747405052185, "learning_rate": 6.620099498961193e-05, "loss": 1.0732, "step": 13139 }, { "epoch": 2.4419252926965247, "grad_norm": 0.931546688079834, "learning_rate": 6.618724577385241e-05, "loss": 1.037, "step": 13140 }, { "epoch": 2.442111131759896, "grad_norm": 0.7744889259338379, "learning_rate": 6.617349727976814e-05, "loss": 0.8209, "step": 13141 }, { "epoch": 2.442296970823267, "grad_norm": 0.8622698783874512, "learning_rate": 6.615974950765249e-05, "loss": 0.638, "step": 13142 }, { "epoch": 2.4424828098866382, "grad_norm": 1.1232050657272339, "learning_rate": 6.614600245779894e-05, "loss": 1.1343, "step": 13143 }, { "epoch": 2.442668648950009, "grad_norm": 0.8128316402435303, "learning_rate": 6.613225613050085e-05, "loss": 0.9589, "step": 13144 }, { "epoch": 2.4428544880133805, "grad_norm": 0.8051408529281616, "learning_rate": 6.611851052605167e-05, "loss": 1.0296, "step": 13145 }, { "epoch": 2.4430403270767513, "grad_norm": 0.8059528470039368, "learning_rate": 6.610476564474471e-05, "loss": 0.9228, "step": 13146 }, { "epoch": 2.4432261661401227, "grad_norm": 0.8602081537246704, "learning_rate": 6.609102148687333e-05, "loss": 1.0438, "step": 13147 }, { "epoch": 2.443412005203494, "grad_norm": 0.8533175587654114, 
"learning_rate": 6.607727805273094e-05, "loss": 0.979, "step": 13148 }, { "epoch": 2.443597844266865, "grad_norm": 0.7846186757087708, "learning_rate": 6.606353534261085e-05, "loss": 0.9215, "step": 13149 }, { "epoch": 2.4437836833302358, "grad_norm": 0.7387604117393494, "learning_rate": 6.604979335680633e-05, "loss": 0.5422, "step": 13150 }, { "epoch": 2.443969522393607, "grad_norm": 0.8741371631622314, "learning_rate": 6.603605209561075e-05, "loss": 0.9177, "step": 13151 }, { "epoch": 2.4441553614569784, "grad_norm": 0.8448536992073059, "learning_rate": 6.602231155931731e-05, "loss": 0.9546, "step": 13152 }, { "epoch": 2.4443412005203493, "grad_norm": 0.8914835453033447, "learning_rate": 6.600857174821938e-05, "loss": 0.8356, "step": 13153 }, { "epoch": 2.4445270395837206, "grad_norm": 0.7318701148033142, "learning_rate": 6.599483266261009e-05, "loss": 0.855, "step": 13154 }, { "epoch": 2.4447128786470915, "grad_norm": 1.010781168937683, "learning_rate": 6.598109430278279e-05, "loss": 0.7821, "step": 13155 }, { "epoch": 2.444898717710463, "grad_norm": 0.8632301688194275, "learning_rate": 6.596735666903063e-05, "loss": 0.6817, "step": 13156 }, { "epoch": 2.4450845567738337, "grad_norm": 0.9101744294166565, "learning_rate": 6.595361976164683e-05, "loss": 0.8722, "step": 13157 }, { "epoch": 2.445270395837205, "grad_norm": 0.7859084010124207, "learning_rate": 6.593988358092458e-05, "loss": 1.0306, "step": 13158 }, { "epoch": 2.445456234900576, "grad_norm": 0.7916283011436462, "learning_rate": 6.59261481271571e-05, "loss": 0.9579, "step": 13159 }, { "epoch": 2.4456420739639473, "grad_norm": 0.7866398692131042, "learning_rate": 6.59124134006375e-05, "loss": 0.963, "step": 13160 }, { "epoch": 2.445827913027318, "grad_norm": 0.8245794773101807, "learning_rate": 6.589867940165895e-05, "loss": 0.9524, "step": 13161 }, { "epoch": 2.4460137520906895, "grad_norm": 0.7416488528251648, "learning_rate": 6.588494613051455e-05, "loss": 1.0268, "step": 13162 }, { "epoch": 
2.446199591154061, "grad_norm": 0.8672488927841187, "learning_rate": 6.587121358749745e-05, "loss": 0.9448, "step": 13163 }, { "epoch": 2.4463854302174317, "grad_norm": 0.8468174934387207, "learning_rate": 6.585748177290072e-05, "loss": 0.9216, "step": 13164 }, { "epoch": 2.4465712692808026, "grad_norm": 0.6494128704071045, "learning_rate": 6.584375068701746e-05, "loss": 0.6241, "step": 13165 }, { "epoch": 2.446757108344174, "grad_norm": 0.67799973487854, "learning_rate": 6.583002033014071e-05, "loss": 0.7008, "step": 13166 }, { "epoch": 2.4469429474075453, "grad_norm": 0.7948530912399292, "learning_rate": 6.581629070256354e-05, "loss": 0.8765, "step": 13167 }, { "epoch": 2.447128786470916, "grad_norm": 0.641555905342102, "learning_rate": 6.580256180457895e-05, "loss": 0.7714, "step": 13168 }, { "epoch": 2.4473146255342875, "grad_norm": 0.8227325081825256, "learning_rate": 6.578883363648005e-05, "loss": 1.0227, "step": 13169 }, { "epoch": 2.4475004645976584, "grad_norm": 0.7780269384384155, "learning_rate": 6.577510619855977e-05, "loss": 0.9948, "step": 13170 }, { "epoch": 2.4476863036610297, "grad_norm": 0.9374097585678101, "learning_rate": 6.576137949111113e-05, "loss": 1.0773, "step": 13171 }, { "epoch": 2.4478721427244006, "grad_norm": 0.7884427905082703, "learning_rate": 6.574765351442707e-05, "loss": 0.9597, "step": 13172 }, { "epoch": 2.448057981787772, "grad_norm": 0.782262921333313, "learning_rate": 6.573392826880059e-05, "loss": 0.7153, "step": 13173 }, { "epoch": 2.448243820851143, "grad_norm": 0.8168174624443054, "learning_rate": 6.572020375452457e-05, "loss": 0.7016, "step": 13174 }, { "epoch": 2.448429659914514, "grad_norm": 0.8480477929115295, "learning_rate": 6.570647997189201e-05, "loss": 0.823, "step": 13175 }, { "epoch": 2.448615498977885, "grad_norm": 0.8476222157478333, "learning_rate": 6.569275692119576e-05, "loss": 0.7272, "step": 13176 }, { "epoch": 2.4488013380412563, "grad_norm": 0.8779712915420532, "learning_rate": 6.567903460272874e-05, 
"loss": 0.9677, "step": 13177 }, { "epoch": 2.448987177104627, "grad_norm": 0.8764821290969849, "learning_rate": 6.566531301678384e-05, "loss": 1.1981, "step": 13178 }, { "epoch": 2.4491730161679985, "grad_norm": 0.8151456117630005, "learning_rate": 6.565159216365389e-05, "loss": 0.9573, "step": 13179 }, { "epoch": 2.4493588552313694, "grad_norm": 0.8219358325004578, "learning_rate": 6.563787204363177e-05, "loss": 0.8288, "step": 13180 }, { "epoch": 2.4495446942947408, "grad_norm": 0.9229908585548401, "learning_rate": 6.562415265701034e-05, "loss": 1.1221, "step": 13181 }, { "epoch": 2.449730533358112, "grad_norm": 0.8629090189933777, "learning_rate": 6.561043400408234e-05, "loss": 0.9825, "step": 13182 }, { "epoch": 2.449916372421483, "grad_norm": 0.8018071055412292, "learning_rate": 6.559671608514065e-05, "loss": 1.011, "step": 13183 }, { "epoch": 2.4501022114848543, "grad_norm": 0.8450409173965454, "learning_rate": 6.558299890047798e-05, "loss": 0.8215, "step": 13184 }, { "epoch": 2.450288050548225, "grad_norm": 0.7382701635360718, "learning_rate": 6.556928245038716e-05, "loss": 0.8025, "step": 13185 }, { "epoch": 2.4504738896115965, "grad_norm": 0.7963736057281494, "learning_rate": 6.55555667351609e-05, "loss": 0.818, "step": 13186 }, { "epoch": 2.4506597286749674, "grad_norm": 1.4558244943618774, "learning_rate": 6.554185175509197e-05, "loss": 1.3749, "step": 13187 }, { "epoch": 2.4508455677383387, "grad_norm": 1.0389723777770996, "learning_rate": 6.552813751047309e-05, "loss": 1.1356, "step": 13188 }, { "epoch": 2.4510314068017096, "grad_norm": 0.6506426930427551, "learning_rate": 6.551442400159692e-05, "loss": 0.6469, "step": 13189 }, { "epoch": 2.451217245865081, "grad_norm": 0.739996612071991, "learning_rate": 6.550071122875621e-05, "loss": 0.8171, "step": 13190 }, { "epoch": 2.451403084928452, "grad_norm": 0.821476399898529, "learning_rate": 6.548699919224364e-05, "loss": 0.9817, "step": 13191 }, { "epoch": 2.451588923991823, "grad_norm": 
1.0348422527313232, "learning_rate": 6.547328789235183e-05, "loss": 1.2025, "step": 13192 }, { "epoch": 2.451774763055194, "grad_norm": 0.7092103958129883, "learning_rate": 6.545957732937345e-05, "loss": 0.7236, "step": 13193 }, { "epoch": 2.4519606021185654, "grad_norm": 1.1522830724716187, "learning_rate": 6.54458675036011e-05, "loss": 1.1493, "step": 13194 }, { "epoch": 2.4521464411819363, "grad_norm": 0.945174515247345, "learning_rate": 6.543215841532744e-05, "loss": 1.1024, "step": 13195 }, { "epoch": 2.4523322802453076, "grad_norm": 0.9081987738609314, "learning_rate": 6.5418450064845e-05, "loss": 1.2248, "step": 13196 }, { "epoch": 2.452518119308679, "grad_norm": 0.8504126667976379, "learning_rate": 6.54047424524464e-05, "loss": 1.0508, "step": 13197 }, { "epoch": 2.45270395837205, "grad_norm": 0.8007256388664246, "learning_rate": 6.539103557842425e-05, "loss": 0.8587, "step": 13198 }, { "epoch": 2.4528897974354207, "grad_norm": 1.0808576345443726, "learning_rate": 6.537732944307102e-05, "loss": 0.8679, "step": 13199 }, { "epoch": 2.453075636498792, "grad_norm": 0.7359915971755981, "learning_rate": 6.536362404667924e-05, "loss": 0.7308, "step": 13200 }, { "epoch": 2.4532614755621633, "grad_norm": 0.7679325342178345, "learning_rate": 6.534991938954154e-05, "loss": 0.9018, "step": 13201 }, { "epoch": 2.4534473146255342, "grad_norm": 0.8206538558006287, "learning_rate": 6.53362154719503e-05, "loss": 0.7636, "step": 13202 }, { "epoch": 2.4536331536889056, "grad_norm": 0.9894808530807495, "learning_rate": 6.53225122941981e-05, "loss": 0.9901, "step": 13203 }, { "epoch": 2.4538189927522764, "grad_norm": 0.8593584895133972, "learning_rate": 6.530880985657733e-05, "loss": 0.7279, "step": 13204 }, { "epoch": 2.4540048318156478, "grad_norm": 1.3301892280578613, "learning_rate": 6.52951081593805e-05, "loss": 1.1438, "step": 13205 }, { "epoch": 2.4541906708790187, "grad_norm": 0.8623514175415039, "learning_rate": 6.528140720290001e-05, "loss": 0.8775, "step": 13206 }, { 
"epoch": 2.45437650994239, "grad_norm": 0.7796410322189331, "learning_rate": 6.526770698742831e-05, "loss": 0.8452, "step": 13207 }, { "epoch": 2.454562349005761, "grad_norm": 0.8087790608406067, "learning_rate": 6.525400751325783e-05, "loss": 0.8721, "step": 13208 }, { "epoch": 2.454748188069132, "grad_norm": 0.8593664765357971, "learning_rate": 6.524030878068091e-05, "loss": 1.0969, "step": 13209 }, { "epoch": 2.454934027132503, "grad_norm": 0.7332558631896973, "learning_rate": 6.52266107899899e-05, "loss": 0.9816, "step": 13210 }, { "epoch": 2.4551198661958744, "grad_norm": 0.9154184460639954, "learning_rate": 6.521291354147727e-05, "loss": 1.099, "step": 13211 }, { "epoch": 2.4553057052592457, "grad_norm": 0.8243273496627808, "learning_rate": 6.51992170354353e-05, "loss": 1.0562, "step": 13212 }, { "epoch": 2.4554915443226166, "grad_norm": 0.9075212478637695, "learning_rate": 6.518552127215632e-05, "loss": 1.0337, "step": 13213 }, { "epoch": 2.4556773833859875, "grad_norm": 0.7624395489692688, "learning_rate": 6.517182625193263e-05, "loss": 1.1125, "step": 13214 }, { "epoch": 2.455863222449359, "grad_norm": 1.781527042388916, "learning_rate": 6.515813197505656e-05, "loss": 1.4969, "step": 13215 }, { "epoch": 2.45604906151273, "grad_norm": 0.8518239855766296, "learning_rate": 6.514443844182035e-05, "loss": 1.0883, "step": 13216 }, { "epoch": 2.456234900576101, "grad_norm": 0.7943968176841736, "learning_rate": 6.513074565251629e-05, "loss": 0.8723, "step": 13217 }, { "epoch": 2.4564207396394724, "grad_norm": 0.8362912535667419, "learning_rate": 6.511705360743665e-05, "loss": 1.0535, "step": 13218 }, { "epoch": 2.4566065787028433, "grad_norm": 0.8788190484046936, "learning_rate": 6.510336230687362e-05, "loss": 0.9461, "step": 13219 }, { "epoch": 2.4567924177662146, "grad_norm": 1.0436252355575562, "learning_rate": 6.508967175111945e-05, "loss": 1.0679, "step": 13220 }, { "epoch": 2.4569782568295855, "grad_norm": 0.8631687760353088, "learning_rate": 
6.507598194046629e-05, "loss": 0.9971, "step": 13221 }, { "epoch": 2.457164095892957, "grad_norm": 0.816724956035614, "learning_rate": 6.506229287520638e-05, "loss": 0.939, "step": 13222 }, { "epoch": 2.4573499349563277, "grad_norm": 0.8277599215507507, "learning_rate": 6.50486045556319e-05, "loss": 0.9906, "step": 13223 }, { "epoch": 2.457535774019699, "grad_norm": 0.7382860779762268, "learning_rate": 6.503491698203496e-05, "loss": 0.8693, "step": 13224 }, { "epoch": 2.45772161308307, "grad_norm": 0.767471194267273, "learning_rate": 6.502123015470772e-05, "loss": 0.6254, "step": 13225 }, { "epoch": 2.4579074521464412, "grad_norm": 0.8623015880584717, "learning_rate": 6.50075440739423e-05, "loss": 0.7919, "step": 13226 }, { "epoch": 2.458093291209812, "grad_norm": 0.7773959040641785, "learning_rate": 6.499385874003076e-05, "loss": 0.9748, "step": 13227 }, { "epoch": 2.4582791302731835, "grad_norm": 0.8545732498168945, "learning_rate": 6.49801741532653e-05, "loss": 1.1325, "step": 13228 }, { "epoch": 2.4584649693365543, "grad_norm": 0.7644531726837158, "learning_rate": 6.496649031393788e-05, "loss": 0.9449, "step": 13229 }, { "epoch": 2.4586508083999257, "grad_norm": 0.8056524991989136, "learning_rate": 6.495280722234063e-05, "loss": 1.0851, "step": 13230 }, { "epoch": 2.458836647463297, "grad_norm": 0.6984783411026001, "learning_rate": 6.493912487876553e-05, "loss": 1.0835, "step": 13231 }, { "epoch": 2.459022486526668, "grad_norm": 0.7677348256111145, "learning_rate": 6.492544328350465e-05, "loss": 0.8166, "step": 13232 }, { "epoch": 2.459208325590039, "grad_norm": 0.6787754893302917, "learning_rate": 6.491176243685003e-05, "loss": 0.6079, "step": 13233 }, { "epoch": 2.45939416465341, "grad_norm": 1.002785086631775, "learning_rate": 6.489808233909359e-05, "loss": 0.9857, "step": 13234 }, { "epoch": 2.4595800037167814, "grad_norm": 0.7710455060005188, "learning_rate": 6.488440299052738e-05, "loss": 1.0792, "step": 13235 }, { "epoch": 2.4597658427801523, 
"grad_norm": 0.8439406752586365, "learning_rate": 6.487072439144331e-05, "loss": 0.9279, "step": 13236 }, { "epoch": 2.4599516818435236, "grad_norm": 0.6731829643249512, "learning_rate": 6.485704654213331e-05, "loss": 0.8066, "step": 13237 }, { "epoch": 2.4601375209068945, "grad_norm": 0.922251284122467, "learning_rate": 6.48433694428894e-05, "loss": 0.9569, "step": 13238 }, { "epoch": 2.460323359970266, "grad_norm": 0.8056465983390808, "learning_rate": 6.482969309400341e-05, "loss": 0.8648, "step": 13239 }, { "epoch": 2.4605091990336367, "grad_norm": 0.8848257660865784, "learning_rate": 6.481601749576728e-05, "loss": 0.7394, "step": 13240 }, { "epoch": 2.460695038097008, "grad_norm": 0.7655457854270935, "learning_rate": 6.480234264847286e-05, "loss": 0.938, "step": 13241 }, { "epoch": 2.460880877160379, "grad_norm": 0.8142226934432983, "learning_rate": 6.478866855241202e-05, "loss": 0.912, "step": 13242 }, { "epoch": 2.4610667162237503, "grad_norm": 0.9077260494232178, "learning_rate": 6.477499520787665e-05, "loss": 0.9718, "step": 13243 }, { "epoch": 2.461252555287121, "grad_norm": 0.8445703387260437, "learning_rate": 6.476132261515854e-05, "loss": 0.9174, "step": 13244 }, { "epoch": 2.4614383943504925, "grad_norm": 0.8891257643699646, "learning_rate": 6.474765077454957e-05, "loss": 0.9767, "step": 13245 }, { "epoch": 2.461624233413864, "grad_norm": 0.787733793258667, "learning_rate": 6.473397968634147e-05, "loss": 0.9266, "step": 13246 }, { "epoch": 2.4618100724772347, "grad_norm": 0.8732960224151611, "learning_rate": 6.472030935082605e-05, "loss": 0.8442, "step": 13247 }, { "epoch": 2.4619959115406056, "grad_norm": 0.9979159235954285, "learning_rate": 6.470663976829509e-05, "loss": 1.0616, "step": 13248 }, { "epoch": 2.462181750603977, "grad_norm": 0.8794299960136414, "learning_rate": 6.469297093904035e-05, "loss": 0.9818, "step": 13249 }, { "epoch": 2.4623675896673483, "grad_norm": 0.9490398168563843, "learning_rate": 6.467930286335356e-05, "loss": 0.9454, 
"step": 13250 }, { "epoch": 2.462553428730719, "grad_norm": 0.8701783418655396, "learning_rate": 6.466563554152641e-05, "loss": 0.7952, "step": 13251 }, { "epoch": 2.4627392677940905, "grad_norm": 0.7783467769622803, "learning_rate": 6.465196897385062e-05, "loss": 0.9286, "step": 13252 }, { "epoch": 2.4629251068574614, "grad_norm": 0.7469505667686462, "learning_rate": 6.463830316061792e-05, "loss": 0.7903, "step": 13253 }, { "epoch": 2.4631109459208327, "grad_norm": 0.8126850724220276, "learning_rate": 6.462463810211995e-05, "loss": 0.9425, "step": 13254 }, { "epoch": 2.4632967849842036, "grad_norm": 0.7326277494430542, "learning_rate": 6.46109737986484e-05, "loss": 0.718, "step": 13255 }, { "epoch": 2.463482624047575, "grad_norm": 0.8058909773826599, "learning_rate": 6.459731025049484e-05, "loss": 1.518, "step": 13256 }, { "epoch": 2.463668463110946, "grad_norm": 0.8176577091217041, "learning_rate": 6.458364745795096e-05, "loss": 1.1653, "step": 13257 }, { "epoch": 2.463854302174317, "grad_norm": 0.6765969395637512, "learning_rate": 6.456998542130837e-05, "loss": 0.7163, "step": 13258 }, { "epoch": 2.464040141237688, "grad_norm": 0.7259505391120911, "learning_rate": 6.455632414085861e-05, "loss": 0.9289, "step": 13259 }, { "epoch": 2.4642259803010593, "grad_norm": 0.8830696940422058, "learning_rate": 6.454266361689331e-05, "loss": 1.0349, "step": 13260 }, { "epoch": 2.46441181936443, "grad_norm": 0.9436715245246887, "learning_rate": 6.4529003849704e-05, "loss": 0.9728, "step": 13261 }, { "epoch": 2.4645976584278015, "grad_norm": 0.7746622562408447, "learning_rate": 6.451534483958222e-05, "loss": 0.9486, "step": 13262 }, { "epoch": 2.4647834974911724, "grad_norm": 0.7761104702949524, "learning_rate": 6.45016865868195e-05, "loss": 1.0427, "step": 13263 }, { "epoch": 2.4649693365545438, "grad_norm": 0.7415323853492737, "learning_rate": 6.448802909170737e-05, "loss": 1.0287, "step": 13264 }, { "epoch": 2.465155175617915, "grad_norm": 0.7377958297729492, 
"learning_rate": 6.447437235453735e-05, "loss": 0.9365, "step": 13265 }, { "epoch": 2.465341014681286, "grad_norm": 0.9537144303321838, "learning_rate": 6.446071637560085e-05, "loss": 0.9043, "step": 13266 }, { "epoch": 2.4655268537446573, "grad_norm": 0.6393646597862244, "learning_rate": 6.444706115518938e-05, "loss": 0.9902, "step": 13267 }, { "epoch": 2.465712692808028, "grad_norm": 0.8335622549057007, "learning_rate": 6.44334066935944e-05, "loss": 0.7365, "step": 13268 }, { "epoch": 2.4658985318713995, "grad_norm": 0.8195977210998535, "learning_rate": 6.44197529911073e-05, "loss": 0.9792, "step": 13269 }, { "epoch": 2.4660843709347704, "grad_norm": 0.8189404606819153, "learning_rate": 6.440610004801953e-05, "loss": 1.0515, "step": 13270 }, { "epoch": 2.4662702099981417, "grad_norm": 0.8037553429603577, "learning_rate": 6.439244786462245e-05, "loss": 1.0898, "step": 13271 }, { "epoch": 2.4664560490615126, "grad_norm": 0.7144148945808411, "learning_rate": 6.43787964412075e-05, "loss": 0.8562, "step": 13272 }, { "epoch": 2.466641888124884, "grad_norm": 0.8356459736824036, "learning_rate": 6.436514577806595e-05, "loss": 0.9447, "step": 13273 }, { "epoch": 2.466827727188255, "grad_norm": 0.8574797511100769, "learning_rate": 6.435149587548925e-05, "loss": 0.9169, "step": 13274 }, { "epoch": 2.467013566251626, "grad_norm": 0.7633848190307617, "learning_rate": 6.43378467337687e-05, "loss": 1.0534, "step": 13275 }, { "epoch": 2.467199405314997, "grad_norm": 0.9943694472312927, "learning_rate": 6.43241983531956e-05, "loss": 0.9129, "step": 13276 }, { "epoch": 2.4673852443783684, "grad_norm": 0.779023289680481, "learning_rate": 6.431055073406127e-05, "loss": 0.9427, "step": 13277 }, { "epoch": 2.4675710834417393, "grad_norm": 0.832300066947937, "learning_rate": 6.429690387665702e-05, "loss": 0.8809, "step": 13278 }, { "epoch": 2.4677569225051106, "grad_norm": 0.8126018047332764, "learning_rate": 6.428325778127404e-05, "loss": 1.1159, "step": 13279 }, { "epoch": 
2.467942761568482, "grad_norm": 0.8588595986366272, "learning_rate": 6.426961244820366e-05, "loss": 0.8322, "step": 13280 }, { "epoch": 2.468128600631853, "grad_norm": 0.8416522145271301, "learning_rate": 6.425596787773706e-05, "loss": 0.9026, "step": 13281 }, { "epoch": 2.468314439695224, "grad_norm": 0.756829023361206, "learning_rate": 6.424232407016553e-05, "loss": 0.8206, "step": 13282 }, { "epoch": 2.468500278758595, "grad_norm": 0.7184683084487915, "learning_rate": 6.422868102578018e-05, "loss": 0.6891, "step": 13283 }, { "epoch": 2.4686861178219663, "grad_norm": 0.9127999544143677, "learning_rate": 6.421503874487224e-05, "loss": 0.8975, "step": 13284 }, { "epoch": 2.4688719568853372, "grad_norm": 0.7633183002471924, "learning_rate": 6.420139722773296e-05, "loss": 0.9248, "step": 13285 }, { "epoch": 2.4690577959487086, "grad_norm": 0.8028041124343872, "learning_rate": 6.418775647465336e-05, "loss": 0.8665, "step": 13286 }, { "epoch": 2.4692436350120794, "grad_norm": 1.1864566802978516, "learning_rate": 6.417411648592466e-05, "loss": 1.0498, "step": 13287 }, { "epoch": 2.4694294740754508, "grad_norm": 0.7055113911628723, "learning_rate": 6.416047726183798e-05, "loss": 0.7556, "step": 13288 }, { "epoch": 2.4696153131388217, "grad_norm": 0.8412120938301086, "learning_rate": 6.41468388026844e-05, "loss": 0.9165, "step": 13289 }, { "epoch": 2.469801152202193, "grad_norm": 0.9094467759132385, "learning_rate": 6.413320110875502e-05, "loss": 0.9182, "step": 13290 }, { "epoch": 2.469986991265564, "grad_norm": 0.9734574556350708, "learning_rate": 6.41195641803409e-05, "loss": 1.0071, "step": 13291 }, { "epoch": 2.470172830328935, "grad_norm": 0.7539117336273193, "learning_rate": 6.410592801773313e-05, "loss": 0.9826, "step": 13292 }, { "epoch": 2.470358669392306, "grad_norm": 0.8051267266273499, "learning_rate": 6.409229262122271e-05, "loss": 0.7357, "step": 13293 }, { "epoch": 2.4705445084556774, "grad_norm": 1.0722055435180664, "learning_rate": 6.407865799110067e-05, 
"loss": 1.0554, "step": 13294 }, { "epoch": 2.4707303475190487, "grad_norm": 0.8269297480583191, "learning_rate": 6.406502412765807e-05, "loss": 0.8793, "step": 13295 }, { "epoch": 2.4709161865824196, "grad_norm": 0.8753021955490112, "learning_rate": 6.405139103118583e-05, "loss": 0.8462, "step": 13296 }, { "epoch": 2.4711020256457905, "grad_norm": 0.7838700413703918, "learning_rate": 6.403775870197497e-05, "loss": 0.973, "step": 13297 }, { "epoch": 2.471287864709162, "grad_norm": 0.7521423697471619, "learning_rate": 6.402412714031645e-05, "loss": 0.9985, "step": 13298 }, { "epoch": 2.471473703772533, "grad_norm": 0.8879348039627075, "learning_rate": 6.401049634650118e-05, "loss": 0.932, "step": 13299 }, { "epoch": 2.471659542835904, "grad_norm": 0.8718003034591675, "learning_rate": 6.399686632082014e-05, "loss": 0.9369, "step": 13300 }, { "epoch": 2.4718453818992754, "grad_norm": 1.2705172300338745, "learning_rate": 6.398323706356417e-05, "loss": 0.9817, "step": 13301 }, { "epoch": 2.4720312209626463, "grad_norm": 0.7764171361923218, "learning_rate": 6.396960857502422e-05, "loss": 0.8831, "step": 13302 }, { "epoch": 2.4722170600260176, "grad_norm": 0.8133841156959534, "learning_rate": 6.395598085549113e-05, "loss": 1.0403, "step": 13303 }, { "epoch": 2.4724028990893885, "grad_norm": 0.7242906093597412, "learning_rate": 6.394235390525579e-05, "loss": 0.8429, "step": 13304 }, { "epoch": 2.47258873815276, "grad_norm": 0.7564440965652466, "learning_rate": 6.392872772460898e-05, "loss": 1.0826, "step": 13305 }, { "epoch": 2.4727745772161307, "grad_norm": 0.7228856682777405, "learning_rate": 6.391510231384162e-05, "loss": 0.7916, "step": 13306 }, { "epoch": 2.472960416279502, "grad_norm": 0.7421514987945557, "learning_rate": 6.390147767324445e-05, "loss": 0.852, "step": 13307 }, { "epoch": 2.473146255342873, "grad_norm": 0.8280564546585083, "learning_rate": 6.388785380310833e-05, "loss": 0.8453, "step": 13308 }, { "epoch": 2.4733320944062442, "grad_norm": 
0.8708766102790833, "learning_rate": 6.387423070372395e-05, "loss": 0.7161, "step": 13309 }, { "epoch": 2.473517933469615, "grad_norm": 0.9055882096290588, "learning_rate": 6.386060837538218e-05, "loss": 1.1629, "step": 13310 }, { "epoch": 2.4737037725329865, "grad_norm": 0.8641484975814819, "learning_rate": 6.384698681837366e-05, "loss": 0.8631, "step": 13311 }, { "epoch": 2.4738896115963573, "grad_norm": 0.9759421944618225, "learning_rate": 6.383336603298921e-05, "loss": 0.9113, "step": 13312 }, { "epoch": 2.4740754506597287, "grad_norm": 0.7598199844360352, "learning_rate": 6.381974601951945e-05, "loss": 0.9642, "step": 13313 }, { "epoch": 2.4742612897231, "grad_norm": 0.8624680638313293, "learning_rate": 6.380612677825518e-05, "loss": 0.9527, "step": 13314 }, { "epoch": 2.474447128786471, "grad_norm": 0.8249783515930176, "learning_rate": 6.379250830948694e-05, "loss": 0.8295, "step": 13315 }, { "epoch": 2.474632967849842, "grad_norm": 0.9170358777046204, "learning_rate": 6.377889061350551e-05, "loss": 0.8197, "step": 13316 }, { "epoch": 2.474818806913213, "grad_norm": 0.766761302947998, "learning_rate": 6.376527369060151e-05, "loss": 0.9006, "step": 13317 }, { "epoch": 2.4750046459765844, "grad_norm": 0.8031441569328308, "learning_rate": 6.375165754106558e-05, "loss": 0.9433, "step": 13318 }, { "epoch": 2.4751904850399553, "grad_norm": 0.8161746263504028, "learning_rate": 6.37380421651883e-05, "loss": 0.5266, "step": 13319 }, { "epoch": 2.4753763241033266, "grad_norm": 0.9765466451644897, "learning_rate": 6.37244275632603e-05, "loss": 0.965, "step": 13320 }, { "epoch": 2.4755621631666975, "grad_norm": 0.8644874691963196, "learning_rate": 6.37108137355721e-05, "loss": 0.9189, "step": 13321 }, { "epoch": 2.475748002230069, "grad_norm": 0.8705654740333557, "learning_rate": 6.369720068241435e-05, "loss": 0.9405, "step": 13322 }, { "epoch": 2.4759338412934397, "grad_norm": 0.8883621692657471, "learning_rate": 6.368358840407753e-05, "loss": 0.8004, "step": 13323 }, { 
"epoch": 2.476119680356811, "grad_norm": 0.6940230131149292, "learning_rate": 6.366997690085221e-05, "loss": 0.9775, "step": 13324 }, { "epoch": 2.476305519420182, "grad_norm": 0.9831171631813049, "learning_rate": 6.365636617302888e-05, "loss": 0.8483, "step": 13325 }, { "epoch": 2.4764913584835533, "grad_norm": 0.8517470359802246, "learning_rate": 6.364275622089802e-05, "loss": 0.9926, "step": 13326 }, { "epoch": 2.476677197546924, "grad_norm": 0.962731659412384, "learning_rate": 6.362914704475014e-05, "loss": 0.923, "step": 13327 }, { "epoch": 2.4768630366102955, "grad_norm": 0.8018013834953308, "learning_rate": 6.361553864487573e-05, "loss": 0.6676, "step": 13328 }, { "epoch": 2.477048875673667, "grad_norm": 0.8119571208953857, "learning_rate": 6.36019310215652e-05, "loss": 0.8344, "step": 13329 }, { "epoch": 2.4772347147370377, "grad_norm": 0.999383807182312, "learning_rate": 6.3588324175109e-05, "loss": 0.671, "step": 13330 }, { "epoch": 2.4774205538004086, "grad_norm": 0.6964300870895386, "learning_rate": 6.357471810579752e-05, "loss": 0.7833, "step": 13331 }, { "epoch": 2.47760639286378, "grad_norm": 0.7667035460472107, "learning_rate": 6.356111281392119e-05, "loss": 0.9719, "step": 13332 }, { "epoch": 2.4777922319271513, "grad_norm": 0.9297795295715332, "learning_rate": 6.354750829977036e-05, "loss": 1.0311, "step": 13333 }, { "epoch": 2.477978070990522, "grad_norm": 0.9912102818489075, "learning_rate": 6.353390456363544e-05, "loss": 0.9136, "step": 13334 }, { "epoch": 2.4781639100538935, "grad_norm": 0.8039162158966064, "learning_rate": 6.352030160580671e-05, "loss": 0.7175, "step": 13335 }, { "epoch": 2.4783497491172644, "grad_norm": 0.7512396574020386, "learning_rate": 6.350669942657452e-05, "loss": 0.7518, "step": 13336 }, { "epoch": 2.4785355881806357, "grad_norm": 0.9079875349998474, "learning_rate": 6.349309802622925e-05, "loss": 1.1857, "step": 13337 }, { "epoch": 2.4787214272440066, "grad_norm": 0.9301310777664185, "learning_rate": 
6.347949740506116e-05, "loss": 1.211, "step": 13338 }, { "epoch": 2.478907266307378, "grad_norm": 0.8027583360671997, "learning_rate": 6.34658975633605e-05, "loss": 0.8181, "step": 13339 }, { "epoch": 2.479093105370749, "grad_norm": 0.7249844670295715, "learning_rate": 6.345229850141758e-05, "loss": 1.0283, "step": 13340 }, { "epoch": 2.47927894443412, "grad_norm": 0.8149129748344421, "learning_rate": 6.343870021952262e-05, "loss": 0.9977, "step": 13341 }, { "epoch": 2.479464783497491, "grad_norm": 0.6895486116409302, "learning_rate": 6.342510271796588e-05, "loss": 0.8257, "step": 13342 }, { "epoch": 2.4796506225608623, "grad_norm": 0.8426530957221985, "learning_rate": 6.341150599703753e-05, "loss": 0.9625, "step": 13343 }, { "epoch": 2.4798364616242337, "grad_norm": 0.6276504397392273, "learning_rate": 6.339791005702783e-05, "loss": 0.4804, "step": 13344 }, { "epoch": 2.4800223006876045, "grad_norm": 0.7405399680137634, "learning_rate": 6.33843148982269e-05, "loss": 0.9845, "step": 13345 }, { "epoch": 2.4802081397509754, "grad_norm": 0.788058340549469, "learning_rate": 6.337072052092495e-05, "loss": 0.9082, "step": 13346 }, { "epoch": 2.4803939788143468, "grad_norm": 0.829477846622467, "learning_rate": 6.335712692541204e-05, "loss": 0.7689, "step": 13347 }, { "epoch": 2.480579817877718, "grad_norm": 0.8082651495933533, "learning_rate": 6.334353411197846e-05, "loss": 0.8273, "step": 13348 }, { "epoch": 2.480765656941089, "grad_norm": 0.8245391845703125, "learning_rate": 6.33299420809142e-05, "loss": 0.9454, "step": 13349 }, { "epoch": 2.4809514960044603, "grad_norm": 0.7668597102165222, "learning_rate": 6.331635083250943e-05, "loss": 0.9144, "step": 13350 }, { "epoch": 2.481137335067831, "grad_norm": 0.7859448790550232, "learning_rate": 6.330276036705418e-05, "loss": 0.8892, "step": 13351 }, { "epoch": 2.4813231741312025, "grad_norm": 0.8485981225967407, "learning_rate": 6.328917068483855e-05, "loss": 0.9379, "step": 13352 }, { "epoch": 2.4815090131945734, 
"grad_norm": 0.6385952234268188, "learning_rate": 6.327558178615256e-05, "loss": 0.6124, "step": 13353 }, { "epoch": 2.4816948522579447, "grad_norm": 0.851843535900116, "learning_rate": 6.326199367128627e-05, "loss": 1.0969, "step": 13354 }, { "epoch": 2.4818806913213156, "grad_norm": 0.7018309235572815, "learning_rate": 6.324840634052967e-05, "loss": 0.7291, "step": 13355 }, { "epoch": 2.482066530384687, "grad_norm": 0.9294320344924927, "learning_rate": 6.323481979417277e-05, "loss": 0.9707, "step": 13356 }, { "epoch": 2.482252369448058, "grad_norm": 0.8005899786949158, "learning_rate": 6.32212340325055e-05, "loss": 1.074, "step": 13357 }, { "epoch": 2.482438208511429, "grad_norm": 0.8252381682395935, "learning_rate": 6.320764905581793e-05, "loss": 0.8933, "step": 13358 }, { "epoch": 2.4826240475748, "grad_norm": 0.7771022319793701, "learning_rate": 6.319406486439993e-05, "loss": 0.94, "step": 13359 }, { "epoch": 2.4828098866381714, "grad_norm": 0.8648090958595276, "learning_rate": 6.31804814585415e-05, "loss": 0.9789, "step": 13360 }, { "epoch": 2.4829957257015423, "grad_norm": 0.7643998265266418, "learning_rate": 6.316689883853246e-05, "loss": 0.8146, "step": 13361 }, { "epoch": 2.4831815647649136, "grad_norm": 0.8901168704032898, "learning_rate": 6.315331700466278e-05, "loss": 1.0039, "step": 13362 }, { "epoch": 2.483367403828285, "grad_norm": 0.8452150821685791, "learning_rate": 6.31397359572223e-05, "loss": 0.909, "step": 13363 }, { "epoch": 2.483553242891656, "grad_norm": 0.7983315587043762, "learning_rate": 6.312615569650093e-05, "loss": 0.9486, "step": 13364 }, { "epoch": 2.483739081955027, "grad_norm": 0.7734251618385315, "learning_rate": 6.311257622278845e-05, "loss": 0.8991, "step": 13365 }, { "epoch": 2.483924921018398, "grad_norm": 0.8092442154884338, "learning_rate": 6.309899753637478e-05, "loss": 0.9363, "step": 13366 }, { "epoch": 2.4841107600817693, "grad_norm": 0.779928982257843, "learning_rate": 6.308541963754964e-05, "loss": 1.0153, "step": 
13367 }, { "epoch": 2.4842965991451402, "grad_norm": 0.8639279007911682, "learning_rate": 6.307184252660285e-05, "loss": 0.9031, "step": 13368 }, { "epoch": 2.4844824382085116, "grad_norm": 0.7608044147491455, "learning_rate": 6.305826620382422e-05, "loss": 0.9893, "step": 13369 }, { "epoch": 2.4846682772718824, "grad_norm": 0.8655931949615479, "learning_rate": 6.304469066950355e-05, "loss": 1.0699, "step": 13370 }, { "epoch": 2.4848541163352538, "grad_norm": 0.7753689289093018, "learning_rate": 6.30311159239305e-05, "loss": 0.8803, "step": 13371 }, { "epoch": 2.4850399553986247, "grad_norm": 0.8257173895835876, "learning_rate": 6.301754196739487e-05, "loss": 0.7816, "step": 13372 }, { "epoch": 2.485225794461996, "grad_norm": 0.9522544145584106, "learning_rate": 6.300396880018632e-05, "loss": 1.0108, "step": 13373 }, { "epoch": 2.485411633525367, "grad_norm": 0.9205360412597656, "learning_rate": 6.299039642259458e-05, "loss": 0.9166, "step": 13374 }, { "epoch": 2.485597472588738, "grad_norm": 0.9840677380561829, "learning_rate": 6.29768248349093e-05, "loss": 0.9388, "step": 13375 }, { "epoch": 2.485783311652109, "grad_norm": 0.8334775567054749, "learning_rate": 6.29632540374202e-05, "loss": 0.9968, "step": 13376 }, { "epoch": 2.4859691507154804, "grad_norm": 0.6557118892669678, "learning_rate": 6.294968403041685e-05, "loss": 0.6664, "step": 13377 }, { "epoch": 2.4861549897788517, "grad_norm": 0.7033586502075195, "learning_rate": 6.293611481418889e-05, "loss": 0.7885, "step": 13378 }, { "epoch": 2.4863408288422226, "grad_norm": 0.8240175247192383, "learning_rate": 6.292254638902597e-05, "loss": 0.7301, "step": 13379 }, { "epoch": 2.4865266679055935, "grad_norm": 0.7722638249397278, "learning_rate": 6.290897875521769e-05, "loss": 0.8163, "step": 13380 }, { "epoch": 2.486712506968965, "grad_norm": 0.7567647099494934, "learning_rate": 6.28954119130536e-05, "loss": 0.915, "step": 13381 }, { "epoch": 2.486898346032336, "grad_norm": 0.9332913160324097, "learning_rate": 
6.288184586282329e-05, "loss": 0.9552, "step": 13382 }, { "epoch": 2.487084185095707, "grad_norm": 0.756186306476593, "learning_rate": 6.286828060481626e-05, "loss": 0.9971, "step": 13383 }, { "epoch": 2.4872700241590784, "grad_norm": 0.8372758626937866, "learning_rate": 6.285471613932207e-05, "loss": 1.0718, "step": 13384 }, { "epoch": 2.4874558632224493, "grad_norm": 0.9867591857910156, "learning_rate": 6.28411524666302e-05, "loss": 1.1196, "step": 13385 }, { "epoch": 2.4876417022858206, "grad_norm": 0.789208173751831, "learning_rate": 6.282758958703018e-05, "loss": 0.6183, "step": 13386 }, { "epoch": 2.4878275413491915, "grad_norm": 0.9247452020645142, "learning_rate": 6.281402750081147e-05, "loss": 0.8366, "step": 13387 }, { "epoch": 2.488013380412563, "grad_norm": 0.7863283157348633, "learning_rate": 6.280046620826352e-05, "loss": 0.9512, "step": 13388 }, { "epoch": 2.4881992194759337, "grad_norm": 0.8000710010528564, "learning_rate": 6.278690570967576e-05, "loss": 0.8256, "step": 13389 }, { "epoch": 2.488385058539305, "grad_norm": 0.7943741083145142, "learning_rate": 6.277334600533768e-05, "loss": 0.9896, "step": 13390 }, { "epoch": 2.488570897602676, "grad_norm": 0.7985944151878357, "learning_rate": 6.275978709553862e-05, "loss": 1.0321, "step": 13391 }, { "epoch": 2.4887567366660472, "grad_norm": 0.8724450469017029, "learning_rate": 6.274622898056804e-05, "loss": 1.058, "step": 13392 }, { "epoch": 2.4889425757294186, "grad_norm": 0.6923696994781494, "learning_rate": 6.273267166071521e-05, "loss": 0.8085, "step": 13393 }, { "epoch": 2.4891284147927895, "grad_norm": 0.9188840389251709, "learning_rate": 6.27191151362696e-05, "loss": 1.063, "step": 13394 }, { "epoch": 2.4893142538561603, "grad_norm": 0.7417453527450562, "learning_rate": 6.270555940752047e-05, "loss": 0.834, "step": 13395 }, { "epoch": 2.4895000929195317, "grad_norm": 0.7921097278594971, "learning_rate": 6.269200447475721e-05, "loss": 1.0216, "step": 13396 }, { "epoch": 2.489685931982903, 
"grad_norm": 0.8512747883796692, "learning_rate": 6.267845033826906e-05, "loss": 0.9431, "step": 13397 }, { "epoch": 2.489871771046274, "grad_norm": 0.7549306154251099, "learning_rate": 6.266489699834535e-05, "loss": 0.9412, "step": 13398 }, { "epoch": 2.490057610109645, "grad_norm": 0.8000445365905762, "learning_rate": 6.26513444552753e-05, "loss": 0.7382, "step": 13399 }, { "epoch": 2.490243449173016, "grad_norm": 0.7849511504173279, "learning_rate": 6.263779270934828e-05, "loss": 0.9786, "step": 13400 }, { "epoch": 2.4904292882363874, "grad_norm": 0.7787767052650452, "learning_rate": 6.262424176085342e-05, "loss": 0.9891, "step": 13401 }, { "epoch": 2.4906151272997583, "grad_norm": 0.8131736516952515, "learning_rate": 6.261069161008001e-05, "loss": 1.0321, "step": 13402 }, { "epoch": 2.4908009663631296, "grad_norm": 0.6768377423286438, "learning_rate": 6.259714225731718e-05, "loss": 0.7298, "step": 13403 }, { "epoch": 2.4909868054265005, "grad_norm": 0.7723026871681213, "learning_rate": 6.258359370285422e-05, "loss": 0.8469, "step": 13404 }, { "epoch": 2.491172644489872, "grad_norm": 0.7771666646003723, "learning_rate": 6.257004594698021e-05, "loss": 0.9502, "step": 13405 }, { "epoch": 2.4913584835532427, "grad_norm": 0.8538346886634827, "learning_rate": 6.255649898998434e-05, "loss": 0.9947, "step": 13406 }, { "epoch": 2.491544322616614, "grad_norm": 0.7655419111251831, "learning_rate": 6.254295283215574e-05, "loss": 1.0506, "step": 13407 }, { "epoch": 2.491730161679985, "grad_norm": 0.8938736319541931, "learning_rate": 6.252940747378353e-05, "loss": 0.8542, "step": 13408 }, { "epoch": 2.4919160007433563, "grad_norm": 0.8296077847480774, "learning_rate": 6.251586291515683e-05, "loss": 0.8791, "step": 13409 }, { "epoch": 2.492101839806727, "grad_norm": 0.8586382269859314, "learning_rate": 6.250231915656468e-05, "loss": 0.8881, "step": 13410 }, { "epoch": 2.4922876788700985, "grad_norm": 0.8820834755897522, "learning_rate": 6.248877619829619e-05, "loss": 1.1859, 
"step": 13411 }, { "epoch": 2.49247351793347, "grad_norm": 0.7467595934867859, "learning_rate": 6.247523404064042e-05, "loss": 0.7304, "step": 13412 }, { "epoch": 2.4926593569968407, "grad_norm": 0.9176929593086243, "learning_rate": 6.246169268388638e-05, "loss": 0.8659, "step": 13413 }, { "epoch": 2.492845196060212, "grad_norm": 0.705633819103241, "learning_rate": 6.244815212832309e-05, "loss": 0.8766, "step": 13414 }, { "epoch": 2.493031035123583, "grad_norm": 0.7432262897491455, "learning_rate": 6.243461237423955e-05, "loss": 1.0674, "step": 13415 }, { "epoch": 2.4932168741869543, "grad_norm": 0.7909014225006104, "learning_rate": 6.242107342192475e-05, "loss": 1.0911, "step": 13416 }, { "epoch": 2.493402713250325, "grad_norm": 0.9045495986938477, "learning_rate": 6.240753527166763e-05, "loss": 0.9914, "step": 13417 }, { "epoch": 2.4935885523136965, "grad_norm": 0.9623762369155884, "learning_rate": 6.239399792375718e-05, "loss": 1.0743, "step": 13418 }, { "epoch": 2.4937743913770674, "grad_norm": 0.7921001315116882, "learning_rate": 6.23804613784823e-05, "loss": 0.906, "step": 13419 }, { "epoch": 2.4939602304404387, "grad_norm": 0.6968930959701538, "learning_rate": 6.236692563613189e-05, "loss": 0.6455, "step": 13420 }, { "epoch": 2.4941460695038096, "grad_norm": 1.0765269994735718, "learning_rate": 6.235339069699489e-05, "loss": 0.9525, "step": 13421 }, { "epoch": 2.494331908567181, "grad_norm": 0.8552760481834412, "learning_rate": 6.233985656136019e-05, "loss": 0.9474, "step": 13422 }, { "epoch": 2.494517747630552, "grad_norm": 0.8925338387489319, "learning_rate": 6.23263232295166e-05, "loss": 1.0033, "step": 13423 }, { "epoch": 2.494703586693923, "grad_norm": 0.8355891108512878, "learning_rate": 6.231279070175298e-05, "loss": 0.8004, "step": 13424 }, { "epoch": 2.494889425757294, "grad_norm": 0.7731959819793701, "learning_rate": 6.229925897835818e-05, "loss": 0.8918, "step": 13425 }, { "epoch": 2.4950752648206653, "grad_norm": 1.1086838245391846, 
"learning_rate": 6.228572805962102e-05, "loss": 0.9063, "step": 13426 }, { "epoch": 2.4952611038840367, "grad_norm": 0.8130460977554321, "learning_rate": 6.227219794583023e-05, "loss": 1.0371, "step": 13427 }, { "epoch": 2.4954469429474075, "grad_norm": 0.815039873123169, "learning_rate": 6.225866863727467e-05, "loss": 1.1554, "step": 13428 }, { "epoch": 2.4956327820107784, "grad_norm": 0.9628624320030212, "learning_rate": 6.224514013424305e-05, "loss": 0.8759, "step": 13429 }, { "epoch": 2.4958186210741498, "grad_norm": 0.7636185884475708, "learning_rate": 6.223161243702412e-05, "loss": 0.8707, "step": 13430 }, { "epoch": 2.496004460137521, "grad_norm": 0.8707903027534485, "learning_rate": 6.221808554590657e-05, "loss": 1.0159, "step": 13431 }, { "epoch": 2.496190299200892, "grad_norm": 0.7642168402671814, "learning_rate": 6.22045594611792e-05, "loss": 0.7965, "step": 13432 }, { "epoch": 2.4963761382642633, "grad_norm": 0.8629317283630371, "learning_rate": 6.219103418313064e-05, "loss": 1.0283, "step": 13433 }, { "epoch": 2.496561977327634, "grad_norm": 0.9485316872596741, "learning_rate": 6.217750971204958e-05, "loss": 1.0264, "step": 13434 }, { "epoch": 2.4967478163910055, "grad_norm": 0.7349181771278381, "learning_rate": 6.216398604822466e-05, "loss": 0.8394, "step": 13435 }, { "epoch": 2.4969336554543764, "grad_norm": 0.7712982892990112, "learning_rate": 6.215046319194454e-05, "loss": 0.8971, "step": 13436 }, { "epoch": 2.4971194945177477, "grad_norm": 0.8418555855751038, "learning_rate": 6.21369411434978e-05, "loss": 0.9512, "step": 13437 }, { "epoch": 2.4973053335811186, "grad_norm": 0.7971872091293335, "learning_rate": 6.212341990317309e-05, "loss": 0.939, "step": 13438 }, { "epoch": 2.49749117264449, "grad_norm": 0.7714715003967285, "learning_rate": 6.2109899471259e-05, "loss": 1.1059, "step": 13439 }, { "epoch": 2.497677011707861, "grad_norm": 0.8538257479667664, "learning_rate": 6.209637984804406e-05, "loss": 0.8529, "step": 13440 }, { "epoch": 
2.497862850771232, "grad_norm": 0.7392212748527527, "learning_rate": 6.208286103381684e-05, "loss": 0.7408, "step": 13441 }, { "epoch": 2.498048689834603, "grad_norm": 1.0642915964126587, "learning_rate": 6.206934302886591e-05, "loss": 0.7987, "step": 13442 }, { "epoch": 2.4982345288979744, "grad_norm": 0.7957573533058167, "learning_rate": 6.205582583347974e-05, "loss": 0.9889, "step": 13443 }, { "epoch": 2.4984203679613453, "grad_norm": 0.7173166275024414, "learning_rate": 6.204230944794688e-05, "loss": 0.9598, "step": 13444 }, { "epoch": 2.4986062070247166, "grad_norm": 0.9373538494110107, "learning_rate": 6.202879387255576e-05, "loss": 1.0222, "step": 13445 }, { "epoch": 2.498792046088088, "grad_norm": 0.8266227841377258, "learning_rate": 6.20152791075949e-05, "loss": 0.9327, "step": 13446 }, { "epoch": 2.498977885151459, "grad_norm": 0.9247116446495056, "learning_rate": 6.20017651533527e-05, "loss": 0.7081, "step": 13447 }, { "epoch": 2.49916372421483, "grad_norm": 0.7575215697288513, "learning_rate": 6.198825201011763e-05, "loss": 0.824, "step": 13448 }, { "epoch": 2.499349563278201, "grad_norm": 0.9333925843238831, "learning_rate": 6.197473967817808e-05, "loss": 0.8717, "step": 13449 }, { "epoch": 2.4995354023415723, "grad_norm": 0.8797184824943542, "learning_rate": 6.196122815782245e-05, "loss": 0.9035, "step": 13450 }, { "epoch": 2.4997212414049432, "grad_norm": 0.8298222422599792, "learning_rate": 6.194771744933916e-05, "loss": 0.8492, "step": 13451 }, { "epoch": 2.4999070804683146, "grad_norm": 0.9510545134544373, "learning_rate": 6.193420755301649e-05, "loss": 0.9619, "step": 13452 }, { "epoch": 2.5000929195316854, "grad_norm": 0.7924848794937134, "learning_rate": 6.192069846914285e-05, "loss": 0.8717, "step": 13453 }, { "epoch": 2.5002787585950568, "grad_norm": 0.8562452793121338, "learning_rate": 6.19071901980066e-05, "loss": 0.9483, "step": 13454 }, { "epoch": 2.5004645976584277, "grad_norm": 0.7453281283378601, "learning_rate": 6.189368273989596e-05, 
"loss": 0.863, "step": 13455 }, { "epoch": 2.500650436721799, "grad_norm": 0.8021848797798157, "learning_rate": 6.18801760950993e-05, "loss": 1.1081, "step": 13456 }, { "epoch": 2.5008362757851703, "grad_norm": 0.8074384331703186, "learning_rate": 6.186667026390485e-05, "loss": 0.8636, "step": 13457 }, { "epoch": 2.501022114848541, "grad_norm": 0.8265329003334045, "learning_rate": 6.185316524660087e-05, "loss": 0.8431, "step": 13458 }, { "epoch": 2.501207953911912, "grad_norm": 0.7328630089759827, "learning_rate": 6.183966104347564e-05, "loss": 1.054, "step": 13459 }, { "epoch": 2.5013937929752834, "grad_norm": 0.9158023595809937, "learning_rate": 6.182615765481734e-05, "loss": 1.1426, "step": 13460 }, { "epoch": 2.5013937929752834, "eval_loss": 1.0169224739074707, "eval_runtime": 23.4685, "eval_samples_per_second": 46.53, "eval_steps_per_second": 23.265, "step": 13460 }, { "epoch": 2.5015796320386547, "grad_norm": 0.8677700757980347, "learning_rate": 6.181265508091422e-05, "loss": 1.0208, "step": 13461 }, { "epoch": 2.5017654711020256, "grad_norm": 0.9003196954727173, "learning_rate": 6.179915332205439e-05, "loss": 0.9279, "step": 13462 }, { "epoch": 2.5019513101653965, "grad_norm": 0.8088935613632202, "learning_rate": 6.178565237852611e-05, "loss": 0.8697, "step": 13463 }, { "epoch": 2.502137149228768, "grad_norm": 0.7889655828475952, "learning_rate": 6.177215225061753e-05, "loss": 1.1017, "step": 13464 }, { "epoch": 2.502322988292139, "grad_norm": 0.7348492741584778, "learning_rate": 6.175865293861674e-05, "loss": 0.9419, "step": 13465 }, { "epoch": 2.50250882735551, "grad_norm": 0.7974409461021423, "learning_rate": 6.174515444281188e-05, "loss": 0.9685, "step": 13466 }, { "epoch": 2.5026946664188814, "grad_norm": 0.7407426238059998, "learning_rate": 6.173165676349103e-05, "loss": 0.8596, "step": 13467 }, { "epoch": 2.5028805054822523, "grad_norm": 0.9617146849632263, "learning_rate": 6.171815990094231e-05, "loss": 0.986, "step": 13468 }, { "epoch": 
2.5030663445456236, "grad_norm": 0.7930120229721069, "learning_rate": 6.170466385545378e-05, "loss": 1.0472, "step": 13469 }, { "epoch": 2.5032521836089945, "grad_norm": 0.8628350496292114, "learning_rate": 6.169116862731346e-05, "loss": 0.8275, "step": 13470 }, { "epoch": 2.503438022672366, "grad_norm": 0.8005587458610535, "learning_rate": 6.167767421680944e-05, "loss": 0.8874, "step": 13471 }, { "epoch": 2.5036238617357367, "grad_norm": 0.8890085816383362, "learning_rate": 6.166418062422967e-05, "loss": 0.9465, "step": 13472 }, { "epoch": 2.503809700799108, "grad_norm": 0.7766950130462646, "learning_rate": 6.165068784986216e-05, "loss": 1.0052, "step": 13473 }, { "epoch": 2.503995539862479, "grad_norm": 0.8336291909217834, "learning_rate": 6.163719589399495e-05, "loss": 1.1179, "step": 13474 }, { "epoch": 2.5041813789258502, "grad_norm": 0.8241628408432007, "learning_rate": 6.162370475691594e-05, "loss": 1.2073, "step": 13475 }, { "epoch": 2.5043672179892216, "grad_norm": 1.0504337549209595, "learning_rate": 6.161021443891313e-05, "loss": 0.991, "step": 13476 }, { "epoch": 2.5045530570525925, "grad_norm": 0.8310957551002502, "learning_rate": 6.159672494027438e-05, "loss": 0.9675, "step": 13477 }, { "epoch": 2.5047388961159633, "grad_norm": 1.0969948768615723, "learning_rate": 6.158323626128762e-05, "loss": 1.1299, "step": 13478 }, { "epoch": 2.5049247351793347, "grad_norm": 0.9421222805976868, "learning_rate": 6.15697484022408e-05, "loss": 0.8132, "step": 13479 }, { "epoch": 2.505110574242706, "grad_norm": 0.7072616815567017, "learning_rate": 6.155626136342171e-05, "loss": 0.8952, "step": 13480 }, { "epoch": 2.505296413306077, "grad_norm": 0.7259114384651184, "learning_rate": 6.154277514511829e-05, "loss": 0.8849, "step": 13481 }, { "epoch": 2.505482252369448, "grad_norm": 0.8621964454650879, "learning_rate": 6.152928974761829e-05, "loss": 0.8402, "step": 13482 }, { "epoch": 2.505668091432819, "grad_norm": 1.2493312358856201, "learning_rate": 
6.151580517120958e-05, "loss": 1.0745, "step": 13483 }, { "epoch": 2.5058539304961904, "grad_norm": 0.8947202563285828, "learning_rate": 6.150232141618e-05, "loss": 0.948, "step": 13484 }, { "epoch": 2.5060397695595613, "grad_norm": 0.9812045693397522, "learning_rate": 6.14888384828173e-05, "loss": 0.9176, "step": 13485 }, { "epoch": 2.5062256086229326, "grad_norm": 0.8159515857696533, "learning_rate": 6.147535637140926e-05, "loss": 1.0679, "step": 13486 }, { "epoch": 2.5064114476863035, "grad_norm": 0.8644919991493225, "learning_rate": 6.146187508224362e-05, "loss": 0.9617, "step": 13487 }, { "epoch": 2.506597286749675, "grad_norm": 0.7351353764533997, "learning_rate": 6.144839461560812e-05, "loss": 0.9063, "step": 13488 }, { "epoch": 2.5067831258130457, "grad_norm": 0.8311001658439636, "learning_rate": 6.14349149717905e-05, "loss": 0.8582, "step": 13489 }, { "epoch": 2.506968964876417, "grad_norm": 0.7653113007545471, "learning_rate": 6.142143615107841e-05, "loss": 0.8755, "step": 13490 }, { "epoch": 2.5071548039397884, "grad_norm": 0.8149527907371521, "learning_rate": 6.140795815375958e-05, "loss": 0.9695, "step": 13491 }, { "epoch": 2.5073406430031593, "grad_norm": 0.8174289464950562, "learning_rate": 6.139448098012163e-05, "loss": 0.9273, "step": 13492 }, { "epoch": 2.50752648206653, "grad_norm": 1.8553173542022705, "learning_rate": 6.138100463045229e-05, "loss": 1.341, "step": 13493 }, { "epoch": 2.5077123211299015, "grad_norm": 0.729652464389801, "learning_rate": 6.136752910503906e-05, "loss": 0.9325, "step": 13494 }, { "epoch": 2.507898160193273, "grad_norm": 0.8200655579566956, "learning_rate": 6.135405440416966e-05, "loss": 0.9185, "step": 13495 }, { "epoch": 2.5080839992566437, "grad_norm": 0.8505041599273682, "learning_rate": 6.134058052813166e-05, "loss": 0.9047, "step": 13496 }, { "epoch": 2.5082698383200146, "grad_norm": 0.9282901287078857, "learning_rate": 6.132710747721261e-05, "loss": 0.8199, "step": 13497 }, { "epoch": 2.508455677383386, 
"grad_norm": 0.8327956199645996, "learning_rate": 6.131363525170008e-05, "loss": 0.9152, "step": 13498 }, { "epoch": 2.5086415164467573, "grad_norm": 0.8083164095878601, "learning_rate": 6.130016385188166e-05, "loss": 0.9533, "step": 13499 }, { "epoch": 2.508827355510128, "grad_norm": 0.8243703246116638, "learning_rate": 6.128669327804479e-05, "loss": 1.1127, "step": 13500 }, { "epoch": 2.5090131945734995, "grad_norm": 0.97088623046875, "learning_rate": 6.127322353047704e-05, "loss": 0.8968, "step": 13501 }, { "epoch": 2.5091990336368704, "grad_norm": 0.7824402451515198, "learning_rate": 6.125975460946587e-05, "loss": 0.8047, "step": 13502 }, { "epoch": 2.5093848727002417, "grad_norm": 0.9820107817649841, "learning_rate": 6.124628651529875e-05, "loss": 0.8793, "step": 13503 }, { "epoch": 2.5095707117636126, "grad_norm": 0.8072025179862976, "learning_rate": 6.123281924826313e-05, "loss": 0.73, "step": 13504 }, { "epoch": 2.509756550826984, "grad_norm": 1.0725669860839844, "learning_rate": 6.121935280864646e-05, "loss": 0.9829, "step": 13505 }, { "epoch": 2.5099423898903552, "grad_norm": 1.0136836767196655, "learning_rate": 6.120588719673617e-05, "loss": 1.1713, "step": 13506 }, { "epoch": 2.510128228953726, "grad_norm": 0.8955618739128113, "learning_rate": 6.119242241281964e-05, "loss": 0.8974, "step": 13507 }, { "epoch": 2.510314068017097, "grad_norm": 0.816618800163269, "learning_rate": 6.117895845718424e-05, "loss": 0.934, "step": 13508 }, { "epoch": 2.5104999070804683, "grad_norm": 0.8638303279876709, "learning_rate": 6.116549533011738e-05, "loss": 0.944, "step": 13509 }, { "epoch": 2.5106857461438397, "grad_norm": 0.7912139892578125, "learning_rate": 6.115203303190637e-05, "loss": 1.0496, "step": 13510 }, { "epoch": 2.5108715852072105, "grad_norm": 0.8715540766716003, "learning_rate": 6.113857156283856e-05, "loss": 0.937, "step": 13511 }, { "epoch": 2.5110574242705814, "grad_norm": 0.8288502097129822, "learning_rate": 6.112511092320122e-05, "loss": 0.8559, 
"step": 13512 }, { "epoch": 2.5112432633339528, "grad_norm": 0.8309054970741272, "learning_rate": 6.11116511132817e-05, "loss": 0.9803, "step": 13513 }, { "epoch": 2.511429102397324, "grad_norm": 0.755159854888916, "learning_rate": 6.109819213336724e-05, "loss": 0.8886, "step": 13514 }, { "epoch": 2.511614941460695, "grad_norm": 0.884611964225769, "learning_rate": 6.10847339837451e-05, "loss": 1.0456, "step": 13515 }, { "epoch": 2.5118007805240663, "grad_norm": 0.8449623584747314, "learning_rate": 6.107127666470255e-05, "loss": 0.7734, "step": 13516 }, { "epoch": 2.511986619587437, "grad_norm": 0.9435641169548035, "learning_rate": 6.105782017652679e-05, "loss": 1.0571, "step": 13517 }, { "epoch": 2.5121724586508085, "grad_norm": 1.208054780960083, "learning_rate": 6.104436451950501e-05, "loss": 0.8751, "step": 13518 }, { "epoch": 2.5123582977141794, "grad_norm": 0.6422603726387024, "learning_rate": 6.103090969392447e-05, "loss": 0.5961, "step": 13519 }, { "epoch": 2.5125441367775507, "grad_norm": 0.770844042301178, "learning_rate": 6.101745570007224e-05, "loss": 1.1031, "step": 13520 }, { "epoch": 2.5127299758409216, "grad_norm": 0.7535857558250427, "learning_rate": 6.100400253823556e-05, "loss": 0.8568, "step": 13521 }, { "epoch": 2.512915814904293, "grad_norm": 0.7936834692955017, "learning_rate": 6.09905502087015e-05, "loss": 0.9994, "step": 13522 }, { "epoch": 2.513101653967664, "grad_norm": 0.9213391542434692, "learning_rate": 6.097709871175723e-05, "loss": 0.8465, "step": 13523 }, { "epoch": 2.513287493031035, "grad_norm": 0.8148267269134521, "learning_rate": 6.096364804768979e-05, "loss": 0.7718, "step": 13524 }, { "epoch": 2.5134733320944065, "grad_norm": 1.343180775642395, "learning_rate": 6.095019821678627e-05, "loss": 0.9792, "step": 13525 }, { "epoch": 2.5136591711577774, "grad_norm": 0.8396459221839905, "learning_rate": 6.093674921933381e-05, "loss": 1.22, "step": 13526 }, { "epoch": 2.5138450102211483, "grad_norm": 0.724976658821106, "learning_rate": 
6.0923301055619384e-05, "loss": 0.9506, "step": 13527 }, { "epoch": 2.5140308492845196, "grad_norm": 0.9246887564659119, "learning_rate": 6.0909853725930034e-05, "loss": 1.0247, "step": 13528 }, { "epoch": 2.514216688347891, "grad_norm": 0.8594968318939209, "learning_rate": 6.089640723055279e-05, "loss": 0.9865, "step": 13529 }, { "epoch": 2.514402527411262, "grad_norm": 0.734356701374054, "learning_rate": 6.088296156977462e-05, "loss": 1.0941, "step": 13530 }, { "epoch": 2.514588366474633, "grad_norm": 0.7757397890090942, "learning_rate": 6.0869516743882516e-05, "loss": 1.0349, "step": 13531 }, { "epoch": 2.514774205538004, "grad_norm": 0.6831685304641724, "learning_rate": 6.0856072753163406e-05, "loss": 0.635, "step": 13532 }, { "epoch": 2.5149600446013753, "grad_norm": 1.8188354969024658, "learning_rate": 6.084262959790428e-05, "loss": 1.5332, "step": 13533 }, { "epoch": 2.5151458836647462, "grad_norm": 0.8049079775810242, "learning_rate": 6.0829187278391996e-05, "loss": 0.9258, "step": 13534 }, { "epoch": 2.5153317227281176, "grad_norm": 0.8711287379264832, "learning_rate": 6.0815745794913524e-05, "loss": 1.0228, "step": 13535 }, { "epoch": 2.5155175617914884, "grad_norm": 0.8077500462532043, "learning_rate": 6.080230514775567e-05, "loss": 0.9122, "step": 13536 }, { "epoch": 2.5157034008548598, "grad_norm": 1.6004647016525269, "learning_rate": 6.0788865337205355e-05, "loss": 1.438, "step": 13537 }, { "epoch": 2.5158892399182307, "grad_norm": 1.7043529748916626, "learning_rate": 6.077542636354944e-05, "loss": 1.2784, "step": 13538 }, { "epoch": 2.516075078981602, "grad_norm": 0.8830113410949707, "learning_rate": 6.076198822707475e-05, "loss": 0.8335, "step": 13539 }, { "epoch": 2.5162609180449733, "grad_norm": 0.7613457441329956, "learning_rate": 6.074855092806806e-05, "loss": 1.0871, "step": 13540 }, { "epoch": 2.516446757108344, "grad_norm": 0.8486202359199524, "learning_rate": 6.0735114466816214e-05, "loss": 0.8161, "step": 13541 }, { "epoch": 
2.516632596171715, "grad_norm": 0.7651652097702026, "learning_rate": 6.072167884360596e-05, "loss": 0.8114, "step": 13542 }, { "epoch": 2.5168184352350864, "grad_norm": 0.6826961636543274, "learning_rate": 6.070824405872406e-05, "loss": 0.7817, "step": 13543 }, { "epoch": 2.5170042742984577, "grad_norm": 0.8031132817268372, "learning_rate": 6.0694810112457265e-05, "loss": 0.9292, "step": 13544 }, { "epoch": 2.5171901133618286, "grad_norm": 1.1389261484146118, "learning_rate": 6.068137700509231e-05, "loss": 1.064, "step": 13545 }, { "epoch": 2.5173759524251995, "grad_norm": 0.7853626012802124, "learning_rate": 6.066794473691586e-05, "loss": 0.9105, "step": 13546 }, { "epoch": 2.517561791488571, "grad_norm": 0.8402011394500732, "learning_rate": 6.0654513308214645e-05, "loss": 0.964, "step": 13547 }, { "epoch": 2.517747630551942, "grad_norm": 0.9154853820800781, "learning_rate": 6.064108271927532e-05, "loss": 1.1316, "step": 13548 }, { "epoch": 2.517933469615313, "grad_norm": 0.792783260345459, "learning_rate": 6.062765297038457e-05, "loss": 0.7286, "step": 13549 }, { "epoch": 2.5181193086786844, "grad_norm": 0.9570148587226868, "learning_rate": 6.061422406182897e-05, "loss": 0.9536, "step": 13550 }, { "epoch": 2.5183051477420553, "grad_norm": 0.837431788444519, "learning_rate": 6.060079599389521e-05, "loss": 0.8922, "step": 13551 }, { "epoch": 2.5184909868054266, "grad_norm": 0.7448437213897705, "learning_rate": 6.058736876686981e-05, "loss": 0.8407, "step": 13552 }, { "epoch": 2.5186768258687975, "grad_norm": 0.7879940271377563, "learning_rate": 6.057394238103942e-05, "loss": 1.0672, "step": 13553 }, { "epoch": 2.518862664932169, "grad_norm": 0.7715732455253601, "learning_rate": 6.056051683669053e-05, "loss": 0.7682, "step": 13554 }, { "epoch": 2.5190485039955397, "grad_norm": 0.7592453360557556, "learning_rate": 6.0547092134109764e-05, "loss": 0.6859, "step": 13555 }, { "epoch": 2.519234343058911, "grad_norm": 0.8092079162597656, "learning_rate": 
6.05336682735836e-05, "loss": 0.7248, "step": 13556 }, { "epoch": 2.519420182122282, "grad_norm": 0.8283814191818237, "learning_rate": 6.052024525539852e-05, "loss": 0.8203, "step": 13557 }, { "epoch": 2.5196060211856532, "grad_norm": 0.7173612713813782, "learning_rate": 6.050682307984109e-05, "loss": 0.9486, "step": 13558 }, { "epoch": 2.5197918602490246, "grad_norm": 0.9390692114830017, "learning_rate": 6.049340174719776e-05, "loss": 0.9158, "step": 13559 }, { "epoch": 2.5199776993123955, "grad_norm": 0.7687232494354248, "learning_rate": 6.047998125775496e-05, "loss": 0.8785, "step": 13560 }, { "epoch": 2.5201635383757663, "grad_norm": 0.8472660183906555, "learning_rate": 6.046656161179915e-05, "loss": 1.0567, "step": 13561 }, { "epoch": 2.5203493774391377, "grad_norm": 0.7931621074676514, "learning_rate": 6.045314280961674e-05, "loss": 0.9247, "step": 13562 }, { "epoch": 2.520535216502509, "grad_norm": 1.0868077278137207, "learning_rate": 6.043972485149414e-05, "loss": 1.158, "step": 13563 }, { "epoch": 2.52072105556588, "grad_norm": 0.8607794642448425, "learning_rate": 6.0426307737717715e-05, "loss": 0.9757, "step": 13564 }, { "epoch": 2.520906894629251, "grad_norm": 0.7563510537147522, "learning_rate": 6.041289146857384e-05, "loss": 0.8482, "step": 13565 }, { "epoch": 2.521092733692622, "grad_norm": 0.8587755560874939, "learning_rate": 6.039947604434886e-05, "loss": 0.9595, "step": 13566 }, { "epoch": 2.5212785727559934, "grad_norm": 0.7921454310417175, "learning_rate": 6.038606146532908e-05, "loss": 1.0287, "step": 13567 }, { "epoch": 2.5214644118193643, "grad_norm": 0.8255019187927246, "learning_rate": 6.0372647731800844e-05, "loss": 1.0019, "step": 13568 }, { "epoch": 2.5216502508827356, "grad_norm": 0.8133623003959656, "learning_rate": 6.0359234844050486e-05, "loss": 1.0032, "step": 13569 }, { "epoch": 2.5218360899461065, "grad_norm": 0.8292799592018127, "learning_rate": 6.034582280236419e-05, "loss": 1.0846, "step": 13570 }, { "epoch": 2.522021929009478, 
"grad_norm": 0.8180259466171265, "learning_rate": 6.033241160702829e-05, "loss": 1.0461, "step": 13571 }, { "epoch": 2.5222077680728487, "grad_norm": 0.6881454586982727, "learning_rate": 6.031900125832897e-05, "loss": 0.9085, "step": 13572 }, { "epoch": 2.52239360713622, "grad_norm": 0.833112359046936, "learning_rate": 6.030559175655249e-05, "loss": 1.047, "step": 13573 }, { "epoch": 2.5225794461995914, "grad_norm": 0.8363716006278992, "learning_rate": 6.029218310198502e-05, "loss": 0.9768, "step": 13574 }, { "epoch": 2.5227652852629623, "grad_norm": 0.9038662910461426, "learning_rate": 6.0278775294912784e-05, "loss": 0.9416, "step": 13575 }, { "epoch": 2.522951124326333, "grad_norm": 0.743466317653656, "learning_rate": 6.02653683356219e-05, "loss": 0.7738, "step": 13576 }, { "epoch": 2.5231369633897045, "grad_norm": 0.9426493644714355, "learning_rate": 6.025196222439855e-05, "loss": 1.1298, "step": 13577 }, { "epoch": 2.523322802453076, "grad_norm": 0.8024913668632507, "learning_rate": 6.023855696152881e-05, "loss": 0.9108, "step": 13578 }, { "epoch": 2.5235086415164467, "grad_norm": 0.9854022860527039, "learning_rate": 6.02251525472989e-05, "loss": 1.063, "step": 13579 }, { "epoch": 2.5236944805798176, "grad_norm": 0.942895233631134, "learning_rate": 6.021174898199482e-05, "loss": 1.2723, "step": 13580 }, { "epoch": 2.523880319643189, "grad_norm": 0.7434505224227905, "learning_rate": 6.0198346265902706e-05, "loss": 0.9639, "step": 13581 }, { "epoch": 2.5240661587065603, "grad_norm": 0.7559301853179932, "learning_rate": 6.018494439930857e-05, "loss": 0.9286, "step": 13582 }, { "epoch": 2.524251997769931, "grad_norm": 1.4655334949493408, "learning_rate": 6.0171543382498485e-05, "loss": 1.1281, "step": 13583 }, { "epoch": 2.5244378368333025, "grad_norm": 0.7436730265617371, "learning_rate": 6.0158143215758455e-05, "loss": 0.9599, "step": 13584 }, { "epoch": 2.5246236758966734, "grad_norm": 0.8883718252182007, "learning_rate": 6.014474389937449e-05, "loss": 0.9478, 
"step": 13585 }, { "epoch": 2.5248095149600447, "grad_norm": 0.7029927372932434, "learning_rate": 6.013134543363255e-05, "loss": 0.6995, "step": 13586 }, { "epoch": 2.5249953540234156, "grad_norm": 0.8948057889938354, "learning_rate": 6.011794781881866e-05, "loss": 0.9246, "step": 13587 }, { "epoch": 2.525181193086787, "grad_norm": 0.8800473809242249, "learning_rate": 6.010455105521866e-05, "loss": 0.9495, "step": 13588 }, { "epoch": 2.5253670321501582, "grad_norm": 0.6592245101928711, "learning_rate": 6.009115514311862e-05, "loss": 0.7117, "step": 13589 }, { "epoch": 2.525552871213529, "grad_norm": 0.9519256353378296, "learning_rate": 6.0077760082804355e-05, "loss": 0.8683, "step": 13590 }, { "epoch": 2.5257387102769, "grad_norm": 0.9765269160270691, "learning_rate": 6.0064365874561835e-05, "loss": 0.8311, "step": 13591 }, { "epoch": 2.5259245493402713, "grad_norm": 0.7550574541091919, "learning_rate": 6.005097251867685e-05, "loss": 0.9183, "step": 13592 }, { "epoch": 2.5261103884036427, "grad_norm": 0.8850460648536682, "learning_rate": 6.0037580015435334e-05, "loss": 0.9423, "step": 13593 }, { "epoch": 2.5262962274670135, "grad_norm": 0.9110420942306519, "learning_rate": 6.002418836512307e-05, "loss": 0.9126, "step": 13594 }, { "epoch": 2.5264820665303844, "grad_norm": 0.9628833532333374, "learning_rate": 6.001079756802592e-05, "loss": 1.0606, "step": 13595 }, { "epoch": 2.5266679055937558, "grad_norm": 1.0309127569198608, "learning_rate": 5.999740762442967e-05, "loss": 0.8266, "step": 13596 }, { "epoch": 2.526853744657127, "grad_norm": 0.7594256401062012, "learning_rate": 5.99840185346201e-05, "loss": 0.9296, "step": 13597 }, { "epoch": 2.527039583720498, "grad_norm": 0.7407244443893433, "learning_rate": 5.997063029888299e-05, "loss": 0.8898, "step": 13598 }, { "epoch": 2.5272254227838693, "grad_norm": 0.7121025919914246, "learning_rate": 5.995724291750404e-05, "loss": 0.848, "step": 13599 }, { "epoch": 2.52741126184724, "grad_norm": 0.8437877893447876, 
"learning_rate": 5.994385639076907e-05, "loss": 1.2426, "step": 13600 }, { "epoch": 2.5275971009106115, "grad_norm": 0.8755210041999817, "learning_rate": 5.993047071896374e-05, "loss": 0.9305, "step": 13601 }, { "epoch": 2.5277829399739824, "grad_norm": 0.9839112162590027, "learning_rate": 5.991708590237375e-05, "loss": 1.1434, "step": 13602 }, { "epoch": 2.5279687790373537, "grad_norm": 0.8365402817726135, "learning_rate": 5.990370194128479e-05, "loss": 1.0356, "step": 13603 }, { "epoch": 2.5281546181007246, "grad_norm": 0.8370087146759033, "learning_rate": 5.989031883598248e-05, "loss": 0.809, "step": 13604 }, { "epoch": 2.528340457164096, "grad_norm": 0.7712308168411255, "learning_rate": 5.987693658675251e-05, "loss": 0.8797, "step": 13605 }, { "epoch": 2.528526296227467, "grad_norm": 0.7896856665611267, "learning_rate": 5.986355519388045e-05, "loss": 0.8417, "step": 13606 }, { "epoch": 2.528712135290838, "grad_norm": 1.0379797220230103, "learning_rate": 5.9850174657651946e-05, "loss": 1.0399, "step": 13607 }, { "epoch": 2.5288979743542095, "grad_norm": 1.0011177062988281, "learning_rate": 5.983679497835255e-05, "loss": 1.0312, "step": 13608 }, { "epoch": 2.5290838134175804, "grad_norm": 0.777677059173584, "learning_rate": 5.982341615626781e-05, "loss": 0.6413, "step": 13609 }, { "epoch": 2.5292696524809513, "grad_norm": 0.8527577519416809, "learning_rate": 5.9810038191683335e-05, "loss": 0.8086, "step": 13610 }, { "epoch": 2.5294554915443226, "grad_norm": 0.8974331617355347, "learning_rate": 5.9796661084884644e-05, "loss": 0.8363, "step": 13611 }, { "epoch": 2.529641330607694, "grad_norm": 0.9144704341888428, "learning_rate": 5.978328483615722e-05, "loss": 0.9432, "step": 13612 }, { "epoch": 2.529827169671065, "grad_norm": 0.9022623896598816, "learning_rate": 5.976990944578658e-05, "loss": 0.7475, "step": 13613 }, { "epoch": 2.530013008734436, "grad_norm": 0.991865336894989, "learning_rate": 5.975653491405816e-05, "loss": 0.9885, "step": 13614 }, { "epoch": 
2.530198847797807, "grad_norm": 0.7925933003425598, "learning_rate": 5.9743161241257475e-05, "loss": 1.0289, "step": 13615 }, { "epoch": 2.5303846868611783, "grad_norm": 0.9255247712135315, "learning_rate": 5.9729788427669895e-05, "loss": 0.9356, "step": 13616 }, { "epoch": 2.5305705259245492, "grad_norm": 0.8493859767913818, "learning_rate": 5.9716416473580905e-05, "loss": 0.928, "step": 13617 }, { "epoch": 2.5307563649879206, "grad_norm": 0.8425911068916321, "learning_rate": 5.9703045379275845e-05, "loss": 1.0973, "step": 13618 }, { "epoch": 2.5309422040512914, "grad_norm": 0.827329158782959, "learning_rate": 5.968967514504014e-05, "loss": 0.9527, "step": 13619 }, { "epoch": 2.5311280431146628, "grad_norm": 0.7627117037773132, "learning_rate": 5.9676305771159114e-05, "loss": 0.9594, "step": 13620 }, { "epoch": 2.5313138821780337, "grad_norm": 1.0649540424346924, "learning_rate": 5.966293725791818e-05, "loss": 0.7797, "step": 13621 }, { "epoch": 2.531499721241405, "grad_norm": 2.9969398975372314, "learning_rate": 5.964956960560263e-05, "loss": 1.3579, "step": 13622 }, { "epoch": 2.5316855603047763, "grad_norm": 0.7865945100784302, "learning_rate": 5.963620281449778e-05, "loss": 0.7402, "step": 13623 }, { "epoch": 2.531871399368147, "grad_norm": 0.7578870058059692, "learning_rate": 5.96228368848889e-05, "loss": 0.8787, "step": 13624 }, { "epoch": 2.532057238431518, "grad_norm": 0.7157632112503052, "learning_rate": 5.960947181706128e-05, "loss": 0.9353, "step": 13625 }, { "epoch": 2.5322430774948894, "grad_norm": 0.9781704545021057, "learning_rate": 5.9596107611300164e-05, "loss": 1.0551, "step": 13626 }, { "epoch": 2.5324289165582607, "grad_norm": 0.8759918212890625, "learning_rate": 5.9582744267890814e-05, "loss": 1.019, "step": 13627 }, { "epoch": 2.5326147556216316, "grad_norm": 0.8140712976455688, "learning_rate": 5.9569381787118404e-05, "loss": 1.1218, "step": 13628 }, { "epoch": 2.5328005946850025, "grad_norm": 0.7885955572128296, "learning_rate": 
5.9556020169268156e-05, "loss": 0.8807, "step": 13629 }, { "epoch": 2.532986433748374, "grad_norm": 1.094180941581726, "learning_rate": 5.9542659414625246e-05, "loss": 1.0929, "step": 13630 }, { "epoch": 2.533172272811745, "grad_norm": 0.8771237730979919, "learning_rate": 5.952929952347488e-05, "loss": 1.0978, "step": 13631 }, { "epoch": 2.533358111875116, "grad_norm": 0.7179327011108398, "learning_rate": 5.951594049610214e-05, "loss": 0.8338, "step": 13632 }, { "epoch": 2.5335439509384874, "grad_norm": 0.7645947933197021, "learning_rate": 5.9502582332792204e-05, "loss": 0.6564, "step": 13633 }, { "epoch": 2.5337297900018583, "grad_norm": 0.8140125274658203, "learning_rate": 5.948922503383012e-05, "loss": 1.1048, "step": 13634 }, { "epoch": 2.5339156290652296, "grad_norm": 0.8520426750183105, "learning_rate": 5.947586859950103e-05, "loss": 0.8477, "step": 13635 }, { "epoch": 2.5341014681286005, "grad_norm": 0.8290002346038818, "learning_rate": 5.946251303008996e-05, "loss": 0.8047, "step": 13636 }, { "epoch": 2.534287307191972, "grad_norm": 0.709611177444458, "learning_rate": 5.944915832588201e-05, "loss": 0.8665, "step": 13637 }, { "epoch": 2.534473146255343, "grad_norm": 0.8430138230323792, "learning_rate": 5.943580448716217e-05, "loss": 1.024, "step": 13638 }, { "epoch": 2.534658985318714, "grad_norm": 0.981663167476654, "learning_rate": 5.942245151421548e-05, "loss": 1.0096, "step": 13639 }, { "epoch": 2.534844824382085, "grad_norm": 0.778021514415741, "learning_rate": 5.940909940732693e-05, "loss": 0.7695, "step": 13640 }, { "epoch": 2.5350306634454562, "grad_norm": 0.9181351065635681, "learning_rate": 5.939574816678146e-05, "loss": 0.9499, "step": 13641 }, { "epoch": 2.5352165025088276, "grad_norm": 0.7551867365837097, "learning_rate": 5.93823977928641e-05, "loss": 1.0542, "step": 13642 }, { "epoch": 2.5354023415721985, "grad_norm": 0.8496702909469604, "learning_rate": 5.936904828585976e-05, "loss": 0.9137, "step": 13643 }, { "epoch": 2.5355881806355693, 
"grad_norm": 0.8048803806304932, "learning_rate": 5.9355699646053356e-05, "loss": 0.8004, "step": 13644 }, { "epoch": 2.5357740196989407, "grad_norm": 0.9019173383712769, "learning_rate": 5.934235187372981e-05, "loss": 1.0288, "step": 13645 }, { "epoch": 2.535959858762312, "grad_norm": 0.8891580700874329, "learning_rate": 5.9329004969173965e-05, "loss": 1.027, "step": 13646 }, { "epoch": 2.536145697825683, "grad_norm": 0.8195191621780396, "learning_rate": 5.9315658932670726e-05, "loss": 0.9147, "step": 13647 }, { "epoch": 2.536331536889054, "grad_norm": 0.7364562153816223, "learning_rate": 5.9302313764504927e-05, "loss": 0.913, "step": 13648 }, { "epoch": 2.536517375952425, "grad_norm": 0.723248302936554, "learning_rate": 5.9288969464961385e-05, "loss": 0.8202, "step": 13649 }, { "epoch": 2.5367032150157964, "grad_norm": 0.9361730813980103, "learning_rate": 5.927562603432497e-05, "loss": 0.8501, "step": 13650 }, { "epoch": 2.5368890540791673, "grad_norm": 0.8435133695602417, "learning_rate": 5.9262283472880365e-05, "loss": 0.8544, "step": 13651 }, { "epoch": 2.5370748931425386, "grad_norm": 0.7979792952537537, "learning_rate": 5.924894178091245e-05, "loss": 0.9817, "step": 13652 }, { "epoch": 2.5372607322059095, "grad_norm": 0.9304459095001221, "learning_rate": 5.923560095870595e-05, "loss": 0.8531, "step": 13653 }, { "epoch": 2.537446571269281, "grad_norm": 0.7247792482376099, "learning_rate": 5.9222261006545576e-05, "loss": 0.7344, "step": 13654 }, { "epoch": 2.5376324103326517, "grad_norm": 0.778704047203064, "learning_rate": 5.92089219247161e-05, "loss": 0.9272, "step": 13655 }, { "epoch": 2.537818249396023, "grad_norm": 0.6862264275550842, "learning_rate": 5.919558371350213e-05, "loss": 0.9147, "step": 13656 }, { "epoch": 2.5380040884593944, "grad_norm": 0.8819536566734314, "learning_rate": 5.9182246373188456e-05, "loss": 0.882, "step": 13657 }, { "epoch": 2.5381899275227653, "grad_norm": 0.780981183052063, "learning_rate": 5.916890990405966e-05, "loss": 
0.8523, "step": 13658 }, { "epoch": 2.538375766586136, "grad_norm": 0.828430712223053, "learning_rate": 5.9155574306400395e-05, "loss": 1.0052, "step": 13659 }, { "epoch": 2.5385616056495075, "grad_norm": 1.1634652614593506, "learning_rate": 5.914223958049534e-05, "loss": 0.9461, "step": 13660 }, { "epoch": 2.538747444712879, "grad_norm": 0.7423953413963318, "learning_rate": 5.912890572662903e-05, "loss": 0.6167, "step": 13661 }, { "epoch": 2.5389332837762497, "grad_norm": 0.7293835878372192, "learning_rate": 5.911557274508609e-05, "loss": 0.8475, "step": 13662 }, { "epoch": 2.539119122839621, "grad_norm": 0.8138513565063477, "learning_rate": 5.9102240636151126e-05, "loss": 0.9412, "step": 13663 }, { "epoch": 2.539304961902992, "grad_norm": 0.8363872766494751, "learning_rate": 5.908890940010863e-05, "loss": 0.9112, "step": 13664 }, { "epoch": 2.5394908009663633, "grad_norm": 0.921448290348053, "learning_rate": 5.907557903724317e-05, "loss": 0.9629, "step": 13665 }, { "epoch": 2.539676640029734, "grad_norm": 0.8595923185348511, "learning_rate": 5.906224954783923e-05, "loss": 0.9787, "step": 13666 }, { "epoch": 2.5398624790931055, "grad_norm": 0.8172317147254944, "learning_rate": 5.904892093218134e-05, "loss": 0.7755, "step": 13667 }, { "epoch": 2.5400483181564764, "grad_norm": 0.8472798466682434, "learning_rate": 5.903559319055394e-05, "loss": 0.752, "step": 13668 }, { "epoch": 2.5402341572198477, "grad_norm": 0.7198326587677002, "learning_rate": 5.90222663232415e-05, "loss": 0.8107, "step": 13669 }, { "epoch": 2.5404199962832186, "grad_norm": 0.8129236102104187, "learning_rate": 5.9008940330528505e-05, "loss": 0.8908, "step": 13670 }, { "epoch": 2.54060583534659, "grad_norm": 0.768535852432251, "learning_rate": 5.899561521269929e-05, "loss": 0.9418, "step": 13671 }, { "epoch": 2.5407916744099612, "grad_norm": 0.9059661626815796, "learning_rate": 5.898229097003829e-05, "loss": 0.8386, "step": 13672 }, { "epoch": 2.540977513473332, "grad_norm": 0.8625373840332031, 
"learning_rate": 5.896896760282995e-05, "loss": 1.1597, "step": 13673 }, { "epoch": 2.541163352536703, "grad_norm": 0.7552303671836853, "learning_rate": 5.895564511135856e-05, "loss": 0.886, "step": 13674 }, { "epoch": 2.5413491916000743, "grad_norm": 0.7569305896759033, "learning_rate": 5.89423234959085e-05, "loss": 0.8546, "step": 13675 }, { "epoch": 2.5415350306634457, "grad_norm": 0.802854597568512, "learning_rate": 5.892900275676407e-05, "loss": 0.9316, "step": 13676 }, { "epoch": 2.5417208697268165, "grad_norm": 0.7180258631706238, "learning_rate": 5.8915682894209626e-05, "loss": 0.8168, "step": 13677 }, { "epoch": 2.5419067087901874, "grad_norm": 0.8847736120223999, "learning_rate": 5.89023639085294e-05, "loss": 0.8559, "step": 13678 }, { "epoch": 2.5420925478535588, "grad_norm": 0.7706083655357361, "learning_rate": 5.888904580000769e-05, "loss": 0.9055, "step": 13679 }, { "epoch": 2.54227838691693, "grad_norm": 0.9170486330986023, "learning_rate": 5.887572856892877e-05, "loss": 0.8428, "step": 13680 }, { "epoch": 2.542464225980301, "grad_norm": 0.9619743227958679, "learning_rate": 5.8862412215576824e-05, "loss": 1.0794, "step": 13681 }, { "epoch": 2.5426500650436723, "grad_norm": 0.8550994396209717, "learning_rate": 5.8849096740236134e-05, "loss": 0.907, "step": 13682 }, { "epoch": 2.542835904107043, "grad_norm": 0.9619942903518677, "learning_rate": 5.88357821431908e-05, "loss": 0.9376, "step": 13683 }, { "epoch": 2.5430217431704145, "grad_norm": 1.087105393409729, "learning_rate": 5.8822468424725075e-05, "loss": 0.8564, "step": 13684 }, { "epoch": 2.5432075822337854, "grad_norm": 9.038790702819824, "learning_rate": 5.880915558512312e-05, "loss": 1.658, "step": 13685 }, { "epoch": 2.5433934212971567, "grad_norm": 0.8297736644744873, "learning_rate": 5.879584362466903e-05, "loss": 0.9431, "step": 13686 }, { "epoch": 2.543579260360528, "grad_norm": 0.9380455017089844, "learning_rate": 5.8782532543646985e-05, "loss": 1.0081, "step": 13687 }, { "epoch": 
2.543765099423899, "grad_norm": 0.6814043521881104, "learning_rate": 5.876922234234101e-05, "loss": 0.8086, "step": 13688 }, { "epoch": 2.54395093848727, "grad_norm": 0.9108039140701294, "learning_rate": 5.875591302103525e-05, "loss": 0.8482, "step": 13689 }, { "epoch": 2.544136777550641, "grad_norm": 0.8931623697280884, "learning_rate": 5.874260458001375e-05, "loss": 0.9589, "step": 13690 }, { "epoch": 2.5443226166140125, "grad_norm": 0.7117545008659363, "learning_rate": 5.872929701956054e-05, "loss": 0.6862, "step": 13691 }, { "epoch": 2.5445084556773834, "grad_norm": 1.050232172012329, "learning_rate": 5.871599033995968e-05, "loss": 0.9403, "step": 13692 }, { "epoch": 2.5446942947407543, "grad_norm": 0.9568449854850769, "learning_rate": 5.8702684541495125e-05, "loss": 0.9937, "step": 13693 }, { "epoch": 2.5448801338041256, "grad_norm": 0.7261242270469666, "learning_rate": 5.8689379624450914e-05, "loss": 0.7272, "step": 13694 }, { "epoch": 2.545065972867497, "grad_norm": 0.8362358808517456, "learning_rate": 5.867607558911103e-05, "loss": 0.949, "step": 13695 }, { "epoch": 2.545251811930868, "grad_norm": 0.9631180763244629, "learning_rate": 5.866277243575937e-05, "loss": 0.9955, "step": 13696 }, { "epoch": 2.545437650994239, "grad_norm": 0.8250423073768616, "learning_rate": 5.864947016467992e-05, "loss": 0.8525, "step": 13697 }, { "epoch": 2.54562349005761, "grad_norm": 0.706535279750824, "learning_rate": 5.863616877615656e-05, "loss": 0.7951, "step": 13698 }, { "epoch": 2.5458093291209813, "grad_norm": 0.8374561667442322, "learning_rate": 5.862286827047319e-05, "loss": 0.8642, "step": 13699 }, { "epoch": 2.5459951681843522, "grad_norm": 0.800125241279602, "learning_rate": 5.860956864791369e-05, "loss": 0.883, "step": 13700 }, { "epoch": 2.5461810072477236, "grad_norm": 0.8778433203697205, "learning_rate": 5.859626990876191e-05, "loss": 1.1478, "step": 13701 }, { "epoch": 2.5463668463110944, "grad_norm": 1.111130952835083, "learning_rate": 5.858297205330172e-05, 
"loss": 0.9495, "step": 13702 }, { "epoch": 2.5465526853744658, "grad_norm": 0.8640955090522766, "learning_rate": 5.85696750818169e-05, "loss": 0.8719, "step": 13703 }, { "epoch": 2.5467385244378367, "grad_norm": 0.7243440747261047, "learning_rate": 5.8556378994591244e-05, "loss": 0.7848, "step": 13704 }, { "epoch": 2.546924363501208, "grad_norm": 0.844635009765625, "learning_rate": 5.8543083791908604e-05, "loss": 0.8264, "step": 13705 }, { "epoch": 2.5471102025645793, "grad_norm": 0.804513156414032, "learning_rate": 5.852978947405267e-05, "loss": 1.0957, "step": 13706 }, { "epoch": 2.54729604162795, "grad_norm": 0.818187415599823, "learning_rate": 5.851649604130723e-05, "loss": 0.7591, "step": 13707 }, { "epoch": 2.547481880691321, "grad_norm": 0.7252411842346191, "learning_rate": 5.8503203493955985e-05, "loss": 0.9239, "step": 13708 }, { "epoch": 2.5476677197546924, "grad_norm": 0.8514246344566345, "learning_rate": 5.848991183228264e-05, "loss": 0.961, "step": 13709 }, { "epoch": 2.5478535588180637, "grad_norm": 0.8979095816612244, "learning_rate": 5.8476621056570924e-05, "loss": 1.0529, "step": 13710 }, { "epoch": 2.5480393978814346, "grad_norm": 0.7347493767738342, "learning_rate": 5.8463331167104453e-05, "loss": 0.7018, "step": 13711 }, { "epoch": 2.548225236944806, "grad_norm": 0.9283837676048279, "learning_rate": 5.84500421641669e-05, "loss": 1.0879, "step": 13712 }, { "epoch": 2.548411076008177, "grad_norm": 0.9232157468795776, "learning_rate": 5.8436754048041896e-05, "loss": 0.9902, "step": 13713 }, { "epoch": 2.548596915071548, "grad_norm": 0.824123740196228, "learning_rate": 5.842346681901302e-05, "loss": 0.9658, "step": 13714 }, { "epoch": 2.548782754134919, "grad_norm": 0.8550252914428711, "learning_rate": 5.841018047736394e-05, "loss": 0.7703, "step": 13715 }, { "epoch": 2.5489685931982904, "grad_norm": 0.7530178427696228, "learning_rate": 5.839689502337817e-05, "loss": 0.8716, "step": 13716 }, { "epoch": 2.5491544322616613, "grad_norm": 
0.9273643493652344, "learning_rate": 5.838361045733927e-05, "loss": 1.0495, "step": 13717 }, { "epoch": 2.5493402713250326, "grad_norm": 0.9969702363014221, "learning_rate": 5.837032677953083e-05, "loss": 0.9738, "step": 13718 }, { "epoch": 2.5495261103884035, "grad_norm": 0.8267450928688049, "learning_rate": 5.83570439902363e-05, "loss": 0.8757, "step": 13719 }, { "epoch": 2.549711949451775, "grad_norm": 0.7460461854934692, "learning_rate": 5.8343762089739175e-05, "loss": 1.0294, "step": 13720 }, { "epoch": 2.549897788515146, "grad_norm": 0.7614554762840271, "learning_rate": 5.833048107832301e-05, "loss": 0.8374, "step": 13721 }, { "epoch": 2.550083627578517, "grad_norm": 0.7778684496879578, "learning_rate": 5.831720095627121e-05, "loss": 1.0021, "step": 13722 }, { "epoch": 2.550269466641888, "grad_norm": 0.8780698776245117, "learning_rate": 5.830392172386723e-05, "loss": 1.0871, "step": 13723 }, { "epoch": 2.5504553057052592, "grad_norm": 0.7678472399711609, "learning_rate": 5.829064338139445e-05, "loss": 0.8444, "step": 13724 }, { "epoch": 2.5506411447686306, "grad_norm": 0.805841863155365, "learning_rate": 5.827736592913631e-05, "loss": 0.6904, "step": 13725 }, { "epoch": 2.5508269838320015, "grad_norm": 0.8294493556022644, "learning_rate": 5.826408936737622e-05, "loss": 0.9244, "step": 13726 }, { "epoch": 2.5510128228953723, "grad_norm": 0.7568511962890625, "learning_rate": 5.825081369639753e-05, "loss": 0.7845, "step": 13727 }, { "epoch": 2.5511986619587437, "grad_norm": 0.8128231763839722, "learning_rate": 5.823753891648353e-05, "loss": 0.9111, "step": 13728 }, { "epoch": 2.551384501022115, "grad_norm": 0.8409896492958069, "learning_rate": 5.822426502791766e-05, "loss": 0.7971, "step": 13729 }, { "epoch": 2.551570340085486, "grad_norm": 0.8081522583961487, "learning_rate": 5.8210992030983144e-05, "loss": 0.8657, "step": 13730 }, { "epoch": 2.551756179148857, "grad_norm": 0.65762859582901, "learning_rate": 5.81977199259633e-05, "loss": 0.4465, "step": 13731 
}, { "epoch": 2.551942018212228, "grad_norm": 1.1293163299560547, "learning_rate": 5.8184448713141334e-05, "loss": 1.0436, "step": 13732 }, { "epoch": 2.5521278572755994, "grad_norm": 1.0125041007995605, "learning_rate": 5.8171178392800595e-05, "loss": 1.0815, "step": 13733 }, { "epoch": 2.5523136963389703, "grad_norm": 0.8967159986495972, "learning_rate": 5.815790896522428e-05, "loss": 1.1234, "step": 13734 }, { "epoch": 2.5524995354023416, "grad_norm": 0.7436438202857971, "learning_rate": 5.814464043069556e-05, "loss": 0.7549, "step": 13735 }, { "epoch": 2.5526853744657125, "grad_norm": 0.8675150275230408, "learning_rate": 5.813137278949769e-05, "loss": 0.8948, "step": 13736 }, { "epoch": 2.552871213529084, "grad_norm": 0.7287111282348633, "learning_rate": 5.811810604191379e-05, "loss": 0.6142, "step": 13737 }, { "epoch": 2.5530570525924547, "grad_norm": 0.7745053172111511, "learning_rate": 5.8104840188227075e-05, "loss": 0.8256, "step": 13738 }, { "epoch": 2.553242891655826, "grad_norm": 0.8518672585487366, "learning_rate": 5.809157522872065e-05, "loss": 0.952, "step": 13739 }, { "epoch": 2.5534287307191974, "grad_norm": 0.7259352207183838, "learning_rate": 5.807831116367759e-05, "loss": 0.871, "step": 13740 }, { "epoch": 2.5536145697825683, "grad_norm": 0.8650814294815063, "learning_rate": 5.806504799338109e-05, "loss": 1.0228, "step": 13741 }, { "epoch": 2.553800408845939, "grad_norm": 0.8809137940406799, "learning_rate": 5.8051785718114185e-05, "loss": 0.8819, "step": 13742 }, { "epoch": 2.5539862479093105, "grad_norm": 0.7917113304138184, "learning_rate": 5.80385243381599e-05, "loss": 0.8703, "step": 13743 }, { "epoch": 2.554172086972682, "grad_norm": 0.9165493845939636, "learning_rate": 5.802526385380128e-05, "loss": 1.0186, "step": 13744 }, { "epoch": 2.5543579260360527, "grad_norm": 0.8668243288993835, "learning_rate": 5.8012004265321406e-05, "loss": 0.9955, "step": 13745 }, { "epoch": 2.554543765099424, "grad_norm": 1.047468900680542, "learning_rate": 
5.799874557300322e-05, "loss": 1.0813, "step": 13746 }, { "epoch": 2.554729604162795, "grad_norm": 0.7318970561027527, "learning_rate": 5.7985487777129765e-05, "loss": 1.0389, "step": 13747 }, { "epoch": 2.5549154432261663, "grad_norm": 0.8629610538482666, "learning_rate": 5.7972230877983933e-05, "loss": 0.6949, "step": 13748 }, { "epoch": 2.555101282289537, "grad_norm": 0.7724053263664246, "learning_rate": 5.795897487584875e-05, "loss": 0.9542, "step": 13749 }, { "epoch": 2.5552871213529085, "grad_norm": 0.8183825016021729, "learning_rate": 5.79457197710071e-05, "loss": 0.884, "step": 13750 }, { "epoch": 2.5554729604162794, "grad_norm": 1.026491403579712, "learning_rate": 5.79324655637419e-05, "loss": 1.1477, "step": 13751 }, { "epoch": 2.5556587994796507, "grad_norm": 0.7449808120727539, "learning_rate": 5.7919212254335987e-05, "loss": 0.9825, "step": 13752 }, { "epoch": 2.5558446385430216, "grad_norm": 0.8822129368782043, "learning_rate": 5.790595984307232e-05, "loss": 0.9226, "step": 13753 }, { "epoch": 2.556030477606393, "grad_norm": 0.8080644011497498, "learning_rate": 5.78927083302337e-05, "loss": 0.7673, "step": 13754 }, { "epoch": 2.5562163166697642, "grad_norm": 0.775949239730835, "learning_rate": 5.787945771610296e-05, "loss": 1.1468, "step": 13755 }, { "epoch": 2.556402155733135, "grad_norm": 0.9412445425987244, "learning_rate": 5.7866208000962873e-05, "loss": 0.9846, "step": 13756 }, { "epoch": 2.556587994796506, "grad_norm": 0.8279641270637512, "learning_rate": 5.785295918509628e-05, "loss": 1.0843, "step": 13757 }, { "epoch": 2.5567738338598773, "grad_norm": 0.7588843107223511, "learning_rate": 5.783971126878598e-05, "loss": 0.9479, "step": 13758 }, { "epoch": 2.5569596729232487, "grad_norm": 1.0775775909423828, "learning_rate": 5.7826464252314683e-05, "loss": 1.0048, "step": 13759 }, { "epoch": 2.5571455119866195, "grad_norm": 0.7340498566627502, "learning_rate": 5.7813218135965105e-05, "loss": 0.8742, "step": 13760 }, { "epoch": 2.5573313510499904, 
"grad_norm": 0.872485339641571, "learning_rate": 5.779997292002004e-05, "loss": 0.9653, "step": 13761 }, { "epoch": 2.5575171901133618, "grad_norm": 0.7634876370429993, "learning_rate": 5.778672860476212e-05, "loss": 0.7495, "step": 13762 }, { "epoch": 2.557703029176733, "grad_norm": 0.8036033511161804, "learning_rate": 5.7773485190474044e-05, "loss": 0.8182, "step": 13763 }, { "epoch": 2.557888868240104, "grad_norm": 0.9266873598098755, "learning_rate": 5.7760242677438416e-05, "loss": 1.1838, "step": 13764 }, { "epoch": 2.5580747073034753, "grad_norm": 0.8402783870697021, "learning_rate": 5.774700106593797e-05, "loss": 0.8673, "step": 13765 }, { "epoch": 2.558260546366846, "grad_norm": 0.7656075358390808, "learning_rate": 5.7733760356255264e-05, "loss": 1.1087, "step": 13766 }, { "epoch": 2.5584463854302175, "grad_norm": 0.8482323288917542, "learning_rate": 5.772052054867289e-05, "loss": 1.0285, "step": 13767 }, { "epoch": 2.5586322244935884, "grad_norm": 0.8206349611282349, "learning_rate": 5.770728164347343e-05, "loss": 0.899, "step": 13768 }, { "epoch": 2.5588180635569597, "grad_norm": 0.8543358445167542, "learning_rate": 5.769404364093952e-05, "loss": 0.8919, "step": 13769 }, { "epoch": 2.559003902620331, "grad_norm": 1.1699315309524536, "learning_rate": 5.768080654135365e-05, "loss": 0.7744, "step": 13770 }, { "epoch": 2.559189741683702, "grad_norm": 0.8294997215270996, "learning_rate": 5.7667570344998343e-05, "loss": 0.9095, "step": 13771 }, { "epoch": 2.559375580747073, "grad_norm": 0.8407626748085022, "learning_rate": 5.765433505215605e-05, "loss": 0.9098, "step": 13772 }, { "epoch": 2.559561419810444, "grad_norm": 0.927783191204071, "learning_rate": 5.764110066310935e-05, "loss": 0.7629, "step": 13773 }, { "epoch": 2.5597472588738155, "grad_norm": 0.8324963450431824, "learning_rate": 5.762786717814067e-05, "loss": 0.8243, "step": 13774 }, { "epoch": 2.5599330979371864, "grad_norm": 0.8019198775291443, "learning_rate": 5.761463459753245e-05, "loss": 0.809, 
"step": 13775 }, { "epoch": 2.5601189370005573, "grad_norm": 0.9161101579666138, "learning_rate": 5.760140292156708e-05, "loss": 0.8882, "step": 13776 }, { "epoch": 2.5603047760639286, "grad_norm": 1.7970740795135498, "learning_rate": 5.7588172150527006e-05, "loss": 1.314, "step": 13777 }, { "epoch": 2.5604906151273, "grad_norm": 1.0165661573410034, "learning_rate": 5.7574942284694656e-05, "loss": 0.9316, "step": 13778 }, { "epoch": 2.560676454190671, "grad_norm": 0.8424798250198364, "learning_rate": 5.7561713324352365e-05, "loss": 1.0966, "step": 13779 }, { "epoch": 2.560862293254042, "grad_norm": 0.7895606756210327, "learning_rate": 5.754848526978242e-05, "loss": 0.7786, "step": 13780 }, { "epoch": 2.561048132317413, "grad_norm": 0.6329425573348999, "learning_rate": 5.753525812126726e-05, "loss": 0.6509, "step": 13781 }, { "epoch": 2.5612339713807843, "grad_norm": 0.8896647691726685, "learning_rate": 5.7522031879089133e-05, "loss": 1.1891, "step": 13782 }, { "epoch": 2.5614198104441552, "grad_norm": 0.838678240776062, "learning_rate": 5.7508806543530346e-05, "loss": 1.099, "step": 13783 }, { "epoch": 2.5616056495075266, "grad_norm": 0.7234699130058289, "learning_rate": 5.7495582114873114e-05, "loss": 0.8925, "step": 13784 }, { "epoch": 2.5617914885708974, "grad_norm": 0.9089459180831909, "learning_rate": 5.7482358593399796e-05, "loss": 0.8512, "step": 13785 }, { "epoch": 2.5619773276342688, "grad_norm": 0.8577240705490112, "learning_rate": 5.7469135979392576e-05, "loss": 0.8033, "step": 13786 }, { "epoch": 2.5621631666976397, "grad_norm": 0.7673660516738892, "learning_rate": 5.7455914273133646e-05, "loss": 1.0081, "step": 13787 }, { "epoch": 2.562349005761011, "grad_norm": 0.7493413686752319, "learning_rate": 5.744269347490514e-05, "loss": 0.7829, "step": 13788 }, { "epoch": 2.5625348448243823, "grad_norm": 0.8880299925804138, "learning_rate": 5.74294735849894e-05, "loss": 0.6736, "step": 13789 }, { "epoch": 2.562720683887753, "grad_norm": 0.787715494632721, 
"learning_rate": 5.7416254603668484e-05, "loss": 1.0808, "step": 13790 }, { "epoch": 2.562906522951124, "grad_norm": 0.8745567798614502, "learning_rate": 5.7403036531224544e-05, "loss": 1.0884, "step": 13791 }, { "epoch": 2.5630923620144954, "grad_norm": 0.8465336561203003, "learning_rate": 5.7389819367939656e-05, "loss": 0.7757, "step": 13792 }, { "epoch": 2.5632782010778667, "grad_norm": 0.9584832787513733, "learning_rate": 5.7376603114095986e-05, "loss": 1.0241, "step": 13793 }, { "epoch": 2.5634640401412376, "grad_norm": 0.758416473865509, "learning_rate": 5.736338776997556e-05, "loss": 0.972, "step": 13794 }, { "epoch": 2.563649879204609, "grad_norm": 1.0920721292495728, "learning_rate": 5.735017333586048e-05, "loss": 0.9077, "step": 13795 }, { "epoch": 2.56383571826798, "grad_norm": 0.7854132652282715, "learning_rate": 5.733695981203271e-05, "loss": 0.7501, "step": 13796 }, { "epoch": 2.564021557331351, "grad_norm": 0.7899761199951172, "learning_rate": 5.732374719877436e-05, "loss": 0.9115, "step": 13797 }, { "epoch": 2.564207396394722, "grad_norm": 0.7128276824951172, "learning_rate": 5.731053549636733e-05, "loss": 0.7982, "step": 13798 }, { "epoch": 2.5643932354580934, "grad_norm": 0.9120762944221497, "learning_rate": 5.729732470509372e-05, "loss": 0.9454, "step": 13799 }, { "epoch": 2.5645790745214643, "grad_norm": 0.9080113768577576, "learning_rate": 5.7284114825235416e-05, "loss": 0.9263, "step": 13800 }, { "epoch": 2.5647649135848356, "grad_norm": 0.7981117963790894, "learning_rate": 5.727090585707439e-05, "loss": 0.7399, "step": 13801 }, { "epoch": 2.5649507526482065, "grad_norm": 1.0038573741912842, "learning_rate": 5.725769780089256e-05, "loss": 1.034, "step": 13802 }, { "epoch": 2.565136591711578, "grad_norm": 1.3188296556472778, "learning_rate": 5.7244490656971815e-05, "loss": 1.2923, "step": 13803 }, { "epoch": 2.565322430774949, "grad_norm": 0.8303518891334534, "learning_rate": 5.723128442559401e-05, "loss": 0.7213, "step": 13804 }, { "epoch": 
2.56550826983832, "grad_norm": 0.918181300163269, "learning_rate": 5.721807910704109e-05, "loss": 0.9924, "step": 13805 }, { "epoch": 2.565694108901691, "grad_norm": 0.896000325679779, "learning_rate": 5.720487470159484e-05, "loss": 0.9482, "step": 13806 }, { "epoch": 2.5658799479650622, "grad_norm": 0.8845444321632385, "learning_rate": 5.7191671209537124e-05, "loss": 1.0871, "step": 13807 }, { "epoch": 2.5660657870284336, "grad_norm": 0.8426279425621033, "learning_rate": 5.717846863114966e-05, "loss": 1.1556, "step": 13808 }, { "epoch": 2.5662516260918045, "grad_norm": 0.9557228088378906, "learning_rate": 5.716526696671431e-05, "loss": 1.0941, "step": 13809 }, { "epoch": 2.5664374651551753, "grad_norm": 0.5845919251441956, "learning_rate": 5.715206621651288e-05, "loss": 0.4136, "step": 13810 }, { "epoch": 2.5666233042185467, "grad_norm": 0.8088479042053223, "learning_rate": 5.713886638082705e-05, "loss": 0.9118, "step": 13811 }, { "epoch": 2.566809143281918, "grad_norm": 0.937294602394104, "learning_rate": 5.712566745993853e-05, "loss": 0.9451, "step": 13812 }, { "epoch": 2.566994982345289, "grad_norm": 1.0141209363937378, "learning_rate": 5.711246945412909e-05, "loss": 1.0489, "step": 13813 }, { "epoch": 2.56718082140866, "grad_norm": 0.970757246017456, "learning_rate": 5.7099272363680414e-05, "loss": 0.9403, "step": 13814 }, { "epoch": 2.567366660472031, "grad_norm": 0.8191547989845276, "learning_rate": 5.708607618887414e-05, "loss": 0.7734, "step": 13815 }, { "epoch": 2.5675524995354024, "grad_norm": 1.0061275959014893, "learning_rate": 5.707288092999188e-05, "loss": 0.9257, "step": 13816 }, { "epoch": 2.5677383385987733, "grad_norm": 0.7247336506843567, "learning_rate": 5.705968658731535e-05, "loss": 0.828, "step": 13817 }, { "epoch": 2.5679241776621446, "grad_norm": 0.8073325157165527, "learning_rate": 5.7046493161126135e-05, "loss": 0.9172, "step": 13818 }, { "epoch": 2.568110016725516, "grad_norm": 0.9320276975631714, "learning_rate": 5.703330065170577e-05, 
"loss": 1.0238, "step": 13819 }, { "epoch": 2.568295855788887, "grad_norm": 0.7512152791023254, "learning_rate": 5.7020109059335855e-05, "loss": 0.8337, "step": 13820 }, { "epoch": 2.5684816948522577, "grad_norm": 0.7233678102493286, "learning_rate": 5.7006918384298015e-05, "loss": 1.1078, "step": 13821 }, { "epoch": 2.568667533915629, "grad_norm": 0.8729320764541626, "learning_rate": 5.699372862687371e-05, "loss": 1.0297, "step": 13822 }, { "epoch": 2.5688533729790004, "grad_norm": 0.7991220355033875, "learning_rate": 5.6980539787344454e-05, "loss": 0.7869, "step": 13823 }, { "epoch": 2.5690392120423713, "grad_norm": 0.8474665880203247, "learning_rate": 5.696735186599174e-05, "loss": 1.0024, "step": 13824 }, { "epoch": 2.569225051105742, "grad_norm": 0.7745363712310791, "learning_rate": 5.695416486309707e-05, "loss": 0.8602, "step": 13825 }, { "epoch": 2.5694108901691135, "grad_norm": 0.7119348049163818, "learning_rate": 5.694097877894189e-05, "loss": 1.0427, "step": 13826 }, { "epoch": 2.569596729232485, "grad_norm": 0.8804404735565186, "learning_rate": 5.6927793613807624e-05, "loss": 0.9245, "step": 13827 }, { "epoch": 2.5697825682958557, "grad_norm": 0.7989339232444763, "learning_rate": 5.691460936797566e-05, "loss": 0.8493, "step": 13828 }, { "epoch": 2.569968407359227, "grad_norm": 0.8049229979515076, "learning_rate": 5.6901426041727447e-05, "loss": 0.989, "step": 13829 }, { "epoch": 2.570154246422598, "grad_norm": 0.7842298746109009, "learning_rate": 5.6888243635344295e-05, "loss": 0.8367, "step": 13830 }, { "epoch": 2.5703400854859693, "grad_norm": 0.72968590259552, "learning_rate": 5.687506214910765e-05, "loss": 0.9222, "step": 13831 }, { "epoch": 2.57052592454934, "grad_norm": 1.1353753805160522, "learning_rate": 5.686188158329875e-05, "loss": 0.9685, "step": 13832 }, { "epoch": 2.5707117636127115, "grad_norm": 0.8593534827232361, "learning_rate": 5.6848701938199e-05, "loss": 0.9871, "step": 13833 }, { "epoch": 2.5708976026760824, "grad_norm": 
0.7766969203948975, "learning_rate": 5.683552321408967e-05, "loss": 0.9892, "step": 13834 }, { "epoch": 2.5710834417394537, "grad_norm": 0.7157514095306396, "learning_rate": 5.6822345411251995e-05, "loss": 0.8832, "step": 13835 }, { "epoch": 2.5712692808028246, "grad_norm": 0.8342850804328918, "learning_rate": 5.680916852996725e-05, "loss": 1.031, "step": 13836 }, { "epoch": 2.571455119866196, "grad_norm": 0.8797349333763123, "learning_rate": 5.679599257051671e-05, "loss": 0.9466, "step": 13837 }, { "epoch": 2.5716409589295672, "grad_norm": 0.7610642910003662, "learning_rate": 5.678281753318157e-05, "loss": 0.8896, "step": 13838 }, { "epoch": 2.571826797992938, "grad_norm": 1.056077241897583, "learning_rate": 5.676964341824299e-05, "loss": 0.8226, "step": 13839 }, { "epoch": 2.572012637056309, "grad_norm": 0.898135244846344, "learning_rate": 5.6756470225982186e-05, "loss": 0.7951, "step": 13840 }, { "epoch": 2.5721984761196803, "grad_norm": 0.8857336640357971, "learning_rate": 5.674329795668034e-05, "loss": 0.8316, "step": 13841 }, { "epoch": 2.5723843151830517, "grad_norm": 0.7753831744194031, "learning_rate": 5.673012661061858e-05, "loss": 0.9243, "step": 13842 }, { "epoch": 2.5725701542464225, "grad_norm": 0.8753922581672668, "learning_rate": 5.671695618807802e-05, "loss": 0.7834, "step": 13843 }, { "epoch": 2.572755993309794, "grad_norm": 0.8811227679252625, "learning_rate": 5.670378668933971e-05, "loss": 0.8982, "step": 13844 }, { "epoch": 2.5729418323731648, "grad_norm": 0.912420928478241, "learning_rate": 5.669061811468481e-05, "loss": 1.0495, "step": 13845 }, { "epoch": 2.573127671436536, "grad_norm": 0.7488943338394165, "learning_rate": 5.667745046439434e-05, "loss": 0.9085, "step": 13846 }, { "epoch": 2.573313510499907, "grad_norm": 0.8022164702415466, "learning_rate": 5.666428373874936e-05, "loss": 0.8339, "step": 13847 }, { "epoch": 2.5734993495632783, "grad_norm": 0.8374843001365662, "learning_rate": 5.665111793803082e-05, "loss": 0.9486, "step": 13848 
}, { "epoch": 2.573685188626649, "grad_norm": 0.6516796350479126, "learning_rate": 5.663795306251982e-05, "loss": 0.6117, "step": 13849 }, { "epoch": 2.5738710276900205, "grad_norm": 0.8019716739654541, "learning_rate": 5.662478911249731e-05, "loss": 0.7115, "step": 13850 }, { "epoch": 2.5740568667533914, "grad_norm": 0.9151284098625183, "learning_rate": 5.6611626088244194e-05, "loss": 1.0861, "step": 13851 }, { "epoch": 2.5742427058167627, "grad_norm": 0.8689349889755249, "learning_rate": 5.659846399004147e-05, "loss": 0.9528, "step": 13852 }, { "epoch": 2.574428544880134, "grad_norm": 0.7861039042472839, "learning_rate": 5.658530281817008e-05, "loss": 0.8674, "step": 13853 }, { "epoch": 2.574614383943505, "grad_norm": 0.7574406266212463, "learning_rate": 5.65721425729109e-05, "loss": 0.9153, "step": 13854 }, { "epoch": 2.574800223006876, "grad_norm": 0.7896664142608643, "learning_rate": 5.655898325454483e-05, "loss": 0.8182, "step": 13855 }, { "epoch": 2.574986062070247, "grad_norm": 0.8285431265830994, "learning_rate": 5.6545824863352645e-05, "loss": 1.0559, "step": 13856 }, { "epoch": 2.5751719011336185, "grad_norm": 0.7879959940910339, "learning_rate": 5.653266739961532e-05, "loss": 0.7827, "step": 13857 }, { "epoch": 2.5753577401969894, "grad_norm": 0.933560311794281, "learning_rate": 5.651951086361361e-05, "loss": 1.1124, "step": 13858 }, { "epoch": 2.5755435792603603, "grad_norm": 1.057924747467041, "learning_rate": 5.6506355255628284e-05, "loss": 1.003, "step": 13859 }, { "epoch": 2.5757294183237316, "grad_norm": 0.8306922912597656, "learning_rate": 5.649320057594021e-05, "loss": 0.8607, "step": 13860 }, { "epoch": 2.575915257387103, "grad_norm": 0.7214329838752747, "learning_rate": 5.648004682483008e-05, "loss": 0.5074, "step": 13861 }, { "epoch": 2.576101096450474, "grad_norm": 0.9435710310935974, "learning_rate": 5.646689400257868e-05, "loss": 1.0446, "step": 13862 }, { "epoch": 2.576286935513845, "grad_norm": 0.7284060716629028, "learning_rate": 
5.645374210946674e-05, "loss": 0.7227, "step": 13863 }, { "epoch": 2.576472774577216, "grad_norm": 0.8803086280822754, "learning_rate": 5.644059114577491e-05, "loss": 0.9937, "step": 13864 }, { "epoch": 2.5766586136405873, "grad_norm": 0.7651980519294739, "learning_rate": 5.642744111178394e-05, "loss": 0.7798, "step": 13865 }, { "epoch": 2.5768444527039582, "grad_norm": 0.836889922618866, "learning_rate": 5.641429200777446e-05, "loss": 0.8243, "step": 13866 }, { "epoch": 2.5770302917673296, "grad_norm": 0.8004657626152039, "learning_rate": 5.640114383402714e-05, "loss": 1.093, "step": 13867 }, { "epoch": 2.5772161308307004, "grad_norm": 1.3861210346221924, "learning_rate": 5.6387996590822535e-05, "loss": 1.1794, "step": 13868 }, { "epoch": 2.5774019698940718, "grad_norm": 0.8555946946144104, "learning_rate": 5.637485027844134e-05, "loss": 0.8745, "step": 13869 }, { "epoch": 2.5775878089574427, "grad_norm": 0.8644457459449768, "learning_rate": 5.63617048971641e-05, "loss": 1.1022, "step": 13870 }, { "epoch": 2.577773648020814, "grad_norm": 0.8424917459487915, "learning_rate": 5.634856044727136e-05, "loss": 0.9504, "step": 13871 }, { "epoch": 2.5779594870841853, "grad_norm": 0.8418530821800232, "learning_rate": 5.633541692904367e-05, "loss": 0.9645, "step": 13872 }, { "epoch": 2.578145326147556, "grad_norm": 0.7905413508415222, "learning_rate": 5.632227434276163e-05, "loss": 0.9744, "step": 13873 }, { "epoch": 2.578331165210927, "grad_norm": 0.9202725887298584, "learning_rate": 5.630913268870568e-05, "loss": 0.9128, "step": 13874 }, { "epoch": 2.5785170042742984, "grad_norm": 0.6845600605010986, "learning_rate": 5.6295991967156314e-05, "loss": 0.9016, "step": 13875 }, { "epoch": 2.5787028433376697, "grad_norm": 1.0831317901611328, "learning_rate": 5.6282852178393966e-05, "loss": 1.083, "step": 13876 }, { "epoch": 2.5788886824010406, "grad_norm": 0.9119095802307129, "learning_rate": 5.6269713322699146e-05, "loss": 0.9246, "step": 13877 }, { "epoch": 2.579074521464412, 
"grad_norm": 0.8733465671539307, "learning_rate": 5.625657540035226e-05, "loss": 0.9341, "step": 13878 }, { "epoch": 2.579260360527783, "grad_norm": 0.7767495512962341, "learning_rate": 5.624343841163367e-05, "loss": 0.8646, "step": 13879 }, { "epoch": 2.579446199591154, "grad_norm": 1.0635108947753906, "learning_rate": 5.6230302356823825e-05, "loss": 0.8414, "step": 13880 }, { "epoch": 2.579632038654525, "grad_norm": 0.7466744184494019, "learning_rate": 5.621716723620307e-05, "loss": 0.9437, "step": 13881 }, { "epoch": 2.5798178777178964, "grad_norm": 1.0021867752075195, "learning_rate": 5.620403305005172e-05, "loss": 0.8329, "step": 13882 }, { "epoch": 2.5800037167812673, "grad_norm": 0.9675542116165161, "learning_rate": 5.6190899798650156e-05, "loss": 0.7331, "step": 13883 }, { "epoch": 2.5801895558446386, "grad_norm": 0.7637116312980652, "learning_rate": 5.617776748227861e-05, "loss": 0.8081, "step": 13884 }, { "epoch": 2.5803753949080095, "grad_norm": 0.7900305390357971, "learning_rate": 5.616463610121746e-05, "loss": 0.8334, "step": 13885 }, { "epoch": 2.580561233971381, "grad_norm": 0.7600497603416443, "learning_rate": 5.615150565574694e-05, "loss": 0.9039, "step": 13886 }, { "epoch": 2.580747073034752, "grad_norm": 0.8872308135032654, "learning_rate": 5.613837614614727e-05, "loss": 0.7615, "step": 13887 }, { "epoch": 2.580932912098123, "grad_norm": 0.7821314334869385, "learning_rate": 5.612524757269866e-05, "loss": 0.8217, "step": 13888 }, { "epoch": 2.581118751161494, "grad_norm": 0.8883917331695557, "learning_rate": 5.6112119935681375e-05, "loss": 0.915, "step": 13889 }, { "epoch": 2.5813045902248652, "grad_norm": 0.7755342721939087, "learning_rate": 5.6098993235375576e-05, "loss": 1.0398, "step": 13890 }, { "epoch": 2.5814904292882366, "grad_norm": 0.9562380909919739, "learning_rate": 5.60858674720614e-05, "loss": 0.9848, "step": 13891 }, { "epoch": 2.5816762683516075, "grad_norm": 0.9915200471878052, "learning_rate": 5.607274264601907e-05, "loss": 
1.1596, "step": 13892 }, { "epoch": 2.581862107414979, "grad_norm": 0.8348504900932312, "learning_rate": 5.605961875752861e-05, "loss": 1.2279, "step": 13893 }, { "epoch": 2.5820479464783497, "grad_norm": 0.7134225368499756, "learning_rate": 5.604649580687022e-05, "loss": 0.8465, "step": 13894 }, { "epoch": 2.582233785541721, "grad_norm": 0.6985647082328796, "learning_rate": 5.603337379432396e-05, "loss": 0.7941, "step": 13895 }, { "epoch": 2.582419624605092, "grad_norm": 0.7442715167999268, "learning_rate": 5.602025272016985e-05, "loss": 0.908, "step": 13896 }, { "epoch": 2.582605463668463, "grad_norm": 0.7957456111907959, "learning_rate": 5.6007132584688014e-05, "loss": 0.8506, "step": 13897 }, { "epoch": 2.582791302731834, "grad_norm": 0.8388959169387817, "learning_rate": 5.599401338815844e-05, "loss": 0.7865, "step": 13898 }, { "epoch": 2.5829771417952054, "grad_norm": 1.0251314640045166, "learning_rate": 5.598089513086108e-05, "loss": 1.122, "step": 13899 }, { "epoch": 2.5831629808585763, "grad_norm": 0.863593339920044, "learning_rate": 5.596777781307602e-05, "loss": 0.7528, "step": 13900 }, { "epoch": 2.5833488199219476, "grad_norm": 0.8443089723587036, "learning_rate": 5.595466143508318e-05, "loss": 0.8237, "step": 13901 }, { "epoch": 2.583534658985319, "grad_norm": 0.8671047687530518, "learning_rate": 5.5941545997162506e-05, "loss": 0.988, "step": 13902 }, { "epoch": 2.58372049804869, "grad_norm": 0.8505552411079407, "learning_rate": 5.592843149959388e-05, "loss": 0.7937, "step": 13903 }, { "epoch": 2.5839063371120607, "grad_norm": 0.7884801626205444, "learning_rate": 5.5915317942657264e-05, "loss": 0.942, "step": 13904 }, { "epoch": 2.584092176175432, "grad_norm": 0.7203455567359924, "learning_rate": 5.5902205326632576e-05, "loss": 0.8007, "step": 13905 }, { "epoch": 2.5842780152388034, "grad_norm": 0.9898548126220703, "learning_rate": 5.5889093651799626e-05, "loss": 0.9014, "step": 13906 }, { "epoch": 2.5844638543021743, "grad_norm": 1.3909528255462646, 
"learning_rate": 5.587598291843829e-05, "loss": 1.3544, "step": 13907 }, { "epoch": 2.584649693365545, "grad_norm": 0.8734009861946106, "learning_rate": 5.586287312682833e-05, "loss": 0.8794, "step": 13908 }, { "epoch": 2.5848355324289165, "grad_norm": 0.6974748373031616, "learning_rate": 5.584976427724964e-05, "loss": 0.7835, "step": 13909 }, { "epoch": 2.585021371492288, "grad_norm": 0.9306507706642151, "learning_rate": 5.5836656369981965e-05, "loss": 0.8179, "step": 13910 }, { "epoch": 2.5852072105556587, "grad_norm": 0.7667589783668518, "learning_rate": 5.582354940530504e-05, "loss": 0.9037, "step": 13911 }, { "epoch": 2.58539304961903, "grad_norm": 0.7419784069061279, "learning_rate": 5.581044338349868e-05, "loss": 0.8633, "step": 13912 }, { "epoch": 2.585578888682401, "grad_norm": 0.8643168807029724, "learning_rate": 5.579733830484257e-05, "loss": 1.0308, "step": 13913 }, { "epoch": 2.5857647277457723, "grad_norm": 0.7932475805282593, "learning_rate": 5.578423416961639e-05, "loss": 1.003, "step": 13914 }, { "epoch": 2.585950566809143, "grad_norm": 0.8723037838935852, "learning_rate": 5.577113097809989e-05, "loss": 1.0774, "step": 13915 }, { "epoch": 2.5861364058725145, "grad_norm": 0.81297367811203, "learning_rate": 5.575802873057265e-05, "loss": 0.993, "step": 13916 }, { "epoch": 2.5863222449358854, "grad_norm": 0.7683242559432983, "learning_rate": 5.574492742731441e-05, "loss": 0.8725, "step": 13917 }, { "epoch": 2.5865080839992567, "grad_norm": 0.7096510529518127, "learning_rate": 5.573182706860476e-05, "loss": 0.8755, "step": 13918 }, { "epoch": 2.5866939230626276, "grad_norm": 0.8059236407279968, "learning_rate": 5.571872765472324e-05, "loss": 0.9017, "step": 13919 }, { "epoch": 2.586879762125999, "grad_norm": 0.8255543112754822, "learning_rate": 5.5705629185949526e-05, "loss": 0.8532, "step": 13920 }, { "epoch": 2.5870656011893702, "grad_norm": 0.8200182914733887, "learning_rate": 5.5692531662563165e-05, "loss": 0.8727, "step": 13921 }, { "epoch": 
2.587251440252741, "grad_norm": 0.9070665240287781, "learning_rate": 5.5679435084843655e-05, "loss": 0.7226, "step": 13922 }, { "epoch": 2.587437279316112, "grad_norm": 0.8500924110412598, "learning_rate": 5.566633945307052e-05, "loss": 0.9021, "step": 13923 }, { "epoch": 2.5876231183794833, "grad_norm": 0.7485762238502502, "learning_rate": 5.565324476752329e-05, "loss": 0.7641, "step": 13924 }, { "epoch": 2.5878089574428547, "grad_norm": 0.9648006558418274, "learning_rate": 5.564015102848149e-05, "loss": 0.8558, "step": 13925 }, { "epoch": 2.5879947965062255, "grad_norm": 0.9895826578140259, "learning_rate": 5.562705823622454e-05, "loss": 0.8674, "step": 13926 }, { "epoch": 2.588180635569597, "grad_norm": 0.773406982421875, "learning_rate": 5.561396639103189e-05, "loss": 0.8504, "step": 13927 }, { "epoch": 2.5883664746329678, "grad_norm": 0.9207121133804321, "learning_rate": 5.560087549318291e-05, "loss": 0.8726, "step": 13928 }, { "epoch": 2.588552313696339, "grad_norm": 0.8121840357780457, "learning_rate": 5.558778554295709e-05, "loss": 0.8818, "step": 13929 }, { "epoch": 2.58873815275971, "grad_norm": 1.0189287662506104, "learning_rate": 5.557469654063377e-05, "loss": 1.0176, "step": 13930 }, { "epoch": 2.5889239918230813, "grad_norm": 0.8649277687072754, "learning_rate": 5.556160848649228e-05, "loss": 1.0651, "step": 13931 }, { "epoch": 2.589109830886452, "grad_norm": 0.9580209851264954, "learning_rate": 5.5548521380812036e-05, "loss": 0.9707, "step": 13932 }, { "epoch": 2.5892956699498235, "grad_norm": 0.760979175567627, "learning_rate": 5.5535435223872325e-05, "loss": 0.9167, "step": 13933 }, { "epoch": 2.5894815090131944, "grad_norm": 0.9788494110107422, "learning_rate": 5.552235001595245e-05, "loss": 0.9616, "step": 13934 }, { "epoch": 2.5896673480765657, "grad_norm": 0.8736420273780823, "learning_rate": 5.550926575733164e-05, "loss": 0.8962, "step": 13935 }, { "epoch": 2.589853187139937, "grad_norm": 0.8266481161117554, "learning_rate": 
5.54961824482892e-05, "loss": 0.9473, "step": 13936 }, { "epoch": 2.590039026203308, "grad_norm": 0.9490053057670593, "learning_rate": 5.548310008910442e-05, "loss": 0.7363, "step": 13937 }, { "epoch": 2.590224865266679, "grad_norm": 0.9110895395278931, "learning_rate": 5.547001868005648e-05, "loss": 1.0962, "step": 13938 }, { "epoch": 2.59041070433005, "grad_norm": 0.7902200818061829, "learning_rate": 5.545693822142453e-05, "loss": 0.8136, "step": 13939 }, { "epoch": 2.5905965433934215, "grad_norm": 0.783957302570343, "learning_rate": 5.5443858713487827e-05, "loss": 1.0766, "step": 13940 }, { "epoch": 2.5907823824567924, "grad_norm": 0.8990013003349304, "learning_rate": 5.54307801565255e-05, "loss": 1.0653, "step": 13941 }, { "epoch": 2.5909682215201633, "grad_norm": 0.8461039662361145, "learning_rate": 5.5417702550816684e-05, "loss": 0.9389, "step": 13942 }, { "epoch": 2.5911540605835346, "grad_norm": 0.9367736577987671, "learning_rate": 5.540462589664047e-05, "loss": 0.832, "step": 13943 }, { "epoch": 2.591339899646906, "grad_norm": 0.7142069935798645, "learning_rate": 5.539155019427602e-05, "loss": 0.9313, "step": 13944 }, { "epoch": 2.591525738710277, "grad_norm": 0.814614474773407, "learning_rate": 5.537847544400234e-05, "loss": 1.0447, "step": 13945 }, { "epoch": 2.591711577773648, "grad_norm": 2.7057414054870605, "learning_rate": 5.536540164609858e-05, "loss": 1.3782, "step": 13946 }, { "epoch": 2.591897416837019, "grad_norm": 0.7128685712814331, "learning_rate": 5.5352328800843724e-05, "loss": 0.8777, "step": 13947 }, { "epoch": 2.5920832559003903, "grad_norm": 0.7369137406349182, "learning_rate": 5.533925690851673e-05, "loss": 0.8638, "step": 13948 }, { "epoch": 2.5922690949637612, "grad_norm": 0.902484118938446, "learning_rate": 5.5326185969396714e-05, "loss": 1.0354, "step": 13949 }, { "epoch": 2.5924549340271326, "grad_norm": 0.9577115774154663, "learning_rate": 5.531311598376261e-05, "loss": 0.8224, "step": 13950 }, { "epoch": 2.592640773090504, 
"grad_norm": 0.8262429237365723, "learning_rate": 5.530004695189329e-05, "loss": 0.9783, "step": 13951 }, { "epoch": 2.5928266121538748, "grad_norm": 0.766438901424408, "learning_rate": 5.528697887406782e-05, "loss": 0.7031, "step": 13952 }, { "epoch": 2.5930124512172457, "grad_norm": 0.6652514934539795, "learning_rate": 5.5273911750565045e-05, "loss": 0.6129, "step": 13953 }, { "epoch": 2.593198290280617, "grad_norm": 0.8506471514701843, "learning_rate": 5.526084558166387e-05, "loss": 0.738, "step": 13954 }, { "epoch": 2.5933841293439883, "grad_norm": 0.9001114964485168, "learning_rate": 5.524778036764313e-05, "loss": 0.7862, "step": 13955 }, { "epoch": 2.593569968407359, "grad_norm": 0.7212256193161011, "learning_rate": 5.523471610878172e-05, "loss": 0.8856, "step": 13956 }, { "epoch": 2.59375580747073, "grad_norm": 0.7791721224784851, "learning_rate": 5.522165280535853e-05, "loss": 0.8765, "step": 13957 }, { "epoch": 2.5939416465341014, "grad_norm": 0.7826201319694519, "learning_rate": 5.52085904576523e-05, "loss": 1.0375, "step": 13958 }, { "epoch": 2.5941274855974727, "grad_norm": 0.8129811882972717, "learning_rate": 5.519552906594181e-05, "loss": 1.0598, "step": 13959 }, { "epoch": 2.5943133246608436, "grad_norm": 0.9655573964118958, "learning_rate": 5.51824686305059e-05, "loss": 1.03, "step": 13960 }, { "epoch": 2.594499163724215, "grad_norm": 0.7806291580200195, "learning_rate": 5.5169409151623294e-05, "loss": 1.0793, "step": 13961 }, { "epoch": 2.594685002787586, "grad_norm": 0.7537856101989746, "learning_rate": 5.515635062957272e-05, "loss": 0.7853, "step": 13962 }, { "epoch": 2.594870841850957, "grad_norm": 0.8779000639915466, "learning_rate": 5.514329306463284e-05, "loss": 0.8949, "step": 13963 }, { "epoch": 2.595056680914328, "grad_norm": 0.9200186133384705, "learning_rate": 5.513023645708242e-05, "loss": 0.7334, "step": 13964 }, { "epoch": 2.5952425199776994, "grad_norm": 0.9138427376747131, "learning_rate": 5.511718080720012e-05, "loss": 0.9246, 
"step": 13965 }, { "epoch": 2.5954283590410703, "grad_norm": 0.9949511885643005, "learning_rate": 5.510412611526452e-05, "loss": 1.0521, "step": 13966 }, { "epoch": 2.5956141981044416, "grad_norm": 0.7660025358200073, "learning_rate": 5.5091072381554355e-05, "loss": 1.0903, "step": 13967 }, { "epoch": 2.5958000371678125, "grad_norm": 1.9562807083129883, "learning_rate": 5.507801960634815e-05, "loss": 1.4339, "step": 13968 }, { "epoch": 2.595985876231184, "grad_norm": 0.8114579319953918, "learning_rate": 5.506496778992456e-05, "loss": 0.9345, "step": 13969 }, { "epoch": 2.596171715294555, "grad_norm": 0.9337628483772278, "learning_rate": 5.505191693256211e-05, "loss": 0.731, "step": 13970 }, { "epoch": 2.596357554357926, "grad_norm": 0.9045740962028503, "learning_rate": 5.503886703453933e-05, "loss": 0.7923, "step": 13971 }, { "epoch": 2.596543393421297, "grad_norm": 0.7792370319366455, "learning_rate": 5.502581809613482e-05, "loss": 0.6075, "step": 13972 }, { "epoch": 2.5967292324846682, "grad_norm": 0.8924160599708557, "learning_rate": 5.5012770117627046e-05, "loss": 0.9055, "step": 13973 }, { "epoch": 2.5969150715480396, "grad_norm": 0.7485678791999817, "learning_rate": 5.499972309929448e-05, "loss": 0.9435, "step": 13974 }, { "epoch": 2.5971009106114105, "grad_norm": 0.9983780384063721, "learning_rate": 5.498667704141556e-05, "loss": 0.7541, "step": 13975 }, { "epoch": 2.597286749674782, "grad_norm": 0.8822980523109436, "learning_rate": 5.497363194426882e-05, "loss": 0.9006, "step": 13976 }, { "epoch": 2.5974725887381527, "grad_norm": 1.0196903944015503, "learning_rate": 5.49605878081326e-05, "loss": 1.0334, "step": 13977 }, { "epoch": 2.597658427801524, "grad_norm": 0.7188934683799744, "learning_rate": 5.494754463328538e-05, "loss": 0.8842, "step": 13978 }, { "epoch": 2.597844266864895, "grad_norm": 0.9956566095352173, "learning_rate": 5.493450242000546e-05, "loss": 0.7571, "step": 13979 }, { "epoch": 2.598030105928266, "grad_norm": 0.7895113229751587, 
"learning_rate": 5.49214611685713e-05, "loss": 0.7521, "step": 13980 }, { "epoch": 2.598215944991637, "grad_norm": 0.8534258008003235, "learning_rate": 5.490842087926119e-05, "loss": 0.875, "step": 13981 }, { "epoch": 2.5984017840550084, "grad_norm": 0.8755273222923279, "learning_rate": 5.4895381552353455e-05, "loss": 0.8375, "step": 13982 }, { "epoch": 2.5985876231183793, "grad_norm": 0.96613609790802, "learning_rate": 5.488234318812636e-05, "loss": 0.972, "step": 13983 }, { "epoch": 2.5987734621817506, "grad_norm": 0.8058257102966309, "learning_rate": 5.486930578685826e-05, "loss": 1.0076, "step": 13984 }, { "epoch": 2.598959301245122, "grad_norm": 0.7786903977394104, "learning_rate": 5.485626934882739e-05, "loss": 0.7411, "step": 13985 }, { "epoch": 2.599145140308493, "grad_norm": 1.0404150485992432, "learning_rate": 5.484323387431197e-05, "loss": 1.0283, "step": 13986 }, { "epoch": 2.5993309793718637, "grad_norm": 0.7860246896743774, "learning_rate": 5.483019936359019e-05, "loss": 0.8234, "step": 13987 }, { "epoch": 2.599516818435235, "grad_norm": 0.9711631536483765, "learning_rate": 5.48171658169403e-05, "loss": 0.9653, "step": 13988 }, { "epoch": 2.5997026574986064, "grad_norm": 1.0118674039840698, "learning_rate": 5.480413323464051e-05, "loss": 1.0696, "step": 13989 }, { "epoch": 2.5998884965619773, "grad_norm": 0.7147940397262573, "learning_rate": 5.479110161696893e-05, "loss": 0.7738, "step": 13990 }, { "epoch": 2.600074335625348, "grad_norm": 0.8029753565788269, "learning_rate": 5.4778070964203667e-05, "loss": 0.9146, "step": 13991 }, { "epoch": 2.6002601746887195, "grad_norm": 0.7796580195426941, "learning_rate": 5.476504127662292e-05, "loss": 0.9461, "step": 13992 }, { "epoch": 2.600446013752091, "grad_norm": 0.7540099024772644, "learning_rate": 5.4752012554504725e-05, "loss": 0.9506, "step": 13993 }, { "epoch": 2.6006318528154617, "grad_norm": 1.0693564414978027, "learning_rate": 5.473898479812719e-05, "loss": 0.7965, "step": 13994 }, { "epoch": 
2.600817691878833, "grad_norm": 0.730949878692627, "learning_rate": 5.4725958007768304e-05, "loss": 0.9624, "step": 13995 }, { "epoch": 2.601003530942204, "grad_norm": 0.897834062576294, "learning_rate": 5.471293218370619e-05, "loss": 0.8675, "step": 13996 }, { "epoch": 2.6011893700055753, "grad_norm": 0.7791353464126587, "learning_rate": 5.4699907326218836e-05, "loss": 1.0761, "step": 13997 }, { "epoch": 2.601375209068946, "grad_norm": 0.793819010257721, "learning_rate": 5.4686883435584166e-05, "loss": 1.0897, "step": 13998 }, { "epoch": 2.6015610481323175, "grad_norm": 0.8791100978851318, "learning_rate": 5.467386051208022e-05, "loss": 1.1172, "step": 13999 }, { "epoch": 2.601746887195689, "grad_norm": 0.7686331868171692, "learning_rate": 5.4660838555984974e-05, "loss": 0.8535, "step": 14000 }, { "epoch": 2.6019327262590597, "grad_norm": 0.8077011108398438, "learning_rate": 5.464781756757633e-05, "loss": 0.9649, "step": 14001 }, { "epoch": 2.6021185653224306, "grad_norm": 0.8720762729644775, "learning_rate": 5.463479754713219e-05, "loss": 0.9052, "step": 14002 }, { "epoch": 2.602304404385802, "grad_norm": 0.9532802104949951, "learning_rate": 5.4621778494930397e-05, "loss": 0.9079, "step": 14003 }, { "epoch": 2.6024902434491732, "grad_norm": 0.8844078183174133, "learning_rate": 5.460876041124892e-05, "loss": 0.9669, "step": 14004 }, { "epoch": 2.602676082512544, "grad_norm": 0.8178666234016418, "learning_rate": 5.459574329636554e-05, "loss": 0.8515, "step": 14005 }, { "epoch": 2.602861921575915, "grad_norm": 0.7567655444145203, "learning_rate": 5.458272715055812e-05, "loss": 0.9037, "step": 14006 }, { "epoch": 2.6030477606392863, "grad_norm": 1.0607315301895142, "learning_rate": 5.456971197410441e-05, "loss": 0.6469, "step": 14007 }, { "epoch": 2.6032335997026577, "grad_norm": 0.8017802238464355, "learning_rate": 5.4556697767282224e-05, "loss": 0.9601, "step": 14008 }, { "epoch": 2.6034194387660285, "grad_norm": 1.093765377998352, "learning_rate": 
5.4543684530369396e-05, "loss": 0.7734, "step": 14009 }, { "epoch": 2.6036052778294, "grad_norm": 0.7641136050224304, "learning_rate": 5.453067226364361e-05, "loss": 0.9624, "step": 14010 }, { "epoch": 2.6037911168927708, "grad_norm": 0.8289857506752014, "learning_rate": 5.451766096738256e-05, "loss": 0.958, "step": 14011 }, { "epoch": 2.603976955956142, "grad_norm": 0.7701115012168884, "learning_rate": 5.450465064186401e-05, "loss": 0.7542, "step": 14012 }, { "epoch": 2.604162795019513, "grad_norm": 0.8174500465393066, "learning_rate": 5.4491641287365635e-05, "loss": 1.0229, "step": 14013 }, { "epoch": 2.6043486340828843, "grad_norm": 0.8088310956954956, "learning_rate": 5.4478632904165074e-05, "loss": 0.9266, "step": 14014 }, { "epoch": 2.604534473146255, "grad_norm": 0.7646427750587463, "learning_rate": 5.446562549253993e-05, "loss": 1.0236, "step": 14015 }, { "epoch": 2.6047203122096265, "grad_norm": 0.7802772521972656, "learning_rate": 5.445261905276793e-05, "loss": 0.9357, "step": 14016 }, { "epoch": 2.6049061512729974, "grad_norm": 0.8911521434783936, "learning_rate": 5.4439613585126594e-05, "loss": 0.8832, "step": 14017 }, { "epoch": 2.6050919903363687, "grad_norm": 0.8874276876449585, "learning_rate": 5.442660908989352e-05, "loss": 0.8062, "step": 14018 }, { "epoch": 2.60527782939974, "grad_norm": 0.7851524353027344, "learning_rate": 5.44136055673462e-05, "loss": 0.9741, "step": 14019 }, { "epoch": 2.605463668463111, "grad_norm": 0.8898999691009521, "learning_rate": 5.440060301776232e-05, "loss": 0.9239, "step": 14020 }, { "epoch": 2.605649507526482, "grad_norm": 0.8409792184829712, "learning_rate": 5.438760144141931e-05, "loss": 0.901, "step": 14021 }, { "epoch": 2.605835346589853, "grad_norm": 0.8695258498191833, "learning_rate": 5.437460083859467e-05, "loss": 0.9999, "step": 14022 }, { "epoch": 2.6060211856532245, "grad_norm": 0.7994899749755859, "learning_rate": 5.436160120956585e-05, "loss": 0.8466, "step": 14023 }, { "epoch": 2.6062070247165954, 
"grad_norm": 0.84480881690979, "learning_rate": 5.434860255461036e-05, "loss": 0.8116, "step": 14024 }, { "epoch": 2.6063928637799667, "grad_norm": 0.6807081699371338, "learning_rate": 5.433560487400562e-05, "loss": 0.784, "step": 14025 }, { "epoch": 2.6065787028433376, "grad_norm": 0.81355881690979, "learning_rate": 5.432260816802903e-05, "loss": 1.1106, "step": 14026 }, { "epoch": 2.606764541906709, "grad_norm": 1.743600845336914, "learning_rate": 5.4309612436957937e-05, "loss": 1.1663, "step": 14027 }, { "epoch": 2.60695038097008, "grad_norm": 1.0914791822433472, "learning_rate": 5.429661768106981e-05, "loss": 1.3701, "step": 14028 }, { "epoch": 2.607136220033451, "grad_norm": 0.771381139755249, "learning_rate": 5.42836239006419e-05, "loss": 1.0859, "step": 14029 }, { "epoch": 2.607322059096822, "grad_norm": 0.7847346067428589, "learning_rate": 5.427063109595162e-05, "loss": 0.8319, "step": 14030 }, { "epoch": 2.6075078981601933, "grad_norm": 1.1157923936843872, "learning_rate": 5.4257639267276205e-05, "loss": 1.3161, "step": 14031 }, { "epoch": 2.6076937372235642, "grad_norm": 0.726537823677063, "learning_rate": 5.424464841489303e-05, "loss": 1.032, "step": 14032 }, { "epoch": 2.6078795762869356, "grad_norm": 0.7729559540748596, "learning_rate": 5.423165853907931e-05, "loss": 0.8861, "step": 14033 }, { "epoch": 2.608065415350307, "grad_norm": 0.8606934547424316, "learning_rate": 5.421866964011231e-05, "loss": 0.9862, "step": 14034 }, { "epoch": 2.6082512544136778, "grad_norm": 0.6861522793769836, "learning_rate": 5.4205681718269184e-05, "loss": 0.6266, "step": 14035 }, { "epoch": 2.6084370934770487, "grad_norm": 0.7935603260993958, "learning_rate": 5.419269477382725e-05, "loss": 0.8256, "step": 14036 }, { "epoch": 2.60862293254042, "grad_norm": 0.8142755031585693, "learning_rate": 5.4179708807063625e-05, "loss": 1.0308, "step": 14037 }, { "epoch": 2.6088087716037913, "grad_norm": 1.0010840892791748, "learning_rate": 5.416672381825549e-05, "loss": 0.9534, 
"step": 14038 }, { "epoch": 2.608994610667162, "grad_norm": 0.8254314661026001, "learning_rate": 5.415373980767995e-05, "loss": 1.0915, "step": 14039 }, { "epoch": 2.609180449730533, "grad_norm": 0.799942672252655, "learning_rate": 5.4140756775614146e-05, "loss": 0.7815, "step": 14040 }, { "epoch": 2.6093662887939044, "grad_norm": 0.8121205568313599, "learning_rate": 5.412777472233523e-05, "loss": 1.0408, "step": 14041 }, { "epoch": 2.6095521278572757, "grad_norm": 0.8696049451828003, "learning_rate": 5.4114793648120256e-05, "loss": 1.0698, "step": 14042 }, { "epoch": 2.6097379669206466, "grad_norm": 0.98824143409729, "learning_rate": 5.410181355324622e-05, "loss": 1.2393, "step": 14043 }, { "epoch": 2.609923805984018, "grad_norm": 0.8695021271705627, "learning_rate": 5.408883443799026e-05, "loss": 1.0092, "step": 14044 }, { "epoch": 2.610109645047389, "grad_norm": 0.8957984447479248, "learning_rate": 5.407585630262932e-05, "loss": 0.9223, "step": 14045 }, { "epoch": 2.61029548411076, "grad_norm": 0.7589720487594604, "learning_rate": 5.406287914744044e-05, "loss": 1.1484, "step": 14046 }, { "epoch": 2.610481323174131, "grad_norm": 0.8064348697662354, "learning_rate": 5.4049902972700515e-05, "loss": 1.0092, "step": 14047 }, { "epoch": 2.6106671622375024, "grad_norm": 0.6093027591705322, "learning_rate": 5.4036927778686616e-05, "loss": 0.575, "step": 14048 }, { "epoch": 2.6108530013008733, "grad_norm": 0.7983278036117554, "learning_rate": 5.4023953565675614e-05, "loss": 0.9588, "step": 14049 }, { "epoch": 2.6110388403642446, "grad_norm": 0.8939300179481506, "learning_rate": 5.4010980333944386e-05, "loss": 0.8152, "step": 14050 }, { "epoch": 2.6112246794276155, "grad_norm": 1.0524145364761353, "learning_rate": 5.3998008083769846e-05, "loss": 0.827, "step": 14051 }, { "epoch": 2.611410518490987, "grad_norm": 0.8120452165603638, "learning_rate": 5.398503681542893e-05, "loss": 1.0297, "step": 14052 }, { "epoch": 2.611596357554358, "grad_norm": 0.7002134323120117, 
"learning_rate": 5.397206652919844e-05, "loss": 0.9747, "step": 14053 }, { "epoch": 2.611782196617729, "grad_norm": 1.1011672019958496, "learning_rate": 5.395909722535522e-05, "loss": 1.1254, "step": 14054 }, { "epoch": 2.6119680356811, "grad_norm": 0.8079476356506348, "learning_rate": 5.3946128904176e-05, "loss": 0.8523, "step": 14055 }, { "epoch": 2.6121538747444712, "grad_norm": 0.7758453488349915, "learning_rate": 5.3933161565937665e-05, "loss": 1.0984, "step": 14056 }, { "epoch": 2.6123397138078426, "grad_norm": 0.8518459796905518, "learning_rate": 5.392019521091695e-05, "loss": 0.8602, "step": 14057 }, { "epoch": 2.6125255528712135, "grad_norm": 0.7195460200309753, "learning_rate": 5.390722983939058e-05, "loss": 0.6718, "step": 14058 }, { "epoch": 2.612711391934585, "grad_norm": 0.820122480392456, "learning_rate": 5.389426545163527e-05, "loss": 0.9311, "step": 14059 }, { "epoch": 2.6128972309979557, "grad_norm": 0.8300378918647766, "learning_rate": 5.3881302047927774e-05, "loss": 0.7584, "step": 14060 }, { "epoch": 2.613083070061327, "grad_norm": 0.8203210830688477, "learning_rate": 5.3868339628544695e-05, "loss": 1.0066, "step": 14061 }, { "epoch": 2.613268909124698, "grad_norm": 1.0901485681533813, "learning_rate": 5.38553781937628e-05, "loss": 1.0315, "step": 14062 }, { "epoch": 2.613454748188069, "grad_norm": 0.7519093155860901, "learning_rate": 5.384241774385862e-05, "loss": 0.7722, "step": 14063 }, { "epoch": 2.61364058725144, "grad_norm": 0.833781898021698, "learning_rate": 5.382945827910886e-05, "loss": 1.022, "step": 14064 }, { "epoch": 2.6138264263148114, "grad_norm": 0.7555166482925415, "learning_rate": 5.381649979979009e-05, "loss": 0.8817, "step": 14065 }, { "epoch": 2.6140122653781823, "grad_norm": 0.7260909676551819, "learning_rate": 5.3803542306178865e-05, "loss": 0.8799, "step": 14066 }, { "epoch": 2.6141981044415536, "grad_norm": 0.7376378178596497, "learning_rate": 5.379058579855172e-05, "loss": 0.7841, "step": 14067 }, { "epoch": 
2.614383943504925, "grad_norm": 0.8301506638526917, "learning_rate": 5.3777630277185264e-05, "loss": 0.846, "step": 14068 }, { "epoch": 2.614569782568296, "grad_norm": 0.9465357065200806, "learning_rate": 5.3764675742355974e-05, "loss": 0.9891, "step": 14069 }, { "epoch": 2.6147556216316667, "grad_norm": 0.8111327886581421, "learning_rate": 5.375172219434028e-05, "loss": 0.945, "step": 14070 }, { "epoch": 2.614941460695038, "grad_norm": 0.7847363352775574, "learning_rate": 5.373876963341473e-05, "loss": 0.8943, "step": 14071 }, { "epoch": 2.6151272997584094, "grad_norm": 0.970258891582489, "learning_rate": 5.3725818059855796e-05, "loss": 1.0219, "step": 14072 }, { "epoch": 2.6153131388217803, "grad_norm": 0.8293201327323914, "learning_rate": 5.3712867473939845e-05, "loss": 1.0821, "step": 14073 }, { "epoch": 2.6154989778851516, "grad_norm": 0.8214329481124878, "learning_rate": 5.369991787594332e-05, "loss": 0.6193, "step": 14074 }, { "epoch": 2.6156848169485225, "grad_norm": 1.016649603843689, "learning_rate": 5.368696926614255e-05, "loss": 1.1271, "step": 14075 }, { "epoch": 2.615870656011894, "grad_norm": 0.8211673498153687, "learning_rate": 5.367402164481399e-05, "loss": 0.9893, "step": 14076 }, { "epoch": 2.6160564950752647, "grad_norm": 0.8138982057571411, "learning_rate": 5.366107501223393e-05, "loss": 0.9162, "step": 14077 }, { "epoch": 2.616242334138636, "grad_norm": 0.8277393579483032, "learning_rate": 5.364812936867871e-05, "loss": 1.2818, "step": 14078 }, { "epoch": 2.616428173202007, "grad_norm": 0.9266590476036072, "learning_rate": 5.3635184714424594e-05, "loss": 0.8863, "step": 14079 }, { "epoch": 2.6166140122653783, "grad_norm": 0.8038361072540283, "learning_rate": 5.3622241049747914e-05, "loss": 1.0169, "step": 14080 }, { "epoch": 2.616799851328749, "grad_norm": 0.8010311722755432, "learning_rate": 5.360929837492492e-05, "loss": 0.7726, "step": 14081 }, { "epoch": 2.6169856903921205, "grad_norm": 0.8894407153129578, "learning_rate": 
5.359635669023181e-05, "loss": 0.9489, "step": 14082 }, { "epoch": 2.617171529455492, "grad_norm": 0.7788730263710022, "learning_rate": 5.358341599594483e-05, "loss": 0.866, "step": 14083 }, { "epoch": 2.6173573685188627, "grad_norm": 0.9567217826843262, "learning_rate": 5.357047629234022e-05, "loss": 0.5063, "step": 14084 }, { "epoch": 2.6175432075822336, "grad_norm": 0.8853626847267151, "learning_rate": 5.3557537579694114e-05, "loss": 0.9121, "step": 14085 }, { "epoch": 2.617729046645605, "grad_norm": 0.8689543604850769, "learning_rate": 5.354459985828268e-05, "loss": 0.9747, "step": 14086 }, { "epoch": 2.6179148857089762, "grad_norm": 1.1644787788391113, "learning_rate": 5.353166312838199e-05, "loss": 1.1248, "step": 14087 }, { "epoch": 2.618100724772347, "grad_norm": 0.8772037625312805, "learning_rate": 5.351872739026826e-05, "loss": 0.7357, "step": 14088 }, { "epoch": 2.618286563835718, "grad_norm": 0.8058109283447266, "learning_rate": 5.3505792644217525e-05, "loss": 0.8819, "step": 14089 }, { "epoch": 2.6184724028990893, "grad_norm": 0.838029146194458, "learning_rate": 5.349285889050582e-05, "loss": 0.8949, "step": 14090 }, { "epoch": 2.6186582419624607, "grad_norm": 0.8061281442642212, "learning_rate": 5.347992612940926e-05, "loss": 0.9215, "step": 14091 }, { "epoch": 2.6188440810258315, "grad_norm": 0.8494970202445984, "learning_rate": 5.3466994361203816e-05, "loss": 0.9996, "step": 14092 }, { "epoch": 2.619029920089203, "grad_norm": 0.7258545756340027, "learning_rate": 5.345406358616556e-05, "loss": 0.9192, "step": 14093 }, { "epoch": 2.6192157591525738, "grad_norm": 0.7305350303649902, "learning_rate": 5.344113380457044e-05, "loss": 0.7586, "step": 14094 }, { "epoch": 2.619401598215945, "grad_norm": 0.6838800311088562, "learning_rate": 5.342820501669439e-05, "loss": 0.8023, "step": 14095 }, { "epoch": 2.619587437279316, "grad_norm": 0.9066343903541565, "learning_rate": 5.3415277222813406e-05, "loss": 1.204, "step": 14096 }, { "epoch": 2.6197732763426873, 
"grad_norm": 0.9841775298118591, "learning_rate": 5.340235042320341e-05, "loss": 1.0951, "step": 14097 }, { "epoch": 2.619959115406058, "grad_norm": 0.8012330532073975, "learning_rate": 5.338942461814025e-05, "loss": 0.8781, "step": 14098 }, { "epoch": 2.6201449544694295, "grad_norm": 0.8908490538597107, "learning_rate": 5.337649980789982e-05, "loss": 0.6783, "step": 14099 }, { "epoch": 2.6203307935328004, "grad_norm": 0.8828836679458618, "learning_rate": 5.336357599275802e-05, "loss": 1.0055, "step": 14100 }, { "epoch": 2.6205166325961717, "grad_norm": 0.7191521525382996, "learning_rate": 5.335065317299066e-05, "loss": 0.6156, "step": 14101 }, { "epoch": 2.620702471659543, "grad_norm": 0.6662703156471252, "learning_rate": 5.333773134887352e-05, "loss": 0.6017, "step": 14102 }, { "epoch": 2.620888310722914, "grad_norm": 0.9348257184028625, "learning_rate": 5.332481052068243e-05, "loss": 1.1609, "step": 14103 }, { "epoch": 2.621074149786285, "grad_norm": 0.7789109945297241, "learning_rate": 5.33118906886932e-05, "loss": 0.7852, "step": 14104 }, { "epoch": 2.621259988849656, "grad_norm": 0.8765986561775208, "learning_rate": 5.329897185318153e-05, "loss": 0.9478, "step": 14105 }, { "epoch": 2.6214458279130275, "grad_norm": 0.8632599711418152, "learning_rate": 5.328605401442318e-05, "loss": 0.9694, "step": 14106 }, { "epoch": 2.6216316669763984, "grad_norm": 0.7510929703712463, "learning_rate": 5.32731371726938e-05, "loss": 0.7101, "step": 14107 }, { "epoch": 2.6218175060397697, "grad_norm": 0.9050139784812927, "learning_rate": 5.3260221328269156e-05, "loss": 0.8079, "step": 14108 }, { "epoch": 2.6220033451031406, "grad_norm": 0.7674015164375305, "learning_rate": 5.324730648142489e-05, "loss": 1.0027, "step": 14109 }, { "epoch": 2.622189184166512, "grad_norm": 0.8893373012542725, "learning_rate": 5.323439263243657e-05, "loss": 0.8087, "step": 14110 }, { "epoch": 2.622375023229883, "grad_norm": 0.7976099252700806, "learning_rate": 5.3221479781579955e-05, "loss": 0.7984, 
"step": 14111 }, { "epoch": 2.622560862293254, "grad_norm": 0.6816142797470093, "learning_rate": 5.320856792913056e-05, "loss": 0.8604, "step": 14112 }, { "epoch": 2.622746701356625, "grad_norm": 0.9564425945281982, "learning_rate": 5.3195657075363956e-05, "loss": 0.9432, "step": 14113 }, { "epoch": 2.6229325404199964, "grad_norm": 0.8268784284591675, "learning_rate": 5.318274722055576e-05, "loss": 0.8341, "step": 14114 }, { "epoch": 2.6231183794833672, "grad_norm": 0.9182666540145874, "learning_rate": 5.316983836498146e-05, "loss": 1.0505, "step": 14115 }, { "epoch": 2.6233042185467386, "grad_norm": 0.7662116885185242, "learning_rate": 5.3156930508916634e-05, "loss": 0.9504, "step": 14116 }, { "epoch": 2.62349005761011, "grad_norm": 0.7458078861236572, "learning_rate": 5.314402365263673e-05, "loss": 0.8565, "step": 14117 }, { "epoch": 2.623675896673481, "grad_norm": 0.7623659372329712, "learning_rate": 5.313111779641724e-05, "loss": 1.1978, "step": 14118 }, { "epoch": 2.6238617357368517, "grad_norm": 0.7925945520401001, "learning_rate": 5.311821294053356e-05, "loss": 0.7834, "step": 14119 }, { "epoch": 2.624047574800223, "grad_norm": 0.9102346301078796, "learning_rate": 5.3105309085261224e-05, "loss": 0.9621, "step": 14120 }, { "epoch": 2.6242334138635943, "grad_norm": 0.992512047290802, "learning_rate": 5.309240623087559e-05, "loss": 0.8519, "step": 14121 }, { "epoch": 2.624419252926965, "grad_norm": 0.777908205986023, "learning_rate": 5.3079504377652e-05, "loss": 0.7648, "step": 14122 }, { "epoch": 2.624605091990336, "grad_norm": 0.7941464185714722, "learning_rate": 5.3066603525865924e-05, "loss": 0.9803, "step": 14123 }, { "epoch": 2.6247909310537074, "grad_norm": 0.8003142476081848, "learning_rate": 5.30537036757926e-05, "loss": 0.7327, "step": 14124 }, { "epoch": 2.6249767701170788, "grad_norm": 1.1307048797607422, "learning_rate": 5.3040804827707457e-05, "loss": 0.8996, "step": 14125 }, { "epoch": 2.6251626091804496, "grad_norm": 0.903079092502594, 
"learning_rate": 5.3027906981885747e-05, "loss": 0.7964, "step": 14126 }, { "epoch": 2.625348448243821, "grad_norm": 0.8964478969573975, "learning_rate": 5.301501013860272e-05, "loss": 0.9799, "step": 14127 }, { "epoch": 2.625534287307192, "grad_norm": 1.1879934072494507, "learning_rate": 5.300211429813371e-05, "loss": 1.138, "step": 14128 }, { "epoch": 2.625720126370563, "grad_norm": 0.8264209628105164, "learning_rate": 5.29892194607539e-05, "loss": 0.9496, "step": 14129 }, { "epoch": 2.625905965433934, "grad_norm": 0.8963238000869751, "learning_rate": 5.297632562673851e-05, "loss": 0.8034, "step": 14130 }, { "epoch": 2.6260918044973054, "grad_norm": 1.0959327220916748, "learning_rate": 5.296343279636279e-05, "loss": 0.832, "step": 14131 }, { "epoch": 2.6262776435606767, "grad_norm": 1.2209457159042358, "learning_rate": 5.2950540969901876e-05, "loss": 1.1996, "step": 14132 }, { "epoch": 2.6264634826240476, "grad_norm": 1.5622323751449585, "learning_rate": 5.293765014763093e-05, "loss": 1.3739, "step": 14133 }, { "epoch": 2.6266493216874185, "grad_norm": 0.7270522117614746, "learning_rate": 5.292476032982503e-05, "loss": 0.7721, "step": 14134 }, { "epoch": 2.62683516075079, "grad_norm": 0.879499614238739, "learning_rate": 5.2911871516759336e-05, "loss": 0.9366, "step": 14135 }, { "epoch": 2.627020999814161, "grad_norm": 0.8861373662948608, "learning_rate": 5.289898370870898e-05, "loss": 0.892, "step": 14136 }, { "epoch": 2.627206838877532, "grad_norm": 0.7887271046638489, "learning_rate": 5.288609690594899e-05, "loss": 0.9441, "step": 14137 }, { "epoch": 2.627392677940903, "grad_norm": 1.050877332687378, "learning_rate": 5.287321110875442e-05, "loss": 0.8373, "step": 14138 }, { "epoch": 2.6275785170042743, "grad_norm": 0.7968853712081909, "learning_rate": 5.286032631740023e-05, "loss": 0.7597, "step": 14139 }, { "epoch": 2.6277643560676456, "grad_norm": 0.7845934629440308, "learning_rate": 5.284744253216154e-05, "loss": 0.9509, "step": 14140 }, { "epoch": 
2.6279501951310165, "grad_norm": 0.7953255772590637, "learning_rate": 5.283455975331325e-05, "loss": 0.9759, "step": 14141 }, { "epoch": 2.628136034194388, "grad_norm": 0.7907785177230835, "learning_rate": 5.282167798113031e-05, "loss": 0.901, "step": 14142 }, { "epoch": 2.6283218732577587, "grad_norm": 0.825691282749176, "learning_rate": 5.280879721588774e-05, "loss": 0.9301, "step": 14143 }, { "epoch": 2.62850771232113, "grad_norm": 0.8010538816452026, "learning_rate": 5.2795917457860414e-05, "loss": 0.8767, "step": 14144 }, { "epoch": 2.628693551384501, "grad_norm": 0.7336350083351135, "learning_rate": 5.2783038707323174e-05, "loss": 0.759, "step": 14145 }, { "epoch": 2.628879390447872, "grad_norm": 0.8091769814491272, "learning_rate": 5.2770160964550985e-05, "loss": 0.9816, "step": 14146 }, { "epoch": 2.629065229511243, "grad_norm": 0.812383770942688, "learning_rate": 5.275728422981861e-05, "loss": 0.7816, "step": 14147 }, { "epoch": 2.6292510685746144, "grad_norm": 0.9698837399482727, "learning_rate": 5.274440850340099e-05, "loss": 1.1908, "step": 14148 }, { "epoch": 2.6294369076379853, "grad_norm": 0.7005797624588013, "learning_rate": 5.273153378557287e-05, "loss": 0.8051, "step": 14149 }, { "epoch": 2.6296227467013567, "grad_norm": 0.7625775933265686, "learning_rate": 5.2718660076609005e-05, "loss": 0.9715, "step": 14150 }, { "epoch": 2.629808585764728, "grad_norm": 0.7771723866462708, "learning_rate": 5.270578737678425e-05, "loss": 0.8616, "step": 14151 }, { "epoch": 2.629994424828099, "grad_norm": 0.8352897763252258, "learning_rate": 5.269291568637329e-05, "loss": 0.7741, "step": 14152 }, { "epoch": 2.6301802638914697, "grad_norm": 0.8637375235557556, "learning_rate": 5.268004500565087e-05, "loss": 1.0465, "step": 14153 }, { "epoch": 2.630366102954841, "grad_norm": 0.882972776889801, "learning_rate": 5.266717533489164e-05, "loss": 1.052, "step": 14154 }, { "epoch": 2.6305519420182124, "grad_norm": 0.8657705187797546, "learning_rate": 5.265430667437033e-05, 
"loss": 0.9644, "step": 14155 }, { "epoch": 2.6307377810815833, "grad_norm": 0.8730942606925964, "learning_rate": 5.264143902436164e-05, "loss": 1.0131, "step": 14156 }, { "epoch": 2.6309236201449546, "grad_norm": 0.7059460878372192, "learning_rate": 5.2628572385140165e-05, "loss": 0.711, "step": 14157 }, { "epoch": 2.6311094592083255, "grad_norm": 0.9356588125228882, "learning_rate": 5.261570675698051e-05, "loss": 0.6547, "step": 14158 }, { "epoch": 2.631295298271697, "grad_norm": 0.8111671209335327, "learning_rate": 5.260284214015724e-05, "loss": 0.7721, "step": 14159 }, { "epoch": 2.6314811373350677, "grad_norm": 0.9288527369499207, "learning_rate": 5.258997853494501e-05, "loss": 0.8618, "step": 14160 }, { "epoch": 2.631666976398439, "grad_norm": 0.9449626207351685, "learning_rate": 5.257711594161833e-05, "loss": 0.9077, "step": 14161 }, { "epoch": 2.63185281546181, "grad_norm": 0.9024152159690857, "learning_rate": 5.2564254360451694e-05, "loss": 1.0439, "step": 14162 }, { "epoch": 2.6320386545251813, "grad_norm": 0.9024345874786377, "learning_rate": 5.255139379171967e-05, "loss": 1.0034, "step": 14163 }, { "epoch": 2.632224493588552, "grad_norm": 1.0499695539474487, "learning_rate": 5.253853423569675e-05, "loss": 0.9327, "step": 14164 }, { "epoch": 2.6324103326519235, "grad_norm": 0.8915688991546631, "learning_rate": 5.252567569265734e-05, "loss": 0.8831, "step": 14165 }, { "epoch": 2.632596171715295, "grad_norm": 0.8023610711097717, "learning_rate": 5.251281816287589e-05, "loss": 0.8582, "step": 14166 }, { "epoch": 2.6327820107786657, "grad_norm": 0.7795034646987915, "learning_rate": 5.2499961646626824e-05, "loss": 1.0204, "step": 14167 }, { "epoch": 2.6329678498420366, "grad_norm": 0.7348303198814392, "learning_rate": 5.248710614418463e-05, "loss": 0.7561, "step": 14168 }, { "epoch": 2.633153688905408, "grad_norm": 0.7823712229728699, "learning_rate": 5.24742516558236e-05, "loss": 0.9611, "step": 14169 }, { "epoch": 2.6333395279687792, "grad_norm": 
0.9677551984786987, "learning_rate": 5.246139818181809e-05, "loss": 1.1466, "step": 14170 }, { "epoch": 2.63352536703215, "grad_norm": 0.8288678526878357, "learning_rate": 5.2448545722442486e-05, "loss": 1.0228, "step": 14171 }, { "epoch": 2.633711206095521, "grad_norm": 0.8328431844711304, "learning_rate": 5.243569427797107e-05, "loss": 0.7301, "step": 14172 }, { "epoch": 2.6338970451588923, "grad_norm": 0.7218412756919861, "learning_rate": 5.2422843848678136e-05, "loss": 0.8425, "step": 14173 }, { "epoch": 2.6340828842222637, "grad_norm": 0.9164881110191345, "learning_rate": 5.2409994434837914e-05, "loss": 1.1753, "step": 14174 }, { "epoch": 2.6342687232856346, "grad_norm": 0.7694655060768127, "learning_rate": 5.239714603672474e-05, "loss": 0.7652, "step": 14175 }, { "epoch": 2.634454562349006, "grad_norm": 0.7973865866661072, "learning_rate": 5.238429865461275e-05, "loss": 1.0709, "step": 14176 }, { "epoch": 2.6346404014123768, "grad_norm": 1.0666766166687012, "learning_rate": 5.237145228877623e-05, "loss": 0.9486, "step": 14177 }, { "epoch": 2.634826240475748, "grad_norm": 0.8147100806236267, "learning_rate": 5.235860693948933e-05, "loss": 0.8283, "step": 14178 }, { "epoch": 2.635012079539119, "grad_norm": 0.7925849556922913, "learning_rate": 5.2345762607026174e-05, "loss": 0.8076, "step": 14179 }, { "epoch": 2.6351979186024903, "grad_norm": 0.7890560626983643, "learning_rate": 5.2332919291660976e-05, "loss": 0.8417, "step": 14180 }, { "epoch": 2.6353837576658616, "grad_norm": 0.7491629123687744, "learning_rate": 5.232007699366781e-05, "loss": 1.0209, "step": 14181 }, { "epoch": 2.6355695967292325, "grad_norm": 0.8427138328552246, "learning_rate": 5.230723571332075e-05, "loss": 1.0749, "step": 14182 }, { "epoch": 2.6357554357926034, "grad_norm": 0.8499711155891418, "learning_rate": 5.229439545089392e-05, "loss": 0.8501, "step": 14183 }, { "epoch": 2.6359412748559747, "grad_norm": 0.8856374621391296, "learning_rate": 5.2281556206661366e-05, "loss": 1.0512, 
"step": 14184 }, { "epoch": 2.636127113919346, "grad_norm": 0.8373563289642334, "learning_rate": 5.2268717980897095e-05, "loss": 0.8741, "step": 14185 }, { "epoch": 2.636312952982717, "grad_norm": 0.8471721410751343, "learning_rate": 5.225588077387511e-05, "loss": 0.9665, "step": 14186 }, { "epoch": 2.636498792046088, "grad_norm": 0.7628297805786133, "learning_rate": 5.2243044585869394e-05, "loss": 0.7661, "step": 14187 }, { "epoch": 2.636684631109459, "grad_norm": 0.8188031315803528, "learning_rate": 5.2230209417153994e-05, "loss": 0.9453, "step": 14188 }, { "epoch": 2.6368704701728305, "grad_norm": 0.8792202472686768, "learning_rate": 5.221737526800279e-05, "loss": 0.9707, "step": 14189 }, { "epoch": 2.6370563092362014, "grad_norm": 1.096123456954956, "learning_rate": 5.2204542138689685e-05, "loss": 0.9645, "step": 14190 }, { "epoch": 2.6372421482995727, "grad_norm": 0.7871784567832947, "learning_rate": 5.219171002948863e-05, "loss": 0.9165, "step": 14191 }, { "epoch": 2.6374279873629436, "grad_norm": 0.94199538230896, "learning_rate": 5.217887894067348e-05, "loss": 0.8837, "step": 14192 }, { "epoch": 2.637613826426315, "grad_norm": 0.8055230975151062, "learning_rate": 5.21660488725181e-05, "loss": 0.8327, "step": 14193 }, { "epoch": 2.637799665489686, "grad_norm": 0.8414456248283386, "learning_rate": 5.215321982529629e-05, "loss": 1.0833, "step": 14194 }, { "epoch": 2.637985504553057, "grad_norm": 0.8373480439186096, "learning_rate": 5.214039179928194e-05, "loss": 0.9982, "step": 14195 }, { "epoch": 2.638171343616428, "grad_norm": 0.7460327744483948, "learning_rate": 5.212756479474878e-05, "loss": 0.9654, "step": 14196 }, { "epoch": 2.6383571826797994, "grad_norm": 0.8177512288093567, "learning_rate": 5.211473881197056e-05, "loss": 0.9626, "step": 14197 }, { "epoch": 2.6385430217431702, "grad_norm": 0.6966250538825989, "learning_rate": 5.210191385122112e-05, "loss": 0.7215, "step": 14198 }, { "epoch": 2.6387288608065416, "grad_norm": 0.78021240234375, 
"learning_rate": 5.208908991277408e-05, "loss": 0.8037, "step": 14199 }, { "epoch": 2.638914699869913, "grad_norm": 0.922143280506134, "learning_rate": 5.2076266996903225e-05, "loss": 1.046, "step": 14200 }, { "epoch": 2.639100538933284, "grad_norm": 0.810273289680481, "learning_rate": 5.206344510388224e-05, "loss": 0.9812, "step": 14201 }, { "epoch": 2.6392863779966547, "grad_norm": 0.782341718673706, "learning_rate": 5.205062423398469e-05, "loss": 0.8912, "step": 14202 }, { "epoch": 2.639472217060026, "grad_norm": 1.0330047607421875, "learning_rate": 5.203780438748433e-05, "loss": 0.8249, "step": 14203 }, { "epoch": 2.6396580561233973, "grad_norm": 0.9015677571296692, "learning_rate": 5.202498556465474e-05, "loss": 1.1167, "step": 14204 }, { "epoch": 2.639843895186768, "grad_norm": 0.908305823802948, "learning_rate": 5.2012167765769494e-05, "loss": 0.9548, "step": 14205 }, { "epoch": 2.6400297342501395, "grad_norm": 0.8112418055534363, "learning_rate": 5.199935099110214e-05, "loss": 1.0671, "step": 14206 }, { "epoch": 2.6402155733135104, "grad_norm": 0.8479462265968323, "learning_rate": 5.1986535240926294e-05, "loss": 0.9551, "step": 14207 }, { "epoch": 2.6404014123768818, "grad_norm": 0.6957316398620605, "learning_rate": 5.1973720515515434e-05, "loss": 0.8288, "step": 14208 }, { "epoch": 2.6405872514402526, "grad_norm": 0.8121285438537598, "learning_rate": 5.196090681514313e-05, "loss": 0.7677, "step": 14209 }, { "epoch": 2.640773090503624, "grad_norm": 1.094486117362976, "learning_rate": 5.194809414008279e-05, "loss": 1.0898, "step": 14210 }, { "epoch": 2.640958929566995, "grad_norm": 0.9745634198188782, "learning_rate": 5.193528249060796e-05, "loss": 1.0696, "step": 14211 }, { "epoch": 2.641144768630366, "grad_norm": 0.7873470187187195, "learning_rate": 5.192247186699204e-05, "loss": 0.8372, "step": 14212 }, { "epoch": 2.641330607693737, "grad_norm": 0.8493298888206482, "learning_rate": 5.190966226950846e-05, "loss": 0.9002, "step": 14213 }, { "epoch": 
2.6415164467571084, "grad_norm": 0.7814367413520813, "learning_rate": 5.189685369843057e-05, "loss": 0.8651, "step": 14214 }, { "epoch": 2.6417022858204797, "grad_norm": 1.3405762910842896, "learning_rate": 5.188404615403183e-05, "loss": 0.9166, "step": 14215 }, { "epoch": 2.6418881248838506, "grad_norm": 0.8147268295288086, "learning_rate": 5.187123963658556e-05, "loss": 0.7849, "step": 14216 }, { "epoch": 2.6420739639472215, "grad_norm": 0.8468379378318787, "learning_rate": 5.1858434146365075e-05, "loss": 0.8855, "step": 14217 }, { "epoch": 2.642259803010593, "grad_norm": 0.8435200452804565, "learning_rate": 5.184562968364366e-05, "loss": 1.2382, "step": 14218 }, { "epoch": 2.642445642073964, "grad_norm": 1.185233235359192, "learning_rate": 5.1832826248694654e-05, "loss": 0.9162, "step": 14219 }, { "epoch": 2.642631481137335, "grad_norm": 0.8596548438072205, "learning_rate": 5.182002384179134e-05, "loss": 0.8396, "step": 14220 }, { "epoch": 2.642817320200706, "grad_norm": 0.7079363465309143, "learning_rate": 5.180722246320695e-05, "loss": 0.7973, "step": 14221 }, { "epoch": 2.6430031592640773, "grad_norm": 0.7993518114089966, "learning_rate": 5.179442211321466e-05, "loss": 0.8655, "step": 14222 }, { "epoch": 2.6431889983274486, "grad_norm": 0.8697411417961121, "learning_rate": 5.1781622792087735e-05, "loss": 0.8161, "step": 14223 }, { "epoch": 2.6433748373908195, "grad_norm": 1.0144784450531006, "learning_rate": 5.176882450009932e-05, "loss": 0.9606, "step": 14224 }, { "epoch": 2.643560676454191, "grad_norm": 0.7561770677566528, "learning_rate": 5.175602723752258e-05, "loss": 0.698, "step": 14225 }, { "epoch": 2.6437465155175617, "grad_norm": 0.9197513461112976, "learning_rate": 5.174323100463061e-05, "loss": 0.9161, "step": 14226 }, { "epoch": 2.643932354580933, "grad_norm": 0.7830883264541626, "learning_rate": 5.173043580169661e-05, "loss": 0.8977, "step": 14227 }, { "epoch": 2.644118193644304, "grad_norm": 0.7943947911262512, "learning_rate": 
5.1717641628993615e-05, "loss": 1.0571, "step": 14228 }, { "epoch": 2.6443040327076752, "grad_norm": 0.800528883934021, "learning_rate": 5.1704848486794664e-05, "loss": 0.9082, "step": 14229 }, { "epoch": 2.644489871771046, "grad_norm": 0.7897242903709412, "learning_rate": 5.1692056375372855e-05, "loss": 0.8276, "step": 14230 }, { "epoch": 2.6446757108344174, "grad_norm": 1.4376215934753418, "learning_rate": 5.167926529500124e-05, "loss": 1.0305, "step": 14231 }, { "epoch": 2.6448615498977883, "grad_norm": 0.7697019577026367, "learning_rate": 5.166647524595278e-05, "loss": 0.8588, "step": 14232 }, { "epoch": 2.6450473889611597, "grad_norm": 0.8492146134376526, "learning_rate": 5.1653686228500465e-05, "loss": 0.996, "step": 14233 }, { "epoch": 2.645233228024531, "grad_norm": 1.0387376546859741, "learning_rate": 5.16408982429172e-05, "loss": 0.9314, "step": 14234 }, { "epoch": 2.645419067087902, "grad_norm": 1.6353427171707153, "learning_rate": 5.162811128947602e-05, "loss": 1.4163, "step": 14235 }, { "epoch": 2.6456049061512728, "grad_norm": 0.9683260321617126, "learning_rate": 5.161532536844981e-05, "loss": 0.8203, "step": 14236 }, { "epoch": 2.645790745214644, "grad_norm": 0.874306321144104, "learning_rate": 5.1602540480111436e-05, "loss": 0.8829, "step": 14237 }, { "epoch": 2.6459765842780154, "grad_norm": 0.7753832936286926, "learning_rate": 5.1589756624733756e-05, "loss": 0.8694, "step": 14238 }, { "epoch": 2.6461624233413863, "grad_norm": 0.7963182330131531, "learning_rate": 5.157697380258962e-05, "loss": 0.9056, "step": 14239 }, { "epoch": 2.6463482624047576, "grad_norm": 1.00355064868927, "learning_rate": 5.156419201395194e-05, "loss": 1.0158, "step": 14240 }, { "epoch": 2.6465341014681285, "grad_norm": 1.5292688608169556, "learning_rate": 5.1551411259093466e-05, "loss": 1.0719, "step": 14241 }, { "epoch": 2.6467199405315, "grad_norm": 0.725684404373169, "learning_rate": 5.153863153828692e-05, "loss": 0.9253, "step": 14242 }, { "epoch": 2.6469057795948707, 
"grad_norm": 0.6308286786079407, "learning_rate": 5.152585285180517e-05, "loss": 0.536, "step": 14243 }, { "epoch": 2.647091618658242, "grad_norm": 0.7836467623710632, "learning_rate": 5.15130751999209e-05, "loss": 0.9303, "step": 14244 }, { "epoch": 2.647277457721613, "grad_norm": 0.6353241801261902, "learning_rate": 5.150029858290684e-05, "loss": 0.5537, "step": 14245 }, { "epoch": 2.6474632967849843, "grad_norm": 0.959025502204895, "learning_rate": 5.148752300103563e-05, "loss": 1.0209, "step": 14246 }, { "epoch": 2.647649135848355, "grad_norm": 0.9193606972694397, "learning_rate": 5.1474748454580047e-05, "loss": 1.0573, "step": 14247 }, { "epoch": 2.6478349749117265, "grad_norm": 0.7997956871986389, "learning_rate": 5.1461974943812674e-05, "loss": 0.9159, "step": 14248 }, { "epoch": 2.648020813975098, "grad_norm": 0.8786797523498535, "learning_rate": 5.1449202469006155e-05, "loss": 0.7875, "step": 14249 }, { "epoch": 2.6482066530384687, "grad_norm": 0.9094030261039734, "learning_rate": 5.1436431030433005e-05, "loss": 1.0004, "step": 14250 }, { "epoch": 2.6483924921018396, "grad_norm": 0.9047142267227173, "learning_rate": 5.142366062836599e-05, "loss": 0.9802, "step": 14251 }, { "epoch": 2.648578331165211, "grad_norm": 1.208447813987732, "learning_rate": 5.1410891263077566e-05, "loss": 0.9259, "step": 14252 }, { "epoch": 2.6487641702285822, "grad_norm": 0.8681517243385315, "learning_rate": 5.13981229348403e-05, "loss": 1.0942, "step": 14253 }, { "epoch": 2.648950009291953, "grad_norm": 0.6221885681152344, "learning_rate": 5.138535564392664e-05, "loss": 0.7514, "step": 14254 }, { "epoch": 2.6491358483553245, "grad_norm": 0.7742706537246704, "learning_rate": 5.137258939060918e-05, "loss": 0.9531, "step": 14255 }, { "epoch": 2.6493216874186953, "grad_norm": 0.757107138633728, "learning_rate": 5.135982417516036e-05, "loss": 0.9992, "step": 14256 }, { "epoch": 2.6495075264820667, "grad_norm": 0.9197544455528259, "learning_rate": 5.134705999785261e-05, "loss": 0.9013, 
"step": 14257 }, { "epoch": 2.6496933655454376, "grad_norm": 1.5455429553985596, "learning_rate": 5.133429685895834e-05, "loss": 1.1803, "step": 14258 }, { "epoch": 2.649879204608809, "grad_norm": 0.7492552995681763, "learning_rate": 5.1321534758750037e-05, "loss": 0.8355, "step": 14259 }, { "epoch": 2.6500650436721798, "grad_norm": 0.8414443135261536, "learning_rate": 5.130877369749999e-05, "loss": 1.0262, "step": 14260 }, { "epoch": 2.650250882735551, "grad_norm": 0.9861397743225098, "learning_rate": 5.129601367548065e-05, "loss": 1.1301, "step": 14261 }, { "epoch": 2.650436721798922, "grad_norm": 0.833222508430481, "learning_rate": 5.1283254692964286e-05, "loss": 1.2049, "step": 14262 }, { "epoch": 2.6506225608622933, "grad_norm": 0.7767835855484009, "learning_rate": 5.127049675022329e-05, "loss": 0.7874, "step": 14263 }, { "epoch": 2.6508083999256646, "grad_norm": 0.7894871830940247, "learning_rate": 5.125773984752991e-05, "loss": 0.8248, "step": 14264 }, { "epoch": 2.6509942389890355, "grad_norm": 0.7002455592155457, "learning_rate": 5.124498398515642e-05, "loss": 0.8009, "step": 14265 }, { "epoch": 2.6511800780524064, "grad_norm": 0.8562579154968262, "learning_rate": 5.1232229163375064e-05, "loss": 1.1361, "step": 14266 }, { "epoch": 2.6513659171157777, "grad_norm": 0.7994339466094971, "learning_rate": 5.12194753824581e-05, "loss": 0.7451, "step": 14267 }, { "epoch": 2.651551756179149, "grad_norm": 0.9162044525146484, "learning_rate": 5.120672264267775e-05, "loss": 0.9215, "step": 14268 }, { "epoch": 2.65173759524252, "grad_norm": 0.9385553598403931, "learning_rate": 5.119397094430616e-05, "loss": 0.9916, "step": 14269 }, { "epoch": 2.651923434305891, "grad_norm": 1.3068491220474243, "learning_rate": 5.118122028761547e-05, "loss": 1.2854, "step": 14270 }, { "epoch": 2.652109273369262, "grad_norm": 0.8506157398223877, "learning_rate": 5.1168470672877845e-05, "loss": 0.9981, "step": 14271 }, { "epoch": 2.6522951124326335, "grad_norm": 0.7934330701828003, 
"learning_rate": 5.1155722100365465e-05, "loss": 0.989, "step": 14272 }, { "epoch": 2.6524809514960044, "grad_norm": 0.8915961384773254, "learning_rate": 5.114297457035038e-05, "loss": 0.9278, "step": 14273 }, { "epoch": 2.6526667905593757, "grad_norm": 0.7989098429679871, "learning_rate": 5.1130228083104615e-05, "loss": 0.8468, "step": 14274 }, { "epoch": 2.6528526296227466, "grad_norm": 0.7344555258750916, "learning_rate": 5.111748263890031e-05, "loss": 0.8673, "step": 14275 }, { "epoch": 2.653038468686118, "grad_norm": 0.7172108888626099, "learning_rate": 5.1104738238009444e-05, "loss": 0.9453, "step": 14276 }, { "epoch": 2.653224307749489, "grad_norm": 0.7355473637580872, "learning_rate": 5.109199488070404e-05, "loss": 0.9495, "step": 14277 }, { "epoch": 2.65341014681286, "grad_norm": 0.8460148572921753, "learning_rate": 5.107925256725603e-05, "loss": 0.8353, "step": 14278 }, { "epoch": 2.653595985876231, "grad_norm": 1.0438544750213623, "learning_rate": 5.106651129793747e-05, "loss": 0.8366, "step": 14279 }, { "epoch": 2.6537818249396024, "grad_norm": 0.8582540154457092, "learning_rate": 5.1053771073020254e-05, "loss": 1.1757, "step": 14280 }, { "epoch": 2.6539676640029732, "grad_norm": 0.7860969305038452, "learning_rate": 5.104103189277625e-05, "loss": 0.6489, "step": 14281 }, { "epoch": 2.6541535030663446, "grad_norm": 0.7694686055183411, "learning_rate": 5.1028293757477416e-05, "loss": 1.0067, "step": 14282 }, { "epoch": 2.654339342129716, "grad_norm": 0.863415539264679, "learning_rate": 5.1015556667395636e-05, "loss": 0.8971, "step": 14283 }, { "epoch": 2.654525181193087, "grad_norm": 1.3249080181121826, "learning_rate": 5.100282062280275e-05, "loss": 1.3751, "step": 14284 }, { "epoch": 2.6547110202564577, "grad_norm": 1.5945967435836792, "learning_rate": 5.0990085623970585e-05, "loss": 1.4996, "step": 14285 }, { "epoch": 2.654896859319829, "grad_norm": 0.8719706535339355, "learning_rate": 5.097735167117088e-05, "loss": 1.0089, "step": 14286 }, { "epoch": 
2.6550826983832003, "grad_norm": 0.7747623920440674, "learning_rate": 5.096461876467553e-05, "loss": 0.8771, "step": 14287 }, { "epoch": 2.655268537446571, "grad_norm": 0.7249778509140015, "learning_rate": 5.095188690475623e-05, "loss": 0.9572, "step": 14288 }, { "epoch": 2.6554543765099425, "grad_norm": 0.9162830114364624, "learning_rate": 5.093915609168474e-05, "loss": 0.9416, "step": 14289 }, { "epoch": 2.6556402155733134, "grad_norm": 0.6986264586448669, "learning_rate": 5.0926426325732746e-05, "loss": 1.0275, "step": 14290 }, { "epoch": 2.6558260546366848, "grad_norm": 0.9343037009239197, "learning_rate": 5.091369760717199e-05, "loss": 1.0384, "step": 14291 }, { "epoch": 2.6560118937000556, "grad_norm": 0.8413649797439575, "learning_rate": 5.090096993627409e-05, "loss": 0.6088, "step": 14292 }, { "epoch": 2.656197732763427, "grad_norm": 0.788287341594696, "learning_rate": 5.088824331331077e-05, "loss": 0.897, "step": 14293 }, { "epoch": 2.656383571826798, "grad_norm": 0.8309547305107117, "learning_rate": 5.087551773855357e-05, "loss": 0.8824, "step": 14294 }, { "epoch": 2.656569410890169, "grad_norm": 0.8467617630958557, "learning_rate": 5.0862793212274186e-05, "loss": 0.7063, "step": 14295 }, { "epoch": 2.65675524995354, "grad_norm": 0.8307881355285645, "learning_rate": 5.0850069734744166e-05, "loss": 0.9054, "step": 14296 }, { "epoch": 2.6569410890169114, "grad_norm": 0.9663828015327454, "learning_rate": 5.083734730623506e-05, "loss": 0.7634, "step": 14297 }, { "epoch": 2.6571269280802827, "grad_norm": 0.9168074727058411, "learning_rate": 5.0824625927018366e-05, "loss": 0.9365, "step": 14298 }, { "epoch": 2.6573127671436536, "grad_norm": 0.7665404677391052, "learning_rate": 5.0811905597365684e-05, "loss": 0.7316, "step": 14299 }, { "epoch": 2.6574986062070245, "grad_norm": 0.8714559674263, "learning_rate": 5.079918631754847e-05, "loss": 0.993, "step": 14300 }, { "epoch": 2.657684445270396, "grad_norm": 0.7602294087409973, "learning_rate": 
5.078646808783815e-05, "loss": 0.8304, "step": 14301 }, { "epoch": 2.657870284333767, "grad_norm": 0.9733949303627014, "learning_rate": 5.077375090850621e-05, "loss": 0.7749, "step": 14302 }, { "epoch": 2.658056123397138, "grad_norm": 0.9775307178497314, "learning_rate": 5.0761034779824126e-05, "loss": 0.7289, "step": 14303 }, { "epoch": 2.658241962460509, "grad_norm": 0.7890309691429138, "learning_rate": 5.074831970206326e-05, "loss": 0.9095, "step": 14304 }, { "epoch": 2.6584278015238803, "grad_norm": 0.8886024951934814, "learning_rate": 5.073560567549498e-05, "loss": 0.9967, "step": 14305 }, { "epoch": 2.6586136405872516, "grad_norm": 0.7603307962417603, "learning_rate": 5.072289270039063e-05, "loss": 0.6832, "step": 14306 }, { "epoch": 2.6587994796506225, "grad_norm": 0.7396262884140015, "learning_rate": 5.071018077702161e-05, "loss": 0.9276, "step": 14307 }, { "epoch": 2.658985318713994, "grad_norm": 0.7766372561454773, "learning_rate": 5.0697469905659176e-05, "loss": 0.8897, "step": 14308 }, { "epoch": 2.6591711577773647, "grad_norm": 0.79985511302948, "learning_rate": 5.0684760086574665e-05, "loss": 0.8603, "step": 14309 }, { "epoch": 2.659356996840736, "grad_norm": 0.9170798659324646, "learning_rate": 5.067205132003927e-05, "loss": 0.9559, "step": 14310 }, { "epoch": 2.659542835904107, "grad_norm": 0.8943963050842285, "learning_rate": 5.065934360632434e-05, "loss": 0.8446, "step": 14311 }, { "epoch": 2.6597286749674782, "grad_norm": 0.8036159873008728, "learning_rate": 5.064663694570104e-05, "loss": 0.7597, "step": 14312 }, { "epoch": 2.6599145140308496, "grad_norm": 0.7508684396743774, "learning_rate": 5.063393133844055e-05, "loss": 0.7334, "step": 14313 }, { "epoch": 2.6601003530942204, "grad_norm": 0.9858553409576416, "learning_rate": 5.062122678481407e-05, "loss": 0.7561, "step": 14314 }, { "epoch": 2.6602861921575913, "grad_norm": 0.7608938813209534, "learning_rate": 5.06085232850928e-05, "loss": 0.7628, "step": 14315 }, { "epoch": 2.6604720312209627, 
"grad_norm": 0.8505204319953918, "learning_rate": 5.059582083954785e-05, "loss": 1.1395, "step": 14316 }, { "epoch": 2.660657870284334, "grad_norm": 0.7545738816261292, "learning_rate": 5.0583119448450324e-05, "loss": 0.8516, "step": 14317 }, { "epoch": 2.660843709347705, "grad_norm": 1.0585992336273193, "learning_rate": 5.057041911207128e-05, "loss": 0.7651, "step": 14318 }, { "epoch": 2.6610295484110758, "grad_norm": 0.9138060212135315, "learning_rate": 5.055771983068183e-05, "loss": 1.0043, "step": 14319 }, { "epoch": 2.661215387474447, "grad_norm": 0.801011860370636, "learning_rate": 5.0545021604553024e-05, "loss": 0.8839, "step": 14320 }, { "epoch": 2.6614012265378184, "grad_norm": 0.9903347492218018, "learning_rate": 5.053232443395581e-05, "loss": 0.8389, "step": 14321 }, { "epoch": 2.6615870656011893, "grad_norm": 0.7993159890174866, "learning_rate": 5.05196283191613e-05, "loss": 1.0068, "step": 14322 }, { "epoch": 2.6617729046645606, "grad_norm": 0.9454100728034973, "learning_rate": 5.050693326044036e-05, "loss": 0.8917, "step": 14323 }, { "epoch": 2.6619587437279315, "grad_norm": 0.971154510974884, "learning_rate": 5.049423925806403e-05, "loss": 0.763, "step": 14324 }, { "epoch": 2.662144582791303, "grad_norm": 0.7480639815330505, "learning_rate": 5.0481546312303216e-05, "loss": 0.8984, "step": 14325 }, { "epoch": 2.6623304218546737, "grad_norm": 0.8236550092697144, "learning_rate": 5.046885442342879e-05, "loss": 0.8049, "step": 14326 }, { "epoch": 2.662516260918045, "grad_norm": 0.8084285259246826, "learning_rate": 5.045616359171169e-05, "loss": 1.0081, "step": 14327 }, { "epoch": 2.662702099981416, "grad_norm": 0.7616249322891235, "learning_rate": 5.044347381742276e-05, "loss": 1.01, "step": 14328 }, { "epoch": 2.6628879390447873, "grad_norm": 0.9445034265518188, "learning_rate": 5.0430785100832846e-05, "loss": 0.9182, "step": 14329 }, { "epoch": 2.663073778108158, "grad_norm": 0.7334463596343994, "learning_rate": 5.0418097442212716e-05, "loss": 0.9515, 
"step": 14330 }, { "epoch": 2.6632596171715295, "grad_norm": 0.8281427621841431, "learning_rate": 5.0405410841833253e-05, "loss": 0.5546, "step": 14331 }, { "epoch": 2.663445456234901, "grad_norm": 0.8622934818267822, "learning_rate": 5.039272529996518e-05, "loss": 0.9142, "step": 14332 }, { "epoch": 2.6636312952982717, "grad_norm": 0.8588988780975342, "learning_rate": 5.038004081687922e-05, "loss": 0.9058, "step": 14333 }, { "epoch": 2.6638171343616426, "grad_norm": 1.0082697868347168, "learning_rate": 5.0367357392846146e-05, "loss": 0.7629, "step": 14334 }, { "epoch": 2.664002973425014, "grad_norm": 0.8957542777061462, "learning_rate": 5.0354675028136685e-05, "loss": 1.014, "step": 14335 }, { "epoch": 2.6641888124883852, "grad_norm": 0.828673243522644, "learning_rate": 5.03419937230215e-05, "loss": 0.8816, "step": 14336 }, { "epoch": 2.664374651551756, "grad_norm": 0.9521352648735046, "learning_rate": 5.032931347777123e-05, "loss": 1.1619, "step": 14337 }, { "epoch": 2.6645604906151275, "grad_norm": 0.7346090078353882, "learning_rate": 5.0316634292656495e-05, "loss": 0.9334, "step": 14338 }, { "epoch": 2.6647463296784983, "grad_norm": 0.8352324962615967, "learning_rate": 5.0303956167947976e-05, "loss": 0.9386, "step": 14339 }, { "epoch": 2.6649321687418697, "grad_norm": 0.9011669754981995, "learning_rate": 5.029127910391623e-05, "loss": 1.0561, "step": 14340 }, { "epoch": 2.6651180078052406, "grad_norm": 0.796430230140686, "learning_rate": 5.027860310083179e-05, "loss": 1.1265, "step": 14341 }, { "epoch": 2.665303846868612, "grad_norm": 0.8729113340377808, "learning_rate": 5.0265928158965295e-05, "loss": 1.0006, "step": 14342 }, { "epoch": 2.6654896859319828, "grad_norm": 0.9211896657943726, "learning_rate": 5.0253254278587195e-05, "loss": 1.0051, "step": 14343 }, { "epoch": 2.665675524995354, "grad_norm": 0.8242248892784119, "learning_rate": 5.024058145996797e-05, "loss": 1.1681, "step": 14344 }, { "epoch": 2.665861364058725, "grad_norm": 1.0670288801193237, 
"learning_rate": 5.0227909703378204e-05, "loss": 1.1635, "step": 14345 }, { "epoch": 2.6660472031220963, "grad_norm": 0.8117260932922363, "learning_rate": 5.021523900908824e-05, "loss": 0.8082, "step": 14346 }, { "epoch": 2.6662330421854676, "grad_norm": 0.9363516569137573, "learning_rate": 5.0202569377368616e-05, "loss": 1.0035, "step": 14347 }, { "epoch": 2.6664188812488385, "grad_norm": 0.8591864705085754, "learning_rate": 5.018990080848969e-05, "loss": 0.9849, "step": 14348 }, { "epoch": 2.6666047203122094, "grad_norm": 0.7635031342506409, "learning_rate": 5.017723330272184e-05, "loss": 1.0059, "step": 14349 }, { "epoch": 2.6667905593755807, "grad_norm": 0.7889599204063416, "learning_rate": 5.0164566860335414e-05, "loss": 0.5671, "step": 14350 }, { "epoch": 2.666976398438952, "grad_norm": 0.7520024180412292, "learning_rate": 5.015190148160083e-05, "loss": 0.9146, "step": 14351 }, { "epoch": 2.667162237502323, "grad_norm": 0.6558710932731628, "learning_rate": 5.013923716678837e-05, "loss": 0.7512, "step": 14352 }, { "epoch": 2.667348076565694, "grad_norm": 0.8916223645210266, "learning_rate": 5.0126573916168286e-05, "loss": 0.9032, "step": 14353 }, { "epoch": 2.667533915629065, "grad_norm": 0.8456600904464722, "learning_rate": 5.011391173001093e-05, "loss": 0.8913, "step": 14354 }, { "epoch": 2.6677197546924365, "grad_norm": 0.7922993898391724, "learning_rate": 5.010125060858648e-05, "loss": 1.0398, "step": 14355 }, { "epoch": 2.6679055937558074, "grad_norm": 0.8712417483329773, "learning_rate": 5.008859055216526e-05, "loss": 0.8474, "step": 14356 }, { "epoch": 2.6680914328191787, "grad_norm": 0.8451159000396729, "learning_rate": 5.0075931561017396e-05, "loss": 0.7022, "step": 14357 }, { "epoch": 2.6682772718825496, "grad_norm": 0.9817770719528198, "learning_rate": 5.006327363541308e-05, "loss": 0.9718, "step": 14358 }, { "epoch": 2.668463110945921, "grad_norm": 0.8464716076850891, "learning_rate": 5.005061677562253e-05, "loss": 0.907, "step": 14359 }, { 
"epoch": 2.668648950009292, "grad_norm": 0.8030047416687012, "learning_rate": 5.0037960981915844e-05, "loss": 0.7352, "step": 14360 }, { "epoch": 2.668834789072663, "grad_norm": 0.8626695871353149, "learning_rate": 5.002530625456312e-05, "loss": 0.8694, "step": 14361 }, { "epoch": 2.6690206281360345, "grad_norm": 0.7666325569152832, "learning_rate": 5.0012652593834496e-05, "loss": 0.796, "step": 14362 }, { "epoch": 2.6692064671994054, "grad_norm": 0.7711402773857117, "learning_rate": 5.000000000000002e-05, "loss": 0.7858, "step": 14363 }, { "epoch": 2.6693923062627762, "grad_norm": 0.7479420304298401, "learning_rate": 4.998734847332974e-05, "loss": 0.9035, "step": 14364 }, { "epoch": 2.6695781453261476, "grad_norm": 1.0149599313735962, "learning_rate": 4.9974698014093655e-05, "loss": 0.9755, "step": 14365 }, { "epoch": 2.669763984389519, "grad_norm": 0.8029696941375732, "learning_rate": 4.996204862256179e-05, "loss": 1.0167, "step": 14366 }, { "epoch": 2.66994982345289, "grad_norm": 0.8907744884490967, "learning_rate": 4.9949400299004155e-05, "loss": 0.9284, "step": 14367 }, { "epoch": 2.6701356625162607, "grad_norm": 0.8258806467056274, "learning_rate": 4.9936753043690695e-05, "loss": 0.8525, "step": 14368 }, { "epoch": 2.670321501579632, "grad_norm": 0.7489919066429138, "learning_rate": 4.992410685689133e-05, "loss": 0.8296, "step": 14369 }, { "epoch": 2.6705073406430033, "grad_norm": 0.8124386072158813, "learning_rate": 4.991146173887592e-05, "loss": 0.7661, "step": 14370 }, { "epoch": 2.670693179706374, "grad_norm": 0.9028873443603516, "learning_rate": 4.9898817689914445e-05, "loss": 0.9848, "step": 14371 }, { "epoch": 2.6708790187697455, "grad_norm": 0.892085075378418, "learning_rate": 4.988617471027673e-05, "loss": 0.8562, "step": 14372 }, { "epoch": 2.6710648578331164, "grad_norm": 0.9520072340965271, "learning_rate": 4.987353280023259e-05, "loss": 1.12, "step": 14373 }, { "epoch": 2.6712506968964878, "grad_norm": 0.8968404531478882, "learning_rate": 
4.98608919600519e-05, "loss": 1.0637, "step": 14374 }, { "epoch": 2.6714365359598586, "grad_norm": 1.0067741870880127, "learning_rate": 4.984825219000443e-05, "loss": 1.0221, "step": 14375 }, { "epoch": 2.67162237502323, "grad_norm": 0.9988031983375549, "learning_rate": 4.983561349035991e-05, "loss": 1.0664, "step": 14376 }, { "epoch": 2.671808214086601, "grad_norm": 0.7657214999198914, "learning_rate": 4.982297586138819e-05, "loss": 0.9417, "step": 14377 }, { "epoch": 2.671994053149972, "grad_norm": 0.8332259058952332, "learning_rate": 4.981033930335889e-05, "loss": 0.8141, "step": 14378 }, { "epoch": 2.672179892213343, "grad_norm": 0.9903144240379333, "learning_rate": 4.979770381654181e-05, "loss": 0.9264, "step": 14379 }, { "epoch": 2.6723657312767144, "grad_norm": 0.9208203554153442, "learning_rate": 4.978506940120659e-05, "loss": 1.1661, "step": 14380 }, { "epoch": 2.6725515703400857, "grad_norm": 0.9300960898399353, "learning_rate": 4.977243605762285e-05, "loss": 0.9891, "step": 14381 }, { "epoch": 2.6727374094034566, "grad_norm": 0.740096926689148, "learning_rate": 4.975980378606031e-05, "loss": 0.8299, "step": 14382 }, { "epoch": 2.6729232484668275, "grad_norm": 0.8849905133247375, "learning_rate": 4.9747172586788546e-05, "loss": 0.8402, "step": 14383 }, { "epoch": 2.673109087530199, "grad_norm": 0.8056560754776001, "learning_rate": 4.973454246007714e-05, "loss": 0.9675, "step": 14384 }, { "epoch": 2.67329492659357, "grad_norm": 0.8289986252784729, "learning_rate": 4.972191340619563e-05, "loss": 0.7964, "step": 14385 }, { "epoch": 2.673480765656941, "grad_norm": 0.9971672892570496, "learning_rate": 4.97092854254136e-05, "loss": 0.8966, "step": 14386 }, { "epoch": 2.6736666047203124, "grad_norm": 0.9446187019348145, "learning_rate": 4.9696658518000613e-05, "loss": 0.8894, "step": 14387 }, { "epoch": 2.6738524437836833, "grad_norm": 0.7367177605628967, "learning_rate": 4.968403268422612e-05, "loss": 0.8897, "step": 14388 }, { "epoch": 2.6740382828470546, 
"grad_norm": 0.9521549940109253, "learning_rate": 4.96714079243596e-05, "loss": 0.9574, "step": 14389 }, { "epoch": 2.6742241219104255, "grad_norm": 0.8508783578872681, "learning_rate": 4.9658784238670475e-05, "loss": 0.9341, "step": 14390 }, { "epoch": 2.674409960973797, "grad_norm": 0.9512181282043457, "learning_rate": 4.964616162742826e-05, "loss": 0.9891, "step": 14391 }, { "epoch": 2.6745958000371677, "grad_norm": 0.8128776550292969, "learning_rate": 4.9633540090902296e-05, "loss": 0.9037, "step": 14392 }, { "epoch": 2.674781639100539, "grad_norm": 0.8413349390029907, "learning_rate": 4.962091962936196e-05, "loss": 0.7678, "step": 14393 }, { "epoch": 2.67496747816391, "grad_norm": 0.9310625195503235, "learning_rate": 4.9608300243076674e-05, "loss": 0.9023, "step": 14394 }, { "epoch": 2.6751533172272812, "grad_norm": 0.7659721970558167, "learning_rate": 4.959568193231575e-05, "loss": 0.9902, "step": 14395 }, { "epoch": 2.6753391562906526, "grad_norm": 0.7874768376350403, "learning_rate": 4.9583064697348495e-05, "loss": 0.9488, "step": 14396 }, { "epoch": 2.6755249953540234, "grad_norm": 0.7902805209159851, "learning_rate": 4.957044853844417e-05, "loss": 0.8912, "step": 14397 }, { "epoch": 2.6757108344173943, "grad_norm": 0.7868594527244568, "learning_rate": 4.9557833455872074e-05, "loss": 1.2056, "step": 14398 }, { "epoch": 2.6758966734807657, "grad_norm": 0.7617980241775513, "learning_rate": 4.95452194499015e-05, "loss": 0.8513, "step": 14399 }, { "epoch": 2.676082512544137, "grad_norm": 0.8179283142089844, "learning_rate": 4.953260652080163e-05, "loss": 0.8543, "step": 14400 }, { "epoch": 2.676268351607508, "grad_norm": 0.8407628536224365, "learning_rate": 4.951999466884164e-05, "loss": 0.9947, "step": 14401 }, { "epoch": 2.6764541906708788, "grad_norm": 0.8140806555747986, "learning_rate": 4.950738389429077e-05, "loss": 0.9707, "step": 14402 }, { "epoch": 2.67664002973425, "grad_norm": 0.8304571509361267, "learning_rate": 4.949477419741814e-05, "loss": 
0.8568, "step": 14403 }, { "epoch": 2.6768258687976214, "grad_norm": 0.7684166431427002, "learning_rate": 4.948216557849288e-05, "loss": 0.8984, "step": 14404 }, { "epoch": 2.6770117078609923, "grad_norm": 0.8779479265213013, "learning_rate": 4.946955803778407e-05, "loss": 1.0934, "step": 14405 }, { "epoch": 2.6771975469243636, "grad_norm": 0.8864030241966248, "learning_rate": 4.945695157556087e-05, "loss": 0.6961, "step": 14406 }, { "epoch": 2.6773833859877345, "grad_norm": 0.8695092797279358, "learning_rate": 4.944434619209226e-05, "loss": 0.9414, "step": 14407 }, { "epoch": 2.677569225051106, "grad_norm": 0.8751302361488342, "learning_rate": 4.943174188764737e-05, "loss": 0.9969, "step": 14408 }, { "epoch": 2.6777550641144767, "grad_norm": 0.8184906840324402, "learning_rate": 4.941913866249517e-05, "loss": 0.9043, "step": 14409 }, { "epoch": 2.677940903177848, "grad_norm": 0.7370218634605408, "learning_rate": 4.9406536516904615e-05, "loss": 0.8803, "step": 14410 }, { "epoch": 2.678126742241219, "grad_norm": 0.8426598310470581, "learning_rate": 4.939393545114475e-05, "loss": 0.7561, "step": 14411 }, { "epoch": 2.6783125813045903, "grad_norm": 0.9060295820236206, "learning_rate": 4.938133546548449e-05, "loss": 0.8861, "step": 14412 }, { "epoch": 2.678498420367961, "grad_norm": 0.797497570514679, "learning_rate": 4.936873656019273e-05, "loss": 0.767, "step": 14413 }, { "epoch": 2.6786842594313325, "grad_norm": 0.7489145398139954, "learning_rate": 4.9356138735538425e-05, "loss": 0.8747, "step": 14414 }, { "epoch": 2.678870098494704, "grad_norm": 0.7489208579063416, "learning_rate": 4.934354199179044e-05, "loss": 0.943, "step": 14415 }, { "epoch": 2.6790559375580747, "grad_norm": 0.7728361487388611, "learning_rate": 4.933094632921762e-05, "loss": 0.7232, "step": 14416 }, { "epoch": 2.6792417766214456, "grad_norm": 1.0955910682678223, "learning_rate": 4.931835174808876e-05, "loss": 0.9856, "step": 14417 }, { "epoch": 2.679427615684817, "grad_norm": 0.7960167527198792, 
"learning_rate": 4.930575824867271e-05, "loss": 0.9901, "step": 14418 }, { "epoch": 2.6796134547481882, "grad_norm": 1.1254215240478516, "learning_rate": 4.92931658312383e-05, "loss": 0.6882, "step": 14419 }, { "epoch": 2.679799293811559, "grad_norm": 0.8247450590133667, "learning_rate": 4.928057449605423e-05, "loss": 0.9769, "step": 14420 }, { "epoch": 2.6799851328749305, "grad_norm": 0.8798690438270569, "learning_rate": 4.926798424338923e-05, "loss": 1.0393, "step": 14421 }, { "epoch": 2.6801709719383013, "grad_norm": 0.7442476749420166, "learning_rate": 4.925539507351209e-05, "loss": 0.8805, "step": 14422 }, { "epoch": 2.6803568110016727, "grad_norm": 0.8409537076950073, "learning_rate": 4.924280698669146e-05, "loss": 1.1698, "step": 14423 }, { "epoch": 2.6805426500650436, "grad_norm": 0.7573469281196594, "learning_rate": 4.9230219983196015e-05, "loss": 0.9185, "step": 14424 }, { "epoch": 2.680728489128415, "grad_norm": 0.8142333030700684, "learning_rate": 4.921763406329436e-05, "loss": 0.997, "step": 14425 }, { "epoch": 2.6809143281917858, "grad_norm": 0.7045801281929016, "learning_rate": 4.9205049227255194e-05, "loss": 0.7366, "step": 14426 }, { "epoch": 2.681100167255157, "grad_norm": 0.756362795829773, "learning_rate": 4.919246547534708e-05, "loss": 0.9306, "step": 14427 }, { "epoch": 2.681286006318528, "grad_norm": 0.893324077129364, "learning_rate": 4.9179882807838575e-05, "loss": 1.0491, "step": 14428 }, { "epoch": 2.6814718453818993, "grad_norm": 0.7502169013023376, "learning_rate": 4.9167301224998285e-05, "loss": 1.0363, "step": 14429 }, { "epoch": 2.6816576844452706, "grad_norm": 0.6055135726928711, "learning_rate": 4.9154720727094695e-05, "loss": 0.7655, "step": 14430 }, { "epoch": 2.6818435235086415, "grad_norm": 0.8633196949958801, "learning_rate": 4.914214131439636e-05, "loss": 0.937, "step": 14431 }, { "epoch": 2.6820293625720124, "grad_norm": 0.7565577030181885, "learning_rate": 4.912956298717175e-05, "loss": 0.8577, "step": 14432 }, { "epoch": 
2.6822152016353837, "grad_norm": 1.0513862371444702, "learning_rate": 4.9116985745689294e-05, "loss": 1.1963, "step": 14433 }, { "epoch": 2.682401040698755, "grad_norm": 0.7482678890228271, "learning_rate": 4.910440959021748e-05, "loss": 0.8609, "step": 14434 }, { "epoch": 2.682586879762126, "grad_norm": 0.7162086367607117, "learning_rate": 4.9091834521024714e-05, "loss": 0.8151, "step": 14435 }, { "epoch": 2.6827727188254973, "grad_norm": 0.7310633659362793, "learning_rate": 4.9079260538379366e-05, "loss": 0.7638, "step": 14436 }, { "epoch": 2.682958557888868, "grad_norm": 0.8538556098937988, "learning_rate": 4.906668764254979e-05, "loss": 0.9732, "step": 14437 }, { "epoch": 2.6831443969522395, "grad_norm": 0.8188505172729492, "learning_rate": 4.90541158338044e-05, "loss": 0.9777, "step": 14438 }, { "epoch": 2.6833302360156104, "grad_norm": 0.6868131160736084, "learning_rate": 4.9041545112411434e-05, "loss": 0.7902, "step": 14439 }, { "epoch": 2.6835160750789817, "grad_norm": 0.7646917700767517, "learning_rate": 4.902897547863928e-05, "loss": 0.8853, "step": 14440 }, { "epoch": 2.6837019141423526, "grad_norm": 0.8058961033821106, "learning_rate": 4.9016406932756155e-05, "loss": 0.9065, "step": 14441 }, { "epoch": 2.683887753205724, "grad_norm": 0.9321679472923279, "learning_rate": 4.900383947503034e-05, "loss": 1.0314, "step": 14442 }, { "epoch": 2.684073592269095, "grad_norm": 0.786422610282898, "learning_rate": 4.899127310573009e-05, "loss": 0.7887, "step": 14443 }, { "epoch": 2.684259431332466, "grad_norm": 0.874234676361084, "learning_rate": 4.897870782512356e-05, "loss": 1.1024, "step": 14444 }, { "epoch": 2.6844452703958375, "grad_norm": 0.8297612071037292, "learning_rate": 4.896614363347892e-05, "loss": 0.8138, "step": 14445 }, { "epoch": 2.6846311094592084, "grad_norm": 0.9445776343345642, "learning_rate": 4.89535805310644e-05, "loss": 0.8972, "step": 14446 }, { "epoch": 2.6848169485225792, "grad_norm": 0.9599937796592712, "learning_rate": 
4.894101851814812e-05, "loss": 0.7803, "step": 14447 }, { "epoch": 2.6850027875859506, "grad_norm": 0.8982528448104858, "learning_rate": 4.8928457594998165e-05, "loss": 0.8521, "step": 14448 }, { "epoch": 2.685188626649322, "grad_norm": 0.9184148907661438, "learning_rate": 4.891589776188261e-05, "loss": 1.1211, "step": 14449 }, { "epoch": 2.685374465712693, "grad_norm": 0.8056932687759399, "learning_rate": 4.890333901906955e-05, "loss": 0.9148, "step": 14450 }, { "epoch": 2.6855603047760637, "grad_norm": 0.847355306148529, "learning_rate": 4.889078136682708e-05, "loss": 0.9258, "step": 14451 }, { "epoch": 2.685746143839435, "grad_norm": 0.9067242741584778, "learning_rate": 4.8878224805423167e-05, "loss": 0.8694, "step": 14452 }, { "epoch": 2.6859319829028063, "grad_norm": 1.0393316745758057, "learning_rate": 4.886566933512578e-05, "loss": 1.007, "step": 14453 }, { "epoch": 2.686117821966177, "grad_norm": 0.8801686763763428, "learning_rate": 4.885311495620296e-05, "loss": 1.0337, "step": 14454 }, { "epoch": 2.6863036610295485, "grad_norm": 0.6567339897155762, "learning_rate": 4.884056166892265e-05, "loss": 0.5863, "step": 14455 }, { "epoch": 2.6864895000929194, "grad_norm": 0.8444393873214722, "learning_rate": 4.882800947355274e-05, "loss": 1.0625, "step": 14456 }, { "epoch": 2.6866753391562908, "grad_norm": 0.8692553043365479, "learning_rate": 4.881545837036112e-05, "loss": 0.8933, "step": 14457 }, { "epoch": 2.6868611782196616, "grad_norm": 0.9198605418205261, "learning_rate": 4.880290835961574e-05, "loss": 0.9862, "step": 14458 }, { "epoch": 2.687047017283033, "grad_norm": 0.8040031790733337, "learning_rate": 4.879035944158443e-05, "loss": 0.9413, "step": 14459 }, { "epoch": 2.687232856346404, "grad_norm": 0.8228282928466797, "learning_rate": 4.8777811616534976e-05, "loss": 0.7872, "step": 14460 }, { "epoch": 2.687418695409775, "grad_norm": 0.8292063474655151, "learning_rate": 4.876526488473524e-05, "loss": 0.8434, "step": 14461 }, { "epoch": 2.687604534473146, 
"grad_norm": 0.9853034615516663, "learning_rate": 4.875271924645303e-05, "loss": 0.9802, "step": 14462 }, { "epoch": 2.6877903735365174, "grad_norm": 0.7087395191192627, "learning_rate": 4.8740174701956085e-05, "loss": 0.8545, "step": 14463 }, { "epoch": 2.6879762125998887, "grad_norm": 0.8959962725639343, "learning_rate": 4.872763125151215e-05, "loss": 0.8218, "step": 14464 }, { "epoch": 2.6881620516632596, "grad_norm": 1.5733524560928345, "learning_rate": 4.8715088895388904e-05, "loss": 1.4046, "step": 14465 }, { "epoch": 2.6883478907266305, "grad_norm": 0.7405545711517334, "learning_rate": 4.870254763385411e-05, "loss": 0.9044, "step": 14466 }, { "epoch": 2.688533729790002, "grad_norm": 0.7159562706947327, "learning_rate": 4.8690007467175414e-05, "loss": 0.9171, "step": 14467 }, { "epoch": 2.688719568853373, "grad_norm": 0.800383448600769, "learning_rate": 4.867746839562045e-05, "loss": 0.9989, "step": 14468 }, { "epoch": 2.688905407916744, "grad_norm": 0.7202253341674805, "learning_rate": 4.866493041945681e-05, "loss": 0.7756, "step": 14469 }, { "epoch": 2.6890912469801154, "grad_norm": 0.7838736772537231, "learning_rate": 4.8652393538952146e-05, "loss": 0.9914, "step": 14470 }, { "epoch": 2.6892770860434863, "grad_norm": 0.86966872215271, "learning_rate": 4.863985775437405e-05, "loss": 0.967, "step": 14471 }, { "epoch": 2.6894629251068576, "grad_norm": 0.8207091689109802, "learning_rate": 4.862732306599006e-05, "loss": 0.8466, "step": 14472 }, { "epoch": 2.6896487641702285, "grad_norm": 0.8837814331054688, "learning_rate": 4.861478947406767e-05, "loss": 0.975, "step": 14473 }, { "epoch": 2.6898346032336, "grad_norm": 0.8468865156173706, "learning_rate": 4.860225697887444e-05, "loss": 0.8451, "step": 14474 }, { "epoch": 2.6900204422969707, "grad_norm": 0.7788984775543213, "learning_rate": 4.8589725580677835e-05, "loss": 0.904, "step": 14475 }, { "epoch": 2.690206281360342, "grad_norm": 1.0463881492614746, "learning_rate": 4.857719527974532e-05, "loss": 0.9791, 
"step": 14476 }, { "epoch": 2.690392120423713, "grad_norm": 0.8855894804000854, "learning_rate": 4.856466607634429e-05, "loss": 1.1418, "step": 14477 }, { "epoch": 2.6905779594870842, "grad_norm": 0.7966851592063904, "learning_rate": 4.855213797074224e-05, "loss": 1.0921, "step": 14478 }, { "epoch": 2.6907637985504556, "grad_norm": 0.7545307874679565, "learning_rate": 4.8539610963206504e-05, "loss": 0.8605, "step": 14479 }, { "epoch": 2.6909496376138264, "grad_norm": 0.8396836519241333, "learning_rate": 4.852708505400447e-05, "loss": 0.6345, "step": 14480 }, { "epoch": 2.6911354766771973, "grad_norm": 0.9581857919692993, "learning_rate": 4.8514560243403396e-05, "loss": 1.1265, "step": 14481 }, { "epoch": 2.6913213157405687, "grad_norm": 0.8851617574691772, "learning_rate": 4.850203653167076e-05, "loss": 0.8745, "step": 14482 }, { "epoch": 2.69150715480394, "grad_norm": 0.9810107946395874, "learning_rate": 4.848951391907377e-05, "loss": 1.0509, "step": 14483 }, { "epoch": 2.691692993867311, "grad_norm": 0.8600289821624756, "learning_rate": 4.847699240587972e-05, "loss": 0.8622, "step": 14484 }, { "epoch": 2.6918788329306818, "grad_norm": 0.9160045385360718, "learning_rate": 4.8464471992355805e-05, "loss": 0.9755, "step": 14485 }, { "epoch": 2.692064671994053, "grad_norm": 0.7092505097389221, "learning_rate": 4.845195267876934e-05, "loss": 0.7566, "step": 14486 }, { "epoch": 2.6922505110574244, "grad_norm": 1.0010638236999512, "learning_rate": 4.843943446538749e-05, "loss": 0.9392, "step": 14487 }, { "epoch": 2.6924363501207953, "grad_norm": 0.8743418455123901, "learning_rate": 4.842691735247742e-05, "loss": 0.9309, "step": 14488 }, { "epoch": 2.6926221891841666, "grad_norm": 0.7750649452209473, "learning_rate": 4.841440134030627e-05, "loss": 0.8315, "step": 14489 }, { "epoch": 2.6928080282475375, "grad_norm": 0.8826529383659363, "learning_rate": 4.8401886429141216e-05, "loss": 0.9026, "step": 14490 }, { "epoch": 2.692993867310909, "grad_norm": 0.8705019354820251, 
"learning_rate": 4.8389372619249326e-05, "loss": 1.0637, "step": 14491 }, { "epoch": 2.6931797063742797, "grad_norm": 0.861504852771759, "learning_rate": 4.8376859910897756e-05, "loss": 0.9621, "step": 14492 }, { "epoch": 2.693365545437651, "grad_norm": 0.8101507425308228, "learning_rate": 4.836434830435348e-05, "loss": 0.8763, "step": 14493 }, { "epoch": 2.6935513845010224, "grad_norm": 0.943341076374054, "learning_rate": 4.8351837799883605e-05, "loss": 0.7872, "step": 14494 }, { "epoch": 2.6937372235643933, "grad_norm": 0.8089012503623962, "learning_rate": 4.8339328397755135e-05, "loss": 0.7785, "step": 14495 }, { "epoch": 2.693923062627764, "grad_norm": 0.9210566878318787, "learning_rate": 4.832682009823504e-05, "loss": 1.0245, "step": 14496 }, { "epoch": 2.6941089016911355, "grad_norm": 0.8864029049873352, "learning_rate": 4.831431290159025e-05, "loss": 1.0957, "step": 14497 }, { "epoch": 2.694294740754507, "grad_norm": 0.8884971737861633, "learning_rate": 4.8301806808087804e-05, "loss": 0.8855, "step": 14498 }, { "epoch": 2.6944805798178777, "grad_norm": 0.8697060942649841, "learning_rate": 4.828930181799457e-05, "loss": 1.0415, "step": 14499 }, { "epoch": 2.6946664188812486, "grad_norm": 0.708972156047821, "learning_rate": 4.827679793157746e-05, "loss": 0.8836, "step": 14500 }, { "epoch": 2.69485225794462, "grad_norm": 0.946927547454834, "learning_rate": 4.8264295149103275e-05, "loss": 1.2003, "step": 14501 }, { "epoch": 2.6950380970079912, "grad_norm": 0.9309900999069214, "learning_rate": 4.825179347083895e-05, "loss": 0.8666, "step": 14502 }, { "epoch": 2.695223936071362, "grad_norm": 0.7605841159820557, "learning_rate": 4.823929289705132e-05, "loss": 0.7412, "step": 14503 }, { "epoch": 2.6954097751347335, "grad_norm": 0.7765486836433411, "learning_rate": 4.8226793428007156e-05, "loss": 0.9386, "step": 14504 }, { "epoch": 2.6955956141981043, "grad_norm": 0.9206398129463196, "learning_rate": 4.82142950639732e-05, "loss": 0.8607, "step": 14505 }, { "epoch": 
2.6957814532614757, "grad_norm": 0.7876893281936646, "learning_rate": 4.8201797805216294e-05, "loss": 0.8987, "step": 14506 }, { "epoch": 2.6959672923248466, "grad_norm": 0.8226445913314819, "learning_rate": 4.818930165200313e-05, "loss": 0.7574, "step": 14507 }, { "epoch": 2.696153131388218, "grad_norm": 0.9933581352233887, "learning_rate": 4.81768066046004e-05, "loss": 0.8114, "step": 14508 }, { "epoch": 2.6963389704515888, "grad_norm": 0.7784883975982666, "learning_rate": 4.816431266327477e-05, "loss": 1.0112, "step": 14509 }, { "epoch": 2.69652480951496, "grad_norm": 0.7937835454940796, "learning_rate": 4.8151819828292954e-05, "loss": 0.5749, "step": 14510 }, { "epoch": 2.696710648578331, "grad_norm": 0.7758409380912781, "learning_rate": 4.813932809992159e-05, "loss": 0.8044, "step": 14511 }, { "epoch": 2.6968964876417023, "grad_norm": 0.7109744548797607, "learning_rate": 4.8126837478427223e-05, "loss": 0.7163, "step": 14512 }, { "epoch": 2.6970823267050736, "grad_norm": 0.8645885586738586, "learning_rate": 4.811434796407649e-05, "loss": 1.0301, "step": 14513 }, { "epoch": 2.6972681657684445, "grad_norm": 0.9839845299720764, "learning_rate": 4.8101859557136e-05, "loss": 0.9307, "step": 14514 }, { "epoch": 2.6974540048318154, "grad_norm": 0.8526188135147095, "learning_rate": 4.8089372257872266e-05, "loss": 0.7929, "step": 14515 }, { "epoch": 2.6976398438951867, "grad_norm": 0.746585488319397, "learning_rate": 4.807688606655179e-05, "loss": 0.9063, "step": 14516 }, { "epoch": 2.697825682958558, "grad_norm": 0.8589296936988831, "learning_rate": 4.806440098344104e-05, "loss": 0.9476, "step": 14517 }, { "epoch": 2.698011522021929, "grad_norm": 0.7648781538009644, "learning_rate": 4.805191700880657e-05, "loss": 0.7917, "step": 14518 }, { "epoch": 2.6981973610853003, "grad_norm": 0.8353559374809265, "learning_rate": 4.803943414291477e-05, "loss": 0.9623, "step": 14519 }, { "epoch": 2.698383200148671, "grad_norm": 0.915885329246521, "learning_rate": 
4.8026952386032096e-05, "loss": 0.8834, "step": 14520 }, { "epoch": 2.6985690392120425, "grad_norm": 0.9126646518707275, "learning_rate": 4.8014471738424885e-05, "loss": 0.9102, "step": 14521 }, { "epoch": 2.6987548782754134, "grad_norm": 1.121840476989746, "learning_rate": 4.8001992200359616e-05, "loss": 1.0407, "step": 14522 }, { "epoch": 2.6989407173387847, "grad_norm": 0.8188127279281616, "learning_rate": 4.7989513772102537e-05, "loss": 1.1356, "step": 14523 }, { "epoch": 2.6991265564021556, "grad_norm": 0.8660606145858765, "learning_rate": 4.7977036453920074e-05, "loss": 0.9382, "step": 14524 }, { "epoch": 2.699312395465527, "grad_norm": 0.9244834780693054, "learning_rate": 4.796456024607846e-05, "loss": 0.9308, "step": 14525 }, { "epoch": 2.699498234528898, "grad_norm": 0.7831543684005737, "learning_rate": 4.795208514884405e-05, "loss": 0.9356, "step": 14526 }, { "epoch": 2.699684073592269, "grad_norm": 0.7805042862892151, "learning_rate": 4.7939611162483065e-05, "loss": 0.8295, "step": 14527 }, { "epoch": 2.6998699126556405, "grad_norm": 0.9255910515785217, "learning_rate": 4.792713828726174e-05, "loss": 1.0148, "step": 14528 }, { "epoch": 2.7000557517190114, "grad_norm": 0.8160353302955627, "learning_rate": 4.791466652344625e-05, "loss": 0.8816, "step": 14529 }, { "epoch": 2.7002415907823822, "grad_norm": 0.8323808908462524, "learning_rate": 4.790219587130286e-05, "loss": 0.9375, "step": 14530 }, { "epoch": 2.7004274298457536, "grad_norm": 0.8540868163108826, "learning_rate": 4.7889726331097686e-05, "loss": 0.9339, "step": 14531 }, { "epoch": 2.700613268909125, "grad_norm": 0.8291054368019104, "learning_rate": 4.787725790309685e-05, "loss": 0.8972, "step": 14532 }, { "epoch": 2.700799107972496, "grad_norm": 1.0824410915374756, "learning_rate": 4.78647905875665e-05, "loss": 0.9822, "step": 14533 }, { "epoch": 2.7009849470358667, "grad_norm": 0.8491551876068115, "learning_rate": 4.785232438477276e-05, "loss": 0.9835, "step": 14534 }, { "epoch": 
2.701170786099238, "grad_norm": 0.8033833503723145, "learning_rate": 4.783985929498166e-05, "loss": 0.9216, "step": 14535 }, { "epoch": 2.7013566251626093, "grad_norm": 1.0135128498077393, "learning_rate": 4.782739531845927e-05, "loss": 0.8598, "step": 14536 }, { "epoch": 2.70154246422598, "grad_norm": 0.950504720211029, "learning_rate": 4.781493245547154e-05, "loss": 0.8575, "step": 14537 }, { "epoch": 2.7017283032893515, "grad_norm": 0.7731863260269165, "learning_rate": 4.780247070628457e-05, "loss": 0.9419, "step": 14538 }, { "epoch": 2.7019141423527224, "grad_norm": 0.8756670951843262, "learning_rate": 4.7790010071164295e-05, "loss": 1.0453, "step": 14539 }, { "epoch": 2.7020999814160938, "grad_norm": 0.9157370924949646, "learning_rate": 4.777755055037665e-05, "loss": 0.8694, "step": 14540 }, { "epoch": 2.7022858204794646, "grad_norm": 0.8106825947761536, "learning_rate": 4.776509214418753e-05, "loss": 0.903, "step": 14541 }, { "epoch": 2.702471659542836, "grad_norm": 0.81466144323349, "learning_rate": 4.775263485286292e-05, "loss": 0.97, "step": 14542 }, { "epoch": 2.7026574986062073, "grad_norm": 0.7218373417854309, "learning_rate": 4.774017867666867e-05, "loss": 0.7417, "step": 14543 }, { "epoch": 2.702843337669578, "grad_norm": 0.6913556456565857, "learning_rate": 4.772772361587058e-05, "loss": 0.7463, "step": 14544 }, { "epoch": 2.703029176732949, "grad_norm": 0.776649534702301, "learning_rate": 4.771526967073452e-05, "loss": 0.9157, "step": 14545 }, { "epoch": 2.7032150157963204, "grad_norm": 0.7431927919387817, "learning_rate": 4.770281684152634e-05, "loss": 0.9396, "step": 14546 }, { "epoch": 2.7034008548596917, "grad_norm": 0.9437835216522217, "learning_rate": 4.76903651285118e-05, "loss": 0.7785, "step": 14547 }, { "epoch": 2.7035866939230626, "grad_norm": 0.7822114825248718, "learning_rate": 4.7677914531956645e-05, "loss": 0.9485, "step": 14548 }, { "epoch": 2.7037725329864335, "grad_norm": 0.7751176357269287, "learning_rate": 4.766546505212658e-05, 
"loss": 0.899, "step": 14549 }, { "epoch": 2.703958372049805, "grad_norm": 0.8520172834396362, "learning_rate": 4.76530166892874e-05, "loss": 0.8291, "step": 14550 }, { "epoch": 2.704144211113176, "grad_norm": 0.8314445614814758, "learning_rate": 4.764056944370473e-05, "loss": 0.6832, "step": 14551 }, { "epoch": 2.704330050176547, "grad_norm": 0.7597737908363342, "learning_rate": 4.762812331564422e-05, "loss": 0.9719, "step": 14552 }, { "epoch": 2.7045158892399184, "grad_norm": 0.8481391072273254, "learning_rate": 4.761567830537159e-05, "loss": 0.9278, "step": 14553 }, { "epoch": 2.7047017283032893, "grad_norm": 0.8097413182258606, "learning_rate": 4.760323441315237e-05, "loss": 0.8921, "step": 14554 }, { "epoch": 2.7048875673666606, "grad_norm": 0.6693623661994934, "learning_rate": 4.759079163925223e-05, "loss": 0.6959, "step": 14555 }, { "epoch": 2.7050734064300315, "grad_norm": 0.7500324249267578, "learning_rate": 4.757834998393671e-05, "loss": 0.7453, "step": 14556 }, { "epoch": 2.705259245493403, "grad_norm": 0.9771433472633362, "learning_rate": 4.7565909447471313e-05, "loss": 0.9639, "step": 14557 }, { "epoch": 2.7054450845567737, "grad_norm": 0.8323385119438171, "learning_rate": 4.7553470030121626e-05, "loss": 0.7888, "step": 14558 }, { "epoch": 2.705630923620145, "grad_norm": 0.7277812957763672, "learning_rate": 4.754103173215313e-05, "loss": 0.7471, "step": 14559 }, { "epoch": 2.705816762683516, "grad_norm": 0.8828470706939697, "learning_rate": 4.752859455383129e-05, "loss": 0.9009, "step": 14560 }, { "epoch": 2.7060026017468872, "grad_norm": 0.7129109501838684, "learning_rate": 4.75161584954215e-05, "loss": 0.7362, "step": 14561 }, { "epoch": 2.7061884408102586, "grad_norm": 0.7221847772598267, "learning_rate": 4.75037235571893e-05, "loss": 0.941, "step": 14562 }, { "epoch": 2.7063742798736294, "grad_norm": 0.9126443266868591, "learning_rate": 4.749128973940001e-05, "loss": 0.9747, "step": 14563 }, { "epoch": 2.7065601189370003, "grad_norm": 
1.0719454288482666, "learning_rate": 4.747885704231901e-05, "loss": 0.9455, "step": 14564 }, { "epoch": 2.7067459580003717, "grad_norm": 0.6202034950256348, "learning_rate": 4.746642546621167e-05, "loss": 0.7132, "step": 14565 }, { "epoch": 2.706931797063743, "grad_norm": 0.8351492881774902, "learning_rate": 4.745399501134338e-05, "loss": 0.968, "step": 14566 }, { "epoch": 2.707117636127114, "grad_norm": 0.8512027859687805, "learning_rate": 4.744156567797938e-05, "loss": 0.845, "step": 14567 }, { "epoch": 2.707303475190485, "grad_norm": 0.8470203876495361, "learning_rate": 4.7429137466384964e-05, "loss": 0.9657, "step": 14568 }, { "epoch": 2.707489314253856, "grad_norm": 0.7456303834915161, "learning_rate": 4.741671037682537e-05, "loss": 0.8667, "step": 14569 }, { "epoch": 2.7076751533172274, "grad_norm": 1.0743417739868164, "learning_rate": 4.740428440956588e-05, "loss": 1.013, "step": 14570 }, { "epoch": 2.7078609923805983, "grad_norm": 0.7952508330345154, "learning_rate": 4.739185956487169e-05, "loss": 0.846, "step": 14571 }, { "epoch": 2.7080468314439696, "grad_norm": 0.9333015084266663, "learning_rate": 4.737943584300793e-05, "loss": 0.6875, "step": 14572 }, { "epoch": 2.7082326705073405, "grad_norm": 0.8915261030197144, "learning_rate": 4.736701324423987e-05, "loss": 1.0861, "step": 14573 }, { "epoch": 2.708418509570712, "grad_norm": 0.8243820071220398, "learning_rate": 4.7354591768832566e-05, "loss": 0.8534, "step": 14574 }, { "epoch": 2.7086043486340827, "grad_norm": 0.88190758228302, "learning_rate": 4.734217141705114e-05, "loss": 1.1287, "step": 14575 }, { "epoch": 2.708790187697454, "grad_norm": 0.9980376362800598, "learning_rate": 4.732975218916072e-05, "loss": 0.8787, "step": 14576 }, { "epoch": 2.7089760267608254, "grad_norm": 0.8127431273460388, "learning_rate": 4.731733408542634e-05, "loss": 0.9969, "step": 14577 }, { "epoch": 2.7091618658241963, "grad_norm": 0.8898497819900513, "learning_rate": 4.7304917106113064e-05, "loss": 0.7366, "step": 14578 
}, { "epoch": 2.709347704887567, "grad_norm": 0.861444354057312, "learning_rate": 4.729250125148592e-05, "loss": 1.0436, "step": 14579 }, { "epoch": 2.7095335439509385, "grad_norm": 0.8095132112503052, "learning_rate": 4.7280086521809894e-05, "loss": 0.8042, "step": 14580 }, { "epoch": 2.70971938301431, "grad_norm": 0.8140536546707153, "learning_rate": 4.72676729173499e-05, "loss": 1.058, "step": 14581 }, { "epoch": 2.7099052220776807, "grad_norm": 0.8666590452194214, "learning_rate": 4.7255260438370984e-05, "loss": 1.0129, "step": 14582 }, { "epoch": 2.7100910611410516, "grad_norm": 0.8712512850761414, "learning_rate": 4.724284908513802e-05, "loss": 1.0081, "step": 14583 }, { "epoch": 2.710276900204423, "grad_norm": 0.736910343170166, "learning_rate": 4.723043885791586e-05, "loss": 0.5821, "step": 14584 }, { "epoch": 2.7104627392677942, "grad_norm": 0.7968629002571106, "learning_rate": 4.721802975696947e-05, "loss": 0.8978, "step": 14585 }, { "epoch": 2.710648578331165, "grad_norm": 0.762345552444458, "learning_rate": 4.7205621782563614e-05, "loss": 0.8701, "step": 14586 }, { "epoch": 2.7108344173945365, "grad_norm": 0.9853722453117371, "learning_rate": 4.7193214934963206e-05, "loss": 0.8083, "step": 14587 }, { "epoch": 2.7110202564579073, "grad_norm": 0.8430361151695251, "learning_rate": 4.718080921443301e-05, "loss": 0.9613, "step": 14588 }, { "epoch": 2.7112060955212787, "grad_norm": 0.7686344385147095, "learning_rate": 4.7168404621237746e-05, "loss": 0.9783, "step": 14589 }, { "epoch": 2.7113919345846496, "grad_norm": 0.7804996967315674, "learning_rate": 4.7156001155642273e-05, "loss": 1.1721, "step": 14590 }, { "epoch": 2.711577773648021, "grad_norm": 1.0992109775543213, "learning_rate": 4.714359881791127e-05, "loss": 1.1223, "step": 14591 }, { "epoch": 2.7117636127113918, "grad_norm": 0.7722237706184387, "learning_rate": 4.713119760830941e-05, "loss": 1.0712, "step": 14592 }, { "epoch": 2.711949451774763, "grad_norm": 0.7525615692138672, "learning_rate": 
4.7118797527101446e-05, "loss": 0.846, "step": 14593 }, { "epoch": 2.712135290838134, "grad_norm": 0.7216577529907227, "learning_rate": 4.710639857455199e-05, "loss": 0.711, "step": 14594 }, { "epoch": 2.7123211299015053, "grad_norm": 2.185736656188965, "learning_rate": 4.70940007509257e-05, "loss": 1.5017, "step": 14595 }, { "epoch": 2.7125069689648766, "grad_norm": 0.7739629149436951, "learning_rate": 4.708160405648713e-05, "loss": 0.8468, "step": 14596 }, { "epoch": 2.7126928080282475, "grad_norm": 0.8368583917617798, "learning_rate": 4.706920849150092e-05, "loss": 0.7949, "step": 14597 }, { "epoch": 2.7128786470916184, "grad_norm": 0.8572446703910828, "learning_rate": 4.705681405623165e-05, "loss": 0.9776, "step": 14598 }, { "epoch": 2.7130644861549897, "grad_norm": 0.926463782787323, "learning_rate": 4.704442075094383e-05, "loss": 0.9535, "step": 14599 }, { "epoch": 2.713250325218361, "grad_norm": 0.7697812914848328, "learning_rate": 4.703202857590198e-05, "loss": 1.0249, "step": 14600 }, { "epoch": 2.713436164281732, "grad_norm": 0.8938848376274109, "learning_rate": 4.701963753137053e-05, "loss": 0.8571, "step": 14601 }, { "epoch": 2.7136220033451033, "grad_norm": 0.7829850316047668, "learning_rate": 4.700724761761405e-05, "loss": 1.0279, "step": 14602 }, { "epoch": 2.713807842408474, "grad_norm": 0.8015586137771606, "learning_rate": 4.699485883489693e-05, "loss": 1.1516, "step": 14603 }, { "epoch": 2.7139936814718455, "grad_norm": 0.8683645725250244, "learning_rate": 4.6982471183483545e-05, "loss": 0.7275, "step": 14604 }, { "epoch": 2.7141795205352164, "grad_norm": 0.7575713992118835, "learning_rate": 4.697008466363837e-05, "loss": 0.9761, "step": 14605 }, { "epoch": 2.7143653595985877, "grad_norm": 0.7274417877197266, "learning_rate": 4.6957699275625744e-05, "loss": 1.0053, "step": 14606 }, { "epoch": 2.7145511986619586, "grad_norm": 0.9730561375617981, "learning_rate": 4.6945315019709956e-05, "loss": 1.051, "step": 14607 }, { "epoch": 2.71473703772533, 
"grad_norm": 0.7930693030357361, "learning_rate": 4.693293189615541e-05, "loss": 1.1249, "step": 14608 }, { "epoch": 2.714922876788701, "grad_norm": 0.877459704875946, "learning_rate": 4.6920549905226326e-05, "loss": 0.9589, "step": 14609 }, { "epoch": 2.715108715852072, "grad_norm": 0.6798363924026489, "learning_rate": 4.690816904718707e-05, "loss": 0.7238, "step": 14610 }, { "epoch": 2.7152945549154435, "grad_norm": 1.2394379377365112, "learning_rate": 4.689578932230182e-05, "loss": 1.0593, "step": 14611 }, { "epoch": 2.7154803939788144, "grad_norm": 0.9165498614311218, "learning_rate": 4.6883410730834785e-05, "loss": 1.0986, "step": 14612 }, { "epoch": 2.7156662330421852, "grad_norm": 0.9275784492492676, "learning_rate": 4.6871033273050235e-05, "loss": 0.8614, "step": 14613 }, { "epoch": 2.7158520721055566, "grad_norm": 0.7050016522407532, "learning_rate": 4.68586569492123e-05, "loss": 0.8167, "step": 14614 }, { "epoch": 2.716037911168928, "grad_norm": 0.8385757207870483, "learning_rate": 4.6846281759585144e-05, "loss": 0.9408, "step": 14615 }, { "epoch": 2.716223750232299, "grad_norm": 0.8483341932296753, "learning_rate": 4.683390770443284e-05, "loss": 0.9463, "step": 14616 }, { "epoch": 2.71640958929567, "grad_norm": 1.039903163909912, "learning_rate": 4.682153478401956e-05, "loss": 0.9554, "step": 14617 }, { "epoch": 2.716595428359041, "grad_norm": 0.9295901656150818, "learning_rate": 4.680916299860939e-05, "loss": 0.8416, "step": 14618 }, { "epoch": 2.7167812674224123, "grad_norm": 0.7862735986709595, "learning_rate": 4.6796792348466356e-05, "loss": 1.0495, "step": 14619 }, { "epoch": 2.716967106485783, "grad_norm": 0.8048015236854553, "learning_rate": 4.678442283385448e-05, "loss": 0.7554, "step": 14620 }, { "epoch": 2.7171529455491545, "grad_norm": 0.8309585452079773, "learning_rate": 4.677205445503775e-05, "loss": 0.9189, "step": 14621 }, { "epoch": 2.7173387846125254, "grad_norm": 0.7203308343887329, "learning_rate": 4.675968721228021e-05, "loss": 
1.0408, "step": 14622 }, { "epoch": 2.7175246236758968, "grad_norm": 0.8534119725227356, "learning_rate": 4.674732110584579e-05, "loss": 0.7916, "step": 14623 }, { "epoch": 2.7177104627392676, "grad_norm": 0.8853223323822021, "learning_rate": 4.673495613599836e-05, "loss": 1.0103, "step": 14624 }, { "epoch": 2.717896301802639, "grad_norm": 0.8163644671440125, "learning_rate": 4.6722592303001944e-05, "loss": 1.0153, "step": 14625 }, { "epoch": 2.7180821408660103, "grad_norm": 0.8186414241790771, "learning_rate": 4.671022960712035e-05, "loss": 0.8593, "step": 14626 }, { "epoch": 2.718267979929381, "grad_norm": 0.8188729286193848, "learning_rate": 4.6697868048617467e-05, "loss": 0.8212, "step": 14627 }, { "epoch": 2.718453818992752, "grad_norm": 0.8711169958114624, "learning_rate": 4.6685507627757074e-05, "loss": 1.0026, "step": 14628 }, { "epoch": 2.7186396580561234, "grad_norm": 0.8931276798248291, "learning_rate": 4.667314834480304e-05, "loss": 0.8006, "step": 14629 }, { "epoch": 2.7188254971194947, "grad_norm": 0.9605731964111328, "learning_rate": 4.6660790200019165e-05, "loss": 1.0555, "step": 14630 }, { "epoch": 2.7190113361828656, "grad_norm": 0.8598033785820007, "learning_rate": 4.664843319366921e-05, "loss": 0.8958, "step": 14631 }, { "epoch": 2.7191971752462365, "grad_norm": 0.8986445069313049, "learning_rate": 4.663607732601684e-05, "loss": 0.9925, "step": 14632 }, { "epoch": 2.719383014309608, "grad_norm": 0.7430914640426636, "learning_rate": 4.662372259732587e-05, "loss": 1.013, "step": 14633 }, { "epoch": 2.719568853372979, "grad_norm": 0.7768040299415588, "learning_rate": 4.661136900785993e-05, "loss": 0.6541, "step": 14634 }, { "epoch": 2.71975469243635, "grad_norm": 0.8109592795372009, "learning_rate": 4.6599016557882715e-05, "loss": 0.8924, "step": 14635 }, { "epoch": 2.7199405314997214, "grad_norm": 0.8567116260528564, "learning_rate": 4.65866652476578e-05, "loss": 0.5437, "step": 14636 }, { "epoch": 2.7201263705630923, "grad_norm": 
0.8742349743843079, "learning_rate": 4.6574315077448905e-05, "loss": 0.9358, "step": 14637 }, { "epoch": 2.7203122096264636, "grad_norm": 0.8746808767318726, "learning_rate": 4.656196604751954e-05, "loss": 0.9686, "step": 14638 }, { "epoch": 2.7204980486898345, "grad_norm": 0.9990546107292175, "learning_rate": 4.654961815813332e-05, "loss": 0.896, "step": 14639 }, { "epoch": 2.720683887753206, "grad_norm": 0.7727985382080078, "learning_rate": 4.653727140955379e-05, "loss": 0.9727, "step": 14640 }, { "epoch": 2.7208697268165767, "grad_norm": 0.6992377042770386, "learning_rate": 4.652492580204442e-05, "loss": 0.7702, "step": 14641 }, { "epoch": 2.721055565879948, "grad_norm": 0.8959800601005554, "learning_rate": 4.651258133586878e-05, "loss": 0.97, "step": 14642 }, { "epoch": 2.721241404943319, "grad_norm": 0.8036636710166931, "learning_rate": 4.6500238011290295e-05, "loss": 1.0769, "step": 14643 }, { "epoch": 2.7214272440066902, "grad_norm": 0.8741715550422668, "learning_rate": 4.648789582857239e-05, "loss": 1.1427, "step": 14644 }, { "epoch": 2.7216130830700616, "grad_norm": 0.8315646648406982, "learning_rate": 4.6475554787978556e-05, "loss": 0.8081, "step": 14645 }, { "epoch": 2.7217989221334324, "grad_norm": 0.9681646823883057, "learning_rate": 4.6463214889772154e-05, "loss": 0.8694, "step": 14646 }, { "epoch": 2.7219847611968033, "grad_norm": 0.8161425590515137, "learning_rate": 4.645087613421656e-05, "loss": 0.9062, "step": 14647 }, { "epoch": 2.7221706002601747, "grad_norm": 0.8496885299682617, "learning_rate": 4.643853852157507e-05, "loss": 0.767, "step": 14648 }, { "epoch": 2.722356439323546, "grad_norm": 1.285094141960144, "learning_rate": 4.6426202052111064e-05, "loss": 1.2346, "step": 14649 }, { "epoch": 2.722542278386917, "grad_norm": 0.8886316418647766, "learning_rate": 4.6413866726087884e-05, "loss": 0.7106, "step": 14650 }, { "epoch": 2.722728117450288, "grad_norm": 1.214694619178772, "learning_rate": 4.6401532543768766e-05, "loss": 0.8667, "step": 
14651 }, { "epoch": 2.722913956513659, "grad_norm": 0.8256045579910278, "learning_rate": 4.638919950541691e-05, "loss": 1.0791, "step": 14652 }, { "epoch": 2.7230997955770304, "grad_norm": 0.8462839126586914, "learning_rate": 4.6376867611295624e-05, "loss": 0.927, "step": 14653 }, { "epoch": 2.7232856346404013, "grad_norm": 0.8731264472007751, "learning_rate": 4.636453686166808e-05, "loss": 1.161, "step": 14654 }, { "epoch": 2.7234714737037726, "grad_norm": 1.0182912349700928, "learning_rate": 4.6352207256797464e-05, "loss": 0.9539, "step": 14655 }, { "epoch": 2.7236573127671435, "grad_norm": 0.8832132816314697, "learning_rate": 4.633987879694688e-05, "loss": 1.0213, "step": 14656 }, { "epoch": 2.723843151830515, "grad_norm": 0.8857216835021973, "learning_rate": 4.632755148237954e-05, "loss": 1.0354, "step": 14657 }, { "epoch": 2.7240289908938857, "grad_norm": 0.7460151314735413, "learning_rate": 4.63152253133585e-05, "loss": 0.8688, "step": 14658 }, { "epoch": 2.724214829957257, "grad_norm": 0.8710841536521912, "learning_rate": 4.630290029014681e-05, "loss": 0.8975, "step": 14659 }, { "epoch": 2.7244006690206284, "grad_norm": 0.7464547753334045, "learning_rate": 4.62905764130076e-05, "loss": 0.5561, "step": 14660 }, { "epoch": 2.7245865080839993, "grad_norm": 1.018832802772522, "learning_rate": 4.6278253682203844e-05, "loss": 0.9404, "step": 14661 }, { "epoch": 2.72477234714737, "grad_norm": 0.7565674781799316, "learning_rate": 4.6265932097998596e-05, "loss": 0.8725, "step": 14662 }, { "epoch": 2.7249581862107415, "grad_norm": 0.9171304106712341, "learning_rate": 4.625361166065483e-05, "loss": 0.8651, "step": 14663 }, { "epoch": 2.725144025274113, "grad_norm": 0.9045332074165344, "learning_rate": 4.624129237043544e-05, "loss": 0.9935, "step": 14664 }, { "epoch": 2.7253298643374837, "grad_norm": 0.8097667098045349, "learning_rate": 4.622897422760346e-05, "loss": 0.9527, "step": 14665 }, { "epoch": 2.7255157034008546, "grad_norm": 0.8182275891304016, 
"learning_rate": 4.6216657232421744e-05, "loss": 0.9202, "step": 14666 }, { "epoch": 2.725701542464226, "grad_norm": 0.9292824864387512, "learning_rate": 4.6204341385153186e-05, "loss": 0.7853, "step": 14667 }, { "epoch": 2.7258873815275972, "grad_norm": 0.900060772895813, "learning_rate": 4.6192026686060596e-05, "loss": 1.0285, "step": 14668 }, { "epoch": 2.726073220590968, "grad_norm": 0.8207162022590637, "learning_rate": 4.617971313540691e-05, "loss": 0.884, "step": 14669 }, { "epoch": 2.7262590596543395, "grad_norm": 0.6432229280471802, "learning_rate": 4.616740073345485e-05, "loss": 0.6324, "step": 14670 }, { "epoch": 2.7264448987177103, "grad_norm": 0.7894888520240784, "learning_rate": 4.615508948046726e-05, "loss": 0.8774, "step": 14671 }, { "epoch": 2.7266307377810817, "grad_norm": 0.7836933135986328, "learning_rate": 4.614277937670687e-05, "loss": 0.7802, "step": 14672 }, { "epoch": 2.7268165768444526, "grad_norm": 0.7634031176567078, "learning_rate": 4.613047042243646e-05, "loss": 0.7226, "step": 14673 }, { "epoch": 2.727002415907824, "grad_norm": 0.7131878733634949, "learning_rate": 4.6118162617918705e-05, "loss": 0.8122, "step": 14674 }, { "epoch": 2.727188254971195, "grad_norm": 0.6382018327713013, "learning_rate": 4.610585596341631e-05, "loss": 0.7506, "step": 14675 }, { "epoch": 2.727374094034566, "grad_norm": 0.8915993571281433, "learning_rate": 4.60935504591919e-05, "loss": 0.8766, "step": 14676 }, { "epoch": 2.727559933097937, "grad_norm": 0.7661280035972595, "learning_rate": 4.608124610550818e-05, "loss": 1.0099, "step": 14677 }, { "epoch": 2.7277457721613083, "grad_norm": 0.9038292765617371, "learning_rate": 4.606894290262774e-05, "loss": 1.0057, "step": 14678 }, { "epoch": 2.7279316112246796, "grad_norm": 0.833634078502655, "learning_rate": 4.605664085081316e-05, "loss": 0.7077, "step": 14679 }, { "epoch": 2.7281174502880505, "grad_norm": 0.7558378577232361, "learning_rate": 4.6044339950326944e-05, "loss": 0.5531, "step": 14680 }, { "epoch": 
2.7283032893514214, "grad_norm": 0.877690315246582, "learning_rate": 4.603204020143176e-05, "loss": 0.8854, "step": 14681 }, { "epoch": 2.7284891284147927, "grad_norm": 0.8856509923934937, "learning_rate": 4.6019741604390075e-05, "loss": 0.7802, "step": 14682 }, { "epoch": 2.728674967478164, "grad_norm": 0.7319670915603638, "learning_rate": 4.600744415946438e-05, "loss": 1.0037, "step": 14683 }, { "epoch": 2.728860806541535, "grad_norm": 0.8687145709991455, "learning_rate": 4.599514786691708e-05, "loss": 1.1824, "step": 14684 }, { "epoch": 2.7290466456049063, "grad_norm": 0.8814767599105835, "learning_rate": 4.5982852727010715e-05, "loss": 1.0184, "step": 14685 }, { "epoch": 2.729232484668277, "grad_norm": 0.7699080109596252, "learning_rate": 4.5970558740007666e-05, "loss": 0.9656, "step": 14686 }, { "epoch": 2.7294183237316485, "grad_norm": 0.6471914052963257, "learning_rate": 4.5958265906170315e-05, "loss": 0.6988, "step": 14687 }, { "epoch": 2.7296041627950194, "grad_norm": 0.8503636121749878, "learning_rate": 4.594597422576101e-05, "loss": 0.7991, "step": 14688 }, { "epoch": 2.7297900018583907, "grad_norm": 0.8441890478134155, "learning_rate": 4.593368369904216e-05, "loss": 0.9067, "step": 14689 }, { "epoch": 2.7299758409217616, "grad_norm": 0.7589361071586609, "learning_rate": 4.592139432627605e-05, "loss": 0.8271, "step": 14690 }, { "epoch": 2.730161679985133, "grad_norm": 0.6977306604385376, "learning_rate": 4.590910610772493e-05, "loss": 0.76, "step": 14691 }, { "epoch": 2.730347519048504, "grad_norm": 0.8234983086585999, "learning_rate": 4.589681904365113e-05, "loss": 0.8081, "step": 14692 }, { "epoch": 2.730533358111875, "grad_norm": 0.7815526127815247, "learning_rate": 4.588453313431691e-05, "loss": 0.7782, "step": 14693 }, { "epoch": 2.7307191971752465, "grad_norm": 0.8292824029922485, "learning_rate": 4.5872248379984464e-05, "loss": 0.6498, "step": 14694 }, { "epoch": 2.7309050362386174, "grad_norm": 0.8337152600288391, "learning_rate": 
4.5859964780916e-05, "loss": 0.9102, "step": 14695 }, { "epoch": 2.7310908753019882, "grad_norm": 0.9309200048446655, "learning_rate": 4.584768233737363e-05, "loss": 1.03, "step": 14696 }, { "epoch": 2.7312767143653596, "grad_norm": 0.779767632484436, "learning_rate": 4.58354010496196e-05, "loss": 0.9953, "step": 14697 }, { "epoch": 2.731462553428731, "grad_norm": 0.8021158576011658, "learning_rate": 4.582312091791597e-05, "loss": 0.8515, "step": 14698 }, { "epoch": 2.731648392492102, "grad_norm": 0.9271323084831238, "learning_rate": 4.5810841942524864e-05, "loss": 0.8251, "step": 14699 }, { "epoch": 2.731834231555473, "grad_norm": 0.8800750970840454, "learning_rate": 4.579856412370831e-05, "loss": 0.8703, "step": 14700 }, { "epoch": 2.732020070618844, "grad_norm": 0.8031773567199707, "learning_rate": 4.578628746172838e-05, "loss": 0.8584, "step": 14701 }, { "epoch": 2.7322059096822153, "grad_norm": 0.8233345150947571, "learning_rate": 4.577401195684715e-05, "loss": 1.0454, "step": 14702 }, { "epoch": 2.732391748745586, "grad_norm": 0.8056541681289673, "learning_rate": 4.576173760932658e-05, "loss": 1.0582, "step": 14703 }, { "epoch": 2.7325775878089575, "grad_norm": 0.9316117763519287, "learning_rate": 4.574946441942861e-05, "loss": 1.0211, "step": 14704 }, { "epoch": 2.7327634268723284, "grad_norm": 0.7821735739707947, "learning_rate": 4.573719238741526e-05, "loss": 0.6886, "step": 14705 }, { "epoch": 2.7329492659356998, "grad_norm": 0.8058688640594482, "learning_rate": 4.572492151354842e-05, "loss": 1.0934, "step": 14706 }, { "epoch": 2.7331351049990706, "grad_norm": 0.8328232765197754, "learning_rate": 4.571265179808999e-05, "loss": 0.8818, "step": 14707 }, { "epoch": 2.733320944062442, "grad_norm": 0.7413308620452881, "learning_rate": 4.570038324130181e-05, "loss": 1.0653, "step": 14708 }, { "epoch": 2.7335067831258133, "grad_norm": 0.8222902417182922, "learning_rate": 4.568811584344581e-05, "loss": 1.0763, "step": 14709 }, { "epoch": 2.733692622189184, 
"grad_norm": 0.8213867545127869, "learning_rate": 4.567584960478377e-05, "loss": 0.8748, "step": 14710 }, { "epoch": 2.733878461252555, "grad_norm": 0.9049264788627625, "learning_rate": 4.56635845255775e-05, "loss": 1.0071, "step": 14711 }, { "epoch": 2.7340643003159264, "grad_norm": 0.9140104651451111, "learning_rate": 4.565132060608869e-05, "loss": 0.9517, "step": 14712 }, { "epoch": 2.7342501393792977, "grad_norm": 0.8153535723686218, "learning_rate": 4.5639057846579246e-05, "loss": 0.8449, "step": 14713 }, { "epoch": 2.7344359784426686, "grad_norm": 0.9286594390869141, "learning_rate": 4.562679624731084e-05, "loss": 0.8305, "step": 14714 }, { "epoch": 2.7346218175060395, "grad_norm": 0.7901486158370972, "learning_rate": 4.561453580854516e-05, "loss": 0.7905, "step": 14715 }, { "epoch": 2.734807656569411, "grad_norm": 0.7989631295204163, "learning_rate": 4.560227653054383e-05, "loss": 0.7111, "step": 14716 }, { "epoch": 2.734993495632782, "grad_norm": 0.8573057651519775, "learning_rate": 4.559001841356861e-05, "loss": 1.0795, "step": 14717 }, { "epoch": 2.735179334696153, "grad_norm": 0.9220576286315918, "learning_rate": 4.557776145788108e-05, "loss": 1.1402, "step": 14718 }, { "epoch": 2.7353651737595244, "grad_norm": 0.8109697699546814, "learning_rate": 4.556550566374283e-05, "loss": 1.1164, "step": 14719 }, { "epoch": 2.7355510128228953, "grad_norm": 0.8072250485420227, "learning_rate": 4.55532510314154e-05, "loss": 0.8904, "step": 14720 }, { "epoch": 2.7357368518862666, "grad_norm": 0.8466371893882751, "learning_rate": 4.5540997561160434e-05, "loss": 0.9383, "step": 14721 }, { "epoch": 2.7359226909496375, "grad_norm": 0.8313689231872559, "learning_rate": 4.552874525323939e-05, "loss": 0.8661, "step": 14722 }, { "epoch": 2.736108530013009, "grad_norm": 0.724134087562561, "learning_rate": 4.551649410791384e-05, "loss": 0.7092, "step": 14723 }, { "epoch": 2.73629436907638, "grad_norm": 0.7286176681518555, "learning_rate": 4.55042441254452e-05, "loss": 0.848, 
"step": 14724 }, { "epoch": 2.736480208139751, "grad_norm": 0.8972499966621399, "learning_rate": 4.549199530609497e-05, "loss": 1.1611, "step": 14725 }, { "epoch": 2.736666047203122, "grad_norm": 0.8000126481056213, "learning_rate": 4.547974765012456e-05, "loss": 0.8529, "step": 14726 }, { "epoch": 2.7368518862664932, "grad_norm": 0.7437418699264526, "learning_rate": 4.546750115779538e-05, "loss": 0.873, "step": 14727 }, { "epoch": 2.7370377253298646, "grad_norm": 0.7979970574378967, "learning_rate": 4.545525582936877e-05, "loss": 0.8908, "step": 14728 }, { "epoch": 2.7372235643932354, "grad_norm": 0.9213851690292358, "learning_rate": 4.544301166510616e-05, "loss": 0.9213, "step": 14729 }, { "epoch": 2.7374094034566063, "grad_norm": 0.9173702001571655, "learning_rate": 4.543076866526884e-05, "loss": 0.886, "step": 14730 }, { "epoch": 2.7375952425199777, "grad_norm": 0.7364941835403442, "learning_rate": 4.541852683011812e-05, "loss": 0.9047, "step": 14731 }, { "epoch": 2.737781081583349, "grad_norm": 0.8463900089263916, "learning_rate": 4.540628615991525e-05, "loss": 0.82, "step": 14732 }, { "epoch": 2.73796692064672, "grad_norm": 0.7539997100830078, "learning_rate": 4.539404665492151e-05, "loss": 0.8949, "step": 14733 }, { "epoch": 2.738152759710091, "grad_norm": 0.8457027077674866, "learning_rate": 4.538180831539817e-05, "loss": 1.0184, "step": 14734 }, { "epoch": 2.738338598773462, "grad_norm": 1.0152959823608398, "learning_rate": 4.5369571141606406e-05, "loss": 0.9442, "step": 14735 }, { "epoch": 2.7385244378368334, "grad_norm": 0.7157995104789734, "learning_rate": 4.5357335133807356e-05, "loss": 0.882, "step": 14736 }, { "epoch": 2.7387102769002043, "grad_norm": 0.8495806455612183, "learning_rate": 4.5345100292262254e-05, "loss": 0.8718, "step": 14737 }, { "epoch": 2.7388961159635756, "grad_norm": 0.876046895980835, "learning_rate": 4.533286661723219e-05, "loss": 0.9261, "step": 14738 }, { "epoch": 2.7390819550269465, "grad_norm": 0.7684153914451599, 
"learning_rate": 4.532063410897829e-05, "loss": 0.9394, "step": 14739 }, { "epoch": 2.739267794090318, "grad_norm": 0.8016115427017212, "learning_rate": 4.5308402767761573e-05, "loss": 0.8162, "step": 14740 }, { "epoch": 2.7394536331536887, "grad_norm": 0.7203810811042786, "learning_rate": 4.5296172593843177e-05, "loss": 0.7493, "step": 14741 }, { "epoch": 2.73963947221706, "grad_norm": 0.8666025996208191, "learning_rate": 4.5283943587484115e-05, "loss": 0.9386, "step": 14742 }, { "epoch": 2.7398253112804314, "grad_norm": 0.8360776901245117, "learning_rate": 4.527171574894534e-05, "loss": 0.812, "step": 14743 }, { "epoch": 2.7400111503438023, "grad_norm": 0.817794919013977, "learning_rate": 4.525948907848787e-05, "loss": 1.0972, "step": 14744 }, { "epoch": 2.740196989407173, "grad_norm": 0.8460525870323181, "learning_rate": 4.524726357637271e-05, "loss": 1.0712, "step": 14745 }, { "epoch": 2.7403828284705445, "grad_norm": 0.943310022354126, "learning_rate": 4.523503924286074e-05, "loss": 0.9896, "step": 14746 }, { "epoch": 2.740568667533916, "grad_norm": 0.7673540115356445, "learning_rate": 4.522281607821288e-05, "loss": 0.8957, "step": 14747 }, { "epoch": 2.7407545065972867, "grad_norm": 0.9330706000328064, "learning_rate": 4.5210594082689986e-05, "loss": 0.9737, "step": 14748 }, { "epoch": 2.740940345660658, "grad_norm": 0.9254998564720154, "learning_rate": 4.519837325655296e-05, "loss": 0.5841, "step": 14749 }, { "epoch": 2.741126184724029, "grad_norm": 0.7357127070426941, "learning_rate": 4.518615360006262e-05, "loss": 0.8598, "step": 14750 }, { "epoch": 2.7413120237874002, "grad_norm": 0.8163831233978271, "learning_rate": 4.517393511347976e-05, "loss": 1.0535, "step": 14751 }, { "epoch": 2.741497862850771, "grad_norm": 0.8442891836166382, "learning_rate": 4.516171779706514e-05, "loss": 0.9516, "step": 14752 }, { "epoch": 2.7416837019141425, "grad_norm": 0.9410925507545471, "learning_rate": 4.514950165107959e-05, "loss": 0.9114, "step": 14753 }, { "epoch": 
2.7418695409775133, "grad_norm": 0.6992850303649902, "learning_rate": 4.513728667578375e-05, "loss": 0.9611, "step": 14754 }, { "epoch": 2.7420553800408847, "grad_norm": 0.894448459148407, "learning_rate": 4.512507287143842e-05, "loss": 0.9718, "step": 14755 }, { "epoch": 2.7422412191042556, "grad_norm": 1.4124360084533691, "learning_rate": 4.511286023830421e-05, "loss": 1.4758, "step": 14756 }, { "epoch": 2.742427058167627, "grad_norm": 0.8501431941986084, "learning_rate": 4.5100648776641844e-05, "loss": 0.6057, "step": 14757 }, { "epoch": 2.742612897230998, "grad_norm": 0.7781663537025452, "learning_rate": 4.5088438486711905e-05, "loss": 0.831, "step": 14758 }, { "epoch": 2.742798736294369, "grad_norm": 0.8618481755256653, "learning_rate": 4.5076229368775024e-05, "loss": 0.7601, "step": 14759 }, { "epoch": 2.74298457535774, "grad_norm": 0.7881844639778137, "learning_rate": 4.506402142309174e-05, "loss": 0.9385, "step": 14760 }, { "epoch": 2.7431704144211113, "grad_norm": 0.9128740429878235, "learning_rate": 4.5051814649922674e-05, "loss": 0.9802, "step": 14761 }, { "epoch": 2.7433562534844826, "grad_norm": 0.892898440361023, "learning_rate": 4.5039609049528333e-05, "loss": 0.8683, "step": 14762 }, { "epoch": 2.7435420925478535, "grad_norm": 0.8797706365585327, "learning_rate": 4.502740462216919e-05, "loss": 1.0483, "step": 14763 }, { "epoch": 2.7437279316112244, "grad_norm": 0.8321303725242615, "learning_rate": 4.501520136810575e-05, "loss": 0.9829, "step": 14764 }, { "epoch": 2.7439137706745957, "grad_norm": 1.2347742319107056, "learning_rate": 4.5002999287598524e-05, "loss": 1.357, "step": 14765 }, { "epoch": 2.744099609737967, "grad_norm": 0.7855826020240784, "learning_rate": 4.4990798380907895e-05, "loss": 0.8295, "step": 14766 }, { "epoch": 2.744285448801338, "grad_norm": 0.7561647891998291, "learning_rate": 4.497859864829427e-05, "loss": 0.879, "step": 14767 }, { "epoch": 2.7444712878647093, "grad_norm": 0.9717286229133606, "learning_rate": 
4.4966400090018e-05, "loss": 0.7701, "step": 14768 }, { "epoch": 2.74465712692808, "grad_norm": 0.8584936857223511, "learning_rate": 4.495420270633953e-05, "loss": 0.7957, "step": 14769 }, { "epoch": 2.7448429659914515, "grad_norm": 0.7288717031478882, "learning_rate": 4.494200649751913e-05, "loss": 0.8592, "step": 14770 }, { "epoch": 2.7450288050548224, "grad_norm": 1.1725331544876099, "learning_rate": 4.492981146381712e-05, "loss": 0.9022, "step": 14771 }, { "epoch": 2.7452146441181937, "grad_norm": 0.8128632307052612, "learning_rate": 4.491761760549374e-05, "loss": 0.7259, "step": 14772 }, { "epoch": 2.7454004831815646, "grad_norm": 0.9310014843940735, "learning_rate": 4.4905424922809326e-05, "loss": 0.8722, "step": 14773 }, { "epoch": 2.745586322244936, "grad_norm": 0.790547788143158, "learning_rate": 4.489323341602407e-05, "loss": 1.0479, "step": 14774 }, { "epoch": 2.745772161308307, "grad_norm": 0.8988275527954102, "learning_rate": 4.488104308539814e-05, "loss": 0.9611, "step": 14775 }, { "epoch": 2.745958000371678, "grad_norm": 0.7555850744247437, "learning_rate": 4.486885393119177e-05, "loss": 0.9676, "step": 14776 }, { "epoch": 2.7461438394350495, "grad_norm": 0.7913243770599365, "learning_rate": 4.485666595366512e-05, "loss": 0.8562, "step": 14777 }, { "epoch": 2.7463296784984204, "grad_norm": 0.8432217836380005, "learning_rate": 4.4844479153078325e-05, "loss": 1.0425, "step": 14778 }, { "epoch": 2.7465155175617912, "grad_norm": 0.7686410546302795, "learning_rate": 4.483229352969146e-05, "loss": 0.9279, "step": 14779 }, { "epoch": 2.7467013566251626, "grad_norm": 0.8736889362335205, "learning_rate": 4.482010908376459e-05, "loss": 1.0946, "step": 14780 }, { "epoch": 2.746887195688534, "grad_norm": 0.8477604389190674, "learning_rate": 4.480792581555783e-05, "loss": 0.9413, "step": 14781 }, { "epoch": 2.747073034751905, "grad_norm": 0.881502628326416, "learning_rate": 4.4795743725331185e-05, "loss": 0.757, "step": 14782 }, { "epoch": 2.747258873815276, 
"grad_norm": 0.7617281675338745, "learning_rate": 4.4783562813344616e-05, "loss": 0.7723, "step": 14783 }, { "epoch": 2.747444712878647, "grad_norm": 0.7542743682861328, "learning_rate": 4.4771383079858175e-05, "loss": 0.8692, "step": 14784 }, { "epoch": 2.7476305519420183, "grad_norm": 0.8307807445526123, "learning_rate": 4.475920452513176e-05, "loss": 0.7277, "step": 14785 }, { "epoch": 2.747816391005389, "grad_norm": 0.954990804195404, "learning_rate": 4.474702714942536e-05, "loss": 0.968, "step": 14786 }, { "epoch": 2.7480022300687605, "grad_norm": 0.774532675743103, "learning_rate": 4.473485095299884e-05, "loss": 0.8424, "step": 14787 }, { "epoch": 2.7481880691321314, "grad_norm": 0.8852267265319824, "learning_rate": 4.472267593611206e-05, "loss": 1.0926, "step": 14788 }, { "epoch": 2.7483739081955028, "grad_norm": 0.7718761563301086, "learning_rate": 4.471050209902493e-05, "loss": 1.0268, "step": 14789 }, { "epoch": 2.7485597472588736, "grad_norm": 0.897894561290741, "learning_rate": 4.469832944199727e-05, "loss": 0.8493, "step": 14790 }, { "epoch": 2.748745586322245, "grad_norm": 0.7675634622573853, "learning_rate": 4.4686157965288845e-05, "loss": 0.9872, "step": 14791 }, { "epoch": 2.7489314253856163, "grad_norm": 0.7812278866767883, "learning_rate": 4.4673987669159434e-05, "loss": 0.9965, "step": 14792 }, { "epoch": 2.749117264448987, "grad_norm": 0.9022442698478699, "learning_rate": 4.4661818553868836e-05, "loss": 0.7828, "step": 14793 }, { "epoch": 2.749303103512358, "grad_norm": 0.8825806379318237, "learning_rate": 4.464965061967677e-05, "loss": 0.859, "step": 14794 }, { "epoch": 2.7494889425757294, "grad_norm": 1.4941855669021606, "learning_rate": 4.463748386684289e-05, "loss": 1.2713, "step": 14795 }, { "epoch": 2.7496747816391007, "grad_norm": 0.8661191463470459, "learning_rate": 4.46253182956269e-05, "loss": 0.9672, "step": 14796 }, { "epoch": 2.7498606207024716, "grad_norm": 0.7032533288002014, "learning_rate": 4.4613153906288506e-05, "loss": 
0.866, "step": 14797 }, { "epoch": 2.750046459765843, "grad_norm": 0.8711912631988525, "learning_rate": 4.4600990699087294e-05, "loss": 0.8178, "step": 14798 }, { "epoch": 2.750232298829214, "grad_norm": 0.6879599094390869, "learning_rate": 4.458882867428288e-05, "loss": 0.784, "step": 14799 }, { "epoch": 2.750418137892585, "grad_norm": 0.8086994290351868, "learning_rate": 4.4576667832134775e-05, "loss": 0.811, "step": 14800 }, { "epoch": 2.750603976955956, "grad_norm": 0.7266994714736938, "learning_rate": 4.456450817290263e-05, "loss": 0.8126, "step": 14801 }, { "epoch": 2.7507898160193274, "grad_norm": 1.0289912223815918, "learning_rate": 4.455234969684592e-05, "loss": 0.8967, "step": 14802 }, { "epoch": 2.7509756550826983, "grad_norm": 1.1989860534667969, "learning_rate": 4.454019240422412e-05, "loss": 1.0713, "step": 14803 }, { "epoch": 2.7511614941460696, "grad_norm": 0.8081581592559814, "learning_rate": 4.452803629529678e-05, "loss": 0.7533, "step": 14804 }, { "epoch": 2.7513473332094405, "grad_norm": 0.8733363151550293, "learning_rate": 4.4515881370323306e-05, "loss": 0.914, "step": 14805 }, { "epoch": 2.751533172272812, "grad_norm": 0.8047726154327393, "learning_rate": 4.45037276295631e-05, "loss": 1.0163, "step": 14806 }, { "epoch": 2.751533172272812, "eval_loss": 1.016861081123352, "eval_runtime": 23.1696, "eval_samples_per_second": 47.131, "eval_steps_per_second": 23.565, "step": 14806 }, { "epoch": 2.751719011336183, "grad_norm": 0.8058808445930481, "learning_rate": 4.4491575073275625e-05, "loss": 0.9496, "step": 14807 }, { "epoch": 2.751904850399554, "grad_norm": 0.9865077137947083, "learning_rate": 4.447942370172019e-05, "loss": 0.73, "step": 14808 }, { "epoch": 2.752090689462925, "grad_norm": 1.0397051572799683, "learning_rate": 4.4467273515156216e-05, "loss": 0.9428, "step": 14809 }, { "epoch": 2.7522765285262962, "grad_norm": 1.0134010314941406, "learning_rate": 4.445512451384298e-05, "loss": 0.9326, "step": 14810 }, { "epoch": 2.7524623675896676, 
"grad_norm": 0.6431830525398254, "learning_rate": 4.444297669803981e-05, "loss": 0.6813, "step": 14811 }, { "epoch": 2.7526482066530384, "grad_norm": 0.7811623215675354, "learning_rate": 4.4430830068005914e-05, "loss": 0.8558, "step": 14812 }, { "epoch": 2.7528340457164093, "grad_norm": 0.8130550980567932, "learning_rate": 4.4418684624000626e-05, "loss": 0.8384, "step": 14813 }, { "epoch": 2.7530198847797807, "grad_norm": 0.9185358881950378, "learning_rate": 4.4406540366283134e-05, "loss": 1.1363, "step": 14814 }, { "epoch": 2.753205723843152, "grad_norm": 0.9589024186134338, "learning_rate": 4.43943972951126e-05, "loss": 0.799, "step": 14815 }, { "epoch": 2.753391562906523, "grad_norm": 0.8474277853965759, "learning_rate": 4.438225541074827e-05, "loss": 0.8946, "step": 14816 }, { "epoch": 2.753577401969894, "grad_norm": 1.0162084102630615, "learning_rate": 4.437011471344922e-05, "loss": 0.8196, "step": 14817 }, { "epoch": 2.753763241033265, "grad_norm": 0.7180842161178589, "learning_rate": 4.4357975203474644e-05, "loss": 0.9129, "step": 14818 }, { "epoch": 2.7539490800966364, "grad_norm": 0.729688286781311, "learning_rate": 4.43458368810836e-05, "loss": 0.903, "step": 14819 }, { "epoch": 2.7541349191600073, "grad_norm": 0.9457212090492249, "learning_rate": 4.433369974653512e-05, "loss": 1.0448, "step": 14820 }, { "epoch": 2.7543207582233786, "grad_norm": 0.7897746562957764, "learning_rate": 4.432156380008834e-05, "loss": 0.8628, "step": 14821 }, { "epoch": 2.7545065972867495, "grad_norm": 0.8315415978431702, "learning_rate": 4.430942904200223e-05, "loss": 1.0471, "step": 14822 }, { "epoch": 2.754692436350121, "grad_norm": 0.8469882011413574, "learning_rate": 4.429729547253574e-05, "loss": 0.8774, "step": 14823 }, { "epoch": 2.7548782754134917, "grad_norm": 0.785725474357605, "learning_rate": 4.428516309194793e-05, "loss": 1.0214, "step": 14824 }, { "epoch": 2.755064114476863, "grad_norm": 0.8006011843681335, "learning_rate": 4.427303190049771e-05, "loss": 1.0044, 
"step": 14825 }, { "epoch": 2.7552499535402344, "grad_norm": 0.6976585388183594, "learning_rate": 4.426090189844398e-05, "loss": 0.8216, "step": 14826 }, { "epoch": 2.7554357926036053, "grad_norm": 1.048567533493042, "learning_rate": 4.424877308604563e-05, "loss": 1.1218, "step": 14827 }, { "epoch": 2.755621631666976, "grad_norm": 0.8750372529029846, "learning_rate": 4.423664546356152e-05, "loss": 0.9318, "step": 14828 }, { "epoch": 2.7558074707303475, "grad_norm": 0.7322916984558105, "learning_rate": 4.422451903125058e-05, "loss": 0.7277, "step": 14829 }, { "epoch": 2.755993309793719, "grad_norm": 0.9944577813148499, "learning_rate": 4.421239378937156e-05, "loss": 1.1098, "step": 14830 }, { "epoch": 2.7561791488570897, "grad_norm": 0.8471137881278992, "learning_rate": 4.420026973818321e-05, "loss": 1.0242, "step": 14831 }, { "epoch": 2.756364987920461, "grad_norm": 0.8469953536987305, "learning_rate": 4.418814687794439e-05, "loss": 0.9368, "step": 14832 }, { "epoch": 2.756550826983832, "grad_norm": 0.7500290274620056, "learning_rate": 4.41760252089138e-05, "loss": 0.5646, "step": 14833 }, { "epoch": 2.7567366660472032, "grad_norm": 0.9665776491165161, "learning_rate": 4.4163904731350144e-05, "loss": 0.9747, "step": 14834 }, { "epoch": 2.756922505110574, "grad_norm": 0.6673862934112549, "learning_rate": 4.415178544551211e-05, "loss": 0.6564, "step": 14835 }, { "epoch": 2.7571083441739455, "grad_norm": 0.7555088400840759, "learning_rate": 4.413966735165839e-05, "loss": 1.0257, "step": 14836 }, { "epoch": 2.7572941832373163, "grad_norm": 0.8664906024932861, "learning_rate": 4.4127550450047616e-05, "loss": 0.8143, "step": 14837 }, { "epoch": 2.7574800223006877, "grad_norm": 0.9580231308937073, "learning_rate": 4.4115434740938365e-05, "loss": 1.1836, "step": 14838 }, { "epoch": 2.7576658613640586, "grad_norm": 1.0043007135391235, "learning_rate": 4.41033202245893e-05, "loss": 0.8902, "step": 14839 }, { "epoch": 2.75785170042743, "grad_norm": 0.8290864825248718, 
"learning_rate": 4.40912069012589e-05, "loss": 0.9894, "step": 14840 }, { "epoch": 2.758037539490801, "grad_norm": 1.0630974769592285, "learning_rate": 4.407909477120578e-05, "loss": 0.7752, "step": 14841 }, { "epoch": 2.758223378554172, "grad_norm": 0.760078489780426, "learning_rate": 4.4066983834688415e-05, "loss": 0.8939, "step": 14842 }, { "epoch": 2.758409217617543, "grad_norm": 1.1572614908218384, "learning_rate": 4.405487409196526e-05, "loss": 1.0579, "step": 14843 }, { "epoch": 2.7585950566809143, "grad_norm": 0.8342961668968201, "learning_rate": 4.404276554329485e-05, "loss": 0.9826, "step": 14844 }, { "epoch": 2.7587808957442856, "grad_norm": 0.9802063703536987, "learning_rate": 4.403065818893558e-05, "loss": 1.1177, "step": 14845 }, { "epoch": 2.7589667348076565, "grad_norm": 0.8134041428565979, "learning_rate": 4.401855202914586e-05, "loss": 0.9372, "step": 14846 }, { "epoch": 2.7591525738710274, "grad_norm": 0.7945948243141174, "learning_rate": 4.400644706418405e-05, "loss": 0.9862, "step": 14847 }, { "epoch": 2.7593384129343987, "grad_norm": 0.7153193354606628, "learning_rate": 4.399434329430854e-05, "loss": 0.7187, "step": 14848 }, { "epoch": 2.75952425199777, "grad_norm": 0.9038686752319336, "learning_rate": 4.39822407197777e-05, "loss": 0.7832, "step": 14849 }, { "epoch": 2.759710091061141, "grad_norm": 0.7421132326126099, "learning_rate": 4.397013934084979e-05, "loss": 0.8534, "step": 14850 }, { "epoch": 2.7598959301245123, "grad_norm": 0.8442568778991699, "learning_rate": 4.395803915778308e-05, "loss": 0.6722, "step": 14851 }, { "epoch": 2.760081769187883, "grad_norm": 0.9152247309684753, "learning_rate": 4.3945940170835885e-05, "loss": 0.9758, "step": 14852 }, { "epoch": 2.7602676082512545, "grad_norm": 0.8400849103927612, "learning_rate": 4.393384238026641e-05, "loss": 0.9794, "step": 14853 }, { "epoch": 2.7604534473146254, "grad_norm": 0.7551645636558533, "learning_rate": 4.3921745786332845e-05, "loss": 0.9922, "step": 14854 }, { "epoch": 
2.7606392863779967, "grad_norm": 0.8360217213630676, "learning_rate": 4.3909650389293355e-05, "loss": 1.1151, "step": 14855 }, { "epoch": 2.760825125441368, "grad_norm": 0.9101220965385437, "learning_rate": 4.389755618940615e-05, "loss": 1.1536, "step": 14856 }, { "epoch": 2.761010964504739, "grad_norm": 0.7217719554901123, "learning_rate": 4.388546318692934e-05, "loss": 0.6163, "step": 14857 }, { "epoch": 2.76119680356811, "grad_norm": 0.9773128032684326, "learning_rate": 4.387337138212102e-05, "loss": 1.1546, "step": 14858 }, { "epoch": 2.761382642631481, "grad_norm": 0.7525717616081238, "learning_rate": 4.386128077523923e-05, "loss": 0.7095, "step": 14859 }, { "epoch": 2.7615684816948525, "grad_norm": 0.8013903498649597, "learning_rate": 4.384919136654207e-05, "loss": 1.0295, "step": 14860 }, { "epoch": 2.7617543207582234, "grad_norm": 1.510123372077942, "learning_rate": 4.3837103156287596e-05, "loss": 1.1998, "step": 14861 }, { "epoch": 2.7619401598215942, "grad_norm": 0.8579853773117065, "learning_rate": 4.382501614473378e-05, "loss": 1.1587, "step": 14862 }, { "epoch": 2.7621259988849656, "grad_norm": 0.8998013138771057, "learning_rate": 4.3812930332138557e-05, "loss": 0.9156, "step": 14863 }, { "epoch": 2.762311837948337, "grad_norm": 1.0269873142242432, "learning_rate": 4.3800845718759944e-05, "loss": 1.0224, "step": 14864 }, { "epoch": 2.762497677011708, "grad_norm": 0.7870360016822815, "learning_rate": 4.3788762304855844e-05, "loss": 1.1499, "step": 14865 }, { "epoch": 2.762683516075079, "grad_norm": 0.8245033025741577, "learning_rate": 4.377668009068415e-05, "loss": 0.9342, "step": 14866 }, { "epoch": 2.76286935513845, "grad_norm": 0.6947269439697266, "learning_rate": 4.3764599076502696e-05, "loss": 0.8029, "step": 14867 }, { "epoch": 2.7630551942018213, "grad_norm": 0.75066077709198, "learning_rate": 4.37525192625694e-05, "loss": 0.8048, "step": 14868 }, { "epoch": 2.763241033265192, "grad_norm": 0.7691546678543091, "learning_rate": 
4.374044064914203e-05, "loss": 0.93, "step": 14869 }, { "epoch": 2.7634268723285635, "grad_norm": 0.8516491055488586, "learning_rate": 4.372836323647845e-05, "loss": 0.8159, "step": 14870 }, { "epoch": 2.7636127113919344, "grad_norm": 0.6997004151344299, "learning_rate": 4.3716287024836346e-05, "loss": 0.7847, "step": 14871 }, { "epoch": 2.7637985504553058, "grad_norm": 0.7898983359336853, "learning_rate": 4.370421201447356e-05, "loss": 0.5884, "step": 14872 }, { "epoch": 2.7639843895186766, "grad_norm": 0.8238096833229065, "learning_rate": 4.369213820564774e-05, "loss": 1.0009, "step": 14873 }, { "epoch": 2.764170228582048, "grad_norm": 0.9211301803588867, "learning_rate": 4.36800655986166e-05, "loss": 0.9284, "step": 14874 }, { "epoch": 2.7643560676454193, "grad_norm": 0.7392138242721558, "learning_rate": 4.3667994193637796e-05, "loss": 0.6207, "step": 14875 }, { "epoch": 2.76454190670879, "grad_norm": 0.7954480051994324, "learning_rate": 4.3655923990969005e-05, "loss": 0.9053, "step": 14876 }, { "epoch": 2.764727745772161, "grad_norm": 0.7361290454864502, "learning_rate": 4.364385499086784e-05, "loss": 0.9016, "step": 14877 }, { "epoch": 2.7649135848355324, "grad_norm": 0.9881463646888733, "learning_rate": 4.363178719359187e-05, "loss": 1.0199, "step": 14878 }, { "epoch": 2.7650994238989037, "grad_norm": 0.7867943048477173, "learning_rate": 4.3619720599398637e-05, "loss": 0.756, "step": 14879 }, { "epoch": 2.7652852629622746, "grad_norm": 1.01762855052948, "learning_rate": 4.3607655208545715e-05, "loss": 0.8002, "step": 14880 }, { "epoch": 2.765471102025646, "grad_norm": 0.8283891081809998, "learning_rate": 4.359559102129065e-05, "loss": 0.8995, "step": 14881 }, { "epoch": 2.765656941089017, "grad_norm": 0.9196315407752991, "learning_rate": 4.358352803789091e-05, "loss": 0.9448, "step": 14882 }, { "epoch": 2.765842780152388, "grad_norm": 0.6811094284057617, "learning_rate": 4.357146625860391e-05, "loss": 0.8073, "step": 14883 }, { "epoch": 2.766028619215759, 
"grad_norm": 0.7988422513008118, "learning_rate": 4.355940568368716e-05, "loss": 0.9822, "step": 14884 }, { "epoch": 2.7662144582791304, "grad_norm": 0.9253214001655579, "learning_rate": 4.354734631339804e-05, "loss": 0.8889, "step": 14885 }, { "epoch": 2.7664002973425013, "grad_norm": 1.163099765777588, "learning_rate": 4.3535288147993946e-05, "loss": 1.0403, "step": 14886 }, { "epoch": 2.7665861364058726, "grad_norm": 0.729446530342102, "learning_rate": 4.352323118773218e-05, "loss": 0.9152, "step": 14887 }, { "epoch": 2.7667719754692435, "grad_norm": 1.0046440362930298, "learning_rate": 4.351117543287018e-05, "loss": 0.7264, "step": 14888 }, { "epoch": 2.766957814532615, "grad_norm": 0.8194682598114014, "learning_rate": 4.349912088366518e-05, "loss": 0.9762, "step": 14889 }, { "epoch": 2.767143653595986, "grad_norm": 0.8480716347694397, "learning_rate": 4.348706754037446e-05, "loss": 0.7538, "step": 14890 }, { "epoch": 2.767329492659357, "grad_norm": 0.7657939195632935, "learning_rate": 4.3475015403255304e-05, "loss": 0.7417, "step": 14891 }, { "epoch": 2.767515331722728, "grad_norm": 0.7754166126251221, "learning_rate": 4.346296447256498e-05, "loss": 0.7323, "step": 14892 }, { "epoch": 2.7677011707860992, "grad_norm": 0.8909863233566284, "learning_rate": 4.345091474856066e-05, "loss": 1.0459, "step": 14893 }, { "epoch": 2.7678870098494706, "grad_norm": 0.9239513874053955, "learning_rate": 4.343886623149952e-05, "loss": 1.0778, "step": 14894 }, { "epoch": 2.7680728489128414, "grad_norm": 0.7537717819213867, "learning_rate": 4.342681892163868e-05, "loss": 0.8128, "step": 14895 }, { "epoch": 2.7682586879762123, "grad_norm": 0.8553297519683838, "learning_rate": 4.341477281923534e-05, "loss": 0.8233, "step": 14896 }, { "epoch": 2.7684445270395837, "grad_norm": 0.9643924832344055, "learning_rate": 4.340272792454657e-05, "loss": 1.0147, "step": 14897 }, { "epoch": 2.768630366102955, "grad_norm": 0.9254254698753357, "learning_rate": 4.339068423782945e-05, "loss": 
0.9021, "step": 14898 }, { "epoch": 2.768816205166326, "grad_norm": 0.7231565117835999, "learning_rate": 4.3378641759341e-05, "loss": 0.7963, "step": 14899 }, { "epoch": 2.769002044229697, "grad_norm": 0.8661108016967773, "learning_rate": 4.3366600489338305e-05, "loss": 1.2158, "step": 14900 }, { "epoch": 2.769187883293068, "grad_norm": 0.7713779211044312, "learning_rate": 4.335456042807829e-05, "loss": 0.7526, "step": 14901 }, { "epoch": 2.7693737223564394, "grad_norm": 0.8019841313362122, "learning_rate": 4.334252157581803e-05, "loss": 0.8314, "step": 14902 }, { "epoch": 2.7695595614198103, "grad_norm": 0.8363486528396606, "learning_rate": 4.333048393281437e-05, "loss": 0.9437, "step": 14903 }, { "epoch": 2.7697454004831816, "grad_norm": 0.7701266407966614, "learning_rate": 4.331844749932432e-05, "loss": 0.9542, "step": 14904 }, { "epoch": 2.769931239546553, "grad_norm": 0.7874062657356262, "learning_rate": 4.330641227560474e-05, "loss": 0.6885, "step": 14905 }, { "epoch": 2.770117078609924, "grad_norm": 0.9805448651313782, "learning_rate": 4.3294378261912504e-05, "loss": 1.0802, "step": 14906 }, { "epoch": 2.7703029176732947, "grad_norm": 0.6709483861923218, "learning_rate": 4.328234545850442e-05, "loss": 0.9273, "step": 14907 }, { "epoch": 2.770488756736666, "grad_norm": 0.8253301382064819, "learning_rate": 4.327031386563737e-05, "loss": 1.024, "step": 14908 }, { "epoch": 2.7706745958000374, "grad_norm": 0.907292902469635, "learning_rate": 4.325828348356812e-05, "loss": 1.0564, "step": 14909 }, { "epoch": 2.7708604348634083, "grad_norm": 0.7271608114242554, "learning_rate": 4.324625431255342e-05, "loss": 0.7091, "step": 14910 }, { "epoch": 2.771046273926779, "grad_norm": 0.9254564642906189, "learning_rate": 4.323422635284998e-05, "loss": 0.8868, "step": 14911 }, { "epoch": 2.7712321129901505, "grad_norm": 0.8518773913383484, "learning_rate": 4.322219960471463e-05, "loss": 0.7656, "step": 14912 }, { "epoch": 2.771417952053522, "grad_norm": 0.7480016946792603, 
"learning_rate": 4.3210174068404e-05, "loss": 0.6123, "step": 14913 }, { "epoch": 2.7716037911168927, "grad_norm": 0.8283432126045227, "learning_rate": 4.319814974417473e-05, "loss": 0.9516, "step": 14914 }, { "epoch": 2.771789630180264, "grad_norm": 0.8425053954124451, "learning_rate": 4.3186126632283465e-05, "loss": 0.9492, "step": 14915 }, { "epoch": 2.771975469243635, "grad_norm": 0.7290083169937134, "learning_rate": 4.3174104732986854e-05, "loss": 0.6701, "step": 14916 }, { "epoch": 2.7721613083070062, "grad_norm": 0.8720309734344482, "learning_rate": 4.316208404654146e-05, "loss": 0.9944, "step": 14917 }, { "epoch": 2.772347147370377, "grad_norm": 0.8075978755950928, "learning_rate": 4.315006457320384e-05, "loss": 0.7447, "step": 14918 }, { "epoch": 2.7725329864337485, "grad_norm": 0.797562301158905, "learning_rate": 4.3138046313230506e-05, "loss": 0.9379, "step": 14919 }, { "epoch": 2.7727188254971193, "grad_norm": 1.662497878074646, "learning_rate": 4.312602926687803e-05, "loss": 1.0968, "step": 14920 }, { "epoch": 2.7729046645604907, "grad_norm": 0.7986350655555725, "learning_rate": 4.3114013434402856e-05, "loss": 0.7646, "step": 14921 }, { "epoch": 2.7730905036238616, "grad_norm": 0.7795454859733582, "learning_rate": 4.310199881606141e-05, "loss": 0.8714, "step": 14922 }, { "epoch": 2.773276342687233, "grad_norm": 0.8627483248710632, "learning_rate": 4.308998541211015e-05, "loss": 0.9569, "step": 14923 }, { "epoch": 2.773462181750604, "grad_norm": 1.0427031517028809, "learning_rate": 4.307797322280555e-05, "loss": 1.386, "step": 14924 }, { "epoch": 2.773648020813975, "grad_norm": 0.7577645778656006, "learning_rate": 4.306596224840391e-05, "loss": 0.8991, "step": 14925 }, { "epoch": 2.773833859877346, "grad_norm": 0.8506353497505188, "learning_rate": 4.305395248916161e-05, "loss": 0.7597, "step": 14926 }, { "epoch": 2.7740196989407173, "grad_norm": 1.018517017364502, "learning_rate": 4.3041943945334925e-05, "loss": 0.8695, "step": 14927 }, { "epoch": 
2.7742055380040886, "grad_norm": 0.7532228231430054, "learning_rate": 4.3029936617180255e-05, "loss": 0.6172, "step": 14928 }, { "epoch": 2.7743913770674595, "grad_norm": 0.7622318267822266, "learning_rate": 4.301793050495381e-05, "loss": 0.8329, "step": 14929 }, { "epoch": 2.774577216130831, "grad_norm": 0.7729710340499878, "learning_rate": 4.3005925608911856e-05, "loss": 0.9368, "step": 14930 }, { "epoch": 2.7747630551942017, "grad_norm": 1.1952792406082153, "learning_rate": 4.2993921929310576e-05, "loss": 1.0395, "step": 14931 }, { "epoch": 2.774948894257573, "grad_norm": 0.8073039650917053, "learning_rate": 4.298191946640622e-05, "loss": 0.8195, "step": 14932 }, { "epoch": 2.775134733320944, "grad_norm": 0.9328490495681763, "learning_rate": 4.296991822045496e-05, "loss": 0.8152, "step": 14933 }, { "epoch": 2.7753205723843153, "grad_norm": 3.2860829830169678, "learning_rate": 4.2957918191712945e-05, "loss": 1.2947, "step": 14934 }, { "epoch": 2.775506411447686, "grad_norm": 0.9785019159317017, "learning_rate": 4.294591938043624e-05, "loss": 0.635, "step": 14935 }, { "epoch": 2.7756922505110575, "grad_norm": 0.9144778251647949, "learning_rate": 4.293392178688101e-05, "loss": 0.8483, "step": 14936 }, { "epoch": 2.7758780895744284, "grad_norm": 0.9231557846069336, "learning_rate": 4.292192541130329e-05, "loss": 1.1413, "step": 14937 }, { "epoch": 2.7760639286377997, "grad_norm": 0.7811604142189026, "learning_rate": 4.290993025395912e-05, "loss": 0.8274, "step": 14938 }, { "epoch": 2.776249767701171, "grad_norm": 0.9679073691368103, "learning_rate": 4.289793631510448e-05, "loss": 1.0898, "step": 14939 }, { "epoch": 2.776435606764542, "grad_norm": 0.7433283925056458, "learning_rate": 4.2885943594995434e-05, "loss": 0.7807, "step": 14940 }, { "epoch": 2.776621445827913, "grad_norm": 0.696416437625885, "learning_rate": 4.287395209388793e-05, "loss": 0.6271, "step": 14941 }, { "epoch": 2.776807284891284, "grad_norm": 0.7764953374862671, "learning_rate": 
4.2861961812037866e-05, "loss": 0.8496, "step": 14942 }, { "epoch": 2.7769931239546555, "grad_norm": 0.7799713611602783, "learning_rate": 4.2849972749701104e-05, "loss": 0.8081, "step": 14943 }, { "epoch": 2.7771789630180264, "grad_norm": 0.7540068626403809, "learning_rate": 4.283798490713368e-05, "loss": 0.8996, "step": 14944 }, { "epoch": 2.7773648020813972, "grad_norm": 0.7690991163253784, "learning_rate": 4.282599828459136e-05, "loss": 0.9814, "step": 14945 }, { "epoch": 2.7775506411447686, "grad_norm": 0.7605803608894348, "learning_rate": 4.281401288233001e-05, "loss": 0.7908, "step": 14946 }, { "epoch": 2.77773648020814, "grad_norm": 0.7501049637794495, "learning_rate": 4.280202870060538e-05, "loss": 0.865, "step": 14947 }, { "epoch": 2.777922319271511, "grad_norm": 1.371169090270996, "learning_rate": 4.279004573967332e-05, "loss": 1.3352, "step": 14948 }, { "epoch": 2.778108158334882, "grad_norm": 0.9031583070755005, "learning_rate": 4.277806399978956e-05, "loss": 1.0714, "step": 14949 }, { "epoch": 2.778293997398253, "grad_norm": 0.8056634664535522, "learning_rate": 4.276608348120983e-05, "loss": 1.1175, "step": 14950 }, { "epoch": 2.7784798364616243, "grad_norm": 0.7620245814323425, "learning_rate": 4.275410418418979e-05, "loss": 0.9606, "step": 14951 }, { "epoch": 2.778665675524995, "grad_norm": 1.1092839241027832, "learning_rate": 4.2742126108985205e-05, "loss": 1.157, "step": 14952 }, { "epoch": 2.7788515145883665, "grad_norm": 0.829846203327179, "learning_rate": 4.273014925585163e-05, "loss": 0.7582, "step": 14953 }, { "epoch": 2.7790373536517374, "grad_norm": 0.9030061960220337, "learning_rate": 4.271817362504479e-05, "loss": 0.9176, "step": 14954 }, { "epoch": 2.7792231927151088, "grad_norm": 0.824874997138977, "learning_rate": 4.2706199216820185e-05, "loss": 1.064, "step": 14955 }, { "epoch": 2.7794090317784796, "grad_norm": 0.6992202401161194, "learning_rate": 4.269422603143348e-05, "loss": 0.6227, "step": 14956 }, { "epoch": 2.779594870841851, 
"grad_norm": 0.887396514415741, "learning_rate": 4.268225406914018e-05, "loss": 1.094, "step": 14957 }, { "epoch": 2.7797807099052223, "grad_norm": 0.7713488340377808, "learning_rate": 4.26702833301958e-05, "loss": 0.9086, "step": 14958 }, { "epoch": 2.779966548968593, "grad_norm": 0.9397330284118652, "learning_rate": 4.265831381485581e-05, "loss": 0.9952, "step": 14959 }, { "epoch": 2.780152388031964, "grad_norm": 0.8007112145423889, "learning_rate": 4.2646345523375744e-05, "loss": 0.9232, "step": 14960 }, { "epoch": 2.7803382270953354, "grad_norm": 0.8054376244544983, "learning_rate": 4.2634378456011015e-05, "loss": 0.9057, "step": 14961 }, { "epoch": 2.7805240661587067, "grad_norm": 0.9074475765228271, "learning_rate": 4.2622412613016995e-05, "loss": 1.0884, "step": 14962 }, { "epoch": 2.7807099052220776, "grad_norm": 0.7887566089630127, "learning_rate": 4.261044799464915e-05, "loss": 0.8381, "step": 14963 }, { "epoch": 2.780895744285449, "grad_norm": 0.842220664024353, "learning_rate": 4.259848460116277e-05, "loss": 1.0426, "step": 14964 }, { "epoch": 2.78108158334882, "grad_norm": 0.7846879363059998, "learning_rate": 4.2586522432813266e-05, "loss": 0.7901, "step": 14965 }, { "epoch": 2.781267422412191, "grad_norm": 0.9481580853462219, "learning_rate": 4.257456148985592e-05, "loss": 1.0801, "step": 14966 }, { "epoch": 2.781453261475562, "grad_norm": 0.9254017472267151, "learning_rate": 4.2562601772545975e-05, "loss": 1.0059, "step": 14967 }, { "epoch": 2.7816391005389334, "grad_norm": 0.984853982925415, "learning_rate": 4.2550643281138756e-05, "loss": 1.0964, "step": 14968 }, { "epoch": 2.7818249396023043, "grad_norm": 0.8644289970397949, "learning_rate": 4.2538686015889486e-05, "loss": 0.9188, "step": 14969 }, { "epoch": 2.7820107786656756, "grad_norm": 0.8592866063117981, "learning_rate": 4.2526729977053345e-05, "loss": 0.9523, "step": 14970 }, { "epoch": 2.7821966177290465, "grad_norm": 0.7572512030601501, "learning_rate": 4.2514775164885487e-05, "loss": 
1.0383, "step": 14971 }, { "epoch": 2.782382456792418, "grad_norm": 0.8783193826675415, "learning_rate": 4.250282157964115e-05, "loss": 0.8557, "step": 14972 }, { "epoch": 2.782568295855789, "grad_norm": 1.02633535861969, "learning_rate": 4.2490869221575415e-05, "loss": 1.2725, "step": 14973 }, { "epoch": 2.78275413491916, "grad_norm": 0.7333510518074036, "learning_rate": 4.2478918090943346e-05, "loss": 1.0315, "step": 14974 }, { "epoch": 2.782939973982531, "grad_norm": 1.0128086805343628, "learning_rate": 4.246696818800006e-05, "loss": 0.8859, "step": 14975 }, { "epoch": 2.7831258130459022, "grad_norm": 0.8416959047317505, "learning_rate": 4.245501951300064e-05, "loss": 0.7682, "step": 14976 }, { "epoch": 2.7833116521092736, "grad_norm": 0.6340311169624329, "learning_rate": 4.244307206620009e-05, "loss": 0.7206, "step": 14977 }, { "epoch": 2.7834974911726444, "grad_norm": 0.8692113757133484, "learning_rate": 4.243112584785338e-05, "loss": 1.1894, "step": 14978 }, { "epoch": 2.7836833302360158, "grad_norm": 0.8255258202552795, "learning_rate": 4.241918085821547e-05, "loss": 0.8858, "step": 14979 }, { "epoch": 2.7838691692993867, "grad_norm": 0.9497602581977844, "learning_rate": 4.2407237097541364e-05, "loss": 1.0581, "step": 14980 }, { "epoch": 2.784055008362758, "grad_norm": 0.6905065178871155, "learning_rate": 4.239529456608595e-05, "loss": 0.8081, "step": 14981 }, { "epoch": 2.784240847426129, "grad_norm": 0.7528558373451233, "learning_rate": 4.238335326410408e-05, "loss": 1.0204, "step": 14982 }, { "epoch": 2.7844266864895, "grad_norm": 0.7711358666419983, "learning_rate": 4.237141319185071e-05, "loss": 0.869, "step": 14983 }, { "epoch": 2.784612525552871, "grad_norm": 0.9386207461357117, "learning_rate": 4.2359474349580616e-05, "loss": 0.9466, "step": 14984 }, { "epoch": 2.7847983646162424, "grad_norm": 0.8581230044364929, "learning_rate": 4.23475367375486e-05, "loss": 0.8827, "step": 14985 }, { "epoch": 2.7849842036796133, "grad_norm": 0.920850396156311, 
"learning_rate": 4.233560035600951e-05, "loss": 1.0693, "step": 14986 }, { "epoch": 2.7851700427429846, "grad_norm": 0.7897260785102844, "learning_rate": 4.232366520521803e-05, "loss": 0.8717, "step": 14987 }, { "epoch": 2.785355881806356, "grad_norm": 0.9491976499557495, "learning_rate": 4.231173128542899e-05, "loss": 0.9194, "step": 14988 }, { "epoch": 2.785541720869727, "grad_norm": 0.9895254373550415, "learning_rate": 4.229979859689704e-05, "loss": 0.9675, "step": 14989 }, { "epoch": 2.7857275599330977, "grad_norm": 0.8803383111953735, "learning_rate": 4.228786713987688e-05, "loss": 0.8465, "step": 14990 }, { "epoch": 2.785913398996469, "grad_norm": 0.9793798327445984, "learning_rate": 4.2275936914623105e-05, "loss": 1.2113, "step": 14991 }, { "epoch": 2.7860992380598404, "grad_norm": 0.8835542798042297, "learning_rate": 4.2264007921390446e-05, "loss": 0.9977, "step": 14992 }, { "epoch": 2.7862850771232113, "grad_norm": 0.7891272902488708, "learning_rate": 4.225208016043346e-05, "loss": 0.7701, "step": 14993 }, { "epoch": 2.786470916186582, "grad_norm": 0.8491330742835999, "learning_rate": 4.224015363200668e-05, "loss": 0.7749, "step": 14994 }, { "epoch": 2.7866567552499535, "grad_norm": 0.8505903482437134, "learning_rate": 4.222822833636472e-05, "loss": 1.1129, "step": 14995 }, { "epoch": 2.786842594313325, "grad_norm": 0.9466049671173096, "learning_rate": 4.2216304273762094e-05, "loss": 1.1278, "step": 14996 }, { "epoch": 2.7870284333766957, "grad_norm": 0.8390737771987915, "learning_rate": 4.220438144445331e-05, "loss": 0.9739, "step": 14997 }, { "epoch": 2.787214272440067, "grad_norm": 0.8704586625099182, "learning_rate": 4.219245984869282e-05, "loss": 0.9557, "step": 14998 }, { "epoch": 2.787400111503438, "grad_norm": 0.9225991368293762, "learning_rate": 4.2180539486735036e-05, "loss": 0.7677, "step": 14999 }, { "epoch": 2.7875859505668092, "grad_norm": 0.7355827689170837, "learning_rate": 4.216862035883445e-05, "loss": 0.8258, "step": 15000 }, { "epoch": 
2.78777178963018, "grad_norm": 0.896327793598175, "learning_rate": 4.2156702465245424e-05, "loss": 0.9988, "step": 15001 }, { "epoch": 2.7879576286935515, "grad_norm": 0.7767103910446167, "learning_rate": 4.214478580622228e-05, "loss": 0.9715, "step": 15002 }, { "epoch": 2.7881434677569223, "grad_norm": 0.9789737462997437, "learning_rate": 4.213287038201943e-05, "loss": 0.957, "step": 15003 }, { "epoch": 2.7883293068202937, "grad_norm": 0.8596428632736206, "learning_rate": 4.212095619289117e-05, "loss": 1.0102, "step": 15004 }, { "epoch": 2.7885151458836646, "grad_norm": 0.7422680258750916, "learning_rate": 4.210904323909177e-05, "loss": 1.1146, "step": 15005 }, { "epoch": 2.788700984947036, "grad_norm": 0.6503537893295288, "learning_rate": 4.2097131520875456e-05, "loss": 0.5747, "step": 15006 }, { "epoch": 2.788886824010407, "grad_norm": 0.7762205600738525, "learning_rate": 4.2085221038496515e-05, "loss": 0.9154, "step": 15007 }, { "epoch": 2.789072663073778, "grad_norm": 0.9213268160820007, "learning_rate": 4.2073311792209166e-05, "loss": 0.739, "step": 15008 }, { "epoch": 2.789258502137149, "grad_norm": 0.8095465302467346, "learning_rate": 4.206140378226758e-05, "loss": 1.0868, "step": 15009 }, { "epoch": 2.7894443412005203, "grad_norm": 0.8037760257720947, "learning_rate": 4.204949700892591e-05, "loss": 0.8164, "step": 15010 }, { "epoch": 2.7896301802638916, "grad_norm": 0.9405101537704468, "learning_rate": 4.2037591472438245e-05, "loss": 0.9941, "step": 15011 }, { "epoch": 2.7898160193272625, "grad_norm": 0.8144403696060181, "learning_rate": 4.202568717305875e-05, "loss": 0.9442, "step": 15012 }, { "epoch": 2.790001858390634, "grad_norm": 0.905034065246582, "learning_rate": 4.201378411104146e-05, "loss": 0.9174, "step": 15013 }, { "epoch": 2.7901876974540047, "grad_norm": 0.8545786142349243, "learning_rate": 4.200188228664043e-05, "loss": 0.7051, "step": 15014 }, { "epoch": 2.790373536517376, "grad_norm": 0.9535547494888306, "learning_rate": 
4.198998170010971e-05, "loss": 0.8842, "step": 15015 }, { "epoch": 2.790559375580747, "grad_norm": 0.9396203756332397, "learning_rate": 4.197808235170326e-05, "loss": 0.7345, "step": 15016 }, { "epoch": 2.7907452146441183, "grad_norm": 1.2389658689498901, "learning_rate": 4.19661842416751e-05, "loss": 1.2049, "step": 15017 }, { "epoch": 2.790931053707489, "grad_norm": 0.7988381385803223, "learning_rate": 4.1954287370279155e-05, "loss": 0.8315, "step": 15018 }, { "epoch": 2.7911168927708605, "grad_norm": 0.8601673245429993, "learning_rate": 4.194239173776929e-05, "loss": 1.1607, "step": 15019 }, { "epoch": 2.7913027318342314, "grad_norm": 0.9635416269302368, "learning_rate": 4.193049734439949e-05, "loss": 0.7352, "step": 15020 }, { "epoch": 2.7914885708976027, "grad_norm": 0.886246383190155, "learning_rate": 4.1918604190423564e-05, "loss": 1.0577, "step": 15021 }, { "epoch": 2.791674409960974, "grad_norm": 0.7917489409446716, "learning_rate": 4.1906712276095325e-05, "loss": 0.8374, "step": 15022 }, { "epoch": 2.791860249024345, "grad_norm": 0.9550040364265442, "learning_rate": 4.189482160166865e-05, "loss": 0.7641, "step": 15023 }, { "epoch": 2.792046088087716, "grad_norm": 0.8474428653717041, "learning_rate": 4.188293216739732e-05, "loss": 1.126, "step": 15024 }, { "epoch": 2.792231927151087, "grad_norm": 0.7299398183822632, "learning_rate": 4.187104397353504e-05, "loss": 0.7369, "step": 15025 }, { "epoch": 2.7924177662144585, "grad_norm": 0.9598284363746643, "learning_rate": 4.185915702033555e-05, "loss": 0.6783, "step": 15026 }, { "epoch": 2.7926036052778294, "grad_norm": 0.8690744638442993, "learning_rate": 4.1847271308052583e-05, "loss": 0.8888, "step": 15027 }, { "epoch": 2.7927894443412002, "grad_norm": 0.8142648935317993, "learning_rate": 4.183538683693985e-05, "loss": 1.0293, "step": 15028 }, { "epoch": 2.7929752834045716, "grad_norm": 0.9416887164115906, "learning_rate": 4.182350360725097e-05, "loss": 0.8944, "step": 15029 }, { "epoch": 2.793161122467943, 
"grad_norm": 1.0445160865783691, "learning_rate": 4.181162161923957e-05, "loss": 1.0746, "step": 15030 }, { "epoch": 2.793346961531314, "grad_norm": 0.922220766544342, "learning_rate": 4.179974087315921e-05, "loss": 0.9097, "step": 15031 }, { "epoch": 2.793532800594685, "grad_norm": 0.7769144773483276, "learning_rate": 4.1787861369263546e-05, "loss": 0.8806, "step": 15032 }, { "epoch": 2.793718639658056, "grad_norm": 1.0614155530929565, "learning_rate": 4.1775983107806086e-05, "loss": 0.8616, "step": 15033 }, { "epoch": 2.7939044787214273, "grad_norm": 0.803954541683197, "learning_rate": 4.176410608904031e-05, "loss": 0.8816, "step": 15034 }, { "epoch": 2.794090317784798, "grad_norm": 0.7549547553062439, "learning_rate": 4.17522303132198e-05, "loss": 0.8736, "step": 15035 }, { "epoch": 2.7942761568481695, "grad_norm": 0.9446969032287598, "learning_rate": 4.174035578059796e-05, "loss": 0.889, "step": 15036 }, { "epoch": 2.794461995911541, "grad_norm": 0.8554919362068176, "learning_rate": 4.1728482491428224e-05, "loss": 0.9947, "step": 15037 }, { "epoch": 2.7946478349749118, "grad_norm": 0.8129143714904785, "learning_rate": 4.171661044596407e-05, "loss": 0.8565, "step": 15038 }, { "epoch": 2.7948336740382826, "grad_norm": 0.8183847069740295, "learning_rate": 4.17047396444588e-05, "loss": 0.9955, "step": 15039 }, { "epoch": 2.795019513101654, "grad_norm": 0.7391417622566223, "learning_rate": 4.169287008716587e-05, "loss": 0.9115, "step": 15040 }, { "epoch": 2.7952053521650253, "grad_norm": 0.9141026139259338, "learning_rate": 4.1681001774338566e-05, "loss": 0.8662, "step": 15041 }, { "epoch": 2.795391191228396, "grad_norm": 0.9632161259651184, "learning_rate": 4.166913470623016e-05, "loss": 0.8851, "step": 15042 }, { "epoch": 2.795577030291767, "grad_norm": 0.8051280975341797, "learning_rate": 4.165726888309402e-05, "loss": 0.9795, "step": 15043 }, { "epoch": 2.7957628693551384, "grad_norm": 0.8322173953056335, "learning_rate": 4.1645404305183345e-05, "loss": 0.7878, 
"step": 15044 }, { "epoch": 2.7959487084185097, "grad_norm": 0.8591897487640381, "learning_rate": 4.163354097275137e-05, "loss": 1.0312, "step": 15045 }, { "epoch": 2.7961345474818806, "grad_norm": 0.9375728368759155, "learning_rate": 4.162167888605127e-05, "loss": 1.0866, "step": 15046 }, { "epoch": 2.796320386545252, "grad_norm": 0.8724905252456665, "learning_rate": 4.16098180453363e-05, "loss": 0.784, "step": 15047 }, { "epoch": 2.796506225608623, "grad_norm": 0.8089430928230286, "learning_rate": 4.159795845085952e-05, "loss": 0.6377, "step": 15048 }, { "epoch": 2.796692064671994, "grad_norm": 0.838665246963501, "learning_rate": 4.158610010287413e-05, "loss": 0.967, "step": 15049 }, { "epoch": 2.796877903735365, "grad_norm": 0.7745414972305298, "learning_rate": 4.15742430016332e-05, "loss": 0.848, "step": 15050 }, { "epoch": 2.7970637427987364, "grad_norm": 0.8479868769645691, "learning_rate": 4.1562387147389735e-05, "loss": 0.9451, "step": 15051 }, { "epoch": 2.7972495818621073, "grad_norm": 0.9081116914749146, "learning_rate": 4.155053254039688e-05, "loss": 0.9511, "step": 15052 }, { "epoch": 2.7974354209254786, "grad_norm": 0.7565174102783203, "learning_rate": 4.15386791809076e-05, "loss": 0.8633, "step": 15053 }, { "epoch": 2.7976212599888495, "grad_norm": 0.7815892100334167, "learning_rate": 4.152682706917487e-05, "loss": 0.9766, "step": 15054 }, { "epoch": 2.797807099052221, "grad_norm": 0.9136144518852234, "learning_rate": 4.1514976205451696e-05, "loss": 1.0633, "step": 15055 }, { "epoch": 2.797992938115592, "grad_norm": 0.9465920329093933, "learning_rate": 4.1503126589990984e-05, "loss": 1.0764, "step": 15056 }, { "epoch": 2.798178777178963, "grad_norm": 0.9290378093719482, "learning_rate": 4.149127822304566e-05, "loss": 0.9421, "step": 15057 }, { "epoch": 2.798364616242334, "grad_norm": 0.7892298698425293, "learning_rate": 4.147943110486856e-05, "loss": 1.0366, "step": 15058 }, { "epoch": 2.7985504553057052, "grad_norm": 0.9133583307266235, 
"learning_rate": 4.146758523571257e-05, "loss": 0.966, "step": 15059 }, { "epoch": 2.7987362943690766, "grad_norm": 0.9047538042068481, "learning_rate": 4.1455740615830574e-05, "loss": 1.0607, "step": 15060 }, { "epoch": 2.7989221334324474, "grad_norm": 1.4756627082824707, "learning_rate": 4.1443897245475315e-05, "loss": 1.1325, "step": 15061 }, { "epoch": 2.7991079724958188, "grad_norm": 0.9655285477638245, "learning_rate": 4.143205512489956e-05, "loss": 0.7489, "step": 15062 }, { "epoch": 2.7992938115591897, "grad_norm": 0.73797208070755, "learning_rate": 4.142021425435612e-05, "loss": 0.6195, "step": 15063 }, { "epoch": 2.799479650622561, "grad_norm": 0.8950634598731995, "learning_rate": 4.140837463409767e-05, "loss": 0.9471, "step": 15064 }, { "epoch": 2.799665489685932, "grad_norm": 0.909694492816925, "learning_rate": 4.139653626437692e-05, "loss": 0.8666, "step": 15065 }, { "epoch": 2.799851328749303, "grad_norm": 0.7249810099601746, "learning_rate": 4.1384699145446495e-05, "loss": 0.6832, "step": 15066 }, { "epoch": 2.800037167812674, "grad_norm": 0.869342029094696, "learning_rate": 4.137286327755913e-05, "loss": 0.984, "step": 15067 }, { "epoch": 2.8002230068760454, "grad_norm": 0.6247124671936035, "learning_rate": 4.136102866096737e-05, "loss": 0.6172, "step": 15068 }, { "epoch": 2.8004088459394163, "grad_norm": 0.7755012512207031, "learning_rate": 4.13491952959238e-05, "loss": 0.9734, "step": 15069 }, { "epoch": 2.8005946850027876, "grad_norm": 0.719447910785675, "learning_rate": 4.1337363182681046e-05, "loss": 0.8898, "step": 15070 }, { "epoch": 2.800780524066159, "grad_norm": 0.8762292861938477, "learning_rate": 4.1325532321491555e-05, "loss": 0.997, "step": 15071 }, { "epoch": 2.80096636312953, "grad_norm": 0.982386589050293, "learning_rate": 4.131370271260794e-05, "loss": 0.993, "step": 15072 }, { "epoch": 2.8011522021929007, "grad_norm": 0.7873516082763672, "learning_rate": 4.1301874356282624e-05, "loss": 1.0242, "step": 15073 }, { "epoch": 
2.801338041256272, "grad_norm": 0.8276214003562927, "learning_rate": 4.1290047252768036e-05, "loss": 0.9155, "step": 15074 }, { "epoch": 2.8015238803196434, "grad_norm": 0.9610852599143982, "learning_rate": 4.1278221402316674e-05, "loss": 0.9898, "step": 15075 }, { "epoch": 2.8017097193830143, "grad_norm": 0.762342631816864, "learning_rate": 4.1266396805180905e-05, "loss": 1.0075, "step": 15076 }, { "epoch": 2.801895558446385, "grad_norm": 1.317216396331787, "learning_rate": 4.12545734616131e-05, "loss": 1.2906, "step": 15077 }, { "epoch": 2.8020813975097565, "grad_norm": 0.8017038106918335, "learning_rate": 4.124275137186559e-05, "loss": 0.7105, "step": 15078 }, { "epoch": 2.802267236573128, "grad_norm": 1.9257043600082397, "learning_rate": 4.1230930536190715e-05, "loss": 1.3639, "step": 15079 }, { "epoch": 2.8024530756364987, "grad_norm": 0.8237961530685425, "learning_rate": 4.12191109548408e-05, "loss": 0.8367, "step": 15080 }, { "epoch": 2.80263891469987, "grad_norm": 0.7258276343345642, "learning_rate": 4.1207292628068106e-05, "loss": 0.7606, "step": 15081 }, { "epoch": 2.802824753763241, "grad_norm": 0.8935003280639648, "learning_rate": 4.119547555612482e-05, "loss": 0.97, "step": 15082 }, { "epoch": 2.8030105928266122, "grad_norm": 0.7853168845176697, "learning_rate": 4.118365973926322e-05, "loss": 0.781, "step": 15083 }, { "epoch": 2.803196431889983, "grad_norm": 0.853007435798645, "learning_rate": 4.117184517773548e-05, "loss": 0.9727, "step": 15084 }, { "epoch": 2.8033822709533545, "grad_norm": 0.9908416867256165, "learning_rate": 4.116003187179375e-05, "loss": 0.9773, "step": 15085 }, { "epoch": 2.803568110016726, "grad_norm": 0.8514252305030823, "learning_rate": 4.114821982169013e-05, "loss": 1.0044, "step": 15086 }, { "epoch": 2.8037539490800967, "grad_norm": 0.9149060249328613, "learning_rate": 4.11364090276768e-05, "loss": 1.1324, "step": 15087 }, { "epoch": 2.8039397881434676, "grad_norm": 0.9491459727287292, "learning_rate": 4.11245994900058e-05, 
"loss": 0.9364, "step": 15088 }, { "epoch": 2.804125627206839, "grad_norm": 0.7908619046211243, "learning_rate": 4.11127912089292e-05, "loss": 0.8661, "step": 15089 }, { "epoch": 2.80431146627021, "grad_norm": 0.775545060634613, "learning_rate": 4.110098418469898e-05, "loss": 0.8741, "step": 15090 }, { "epoch": 2.804497305333581, "grad_norm": 0.7879462242126465, "learning_rate": 4.1089178417567164e-05, "loss": 0.8878, "step": 15091 }, { "epoch": 2.804683144396952, "grad_norm": 0.9746586084365845, "learning_rate": 4.107737390778579e-05, "loss": 0.6941, "step": 15092 }, { "epoch": 2.8048689834603233, "grad_norm": 0.7224279046058655, "learning_rate": 4.1065570655606754e-05, "loss": 0.5672, "step": 15093 }, { "epoch": 2.8050548225236946, "grad_norm": 0.7029162049293518, "learning_rate": 4.105376866128193e-05, "loss": 0.8019, "step": 15094 }, { "epoch": 2.8052406615870655, "grad_norm": 0.9751745462417603, "learning_rate": 4.104196792506331e-05, "loss": 0.7743, "step": 15095 }, { "epoch": 2.805426500650437, "grad_norm": 0.9375293850898743, "learning_rate": 4.10301684472027e-05, "loss": 0.8651, "step": 15096 }, { "epoch": 2.8056123397138077, "grad_norm": 0.9242647886276245, "learning_rate": 4.101837022795194e-05, "loss": 1.0177, "step": 15097 }, { "epoch": 2.805798178777179, "grad_norm": 0.8099009394645691, "learning_rate": 4.100657326756282e-05, "loss": 0.8403, "step": 15098 }, { "epoch": 2.80598401784055, "grad_norm": 0.811262309551239, "learning_rate": 4.0994777566287204e-05, "loss": 0.9109, "step": 15099 }, { "epoch": 2.8061698569039213, "grad_norm": 0.8253323435783386, "learning_rate": 4.0982983124376763e-05, "loss": 0.708, "step": 15100 }, { "epoch": 2.806355695967292, "grad_norm": 0.8106323480606079, "learning_rate": 4.09711899420833e-05, "loss": 1.1688, "step": 15101 }, { "epoch": 2.8065415350306635, "grad_norm": 0.7565474510192871, "learning_rate": 4.0959398019658456e-05, "loss": 0.9421, "step": 15102 }, { "epoch": 2.8067273740940344, "grad_norm": 
0.8208668828010559, "learning_rate": 4.094760735735398e-05, "loss": 0.8534, "step": 15103 }, { "epoch": 2.8069132131574057, "grad_norm": 0.8241255879402161, "learning_rate": 4.093581795542149e-05, "loss": 1.0367, "step": 15104 }, { "epoch": 2.807099052220777, "grad_norm": 0.9445957541465759, "learning_rate": 4.0924029814112594e-05, "loss": 0.9277, "step": 15105 }, { "epoch": 2.807284891284148, "grad_norm": 0.8766359686851501, "learning_rate": 4.091224293367888e-05, "loss": 0.8135, "step": 15106 }, { "epoch": 2.807470730347519, "grad_norm": 0.8228170275688171, "learning_rate": 4.090045731437198e-05, "loss": 0.7023, "step": 15107 }, { "epoch": 2.80765656941089, "grad_norm": 0.8613119721412659, "learning_rate": 4.08886729564434e-05, "loss": 0.9599, "step": 15108 }, { "epoch": 2.8078424084742615, "grad_norm": 0.8965815901756287, "learning_rate": 4.087688986014464e-05, "loss": 0.9747, "step": 15109 }, { "epoch": 2.8080282475376324, "grad_norm": 0.8584528565406799, "learning_rate": 4.086510802572718e-05, "loss": 0.9173, "step": 15110 }, { "epoch": 2.8082140866010037, "grad_norm": 0.7747320532798767, "learning_rate": 4.0853327453442525e-05, "loss": 0.9965, "step": 15111 }, { "epoch": 2.8083999256643746, "grad_norm": 0.8218334913253784, "learning_rate": 4.0841548143542116e-05, "loss": 0.936, "step": 15112 }, { "epoch": 2.808585764727746, "grad_norm": 0.8838158845901489, "learning_rate": 4.082977009627734e-05, "loss": 0.9228, "step": 15113 }, { "epoch": 2.808771603791117, "grad_norm": 0.8490532040596008, "learning_rate": 4.081799331189955e-05, "loss": 0.9187, "step": 15114 }, { "epoch": 2.808957442854488, "grad_norm": 0.7764754295349121, "learning_rate": 4.0806217790660176e-05, "loss": 0.8403, "step": 15115 }, { "epoch": 2.809143281917859, "grad_norm": 0.7498995661735535, "learning_rate": 4.079444353281049e-05, "loss": 0.8031, "step": 15116 }, { "epoch": 2.8093291209812303, "grad_norm": 1.1498351097106934, "learning_rate": 4.0782670538601806e-05, "loss": 1.1147, "step": 
15117 }, { "epoch": 2.809514960044601, "grad_norm": 0.9433811902999878, "learning_rate": 4.077089880828537e-05, "loss": 0.5933, "step": 15118 }, { "epoch": 2.8097007991079725, "grad_norm": 0.7371313571929932, "learning_rate": 4.07591283421125e-05, "loss": 0.8163, "step": 15119 }, { "epoch": 2.809886638171344, "grad_norm": 1.1072052717208862, "learning_rate": 4.074735914033436e-05, "loss": 0.8688, "step": 15120 }, { "epoch": 2.8100724772347148, "grad_norm": 0.973744809627533, "learning_rate": 4.0735591203202125e-05, "loss": 0.9479, "step": 15121 }, { "epoch": 2.8102583162980856, "grad_norm": 0.7982321977615356, "learning_rate": 4.0723824530967004e-05, "loss": 0.8084, "step": 15122 }, { "epoch": 2.810444155361457, "grad_norm": 0.8149759769439697, "learning_rate": 4.0712059123880155e-05, "loss": 0.8018, "step": 15123 }, { "epoch": 2.8106299944248283, "grad_norm": 0.8275374174118042, "learning_rate": 4.070029498219266e-05, "loss": 0.9292, "step": 15124 }, { "epoch": 2.810815833488199, "grad_norm": 0.7239959836006165, "learning_rate": 4.0688532106155606e-05, "loss": 0.9979, "step": 15125 }, { "epoch": 2.81100167255157, "grad_norm": 0.8658648133277893, "learning_rate": 4.067677049602001e-05, "loss": 1.0328, "step": 15126 }, { "epoch": 2.8111875116149414, "grad_norm": 0.8098410964012146, "learning_rate": 4.0665010152036985e-05, "loss": 0.7847, "step": 15127 }, { "epoch": 2.8113733506783127, "grad_norm": 0.722276508808136, "learning_rate": 4.065325107445751e-05, "loss": 0.8967, "step": 15128 }, { "epoch": 2.8115591897416836, "grad_norm": 0.9402121305465698, "learning_rate": 4.0641493263532524e-05, "loss": 0.8615, "step": 15129 }, { "epoch": 2.811745028805055, "grad_norm": 0.8405276536941528, "learning_rate": 4.062973671951299e-05, "loss": 0.933, "step": 15130 }, { "epoch": 2.811930867868426, "grad_norm": 0.943343997001648, "learning_rate": 4.0617981442649855e-05, "loss": 0.9931, "step": 15131 }, { "epoch": 2.812116706931797, "grad_norm": 0.8641842007637024, 
"learning_rate": 4.060622743319398e-05, "loss": 0.9604, "step": 15132 }, { "epoch": 2.812302545995168, "grad_norm": 0.8120639324188232, "learning_rate": 4.059447469139629e-05, "loss": 0.9545, "step": 15133 }, { "epoch": 2.8124883850585394, "grad_norm": 0.9698015451431274, "learning_rate": 4.0582723217507554e-05, "loss": 0.9675, "step": 15134 }, { "epoch": 2.8126742241219103, "grad_norm": 0.8321253061294556, "learning_rate": 4.0570973011778665e-05, "loss": 1.0917, "step": 15135 }, { "epoch": 2.8128600631852816, "grad_norm": 0.7853554487228394, "learning_rate": 4.055922407446038e-05, "loss": 0.9034, "step": 15136 }, { "epoch": 2.8130459022486525, "grad_norm": 1.1400892734527588, "learning_rate": 4.054747640580344e-05, "loss": 1.0301, "step": 15137 }, { "epoch": 2.813231741312024, "grad_norm": 0.802152156829834, "learning_rate": 4.053573000605857e-05, "loss": 0.8063, "step": 15138 }, { "epoch": 2.813417580375395, "grad_norm": 0.7391362190246582, "learning_rate": 4.052398487547653e-05, "loss": 0.8216, "step": 15139 }, { "epoch": 2.813603419438766, "grad_norm": 0.8531056046485901, "learning_rate": 4.051224101430796e-05, "loss": 0.7364, "step": 15140 }, { "epoch": 2.813789258502137, "grad_norm": 0.7853450179100037, "learning_rate": 4.050049842280352e-05, "loss": 0.8467, "step": 15141 }, { "epoch": 2.8139750975655082, "grad_norm": 0.738269567489624, "learning_rate": 4.048875710121377e-05, "loss": 0.9664, "step": 15142 }, { "epoch": 2.8141609366288796, "grad_norm": 0.8238461017608643, "learning_rate": 4.047701704978944e-05, "loss": 1.1125, "step": 15143 }, { "epoch": 2.8143467756922504, "grad_norm": 0.897844672203064, "learning_rate": 4.046527826878105e-05, "loss": 0.8101, "step": 15144 }, { "epoch": 2.8145326147556218, "grad_norm": 0.7138451933860779, "learning_rate": 4.045354075843911e-05, "loss": 0.8195, "step": 15145 }, { "epoch": 2.8147184538189927, "grad_norm": 0.8560433983802795, "learning_rate": 4.044180451901414e-05, "loss": 0.7273, "step": 15146 }, { "epoch": 
2.814904292882364, "grad_norm": 0.7499582171440125, "learning_rate": 4.0430069550756665e-05, "loss": 0.953, "step": 15147 }, { "epoch": 2.815090131945735, "grad_norm": 0.95637446641922, "learning_rate": 4.0418335853917145e-05, "loss": 1.0183, "step": 15148 }, { "epoch": 2.815275971009106, "grad_norm": 0.8454667329788208, "learning_rate": 4.0406603428745996e-05, "loss": 0.8944, "step": 15149 }, { "epoch": 2.815461810072477, "grad_norm": 0.7893767356872559, "learning_rate": 4.039487227549359e-05, "loss": 0.9952, "step": 15150 }, { "epoch": 2.8156476491358484, "grad_norm": 0.8175316452980042, "learning_rate": 4.038314239441038e-05, "loss": 0.8697, "step": 15151 }, { "epoch": 2.8158334881992193, "grad_norm": 0.7143104672431946, "learning_rate": 4.037141378574669e-05, "loss": 0.6834, "step": 15152 }, { "epoch": 2.8160193272625906, "grad_norm": 0.7967771291732788, "learning_rate": 4.035968644975282e-05, "loss": 0.8128, "step": 15153 }, { "epoch": 2.816205166325962, "grad_norm": 0.878592312335968, "learning_rate": 4.03479603866791e-05, "loss": 0.9127, "step": 15154 }, { "epoch": 2.816391005389333, "grad_norm": 0.8325759172439575, "learning_rate": 4.033623559677582e-05, "loss": 1.088, "step": 15155 }, { "epoch": 2.8165768444527037, "grad_norm": 0.9697619080543518, "learning_rate": 4.032451208029321e-05, "loss": 0.9117, "step": 15156 }, { "epoch": 2.816762683516075, "grad_norm": 0.7757670283317566, "learning_rate": 4.031278983748148e-05, "loss": 0.7649, "step": 15157 }, { "epoch": 2.8169485225794464, "grad_norm": 0.842522144317627, "learning_rate": 4.030106886859079e-05, "loss": 0.9733, "step": 15158 }, { "epoch": 2.8171343616428173, "grad_norm": 0.7370792031288147, "learning_rate": 4.028934917387137e-05, "loss": 0.8486, "step": 15159 }, { "epoch": 2.8173202007061886, "grad_norm": 0.8743408918380737, "learning_rate": 4.027763075357333e-05, "loss": 0.8942, "step": 15160 }, { "epoch": 2.8175060397695595, "grad_norm": 0.8407097458839417, "learning_rate": 4.026591360794677e-05, 
"loss": 0.979, "step": 15161 }, { "epoch": 2.817691878832931, "grad_norm": 0.8373445272445679, "learning_rate": 4.025419773724174e-05, "loss": 0.9727, "step": 15162 }, { "epoch": 2.8178777178963017, "grad_norm": 0.7689995765686035, "learning_rate": 4.0242483141708334e-05, "loss": 0.8183, "step": 15163 }, { "epoch": 2.818063556959673, "grad_norm": 0.959061861038208, "learning_rate": 4.0230769821596605e-05, "loss": 1.0507, "step": 15164 }, { "epoch": 2.818249396023044, "grad_norm": 0.756231963634491, "learning_rate": 4.0219057777156536e-05, "loss": 0.9184, "step": 15165 }, { "epoch": 2.8184352350864152, "grad_norm": 0.864276111125946, "learning_rate": 4.0207347008638055e-05, "loss": 0.8817, "step": 15166 }, { "epoch": 2.818621074149786, "grad_norm": 0.6302124261856079, "learning_rate": 4.019563751629117e-05, "loss": 0.6543, "step": 15167 }, { "epoch": 2.8188069132131575, "grad_norm": 0.8549239039421082, "learning_rate": 4.0183929300365783e-05, "loss": 0.8403, "step": 15168 }, { "epoch": 2.818992752276529, "grad_norm": 0.8719046711921692, "learning_rate": 4.017222236111178e-05, "loss": 1.0681, "step": 15169 }, { "epoch": 2.8191785913398997, "grad_norm": 0.7814198136329651, "learning_rate": 4.016051669877897e-05, "loss": 0.9229, "step": 15170 }, { "epoch": 2.8193644304032706, "grad_norm": 0.8007107377052307, "learning_rate": 4.014881231361729e-05, "loss": 0.8217, "step": 15171 }, { "epoch": 2.819550269466642, "grad_norm": 0.6752849817276001, "learning_rate": 4.01371092058765e-05, "loss": 0.684, "step": 15172 }, { "epoch": 2.819736108530013, "grad_norm": 0.7651057839393616, "learning_rate": 4.012540737580635e-05, "loss": 0.7586, "step": 15173 }, { "epoch": 2.819921947593384, "grad_norm": 0.7388724684715271, "learning_rate": 4.0113706823656625e-05, "loss": 0.9782, "step": 15174 }, { "epoch": 2.820107786656755, "grad_norm": 1.1076356172561646, "learning_rate": 4.0102007549677114e-05, "loss": 0.7971, "step": 15175 }, { "epoch": 2.8202936257201263, "grad_norm": 
0.8896591663360596, "learning_rate": 4.0090309554117444e-05, "loss": 0.6954, "step": 15176 }, { "epoch": 2.8204794647834976, "grad_norm": 0.782341718673706, "learning_rate": 4.007861283722731e-05, "loss": 0.8519, "step": 15177 }, { "epoch": 2.8206653038468685, "grad_norm": 1.0588573217391968, "learning_rate": 4.006691739925632e-05, "loss": 0.6504, "step": 15178 }, { "epoch": 2.82085114291024, "grad_norm": 0.7600374817848206, "learning_rate": 4.0055223240454167e-05, "loss": 0.8186, "step": 15179 }, { "epoch": 2.8210369819736107, "grad_norm": 0.7226833701133728, "learning_rate": 4.0043530361070405e-05, "loss": 1.042, "step": 15180 }, { "epoch": 2.821222821036982, "grad_norm": 1.0568703413009644, "learning_rate": 4.00318387613546e-05, "loss": 0.929, "step": 15181 }, { "epoch": 2.821408660100353, "grad_norm": 0.8555261492729187, "learning_rate": 4.002014844155625e-05, "loss": 0.7783, "step": 15182 }, { "epoch": 2.8215944991637243, "grad_norm": 0.9463980793952942, "learning_rate": 4.0008459401924936e-05, "loss": 0.7808, "step": 15183 }, { "epoch": 2.821780338227095, "grad_norm": 0.8411297798156738, "learning_rate": 3.999677164271007e-05, "loss": 0.9684, "step": 15184 }, { "epoch": 2.8219661772904665, "grad_norm": 0.7953760623931885, "learning_rate": 3.998508516416118e-05, "loss": 0.8714, "step": 15185 }, { "epoch": 2.8221520163538374, "grad_norm": 0.90069979429245, "learning_rate": 3.997339996652762e-05, "loss": 0.8948, "step": 15186 }, { "epoch": 2.8223378554172087, "grad_norm": 0.9424738883972168, "learning_rate": 3.996171605005887e-05, "loss": 1.0742, "step": 15187 }, { "epoch": 2.82252369448058, "grad_norm": 0.9908680319786072, "learning_rate": 3.995003341500426e-05, "loss": 0.9633, "step": 15188 }, { "epoch": 2.822709533543951, "grad_norm": 0.718686580657959, "learning_rate": 3.993835206161313e-05, "loss": 0.7854, "step": 15189 }, { "epoch": 2.822895372607322, "grad_norm": 0.7781935334205627, "learning_rate": 3.992667199013478e-05, "loss": 0.9473, "step": 15190 }, 
{ "epoch": 2.823081211670693, "grad_norm": 0.7772121429443359, "learning_rate": 3.991499320081855e-05, "loss": 0.7848, "step": 15191 }, { "epoch": 2.8232670507340645, "grad_norm": 0.8744450807571411, "learning_rate": 3.990331569391369e-05, "loss": 0.8523, "step": 15192 }, { "epoch": 2.8234528897974354, "grad_norm": 0.751013457775116, "learning_rate": 3.9891639469669384e-05, "loss": 0.8262, "step": 15193 }, { "epoch": 2.8236387288608067, "grad_norm": 0.8620365262031555, "learning_rate": 3.987996452833492e-05, "loss": 0.5675, "step": 15194 }, { "epoch": 2.8238245679241776, "grad_norm": 1.0090669393539429, "learning_rate": 3.9868290870159405e-05, "loss": 1.1258, "step": 15195 }, { "epoch": 2.824010406987549, "grad_norm": 0.9945322871208191, "learning_rate": 3.9856618495392074e-05, "loss": 0.7712, "step": 15196 }, { "epoch": 2.82419624605092, "grad_norm": 0.9460313320159912, "learning_rate": 3.9844947404282006e-05, "loss": 0.9587, "step": 15197 }, { "epoch": 2.824382085114291, "grad_norm": 0.6281357407569885, "learning_rate": 3.9833277597078275e-05, "loss": 0.5446, "step": 15198 }, { "epoch": 2.824567924177662, "grad_norm": 0.9807770848274231, "learning_rate": 3.982160907403002e-05, "loss": 1.1381, "step": 15199 }, { "epoch": 2.8247537632410333, "grad_norm": 1.112540602684021, "learning_rate": 3.980994183538624e-05, "loss": 1.2307, "step": 15200 }, { "epoch": 2.824939602304404, "grad_norm": 0.8312342762947083, "learning_rate": 3.979827588139596e-05, "loss": 1.1071, "step": 15201 }, { "epoch": 2.8251254413677755, "grad_norm": 0.8704702854156494, "learning_rate": 3.9786611212308144e-05, "loss": 1.0509, "step": 15202 }, { "epoch": 2.825311280431147, "grad_norm": 0.7412673234939575, "learning_rate": 3.977494782837182e-05, "loss": 1.0044, "step": 15203 }, { "epoch": 2.8254971194945178, "grad_norm": 0.9747570753097534, "learning_rate": 3.9763285729835874e-05, "loss": 1.1114, "step": 15204 }, { "epoch": 2.8256829585578886, "grad_norm": 0.79355788230896, "learning_rate": 
3.97516249169492e-05, "loss": 0.8749, "step": 15205 }, { "epoch": 2.82586879762126, "grad_norm": 0.9315033555030823, "learning_rate": 3.97399653899607e-05, "loss": 1.0057, "step": 15206 }, { "epoch": 2.8260546366846313, "grad_norm": 0.7607800364494324, "learning_rate": 3.9728307149119245e-05, "loss": 0.9749, "step": 15207 }, { "epoch": 2.826240475748002, "grad_norm": 0.7286087870597839, "learning_rate": 3.9716650194673666e-05, "loss": 1.038, "step": 15208 }, { "epoch": 2.826426314811373, "grad_norm": 0.7711015939712524, "learning_rate": 3.970499452687273e-05, "loss": 1.0462, "step": 15209 }, { "epoch": 2.8266121538747444, "grad_norm": 0.7511913180351257, "learning_rate": 3.9693340145965174e-05, "loss": 0.8697, "step": 15210 }, { "epoch": 2.8267979929381157, "grad_norm": 0.7339666485786438, "learning_rate": 3.968168705219982e-05, "loss": 0.7829, "step": 15211 }, { "epoch": 2.8269838320014866, "grad_norm": 0.6857532858848572, "learning_rate": 3.9670035245825353e-05, "loss": 0.7155, "step": 15212 }, { "epoch": 2.827169671064858, "grad_norm": 0.7902857065200806, "learning_rate": 3.9658384727090415e-05, "loss": 0.901, "step": 15213 }, { "epoch": 2.827355510128229, "grad_norm": 0.738915205001831, "learning_rate": 3.9646735496243736e-05, "loss": 0.86, "step": 15214 }, { "epoch": 2.8275413491916, "grad_norm": 0.7594788074493408, "learning_rate": 3.9635087553533924e-05, "loss": 0.8544, "step": 15215 }, { "epoch": 2.827727188254971, "grad_norm": 0.7334456443786621, "learning_rate": 3.962344089920954e-05, "loss": 0.9809, "step": 15216 }, { "epoch": 2.8279130273183424, "grad_norm": 0.7943280935287476, "learning_rate": 3.961179553351924e-05, "loss": 0.7557, "step": 15217 }, { "epoch": 2.8280988663817137, "grad_norm": 1.2078486680984497, "learning_rate": 3.9600151456711485e-05, "loss": 1.3622, "step": 15218 }, { "epoch": 2.8282847054450846, "grad_norm": 0.7896971702575684, "learning_rate": 3.9588508669034886e-05, "loss": 0.8955, "step": 15219 }, { "epoch": 2.8284705445084555, 
"grad_norm": 0.8287630081176758, "learning_rate": 3.957686717073791e-05, "loss": 0.9633, "step": 15220 }, { "epoch": 2.828656383571827, "grad_norm": 0.7887197732925415, "learning_rate": 3.9565226962069e-05, "loss": 0.9236, "step": 15221 }, { "epoch": 2.828842222635198, "grad_norm": 1.049709439277649, "learning_rate": 3.955358804327658e-05, "loss": 1.019, "step": 15222 }, { "epoch": 2.829028061698569, "grad_norm": 0.7993926405906677, "learning_rate": 3.954195041460913e-05, "loss": 0.9172, "step": 15223 }, { "epoch": 2.82921390076194, "grad_norm": 0.781864583492279, "learning_rate": 3.9530314076315e-05, "loss": 0.9311, "step": 15224 }, { "epoch": 2.8293997398253112, "grad_norm": 0.7717763185501099, "learning_rate": 3.9518679028642515e-05, "loss": 0.9092, "step": 15225 }, { "epoch": 2.8295855788886826, "grad_norm": 0.7482540011405945, "learning_rate": 3.950704527184003e-05, "loss": 0.8378, "step": 15226 }, { "epoch": 2.8297714179520534, "grad_norm": 0.791333794593811, "learning_rate": 3.9495412806155883e-05, "loss": 1.0969, "step": 15227 }, { "epoch": 2.8299572570154248, "grad_norm": 0.801101565361023, "learning_rate": 3.948378163183832e-05, "loss": 0.9298, "step": 15228 }, { "epoch": 2.8301430960787957, "grad_norm": 1.3604888916015625, "learning_rate": 3.94721517491356e-05, "loss": 1.3488, "step": 15229 }, { "epoch": 2.830328935142167, "grad_norm": 0.8929975628852844, "learning_rate": 3.946052315829589e-05, "loss": 0.9312, "step": 15230 }, { "epoch": 2.830514774205538, "grad_norm": 1.0351811647415161, "learning_rate": 3.944889585956746e-05, "loss": 0.996, "step": 15231 }, { "epoch": 2.830700613268909, "grad_norm": 0.8826019167900085, "learning_rate": 3.943726985319843e-05, "loss": 1.0848, "step": 15232 }, { "epoch": 2.83088645233228, "grad_norm": 0.8329673409461975, "learning_rate": 3.942564513943692e-05, "loss": 0.943, "step": 15233 }, { "epoch": 2.8310722913956514, "grad_norm": 0.7803268432617188, "learning_rate": 3.941402171853109e-05, "loss": 0.8656, "step": 
15234 }, { "epoch": 2.8312581304590223, "grad_norm": 0.9162929654121399, "learning_rate": 3.9402399590729e-05, "loss": 1.0986, "step": 15235 }, { "epoch": 2.8314439695223936, "grad_norm": 0.6823164820671082, "learning_rate": 3.9390778756278716e-05, "loss": 0.7321, "step": 15236 }, { "epoch": 2.831629808585765, "grad_norm": 0.7181358933448792, "learning_rate": 3.93791592154282e-05, "loss": 0.7124, "step": 15237 }, { "epoch": 2.831815647649136, "grad_norm": 0.8273493647575378, "learning_rate": 3.9367540968425496e-05, "loss": 0.9114, "step": 15238 }, { "epoch": 2.8320014867125067, "grad_norm": 0.9327648282051086, "learning_rate": 3.935592401551862e-05, "loss": 1.1341, "step": 15239 }, { "epoch": 2.832187325775878, "grad_norm": 0.8144948482513428, "learning_rate": 3.9344308356955483e-05, "loss": 0.8627, "step": 15240 }, { "epoch": 2.8323731648392494, "grad_norm": 0.849429190158844, "learning_rate": 3.933269399298399e-05, "loss": 0.8588, "step": 15241 }, { "epoch": 2.8325590039026203, "grad_norm": 0.804368793964386, "learning_rate": 3.9321080923852e-05, "loss": 0.8856, "step": 15242 }, { "epoch": 2.8327448429659916, "grad_norm": 0.7444977760314941, "learning_rate": 3.930946914980744e-05, "loss": 0.8866, "step": 15243 }, { "epoch": 2.8329306820293625, "grad_norm": 0.8025500774383545, "learning_rate": 3.929785867109812e-05, "loss": 0.9125, "step": 15244 }, { "epoch": 2.833116521092734, "grad_norm": 0.8177288770675659, "learning_rate": 3.92862494879718e-05, "loss": 0.8098, "step": 15245 }, { "epoch": 2.8333023601561047, "grad_norm": 0.7735792994499207, "learning_rate": 3.927464160067633e-05, "loss": 0.7662, "step": 15246 }, { "epoch": 2.833488199219476, "grad_norm": 0.8327000141143799, "learning_rate": 3.9263035009459395e-05, "loss": 0.9096, "step": 15247 }, { "epoch": 2.833674038282847, "grad_norm": 0.8771273493766785, "learning_rate": 3.925142971456878e-05, "loss": 0.9426, "step": 15248 }, { "epoch": 2.8338598773462182, "grad_norm": 0.7832034230232239, "learning_rate": 
3.923982571625214e-05, "loss": 0.8878, "step": 15249 }, { "epoch": 2.834045716409589, "grad_norm": 0.7822767496109009, "learning_rate": 3.9228223014757124e-05, "loss": 0.797, "step": 15250 }, { "epoch": 2.8342315554729605, "grad_norm": 0.8614217638969421, "learning_rate": 3.921662161033144e-05, "loss": 1.123, "step": 15251 }, { "epoch": 2.834417394536332, "grad_norm": 0.8838312029838562, "learning_rate": 3.920502150322265e-05, "loss": 0.9017, "step": 15252 }, { "epoch": 2.8346032335997027, "grad_norm": 0.7245998978614807, "learning_rate": 3.919342269367832e-05, "loss": 0.8462, "step": 15253 }, { "epoch": 2.8347890726630736, "grad_norm": 0.8327683806419373, "learning_rate": 3.9181825181946065e-05, "loss": 0.9388, "step": 15254 }, { "epoch": 2.834974911726445, "grad_norm": 1.6923487186431885, "learning_rate": 3.9170228968273384e-05, "loss": 1.4495, "step": 15255 }, { "epoch": 2.835160750789816, "grad_norm": 1.0795648097991943, "learning_rate": 3.915863405290777e-05, "loss": 0.9367, "step": 15256 }, { "epoch": 2.835346589853187, "grad_norm": 1.0243604183197021, "learning_rate": 3.914704043609668e-05, "loss": 0.9431, "step": 15257 }, { "epoch": 2.835532428916558, "grad_norm": 0.8284094929695129, "learning_rate": 3.913544811808758e-05, "loss": 1.0057, "step": 15258 }, { "epoch": 2.8357182679799293, "grad_norm": 0.8010451793670654, "learning_rate": 3.9123857099127936e-05, "loss": 0.8132, "step": 15259 }, { "epoch": 2.8359041070433006, "grad_norm": 0.93331378698349, "learning_rate": 3.9112267379465087e-05, "loss": 1.0041, "step": 15260 }, { "epoch": 2.8360899461066715, "grad_norm": 0.8245121836662292, "learning_rate": 3.91006789593464e-05, "loss": 0.7767, "step": 15261 }, { "epoch": 2.836275785170043, "grad_norm": 0.9565784335136414, "learning_rate": 3.9089091839019176e-05, "loss": 0.8926, "step": 15262 }, { "epoch": 2.8364616242334137, "grad_norm": 0.801906406879425, "learning_rate": 3.90775060187308e-05, "loss": 0.8542, "step": 15263 }, { "epoch": 2.836647463296785, 
"grad_norm": 0.814352810382843, "learning_rate": 3.906592149872852e-05, "loss": 0.8702, "step": 15264 }, { "epoch": 2.836833302360156, "grad_norm": 0.7679254412651062, "learning_rate": 3.9054338279259526e-05, "loss": 0.6711, "step": 15265 }, { "epoch": 2.8370191414235273, "grad_norm": 1.0212868452072144, "learning_rate": 3.904275636057114e-05, "loss": 1.1643, "step": 15266 }, { "epoch": 2.8372049804868986, "grad_norm": 1.1639509201049805, "learning_rate": 3.9031175742910506e-05, "loss": 1.2237, "step": 15267 }, { "epoch": 2.8373908195502695, "grad_norm": 0.8592068552970886, "learning_rate": 3.901959642652477e-05, "loss": 0.9922, "step": 15268 }, { "epoch": 2.8375766586136404, "grad_norm": 0.7639787793159485, "learning_rate": 3.900801841166114e-05, "loss": 0.8276, "step": 15269 }, { "epoch": 2.8377624976770117, "grad_norm": 0.8858164548873901, "learning_rate": 3.8996441698566656e-05, "loss": 1.1325, "step": 15270 }, { "epoch": 2.837948336740383, "grad_norm": 0.9063674807548523, "learning_rate": 3.8984866287488465e-05, "loss": 0.972, "step": 15271 }, { "epoch": 2.838134175803754, "grad_norm": 0.8093922138214111, "learning_rate": 3.897329217867359e-05, "loss": 0.7104, "step": 15272 }, { "epoch": 2.838320014867125, "grad_norm": 0.8616423010826111, "learning_rate": 3.896171937236904e-05, "loss": 1.0722, "step": 15273 }, { "epoch": 2.838505853930496, "grad_norm": 1.0238964557647705, "learning_rate": 3.895014786882189e-05, "loss": 1.0427, "step": 15274 }, { "epoch": 2.8386916929938675, "grad_norm": 0.893201470375061, "learning_rate": 3.8938577668279055e-05, "loss": 1.1476, "step": 15275 }, { "epoch": 2.8388775320572384, "grad_norm": 0.9454589486122131, "learning_rate": 3.892700877098749e-05, "loss": 1.1824, "step": 15276 }, { "epoch": 2.8390633711206097, "grad_norm": 1.279524326324463, "learning_rate": 3.89154411771941e-05, "loss": 1.1519, "step": 15277 }, { "epoch": 2.8392492101839806, "grad_norm": 0.8839907050132751, "learning_rate": 3.890387488714582e-05, "loss": 
0.9033, "step": 15278 }, { "epoch": 2.839435049247352, "grad_norm": 0.9178303480148315, "learning_rate": 3.889230990108944e-05, "loss": 1.024, "step": 15279 }, { "epoch": 2.839620888310723, "grad_norm": 0.6783154606819153, "learning_rate": 3.888074621927189e-05, "loss": 0.7864, "step": 15280 }, { "epoch": 2.839806727374094, "grad_norm": 0.8283354640007019, "learning_rate": 3.886918384193992e-05, "loss": 0.9406, "step": 15281 }, { "epoch": 2.839992566437465, "grad_norm": 0.8283498287200928, "learning_rate": 3.8857622769340286e-05, "loss": 0.8384, "step": 15282 }, { "epoch": 2.8401784055008363, "grad_norm": 0.8397684693336487, "learning_rate": 3.884606300171979e-05, "loss": 0.9565, "step": 15283 }, { "epoch": 2.840364244564207, "grad_norm": 1.0575993061065674, "learning_rate": 3.883450453932514e-05, "loss": 1.0169, "step": 15284 }, { "epoch": 2.8405500836275785, "grad_norm": 0.7142122387886047, "learning_rate": 3.8822947382403005e-05, "loss": 0.953, "step": 15285 }, { "epoch": 2.84073592269095, "grad_norm": 0.7820481061935425, "learning_rate": 3.8811391531200105e-05, "loss": 0.9879, "step": 15286 }, { "epoch": 2.8409217617543208, "grad_norm": 0.7898126840591431, "learning_rate": 3.8799836985963047e-05, "loss": 0.9148, "step": 15287 }, { "epoch": 2.8411076008176916, "grad_norm": 0.854949951171875, "learning_rate": 3.8788283746938437e-05, "loss": 0.9617, "step": 15288 }, { "epoch": 2.841293439881063, "grad_norm": 0.854562520980835, "learning_rate": 3.8776731814372836e-05, "loss": 0.8635, "step": 15289 }, { "epoch": 2.8414792789444343, "grad_norm": 0.9364863038063049, "learning_rate": 3.8765181188512835e-05, "loss": 0.858, "step": 15290 }, { "epoch": 2.841665118007805, "grad_norm": 0.7348250150680542, "learning_rate": 3.875363186960499e-05, "loss": 0.5607, "step": 15291 }, { "epoch": 2.8418509570711765, "grad_norm": 0.817834198474884, "learning_rate": 3.8742083857895775e-05, "loss": 0.877, "step": 15292 }, { "epoch": 2.8420367961345474, "grad_norm": 0.9872136116027832, 
"learning_rate": 3.873053715363161e-05, "loss": 1.1492, "step": 15293 }, { "epoch": 2.8422226351979187, "grad_norm": 0.9656583070755005, "learning_rate": 3.871899175705903e-05, "loss": 1.1006, "step": 15294 }, { "epoch": 2.8424084742612896, "grad_norm": 0.7747172713279724, "learning_rate": 3.870744766842441e-05, "loss": 1.0136, "step": 15295 }, { "epoch": 2.842594313324661, "grad_norm": 0.7914901375770569, "learning_rate": 3.869590488797413e-05, "loss": 0.7739, "step": 15296 }, { "epoch": 2.842780152388032, "grad_norm": 0.8331795930862427, "learning_rate": 3.868436341595453e-05, "loss": 0.7103, "step": 15297 }, { "epoch": 2.842965991451403, "grad_norm": 0.7876693606376648, "learning_rate": 3.8672823252612015e-05, "loss": 0.8948, "step": 15298 }, { "epoch": 2.843151830514774, "grad_norm": 0.967292308807373, "learning_rate": 3.866128439819283e-05, "loss": 1.1008, "step": 15299 }, { "epoch": 2.8433376695781454, "grad_norm": 0.772185742855072, "learning_rate": 3.8649746852943234e-05, "loss": 0.9245, "step": 15300 }, { "epoch": 2.8435235086415167, "grad_norm": 0.9624976515769958, "learning_rate": 3.8638210617109546e-05, "loss": 0.8139, "step": 15301 }, { "epoch": 2.8437093477048876, "grad_norm": 0.8755714297294617, "learning_rate": 3.8626675690937934e-05, "loss": 0.7875, "step": 15302 }, { "epoch": 2.8438951867682585, "grad_norm": 3.687150001525879, "learning_rate": 3.8615142074674625e-05, "loss": 1.5541, "step": 15303 }, { "epoch": 2.84408102583163, "grad_norm": 0.9187508821487427, "learning_rate": 3.8603609768565776e-05, "loss": 1.1961, "step": 15304 }, { "epoch": 2.844266864895001, "grad_norm": 0.8098819255828857, "learning_rate": 3.8592078772857465e-05, "loss": 0.8885, "step": 15305 }, { "epoch": 2.844452703958372, "grad_norm": 0.9316312074661255, "learning_rate": 3.85805490877959e-05, "loss": 1.0535, "step": 15306 }, { "epoch": 2.844638543021743, "grad_norm": 0.8622249960899353, "learning_rate": 3.8569020713627126e-05, "loss": 1.0789, "step": 15307 }, { "epoch": 
2.8448243820851142, "grad_norm": 0.9714615345001221, "learning_rate": 3.855749365059716e-05, "loss": 0.8778, "step": 15308 }, { "epoch": 2.8450102211484856, "grad_norm": 1.0000050067901611, "learning_rate": 3.854596789895204e-05, "loss": 1.0138, "step": 15309 }, { "epoch": 2.8451960602118564, "grad_norm": 0.8454744815826416, "learning_rate": 3.8534443458937764e-05, "loss": 0.7292, "step": 15310 }, { "epoch": 2.8453818992752278, "grad_norm": 0.7739028334617615, "learning_rate": 3.852292033080035e-05, "loss": 0.4697, "step": 15311 }, { "epoch": 2.8455677383385987, "grad_norm": 1.3835629224777222, "learning_rate": 3.85113985147857e-05, "loss": 1.4786, "step": 15312 }, { "epoch": 2.84575357740197, "grad_norm": 0.7709473967552185, "learning_rate": 3.84998780111397e-05, "loss": 0.842, "step": 15313 }, { "epoch": 2.845939416465341, "grad_norm": 0.718707263469696, "learning_rate": 3.8488358820108305e-05, "loss": 0.772, "step": 15314 }, { "epoch": 2.846125255528712, "grad_norm": 0.9819291234016418, "learning_rate": 3.847684094193733e-05, "loss": 1.1089, "step": 15315 }, { "epoch": 2.846311094592083, "grad_norm": 0.7201307415962219, "learning_rate": 3.8465324376872604e-05, "loss": 0.8921, "step": 15316 }, { "epoch": 2.8464969336554544, "grad_norm": 0.8110470771789551, "learning_rate": 3.845380912515989e-05, "loss": 1.0143, "step": 15317 }, { "epoch": 2.8466827727188253, "grad_norm": 0.9493751525878906, "learning_rate": 3.844229518704505e-05, "loss": 0.9172, "step": 15318 }, { "epoch": 2.8468686117821966, "grad_norm": 1.0635316371917725, "learning_rate": 3.843078256277377e-05, "loss": 0.8767, "step": 15319 }, { "epoch": 2.847054450845568, "grad_norm": 0.9986346960067749, "learning_rate": 3.841927125259179e-05, "loss": 0.908, "step": 15320 }, { "epoch": 2.847240289908939, "grad_norm": 0.7902888059616089, "learning_rate": 3.840776125674475e-05, "loss": 0.7851, "step": 15321 }, { "epoch": 2.8474261289723097, "grad_norm": 0.8674366474151611, "learning_rate": 
3.8396252575478344e-05, "loss": 0.9204, "step": 15322 }, { "epoch": 2.847611968035681, "grad_norm": 0.8738417029380798, "learning_rate": 3.838474520903825e-05, "loss": 0.9319, "step": 15323 }, { "epoch": 2.8477978070990524, "grad_norm": 0.7828598022460938, "learning_rate": 3.837323915767004e-05, "loss": 0.8096, "step": 15324 }, { "epoch": 2.8479836461624233, "grad_norm": 0.8735978603363037, "learning_rate": 3.836173442161923e-05, "loss": 1.2256, "step": 15325 }, { "epoch": 2.8481694852257946, "grad_norm": 0.7538908123970032, "learning_rate": 3.8350231001131476e-05, "loss": 0.8915, "step": 15326 }, { "epoch": 2.8483553242891655, "grad_norm": 0.7566878795623779, "learning_rate": 3.833872889645225e-05, "loss": 0.9013, "step": 15327 }, { "epoch": 2.848541163352537, "grad_norm": 0.7990183234214783, "learning_rate": 3.832722810782703e-05, "loss": 1.1669, "step": 15328 }, { "epoch": 2.8487270024159077, "grad_norm": 0.8590530157089233, "learning_rate": 3.831572863550126e-05, "loss": 0.9329, "step": 15329 }, { "epoch": 2.848912841479279, "grad_norm": 0.819938600063324, "learning_rate": 3.830423047972044e-05, "loss": 1.0213, "step": 15330 }, { "epoch": 2.84909868054265, "grad_norm": 0.8118523955345154, "learning_rate": 3.8292733640729905e-05, "loss": 0.9333, "step": 15331 }, { "epoch": 2.8492845196060212, "grad_norm": 0.7628287076950073, "learning_rate": 3.828123811877512e-05, "loss": 0.6968, "step": 15332 }, { "epoch": 2.849470358669392, "grad_norm": 0.956183910369873, "learning_rate": 3.826974391410135e-05, "loss": 1.0995, "step": 15333 }, { "epoch": 2.8496561977327635, "grad_norm": 0.7814002633094788, "learning_rate": 3.825825102695402e-05, "loss": 0.957, "step": 15334 }, { "epoch": 2.849842036796135, "grad_norm": 0.9299039244651794, "learning_rate": 3.8246759457578354e-05, "loss": 0.9723, "step": 15335 }, { "epoch": 2.8500278758595057, "grad_norm": 0.7808316946029663, "learning_rate": 3.823526920621963e-05, "loss": 0.7032, "step": 15336 }, { "epoch": 2.8502137149228766, 
"grad_norm": 0.9004837274551392, "learning_rate": 3.822378027312305e-05, "loss": 0.8531, "step": 15337 }, { "epoch": 2.850399553986248, "grad_norm": 1.0717692375183105, "learning_rate": 3.821229265853392e-05, "loss": 0.9096, "step": 15338 }, { "epoch": 2.850585393049619, "grad_norm": 0.9275893568992615, "learning_rate": 3.820080636269737e-05, "loss": 0.965, "step": 15339 }, { "epoch": 2.85077123211299, "grad_norm": 0.7810984253883362, "learning_rate": 3.8189321385858556e-05, "loss": 0.8819, "step": 15340 }, { "epoch": 2.8509570711763614, "grad_norm": 0.8521530032157898, "learning_rate": 3.8177837728262564e-05, "loss": 1.0317, "step": 15341 }, { "epoch": 2.8511429102397323, "grad_norm": 0.9492397904396057, "learning_rate": 3.816635539015454e-05, "loss": 0.8219, "step": 15342 }, { "epoch": 2.8513287493031036, "grad_norm": 0.751935601234436, "learning_rate": 3.815487437177958e-05, "loss": 0.7021, "step": 15343 }, { "epoch": 2.8515145883664745, "grad_norm": 0.7729502320289612, "learning_rate": 3.814339467338269e-05, "loss": 0.8726, "step": 15344 }, { "epoch": 2.851700427429846, "grad_norm": 0.7897372841835022, "learning_rate": 3.813191629520887e-05, "loss": 1.0875, "step": 15345 }, { "epoch": 2.8518862664932167, "grad_norm": 1.0642775297164917, "learning_rate": 3.812043923750316e-05, "loss": 1.1055, "step": 15346 }, { "epoch": 2.852072105556588, "grad_norm": 0.7688366770744324, "learning_rate": 3.810896350051048e-05, "loss": 0.8611, "step": 15347 }, { "epoch": 2.852257944619959, "grad_norm": 0.8489544987678528, "learning_rate": 3.809748908447576e-05, "loss": 0.9513, "step": 15348 }, { "epoch": 2.8524437836833303, "grad_norm": 0.7277908325195312, "learning_rate": 3.8086015989643875e-05, "loss": 0.9195, "step": 15349 }, { "epoch": 2.8526296227467016, "grad_norm": 0.8600225448608398, "learning_rate": 3.8074544216259766e-05, "loss": 0.9291, "step": 15350 }, { "epoch": 2.8528154618100725, "grad_norm": 0.8020973205566406, "learning_rate": 3.806307376456825e-05, "loss": 
0.7507, "step": 15351 }, { "epoch": 2.8530013008734434, "grad_norm": 1.6078366041183472, "learning_rate": 3.8051604634814085e-05, "loss": 1.2141, "step": 15352 }, { "epoch": 2.8531871399368147, "grad_norm": 0.8335413336753845, "learning_rate": 3.8040136827242124e-05, "loss": 0.9712, "step": 15353 }, { "epoch": 2.853372979000186, "grad_norm": 0.8307401537895203, "learning_rate": 3.802867034209715e-05, "loss": 0.8872, "step": 15354 }, { "epoch": 2.853558818063557, "grad_norm": 0.8642200827598572, "learning_rate": 3.801720517962386e-05, "loss": 0.9417, "step": 15355 }, { "epoch": 2.853744657126928, "grad_norm": 0.8002011179924011, "learning_rate": 3.800574134006695e-05, "loss": 0.8183, "step": 15356 }, { "epoch": 2.853930496190299, "grad_norm": 0.8602607250213623, "learning_rate": 3.7994278823671084e-05, "loss": 1.0025, "step": 15357 }, { "epoch": 2.8541163352536705, "grad_norm": 0.9555697441101074, "learning_rate": 3.798281763068096e-05, "loss": 0.7349, "step": 15358 }, { "epoch": 2.8543021743170414, "grad_norm": 0.7865543365478516, "learning_rate": 3.7971357761341154e-05, "loss": 1.0243, "step": 15359 }, { "epoch": 2.8544880133804127, "grad_norm": 0.7438862919807434, "learning_rate": 3.795989921589629e-05, "loss": 0.8489, "step": 15360 }, { "epoch": 2.8546738524437836, "grad_norm": 0.9199408888816833, "learning_rate": 3.794844199459087e-05, "loss": 0.9497, "step": 15361 }, { "epoch": 2.854859691507155, "grad_norm": 0.8037041425704956, "learning_rate": 3.79369860976695e-05, "loss": 0.7723, "step": 15362 }, { "epoch": 2.855045530570526, "grad_norm": 0.9081737995147705, "learning_rate": 3.7925531525376623e-05, "loss": 0.9376, "step": 15363 }, { "epoch": 2.855231369633897, "grad_norm": 0.8575348258018494, "learning_rate": 3.7914078277956786e-05, "loss": 1.0563, "step": 15364 }, { "epoch": 2.855417208697268, "grad_norm": 0.8420919179916382, "learning_rate": 3.790262635565438e-05, "loss": 0.9279, "step": 15365 }, { "epoch": 2.8556030477606393, "grad_norm": 
0.8386310935020447, "learning_rate": 3.7891175758713885e-05, "loss": 0.7817, "step": 15366 }, { "epoch": 2.85578888682401, "grad_norm": 1.0895872116088867, "learning_rate": 3.7879726487379655e-05, "loss": 1.1757, "step": 15367 }, { "epoch": 2.8559747258873815, "grad_norm": 0.6934189796447754, "learning_rate": 3.786827854189606e-05, "loss": 0.6941, "step": 15368 }, { "epoch": 2.856160564950753, "grad_norm": 0.744887113571167, "learning_rate": 3.785683192250741e-05, "loss": 0.8822, "step": 15369 }, { "epoch": 2.8563464040141238, "grad_norm": 0.8233052492141724, "learning_rate": 3.784538662945807e-05, "loss": 1.0015, "step": 15370 }, { "epoch": 2.8565322430774946, "grad_norm": 0.9672362208366394, "learning_rate": 3.783394266299228e-05, "loss": 0.9964, "step": 15371 }, { "epoch": 2.856718082140866, "grad_norm": 0.8399665951728821, "learning_rate": 3.782250002335431e-05, "loss": 0.8387, "step": 15372 }, { "epoch": 2.8569039212042373, "grad_norm": 0.9578357934951782, "learning_rate": 3.7811058710788314e-05, "loss": 1.1728, "step": 15373 }, { "epoch": 2.857089760267608, "grad_norm": 0.8220307230949402, "learning_rate": 3.779961872553862e-05, "loss": 0.9484, "step": 15374 }, { "epoch": 2.8572755993309795, "grad_norm": 0.84305739402771, "learning_rate": 3.778818006784931e-05, "loss": 1.0166, "step": 15375 }, { "epoch": 2.8574614383943504, "grad_norm": 0.8652834296226501, "learning_rate": 3.7776742737964555e-05, "loss": 1.0987, "step": 15376 }, { "epoch": 2.8576472774577217, "grad_norm": 0.8326297402381897, "learning_rate": 3.7765306736128395e-05, "loss": 0.9049, "step": 15377 }, { "epoch": 2.8578331165210926, "grad_norm": 0.73166823387146, "learning_rate": 3.7753872062585e-05, "loss": 0.9023, "step": 15378 }, { "epoch": 2.858018955584464, "grad_norm": 0.9456663131713867, "learning_rate": 3.77424387175784e-05, "loss": 0.9886, "step": 15379 }, { "epoch": 2.858204794647835, "grad_norm": 0.9489964246749878, "learning_rate": 3.77310067013526e-05, "loss": 0.7923, "step": 15380 }, 
{ "epoch": 2.858390633711206, "grad_norm": 0.8790481686592102, "learning_rate": 3.771957601415157e-05, "loss": 0.9042, "step": 15381 }, { "epoch": 2.858576472774577, "grad_norm": 0.7280595302581787, "learning_rate": 3.7708146656219346e-05, "loss": 1.0003, "step": 15382 }, { "epoch": 2.8587623118379484, "grad_norm": 0.8281516432762146, "learning_rate": 3.769671862779983e-05, "loss": 0.9558, "step": 15383 }, { "epoch": 2.8589481509013197, "grad_norm": 0.8291042447090149, "learning_rate": 3.768529192913691e-05, "loss": 0.943, "step": 15384 }, { "epoch": 2.8591339899646906, "grad_norm": 0.8003760576248169, "learning_rate": 3.7673866560474504e-05, "loss": 0.8982, "step": 15385 }, { "epoch": 2.8593198290280615, "grad_norm": 0.716830313205719, "learning_rate": 3.766244252205649e-05, "loss": 0.8642, "step": 15386 }, { "epoch": 2.859505668091433, "grad_norm": 0.9099149703979492, "learning_rate": 3.7651019814126654e-05, "loss": 0.8758, "step": 15387 }, { "epoch": 2.859691507154804, "grad_norm": 0.8813703060150146, "learning_rate": 3.763959843692881e-05, "loss": 0.9723, "step": 15388 }, { "epoch": 2.859877346218175, "grad_norm": 0.6747955679893494, "learning_rate": 3.762817839070669e-05, "loss": 0.6139, "step": 15389 }, { "epoch": 2.860063185281546, "grad_norm": 0.8541521430015564, "learning_rate": 3.7616759675704105e-05, "loss": 1.0557, "step": 15390 }, { "epoch": 2.8602490243449172, "grad_norm": 0.7706393599510193, "learning_rate": 3.7605342292164735e-05, "loss": 0.7235, "step": 15391 }, { "epoch": 2.8604348634082886, "grad_norm": 0.8833755254745483, "learning_rate": 3.7593926240332253e-05, "loss": 1.1436, "step": 15392 }, { "epoch": 2.8606207024716594, "grad_norm": 0.8103173971176147, "learning_rate": 3.758251152045029e-05, "loss": 0.7348, "step": 15393 }, { "epoch": 2.860806541535031, "grad_norm": 0.7386962175369263, "learning_rate": 3.7571098132762506e-05, "loss": 0.7346, "step": 15394 }, { "epoch": 2.8609923805984017, "grad_norm": 0.9515390396118164, "learning_rate": 
3.7559686077512534e-05, "loss": 0.9968, "step": 15395 }, { "epoch": 2.861178219661773, "grad_norm": 0.793876588344574, "learning_rate": 3.7548275354943904e-05, "loss": 0.6786, "step": 15396 }, { "epoch": 2.861364058725144, "grad_norm": 0.8353383541107178, "learning_rate": 3.753686596530014e-05, "loss": 1.0705, "step": 15397 }, { "epoch": 2.861549897788515, "grad_norm": 0.7873873114585876, "learning_rate": 3.7525457908824815e-05, "loss": 0.7793, "step": 15398 }, { "epoch": 2.8617357368518865, "grad_norm": 0.8067834973335266, "learning_rate": 3.751405118576138e-05, "loss": 1.2097, "step": 15399 }, { "epoch": 2.8619215759152574, "grad_norm": 0.7366782426834106, "learning_rate": 3.7502645796353284e-05, "loss": 0.974, "step": 15400 }, { "epoch": 2.8621074149786283, "grad_norm": 0.7902079820632935, "learning_rate": 3.749124174084394e-05, "loss": 1.0888, "step": 15401 }, { "epoch": 2.8622932540419996, "grad_norm": 0.8420895338058472, "learning_rate": 3.7479839019476804e-05, "loss": 0.8503, "step": 15402 }, { "epoch": 2.862479093105371, "grad_norm": 1.4798964262008667, "learning_rate": 3.746843763249521e-05, "loss": 1.1554, "step": 15403 }, { "epoch": 2.862664932168742, "grad_norm": 0.8021959066390991, "learning_rate": 3.745703758014247e-05, "loss": 0.8877, "step": 15404 }, { "epoch": 2.8628507712321127, "grad_norm": 0.9752201437950134, "learning_rate": 3.744563886266195e-05, "loss": 0.833, "step": 15405 }, { "epoch": 2.863036610295484, "grad_norm": 0.8889212608337402, "learning_rate": 3.743424148029694e-05, "loss": 0.8776, "step": 15406 }, { "epoch": 2.8632224493588554, "grad_norm": 0.8550772070884705, "learning_rate": 3.742284543329068e-05, "loss": 0.8245, "step": 15407 }, { "epoch": 2.8634082884222263, "grad_norm": 0.8788866996765137, "learning_rate": 3.74114507218864e-05, "loss": 1.0853, "step": 15408 }, { "epoch": 2.8635941274855976, "grad_norm": 0.8350381255149841, "learning_rate": 3.7400057346327266e-05, "loss": 1.0919, "step": 15409 }, { "epoch": 
2.8637799665489685, "grad_norm": 0.8196921944618225, "learning_rate": 3.738866530685652e-05, "loss": 0.9017, "step": 15410 }, { "epoch": 2.86396580561234, "grad_norm": 0.7616583108901978, "learning_rate": 3.737727460371726e-05, "loss": 0.9222, "step": 15411 }, { "epoch": 2.8641516446757107, "grad_norm": 0.8864631652832031, "learning_rate": 3.736588523715261e-05, "loss": 0.9886, "step": 15412 }, { "epoch": 2.864337483739082, "grad_norm": 0.7534281015396118, "learning_rate": 3.7354497207405614e-05, "loss": 0.965, "step": 15413 }, { "epoch": 2.864523322802453, "grad_norm": 0.7649170756340027, "learning_rate": 3.734311051471942e-05, "loss": 0.9695, "step": 15414 }, { "epoch": 2.8647091618658242, "grad_norm": 0.848669171333313, "learning_rate": 3.733172515933697e-05, "loss": 1.0583, "step": 15415 }, { "epoch": 2.864895000929195, "grad_norm": 0.858026921749115, "learning_rate": 3.7320341141501334e-05, "loss": 0.6962, "step": 15416 }, { "epoch": 2.8650808399925665, "grad_norm": 0.8865530490875244, "learning_rate": 3.730895846145542e-05, "loss": 0.8461, "step": 15417 }, { "epoch": 2.865266679055938, "grad_norm": 0.7353289723396301, "learning_rate": 3.729757711944224e-05, "loss": 0.6562, "step": 15418 }, { "epoch": 2.8654525181193087, "grad_norm": 0.9045048952102661, "learning_rate": 3.728619711570468e-05, "loss": 1.0692, "step": 15419 }, { "epoch": 2.8656383571826796, "grad_norm": 0.9202983975410461, "learning_rate": 3.727481845048562e-05, "loss": 0.872, "step": 15420 }, { "epoch": 2.865824196246051, "grad_norm": 0.7834516167640686, "learning_rate": 3.7263441124027875e-05, "loss": 0.9966, "step": 15421 }, { "epoch": 2.866010035309422, "grad_norm": 0.8796038031578064, "learning_rate": 3.725206513657435e-05, "loss": 0.9245, "step": 15422 }, { "epoch": 2.866195874372793, "grad_norm": 1.0300281047821045, "learning_rate": 3.7240690488367833e-05, "loss": 0.9943, "step": 15423 }, { "epoch": 2.8663817134361644, "grad_norm": 0.7049465179443359, "learning_rate": 
3.722931717965103e-05, "loss": 0.7929, "step": 15424 }, { "epoch": 2.8665675524995353, "grad_norm": 0.9132517576217651, "learning_rate": 3.721794521066678e-05, "loss": 1.234, "step": 15425 }, { "epoch": 2.8667533915629066, "grad_norm": 0.8524289727210999, "learning_rate": 3.720657458165771e-05, "loss": 1.0903, "step": 15426 }, { "epoch": 2.8669392306262775, "grad_norm": 0.8202354907989502, "learning_rate": 3.719520529286659e-05, "loss": 0.9081, "step": 15427 }, { "epoch": 2.867125069689649, "grad_norm": 0.9034220576286316, "learning_rate": 3.718383734453602e-05, "loss": 1.0881, "step": 15428 }, { "epoch": 2.8673109087530197, "grad_norm": 0.7171639204025269, "learning_rate": 3.7172470736908616e-05, "loss": 1.0301, "step": 15429 }, { "epoch": 2.867496747816391, "grad_norm": 0.9133208394050598, "learning_rate": 3.716110547022704e-05, "loss": 0.9448, "step": 15430 }, { "epoch": 2.867682586879762, "grad_norm": 0.9423163533210754, "learning_rate": 3.714974154473384e-05, "loss": 1.2942, "step": 15431 }, { "epoch": 2.8678684259431333, "grad_norm": 0.8688696622848511, "learning_rate": 3.713837896067155e-05, "loss": 0.8479, "step": 15432 }, { "epoch": 2.8680542650065046, "grad_norm": 0.8275406360626221, "learning_rate": 3.712701771828265e-05, "loss": 0.8357, "step": 15433 }, { "epoch": 2.8682401040698755, "grad_norm": 0.9139199256896973, "learning_rate": 3.7115657817809704e-05, "loss": 1.305, "step": 15434 }, { "epoch": 2.8684259431332464, "grad_norm": 0.9804993271827698, "learning_rate": 3.710429925949511e-05, "loss": 0.6615, "step": 15435 }, { "epoch": 2.8686117821966177, "grad_norm": 0.821039080619812, "learning_rate": 3.709294204358129e-05, "loss": 0.9444, "step": 15436 }, { "epoch": 2.868797621259989, "grad_norm": 0.8548663854598999, "learning_rate": 3.708158617031068e-05, "loss": 0.9661, "step": 15437 }, { "epoch": 2.86898346032336, "grad_norm": 0.7277716994285583, "learning_rate": 3.7070231639925654e-05, "loss": 0.8455, "step": 15438 }, { "epoch": 2.869169299386731, 
"grad_norm": 0.8620544672012329, "learning_rate": 3.705887845266857e-05, "loss": 0.9819, "step": 15439 }, { "epoch": 2.869355138450102, "grad_norm": 0.7607631087303162, "learning_rate": 3.7047526608781693e-05, "loss": 0.952, "step": 15440 }, { "epoch": 2.8695409775134735, "grad_norm": 0.8347303867340088, "learning_rate": 3.703617610850729e-05, "loss": 0.8776, "step": 15441 }, { "epoch": 2.8697268165768444, "grad_norm": 0.9303619265556335, "learning_rate": 3.702482695208771e-05, "loss": 0.9928, "step": 15442 }, { "epoch": 2.8699126556402157, "grad_norm": 0.9514495730400085, "learning_rate": 3.7013479139765115e-05, "loss": 1.0331, "step": 15443 }, { "epoch": 2.8700984947035866, "grad_norm": 0.6765170693397522, "learning_rate": 3.70021326717817e-05, "loss": 0.7428, "step": 15444 }, { "epoch": 2.870284333766958, "grad_norm": 0.6782026886940002, "learning_rate": 3.699078754837967e-05, "loss": 0.6353, "step": 15445 }, { "epoch": 2.870470172830329, "grad_norm": 0.769478976726532, "learning_rate": 3.697944376980116e-05, "loss": 1.0179, "step": 15446 }, { "epoch": 2.8706560118937, "grad_norm": 0.7449698448181152, "learning_rate": 3.696810133628824e-05, "loss": 0.7441, "step": 15447 }, { "epoch": 2.8708418509570715, "grad_norm": 0.7663121819496155, "learning_rate": 3.6956760248083064e-05, "loss": 0.9675, "step": 15448 }, { "epoch": 2.8710276900204423, "grad_norm": 0.8151096105575562, "learning_rate": 3.694542050542763e-05, "loss": 0.9595, "step": 15449 }, { "epoch": 2.871213529083813, "grad_norm": 0.9838616847991943, "learning_rate": 3.6934082108564016e-05, "loss": 0.8339, "step": 15450 }, { "epoch": 2.8713993681471845, "grad_norm": 0.8464575409889221, "learning_rate": 3.692274505773419e-05, "loss": 1.0164, "step": 15451 }, { "epoch": 2.871585207210556, "grad_norm": 0.8795459270477295, "learning_rate": 3.691140935318012e-05, "loss": 0.9412, "step": 15452 }, { "epoch": 2.8717710462739268, "grad_norm": 0.864625871181488, "learning_rate": 3.690007499514374e-05, "loss": 0.7772, 
"step": 15453 }, { "epoch": 2.8719568853372976, "grad_norm": 0.8079337477684021, "learning_rate": 3.688874198386699e-05, "loss": 1.065, "step": 15454 }, { "epoch": 2.872142724400669, "grad_norm": 0.7533987164497375, "learning_rate": 3.687741031959175e-05, "loss": 0.942, "step": 15455 }, { "epoch": 2.8723285634640403, "grad_norm": 0.7962412238121033, "learning_rate": 3.686608000255982e-05, "loss": 1.1564, "step": 15456 }, { "epoch": 2.872514402527411, "grad_norm": 0.8227654099464417, "learning_rate": 3.685475103301308e-05, "loss": 0.8466, "step": 15457 }, { "epoch": 2.8727002415907825, "grad_norm": 0.8999118208885193, "learning_rate": 3.684342341119335e-05, "loss": 0.938, "step": 15458 }, { "epoch": 2.8728860806541534, "grad_norm": 1.2973206043243408, "learning_rate": 3.683209713734237e-05, "loss": 1.1469, "step": 15459 }, { "epoch": 2.8730719197175247, "grad_norm": 1.1290135383605957, "learning_rate": 3.682077221170186e-05, "loss": 0.9415, "step": 15460 }, { "epoch": 2.8732577587808956, "grad_norm": 0.8910056948661804, "learning_rate": 3.680944863451353e-05, "loss": 1.1199, "step": 15461 }, { "epoch": 2.873443597844267, "grad_norm": 0.7031981348991394, "learning_rate": 3.6798126406019104e-05, "loss": 0.7767, "step": 15462 }, { "epoch": 2.873629436907638, "grad_norm": 0.757603645324707, "learning_rate": 3.678680552646021e-05, "loss": 0.8649, "step": 15463 }, { "epoch": 2.873815275971009, "grad_norm": 0.8971279263496399, "learning_rate": 3.677548599607845e-05, "loss": 0.9413, "step": 15464 }, { "epoch": 2.87400111503438, "grad_norm": 0.867313027381897, "learning_rate": 3.676416781511547e-05, "loss": 0.8164, "step": 15465 }, { "epoch": 2.8741869540977514, "grad_norm": 0.9275748133659363, "learning_rate": 3.675285098381281e-05, "loss": 0.9774, "step": 15466 }, { "epoch": 2.8743727931611227, "grad_norm": 0.8772784471511841, "learning_rate": 3.674153550241202e-05, "loss": 0.726, "step": 15467 }, { "epoch": 2.8745586322244936, "grad_norm": 0.8420283794403076, 
"learning_rate": 3.673022137115455e-05, "loss": 1.0059, "step": 15468 }, { "epoch": 2.8747444712878645, "grad_norm": 0.8634923696517944, "learning_rate": 3.671890859028194e-05, "loss": 0.7513, "step": 15469 }, { "epoch": 2.874930310351236, "grad_norm": 0.9102967381477356, "learning_rate": 3.6707597160035667e-05, "loss": 0.9892, "step": 15470 }, { "epoch": 2.875116149414607, "grad_norm": 0.8464215397834778, "learning_rate": 3.669628708065711e-05, "loss": 1.0069, "step": 15471 }, { "epoch": 2.875301988477978, "grad_norm": 0.8910304307937622, "learning_rate": 3.668497835238768e-05, "loss": 1.0988, "step": 15472 }, { "epoch": 2.8754878275413494, "grad_norm": 0.874662458896637, "learning_rate": 3.667367097546869e-05, "loss": 0.8517, "step": 15473 }, { "epoch": 2.8756736666047202, "grad_norm": 0.841926634311676, "learning_rate": 3.666236495014156e-05, "loss": 0.7573, "step": 15474 }, { "epoch": 2.8758595056680916, "grad_norm": 0.7800859212875366, "learning_rate": 3.665106027664755e-05, "loss": 0.9069, "step": 15475 }, { "epoch": 2.8760453447314624, "grad_norm": 0.7146313190460205, "learning_rate": 3.6639756955227936e-05, "loss": 0.8104, "step": 15476 }, { "epoch": 2.876231183794834, "grad_norm": 0.6743584275245667, "learning_rate": 3.6628454986123995e-05, "loss": 0.7703, "step": 15477 }, { "epoch": 2.8764170228582047, "grad_norm": 0.878092885017395, "learning_rate": 3.66171543695769e-05, "loss": 0.9727, "step": 15478 }, { "epoch": 2.876602861921576, "grad_norm": 0.8362070322036743, "learning_rate": 3.6605855105827915e-05, "loss": 1.1399, "step": 15479 }, { "epoch": 2.876788700984947, "grad_norm": 0.9773759245872498, "learning_rate": 3.6594557195118165e-05, "loss": 0.9773, "step": 15480 }, { "epoch": 2.876974540048318, "grad_norm": 0.8178821802139282, "learning_rate": 3.6583260637688744e-05, "loss": 1.218, "step": 15481 }, { "epoch": 2.8771603791116895, "grad_norm": 0.6931003928184509, "learning_rate": 3.657196543378083e-05, "loss": 0.6433, "step": 15482 }, { "epoch": 
2.8773462181750604, "grad_norm": 0.8900542855262756, "learning_rate": 3.6560671583635467e-05, "loss": 0.7365, "step": 15483 }, { "epoch": 2.8775320572384313, "grad_norm": 0.8439195156097412, "learning_rate": 3.6549379087493674e-05, "loss": 0.8774, "step": 15484 }, { "epoch": 2.8777178963018026, "grad_norm": 0.7627041935920715, "learning_rate": 3.653808794559653e-05, "loss": 0.8925, "step": 15485 }, { "epoch": 2.877903735365174, "grad_norm": 0.8097158074378967, "learning_rate": 3.6526798158184994e-05, "loss": 1.0422, "step": 15486 }, { "epoch": 2.878089574428545, "grad_norm": 0.7690940499305725, "learning_rate": 3.651550972550003e-05, "loss": 0.8416, "step": 15487 }, { "epoch": 2.8782754134919157, "grad_norm": 0.9093706607818604, "learning_rate": 3.650422264778253e-05, "loss": 0.9186, "step": 15488 }, { "epoch": 2.878461252555287, "grad_norm": 0.8120034337043762, "learning_rate": 3.649293692527344e-05, "loss": 0.949, "step": 15489 }, { "epoch": 2.8786470916186584, "grad_norm": 0.9720838069915771, "learning_rate": 3.6481652558213654e-05, "loss": 0.9473, "step": 15490 }, { "epoch": 2.8788329306820293, "grad_norm": 0.7634152173995972, "learning_rate": 3.647036954684399e-05, "loss": 0.9627, "step": 15491 }, { "epoch": 2.8790187697454006, "grad_norm": 0.7963048219680786, "learning_rate": 3.645908789140527e-05, "loss": 0.8999, "step": 15492 }, { "epoch": 2.8792046088087715, "grad_norm": 0.798642098903656, "learning_rate": 3.644780759213825e-05, "loss": 0.8625, "step": 15493 }, { "epoch": 2.879390447872143, "grad_norm": 0.859175443649292, "learning_rate": 3.6436528649283754e-05, "loss": 0.9735, "step": 15494 }, { "epoch": 2.8795762869355137, "grad_norm": 0.9950864315032959, "learning_rate": 3.6425251063082465e-05, "loss": 0.956, "step": 15495 }, { "epoch": 2.879762125998885, "grad_norm": 0.7690659761428833, "learning_rate": 3.6413974833775055e-05, "loss": 0.8988, "step": 15496 }, { "epoch": 2.879947965062256, "grad_norm": 1.670119285583496, "learning_rate": 
3.6402699961602285e-05, "loss": 1.1537, "step": 15497 }, { "epoch": 2.8801338041256273, "grad_norm": 0.7463253140449524, "learning_rate": 3.639142644680473e-05, "loss": 0.6221, "step": 15498 }, { "epoch": 2.880319643188998, "grad_norm": 0.7164505124092102, "learning_rate": 3.6380154289623e-05, "loss": 0.97, "step": 15499 }, { "epoch": 2.8805054822523695, "grad_norm": 0.8878457546234131, "learning_rate": 3.6368883490297734e-05, "loss": 0.9194, "step": 15500 }, { "epoch": 2.880691321315741, "grad_norm": 0.733720600605011, "learning_rate": 3.635761404906941e-05, "loss": 0.7988, "step": 15501 }, { "epoch": 2.8808771603791117, "grad_norm": 0.839289665222168, "learning_rate": 3.634634596617864e-05, "loss": 0.8863, "step": 15502 }, { "epoch": 2.8810629994424826, "grad_norm": 0.6451913118362427, "learning_rate": 3.633507924186588e-05, "loss": 0.6838, "step": 15503 }, { "epoch": 2.881248838505854, "grad_norm": 0.7698342204093933, "learning_rate": 3.632381387637157e-05, "loss": 0.7794, "step": 15504 }, { "epoch": 2.8814346775692252, "grad_norm": 0.6710625886917114, "learning_rate": 3.6312549869936206e-05, "loss": 0.735, "step": 15505 }, { "epoch": 2.881620516632596, "grad_norm": 0.8364967107772827, "learning_rate": 3.630128722280019e-05, "loss": 0.7442, "step": 15506 }, { "epoch": 2.8818063556959674, "grad_norm": 0.6898694634437561, "learning_rate": 3.629002593520386e-05, "loss": 0.8518, "step": 15507 }, { "epoch": 2.8819921947593383, "grad_norm": 0.9124313592910767, "learning_rate": 3.627876600738759e-05, "loss": 0.8588, "step": 15508 }, { "epoch": 2.8821780338227097, "grad_norm": 0.8680217266082764, "learning_rate": 3.626750743959173e-05, "loss": 0.9466, "step": 15509 }, { "epoch": 2.8823638728860805, "grad_norm": 0.8426185250282288, "learning_rate": 3.625625023205652e-05, "loss": 0.8538, "step": 15510 }, { "epoch": 2.882549711949452, "grad_norm": 0.9768823385238647, "learning_rate": 3.624499438502229e-05, "loss": 0.8646, "step": 15511 }, { "epoch": 2.8827355510128228, 
"grad_norm": 0.7641948461532593, "learning_rate": 3.623373989872926e-05, "loss": 0.8495, "step": 15512 }, { "epoch": 2.882921390076194, "grad_norm": 0.8565607666969299, "learning_rate": 3.6222486773417574e-05, "loss": 0.9625, "step": 15513 }, { "epoch": 2.883107229139565, "grad_norm": 0.7964957356452942, "learning_rate": 3.6211235009327505e-05, "loss": 0.9454, "step": 15514 }, { "epoch": 2.8832930682029363, "grad_norm": 0.8411273956298828, "learning_rate": 3.6199984606699155e-05, "loss": 0.7864, "step": 15515 }, { "epoch": 2.8834789072663076, "grad_norm": 1.5076212882995605, "learning_rate": 3.618873556577261e-05, "loss": 1.1191, "step": 15516 }, { "epoch": 2.8836647463296785, "grad_norm": 0.8100115656852722, "learning_rate": 3.6177487886788054e-05, "loss": 0.8513, "step": 15517 }, { "epoch": 2.8838505853930494, "grad_norm": 0.8636518716812134, "learning_rate": 3.6166241569985484e-05, "loss": 0.8855, "step": 15518 }, { "epoch": 2.8840364244564207, "grad_norm": 0.7322862148284912, "learning_rate": 3.615499661560493e-05, "loss": 0.9125, "step": 15519 }, { "epoch": 2.884222263519792, "grad_norm": 0.7911383509635925, "learning_rate": 3.614375302388639e-05, "loss": 1.0129, "step": 15520 }, { "epoch": 2.884408102583163, "grad_norm": 0.9409505724906921, "learning_rate": 3.613251079506986e-05, "loss": 0.7448, "step": 15521 }, { "epoch": 2.8845939416465343, "grad_norm": 0.8631063103675842, "learning_rate": 3.612126992939532e-05, "loss": 0.7665, "step": 15522 }, { "epoch": 2.884779780709905, "grad_norm": 0.7590117454528809, "learning_rate": 3.611003042710266e-05, "loss": 1.132, "step": 15523 }, { "epoch": 2.8849656197732765, "grad_norm": 1.4974132776260376, "learning_rate": 3.609879228843171e-05, "loss": 1.4162, "step": 15524 }, { "epoch": 2.8851514588366474, "grad_norm": 0.9238195419311523, "learning_rate": 3.608755551362243e-05, "loss": 0.8645, "step": 15525 }, { "epoch": 2.8853372979000187, "grad_norm": 0.7798268795013428, "learning_rate": 3.6076320102914585e-05, "loss": 
0.8418, "step": 15526 }, { "epoch": 2.8855231369633896, "grad_norm": 0.9914414286613464, "learning_rate": 3.6065086056548004e-05, "loss": 1.0875, "step": 15527 }, { "epoch": 2.885708976026761, "grad_norm": 0.885212779045105, "learning_rate": 3.6053853374762404e-05, "loss": 1.2031, "step": 15528 }, { "epoch": 2.885894815090132, "grad_norm": 0.7570885419845581, "learning_rate": 3.6042622057797605e-05, "loss": 0.7165, "step": 15529 }, { "epoch": 2.886080654153503, "grad_norm": 0.8846122026443481, "learning_rate": 3.603139210589327e-05, "loss": 1.0756, "step": 15530 }, { "epoch": 2.8862664932168745, "grad_norm": 0.8745748996734619, "learning_rate": 3.6020163519289076e-05, "loss": 0.9672, "step": 15531 }, { "epoch": 2.8864523322802453, "grad_norm": 0.6768541932106018, "learning_rate": 3.600893629822473e-05, "loss": 0.8916, "step": 15532 }, { "epoch": 2.886638171343616, "grad_norm": 0.9766746163368225, "learning_rate": 3.599771044293978e-05, "loss": 0.5867, "step": 15533 }, { "epoch": 2.8868240104069876, "grad_norm": 0.8952239751815796, "learning_rate": 3.598648595367392e-05, "loss": 0.9723, "step": 15534 }, { "epoch": 2.887009849470359, "grad_norm": 0.7633703351020813, "learning_rate": 3.5975262830666655e-05, "loss": 1.0225, "step": 15535 }, { "epoch": 2.8871956885337298, "grad_norm": 0.6744645833969116, "learning_rate": 3.596404107415749e-05, "loss": 0.7406, "step": 15536 }, { "epoch": 2.8873815275971006, "grad_norm": 1.0360220670700073, "learning_rate": 3.5952820684386034e-05, "loss": 1.0136, "step": 15537 }, { "epoch": 2.887567366660472, "grad_norm": 0.8803971409797668, "learning_rate": 3.59416016615917e-05, "loss": 1.083, "step": 15538 }, { "epoch": 2.8877532057238433, "grad_norm": 0.8622626066207886, "learning_rate": 3.593038400601395e-05, "loss": 1.179, "step": 15539 }, { "epoch": 2.887939044787214, "grad_norm": 0.8891432285308838, "learning_rate": 3.591916771789218e-05, "loss": 0.8029, "step": 15540 }, { "epoch": 2.8881248838505855, "grad_norm": 
0.7373136281967163, "learning_rate": 3.59079527974658e-05, "loss": 0.7517, "step": 15541 }, { "epoch": 2.8883107229139564, "grad_norm": 0.9604713320732117, "learning_rate": 3.589673924497421e-05, "loss": 0.9143, "step": 15542 }, { "epoch": 2.8884965619773277, "grad_norm": 0.9270872473716736, "learning_rate": 3.588552706065672e-05, "loss": 0.9747, "step": 15543 }, { "epoch": 2.8886824010406986, "grad_norm": 0.7779958844184875, "learning_rate": 3.58743162447526e-05, "loss": 0.8861, "step": 15544 }, { "epoch": 2.88886824010407, "grad_norm": 0.78629070520401, "learning_rate": 3.5863106797501187e-05, "loss": 0.745, "step": 15545 }, { "epoch": 2.889054079167441, "grad_norm": 1.6500821113586426, "learning_rate": 3.5851898719141695e-05, "loss": 1.2476, "step": 15546 }, { "epoch": 2.889239918230812, "grad_norm": 0.9943754076957703, "learning_rate": 3.5840692009913344e-05, "loss": 0.9949, "step": 15547 }, { "epoch": 2.889425757294183, "grad_norm": 0.8301264643669128, "learning_rate": 3.582948667005529e-05, "loss": 1.0129, "step": 15548 }, { "epoch": 2.8896115963575544, "grad_norm": 0.7343300580978394, "learning_rate": 3.581828269980675e-05, "loss": 0.446, "step": 15549 }, { "epoch": 2.8897974354209257, "grad_norm": 0.7584952712059021, "learning_rate": 3.580708009940682e-05, "loss": 0.9541, "step": 15550 }, { "epoch": 2.8899832744842966, "grad_norm": 0.7859680652618408, "learning_rate": 3.579587886909461e-05, "loss": 0.9432, "step": 15551 }, { "epoch": 2.8901691135476675, "grad_norm": 1.3147470951080322, "learning_rate": 3.5784679009109154e-05, "loss": 0.6812, "step": 15552 }, { "epoch": 2.890354952611039, "grad_norm": 0.7729043364524841, "learning_rate": 3.577348051968954e-05, "loss": 0.8775, "step": 15553 }, { "epoch": 2.89054079167441, "grad_norm": 0.8663438558578491, "learning_rate": 3.576228340107478e-05, "loss": 0.7809, "step": 15554 }, { "epoch": 2.890726630737781, "grad_norm": 0.8728376030921936, "learning_rate": 3.575108765350386e-05, "loss": 1.004, "step": 15555 }, 
{ "epoch": 2.8909124698011524, "grad_norm": 0.7745139598846436, "learning_rate": 3.5739893277215676e-05, "loss": 0.803, "step": 15556 }, { "epoch": 2.8910983088645232, "grad_norm": 0.7463011741638184, "learning_rate": 3.5728700272449234e-05, "loss": 0.8841, "step": 15557 }, { "epoch": 2.8912841479278946, "grad_norm": 1.0205782651901245, "learning_rate": 3.5717508639443387e-05, "loss": 0.9652, "step": 15558 }, { "epoch": 2.8914699869912655, "grad_norm": 0.8964250683784485, "learning_rate": 3.570631837843701e-05, "loss": 0.9396, "step": 15559 }, { "epoch": 2.891655826054637, "grad_norm": 0.7447481751441956, "learning_rate": 3.569512948966889e-05, "loss": 0.8611, "step": 15560 }, { "epoch": 2.8918416651180077, "grad_norm": 0.8456870913505554, "learning_rate": 3.5683941973377924e-05, "loss": 0.9574, "step": 15561 }, { "epoch": 2.892027504181379, "grad_norm": 0.7943773865699768, "learning_rate": 3.5672755829802805e-05, "loss": 0.959, "step": 15562 }, { "epoch": 2.89221334324475, "grad_norm": 1.0943312644958496, "learning_rate": 3.5661571059182366e-05, "loss": 1.127, "step": 15563 }, { "epoch": 2.892399182308121, "grad_norm": 0.7443644404411316, "learning_rate": 3.565038766175525e-05, "loss": 0.728, "step": 15564 }, { "epoch": 2.8925850213714925, "grad_norm": 0.8033445477485657, "learning_rate": 3.56392056377602e-05, "loss": 0.6451, "step": 15565 }, { "epoch": 2.8927708604348634, "grad_norm": 0.8828470706939697, "learning_rate": 3.562802498743587e-05, "loss": 0.8077, "step": 15566 }, { "epoch": 2.8929566994982343, "grad_norm": 0.7708657383918762, "learning_rate": 3.561684571102087e-05, "loss": 0.8274, "step": 15567 }, { "epoch": 2.8931425385616056, "grad_norm": 0.8346666693687439, "learning_rate": 3.560566780875379e-05, "loss": 1.02, "step": 15568 }, { "epoch": 2.893328377624977, "grad_norm": 0.8248069882392883, "learning_rate": 3.5594491280873254e-05, "loss": 1.0995, "step": 15569 }, { "epoch": 2.893514216688348, "grad_norm": 0.7798810005187988, "learning_rate": 
3.5583316127617784e-05, "loss": 0.9831, "step": 15570 }, { "epoch": 2.8937000557517187, "grad_norm": 0.8893752694129944, "learning_rate": 3.5572142349225876e-05, "loss": 0.9212, "step": 15571 }, { "epoch": 2.89388589481509, "grad_norm": 0.8199259042739868, "learning_rate": 3.556096994593599e-05, "loss": 0.6263, "step": 15572 }, { "epoch": 2.8940717338784614, "grad_norm": 0.8772500157356262, "learning_rate": 3.554979891798663e-05, "loss": 1.0401, "step": 15573 }, { "epoch": 2.8942575729418323, "grad_norm": 1.0153173208236694, "learning_rate": 3.553862926561625e-05, "loss": 0.7838, "step": 15574 }, { "epoch": 2.8944434120052036, "grad_norm": 0.8423423171043396, "learning_rate": 3.55274609890632e-05, "loss": 0.9374, "step": 15575 }, { "epoch": 2.8946292510685745, "grad_norm": 1.033982515335083, "learning_rate": 3.5516294088565813e-05, "loss": 1.1189, "step": 15576 }, { "epoch": 2.894815090131946, "grad_norm": 0.791053056716919, "learning_rate": 3.5505128564362525e-05, "loss": 0.9697, "step": 15577 }, { "epoch": 2.8950009291953167, "grad_norm": 0.827377438545227, "learning_rate": 3.5493964416691574e-05, "loss": 0.9403, "step": 15578 }, { "epoch": 2.895186768258688, "grad_norm": 0.7587912678718567, "learning_rate": 3.548280164579126e-05, "loss": 1.0134, "step": 15579 }, { "epoch": 2.8953726073220594, "grad_norm": 0.7401148080825806, "learning_rate": 3.5471640251899796e-05, "loss": 0.9301, "step": 15580 }, { "epoch": 2.8955584463854303, "grad_norm": 0.8820634484291077, "learning_rate": 3.546048023525547e-05, "loss": 1.0362, "step": 15581 }, { "epoch": 2.895744285448801, "grad_norm": 0.8928791284561157, "learning_rate": 3.544932159609644e-05, "loss": 0.9027, "step": 15582 }, { "epoch": 2.8959301245121725, "grad_norm": 0.8220198750495911, "learning_rate": 3.543816433466083e-05, "loss": 0.741, "step": 15583 }, { "epoch": 2.896115963575544, "grad_norm": 0.8550474047660828, "learning_rate": 3.542700845118682e-05, "loss": 0.786, "step": 15584 }, { "epoch": 2.8963018026389147, 
"grad_norm": 0.8651540875434875, "learning_rate": 3.541585394591252e-05, "loss": 1.1396, "step": 15585 }, { "epoch": 2.8964876417022856, "grad_norm": 0.8764159679412842, "learning_rate": 3.5404700819076e-05, "loss": 0.9171, "step": 15586 }, { "epoch": 2.896673480765657, "grad_norm": 0.7330661416053772, "learning_rate": 3.539354907091528e-05, "loss": 0.8973, "step": 15587 }, { "epoch": 2.8968593198290282, "grad_norm": 0.7530513405799866, "learning_rate": 3.538239870166836e-05, "loss": 0.7893, "step": 15588 }, { "epoch": 2.897045158892399, "grad_norm": 0.8874174356460571, "learning_rate": 3.537124971157328e-05, "loss": 1.1772, "step": 15589 }, { "epoch": 2.8972309979557704, "grad_norm": 0.9068132638931274, "learning_rate": 3.5360102100867976e-05, "loss": 1.0861, "step": 15590 }, { "epoch": 2.8974168370191413, "grad_norm": 0.7956612706184387, "learning_rate": 3.534895586979036e-05, "loss": 0.7394, "step": 15591 }, { "epoch": 2.8976026760825127, "grad_norm": 0.8411120176315308, "learning_rate": 3.53378110185783e-05, "loss": 0.9235, "step": 15592 }, { "epoch": 2.8977885151458835, "grad_norm": 0.9352555871009827, "learning_rate": 3.5326667547469735e-05, "loss": 0.9848, "step": 15593 }, { "epoch": 2.897974354209255, "grad_norm": 0.6587279438972473, "learning_rate": 3.531552545670244e-05, "loss": 0.8386, "step": 15594 }, { "epoch": 2.8981601932726258, "grad_norm": 0.8666529059410095, "learning_rate": 3.530438474651428e-05, "loss": 0.9736, "step": 15595 }, { "epoch": 2.898346032335997, "grad_norm": 0.8803017139434814, "learning_rate": 3.529324541714296e-05, "loss": 1.0456, "step": 15596 }, { "epoch": 2.898531871399368, "grad_norm": 0.830962598323822, "learning_rate": 3.528210746882632e-05, "loss": 0.8811, "step": 15597 }, { "epoch": 2.8987177104627393, "grad_norm": 0.7424424290657043, "learning_rate": 3.5270970901802036e-05, "loss": 0.6607, "step": 15598 }, { "epoch": 2.8989035495261106, "grad_norm": 0.9809721112251282, "learning_rate": 3.52598357163078e-05, "loss": 1.2557, 
"step": 15599 }, { "epoch": 2.8990893885894815, "grad_norm": 0.8323695063591003, "learning_rate": 3.524870191258124e-05, "loss": 0.6972, "step": 15600 }, { "epoch": 2.8992752276528524, "grad_norm": 0.7893000245094299, "learning_rate": 3.523756949086004e-05, "loss": 0.8696, "step": 15601 }, { "epoch": 2.8994610667162237, "grad_norm": 0.7958328127861023, "learning_rate": 3.5226438451381794e-05, "loss": 0.8583, "step": 15602 }, { "epoch": 2.899646905779595, "grad_norm": 0.7533215880393982, "learning_rate": 3.521530879438407e-05, "loss": 0.7273, "step": 15603 }, { "epoch": 2.899832744842966, "grad_norm": 0.790684163570404, "learning_rate": 3.520418052010432e-05, "loss": 0.9137, "step": 15604 }, { "epoch": 2.9000185839063373, "grad_norm": 0.6913897395133972, "learning_rate": 3.519305362878023e-05, "loss": 0.8058, "step": 15605 }, { "epoch": 2.900204422969708, "grad_norm": 0.8906787633895874, "learning_rate": 3.518192812064918e-05, "loss": 0.964, "step": 15606 }, { "epoch": 2.9003902620330795, "grad_norm": 0.9065820574760437, "learning_rate": 3.517080399594865e-05, "loss": 0.8591, "step": 15607 }, { "epoch": 2.9005761010964504, "grad_norm": 0.7903452515602112, "learning_rate": 3.515968125491602e-05, "loss": 0.7845, "step": 15608 }, { "epoch": 2.9007619401598217, "grad_norm": 1.014837622642517, "learning_rate": 3.514855989778876e-05, "loss": 1.0081, "step": 15609 }, { "epoch": 2.9009477792231926, "grad_norm": 0.8896788954734802, "learning_rate": 3.513743992480419e-05, "loss": 0.9189, "step": 15610 }, { "epoch": 2.901133618286564, "grad_norm": 0.7536845803260803, "learning_rate": 3.5126321336199676e-05, "loss": 0.9054, "step": 15611 }, { "epoch": 2.901319457349935, "grad_norm": 0.7198872566223145, "learning_rate": 3.5115204132212454e-05, "loss": 0.7971, "step": 15612 }, { "epoch": 2.901505296413306, "grad_norm": 0.8007821440696716, "learning_rate": 3.51040883130799e-05, "loss": 0.9311, "step": 15613 }, { "epoch": 2.9016911354766775, "grad_norm": 0.8197160363197327, 
"learning_rate": 3.50929738790392e-05, "loss": 0.991, "step": 15614 }, { "epoch": 2.9018769745400483, "grad_norm": 0.9037057757377625, "learning_rate": 3.5081860830327564e-05, "loss": 1.0113, "step": 15615 }, { "epoch": 2.9020628136034192, "grad_norm": 0.9215180277824402, "learning_rate": 3.50707491671822e-05, "loss": 0.966, "step": 15616 }, { "epoch": 2.9022486526667906, "grad_norm": 0.7795912027359009, "learning_rate": 3.505963888984031e-05, "loss": 0.9497, "step": 15617 }, { "epoch": 2.902434491730162, "grad_norm": 0.7926895618438721, "learning_rate": 3.504852999853898e-05, "loss": 1.0439, "step": 15618 }, { "epoch": 2.9026203307935328, "grad_norm": 0.8523248434066772, "learning_rate": 3.503742249351533e-05, "loss": 0.918, "step": 15619 }, { "epoch": 2.9028061698569037, "grad_norm": 0.9220716953277588, "learning_rate": 3.502631637500637e-05, "loss": 1.1795, "step": 15620 }, { "epoch": 2.902992008920275, "grad_norm": 0.9029595255851746, "learning_rate": 3.501521164324922e-05, "loss": 0.8578, "step": 15621 }, { "epoch": 2.9031778479836463, "grad_norm": 0.9522722363471985, "learning_rate": 3.5004108298480866e-05, "loss": 0.8879, "step": 15622 }, { "epoch": 2.903363687047017, "grad_norm": 0.6594275236129761, "learning_rate": 3.499300634093828e-05, "loss": 0.6583, "step": 15623 }, { "epoch": 2.9035495261103885, "grad_norm": 0.7711548805236816, "learning_rate": 3.498190577085838e-05, "loss": 0.8813, "step": 15624 }, { "epoch": 2.9037353651737594, "grad_norm": 0.8120653629302979, "learning_rate": 3.4970806588478136e-05, "loss": 0.8379, "step": 15625 }, { "epoch": 2.9039212042371307, "grad_norm": 0.7879056930541992, "learning_rate": 3.4959708794034453e-05, "loss": 0.8443, "step": 15626 }, { "epoch": 2.9041070433005016, "grad_norm": 0.6453332901000977, "learning_rate": 3.494861238776418e-05, "loss": 0.6438, "step": 15627 }, { "epoch": 2.904292882363873, "grad_norm": 0.7373093962669373, "learning_rate": 3.493751736990411e-05, "loss": 0.7821, "step": 15628 }, { "epoch": 
2.9044787214272443, "grad_norm": 0.9499757885932922, "learning_rate": 3.4926423740691106e-05, "loss": 1.0659, "step": 15629 }, { "epoch": 2.904664560490615, "grad_norm": 0.8392454385757446, "learning_rate": 3.4915331500361924e-05, "loss": 0.9771, "step": 15630 }, { "epoch": 2.904850399553986, "grad_norm": 0.9138320684432983, "learning_rate": 3.49042406491533e-05, "loss": 1.0365, "step": 15631 }, { "epoch": 2.9050362386173574, "grad_norm": 0.8812173008918762, "learning_rate": 3.489315118730191e-05, "loss": 1.1716, "step": 15632 }, { "epoch": 2.9052220776807287, "grad_norm": 0.9005640745162964, "learning_rate": 3.488206311504452e-05, "loss": 0.9469, "step": 15633 }, { "epoch": 2.9054079167440996, "grad_norm": 0.8597601652145386, "learning_rate": 3.487097643261774e-05, "loss": 0.9493, "step": 15634 }, { "epoch": 2.9055937558074705, "grad_norm": 0.8719737529754639, "learning_rate": 3.4859891140258183e-05, "loss": 0.7799, "step": 15635 }, { "epoch": 2.905779594870842, "grad_norm": 0.7709887623786926, "learning_rate": 3.484880723820245e-05, "loss": 0.8098, "step": 15636 }, { "epoch": 2.905965433934213, "grad_norm": 0.7473528385162354, "learning_rate": 3.483772472668716e-05, "loss": 1.0006, "step": 15637 }, { "epoch": 2.906151272997584, "grad_norm": 0.7472079396247864, "learning_rate": 3.482664360594882e-05, "loss": 0.8065, "step": 15638 }, { "epoch": 2.9063371120609554, "grad_norm": 0.7801439762115479, "learning_rate": 3.481556387622392e-05, "loss": 0.7806, "step": 15639 }, { "epoch": 2.9065229511243262, "grad_norm": 0.704574465751648, "learning_rate": 3.4804485537748923e-05, "loss": 0.9096, "step": 15640 }, { "epoch": 2.9067087901876976, "grad_norm": 0.8833271861076355, "learning_rate": 3.4793408590760324e-05, "loss": 0.8006, "step": 15641 }, { "epoch": 2.9068946292510685, "grad_norm": 1.2138185501098633, "learning_rate": 3.478233303549453e-05, "loss": 0.8233, "step": 15642 }, { "epoch": 2.90708046831444, "grad_norm": 0.7799171209335327, "learning_rate": 
3.477125887218792e-05, "loss": 0.8199, "step": 15643 }, { "epoch": 2.9072663073778107, "grad_norm": 1.0028904676437378, "learning_rate": 3.476018610107681e-05, "loss": 1.0786, "step": 15644 }, { "epoch": 2.907452146441182, "grad_norm": 1.0555802583694458, "learning_rate": 3.474911472239761e-05, "loss": 1.0305, "step": 15645 }, { "epoch": 2.907637985504553, "grad_norm": 0.7084522843360901, "learning_rate": 3.473804473638654e-05, "loss": 0.8662, "step": 15646 }, { "epoch": 2.907823824567924, "grad_norm": 0.8872708082199097, "learning_rate": 3.472697614327996e-05, "loss": 1.1082, "step": 15647 }, { "epoch": 2.9080096636312955, "grad_norm": 0.8006177544593811, "learning_rate": 3.471590894331402e-05, "loss": 0.9288, "step": 15648 }, { "epoch": 2.9081955026946664, "grad_norm": 0.8129321336746216, "learning_rate": 3.470484313672501e-05, "loss": 0.988, "step": 15649 }, { "epoch": 2.9083813417580373, "grad_norm": 0.7988606691360474, "learning_rate": 3.469377872374907e-05, "loss": 0.8885, "step": 15650 }, { "epoch": 2.9085671808214086, "grad_norm": 1.041595697402954, "learning_rate": 3.468271570462235e-05, "loss": 1.1708, "step": 15651 }, { "epoch": 2.90875301988478, "grad_norm": 0.9497601389884949, "learning_rate": 3.467165407958094e-05, "loss": 0.8956, "step": 15652 }, { "epoch": 2.908938858948151, "grad_norm": 0.9359521865844727, "learning_rate": 3.4660593848861e-05, "loss": 0.8024, "step": 15653 }, { "epoch": 2.909124698011522, "grad_norm": 0.8633369207382202, "learning_rate": 3.464953501269855e-05, "loss": 0.8987, "step": 15654 }, { "epoch": 2.909310537074893, "grad_norm": 0.725227415561676, "learning_rate": 3.4638477571329606e-05, "loss": 0.9908, "step": 15655 }, { "epoch": 2.9094963761382644, "grad_norm": 0.8891089558601379, "learning_rate": 3.462742152499023e-05, "loss": 1.2938, "step": 15656 }, { "epoch": 2.9096822152016353, "grad_norm": 0.8465229868888855, "learning_rate": 3.461636687391632e-05, "loss": 0.8997, "step": 15657 }, { "epoch": 2.9098680542650066, 
"grad_norm": 0.7925376892089844, "learning_rate": 3.460531361834389e-05, "loss": 0.6933, "step": 15658 }, { "epoch": 2.9100538933283775, "grad_norm": 0.7975295782089233, "learning_rate": 3.459426175850882e-05, "loss": 0.9303, "step": 15659 }, { "epoch": 2.910239732391749, "grad_norm": 0.7541177272796631, "learning_rate": 3.458321129464696e-05, "loss": 0.8467, "step": 15660 }, { "epoch": 2.9104255714551197, "grad_norm": 0.784887969493866, "learning_rate": 3.457216222699422e-05, "loss": 0.9571, "step": 15661 }, { "epoch": 2.910611410518491, "grad_norm": 0.856224000453949, "learning_rate": 3.45611145557864e-05, "loss": 0.9387, "step": 15662 }, { "epoch": 2.9107972495818624, "grad_norm": 0.8426495790481567, "learning_rate": 3.4550068281259295e-05, "loss": 1.0054, "step": 15663 }, { "epoch": 2.9109830886452333, "grad_norm": 0.8616632223129272, "learning_rate": 3.4539023403648644e-05, "loss": 0.7377, "step": 15664 }, { "epoch": 2.911168927708604, "grad_norm": 0.8532987833023071, "learning_rate": 3.452797992319022e-05, "loss": 1.0197, "step": 15665 }, { "epoch": 2.9113547667719755, "grad_norm": 0.776501476764679, "learning_rate": 3.451693784011971e-05, "loss": 0.6837, "step": 15666 }, { "epoch": 2.911540605835347, "grad_norm": 0.8846834301948547, "learning_rate": 3.450589715467276e-05, "loss": 0.8795, "step": 15667 }, { "epoch": 2.9117264448987177, "grad_norm": 0.8329945206642151, "learning_rate": 3.449485786708505e-05, "loss": 0.9378, "step": 15668 }, { "epoch": 2.9119122839620886, "grad_norm": 0.8448494076728821, "learning_rate": 3.4483819977592216e-05, "loss": 1.0225, "step": 15669 }, { "epoch": 2.91209812302546, "grad_norm": 0.7854666709899902, "learning_rate": 3.447278348642982e-05, "loss": 0.8708, "step": 15670 }, { "epoch": 2.9122839620888312, "grad_norm": 0.8177938461303711, "learning_rate": 3.446174839383339e-05, "loss": 0.9076, "step": 15671 }, { "epoch": 2.912469801152202, "grad_norm": 0.7455102205276489, "learning_rate": 3.4450714700038453e-05, "loss": 0.8464, 
"step": 15672 }, { "epoch": 2.9126556402155734, "grad_norm": 0.8319911956787109, "learning_rate": 3.443968240528056e-05, "loss": 0.9907, "step": 15673 }, { "epoch": 2.9128414792789443, "grad_norm": 0.7769563794136047, "learning_rate": 3.442865150979514e-05, "loss": 0.9798, "step": 15674 }, { "epoch": 2.9130273183423157, "grad_norm": 0.6890460252761841, "learning_rate": 3.4417622013817595e-05, "loss": 0.8935, "step": 15675 }, { "epoch": 2.9132131574056865, "grad_norm": 0.9530012607574463, "learning_rate": 3.4406593917583397e-05, "loss": 0.7713, "step": 15676 }, { "epoch": 2.913398996469058, "grad_norm": 0.9716929197311401, "learning_rate": 3.439556722132788e-05, "loss": 0.9907, "step": 15677 }, { "epoch": 2.9135848355324288, "grad_norm": 0.7098780870437622, "learning_rate": 3.438454192528637e-05, "loss": 0.6473, "step": 15678 }, { "epoch": 2.9137706745958, "grad_norm": 0.800399661064148, "learning_rate": 3.437351802969425e-05, "loss": 0.9786, "step": 15679 }, { "epoch": 2.913956513659171, "grad_norm": 0.7401872873306274, "learning_rate": 3.436249553478674e-05, "loss": 0.7292, "step": 15680 }, { "epoch": 2.9141423527225423, "grad_norm": 1.1086809635162354, "learning_rate": 3.4351474440799144e-05, "loss": 1.0502, "step": 15681 }, { "epoch": 2.9143281917859136, "grad_norm": 1.0315526723861694, "learning_rate": 3.434045474796668e-05, "loss": 0.8773, "step": 15682 }, { "epoch": 2.9145140308492845, "grad_norm": 0.9345428943634033, "learning_rate": 3.432943645652453e-05, "loss": 0.9042, "step": 15683 }, { "epoch": 2.9146998699126554, "grad_norm": 0.8518444895744324, "learning_rate": 3.4318419566707826e-05, "loss": 0.9686, "step": 15684 }, { "epoch": 2.9148857089760267, "grad_norm": 0.8447026610374451, "learning_rate": 3.4307404078751784e-05, "loss": 1.016, "step": 15685 }, { "epoch": 2.915071548039398, "grad_norm": 0.8735696077346802, "learning_rate": 3.4296389992891454e-05, "loss": 1.0496, "step": 15686 }, { "epoch": 2.915257387102769, "grad_norm": 0.7049415707588196, 
"learning_rate": 3.428537730936191e-05, "loss": 0.8717, "step": 15687 }, { "epoch": 2.9154432261661403, "grad_norm": 0.7728877663612366, "learning_rate": 3.427436602839821e-05, "loss": 0.7514, "step": 15688 }, { "epoch": 2.915629065229511, "grad_norm": 4.035832405090332, "learning_rate": 3.42633561502354e-05, "loss": 1.3198, "step": 15689 }, { "epoch": 2.9158149042928825, "grad_norm": 0.9382774829864502, "learning_rate": 3.425234767510847e-05, "loss": 0.7971, "step": 15690 }, { "epoch": 2.9160007433562534, "grad_norm": 0.7563207149505615, "learning_rate": 3.424134060325233e-05, "loss": 0.7422, "step": 15691 }, { "epoch": 2.9161865824196247, "grad_norm": 0.8817317485809326, "learning_rate": 3.42303349349019e-05, "loss": 1.0201, "step": 15692 }, { "epoch": 2.9163724214829956, "grad_norm": 1.0669676065444946, "learning_rate": 3.421933067029214e-05, "loss": 0.9374, "step": 15693 }, { "epoch": 2.916558260546367, "grad_norm": 0.8332189321517944, "learning_rate": 3.420832780965788e-05, "loss": 0.7491, "step": 15694 }, { "epoch": 2.916744099609738, "grad_norm": 0.9186729788780212, "learning_rate": 3.419732635323394e-05, "loss": 1.0503, "step": 15695 }, { "epoch": 2.916929938673109, "grad_norm": 0.7349416017532349, "learning_rate": 3.418632630125517e-05, "loss": 0.768, "step": 15696 }, { "epoch": 2.9171157777364805, "grad_norm": 0.8888520002365112, "learning_rate": 3.4175327653956325e-05, "loss": 1.0243, "step": 15697 }, { "epoch": 2.9173016167998513, "grad_norm": 0.8811149597167969, "learning_rate": 3.416433041157214e-05, "loss": 1.1016, "step": 15698 }, { "epoch": 2.9174874558632222, "grad_norm": 0.8639944195747375, "learning_rate": 3.415333457433733e-05, "loss": 0.8876, "step": 15699 }, { "epoch": 2.9176732949265936, "grad_norm": 0.8154263496398926, "learning_rate": 3.4142340142486584e-05, "loss": 0.8681, "step": 15700 }, { "epoch": 2.917859133989965, "grad_norm": 0.6924809813499451, "learning_rate": 3.4131347116254606e-05, "loss": 0.6653, "step": 15701 }, { "epoch": 
2.9180449730533358, "grad_norm": 0.7547911405563354, "learning_rate": 3.412035549587599e-05, "loss": 0.7428, "step": 15702 }, { "epoch": 2.918230812116707, "grad_norm": 0.7436400055885315, "learning_rate": 3.4109365281585336e-05, "loss": 0.85, "step": 15703 }, { "epoch": 2.918416651180078, "grad_norm": 0.9104697108268738, "learning_rate": 3.4098376473617164e-05, "loss": 1.0624, "step": 15704 }, { "epoch": 2.9186024902434493, "grad_norm": 0.7633656859397888, "learning_rate": 3.40873890722061e-05, "loss": 0.7758, "step": 15705 }, { "epoch": 2.91878832930682, "grad_norm": 0.7920777201652527, "learning_rate": 3.4076403077586605e-05, "loss": 0.9387, "step": 15706 }, { "epoch": 2.9189741683701915, "grad_norm": 0.9203588366508484, "learning_rate": 3.406541848999312e-05, "loss": 0.9305, "step": 15707 }, { "epoch": 2.9191600074335624, "grad_norm": 0.7177546620368958, "learning_rate": 3.405443530966016e-05, "loss": 0.9136, "step": 15708 }, { "epoch": 2.9193458464969337, "grad_norm": 0.8345839977264404, "learning_rate": 3.404345353682208e-05, "loss": 1.0656, "step": 15709 }, { "epoch": 2.9195316855603046, "grad_norm": 0.755546510219574, "learning_rate": 3.403247317171333e-05, "loss": 0.8703, "step": 15710 }, { "epoch": 2.919717524623676, "grad_norm": 0.9971104264259338, "learning_rate": 3.402149421456823e-05, "loss": 0.9118, "step": 15711 }, { "epoch": 2.9199033636870473, "grad_norm": 0.8281334638595581, "learning_rate": 3.401051666562108e-05, "loss": 0.9542, "step": 15712 }, { "epoch": 2.920089202750418, "grad_norm": 0.8721912503242493, "learning_rate": 3.399954052510625e-05, "loss": 0.804, "step": 15713 }, { "epoch": 2.920275041813789, "grad_norm": 0.8998188972473145, "learning_rate": 3.398856579325796e-05, "loss": 1.0668, "step": 15714 }, { "epoch": 2.9204608808771604, "grad_norm": 0.6928837895393372, "learning_rate": 3.397759247031044e-05, "loss": 0.8659, "step": 15715 }, { "epoch": 2.9206467199405317, "grad_norm": 0.926973283290863, "learning_rate": 
3.396662055649792e-05, "loss": 0.9504, "step": 15716 }, { "epoch": 2.9208325590039026, "grad_norm": 0.7991766929626465, "learning_rate": 3.395565005205458e-05, "loss": 1.0994, "step": 15717 }, { "epoch": 2.9210183980672735, "grad_norm": 0.8260103464126587, "learning_rate": 3.394468095721454e-05, "loss": 0.9118, "step": 15718 }, { "epoch": 2.921204237130645, "grad_norm": 0.8974000215530396, "learning_rate": 3.39337132722119e-05, "loss": 0.8529, "step": 15719 }, { "epoch": 2.921390076194016, "grad_norm": 0.8190984725952148, "learning_rate": 3.3922746997280775e-05, "loss": 0.9201, "step": 15720 }, { "epoch": 2.921575915257387, "grad_norm": 0.807974636554718, "learning_rate": 3.391178213265525e-05, "loss": 1.111, "step": 15721 }, { "epoch": 2.9217617543207584, "grad_norm": 0.7600648403167725, "learning_rate": 3.3900818678569344e-05, "loss": 0.8193, "step": 15722 }, { "epoch": 2.9219475933841292, "grad_norm": 0.8470858335494995, "learning_rate": 3.388985663525702e-05, "loss": 1.1329, "step": 15723 }, { "epoch": 2.9221334324475006, "grad_norm": 0.9734567403793335, "learning_rate": 3.387889600295222e-05, "loss": 0.9613, "step": 15724 }, { "epoch": 2.9223192715108715, "grad_norm": 0.8636355996131897, "learning_rate": 3.386793678188895e-05, "loss": 1.0745, "step": 15725 }, { "epoch": 2.922505110574243, "grad_norm": 0.7585934996604919, "learning_rate": 3.385697897230109e-05, "loss": 1.0608, "step": 15726 }, { "epoch": 2.9226909496376137, "grad_norm": 0.8040744066238403, "learning_rate": 3.3846022574422475e-05, "loss": 0.9901, "step": 15727 }, { "epoch": 2.922876788700985, "grad_norm": 0.7978103756904602, "learning_rate": 3.383506758848701e-05, "loss": 0.6268, "step": 15728 }, { "epoch": 2.923062627764356, "grad_norm": 0.9896499514579773, "learning_rate": 3.382411401472848e-05, "loss": 1.1853, "step": 15729 }, { "epoch": 2.923248466827727, "grad_norm": 0.8281686305999756, "learning_rate": 3.381316185338065e-05, "loss": 0.9363, "step": 15730 }, { "epoch": 2.9234343058910985, 
"grad_norm": 0.753558337688446, "learning_rate": 3.380221110467733e-05, "loss": 0.6371, "step": 15731 }, { "epoch": 2.9236201449544694, "grad_norm": 1.2504048347473145, "learning_rate": 3.3791261768852175e-05, "loss": 1.08, "step": 15732 }, { "epoch": 2.9238059840178403, "grad_norm": 0.9097568988800049, "learning_rate": 3.378031384613897e-05, "loss": 1.1595, "step": 15733 }, { "epoch": 2.9239918230812116, "grad_norm": 0.7802842855453491, "learning_rate": 3.3769367336771306e-05, "loss": 0.9175, "step": 15734 }, { "epoch": 2.924177662144583, "grad_norm": 0.9235023856163025, "learning_rate": 3.375842224098281e-05, "loss": 1.0529, "step": 15735 }, { "epoch": 2.924363501207954, "grad_norm": 0.7967664003372192, "learning_rate": 3.374747855900716e-05, "loss": 0.8076, "step": 15736 }, { "epoch": 2.924549340271325, "grad_norm": 0.8209742307662964, "learning_rate": 3.373653629107788e-05, "loss": 1.1509, "step": 15737 }, { "epoch": 2.924735179334696, "grad_norm": 0.7549864649772644, "learning_rate": 3.372559543742852e-05, "loss": 0.7769, "step": 15738 }, { "epoch": 2.9249210183980674, "grad_norm": 0.8510519862174988, "learning_rate": 3.371465599829257e-05, "loss": 0.8683, "step": 15739 }, { "epoch": 2.9251068574614383, "grad_norm": 0.7822895050048828, "learning_rate": 3.370371797390356e-05, "loss": 0.9195, "step": 15740 }, { "epoch": 2.9252926965248096, "grad_norm": 0.845230758190155, "learning_rate": 3.3692781364494884e-05, "loss": 1.0649, "step": 15741 }, { "epoch": 2.9254785355881805, "grad_norm": 0.8666579127311707, "learning_rate": 3.3681846170300046e-05, "loss": 0.9302, "step": 15742 }, { "epoch": 2.925664374651552, "grad_norm": 0.7660261988639832, "learning_rate": 3.367091239155239e-05, "loss": 0.8797, "step": 15743 }, { "epoch": 2.9258502137149227, "grad_norm": 0.8672879934310913, "learning_rate": 3.365998002848525e-05, "loss": 1.0502, "step": 15744 }, { "epoch": 2.926036052778294, "grad_norm": 0.9607042074203491, "learning_rate": 3.364904908133203e-05, "loss": 
0.9121, "step": 15745 }, { "epoch": 2.9262218918416654, "grad_norm": 1.217624545097351, "learning_rate": 3.3638119550326e-05, "loss": 0.9755, "step": 15746 }, { "epoch": 2.9264077309050363, "grad_norm": 0.8371081948280334, "learning_rate": 3.3627191435700376e-05, "loss": 0.9684, "step": 15747 }, { "epoch": 2.926593569968407, "grad_norm": 0.6598268747329712, "learning_rate": 3.3616264737688495e-05, "loss": 0.7122, "step": 15748 }, { "epoch": 2.9267794090317785, "grad_norm": 0.7115182876586914, "learning_rate": 3.360533945652352e-05, "loss": 0.9374, "step": 15749 }, { "epoch": 2.92696524809515, "grad_norm": 0.9041739106178284, "learning_rate": 3.359441559243864e-05, "loss": 0.8403, "step": 15750 }, { "epoch": 2.9271510871585207, "grad_norm": 0.706041157245636, "learning_rate": 3.358349314566697e-05, "loss": 0.7009, "step": 15751 }, { "epoch": 2.9273369262218916, "grad_norm": 0.7268288135528564, "learning_rate": 3.357257211644166e-05, "loss": 0.985, "step": 15752 }, { "epoch": 2.927522765285263, "grad_norm": 0.7373469471931458, "learning_rate": 3.3561652504995835e-05, "loss": 0.8849, "step": 15753 }, { "epoch": 2.9277086043486342, "grad_norm": 0.7528462409973145, "learning_rate": 3.3550734311562527e-05, "loss": 0.928, "step": 15754 }, { "epoch": 2.927894443412005, "grad_norm": 0.6918595433235168, "learning_rate": 3.353981753637473e-05, "loss": 0.8758, "step": 15755 }, { "epoch": 2.9280802824753764, "grad_norm": 0.7775110602378845, "learning_rate": 3.352890217966551e-05, "loss": 1.1246, "step": 15756 }, { "epoch": 2.9282661215387473, "grad_norm": 0.6666045188903809, "learning_rate": 3.351798824166781e-05, "loss": 0.7464, "step": 15757 }, { "epoch": 2.9284519606021187, "grad_norm": 0.8970504403114319, "learning_rate": 3.3507075722614556e-05, "loss": 0.9118, "step": 15758 }, { "epoch": 2.9286377996654895, "grad_norm": 0.8148529529571533, "learning_rate": 3.349616462273864e-05, "loss": 0.8598, "step": 15759 }, { "epoch": 2.928823638728861, "grad_norm": 0.7585864663124084, 
"learning_rate": 3.348525494227299e-05, "loss": 0.6569, "step": 15760 }, { "epoch": 2.929009477792232, "grad_norm": 0.6061311960220337, "learning_rate": 3.347434668145043e-05, "loss": 0.6898, "step": 15761 }, { "epoch": 2.929195316855603, "grad_norm": 0.8349879384040833, "learning_rate": 3.3463439840503755e-05, "loss": 1.2111, "step": 15762 }, { "epoch": 2.929381155918974, "grad_norm": 0.8286142945289612, "learning_rate": 3.345253441966579e-05, "loss": 1.0443, "step": 15763 }, { "epoch": 2.9295669949823453, "grad_norm": 0.8316406011581421, "learning_rate": 3.3441630419169254e-05, "loss": 0.8959, "step": 15764 }, { "epoch": 2.9297528340457166, "grad_norm": 0.9590152502059937, "learning_rate": 3.343072783924693e-05, "loss": 0.9913, "step": 15765 }, { "epoch": 2.9299386731090875, "grad_norm": 0.8900766372680664, "learning_rate": 3.3419826680131494e-05, "loss": 0.8112, "step": 15766 }, { "epoch": 2.9301245121724584, "grad_norm": 0.7802647352218628, "learning_rate": 3.340892694205555e-05, "loss": 1.1222, "step": 15767 }, { "epoch": 2.9303103512358297, "grad_norm": 1.0942567586898804, "learning_rate": 3.339802862525183e-05, "loss": 0.8967, "step": 15768 }, { "epoch": 2.930496190299201, "grad_norm": 0.776422381401062, "learning_rate": 3.338713172995289e-05, "loss": 0.8029, "step": 15769 }, { "epoch": 2.930682029362572, "grad_norm": 0.8660294413566589, "learning_rate": 3.3376236256391316e-05, "loss": 0.8022, "step": 15770 }, { "epoch": 2.9308678684259433, "grad_norm": 0.7707787156105042, "learning_rate": 3.336534220479961e-05, "loss": 0.8491, "step": 15771 }, { "epoch": 2.931053707489314, "grad_norm": 0.8675243258476257, "learning_rate": 3.3354449575410326e-05, "loss": 0.7361, "step": 15772 }, { "epoch": 2.9312395465526855, "grad_norm": 0.6914379596710205, "learning_rate": 3.334355836845599e-05, "loss": 0.7739, "step": 15773 }, { "epoch": 2.9314253856160564, "grad_norm": 0.911323070526123, "learning_rate": 3.3332668584169004e-05, "loss": 1.0789, "step": 15774 }, { "epoch": 
2.9316112246794277, "grad_norm": 0.7367120385169983, "learning_rate": 3.3321780222781776e-05, "loss": 0.8948, "step": 15775 }, { "epoch": 2.9317970637427986, "grad_norm": 1.1031914949417114, "learning_rate": 3.331089328452674e-05, "loss": 1.0156, "step": 15776 }, { "epoch": 2.93198290280617, "grad_norm": 0.7308126091957092, "learning_rate": 3.330000776963626e-05, "loss": 0.8076, "step": 15777 }, { "epoch": 2.932168741869541, "grad_norm": 1.0267692804336548, "learning_rate": 3.3289123678342645e-05, "loss": 0.8587, "step": 15778 }, { "epoch": 2.932354580932912, "grad_norm": 0.75275057554245, "learning_rate": 3.3278241010878175e-05, "loss": 0.8548, "step": 15779 }, { "epoch": 2.9325404199962835, "grad_norm": 0.9832603335380554, "learning_rate": 3.326735976747518e-05, "loss": 1.0699, "step": 15780 }, { "epoch": 2.9327262590596543, "grad_norm": 0.7555899024009705, "learning_rate": 3.325647994836588e-05, "loss": 0.8503, "step": 15781 }, { "epoch": 2.9329120981230252, "grad_norm": 1.0316232442855835, "learning_rate": 3.324560155378248e-05, "loss": 0.8765, "step": 15782 }, { "epoch": 2.9330979371863966, "grad_norm": 0.8232181668281555, "learning_rate": 3.323472458395712e-05, "loss": 0.6197, "step": 15783 }, { "epoch": 2.933283776249768, "grad_norm": 0.8313225507736206, "learning_rate": 3.322384903912199e-05, "loss": 1.0768, "step": 15784 }, { "epoch": 2.9334696153131388, "grad_norm": 0.7981607913970947, "learning_rate": 3.321297491950925e-05, "loss": 1.0084, "step": 15785 }, { "epoch": 2.93365545437651, "grad_norm": 0.8880338668823242, "learning_rate": 3.320210222535094e-05, "loss": 0.6865, "step": 15786 }, { "epoch": 2.933841293439881, "grad_norm": 0.8243213891983032, "learning_rate": 3.3191230956879104e-05, "loss": 0.7799, "step": 15787 }, { "epoch": 2.9340271325032523, "grad_norm": 0.7768036723136902, "learning_rate": 3.318036111432582e-05, "loss": 1.0689, "step": 15788 }, { "epoch": 2.934212971566623, "grad_norm": 0.9177983999252319, "learning_rate": 
3.316949269792306e-05, "loss": 0.7647, "step": 15789 }, { "epoch": 2.9343988106299945, "grad_norm": 0.8567959070205688, "learning_rate": 3.3158625707902805e-05, "loss": 0.9281, "step": 15790 }, { "epoch": 2.9345846496933654, "grad_norm": 0.800028383731842, "learning_rate": 3.314776014449694e-05, "loss": 1.0502, "step": 15791 }, { "epoch": 2.9347704887567367, "grad_norm": 0.8020050525665283, "learning_rate": 3.3136896007937444e-05, "loss": 0.9888, "step": 15792 }, { "epoch": 2.9349563278201076, "grad_norm": 2.0786726474761963, "learning_rate": 3.312603329845613e-05, "loss": 1.2364, "step": 15793 }, { "epoch": 2.935142166883479, "grad_norm": 0.8580712676048279, "learning_rate": 3.31151720162849e-05, "loss": 0.7469, "step": 15794 }, { "epoch": 2.9353280059468503, "grad_norm": 0.8118255138397217, "learning_rate": 3.310431216165551e-05, "loss": 0.7951, "step": 15795 }, { "epoch": 2.935513845010221, "grad_norm": 0.7357614040374756, "learning_rate": 3.309345373479983e-05, "loss": 0.658, "step": 15796 }, { "epoch": 2.935699684073592, "grad_norm": 1.0954209566116333, "learning_rate": 3.308259673594955e-05, "loss": 0.971, "step": 15797 }, { "epoch": 2.9358855231369634, "grad_norm": 0.95086270570755, "learning_rate": 3.307174116533641e-05, "loss": 0.7394, "step": 15798 }, { "epoch": 2.9360713622003347, "grad_norm": 0.8866342902183533, "learning_rate": 3.306088702319208e-05, "loss": 0.9712, "step": 15799 }, { "epoch": 2.9362572012637056, "grad_norm": 0.9677961468696594, "learning_rate": 3.3050034309748256e-05, "loss": 0.6273, "step": 15800 }, { "epoch": 2.9364430403270765, "grad_norm": 0.9282647371292114, "learning_rate": 3.3039183025236575e-05, "loss": 1.0787, "step": 15801 }, { "epoch": 2.936628879390448, "grad_norm": 1.0624250173568726, "learning_rate": 3.302833316988861e-05, "loss": 1.1045, "step": 15802 }, { "epoch": 2.936814718453819, "grad_norm": 0.7853368520736694, "learning_rate": 3.301748474393592e-05, "loss": 0.8321, "step": 15803 }, { "epoch": 2.93700055751719, 
"grad_norm": 0.7858495116233826, "learning_rate": 3.300663774761008e-05, "loss": 0.9149, "step": 15804 }, { "epoch": 2.9371863965805614, "grad_norm": 0.831453800201416, "learning_rate": 3.2995792181142615e-05, "loss": 1.0512, "step": 15805 }, { "epoch": 2.9373722356439322, "grad_norm": 0.8120449781417847, "learning_rate": 3.298494804476499e-05, "loss": 1.2116, "step": 15806 }, { "epoch": 2.9375580747073036, "grad_norm": 1.215599775314331, "learning_rate": 3.2974105338708594e-05, "loss": 0.7631, "step": 15807 }, { "epoch": 2.9377439137706745, "grad_norm": 0.47633785009384155, "learning_rate": 3.2963264063204946e-05, "loss": 0.4033, "step": 15808 }, { "epoch": 2.937929752834046, "grad_norm": 0.9493878483772278, "learning_rate": 3.295242421848539e-05, "loss": 0.8563, "step": 15809 }, { "epoch": 2.938115591897417, "grad_norm": 0.8112297654151917, "learning_rate": 3.294158580478127e-05, "loss": 0.9625, "step": 15810 }, { "epoch": 2.938301430960788, "grad_norm": 0.7687706351280212, "learning_rate": 3.2930748822323896e-05, "loss": 0.9907, "step": 15811 }, { "epoch": 2.938487270024159, "grad_norm": 0.643135666847229, "learning_rate": 3.291991327134464e-05, "loss": 0.7125, "step": 15812 }, { "epoch": 2.93867310908753, "grad_norm": 0.8122084736824036, "learning_rate": 3.2909079152074705e-05, "loss": 0.9092, "step": 15813 }, { "epoch": 2.9388589481509015, "grad_norm": 0.84363853931427, "learning_rate": 3.289824646474531e-05, "loss": 0.892, "step": 15814 }, { "epoch": 2.9390447872142724, "grad_norm": 0.7943958044052124, "learning_rate": 3.28874152095877e-05, "loss": 0.9036, "step": 15815 }, { "epoch": 2.9392306262776433, "grad_norm": 0.8364081978797913, "learning_rate": 3.287658538683307e-05, "loss": 1.013, "step": 15816 }, { "epoch": 2.9394164653410146, "grad_norm": 0.9474763870239258, "learning_rate": 3.286575699671254e-05, "loss": 0.8919, "step": 15817 }, { "epoch": 2.939602304404386, "grad_norm": 0.8956844210624695, "learning_rate": 3.285493003945722e-05, "loss": 1.0745, 
"step": 15818 }, { "epoch": 2.939788143467757, "grad_norm": 0.8403872847557068, "learning_rate": 3.2844104515298155e-05, "loss": 0.8356, "step": 15819 }, { "epoch": 2.939973982531128, "grad_norm": 1.1269252300262451, "learning_rate": 3.2833280424466473e-05, "loss": 0.9814, "step": 15820 }, { "epoch": 2.940159821594499, "grad_norm": 0.8535260558128357, "learning_rate": 3.282245776719316e-05, "loss": 0.93, "step": 15821 }, { "epoch": 2.9403456606578704, "grad_norm": 0.8071843385696411, "learning_rate": 3.281163654370919e-05, "loss": 1.0905, "step": 15822 }, { "epoch": 2.9405314997212413, "grad_norm": 0.7468648552894592, "learning_rate": 3.280081675424552e-05, "loss": 0.7606, "step": 15823 }, { "epoch": 2.9407173387846126, "grad_norm": 0.7991985082626343, "learning_rate": 3.2789998399033126e-05, "loss": 0.816, "step": 15824 }, { "epoch": 2.9409031778479835, "grad_norm": 0.7739672660827637, "learning_rate": 3.277918147830284e-05, "loss": 1.0061, "step": 15825 }, { "epoch": 2.941089016911355, "grad_norm": 0.8021376729011536, "learning_rate": 3.276836599228562e-05, "loss": 1.2006, "step": 15826 }, { "epoch": 2.9412748559747257, "grad_norm": 0.8987312912940979, "learning_rate": 3.275755194121221e-05, "loss": 0.9632, "step": 15827 }, { "epoch": 2.941460695038097, "grad_norm": 0.8584547638893127, "learning_rate": 3.274673932531349e-05, "loss": 0.9651, "step": 15828 }, { "epoch": 2.9416465341014684, "grad_norm": 0.7346953749656677, "learning_rate": 3.2735928144820214e-05, "loss": 0.7253, "step": 15829 }, { "epoch": 2.9418323731648393, "grad_norm": 0.9900451898574829, "learning_rate": 3.2725118399963115e-05, "loss": 0.972, "step": 15830 }, { "epoch": 2.94201821222821, "grad_norm": 0.7965803742408752, "learning_rate": 3.27143100909729e-05, "loss": 0.7316, "step": 15831 }, { "epoch": 2.9422040512915815, "grad_norm": 0.9513272643089294, "learning_rate": 3.270350321808029e-05, "loss": 0.7201, "step": 15832 }, { "epoch": 2.942389890354953, "grad_norm": 1.0336984395980835, 
"learning_rate": 3.2692697781515916e-05, "loss": 1.0326, "step": 15833 }, { "epoch": 2.9425757294183237, "grad_norm": 0.9182641506195068, "learning_rate": 3.2681893781510406e-05, "loss": 0.8409, "step": 15834 }, { "epoch": 2.942761568481695, "grad_norm": 0.7116926312446594, "learning_rate": 3.2671091218294284e-05, "loss": 0.8546, "step": 15835 }, { "epoch": 2.942947407545066, "grad_norm": 0.7742061614990234, "learning_rate": 3.266029009209825e-05, "loss": 0.9241, "step": 15836 }, { "epoch": 2.9431332466084372, "grad_norm": 0.9186986088752747, "learning_rate": 3.264949040315276e-05, "loss": 1.1348, "step": 15837 }, { "epoch": 2.943319085671808, "grad_norm": 0.9015552401542664, "learning_rate": 3.263869215168831e-05, "loss": 0.9576, "step": 15838 }, { "epoch": 2.9435049247351794, "grad_norm": 0.9571332335472107, "learning_rate": 3.262789533793537e-05, "loss": 0.7288, "step": 15839 }, { "epoch": 2.9436907637985503, "grad_norm": 0.8588224649429321, "learning_rate": 3.2617099962124396e-05, "loss": 1.0207, "step": 15840 }, { "epoch": 2.9438766028619217, "grad_norm": 0.8299174904823303, "learning_rate": 3.26063060244858e-05, "loss": 0.7644, "step": 15841 }, { "epoch": 2.9440624419252925, "grad_norm": 0.7573933005332947, "learning_rate": 3.2595513525249954e-05, "loss": 0.9171, "step": 15842 }, { "epoch": 2.944248280988664, "grad_norm": 0.9382292628288269, "learning_rate": 3.258472246464717e-05, "loss": 0.828, "step": 15843 }, { "epoch": 2.944434120052035, "grad_norm": 0.9817286133766174, "learning_rate": 3.257393284290782e-05, "loss": 0.9116, "step": 15844 }, { "epoch": 2.944619959115406, "grad_norm": 0.8646917939186096, "learning_rate": 3.256314466026217e-05, "loss": 1.0139, "step": 15845 }, { "epoch": 2.944805798178777, "grad_norm": 0.7684429883956909, "learning_rate": 3.255235791694043e-05, "loss": 1.1085, "step": 15846 }, { "epoch": 2.9449916372421483, "grad_norm": 0.8010138273239136, "learning_rate": 3.254157261317288e-05, "loss": 0.99, "step": 15847 }, { "epoch": 
2.9451774763055196, "grad_norm": 0.9764358401298523, "learning_rate": 3.253078874918972e-05, "loss": 1.0624, "step": 15848 }, { "epoch": 2.9453633153688905, "grad_norm": 0.8024470210075378, "learning_rate": 3.252000632522111e-05, "loss": 0.8637, "step": 15849 }, { "epoch": 2.9455491544322614, "grad_norm": 0.8670867085456848, "learning_rate": 3.2509225341497154e-05, "loss": 1.1131, "step": 15850 }, { "epoch": 2.9457349934956327, "grad_norm": 0.8533880710601807, "learning_rate": 3.2498445798247926e-05, "loss": 0.8523, "step": 15851 }, { "epoch": 2.945920832559004, "grad_norm": 0.9057621359825134, "learning_rate": 3.248766769570357e-05, "loss": 0.9767, "step": 15852 }, { "epoch": 2.946106671622375, "grad_norm": 0.8551714420318604, "learning_rate": 3.2476891034094095e-05, "loss": 1.0522, "step": 15853 }, { "epoch": 2.9462925106857463, "grad_norm": 1.0271611213684082, "learning_rate": 3.24661158136495e-05, "loss": 1.0574, "step": 15854 }, { "epoch": 2.946478349749117, "grad_norm": 0.8021514415740967, "learning_rate": 3.2455342034599746e-05, "loss": 0.9811, "step": 15855 }, { "epoch": 2.9466641888124885, "grad_norm": 0.8042370080947876, "learning_rate": 3.24445696971748e-05, "loss": 0.6625, "step": 15856 }, { "epoch": 2.9468500278758594, "grad_norm": 0.8870751857757568, "learning_rate": 3.243379880160462e-05, "loss": 1.0134, "step": 15857 }, { "epoch": 2.9470358669392307, "grad_norm": 0.9629341959953308, "learning_rate": 3.242302934811905e-05, "loss": 1.0757, "step": 15858 }, { "epoch": 2.9472217060026016, "grad_norm": 0.811947762966156, "learning_rate": 3.2412261336947926e-05, "loss": 0.765, "step": 15859 }, { "epoch": 2.947407545065973, "grad_norm": 0.964412271976471, "learning_rate": 3.2401494768321125e-05, "loss": 0.9061, "step": 15860 }, { "epoch": 2.947593384129344, "grad_norm": 0.8239457607269287, "learning_rate": 3.239072964246842e-05, "loss": 0.9501, "step": 15861 }, { "epoch": 2.947779223192715, "grad_norm": 0.7884500026702881, "learning_rate": 
3.237996595961956e-05, "loss": 0.8559, "step": 15862 }, { "epoch": 2.9479650622560865, "grad_norm": 0.7907791137695312, "learning_rate": 3.236920372000426e-05, "loss": 1.0445, "step": 15863 }, { "epoch": 2.9481509013194573, "grad_norm": 0.9507573246955872, "learning_rate": 3.235844292385228e-05, "loss": 0.963, "step": 15864 }, { "epoch": 2.9483367403828282, "grad_norm": 0.9838759303092957, "learning_rate": 3.234768357139326e-05, "loss": 0.8269, "step": 15865 }, { "epoch": 2.9485225794461996, "grad_norm": 1.0180253982543945, "learning_rate": 3.233692566285681e-05, "loss": 0.9124, "step": 15866 }, { "epoch": 2.948708418509571, "grad_norm": 0.8494012355804443, "learning_rate": 3.2326169198472556e-05, "loss": 1.0001, "step": 15867 }, { "epoch": 2.9488942575729418, "grad_norm": 0.9028781652450562, "learning_rate": 3.2315414178470124e-05, "loss": 0.7656, "step": 15868 }, { "epoch": 2.949080096636313, "grad_norm": 0.8880323767662048, "learning_rate": 3.230466060307903e-05, "loss": 0.9977, "step": 15869 }, { "epoch": 2.949265935699684, "grad_norm": 0.9243189692497253, "learning_rate": 3.2293908472528776e-05, "loss": 0.9917, "step": 15870 }, { "epoch": 2.9494517747630553, "grad_norm": 0.9479065537452698, "learning_rate": 3.228315778704884e-05, "loss": 1.0309, "step": 15871 }, { "epoch": 2.949637613826426, "grad_norm": 0.7647424936294556, "learning_rate": 3.227240854686872e-05, "loss": 0.8696, "step": 15872 }, { "epoch": 2.9498234528897975, "grad_norm": 1.0016605854034424, "learning_rate": 3.22616607522178e-05, "loss": 1.1685, "step": 15873 }, { "epoch": 2.9500092919531684, "grad_norm": 0.6829125881195068, "learning_rate": 3.2250914403325503e-05, "loss": 0.7039, "step": 15874 }, { "epoch": 2.9501951310165397, "grad_norm": 0.7340703010559082, "learning_rate": 3.2240169500421135e-05, "loss": 0.9722, "step": 15875 }, { "epoch": 2.9503809700799106, "grad_norm": 0.796754002571106, "learning_rate": 3.222942604373411e-05, "loss": 0.982, "step": 15876 }, { "epoch": 
2.950566809143282, "grad_norm": 0.7887791991233826, "learning_rate": 3.2218684033493655e-05, "loss": 0.9354, "step": 15877 }, { "epoch": 2.9507526482066533, "grad_norm": 0.9478221535682678, "learning_rate": 3.220794346992909e-05, "loss": 1.0203, "step": 15878 }, { "epoch": 2.950938487270024, "grad_norm": 0.8265958428382874, "learning_rate": 3.2197204353269614e-05, "loss": 0.9327, "step": 15879 }, { "epoch": 2.951124326333395, "grad_norm": 0.8037706613540649, "learning_rate": 3.2186466683744485e-05, "loss": 0.9537, "step": 15880 }, { "epoch": 2.9513101653967664, "grad_norm": 0.9107820987701416, "learning_rate": 3.2175730461582866e-05, "loss": 1.0413, "step": 15881 }, { "epoch": 2.9514960044601377, "grad_norm": 0.893482506275177, "learning_rate": 3.2164995687013876e-05, "loss": 0.8939, "step": 15882 }, { "epoch": 2.9516818435235086, "grad_norm": 0.7663919925689697, "learning_rate": 3.2154262360266607e-05, "loss": 0.8662, "step": 15883 }, { "epoch": 2.95186768258688, "grad_norm": 0.7317687273025513, "learning_rate": 3.2143530481570225e-05, "loss": 0.9311, "step": 15884 }, { "epoch": 2.952053521650251, "grad_norm": 0.8877572417259216, "learning_rate": 3.2132800051153734e-05, "loss": 0.9503, "step": 15885 }, { "epoch": 2.952239360713622, "grad_norm": 1.0320180654525757, "learning_rate": 3.212207106924613e-05, "loss": 1.1679, "step": 15886 }, { "epoch": 2.952425199776993, "grad_norm": 0.8952203989028931, "learning_rate": 3.2111343536076456e-05, "loss": 0.9192, "step": 15887 }, { "epoch": 2.9526110388403644, "grad_norm": 0.8200172185897827, "learning_rate": 3.210061745187363e-05, "loss": 0.8682, "step": 15888 }, { "epoch": 2.9527968779037352, "grad_norm": 0.8985316753387451, "learning_rate": 3.208989281686664e-05, "loss": 1.077, "step": 15889 }, { "epoch": 2.9529827169671066, "grad_norm": 0.8635002374649048, "learning_rate": 3.207916963128434e-05, "loss": 0.9769, "step": 15890 }, { "epoch": 2.9531685560304775, "grad_norm": 0.8763348460197449, "learning_rate": 
3.206844789535558e-05, "loss": 1.0203, "step": 15891 }, { "epoch": 2.953354395093849, "grad_norm": 0.7320058345794678, "learning_rate": 3.2057727609309265e-05, "loss": 1.0473, "step": 15892 }, { "epoch": 2.95354023415722, "grad_norm": 0.7867861390113831, "learning_rate": 3.204700877337415e-05, "loss": 0.9808, "step": 15893 }, { "epoch": 2.953726073220591, "grad_norm": 0.9463543891906738, "learning_rate": 3.203629138777903e-05, "loss": 0.9315, "step": 15894 }, { "epoch": 2.953911912283962, "grad_norm": 1.0253965854644775, "learning_rate": 3.20255754527526e-05, "loss": 1.0547, "step": 15895 }, { "epoch": 2.954097751347333, "grad_norm": 0.8799330592155457, "learning_rate": 3.201486096852364e-05, "loss": 0.8729, "step": 15896 }, { "epoch": 2.9542835904107045, "grad_norm": 0.8454210758209229, "learning_rate": 3.2004147935320814e-05, "loss": 0.9261, "step": 15897 }, { "epoch": 2.9544694294740754, "grad_norm": 0.889337956905365, "learning_rate": 3.199343635337273e-05, "loss": 1.1, "step": 15898 }, { "epoch": 2.9546552685374463, "grad_norm": 0.8433546423912048, "learning_rate": 3.198272622290804e-05, "loss": 0.7996, "step": 15899 }, { "epoch": 2.9548411076008176, "grad_norm": 0.8294208645820618, "learning_rate": 3.1972017544155376e-05, "loss": 0.7894, "step": 15900 }, { "epoch": 2.955026946664189, "grad_norm": 0.7913797497749329, "learning_rate": 3.196131031734326e-05, "loss": 0.8688, "step": 15901 }, { "epoch": 2.95521278572756, "grad_norm": 0.9375088810920715, "learning_rate": 3.195060454270021e-05, "loss": 0.9925, "step": 15902 }, { "epoch": 2.955398624790931, "grad_norm": 0.7612003087997437, "learning_rate": 3.19399002204547e-05, "loss": 0.8621, "step": 15903 }, { "epoch": 2.955584463854302, "grad_norm": 0.7709554433822632, "learning_rate": 3.192919735083525e-05, "loss": 0.7884, "step": 15904 }, { "epoch": 2.9557703029176734, "grad_norm": 0.7612934708595276, "learning_rate": 3.191849593407027e-05, "loss": 0.7994, "step": 15905 }, { "epoch": 2.9559561419810443, 
"grad_norm": 1.7417844533920288, "learning_rate": 3.190779597038813e-05, "loss": 1.4456, "step": 15906 }, { "epoch": 2.9561419810444156, "grad_norm": 0.9167274832725525, "learning_rate": 3.1897097460017266e-05, "loss": 0.9625, "step": 15907 }, { "epoch": 2.9563278201077865, "grad_norm": 0.7483566999435425, "learning_rate": 3.188640040318599e-05, "loss": 1.0241, "step": 15908 }, { "epoch": 2.956513659171158, "grad_norm": 0.7958605289459229, "learning_rate": 3.187570480012258e-05, "loss": 0.8882, "step": 15909 }, { "epoch": 2.9566994982345287, "grad_norm": 0.84869784116745, "learning_rate": 3.186501065105536e-05, "loss": 0.7438, "step": 15910 }, { "epoch": 2.9568853372979, "grad_norm": 0.8469640612602234, "learning_rate": 3.185431795621255e-05, "loss": 0.8025, "step": 15911 }, { "epoch": 2.9570711763612714, "grad_norm": 0.8365806341171265, "learning_rate": 3.1843626715822406e-05, "loss": 0.9082, "step": 15912 }, { "epoch": 2.9572570154246423, "grad_norm": 0.8219962120056152, "learning_rate": 3.1832936930113086e-05, "loss": 0.7641, "step": 15913 }, { "epoch": 2.957442854488013, "grad_norm": 0.7901747822761536, "learning_rate": 3.1822248599312754e-05, "loss": 0.9285, "step": 15914 }, { "epoch": 2.9576286935513845, "grad_norm": 0.8131659626960754, "learning_rate": 3.18115617236495e-05, "loss": 0.6014, "step": 15915 }, { "epoch": 2.957814532614756, "grad_norm": 1.0069524049758911, "learning_rate": 3.180087630335147e-05, "loss": 0.9341, "step": 15916 }, { "epoch": 2.9580003716781267, "grad_norm": 0.9531908631324768, "learning_rate": 3.17901923386467e-05, "loss": 0.8465, "step": 15917 }, { "epoch": 2.958186210741498, "grad_norm": 0.7890156507492065, "learning_rate": 3.1779509829763186e-05, "loss": 0.9076, "step": 15918 }, { "epoch": 2.958372049804869, "grad_norm": 0.9298142194747925, "learning_rate": 3.176882877692897e-05, "loss": 0.9867, "step": 15919 }, { "epoch": 2.9585578888682402, "grad_norm": 0.8551667928695679, "learning_rate": 3.1758149180372045e-05, "loss": 
0.9891, "step": 15920 }, { "epoch": 2.958743727931611, "grad_norm": 0.7418187260627747, "learning_rate": 3.174747104032032e-05, "loss": 1.0218, "step": 15921 }, { "epoch": 2.9589295669949824, "grad_norm": 0.7891413569450378, "learning_rate": 3.1736794357001695e-05, "loss": 1.0136, "step": 15922 }, { "epoch": 2.9591154060583533, "grad_norm": 0.9421678185462952, "learning_rate": 3.172611913064402e-05, "loss": 1.0678, "step": 15923 }, { "epoch": 2.9593012451217247, "grad_norm": 1.0176401138305664, "learning_rate": 3.171544536147521e-05, "loss": 0.9857, "step": 15924 }, { "epoch": 2.9594870841850955, "grad_norm": 0.8667479753494263, "learning_rate": 3.1704773049723024e-05, "loss": 0.8421, "step": 15925 }, { "epoch": 2.959672923248467, "grad_norm": 0.8144848346710205, "learning_rate": 3.169410219561523e-05, "loss": 0.9336, "step": 15926 }, { "epoch": 2.959858762311838, "grad_norm": 0.7537040114402771, "learning_rate": 3.168343279937964e-05, "loss": 0.8843, "step": 15927 }, { "epoch": 2.960044601375209, "grad_norm": 0.880525529384613, "learning_rate": 3.1672764861243945e-05, "loss": 1.0492, "step": 15928 }, { "epoch": 2.96023044043858, "grad_norm": 0.9077096581459045, "learning_rate": 3.166209838143582e-05, "loss": 0.968, "step": 15929 }, { "epoch": 2.9604162795019513, "grad_norm": 0.8353539109230042, "learning_rate": 3.1651433360182916e-05, "loss": 0.8478, "step": 15930 }, { "epoch": 2.9606021185653226, "grad_norm": 0.8167479038238525, "learning_rate": 3.164076979771287e-05, "loss": 0.9171, "step": 15931 }, { "epoch": 2.9607879576286935, "grad_norm": 0.6608850359916687, "learning_rate": 3.163010769425331e-05, "loss": 0.7729, "step": 15932 }, { "epoch": 2.9609737966920644, "grad_norm": 0.8036105036735535, "learning_rate": 3.161944705003177e-05, "loss": 0.9901, "step": 15933 }, { "epoch": 2.9611596357554357, "grad_norm": 0.9866622090339661, "learning_rate": 3.160878786527579e-05, "loss": 1.1158, "step": 15934 }, { "epoch": 2.961345474818807, "grad_norm": 
0.8332897424697876, "learning_rate": 3.159813014021285e-05, "loss": 0.9942, "step": 15935 }, { "epoch": 2.961531313882178, "grad_norm": 0.840764045715332, "learning_rate": 3.1587473875070464e-05, "loss": 1.075, "step": 15936 }, { "epoch": 2.9617171529455493, "grad_norm": 0.9720584750175476, "learning_rate": 3.1576819070076036e-05, "loss": 0.9517, "step": 15937 }, { "epoch": 2.96190299200892, "grad_norm": 0.7495701909065247, "learning_rate": 3.156616572545697e-05, "loss": 0.9145, "step": 15938 }, { "epoch": 2.9620888310722915, "grad_norm": 0.8397412896156311, "learning_rate": 3.155551384144069e-05, "loss": 0.9287, "step": 15939 }, { "epoch": 2.9622746701356624, "grad_norm": 0.7724320888519287, "learning_rate": 3.154486341825446e-05, "loss": 0.9588, "step": 15940 }, { "epoch": 2.9624605091990337, "grad_norm": 0.956950306892395, "learning_rate": 3.1534214456125696e-05, "loss": 0.7432, "step": 15941 }, { "epoch": 2.962646348262405, "grad_norm": 1.2833693027496338, "learning_rate": 3.1523566955281635e-05, "loss": 1.2552, "step": 15942 }, { "epoch": 2.962832187325776, "grad_norm": 1.0043977499008179, "learning_rate": 3.1512920915949495e-05, "loss": 0.8906, "step": 15943 }, { "epoch": 2.963018026389147, "grad_norm": 0.7243955731391907, "learning_rate": 3.150227633835655e-05, "loss": 0.7975, "step": 15944 }, { "epoch": 2.963203865452518, "grad_norm": 0.6837208271026611, "learning_rate": 3.1491633222729975e-05, "loss": 0.7359, "step": 15945 }, { "epoch": 2.9633897045158895, "grad_norm": 0.7101500630378723, "learning_rate": 3.148099156929689e-05, "loss": 0.8906, "step": 15946 }, { "epoch": 2.9635755435792603, "grad_norm": 0.9120703339576721, "learning_rate": 3.14703513782845e-05, "loss": 0.8411, "step": 15947 }, { "epoch": 2.9637613826426312, "grad_norm": 0.8641577363014221, "learning_rate": 3.145971264991985e-05, "loss": 0.9366, "step": 15948 }, { "epoch": 2.9639472217060026, "grad_norm": 0.7627655863761902, "learning_rate": 3.144907538443002e-05, "loss": 0.6819, "step": 
15949 }, { "epoch": 2.964133060769374, "grad_norm": 0.7648568749427795, "learning_rate": 3.143843958204199e-05, "loss": 1.1172, "step": 15950 }, { "epoch": 2.9643188998327448, "grad_norm": 0.8323633074760437, "learning_rate": 3.1427805242982824e-05, "loss": 1.0325, "step": 15951 }, { "epoch": 2.964504738896116, "grad_norm": 0.7272006273269653, "learning_rate": 3.141717236747951e-05, "loss": 0.7491, "step": 15952 }, { "epoch": 2.964690577959487, "grad_norm": 0.8330870270729065, "learning_rate": 3.140654095575896e-05, "loss": 0.681, "step": 15953 }, { "epoch": 2.9648764170228583, "grad_norm": 0.899566650390625, "learning_rate": 3.139591100804807e-05, "loss": 1.084, "step": 15954 }, { "epoch": 2.965062256086229, "grad_norm": 0.7420375943183899, "learning_rate": 3.1385282524573715e-05, "loss": 0.673, "step": 15955 }, { "epoch": 2.9652480951496005, "grad_norm": 0.890329897403717, "learning_rate": 3.1374655505562775e-05, "loss": 1.0518, "step": 15956 }, { "epoch": 2.9654339342129714, "grad_norm": 0.6901466250419617, "learning_rate": 3.1364029951242055e-05, "loss": 1.1218, "step": 15957 }, { "epoch": 2.9656197732763427, "grad_norm": 0.8333992958068848, "learning_rate": 3.13534058618383e-05, "loss": 0.9538, "step": 15958 }, { "epoch": 2.9658056123397136, "grad_norm": 0.8375001549720764, "learning_rate": 3.134278323757833e-05, "loss": 1.0051, "step": 15959 }, { "epoch": 2.965991451403085, "grad_norm": 0.8724488019943237, "learning_rate": 3.133216207868883e-05, "loss": 0.8164, "step": 15960 }, { "epoch": 2.9661772904664563, "grad_norm": 0.7335485219955444, "learning_rate": 3.132154238539645e-05, "loss": 0.9836, "step": 15961 }, { "epoch": 2.966363129529827, "grad_norm": 0.957584798336029, "learning_rate": 3.1310924157927935e-05, "loss": 0.996, "step": 15962 }, { "epoch": 2.966548968593198, "grad_norm": 1.464234471321106, "learning_rate": 3.130030739650983e-05, "loss": 1.5019, "step": 15963 }, { "epoch": 2.9667348076565694, "grad_norm": 0.7681525945663452, "learning_rate": 
3.128969210136881e-05, "loss": 0.7493, "step": 15964 }, { "epoch": 2.9669206467199407, "grad_norm": 0.7735837697982788, "learning_rate": 3.1279078272731385e-05, "loss": 0.6888, "step": 15965 }, { "epoch": 2.9671064857833116, "grad_norm": 0.7694737911224365, "learning_rate": 3.126846591082409e-05, "loss": 0.8867, "step": 15966 }, { "epoch": 2.967292324846683, "grad_norm": 0.7423407435417175, "learning_rate": 3.125785501587346e-05, "loss": 0.7762, "step": 15967 }, { "epoch": 2.967478163910054, "grad_norm": 0.734919011592865, "learning_rate": 3.124724558810596e-05, "loss": 0.637, "step": 15968 }, { "epoch": 2.967664002973425, "grad_norm": 0.9352990984916687, "learning_rate": 3.1236637627748e-05, "loss": 0.8245, "step": 15969 }, { "epoch": 2.967849842036796, "grad_norm": 0.7590056657791138, "learning_rate": 3.122603113502599e-05, "loss": 0.9774, "step": 15970 }, { "epoch": 2.9680356811001674, "grad_norm": 0.817415177822113, "learning_rate": 3.1215426110166356e-05, "loss": 0.9022, "step": 15971 }, { "epoch": 2.9682215201635382, "grad_norm": 1.0079842805862427, "learning_rate": 3.120482255339537e-05, "loss": 1.0907, "step": 15972 }, { "epoch": 2.9684073592269096, "grad_norm": 0.8362964987754822, "learning_rate": 3.1194220464939436e-05, "loss": 0.6905, "step": 15973 }, { "epoch": 2.9685931982902805, "grad_norm": 0.7991756200790405, "learning_rate": 3.118361984502479e-05, "loss": 0.8582, "step": 15974 }, { "epoch": 2.968779037353652, "grad_norm": 0.8549006581306458, "learning_rate": 3.117302069387764e-05, "loss": 1.0008, "step": 15975 }, { "epoch": 2.968964876417023, "grad_norm": 0.7779741883277893, "learning_rate": 3.116242301172429e-05, "loss": 0.76, "step": 15976 }, { "epoch": 2.969150715480394, "grad_norm": 1.0254641771316528, "learning_rate": 3.11518267987909e-05, "loss": 1.0068, "step": 15977 }, { "epoch": 2.969336554543765, "grad_norm": 0.8137785196304321, "learning_rate": 3.114123205530358e-05, "loss": 0.7319, "step": 15978 }, { "epoch": 2.969522393607136, 
"grad_norm": 0.7890313267707825, "learning_rate": 3.113063878148852e-05, "loss": 0.7448, "step": 15979 }, { "epoch": 2.9697082326705075, "grad_norm": 0.862103283405304, "learning_rate": 3.11200469775718e-05, "loss": 0.8145, "step": 15980 }, { "epoch": 2.9698940717338784, "grad_norm": 0.7743937969207764, "learning_rate": 3.110945664377947e-05, "loss": 0.8747, "step": 15981 }, { "epoch": 2.9700799107972493, "grad_norm": 0.800082266330719, "learning_rate": 3.109886778033754e-05, "loss": 0.8537, "step": 15982 }, { "epoch": 2.9702657498606206, "grad_norm": 0.798474133014679, "learning_rate": 3.1088280387472034e-05, "loss": 0.9413, "step": 15983 }, { "epoch": 2.970451588923992, "grad_norm": 0.7497609853744507, "learning_rate": 3.107769446540896e-05, "loss": 1.0148, "step": 15984 }, { "epoch": 2.970637427987363, "grad_norm": 0.9664625525474548, "learning_rate": 3.106711001437421e-05, "loss": 0.9903, "step": 15985 }, { "epoch": 2.970823267050734, "grad_norm": 1.123875617980957, "learning_rate": 3.105652703459368e-05, "loss": 0.7681, "step": 15986 }, { "epoch": 2.971009106114105, "grad_norm": 3.0108494758605957, "learning_rate": 3.104594552629331e-05, "loss": 1.2648, "step": 15987 }, { "epoch": 2.9711949451774764, "grad_norm": 1.2039192914962769, "learning_rate": 3.10353654896989e-05, "loss": 1.2072, "step": 15988 }, { "epoch": 2.9713807842408473, "grad_norm": 0.818681538105011, "learning_rate": 3.1024786925036255e-05, "loss": 0.9656, "step": 15989 }, { "epoch": 2.9715666233042186, "grad_norm": 0.7975749373435974, "learning_rate": 3.101420983253114e-05, "loss": 0.9338, "step": 15990 }, { "epoch": 2.97175246236759, "grad_norm": 0.8213855028152466, "learning_rate": 3.100363421240936e-05, "loss": 0.8867, "step": 15991 }, { "epoch": 2.971938301430961, "grad_norm": 0.9206775426864624, "learning_rate": 3.099306006489662e-05, "loss": 0.9823, "step": 15992 }, { "epoch": 2.9721241404943317, "grad_norm": 0.6859269738197327, "learning_rate": 3.098248739021854e-05, "loss": 0.6746, 
"step": 15993 }, { "epoch": 2.972309979557703, "grad_norm": 0.7936283349990845, "learning_rate": 3.097191618860087e-05, "loss": 0.9326, "step": 15994 }, { "epoch": 2.9724958186210744, "grad_norm": 0.792716383934021, "learning_rate": 3.096134646026917e-05, "loss": 0.7887, "step": 15995 }, { "epoch": 2.9726816576844453, "grad_norm": 0.9117279648780823, "learning_rate": 3.095077820544907e-05, "loss": 1.1533, "step": 15996 }, { "epoch": 2.972867496747816, "grad_norm": 0.7501056790351868, "learning_rate": 3.0940211424366125e-05, "loss": 0.6921, "step": 15997 }, { "epoch": 2.9730533358111875, "grad_norm": 0.9197656512260437, "learning_rate": 3.0929646117245815e-05, "loss": 1.0386, "step": 15998 }, { "epoch": 2.973239174874559, "grad_norm": 0.8189886212348938, "learning_rate": 3.091908228431373e-05, "loss": 0.8909, "step": 15999 }, { "epoch": 2.9734250139379297, "grad_norm": 0.8167677521705627, "learning_rate": 3.0908519925795264e-05, "loss": 0.9702, "step": 16000 }, { "epoch": 2.973610853001301, "grad_norm": 0.8350397944450378, "learning_rate": 3.089795904191588e-05, "loss": 0.8225, "step": 16001 }, { "epoch": 2.973796692064672, "grad_norm": 0.7315388321876526, "learning_rate": 3.088739963290095e-05, "loss": 0.8987, "step": 16002 }, { "epoch": 2.9739825311280432, "grad_norm": 0.7389110922813416, "learning_rate": 3.087684169897588e-05, "loss": 0.8792, "step": 16003 }, { "epoch": 2.974168370191414, "grad_norm": 0.8871979117393494, "learning_rate": 3.086628524036602e-05, "loss": 0.7208, "step": 16004 }, { "epoch": 2.9743542092547854, "grad_norm": 0.7603043913841248, "learning_rate": 3.0855730257296665e-05, "loss": 0.8907, "step": 16005 }, { "epoch": 2.9745400483181563, "grad_norm": 0.9048733711242676, "learning_rate": 3.0845176749993073e-05, "loss": 1.0397, "step": 16006 }, { "epoch": 2.9747258873815277, "grad_norm": 0.9382114410400391, "learning_rate": 3.0834624718680524e-05, "loss": 0.9248, "step": 16007 }, { "epoch": 2.9749117264448985, "grad_norm": 0.9046305418014526, 
"learning_rate": 3.082407416358424e-05, "loss": 0.9245, "step": 16008 }, { "epoch": 2.97509756550827, "grad_norm": 0.8209074139595032, "learning_rate": 3.0813525084929365e-05, "loss": 0.9154, "step": 16009 }, { "epoch": 2.975283404571641, "grad_norm": 0.959549605846405, "learning_rate": 3.080297748294104e-05, "loss": 0.861, "step": 16010 }, { "epoch": 2.975469243635012, "grad_norm": 1.0113213062286377, "learning_rate": 3.079243135784444e-05, "loss": 1.1289, "step": 16011 }, { "epoch": 2.975655082698383, "grad_norm": 0.8068660497665405, "learning_rate": 3.078188670986464e-05, "loss": 0.804, "step": 16012 }, { "epoch": 2.9758409217617543, "grad_norm": 0.7963927388191223, "learning_rate": 3.077134353922667e-05, "loss": 0.7001, "step": 16013 }, { "epoch": 2.9760267608251256, "grad_norm": 0.8770447373390198, "learning_rate": 3.076080184615552e-05, "loss": 1.0307, "step": 16014 }, { "epoch": 2.9762125998884965, "grad_norm": 0.8757564425468445, "learning_rate": 3.07502616308763e-05, "loss": 1.2144, "step": 16015 }, { "epoch": 2.976398438951868, "grad_norm": 0.8343798518180847, "learning_rate": 3.07397228936139e-05, "loss": 0.9809, "step": 16016 }, { "epoch": 2.9765842780152387, "grad_norm": 0.7926608920097351, "learning_rate": 3.072918563459326e-05, "loss": 1.0375, "step": 16017 }, { "epoch": 2.97677011707861, "grad_norm": 0.8190401792526245, "learning_rate": 3.071864985403925e-05, "loss": 0.9349, "step": 16018 }, { "epoch": 2.976955956141981, "grad_norm": 1.1282371282577515, "learning_rate": 3.07081155521768e-05, "loss": 1.0622, "step": 16019 }, { "epoch": 2.9771417952053523, "grad_norm": 0.8359345197677612, "learning_rate": 3.0697582729230715e-05, "loss": 0.9606, "step": 16020 }, { "epoch": 2.977327634268723, "grad_norm": 0.87877357006073, "learning_rate": 3.0687051385425794e-05, "loss": 0.7875, "step": 16021 }, { "epoch": 2.9775134733320945, "grad_norm": 0.8610250949859619, "learning_rate": 3.067652152098679e-05, "loss": 0.8837, "step": 16022 }, { "epoch": 
2.9776993123954654, "grad_norm": 0.9947982430458069, "learning_rate": 3.066599313613849e-05, "loss": 0.9258, "step": 16023 }, { "epoch": 2.9778851514588367, "grad_norm": 0.836203932762146, "learning_rate": 3.0655466231105566e-05, "loss": 0.9683, "step": 16024 }, { "epoch": 2.978070990522208, "grad_norm": 0.9147794842720032, "learning_rate": 3.064494080611274e-05, "loss": 0.8862, "step": 16025 }, { "epoch": 2.978256829585579, "grad_norm": 0.9989723563194275, "learning_rate": 3.063441686138461e-05, "loss": 1.014, "step": 16026 }, { "epoch": 2.97844266864895, "grad_norm": 0.7151840925216675, "learning_rate": 3.062389439714584e-05, "loss": 0.7912, "step": 16027 }, { "epoch": 2.978628507712321, "grad_norm": 0.8194684982299805, "learning_rate": 3.0613373413620994e-05, "loss": 0.9364, "step": 16028 }, { "epoch": 2.9788143467756925, "grad_norm": 1.0978440046310425, "learning_rate": 3.0602853911034624e-05, "loss": 1.0119, "step": 16029 }, { "epoch": 2.9790001858390633, "grad_norm": 0.9319393634796143, "learning_rate": 3.0592335889611204e-05, "loss": 0.8991, "step": 16030 }, { "epoch": 2.9791860249024342, "grad_norm": 0.8846945762634277, "learning_rate": 3.05818193495753e-05, "loss": 0.9444, "step": 16031 }, { "epoch": 2.9793718639658056, "grad_norm": 0.844276487827301, "learning_rate": 3.057130429115135e-05, "loss": 0.9255, "step": 16032 }, { "epoch": 2.979557703029177, "grad_norm": 0.8965041041374207, "learning_rate": 3.056079071456375e-05, "loss": 0.9613, "step": 16033 }, { "epoch": 2.9797435420925478, "grad_norm": 0.8440106511116028, "learning_rate": 3.055027862003689e-05, "loss": 0.9324, "step": 16034 }, { "epoch": 2.979929381155919, "grad_norm": 0.8329331874847412, "learning_rate": 3.053976800779513e-05, "loss": 0.9108, "step": 16035 }, { "epoch": 2.98011522021929, "grad_norm": 0.8621357083320618, "learning_rate": 3.0529258878062886e-05, "loss": 0.7843, "step": 16036 }, { "epoch": 2.9803010592826613, "grad_norm": 1.0645602941513062, "learning_rate": 
3.051875123106437e-05, "loss": 1.3506, "step": 16037 }, { "epoch": 2.980486898346032, "grad_norm": 0.845532238483429, "learning_rate": 3.0508245067023854e-05, "loss": 0.883, "step": 16038 }, { "epoch": 2.9806727374094035, "grad_norm": 0.8710725903511047, "learning_rate": 3.0497740386165618e-05, "loss": 0.9955, "step": 16039 }, { "epoch": 2.9808585764727744, "grad_norm": 0.9422255754470825, "learning_rate": 3.0487237188713858e-05, "loss": 1.0953, "step": 16040 }, { "epoch": 2.9810444155361457, "grad_norm": 0.7569517493247986, "learning_rate": 3.0476735474892713e-05, "loss": 0.8029, "step": 16041 }, { "epoch": 2.9812302545995166, "grad_norm": 0.7387254238128662, "learning_rate": 3.046623524492632e-05, "loss": 0.6981, "step": 16042 }, { "epoch": 2.981416093662888, "grad_norm": 0.8482987880706787, "learning_rate": 3.0455736499038845e-05, "loss": 0.9245, "step": 16043 }, { "epoch": 2.9816019327262593, "grad_norm": 0.8994969129562378, "learning_rate": 3.0445239237454338e-05, "loss": 0.921, "step": 16044 }, { "epoch": 2.98178777178963, "grad_norm": 0.6694866418838501, "learning_rate": 3.0434743460396796e-05, "loss": 0.7867, "step": 16045 }, { "epoch": 2.981973610853001, "grad_norm": 1.0706828832626343, "learning_rate": 3.042424916809028e-05, "loss": 0.763, "step": 16046 }, { "epoch": 2.9821594499163724, "grad_norm": 0.8097942471504211, "learning_rate": 3.0413756360758793e-05, "loss": 0.8001, "step": 16047 }, { "epoch": 2.9823452889797437, "grad_norm": 0.9327067732810974, "learning_rate": 3.0403265038626273e-05, "loss": 1.0208, "step": 16048 }, { "epoch": 2.9825311280431146, "grad_norm": 0.7851989269256592, "learning_rate": 3.0392775201916625e-05, "loss": 1.0449, "step": 16049 }, { "epoch": 2.982716967106486, "grad_norm": 0.7200372219085693, "learning_rate": 3.0382286850853703e-05, "loss": 1.0121, "step": 16050 }, { "epoch": 2.982902806169857, "grad_norm": 1.2710801362991333, "learning_rate": 3.0371799985661433e-05, "loss": 0.9335, "step": 16051 }, { "epoch": 
2.983088645233228, "grad_norm": 0.8790804147720337, "learning_rate": 3.0361314606563606e-05, "loss": 0.901, "step": 16052 }, { "epoch": 2.983274484296599, "grad_norm": 0.9105607867240906, "learning_rate": 3.035083071378402e-05, "loss": 0.9344, "step": 16053 }, { "epoch": 2.9834603233599704, "grad_norm": 0.7116270661354065, "learning_rate": 3.0340348307546384e-05, "loss": 0.6129, "step": 16054 }, { "epoch": 2.9836461624233412, "grad_norm": 0.8581184148788452, "learning_rate": 3.0329867388074517e-05, "loss": 1.1956, "step": 16055 }, { "epoch": 2.9838320014867126, "grad_norm": 0.8559527397155762, "learning_rate": 3.0319387955592038e-05, "loss": 1.0101, "step": 16056 }, { "epoch": 2.9840178405500835, "grad_norm": 1.0393134355545044, "learning_rate": 3.0308910010322666e-05, "loss": 0.8381, "step": 16057 }, { "epoch": 2.984203679613455, "grad_norm": 0.76030033826828, "learning_rate": 3.029843355249e-05, "loss": 0.9155, "step": 16058 }, { "epoch": 2.984389518676826, "grad_norm": 0.7992974519729614, "learning_rate": 3.0287958582317676e-05, "loss": 1.0005, "step": 16059 }, { "epoch": 2.984575357740197, "grad_norm": 0.9479563236236572, "learning_rate": 3.0277485100029256e-05, "loss": 0.9097, "step": 16060 }, { "epoch": 2.984761196803568, "grad_norm": 1.0739942789077759, "learning_rate": 3.026701310584826e-05, "loss": 0.9447, "step": 16061 }, { "epoch": 2.984947035866939, "grad_norm": 0.7115394473075867, "learning_rate": 3.0256542599998172e-05, "loss": 0.7588, "step": 16062 }, { "epoch": 2.9851328749303105, "grad_norm": 0.9326103329658508, "learning_rate": 3.0246073582702538e-05, "loss": 0.8896, "step": 16063 }, { "epoch": 2.9853187139936814, "grad_norm": 0.9557722210884094, "learning_rate": 3.023560605418475e-05, "loss": 0.9496, "step": 16064 }, { "epoch": 2.9855045530570528, "grad_norm": 0.9472669363021851, "learning_rate": 3.022514001466824e-05, "loss": 0.8392, "step": 16065 }, { "epoch": 2.9856903921204236, "grad_norm": 1.0637836456298828, "learning_rate": 
3.021467546437631e-05, "loss": 0.7144, "step": 16066 }, { "epoch": 2.985876231183795, "grad_norm": 0.705267608165741, "learning_rate": 3.020421240353244e-05, "loss": 0.7417, "step": 16067 }, { "epoch": 2.986062070247166, "grad_norm": 0.856480062007904, "learning_rate": 3.019375083235988e-05, "loss": 0.7482, "step": 16068 }, { "epoch": 2.986247909310537, "grad_norm": 0.8884443640708923, "learning_rate": 3.0183290751081918e-05, "loss": 1.1324, "step": 16069 }, { "epoch": 2.986433748373908, "grad_norm": 0.9075732827186584, "learning_rate": 3.017283215992176e-05, "loss": 0.9301, "step": 16070 }, { "epoch": 2.9866195874372794, "grad_norm": 0.8808230757713318, "learning_rate": 3.016237505910272e-05, "loss": 0.9391, "step": 16071 }, { "epoch": 2.9868054265006503, "grad_norm": 0.8501234650611877, "learning_rate": 3.0151919448847933e-05, "loss": 1.0081, "step": 16072 }, { "epoch": 2.9869912655640216, "grad_norm": 0.7957719564437866, "learning_rate": 3.014146532938056e-05, "loss": 0.8891, "step": 16073 }, { "epoch": 2.987177104627393, "grad_norm": 0.8605754971504211, "learning_rate": 3.01310127009237e-05, "loss": 0.8632, "step": 16074 }, { "epoch": 2.987362943690764, "grad_norm": 0.732107937335968, "learning_rate": 3.01205615637005e-05, "loss": 0.7144, "step": 16075 }, { "epoch": 2.9875487827541347, "grad_norm": 0.8604171276092529, "learning_rate": 3.011011191793399e-05, "loss": 0.8015, "step": 16076 }, { "epoch": 2.987734621817506, "grad_norm": 0.7981947660446167, "learning_rate": 3.0099663763847185e-05, "loss": 0.9057, "step": 16077 }, { "epoch": 2.9879204608808774, "grad_norm": 0.7912887930870056, "learning_rate": 3.0089217101663103e-05, "loss": 0.8847, "step": 16078 }, { "epoch": 2.9881062999442483, "grad_norm": 1.4333757162094116, "learning_rate": 3.0078771931604743e-05, "loss": 1.3325, "step": 16079 }, { "epoch": 2.988292139007619, "grad_norm": 0.8902912139892578, "learning_rate": 3.0068328253894997e-05, "loss": 0.8626, "step": 16080 }, { "epoch": 2.9884779780709905, 
"grad_norm": 0.838409960269928, "learning_rate": 3.005788606875678e-05, "loss": 1.0151, "step": 16081 }, { "epoch": 2.988663817134362, "grad_norm": 1.026604175567627, "learning_rate": 3.004744537641293e-05, "loss": 0.9868, "step": 16082 }, { "epoch": 2.9888496561977327, "grad_norm": 0.898180365562439, "learning_rate": 3.0037006177086346e-05, "loss": 1.1465, "step": 16083 }, { "epoch": 2.989035495261104, "grad_norm": 0.748232364654541, "learning_rate": 3.0026568470999806e-05, "loss": 0.8587, "step": 16084 }, { "epoch": 2.989221334324475, "grad_norm": 0.80550616979599, "learning_rate": 3.0016132258376084e-05, "loss": 0.8996, "step": 16085 }, { "epoch": 2.9894071733878462, "grad_norm": 0.7416648864746094, "learning_rate": 3.0005697539437884e-05, "loss": 0.7914, "step": 16086 }, { "epoch": 2.989593012451217, "grad_norm": 0.8478348255157471, "learning_rate": 2.9995264314407956e-05, "loss": 0.9807, "step": 16087 }, { "epoch": 2.9897788515145884, "grad_norm": 1.0586156845092773, "learning_rate": 2.9984832583509016e-05, "loss": 1.0294, "step": 16088 }, { "epoch": 2.9899646905779593, "grad_norm": 0.7601550221443176, "learning_rate": 2.997440234696367e-05, "loss": 0.6927, "step": 16089 }, { "epoch": 2.9901505296413307, "grad_norm": 0.9126309156417847, "learning_rate": 2.9963973604994498e-05, "loss": 0.9581, "step": 16090 }, { "epoch": 2.9903363687047015, "grad_norm": 0.8759339451789856, "learning_rate": 2.995354635782417e-05, "loss": 0.8344, "step": 16091 }, { "epoch": 2.990522207768073, "grad_norm": 0.885123610496521, "learning_rate": 2.9943120605675177e-05, "loss": 0.8952, "step": 16092 }, { "epoch": 2.990708046831444, "grad_norm": 0.7887548208236694, "learning_rate": 2.9932696348770063e-05, "loss": 0.8907, "step": 16093 }, { "epoch": 2.990893885894815, "grad_norm": 0.7854909896850586, "learning_rate": 2.9922273587331263e-05, "loss": 0.8581, "step": 16094 }, { "epoch": 2.991079724958186, "grad_norm": 0.9311997294425964, "learning_rate": 2.9911852321581313e-05, "loss": 
0.9449, "step": 16095 }, { "epoch": 2.9912655640215573, "grad_norm": 1.2775605916976929, "learning_rate": 2.9901432551742594e-05, "loss": 1.1392, "step": 16096 }, { "epoch": 2.9914514030849286, "grad_norm": 0.8485652208328247, "learning_rate": 2.9891014278037478e-05, "loss": 0.9056, "step": 16097 }, { "epoch": 2.9916372421482995, "grad_norm": 0.9734633564949036, "learning_rate": 2.9880597500688346e-05, "loss": 0.9759, "step": 16098 }, { "epoch": 2.991823081211671, "grad_norm": 0.7715386748313904, "learning_rate": 2.9870182219917564e-05, "loss": 0.7665, "step": 16099 }, { "epoch": 2.9920089202750417, "grad_norm": 0.8023921251296997, "learning_rate": 2.9859768435947388e-05, "loss": 0.7398, "step": 16100 }, { "epoch": 2.992194759338413, "grad_norm": 0.7612713575363159, "learning_rate": 2.9849356149000098e-05, "loss": 0.8553, "step": 16101 }, { "epoch": 2.992380598401784, "grad_norm": 1.0459855794906616, "learning_rate": 2.9838945359297888e-05, "loss": 0.946, "step": 16102 }, { "epoch": 2.9925664374651553, "grad_norm": 0.9531751275062561, "learning_rate": 2.9828536067063016e-05, "loss": 0.8861, "step": 16103 }, { "epoch": 2.992752276528526, "grad_norm": 0.8266820907592773, "learning_rate": 2.981812827251762e-05, "loss": 0.5946, "step": 16104 }, { "epoch": 2.9929381155918975, "grad_norm": 0.8020736575126648, "learning_rate": 2.980772197588384e-05, "loss": 1.0465, "step": 16105 }, { "epoch": 2.9931239546552684, "grad_norm": 0.9104275107383728, "learning_rate": 2.9797317177383756e-05, "loss": 0.9653, "step": 16106 }, { "epoch": 2.9933097937186397, "grad_norm": 1.0445524454116821, "learning_rate": 2.9786913877239487e-05, "loss": 1.1114, "step": 16107 }, { "epoch": 2.993495632782011, "grad_norm": 0.6620239615440369, "learning_rate": 2.9776512075673024e-05, "loss": 0.6062, "step": 16108 }, { "epoch": 2.993681471845382, "grad_norm": 0.8883970975875854, "learning_rate": 2.9766111772906424e-05, "loss": 1.0232, "step": 16109 }, { "epoch": 2.993867310908753, "grad_norm": 
0.778900682926178, "learning_rate": 2.975571296916162e-05, "loss": 0.9295, "step": 16110 }, { "epoch": 2.994053149972124, "grad_norm": 0.8331949710845947, "learning_rate": 2.974531566466061e-05, "loss": 0.9017, "step": 16111 }, { "epoch": 2.9942389890354955, "grad_norm": 0.7040413022041321, "learning_rate": 2.9734919859625275e-05, "loss": 0.8675, "step": 16112 }, { "epoch": 2.9944248280988663, "grad_norm": 0.7584128379821777, "learning_rate": 2.9724525554277495e-05, "loss": 0.8517, "step": 16113 }, { "epoch": 2.9946106671622372, "grad_norm": 1.0098402500152588, "learning_rate": 2.9714132748839086e-05, "loss": 0.7754, "step": 16114 }, { "epoch": 2.9947965062256086, "grad_norm": 0.8807827830314636, "learning_rate": 2.970374144353193e-05, "loss": 0.9387, "step": 16115 }, { "epoch": 2.99498234528898, "grad_norm": 0.7473820447921753, "learning_rate": 2.9693351638577782e-05, "loss": 0.7434, "step": 16116 }, { "epoch": 2.9951681843523508, "grad_norm": 0.9319244027137756, "learning_rate": 2.9682963334198356e-05, "loss": 0.9373, "step": 16117 }, { "epoch": 2.995354023415722, "grad_norm": 0.9080926179885864, "learning_rate": 2.9672576530615447e-05, "loss": 1.0254, "step": 16118 }, { "epoch": 2.995539862479093, "grad_norm": 1.1424757242202759, "learning_rate": 2.9662191228050672e-05, "loss": 0.8289, "step": 16119 }, { "epoch": 2.9957257015424643, "grad_norm": 0.8220130205154419, "learning_rate": 2.965180742672574e-05, "loss": 1.0567, "step": 16120 }, { "epoch": 2.995911540605835, "grad_norm": 0.8922242522239685, "learning_rate": 2.9641425126862278e-05, "loss": 0.8066, "step": 16121 }, { "epoch": 2.9960973796692065, "grad_norm": 0.9695261716842651, "learning_rate": 2.9631044328681813e-05, "loss": 0.7672, "step": 16122 }, { "epoch": 2.996283218732578, "grad_norm": 0.8676010370254517, "learning_rate": 2.962066503240598e-05, "loss": 0.7832, "step": 16123 }, { "epoch": 2.9964690577959487, "grad_norm": 0.9023370146751404, "learning_rate": 2.9610287238256275e-05, "loss": 1.0591, 
"step": 16124 }, { "epoch": 2.9966548968593196, "grad_norm": 0.7452178001403809, "learning_rate": 2.95999109464542e-05, "loss": 0.9342, "step": 16125 }, { "epoch": 2.996840735922691, "grad_norm": 0.8523251414299011, "learning_rate": 2.9589536157221177e-05, "loss": 0.9896, "step": 16126 }, { "epoch": 2.9970265749860623, "grad_norm": 0.7315793633460999, "learning_rate": 2.957916287077871e-05, "loss": 0.9418, "step": 16127 }, { "epoch": 2.997212414049433, "grad_norm": 0.7727538347244263, "learning_rate": 2.9568791087348168e-05, "loss": 1.2043, "step": 16128 }, { "epoch": 2.997398253112804, "grad_norm": 0.6960626840591431, "learning_rate": 2.9558420807150878e-05, "loss": 0.5409, "step": 16129 }, { "epoch": 2.9975840921761754, "grad_norm": 0.8825200796127319, "learning_rate": 2.9548052030408213e-05, "loss": 0.8736, "step": 16130 }, { "epoch": 2.9977699312395467, "grad_norm": 0.8022184371948242, "learning_rate": 2.953768475734151e-05, "loss": 0.913, "step": 16131 }, { "epoch": 2.9979557703029176, "grad_norm": 0.7228624820709229, "learning_rate": 2.9527318988172015e-05, "loss": 0.9511, "step": 16132 }, { "epoch": 2.998141609366289, "grad_norm": 0.8025230169296265, "learning_rate": 2.9516954723120948e-05, "loss": 0.9299, "step": 16133 }, { "epoch": 2.99832744842966, "grad_norm": 0.8989067673683167, "learning_rate": 2.9506591962409512e-05, "loss": 0.9173, "step": 16134 }, { "epoch": 2.998513287493031, "grad_norm": 0.7874870300292969, "learning_rate": 2.949623070625892e-05, "loss": 0.9975, "step": 16135 }, { "epoch": 2.998699126556402, "grad_norm": 0.8301673531532288, "learning_rate": 2.9485870954890294e-05, "loss": 0.9403, "step": 16136 }, { "epoch": 2.9988849656197734, "grad_norm": 0.8750012516975403, "learning_rate": 2.9475512708524723e-05, "loss": 1.0227, "step": 16137 }, { "epoch": 2.9990708046831442, "grad_norm": 0.8098914623260498, "learning_rate": 2.946515596738333e-05, "loss": 0.9104, "step": 16138 }, { "epoch": 2.9992566437465156, "grad_norm": 0.8439404368400574, 
"learning_rate": 2.945480073168714e-05, "loss": 1.0937, "step": 16139 }, { "epoch": 2.9994424828098865, "grad_norm": 0.7623707056045532, "learning_rate": 2.944444700165714e-05, "loss": 0.81, "step": 16140 }, { "epoch": 2.999628321873258, "grad_norm": 0.8703219294548035, "learning_rate": 2.9434094777514375e-05, "loss": 1.0107, "step": 16141 }, { "epoch": 2.999814160936629, "grad_norm": 0.7967303395271301, "learning_rate": 2.942374405947973e-05, "loss": 1.0668, "step": 16142 }, { "epoch": 3.0, "grad_norm": 1.0338119268417358, "learning_rate": 2.9413394847774178e-05, "loss": 0.8666, "step": 16143 }, { "epoch": 3.000185839063371, "grad_norm": 0.8520253300666809, "learning_rate": 2.9403047142618588e-05, "loss": 0.915, "step": 16144 }, { "epoch": 3.000371678126742, "grad_norm": 1.1674602031707764, "learning_rate": 2.9392700944233797e-05, "loss": 0.7089, "step": 16145 }, { "epoch": 3.0005575171901135, "grad_norm": 0.8320359587669373, "learning_rate": 2.9382356252840614e-05, "loss": 0.9291, "step": 16146 }, { "epoch": 3.0007433562534844, "grad_norm": 0.9347818493843079, "learning_rate": 2.9372013068659886e-05, "loss": 0.7985, "step": 16147 }, { "epoch": 3.0009291953168553, "grad_norm": 0.8586578369140625, "learning_rate": 2.9361671391912337e-05, "loss": 0.9368, "step": 16148 }, { "epoch": 3.0011150343802266, "grad_norm": 0.8137461543083191, "learning_rate": 2.9351331222818657e-05, "loss": 0.8722, "step": 16149 }, { "epoch": 3.001300873443598, "grad_norm": 0.8831539750099182, "learning_rate": 2.934099256159958e-05, "loss": 1.0599, "step": 16150 }, { "epoch": 3.001486712506969, "grad_norm": 0.9825401306152344, "learning_rate": 2.933065540847578e-05, "loss": 1.0112, "step": 16151 }, { "epoch": 3.00167255157034, "grad_norm": 0.7601832151412964, "learning_rate": 2.932031976366788e-05, "loss": 0.8814, "step": 16152 }, { "epoch": 3.00167255157034, "eval_loss": 1.0085203647613525, "eval_runtime": 23.1453, "eval_samples_per_second": 47.18, "eval_steps_per_second": 23.59, "step": 
16152 }, { "epoch": 3.0018583906337115, "grad_norm": 0.8601055145263672, "learning_rate": 2.930998562739645e-05, "loss": 0.8939, "step": 16153 }, { "epoch": 3.0020442296970824, "grad_norm": 0.745703935623169, "learning_rate": 2.9299652999882054e-05, "loss": 0.8115, "step": 16154 }, { "epoch": 3.0022300687604533, "grad_norm": 0.746161162853241, "learning_rate": 2.9289321881345254e-05, "loss": 0.836, "step": 16155 }, { "epoch": 3.0024159078238246, "grad_norm": 0.9259798526763916, "learning_rate": 2.9278992272006545e-05, "loss": 0.8179, "step": 16156 }, { "epoch": 3.002601746887196, "grad_norm": 0.7527800798416138, "learning_rate": 2.9268664172086346e-05, "loss": 0.9853, "step": 16157 }, { "epoch": 3.002787585950567, "grad_norm": 0.7640870213508606, "learning_rate": 2.925833758180516e-05, "loss": 0.9494, "step": 16158 }, { "epoch": 3.0029734250139377, "grad_norm": 0.7097516059875488, "learning_rate": 2.924801250138336e-05, "loss": 0.8013, "step": 16159 }, { "epoch": 3.003159264077309, "grad_norm": 0.8351288437843323, "learning_rate": 2.923768893104133e-05, "loss": 0.7751, "step": 16160 }, { "epoch": 3.0033451031406804, "grad_norm": 1.039231300354004, "learning_rate": 2.9227366870999353e-05, "loss": 0.9235, "step": 16161 }, { "epoch": 3.0035309422040513, "grad_norm": 0.8639129400253296, "learning_rate": 2.921704632147778e-05, "loss": 0.6621, "step": 16162 }, { "epoch": 3.003716781267422, "grad_norm": 0.8037671446800232, "learning_rate": 2.920672728269692e-05, "loss": 1.0548, "step": 16163 }, { "epoch": 3.0039026203307935, "grad_norm": 0.8230457901954651, "learning_rate": 2.9196409754876984e-05, "loss": 0.8566, "step": 16164 }, { "epoch": 3.004088459394165, "grad_norm": 0.7340700030326843, "learning_rate": 2.918609373823814e-05, "loss": 0.8596, "step": 16165 }, { "epoch": 3.0042742984575357, "grad_norm": 0.8230661749839783, "learning_rate": 2.9175779233000633e-05, "loss": 1.0987, "step": 16166 }, { "epoch": 3.0044601375209066, "grad_norm": 1.0336686372756958, 
"learning_rate": 2.9165466239384587e-05, "loss": 0.6387, "step": 16167 }, { "epoch": 3.0046459765842783, "grad_norm": 0.7918894290924072, "learning_rate": 2.9155154757610103e-05, "loss": 0.8485, "step": 16168 }, { "epoch": 3.0048318156476492, "grad_norm": 0.8027900457382202, "learning_rate": 2.914484478789724e-05, "loss": 0.983, "step": 16169 }, { "epoch": 3.00501765471102, "grad_norm": 0.9236595034599304, "learning_rate": 2.91345363304661e-05, "loss": 1.1157, "step": 16170 }, { "epoch": 3.0052034937743914, "grad_norm": 0.8126418590545654, "learning_rate": 2.912422938553664e-05, "loss": 1.062, "step": 16171 }, { "epoch": 3.0053893328377628, "grad_norm": 0.7875935435295105, "learning_rate": 2.9113923953328903e-05, "loss": 0.8832, "step": 16172 }, { "epoch": 3.0055751719011337, "grad_norm": 0.7910284996032715, "learning_rate": 2.910362003406282e-05, "loss": 0.9957, "step": 16173 }, { "epoch": 3.0057610109645045, "grad_norm": 0.9798944592475891, "learning_rate": 2.909331762795827e-05, "loss": 1.0637, "step": 16174 }, { "epoch": 3.005946850027876, "grad_norm": 0.9422750473022461, "learning_rate": 2.9083016735235203e-05, "loss": 1.0967, "step": 16175 }, { "epoch": 3.006132689091247, "grad_norm": 0.9282810091972351, "learning_rate": 2.9072717356113454e-05, "loss": 1.149, "step": 16176 }, { "epoch": 3.006318528154618, "grad_norm": 0.8232243657112122, "learning_rate": 2.9062419490812798e-05, "loss": 1.1191, "step": 16177 }, { "epoch": 3.006504367217989, "grad_norm": 0.8733327388763428, "learning_rate": 2.9052123139553102e-05, "loss": 1.0162, "step": 16178 }, { "epoch": 3.0066902062813603, "grad_norm": 0.7954488396644592, "learning_rate": 2.9041828302554074e-05, "loss": 0.8666, "step": 16179 }, { "epoch": 3.0068760453447316, "grad_norm": 0.7659454941749573, "learning_rate": 2.9031534980035468e-05, "loss": 0.9133, "step": 16180 }, { "epoch": 3.0070618844081025, "grad_norm": 0.7625312805175781, "learning_rate": 2.9021243172216916e-05, "loss": 0.7314, "step": 16181 }, { 
"epoch": 3.0072477234714734, "grad_norm": 1.0805476903915405, "learning_rate": 2.9010952879318132e-05, "loss": 0.8219, "step": 16182 }, { "epoch": 3.0074335625348447, "grad_norm": 0.9051377773284912, "learning_rate": 2.9000664101558773e-05, "loss": 1.0145, "step": 16183 }, { "epoch": 3.007619401598216, "grad_norm": 0.7972425222396851, "learning_rate": 2.8990376839158384e-05, "loss": 0.89, "step": 16184 }, { "epoch": 3.007805240661587, "grad_norm": 0.7707135677337646, "learning_rate": 2.8980091092336525e-05, "loss": 0.8227, "step": 16185 }, { "epoch": 3.0079910797249583, "grad_norm": 0.9423364400863647, "learning_rate": 2.8969806861312776e-05, "loss": 0.9644, "step": 16186 }, { "epoch": 3.0081769187883296, "grad_norm": 0.8737858533859253, "learning_rate": 2.8959524146306603e-05, "loss": 1.029, "step": 16187 }, { "epoch": 3.0083627578517005, "grad_norm": 0.8065149784088135, "learning_rate": 2.894924294753748e-05, "loss": 1.063, "step": 16188 }, { "epoch": 3.0085485969150714, "grad_norm": 0.8207863569259644, "learning_rate": 2.8938963265224807e-05, "loss": 0.9712, "step": 16189 }, { "epoch": 3.0087344359784427, "grad_norm": 0.7644856572151184, "learning_rate": 2.8928685099588037e-05, "loss": 0.8829, "step": 16190 }, { "epoch": 3.008920275041814, "grad_norm": 0.8748577833175659, "learning_rate": 2.8918408450846533e-05, "loss": 1.2508, "step": 16191 }, { "epoch": 3.009106114105185, "grad_norm": 0.909366250038147, "learning_rate": 2.8908133319219588e-05, "loss": 0.8923, "step": 16192 }, { "epoch": 3.009291953168556, "grad_norm": 0.9220009446144104, "learning_rate": 2.8897859704926566e-05, "loss": 1.0259, "step": 16193 }, { "epoch": 3.009477792231927, "grad_norm": 0.7947103381156921, "learning_rate": 2.8887587608186684e-05, "loss": 0.9017, "step": 16194 }, { "epoch": 3.0096636312952985, "grad_norm": 0.7401075959205627, "learning_rate": 2.887731702921924e-05, "loss": 0.8673, "step": 16195 }, { "epoch": 3.0098494703586693, "grad_norm": 0.8753591775894165, "learning_rate": 
2.886704796824341e-05, "loss": 0.8195, "step": 16196 }, { "epoch": 3.0100353094220402, "grad_norm": 0.8051326870918274, "learning_rate": 2.8856780425478347e-05, "loss": 0.9303, "step": 16197 }, { "epoch": 3.0102211484854116, "grad_norm": 0.8923689126968384, "learning_rate": 2.8846514401143254e-05, "loss": 0.7542, "step": 16198 }, { "epoch": 3.010406987548783, "grad_norm": 0.943494439125061, "learning_rate": 2.88362498954572e-05, "loss": 1.071, "step": 16199 }, { "epoch": 3.0105928266121538, "grad_norm": 0.855754554271698, "learning_rate": 2.882598690863927e-05, "loss": 0.9027, "step": 16200 }, { "epoch": 3.010778665675525, "grad_norm": 0.8352408409118652, "learning_rate": 2.881572544090848e-05, "loss": 0.6896, "step": 16201 }, { "epoch": 3.0109645047388964, "grad_norm": 0.935312807559967, "learning_rate": 2.8805465492483897e-05, "loss": 1.0309, "step": 16202 }, { "epoch": 3.0111503438022673, "grad_norm": 0.9511630535125732, "learning_rate": 2.879520706358446e-05, "loss": 0.9513, "step": 16203 }, { "epoch": 3.011336182865638, "grad_norm": 0.8023136854171753, "learning_rate": 2.878495015442916e-05, "loss": 0.8842, "step": 16204 }, { "epoch": 3.0115220219290095, "grad_norm": 1.0229847431182861, "learning_rate": 2.8774694765236854e-05, "loss": 1.0292, "step": 16205 }, { "epoch": 3.011707860992381, "grad_norm": 0.809948742389679, "learning_rate": 2.8764440896226498e-05, "loss": 0.9655, "step": 16206 }, { "epoch": 3.0118937000557517, "grad_norm": 0.7976549863815308, "learning_rate": 2.875418854761691e-05, "loss": 0.8296, "step": 16207 }, { "epoch": 3.0120795391191226, "grad_norm": 0.8738260269165039, "learning_rate": 2.8743937719626902e-05, "loss": 0.938, "step": 16208 }, { "epoch": 3.012265378182494, "grad_norm": 0.8011206388473511, "learning_rate": 2.8733688412475223e-05, "loss": 1.0696, "step": 16209 }, { "epoch": 3.0124512172458653, "grad_norm": 0.8072490692138672, "learning_rate": 2.8723440626380705e-05, "loss": 1.0296, "step": 16210 }, { "epoch": 3.012637056309236, 
"grad_norm": 0.7365478873252869, "learning_rate": 2.8713194361562036e-05, "loss": 0.9477, "step": 16211 }, { "epoch": 3.012822895372607, "grad_norm": 0.7511142492294312, "learning_rate": 2.8702949618237896e-05, "loss": 0.8417, "step": 16212 }, { "epoch": 3.0130087344359784, "grad_norm": 0.8439598679542542, "learning_rate": 2.869270639662691e-05, "loss": 0.8634, "step": 16213 }, { "epoch": 3.0131945734993497, "grad_norm": 0.8430346250534058, "learning_rate": 2.8682464696947742e-05, "loss": 1.0266, "step": 16214 }, { "epoch": 3.0133804125627206, "grad_norm": 0.7551569938659668, "learning_rate": 2.867222451941901e-05, "loss": 0.8768, "step": 16215 }, { "epoch": 3.0135662516260915, "grad_norm": 0.927182674407959, "learning_rate": 2.8661985864259243e-05, "loss": 0.8485, "step": 16216 }, { "epoch": 3.013752090689463, "grad_norm": 0.8453295230865479, "learning_rate": 2.8651748731686935e-05, "loss": 0.8316, "step": 16217 }, { "epoch": 3.013937929752834, "grad_norm": 0.8905909657478333, "learning_rate": 2.8641513121920637e-05, "loss": 0.9733, "step": 16218 }, { "epoch": 3.014123768816205, "grad_norm": 0.9714555144309998, "learning_rate": 2.8631279035178793e-05, "loss": 1.0372, "step": 16219 }, { "epoch": 3.0143096078795764, "grad_norm": 0.7101377248764038, "learning_rate": 2.8621046471679826e-05, "loss": 0.8919, "step": 16220 }, { "epoch": 3.0144954469429477, "grad_norm": 0.9432785511016846, "learning_rate": 2.8610815431642103e-05, "loss": 0.7906, "step": 16221 }, { "epoch": 3.0146812860063186, "grad_norm": 0.8138036727905273, "learning_rate": 2.860058591528404e-05, "loss": 0.7629, "step": 16222 }, { "epoch": 3.0148671250696895, "grad_norm": 0.7100986838340759, "learning_rate": 2.859035792282394e-05, "loss": 0.9996, "step": 16223 }, { "epoch": 3.015052964133061, "grad_norm": 0.8357480764389038, "learning_rate": 2.8580131454480074e-05, "loss": 0.7076, "step": 16224 }, { "epoch": 3.015238803196432, "grad_norm": 0.8065438866615295, "learning_rate": 2.8569906510470757e-05, 
"loss": 0.7609, "step": 16225 }, { "epoch": 3.015424642259803, "grad_norm": 0.9661011099815369, "learning_rate": 2.8559683091014223e-05, "loss": 1.002, "step": 16226 }, { "epoch": 3.015610481323174, "grad_norm": 0.9474563598632812, "learning_rate": 2.8549461196328663e-05, "loss": 1.0073, "step": 16227 }, { "epoch": 3.015796320386545, "grad_norm": 0.6291988492012024, "learning_rate": 2.853924082663223e-05, "loss": 0.5213, "step": 16228 }, { "epoch": 3.0159821594499165, "grad_norm": 0.7146613001823425, "learning_rate": 2.8529021982143056e-05, "loss": 0.6329, "step": 16229 }, { "epoch": 3.0161679985132874, "grad_norm": 0.780358076095581, "learning_rate": 2.8518804663079278e-05, "loss": 0.7285, "step": 16230 }, { "epoch": 3.0163538375766583, "grad_norm": 0.9360339641571045, "learning_rate": 2.8508588869658946e-05, "loss": 0.9908, "step": 16231 }, { "epoch": 3.0165396766400296, "grad_norm": 0.808726966381073, "learning_rate": 2.849837460210011e-05, "loss": 0.8783, "step": 16232 }, { "epoch": 3.016725515703401, "grad_norm": 0.9102755784988403, "learning_rate": 2.848816186062073e-05, "loss": 0.852, "step": 16233 }, { "epoch": 3.016911354766772, "grad_norm": 0.725265383720398, "learning_rate": 2.847795064543881e-05, "loss": 0.9368, "step": 16234 }, { "epoch": 3.017097193830143, "grad_norm": 0.8580981492996216, "learning_rate": 2.846774095677234e-05, "loss": 0.974, "step": 16235 }, { "epoch": 3.0172830328935145, "grad_norm": 0.7755009531974792, "learning_rate": 2.845753279483918e-05, "loss": 0.9889, "step": 16236 }, { "epoch": 3.0174688719568854, "grad_norm": 1.553571343421936, "learning_rate": 2.844732615985718e-05, "loss": 1.4221, "step": 16237 }, { "epoch": 3.0176547110202563, "grad_norm": 0.7738388180732727, "learning_rate": 2.8437121052044235e-05, "loss": 0.8568, "step": 16238 }, { "epoch": 3.0178405500836276, "grad_norm": 0.7205919027328491, "learning_rate": 2.8426917471618144e-05, "loss": 0.5961, "step": 16239 }, { "epoch": 3.018026389146999, "grad_norm": 
1.14812433719635, "learning_rate": 2.841671541879668e-05, "loss": 0.9657, "step": 16240 }, { "epoch": 3.0001858390633713, "grad_norm": 0.7224998474121094, "learning_rate": 2.8406514893797543e-05, "loss": 0.7203, "step": 16241 }, { "epoch": 3.000371678126742, "grad_norm": 0.7414650321006775, "learning_rate": 2.839631589683852e-05, "loss": 0.7727, "step": 16242 }, { "epoch": 3.0005575171901135, "grad_norm": 0.8323161602020264, "learning_rate": 2.8386118428137254e-05, "loss": 0.8729, "step": 16243 }, { "epoch": 3.0007433562534844, "grad_norm": 0.6795046925544739, "learning_rate": 2.83759224879114e-05, "loss": 0.6143, "step": 16244 }, { "epoch": 3.0009291953168558, "grad_norm": 0.9268404245376587, "learning_rate": 2.8365728076378508e-05, "loss": 0.6701, "step": 16245 }, { "epoch": 3.0011150343802266, "grad_norm": 0.7565287351608276, "learning_rate": 2.835553519375628e-05, "loss": 0.7988, "step": 16246 }, { "epoch": 3.001300873443598, "grad_norm": 0.8616061210632324, "learning_rate": 2.8345343840262205e-05, "loss": 1.0423, "step": 16247 }, { "epoch": 3.001486712506969, "grad_norm": 0.7660315632820129, "learning_rate": 2.8335154016113806e-05, "loss": 0.9305, "step": 16248 }, { "epoch": 3.00167255157034, "grad_norm": 0.8274088501930237, "learning_rate": 2.8324965721528528e-05, "loss": 0.8605, "step": 16249 }, { "epoch": 3.001858390633711, "grad_norm": 0.7002495527267456, "learning_rate": 2.83147789567239e-05, "loss": 0.7109, "step": 16250 }, { "epoch": 3.0020442296970824, "grad_norm": 0.7538357973098755, "learning_rate": 2.8304593721917285e-05, "loss": 0.6587, "step": 16251 }, { "epoch": 3.0022300687604533, "grad_norm": 0.7135251760482788, "learning_rate": 2.8294410017326088e-05, "loss": 0.6873, "step": 16252 }, { "epoch": 3.0024159078238246, "grad_norm": 1.008959412574768, "learning_rate": 2.828422784316762e-05, "loss": 1.0646, "step": 16253 }, { "epoch": 3.0026017468871955, "grad_norm": 0.7986328601837158, "learning_rate": 2.8274047199659282e-05, "loss": 0.8082, "step": 
16254 }, { "epoch": 3.002787585950567, "grad_norm": 0.7848049998283386, "learning_rate": 2.8263868087018274e-05, "loss": 0.8706, "step": 16255 }, { "epoch": 3.0029734250139377, "grad_norm": 0.9080967307090759, "learning_rate": 2.8253690505461937e-05, "loss": 0.758, "step": 16256 }, { "epoch": 3.003159264077309, "grad_norm": 0.875514805316925, "learning_rate": 2.8243514455207432e-05, "loss": 0.8892, "step": 16257 }, { "epoch": 3.0033451031406804, "grad_norm": 0.7878082394599915, "learning_rate": 2.8233339936471993e-05, "loss": 0.7364, "step": 16258 }, { "epoch": 3.0035309422040513, "grad_norm": 0.8241668343544006, "learning_rate": 2.8223166949472758e-05, "loss": 0.7358, "step": 16259 }, { "epoch": 3.0037167812674226, "grad_norm": 0.8483393788337708, "learning_rate": 2.8212995494426842e-05, "loss": 0.9541, "step": 16260 }, { "epoch": 3.0039026203307935, "grad_norm": 0.8021813035011292, "learning_rate": 2.8202825571551326e-05, "loss": 0.7795, "step": 16261 }, { "epoch": 3.004088459394165, "grad_norm": 0.8833257555961609, "learning_rate": 2.819265718106332e-05, "loss": 0.9313, "step": 16262 }, { "epoch": 3.0042742984575357, "grad_norm": 0.8764292597770691, "learning_rate": 2.818249032317981e-05, "loss": 0.7534, "step": 16263 }, { "epoch": 3.004460137520907, "grad_norm": 0.9224509596824646, "learning_rate": 2.8172324998117815e-05, "loss": 0.7994, "step": 16264 }, { "epoch": 3.004645976584278, "grad_norm": 0.7286317944526672, "learning_rate": 2.816216120609424e-05, "loss": 0.9616, "step": 16265 }, { "epoch": 3.0048318156476492, "grad_norm": 0.9721893072128296, "learning_rate": 2.8151998947326065e-05, "loss": 0.7647, "step": 16266 }, { "epoch": 3.00501765471102, "grad_norm": 0.6974045038223267, "learning_rate": 2.814183822203019e-05, "loss": 0.5875, "step": 16267 }, { "epoch": 3.0052034937743914, "grad_norm": 0.7608116269111633, "learning_rate": 2.8131679030423485e-05, "loss": 0.8592, "step": 16268 }, { "epoch": 3.0053893328377623, "grad_norm": 0.5959421396255493, 
"learning_rate": 2.8121521372722716e-05, "loss": 0.438, "step": 16269 }, { "epoch": 3.0055751719011337, "grad_norm": 0.8465961813926697, "learning_rate": 2.811136524914476e-05, "loss": 0.5456, "step": 16270 }, { "epoch": 3.0057610109645045, "grad_norm": 0.8137955665588379, "learning_rate": 2.8101210659906342e-05, "loss": 0.9552, "step": 16271 }, { "epoch": 3.005946850027876, "grad_norm": 0.905461847782135, "learning_rate": 2.8091057605224203e-05, "loss": 0.9942, "step": 16272 }, { "epoch": 3.006132689091247, "grad_norm": 0.9759244322776794, "learning_rate": 2.808090608531502e-05, "loss": 1.0752, "step": 16273 }, { "epoch": 3.006318528154618, "grad_norm": 0.9051206707954407, "learning_rate": 2.807075610039549e-05, "loss": 0.821, "step": 16274 }, { "epoch": 3.0065043672179894, "grad_norm": 0.7651662230491638, "learning_rate": 2.8060607650682246e-05, "loss": 0.7506, "step": 16275 }, { "epoch": 3.0066902062813603, "grad_norm": 1.1936324834823608, "learning_rate": 2.805046073639185e-05, "loss": 1.0375, "step": 16276 }, { "epoch": 3.0068760453447316, "grad_norm": 0.7253216505050659, "learning_rate": 2.8040315357740887e-05, "loss": 0.5289, "step": 16277 }, { "epoch": 3.0070618844081025, "grad_norm": 0.9311563968658447, "learning_rate": 2.8030171514945947e-05, "loss": 0.9183, "step": 16278 }, { "epoch": 3.007247723471474, "grad_norm": 0.8920841813087463, "learning_rate": 2.8020029208223486e-05, "loss": 1.0476, "step": 16279 }, { "epoch": 3.0074335625348447, "grad_norm": 0.7824742197990417, "learning_rate": 2.8009888437789976e-05, "loss": 0.8087, "step": 16280 }, { "epoch": 3.007619401598216, "grad_norm": 0.5891439318656921, "learning_rate": 2.7999749203861836e-05, "loss": 0.5434, "step": 16281 }, { "epoch": 3.007805240661587, "grad_norm": 0.9570181965827942, "learning_rate": 2.7989611506655513e-05, "loss": 0.8865, "step": 16282 }, { "epoch": 3.0079910797249583, "grad_norm": 0.8050483465194702, "learning_rate": 2.797947534638736e-05, "loss": 0.4971, "step": 16283 }, { 
"epoch": 3.008176918788329, "grad_norm": 0.7769497036933899, "learning_rate": 2.796934072327372e-05, "loss": 0.9063, "step": 16284 }, { "epoch": 3.0083627578517005, "grad_norm": 1.0004961490631104, "learning_rate": 2.795920763753086e-05, "loss": 0.8958, "step": 16285 }, { "epoch": 3.0085485969150714, "grad_norm": 0.8578765392303467, "learning_rate": 2.7949076089375116e-05, "loss": 0.8738, "step": 16286 }, { "epoch": 3.0087344359784427, "grad_norm": 0.8630593419075012, "learning_rate": 2.7938946079022665e-05, "loss": 0.7894, "step": 16287 }, { "epoch": 3.0089202750418136, "grad_norm": 0.8347944617271423, "learning_rate": 2.7928817606689774e-05, "loss": 0.8217, "step": 16288 }, { "epoch": 3.009106114105185, "grad_norm": 0.8454095125198364, "learning_rate": 2.7918690672592573e-05, "loss": 0.7392, "step": 16289 }, { "epoch": 3.0092919531685562, "grad_norm": 0.8619915843009949, "learning_rate": 2.790856527694724e-05, "loss": 0.9189, "step": 16290 }, { "epoch": 3.009477792231927, "grad_norm": 0.8524312376976013, "learning_rate": 2.7898441419969878e-05, "loss": 0.6246, "step": 16291 }, { "epoch": 3.0096636312952985, "grad_norm": 0.717739999294281, "learning_rate": 2.7888319101876546e-05, "loss": 0.8099, "step": 16292 }, { "epoch": 3.0098494703586693, "grad_norm": 0.652247965335846, "learning_rate": 2.7878198322883276e-05, "loss": 0.5795, "step": 16293 }, { "epoch": 3.0100353094220407, "grad_norm": 0.9642404913902283, "learning_rate": 2.786807908320611e-05, "loss": 0.9324, "step": 16294 }, { "epoch": 3.0102211484854116, "grad_norm": 0.9150660037994385, "learning_rate": 2.7857961383061028e-05, "loss": 0.995, "step": 16295 }, { "epoch": 3.010406987548783, "grad_norm": 0.7613053917884827, "learning_rate": 2.784784522266395e-05, "loss": 1.017, "step": 16296 }, { "epoch": 3.0105928266121538, "grad_norm": 1.062303066253662, "learning_rate": 2.7837730602230737e-05, "loss": 0.8231, "step": 16297 }, { "epoch": 3.010778665675525, "grad_norm": 0.9443035125732422, "learning_rate": 
2.78276175219774e-05, "loss": 0.8127, "step": 16298 }, { "epoch": 3.010964504738896, "grad_norm": 1.8857661485671997, "learning_rate": 2.7817505982119706e-05, "loss": 1.1149, "step": 16299 }, { "epoch": 3.0111503438022673, "grad_norm": 0.7440164685249329, "learning_rate": 2.7807395982873475e-05, "loss": 0.6358, "step": 16300 }, { "epoch": 3.011336182865638, "grad_norm": 0.9950499534606934, "learning_rate": 2.7797287524454463e-05, "loss": 0.7353, "step": 16301 }, { "epoch": 3.0115220219290095, "grad_norm": 0.8099482655525208, "learning_rate": 2.7787180607078477e-05, "loss": 0.7613, "step": 16302 }, { "epoch": 3.0117078609923804, "grad_norm": 0.7734261751174927, "learning_rate": 2.7777075230961192e-05, "loss": 0.8693, "step": 16303 }, { "epoch": 3.0118937000557517, "grad_norm": 0.7725483775138855, "learning_rate": 2.7766971396318297e-05, "loss": 0.8373, "step": 16304 }, { "epoch": 3.0120795391191226, "grad_norm": 0.8226912021636963, "learning_rate": 2.7756869103365413e-05, "loss": 0.8417, "step": 16305 }, { "epoch": 3.012265378182494, "grad_norm": 0.9145111441612244, "learning_rate": 2.774676835231822e-05, "loss": 0.9199, "step": 16306 }, { "epoch": 3.0124512172458653, "grad_norm": 0.8743301033973694, "learning_rate": 2.7736669143392256e-05, "loss": 0.8631, "step": 16307 }, { "epoch": 3.012637056309236, "grad_norm": 0.9417018294334412, "learning_rate": 2.7726571476803053e-05, "loss": 0.657, "step": 16308 }, { "epoch": 3.0128228953726075, "grad_norm": 0.8447418808937073, "learning_rate": 2.7716475352766146e-05, "loss": 0.7629, "step": 16309 }, { "epoch": 3.0130087344359784, "grad_norm": 0.8393754959106445, "learning_rate": 2.7706380771497075e-05, "loss": 0.7797, "step": 16310 }, { "epoch": 3.0131945734993497, "grad_norm": 0.794707715511322, "learning_rate": 2.7696287733211234e-05, "loss": 0.8391, "step": 16311 }, { "epoch": 3.0133804125627206, "grad_norm": 0.8632328510284424, "learning_rate": 2.7686196238124063e-05, "loss": 0.6809, "step": 16312 }, { "epoch": 
3.013566251626092, "grad_norm": 0.7959421873092651, "learning_rate": 2.767610628645091e-05, "loss": 0.6658, "step": 16313 }, { "epoch": 3.013752090689463, "grad_norm": 1.0240352153778076, "learning_rate": 2.766601787840717e-05, "loss": 1.1223, "step": 16314 }, { "epoch": 3.013937929752834, "grad_norm": 0.8519721031188965, "learning_rate": 2.765593101420816e-05, "loss": 0.8447, "step": 16315 }, { "epoch": 3.014123768816205, "grad_norm": 0.7613992094993591, "learning_rate": 2.7645845694069116e-05, "loss": 0.793, "step": 16316 }, { "epoch": 3.0143096078795764, "grad_norm": 0.8992709517478943, "learning_rate": 2.7635761918205372e-05, "loss": 0.8327, "step": 16317 }, { "epoch": 3.0144954469429472, "grad_norm": 0.8548943400382996, "learning_rate": 2.7625679686832072e-05, "loss": 1.0561, "step": 16318 }, { "epoch": 3.0146812860063186, "grad_norm": 1.1899662017822266, "learning_rate": 2.761559900016447e-05, "loss": 0.8694, "step": 16319 }, { "epoch": 3.0148671250696895, "grad_norm": 0.8247866630554199, "learning_rate": 2.7605519858417694e-05, "loss": 0.7833, "step": 16320 }, { "epoch": 3.015052964133061, "grad_norm": 0.7799816727638245, "learning_rate": 2.7595442261806837e-05, "loss": 0.8294, "step": 16321 }, { "epoch": 3.0152388031964317, "grad_norm": 0.8495178818702698, "learning_rate": 2.7585366210547036e-05, "loss": 0.6405, "step": 16322 }, { "epoch": 3.015424642259803, "grad_norm": 0.871384859085083, "learning_rate": 2.7575291704853323e-05, "loss": 0.7246, "step": 16323 }, { "epoch": 3.0156104813231743, "grad_norm": 0.8070007562637329, "learning_rate": 2.7565218744940724e-05, "loss": 0.791, "step": 16324 }, { "epoch": 3.015796320386545, "grad_norm": 0.9237125515937805, "learning_rate": 2.75551473310242e-05, "loss": 0.7915, "step": 16325 }, { "epoch": 3.0159821594499165, "grad_norm": 0.8128219246864319, "learning_rate": 2.754507746331877e-05, "loss": 0.8471, "step": 16326 }, { "epoch": 3.0161679985132874, "grad_norm": 0.7762858271598816, "learning_rate": 
2.7535009142039315e-05, "loss": 0.7577, "step": 16327 }, { "epoch": 3.0163538375766588, "grad_norm": 1.1461944580078125, "learning_rate": 2.7524942367400706e-05, "loss": 1.1128, "step": 16328 }, { "epoch": 3.0165396766400296, "grad_norm": 1.7739145755767822, "learning_rate": 2.7514877139617833e-05, "loss": 1.3723, "step": 16329 }, { "epoch": 3.016725515703401, "grad_norm": 0.894992470741272, "learning_rate": 2.7504813458905554e-05, "loss": 0.8777, "step": 16330 }, { "epoch": 3.016911354766772, "grad_norm": 0.8429700136184692, "learning_rate": 2.749475132547862e-05, "loss": 0.9114, "step": 16331 }, { "epoch": 3.017097193830143, "grad_norm": 0.9092171788215637, "learning_rate": 2.748469073955179e-05, "loss": 0.9582, "step": 16332 }, { "epoch": 3.017283032893514, "grad_norm": 0.9921269416809082, "learning_rate": 2.747463170133977e-05, "loss": 0.8154, "step": 16333 }, { "epoch": 3.0174688719568854, "grad_norm": 0.9600369930267334, "learning_rate": 2.7464574211057304e-05, "loss": 0.6221, "step": 16334 }, { "epoch": 3.0176547110202563, "grad_norm": 0.8476983904838562, "learning_rate": 2.7454518268919027e-05, "loss": 0.8533, "step": 16335 }, { "epoch": 3.0178405500836276, "grad_norm": 0.706290602684021, "learning_rate": 2.7444463875139537e-05, "loss": 0.6707, "step": 16336 }, { "epoch": 3.0180263891469985, "grad_norm": 0.85426926612854, "learning_rate": 2.7434411029933483e-05, "loss": 0.7433, "step": 16337 }, { "epoch": 3.01821222821037, "grad_norm": 0.9000372290611267, "learning_rate": 2.7424359733515403e-05, "loss": 0.9114, "step": 16338 }, { "epoch": 3.018398067273741, "grad_norm": 0.8626327514648438, "learning_rate": 2.7414309986099783e-05, "loss": 1.0012, "step": 16339 }, { "epoch": 3.018583906337112, "grad_norm": 0.7985569834709167, "learning_rate": 2.7404261787901185e-05, "loss": 0.8213, "step": 16340 }, { "epoch": 3.0187697454004834, "grad_norm": 0.808411717414856, "learning_rate": 2.7394215139134015e-05, "loss": 0.7976, "step": 16341 }, { "epoch": 
3.0189555844638543, "grad_norm": 0.8957644701004028, "learning_rate": 2.7384170040012758e-05, "loss": 0.8949, "step": 16342 }, { "epoch": 3.0191414235272256, "grad_norm": 0.8540877103805542, "learning_rate": 2.737412649075177e-05, "loss": 0.8557, "step": 16343 }, { "epoch": 3.0193272625905965, "grad_norm": 0.7954753637313843, "learning_rate": 2.7364084491565424e-05, "loss": 0.8965, "step": 16344 }, { "epoch": 3.019513101653968, "grad_norm": 0.7853621244430542, "learning_rate": 2.7354044042668014e-05, "loss": 0.7109, "step": 16345 }, { "epoch": 3.0196989407173387, "grad_norm": 0.7370253205299377, "learning_rate": 2.73440051442739e-05, "loss": 0.7888, "step": 16346 }, { "epoch": 3.01988477978071, "grad_norm": 0.8112165927886963, "learning_rate": 2.7333967796597315e-05, "loss": 0.9457, "step": 16347 }, { "epoch": 3.020070618844081, "grad_norm": 1.0122783184051514, "learning_rate": 2.732393199985246e-05, "loss": 0.5817, "step": 16348 }, { "epoch": 3.0202564579074522, "grad_norm": 0.8012350797653198, "learning_rate": 2.731389775425358e-05, "loss": 0.8897, "step": 16349 }, { "epoch": 3.020442296970823, "grad_norm": 0.803153932094574, "learning_rate": 2.73038650600148e-05, "loss": 0.6872, "step": 16350 }, { "epoch": 3.0206281360341944, "grad_norm": 1.1143475770950317, "learning_rate": 2.72938339173503e-05, "loss": 0.7349, "step": 16351 }, { "epoch": 3.0208139750975653, "grad_norm": 0.8050698637962341, "learning_rate": 2.7283804326474137e-05, "loss": 0.697, "step": 16352 }, { "epoch": 3.0209998141609367, "grad_norm": 0.8835048675537109, "learning_rate": 2.727377628760035e-05, "loss": 0.8201, "step": 16353 }, { "epoch": 3.0211856532243075, "grad_norm": 0.786395788192749, "learning_rate": 2.726374980094305e-05, "loss": 0.7965, "step": 16354 }, { "epoch": 3.021371492287679, "grad_norm": 0.8563681840896606, "learning_rate": 2.725372486671618e-05, "loss": 0.839, "step": 16355 }, { "epoch": 3.02155733135105, "grad_norm": 0.9203509092330933, "learning_rate": 
2.724370148513369e-05, "loss": 0.603, "step": 16356 }, { "epoch": 3.021743170414421, "grad_norm": 0.8235127925872803, "learning_rate": 2.723367965640956e-05, "loss": 0.9054, "step": 16357 }, { "epoch": 3.0219290094777924, "grad_norm": 0.8137462735176086, "learning_rate": 2.722365938075767e-05, "loss": 0.9626, "step": 16358 }, { "epoch": 3.0221148485411633, "grad_norm": 0.9724891185760498, "learning_rate": 2.7213640658391882e-05, "loss": 0.9267, "step": 16359 }, { "epoch": 3.0223006876045346, "grad_norm": 0.9871930480003357, "learning_rate": 2.7203623489525996e-05, "loss": 0.8444, "step": 16360 }, { "epoch": 3.0224865266679055, "grad_norm": 0.7596043348312378, "learning_rate": 2.719360787437384e-05, "loss": 0.6035, "step": 16361 }, { "epoch": 3.022672365731277, "grad_norm": 0.8044273257255554, "learning_rate": 2.7183593813149223e-05, "loss": 0.9266, "step": 16362 }, { "epoch": 3.0228582047946477, "grad_norm": 0.8382756114006042, "learning_rate": 2.7173581306065833e-05, "loss": 1.0828, "step": 16363 }, { "epoch": 3.023044043858019, "grad_norm": 0.9325973987579346, "learning_rate": 2.7163570353337375e-05, "loss": 0.6733, "step": 16364 }, { "epoch": 3.02322988292139, "grad_norm": 0.8788731098175049, "learning_rate": 2.7153560955177483e-05, "loss": 0.9024, "step": 16365 }, { "epoch": 3.0234157219847613, "grad_norm": 0.9045056104660034, "learning_rate": 2.7143553111799845e-05, "loss": 1.0876, "step": 16366 }, { "epoch": 3.023601561048132, "grad_norm": 0.8674336075782776, "learning_rate": 2.7133546823418054e-05, "loss": 0.827, "step": 16367 }, { "epoch": 3.0237874001115035, "grad_norm": 0.7579058408737183, "learning_rate": 2.7123542090245625e-05, "loss": 0.7541, "step": 16368 }, { "epoch": 3.0239732391748744, "grad_norm": 0.7754246592521667, "learning_rate": 2.711353891249616e-05, "loss": 0.7431, "step": 16369 }, { "epoch": 3.0241590782382457, "grad_norm": 0.9136900305747986, "learning_rate": 2.710353729038313e-05, "loss": 0.9576, "step": 16370 }, { "epoch": 
3.0243449173016166, "grad_norm": 0.8725305199623108, "learning_rate": 2.709353722411997e-05, "loss": 0.8939, "step": 16371 }, { "epoch": 3.024530756364988, "grad_norm": 0.9331547617912292, "learning_rate": 2.7083538713920174e-05, "loss": 0.6316, "step": 16372 }, { "epoch": 3.0247165954283592, "grad_norm": 0.8981873989105225, "learning_rate": 2.707354175999709e-05, "loss": 0.71, "step": 16373 }, { "epoch": 3.02490243449173, "grad_norm": 0.7866777181625366, "learning_rate": 2.706354636256414e-05, "loss": 0.7442, "step": 16374 }, { "epoch": 3.0250882735551015, "grad_norm": 0.9603359699249268, "learning_rate": 2.7053552521834624e-05, "loss": 0.9581, "step": 16375 }, { "epoch": 3.0252741126184723, "grad_norm": 1.1127327680587769, "learning_rate": 2.7043560238021825e-05, "loss": 0.7281, "step": 16376 }, { "epoch": 3.0254599516818437, "grad_norm": 0.8161758184432983, "learning_rate": 2.7033569511339062e-05, "loss": 0.8828, "step": 16377 }, { "epoch": 3.0256457907452146, "grad_norm": 0.9463825821876526, "learning_rate": 2.7023580341999532e-05, "loss": 1.1826, "step": 16378 }, { "epoch": 3.025831629808586, "grad_norm": 0.8008928298950195, "learning_rate": 2.7013592730216465e-05, "loss": 0.9116, "step": 16379 }, { "epoch": 3.0260174688719568, "grad_norm": 0.7768778204917908, "learning_rate": 2.7003606676202963e-05, "loss": 0.7778, "step": 16380 }, { "epoch": 3.026203307935328, "grad_norm": 0.8135813474655151, "learning_rate": 2.6993622180172218e-05, "loss": 0.6944, "step": 16381 }, { "epoch": 3.026389146998699, "grad_norm": 1.1667437553405762, "learning_rate": 2.6983639242337356e-05, "loss": 1.0562, "step": 16382 }, { "epoch": 3.0265749860620703, "grad_norm": 0.889934778213501, "learning_rate": 2.6973657862911418e-05, "loss": 0.8491, "step": 16383 }, { "epoch": 3.026760825125441, "grad_norm": 0.8528688549995422, "learning_rate": 2.696367804210742e-05, "loss": 0.7671, "step": 16384 }, { "epoch": 3.0269466641888125, "grad_norm": 0.9560061097145081, "learning_rate": 
2.6953699780138364e-05, "loss": 0.8825, "step": 16385 }, { "epoch": 3.0271325032521834, "grad_norm": 0.8095105886459351, "learning_rate": 2.6943723077217252e-05, "loss": 0.7381, "step": 16386 }, { "epoch": 3.0273183423155547, "grad_norm": 0.9033812880516052, "learning_rate": 2.6933747933557008e-05, "loss": 0.866, "step": 16387 }, { "epoch": 3.0275041813789256, "grad_norm": 0.8631482720375061, "learning_rate": 2.692377434937049e-05, "loss": 0.8141, "step": 16388 }, { "epoch": 3.027690020442297, "grad_norm": 0.9113109111785889, "learning_rate": 2.691380232487064e-05, "loss": 0.9179, "step": 16389 }, { "epoch": 3.0278758595056683, "grad_norm": 0.7113003134727478, "learning_rate": 2.6903831860270256e-05, "loss": 0.6443, "step": 16390 }, { "epoch": 3.028061698569039, "grad_norm": 0.8057465553283691, "learning_rate": 2.6893862955782155e-05, "loss": 0.9655, "step": 16391 }, { "epoch": 3.0282475376324105, "grad_norm": 0.738502562046051, "learning_rate": 2.6883895611619047e-05, "loss": 0.5979, "step": 16392 }, { "epoch": 3.0284333766957814, "grad_norm": 0.9927711486816406, "learning_rate": 2.6873929827993727e-05, "loss": 0.8447, "step": 16393 }, { "epoch": 3.0286192157591527, "grad_norm": 0.7252799868583679, "learning_rate": 2.686396560511891e-05, "loss": 0.9221, "step": 16394 }, { "epoch": 3.0288050548225236, "grad_norm": 0.8324769139289856, "learning_rate": 2.6854002943207246e-05, "loss": 0.7464, "step": 16395 }, { "epoch": 3.028990893885895, "grad_norm": 0.8967441916465759, "learning_rate": 2.6844041842471324e-05, "loss": 0.9719, "step": 16396 }, { "epoch": 3.029176732949266, "grad_norm": 0.7824270725250244, "learning_rate": 2.683408230312383e-05, "loss": 0.7379, "step": 16397 }, { "epoch": 3.029362572012637, "grad_norm": 0.9299219846725464, "learning_rate": 2.6824124325377287e-05, "loss": 0.9068, "step": 16398 }, { "epoch": 3.029548411076008, "grad_norm": 0.9649413228034973, "learning_rate": 2.6814167909444233e-05, "loss": 0.7533, "step": 16399 }, { "epoch": 
3.0297342501393794, "grad_norm": 1.058040738105774, "learning_rate": 2.680421305553714e-05, "loss": 0.8024, "step": 16400 }, { "epoch": 3.0299200892027502, "grad_norm": 0.8325514793395996, "learning_rate": 2.6794259763868555e-05, "loss": 0.9875, "step": 16401 }, { "epoch": 3.0301059282661216, "grad_norm": 0.8647835850715637, "learning_rate": 2.678430803465083e-05, "loss": 0.8065, "step": 16402 }, { "epoch": 3.0302917673294925, "grad_norm": 0.9864528775215149, "learning_rate": 2.6774357868096432e-05, "loss": 1.0541, "step": 16403 }, { "epoch": 3.030477606392864, "grad_norm": 0.8255147933959961, "learning_rate": 2.676440926441771e-05, "loss": 0.8324, "step": 16404 }, { "epoch": 3.030663445456235, "grad_norm": 0.9190768599510193, "learning_rate": 2.675446222382697e-05, "loss": 0.726, "step": 16405 }, { "epoch": 3.030849284519606, "grad_norm": 0.9106069207191467, "learning_rate": 2.6744516746536562e-05, "loss": 0.6332, "step": 16406 }, { "epoch": 3.0310351235829773, "grad_norm": 0.7660915851593018, "learning_rate": 2.673457283275873e-05, "loss": 0.7403, "step": 16407 }, { "epoch": 3.031220962646348, "grad_norm": 1.1061785221099854, "learning_rate": 2.6724630482705693e-05, "loss": 0.7549, "step": 16408 }, { "epoch": 3.0314068017097195, "grad_norm": 0.9986096620559692, "learning_rate": 2.671468969658969e-05, "loss": 0.9835, "step": 16409 }, { "epoch": 3.0315926407730904, "grad_norm": 0.8700722455978394, "learning_rate": 2.6704750474622886e-05, "loss": 0.7516, "step": 16410 }, { "epoch": 3.0317784798364618, "grad_norm": 0.7872580289840698, "learning_rate": 2.669481281701739e-05, "loss": 0.7264, "step": 16411 }, { "epoch": 3.0319643188998326, "grad_norm": 0.8272488117218018, "learning_rate": 2.6684876723985298e-05, "loss": 0.7823, "step": 16412 }, { "epoch": 3.032150157963204, "grad_norm": 0.7371950745582581, "learning_rate": 2.6674942195738695e-05, "loss": 0.7922, "step": 16413 }, { "epoch": 3.032335997026575, "grad_norm": 0.7064793705940247, "learning_rate": 
2.6665009232489657e-05, "loss": 0.7347, "step": 16414 }, { "epoch": 3.032521836089946, "grad_norm": 0.8907574415206909, "learning_rate": 2.6655077834450136e-05, "loss": 0.9183, "step": 16415 }, { "epoch": 3.032707675153317, "grad_norm": 0.9381104111671448, "learning_rate": 2.6645148001832086e-05, "loss": 0.8648, "step": 16416 }, { "epoch": 3.0328935142166884, "grad_norm": 1.1486037969589233, "learning_rate": 2.6635219734847507e-05, "loss": 0.7935, "step": 16417 }, { "epoch": 3.0330793532800593, "grad_norm": 0.8872647285461426, "learning_rate": 2.6625293033708264e-05, "loss": 0.9706, "step": 16418 }, { "epoch": 3.0332651923434306, "grad_norm": 0.8415544629096985, "learning_rate": 2.661536789862622e-05, "loss": 0.8858, "step": 16419 }, { "epoch": 3.0334510314068015, "grad_norm": 0.8398575782775879, "learning_rate": 2.660544432981319e-05, "loss": 0.8452, "step": 16420 }, { "epoch": 3.033636870470173, "grad_norm": 0.8813350796699524, "learning_rate": 2.6595522327481027e-05, "loss": 0.717, "step": 16421 }, { "epoch": 3.033822709533544, "grad_norm": 0.9681750535964966, "learning_rate": 2.6585601891841472e-05, "loss": 1.1187, "step": 16422 }, { "epoch": 3.034008548596915, "grad_norm": 0.9663285613059998, "learning_rate": 2.6575683023106236e-05, "loss": 0.8364, "step": 16423 }, { "epoch": 3.0341943876602864, "grad_norm": 0.892166018486023, "learning_rate": 2.656576572148707e-05, "loss": 0.8615, "step": 16424 }, { "epoch": 3.0343802267236573, "grad_norm": 0.7774261832237244, "learning_rate": 2.655584998719558e-05, "loss": 0.9411, "step": 16425 }, { "epoch": 3.0345660657870286, "grad_norm": 0.9727977514266968, "learning_rate": 2.654593582044348e-05, "loss": 0.7237, "step": 16426 }, { "epoch": 3.0347519048503995, "grad_norm": 0.7590813636779785, "learning_rate": 2.6536023221442318e-05, "loss": 0.6202, "step": 16427 }, { "epoch": 3.034937743913771, "grad_norm": 1.093888282775879, "learning_rate": 2.652611219040365e-05, "loss": 0.712, "step": 16428 }, { "epoch": 
3.0351235829771417, "grad_norm": 0.8715057969093323, "learning_rate": 2.6516202727539052e-05, "loss": 0.8436, "step": 16429 }, { "epoch": 3.035309422040513, "grad_norm": 0.8790238499641418, "learning_rate": 2.6506294833060008e-05, "loss": 0.7807, "step": 16430 }, { "epoch": 3.035495261103884, "grad_norm": 0.9124879837036133, "learning_rate": 2.6496388507177982e-05, "loss": 0.9362, "step": 16431 }, { "epoch": 3.0356811001672552, "grad_norm": 0.9696473479270935, "learning_rate": 2.6486483750104373e-05, "loss": 0.7244, "step": 16432 }, { "epoch": 3.035866939230626, "grad_norm": 0.836959958076477, "learning_rate": 2.6476580562050645e-05, "loss": 0.775, "step": 16433 }, { "epoch": 3.0360527782939974, "grad_norm": 0.9190247058868408, "learning_rate": 2.6466678943228106e-05, "loss": 0.7233, "step": 16434 }, { "epoch": 3.0362386173573683, "grad_norm": 1.5903725624084473, "learning_rate": 2.6456778893848144e-05, "loss": 1.0188, "step": 16435 }, { "epoch": 3.0364244564207397, "grad_norm": 0.8425945043563843, "learning_rate": 2.6446880414122e-05, "loss": 0.7854, "step": 16436 }, { "epoch": 3.0366102954841105, "grad_norm": 0.8605955243110657, "learning_rate": 2.6436983504261005e-05, "loss": 0.7152, "step": 16437 }, { "epoch": 3.036796134547482, "grad_norm": 1.015659213066101, "learning_rate": 2.6427088164476353e-05, "loss": 0.8773, "step": 16438 }, { "epoch": 3.036981973610853, "grad_norm": 0.7520237565040588, "learning_rate": 2.641719439497925e-05, "loss": 0.7434, "step": 16439 }, { "epoch": 3.037167812674224, "grad_norm": 0.9844590425491333, "learning_rate": 2.640730219598083e-05, "loss": 0.9365, "step": 16440 }, { "epoch": 3.0373536517375954, "grad_norm": 1.0298523902893066, "learning_rate": 2.639741156769229e-05, "loss": 0.8222, "step": 16441 }, { "epoch": 3.0375394908009663, "grad_norm": 0.746608555316925, "learning_rate": 2.638752251032468e-05, "loss": 0.8316, "step": 16442 }, { "epoch": 3.0377253298643376, "grad_norm": 0.9144389033317566, "learning_rate": 
2.6377635024089087e-05, "loss": 0.5609, "step": 16443 }, { "epoch": 3.0379111689277085, "grad_norm": 1.2635074853897095, "learning_rate": 2.6367749109196505e-05, "loss": 0.6612, "step": 16444 }, { "epoch": 3.03809700799108, "grad_norm": 0.828485906124115, "learning_rate": 2.6357864765857953e-05, "loss": 0.5799, "step": 16445 }, { "epoch": 3.0382828470544507, "grad_norm": 1.4046440124511719, "learning_rate": 2.6347981994284442e-05, "loss": 0.9053, "step": 16446 }, { "epoch": 3.038468686117822, "grad_norm": 0.800578773021698, "learning_rate": 2.633810079468686e-05, "loss": 0.8299, "step": 16447 }, { "epoch": 3.038654525181193, "grad_norm": 0.8983428478240967, "learning_rate": 2.632822116727608e-05, "loss": 0.7084, "step": 16448 }, { "epoch": 3.0388403642445643, "grad_norm": 0.91361403465271, "learning_rate": 2.6318343112263012e-05, "loss": 0.6849, "step": 16449 }, { "epoch": 3.039026203307935, "grad_norm": 1.0825188159942627, "learning_rate": 2.6308466629858474e-05, "loss": 0.9871, "step": 16450 }, { "epoch": 3.0392120423713065, "grad_norm": 1.1262755393981934, "learning_rate": 2.629859172027326e-05, "loss": 0.733, "step": 16451 }, { "epoch": 3.0393978814346774, "grad_norm": 0.7709237933158875, "learning_rate": 2.6288718383718092e-05, "loss": 0.6211, "step": 16452 }, { "epoch": 3.0395837204980487, "grad_norm": 0.9766125082969666, "learning_rate": 2.6278846620403776e-05, "loss": 0.4818, "step": 16453 }, { "epoch": 3.03976955956142, "grad_norm": 0.9492712020874023, "learning_rate": 2.6268976430540958e-05, "loss": 0.858, "step": 16454 }, { "epoch": 3.039955398624791, "grad_norm": 0.9925112128257751, "learning_rate": 2.6259107814340287e-05, "loss": 0.6898, "step": 16455 }, { "epoch": 3.0401412376881622, "grad_norm": 0.9052692651748657, "learning_rate": 2.6249240772012417e-05, "loss": 0.7026, "step": 16456 }, { "epoch": 3.040327076751533, "grad_norm": 0.8264253735542297, "learning_rate": 2.6239375303767966e-05, "loss": 0.737, "step": 16457 }, { "epoch": 
3.0405129158149045, "grad_norm": 0.9099843502044678, "learning_rate": 2.6229511409817474e-05, "loss": 0.7609, "step": 16458 }, { "epoch": 3.0406987548782753, "grad_norm": 0.8074800968170166, "learning_rate": 2.6219649090371466e-05, "loss": 0.7593, "step": 16459 }, { "epoch": 3.0408845939416467, "grad_norm": 1.121508240699768, "learning_rate": 2.620978834564041e-05, "loss": 0.9156, "step": 16460 }, { "epoch": 3.0410704330050176, "grad_norm": 0.8222777843475342, "learning_rate": 2.6199929175834813e-05, "loss": 0.8353, "step": 16461 }, { "epoch": 3.041256272068389, "grad_norm": 1.0044645071029663, "learning_rate": 2.6190071581165086e-05, "loss": 0.732, "step": 16462 }, { "epoch": 3.0414421111317598, "grad_norm": 1.001706600189209, "learning_rate": 2.6180215561841614e-05, "loss": 0.9493, "step": 16463 }, { "epoch": 3.041627950195131, "grad_norm": 0.9047880172729492, "learning_rate": 2.617036111807474e-05, "loss": 0.8127, "step": 16464 }, { "epoch": 3.041813789258502, "grad_norm": 0.8721327185630798, "learning_rate": 2.61605082500748e-05, "loss": 0.8758, "step": 16465 }, { "epoch": 3.0419996283218733, "grad_norm": 1.8152014017105103, "learning_rate": 2.6150656958052132e-05, "loss": 0.9119, "step": 16466 }, { "epoch": 3.042185467385244, "grad_norm": 0.8496637940406799, "learning_rate": 2.614080724221697e-05, "loss": 0.6909, "step": 16467 }, { "epoch": 3.0423713064486155, "grad_norm": 0.9403654932975769, "learning_rate": 2.6130959102779495e-05, "loss": 0.891, "step": 16468 }, { "epoch": 3.0425571455119864, "grad_norm": 0.9045391082763672, "learning_rate": 2.6121112539949954e-05, "loss": 0.7195, "step": 16469 }, { "epoch": 3.0427429845753577, "grad_norm": 0.8190896511077881, "learning_rate": 2.6111267553938502e-05, "loss": 0.777, "step": 16470 }, { "epoch": 3.042928823638729, "grad_norm": 0.892221212387085, "learning_rate": 2.6101424144955235e-05, "loss": 0.8231, "step": 16471 }, { "epoch": 3.0431146627021, "grad_norm": 0.8593822717666626, "learning_rate": 
2.6091582313210216e-05, "loss": 0.8822, "step": 16472 }, { "epoch": 3.0433005017654713, "grad_norm": 0.7897011637687683, "learning_rate": 2.6081742058913584e-05, "loss": 0.6229, "step": 16473 }, { "epoch": 3.043486340828842, "grad_norm": 0.9321253299713135, "learning_rate": 2.607190338227532e-05, "loss": 0.8131, "step": 16474 }, { "epoch": 3.0436721798922135, "grad_norm": 0.8023005723953247, "learning_rate": 2.6062066283505403e-05, "loss": 0.6842, "step": 16475 }, { "epoch": 3.0438580189555844, "grad_norm": 1.1247652769088745, "learning_rate": 2.6052230762813745e-05, "loss": 0.8034, "step": 16476 }, { "epoch": 3.0440438580189557, "grad_norm": 0.7327141165733337, "learning_rate": 2.6042396820410375e-05, "loss": 0.5958, "step": 16477 }, { "epoch": 3.0442296970823266, "grad_norm": 1.011448621749878, "learning_rate": 2.6032564456505138e-05, "loss": 0.9633, "step": 16478 }, { "epoch": 3.044415536145698, "grad_norm": 0.7816374897956848, "learning_rate": 2.6022733671307876e-05, "loss": 0.691, "step": 16479 }, { "epoch": 3.044601375209069, "grad_norm": 0.9759527444839478, "learning_rate": 2.6012904465028376e-05, "loss": 0.8437, "step": 16480 }, { "epoch": 3.04478721427244, "grad_norm": 0.7570684552192688, "learning_rate": 2.6003076837876507e-05, "loss": 0.7463, "step": 16481 }, { "epoch": 3.044973053335811, "grad_norm": 0.9799579977989197, "learning_rate": 2.5993250790061973e-05, "loss": 0.8084, "step": 16482 }, { "epoch": 3.0451588923991824, "grad_norm": 1.0092289447784424, "learning_rate": 2.5983426321794502e-05, "loss": 0.9419, "step": 16483 }, { "epoch": 3.0453447314625532, "grad_norm": 0.918048620223999, "learning_rate": 2.5973603433283746e-05, "loss": 1.0072, "step": 16484 }, { "epoch": 3.0455305705259246, "grad_norm": 1.0498931407928467, "learning_rate": 2.596378212473941e-05, "loss": 0.9556, "step": 16485 }, { "epoch": 3.0457164095892955, "grad_norm": 0.6764087080955505, "learning_rate": 2.595396239637108e-05, "loss": 0.6806, "step": 16486 }, { "epoch": 
3.045902248652667, "grad_norm": 0.8425265550613403, "learning_rate": 2.594414424838838e-05, "loss": 0.8192, "step": 16487 }, { "epoch": 3.046088087716038, "grad_norm": 0.8224844932556152, "learning_rate": 2.5934327681000802e-05, "loss": 0.8188, "step": 16488 }, { "epoch": 3.046273926779409, "grad_norm": 0.7876110076904297, "learning_rate": 2.5924512694417924e-05, "loss": 0.7812, "step": 16489 }, { "epoch": 3.0464597658427803, "grad_norm": 0.8870657086372375, "learning_rate": 2.591469928884922e-05, "loss": 0.6744, "step": 16490 }, { "epoch": 3.046645604906151, "grad_norm": 0.7624846696853638, "learning_rate": 2.5904887464504114e-05, "loss": 0.711, "step": 16491 }, { "epoch": 3.0468314439695225, "grad_norm": 1.009589672088623, "learning_rate": 2.5895077221592e-05, "loss": 1.0146, "step": 16492 }, { "epoch": 3.0470172830328934, "grad_norm": 0.8577511310577393, "learning_rate": 2.588526856032233e-05, "loss": 0.8643, "step": 16493 }, { "epoch": 3.0472031220962648, "grad_norm": 0.9337430596351624, "learning_rate": 2.5875461480904406e-05, "loss": 0.8704, "step": 16494 }, { "epoch": 3.0473889611596356, "grad_norm": 0.801527738571167, "learning_rate": 2.5865655983547566e-05, "loss": 0.7768, "step": 16495 }, { "epoch": 3.047574800223007, "grad_norm": 0.9945794343948364, "learning_rate": 2.585585206846104e-05, "loss": 0.8735, "step": 16496 }, { "epoch": 3.047760639286378, "grad_norm": 0.8158237338066101, "learning_rate": 2.5846049735854117e-05, "loss": 0.6968, "step": 16497 }, { "epoch": 3.047946478349749, "grad_norm": 0.779066264629364, "learning_rate": 2.583624898593605e-05, "loss": 0.7956, "step": 16498 }, { "epoch": 3.04813231741312, "grad_norm": 0.9122100472450256, "learning_rate": 2.5826449818915964e-05, "loss": 0.7876, "step": 16499 }, { "epoch": 3.0483181564764914, "grad_norm": 1.0245736837387085, "learning_rate": 2.5816652235003e-05, "loss": 0.7925, "step": 16500 }, { "epoch": 3.0485039955398623, "grad_norm": 0.6653180122375488, "learning_rate": 
2.5806856234406317e-05, "loss": 0.457, "step": 16501 }, { "epoch": 3.0486898346032336, "grad_norm": 0.9029264450073242, "learning_rate": 2.5797061817334967e-05, "loss": 0.8034, "step": 16502 }, { "epoch": 3.0488756736666045, "grad_norm": 0.8435755372047424, "learning_rate": 2.578726898399799e-05, "loss": 0.8431, "step": 16503 }, { "epoch": 3.049061512729976, "grad_norm": 0.9914752244949341, "learning_rate": 2.5777477734604383e-05, "loss": 0.9909, "step": 16504 }, { "epoch": 3.049247351793347, "grad_norm": 0.776179313659668, "learning_rate": 2.576768806936316e-05, "loss": 0.7043, "step": 16505 }, { "epoch": 3.049433190856718, "grad_norm": 1.0241563320159912, "learning_rate": 2.575789998848326e-05, "loss": 0.7818, "step": 16506 }, { "epoch": 3.0496190299200894, "grad_norm": 0.928352952003479, "learning_rate": 2.5748113492173543e-05, "loss": 0.9997, "step": 16507 }, { "epoch": 3.0498048689834603, "grad_norm": 0.8682227730751038, "learning_rate": 2.5738328580642922e-05, "loss": 0.9907, "step": 16508 }, { "epoch": 3.0499907080468316, "grad_norm": 1.1242249011993408, "learning_rate": 2.5728545254100268e-05, "loss": 0.728, "step": 16509 }, { "epoch": 3.0501765471102025, "grad_norm": 0.9435965418815613, "learning_rate": 2.5718763512754363e-05, "loss": 0.8136, "step": 16510 }, { "epoch": 3.050362386173574, "grad_norm": 0.8476418852806091, "learning_rate": 2.5708983356813975e-05, "loss": 0.6715, "step": 16511 }, { "epoch": 3.0505482252369447, "grad_norm": 0.9711516499519348, "learning_rate": 2.569920478648783e-05, "loss": 0.7907, "step": 16512 }, { "epoch": 3.050734064300316, "grad_norm": 0.8876985907554626, "learning_rate": 2.5689427801984678e-05, "loss": 0.9151, "step": 16513 }, { "epoch": 3.050919903363687, "grad_norm": 0.8093453049659729, "learning_rate": 2.5679652403513167e-05, "loss": 0.6496, "step": 16514 }, { "epoch": 3.0511057424270582, "grad_norm": 1.0686590671539307, "learning_rate": 2.5669878591281926e-05, "loss": 0.775, "step": 16515 }, { "epoch": 
3.051291581490429, "grad_norm": 0.7788293361663818, "learning_rate": 2.566010636549956e-05, "loss": 0.8973, "step": 16516 }, { "epoch": 3.0514774205538004, "grad_norm": 0.8514882922172546, "learning_rate": 2.5650335726374662e-05, "loss": 0.8554, "step": 16517 }, { "epoch": 3.0516632596171713, "grad_norm": 0.8660567998886108, "learning_rate": 2.5640566674115742e-05, "loss": 0.9811, "step": 16518 }, { "epoch": 3.0518490986805427, "grad_norm": 1.5931254625320435, "learning_rate": 2.5630799208931345e-05, "loss": 1.1427, "step": 16519 }, { "epoch": 3.0520349377439135, "grad_norm": 1.0193164348602295, "learning_rate": 2.56210333310299e-05, "loss": 0.9796, "step": 16520 }, { "epoch": 3.052220776807285, "grad_norm": 0.851655125617981, "learning_rate": 2.5611269040619867e-05, "loss": 0.6453, "step": 16521 }, { "epoch": 3.052406615870656, "grad_norm": 0.8646299242973328, "learning_rate": 2.5601506337909654e-05, "loss": 0.8345, "step": 16522 }, { "epoch": 3.052592454934027, "grad_norm": 0.863435685634613, "learning_rate": 2.559174522310761e-05, "loss": 0.8992, "step": 16523 }, { "epoch": 3.0527782939973984, "grad_norm": 0.7195289134979248, "learning_rate": 2.5581985696422052e-05, "loss": 0.545, "step": 16524 }, { "epoch": 3.0529641330607693, "grad_norm": 0.9412771463394165, "learning_rate": 2.5572227758061328e-05, "loss": 0.7509, "step": 16525 }, { "epoch": 3.0531499721241406, "grad_norm": 0.9601781964302063, "learning_rate": 2.556247140823368e-05, "loss": 0.8078, "step": 16526 }, { "epoch": 3.0533358111875115, "grad_norm": 0.9324098229408264, "learning_rate": 2.55527166471473e-05, "loss": 0.7121, "step": 16527 }, { "epoch": 3.053521650250883, "grad_norm": 0.7718344926834106, "learning_rate": 2.554296347501044e-05, "loss": 0.8115, "step": 16528 }, { "epoch": 3.0537074893142537, "grad_norm": 0.7441035509109497, "learning_rate": 2.5533211892031273e-05, "loss": 0.931, "step": 16529 }, { "epoch": 3.053893328377625, "grad_norm": 0.9605115056037903, "learning_rate": 
2.552346189841791e-05, "loss": 0.8943, "step": 16530 }, { "epoch": 3.054079167440996, "grad_norm": 0.7816125154495239, "learning_rate": 2.5513713494378455e-05, "loss": 0.6336, "step": 16531 }, { "epoch": 3.0542650065043673, "grad_norm": 0.792016863822937, "learning_rate": 2.5503966680120916e-05, "loss": 0.7136, "step": 16532 }, { "epoch": 3.054450845567738, "grad_norm": 0.877753496170044, "learning_rate": 2.5494221455853408e-05, "loss": 0.596, "step": 16533 }, { "epoch": 3.0546366846311095, "grad_norm": 0.9530837535858154, "learning_rate": 2.548447782178388e-05, "loss": 0.9308, "step": 16534 }, { "epoch": 3.0548225236944804, "grad_norm": 0.9567283987998962, "learning_rate": 2.5474735778120306e-05, "loss": 0.827, "step": 16535 }, { "epoch": 3.0550083627578517, "grad_norm": 1.026564359664917, "learning_rate": 2.546499532507057e-05, "loss": 0.8805, "step": 16536 }, { "epoch": 3.055194201821223, "grad_norm": 0.7885897755622864, "learning_rate": 2.5455256462842638e-05, "loss": 1.095, "step": 16537 }, { "epoch": 3.055380040884594, "grad_norm": 0.815269410610199, "learning_rate": 2.544551919164433e-05, "loss": 0.7272, "step": 16538 }, { "epoch": 3.0555658799479652, "grad_norm": 0.9533278942108154, "learning_rate": 2.5435783511683443e-05, "loss": 0.8395, "step": 16539 }, { "epoch": 3.055751719011336, "grad_norm": 0.7652965784072876, "learning_rate": 2.542604942316781e-05, "loss": 0.8543, "step": 16540 }, { "epoch": 3.0559375580747075, "grad_norm": 0.8190576434135437, "learning_rate": 2.5416316926305195e-05, "loss": 0.6892, "step": 16541 }, { "epoch": 3.0561233971380783, "grad_norm": 1.004726529121399, "learning_rate": 2.5406586021303302e-05, "loss": 1.1388, "step": 16542 }, { "epoch": 3.0563092362014497, "grad_norm": 0.9118068218231201, "learning_rate": 2.5396856708369832e-05, "loss": 0.7668, "step": 16543 }, { "epoch": 3.0564950752648206, "grad_norm": 1.0773011445999146, "learning_rate": 2.5387128987712404e-05, "loss": 0.9329, "step": 16544 }, { "epoch": 
3.056680914328192, "grad_norm": 0.9110521674156189, "learning_rate": 2.537740285953869e-05, "loss": 0.9904, "step": 16545 }, { "epoch": 3.0568667533915628, "grad_norm": 0.871599018573761, "learning_rate": 2.5367678324056253e-05, "loss": 0.5775, "step": 16546 }, { "epoch": 3.057052592454934, "grad_norm": 0.8627876043319702, "learning_rate": 2.535795538147262e-05, "loss": 0.8143, "step": 16547 }, { "epoch": 3.057238431518305, "grad_norm": 0.7532731294631958, "learning_rate": 2.5348234031995377e-05, "loss": 0.7186, "step": 16548 }, { "epoch": 3.0574242705816763, "grad_norm": 0.8290095329284668, "learning_rate": 2.5338514275831925e-05, "loss": 0.6237, "step": 16549 }, { "epoch": 3.057610109645047, "grad_norm": 0.8998545408248901, "learning_rate": 2.5328796113189802e-05, "loss": 0.7936, "step": 16550 }, { "epoch": 3.0577959487084185, "grad_norm": 0.8460700511932373, "learning_rate": 2.531907954427638e-05, "loss": 0.919, "step": 16551 }, { "epoch": 3.0579817877717894, "grad_norm": 0.9167879819869995, "learning_rate": 2.530936456929902e-05, "loss": 1.0441, "step": 16552 }, { "epoch": 3.0581676268351607, "grad_norm": 0.8317403197288513, "learning_rate": 2.5299651188465136e-05, "loss": 0.8466, "step": 16553 }, { "epoch": 3.058353465898532, "grad_norm": 0.8379568457603455, "learning_rate": 2.5289939401982e-05, "loss": 0.8954, "step": 16554 }, { "epoch": 3.058539304961903, "grad_norm": 0.7911828756332397, "learning_rate": 2.5280229210056895e-05, "loss": 0.8179, "step": 16555 }, { "epoch": 3.0587251440252743, "grad_norm": 0.9084653854370117, "learning_rate": 2.527052061289705e-05, "loss": 0.8155, "step": 16556 }, { "epoch": 3.058910983088645, "grad_norm": 0.8726891875267029, "learning_rate": 2.526081361070973e-05, "loss": 0.6489, "step": 16557 }, { "epoch": 3.0590968221520165, "grad_norm": 0.8376065492630005, "learning_rate": 2.525110820370209e-05, "loss": 0.7411, "step": 16558 }, { "epoch": 3.0592826612153874, "grad_norm": 0.8209545016288757, "learning_rate": 
2.5241404392081237e-05, "loss": 0.7611, "step": 16559 }, { "epoch": 3.0594685002787587, "grad_norm": 0.8267958164215088, "learning_rate": 2.5231702176054317e-05, "loss": 0.6398, "step": 16560 }, { "epoch": 3.0596543393421296, "grad_norm": 0.9265934228897095, "learning_rate": 2.5222001555828434e-05, "loss": 0.9597, "step": 16561 }, { "epoch": 3.059840178405501, "grad_norm": 0.832126259803772, "learning_rate": 2.5212302531610598e-05, "loss": 0.7867, "step": 16562 }, { "epoch": 3.060026017468872, "grad_norm": 0.8703610897064209, "learning_rate": 2.5202605103607835e-05, "loss": 0.911, "step": 16563 }, { "epoch": 3.060211856532243, "grad_norm": 0.7699049711227417, "learning_rate": 2.5192909272027076e-05, "loss": 0.959, "step": 16564 }, { "epoch": 3.060397695595614, "grad_norm": 0.9440488815307617, "learning_rate": 2.5183215037075324e-05, "loss": 0.7962, "step": 16565 }, { "epoch": 3.0605835346589854, "grad_norm": 0.8586769104003906, "learning_rate": 2.517352239895946e-05, "loss": 0.8593, "step": 16566 }, { "epoch": 3.0607693737223562, "grad_norm": 0.7995127439498901, "learning_rate": 2.5163831357886314e-05, "loss": 0.6925, "step": 16567 }, { "epoch": 3.0609552127857276, "grad_norm": 0.7316787242889404, "learning_rate": 2.51541419140628e-05, "loss": 0.8178, "step": 16568 }, { "epoch": 3.0611410518490985, "grad_norm": 0.8505775928497314, "learning_rate": 2.5144454067695677e-05, "loss": 0.8638, "step": 16569 }, { "epoch": 3.06132689091247, "grad_norm": 0.7964285612106323, "learning_rate": 2.513476781899171e-05, "loss": 0.925, "step": 16570 }, { "epoch": 3.061512729975841, "grad_norm": 1.5267374515533447, "learning_rate": 2.512508316815767e-05, "loss": 1.072, "step": 16571 }, { "epoch": 3.061698569039212, "grad_norm": 0.8420001864433289, "learning_rate": 2.5115400115400213e-05, "loss": 0.8227, "step": 16572 }, { "epoch": 3.0618844081025833, "grad_norm": 0.9002656936645508, "learning_rate": 2.5105718660926058e-05, "loss": 0.808, "step": 16573 }, { "epoch": 3.062070247165954, 
"grad_norm": 0.9324301481246948, "learning_rate": 2.5096038804941812e-05, "loss": 0.6955, "step": 16574 }, { "epoch": 3.0622560862293255, "grad_norm": 0.7720271944999695, "learning_rate": 2.5086360547654087e-05, "loss": 0.7693, "step": 16575 }, { "epoch": 3.0624419252926964, "grad_norm": 1.048789620399475, "learning_rate": 2.5076683889269405e-05, "loss": 0.9154, "step": 16576 }, { "epoch": 3.0626277643560678, "grad_norm": 0.9712361097335815, "learning_rate": 2.5067008829994365e-05, "loss": 0.8935, "step": 16577 }, { "epoch": 3.0628136034194386, "grad_norm": 0.9016991257667542, "learning_rate": 2.5057335370035427e-05, "loss": 0.7707, "step": 16578 }, { "epoch": 3.06299944248281, "grad_norm": 0.869917631149292, "learning_rate": 2.504766350959903e-05, "loss": 0.792, "step": 16579 }, { "epoch": 3.063185281546181, "grad_norm": 0.7795620560646057, "learning_rate": 2.5037993248891667e-05, "loss": 0.8157, "step": 16580 }, { "epoch": 3.063371120609552, "grad_norm": 0.8983386754989624, "learning_rate": 2.5028324588119656e-05, "loss": 0.8888, "step": 16581 }, { "epoch": 3.063556959672923, "grad_norm": 0.8761674165725708, "learning_rate": 2.5018657527489443e-05, "loss": 0.7019, "step": 16582 }, { "epoch": 3.0637427987362944, "grad_norm": 0.8494465351104736, "learning_rate": 2.5008992067207303e-05, "loss": 0.6808, "step": 16583 }, { "epoch": 3.0639286377996653, "grad_norm": 0.8243811726570129, "learning_rate": 2.499932820747951e-05, "loss": 0.8011, "step": 16584 }, { "epoch": 3.0641144768630366, "grad_norm": 3.3844828605651855, "learning_rate": 2.4989665948512387e-05, "loss": 1.0565, "step": 16585 }, { "epoch": 3.064300315926408, "grad_norm": 0.9450771808624268, "learning_rate": 2.498000529051211e-05, "loss": 1.0121, "step": 16586 }, { "epoch": 3.064486154989779, "grad_norm": 0.8206013441085815, "learning_rate": 2.497034623368486e-05, "loss": 0.7842, "step": 16587 }, { "epoch": 3.06467199405315, "grad_norm": 0.8935214281082153, "learning_rate": 2.496068877823684e-05, "loss": 
0.6792, "step": 16588 }, { "epoch": 3.064857833116521, "grad_norm": 0.9215742945671082, "learning_rate": 2.495103292437415e-05, "loss": 0.9773, "step": 16589 }, { "epoch": 3.0650436721798924, "grad_norm": 0.9969448447227478, "learning_rate": 2.4941378672302862e-05, "loss": 0.8248, "step": 16590 }, { "epoch": 3.0652295112432633, "grad_norm": 1.2787046432495117, "learning_rate": 2.493172602222902e-05, "loss": 0.8893, "step": 16591 }, { "epoch": 3.0654153503066346, "grad_norm": 0.8934160470962524, "learning_rate": 2.4922074974358657e-05, "loss": 0.7708, "step": 16592 }, { "epoch": 3.0656011893700055, "grad_norm": 0.8716941475868225, "learning_rate": 2.4912425528897798e-05, "loss": 0.9923, "step": 16593 }, { "epoch": 3.065787028433377, "grad_norm": 0.911169707775116, "learning_rate": 2.490277768605237e-05, "loss": 0.7935, "step": 16594 }, { "epoch": 3.0659728674967477, "grad_norm": 0.9753298759460449, "learning_rate": 2.489313144602826e-05, "loss": 0.8333, "step": 16595 }, { "epoch": 3.066158706560119, "grad_norm": 0.9423926472663879, "learning_rate": 2.4883486809031354e-05, "loss": 0.7983, "step": 16596 }, { "epoch": 3.06634454562349, "grad_norm": 0.8739957213401794, "learning_rate": 2.4873843775267545e-05, "loss": 0.9515, "step": 16597 }, { "epoch": 3.0665303846868612, "grad_norm": 0.9837615489959717, "learning_rate": 2.486420234494262e-05, "loss": 0.7153, "step": 16598 }, { "epoch": 3.066716223750232, "grad_norm": 0.8995093107223511, "learning_rate": 2.4854562518262327e-05, "loss": 0.7964, "step": 16599 }, { "epoch": 3.0669020628136034, "grad_norm": 0.8065444827079773, "learning_rate": 2.484492429543247e-05, "loss": 0.5506, "step": 16600 }, { "epoch": 3.0670879018769743, "grad_norm": 0.8899737596511841, "learning_rate": 2.4835287676658735e-05, "loss": 0.7886, "step": 16601 }, { "epoch": 3.0672737409403457, "grad_norm": 0.8818018436431885, "learning_rate": 2.482565266214677e-05, "loss": 0.7062, "step": 16602 }, { "epoch": 3.067459580003717, "grad_norm": 
1.153317928314209, "learning_rate": 2.4816019252102273e-05, "loss": 0.9368, "step": 16603 }, { "epoch": 3.067645419067088, "grad_norm": 0.8451851010322571, "learning_rate": 2.4806387446730795e-05, "loss": 0.6308, "step": 16604 }, { "epoch": 3.067831258130459, "grad_norm": 1.00625479221344, "learning_rate": 2.479675724623797e-05, "loss": 0.8113, "step": 16605 }, { "epoch": 3.06801709719383, "grad_norm": 1.0481326580047607, "learning_rate": 2.4787128650829293e-05, "loss": 0.8003, "step": 16606 }, { "epoch": 3.0682029362572014, "grad_norm": 0.8781499266624451, "learning_rate": 2.4777501660710267e-05, "loss": 0.9148, "step": 16607 }, { "epoch": 3.0683887753205723, "grad_norm": 1.05372154712677, "learning_rate": 2.476787627608641e-05, "loss": 0.8686, "step": 16608 }, { "epoch": 3.0685746143839436, "grad_norm": 0.9408324956893921, "learning_rate": 2.4758252497163137e-05, "loss": 0.6715, "step": 16609 }, { "epoch": 3.0687604534473145, "grad_norm": 1.0712614059448242, "learning_rate": 2.474863032414584e-05, "loss": 0.7837, "step": 16610 }, { "epoch": 3.068946292510686, "grad_norm": 0.7135563492774963, "learning_rate": 2.473900975723985e-05, "loss": 0.7282, "step": 16611 }, { "epoch": 3.0691321315740567, "grad_norm": 0.9954936504364014, "learning_rate": 2.4729390796650564e-05, "loss": 0.6655, "step": 16612 }, { "epoch": 3.069317970637428, "grad_norm": 0.8331687450408936, "learning_rate": 2.4719773442583273e-05, "loss": 0.9261, "step": 16613 }, { "epoch": 3.069503809700799, "grad_norm": 1.0044938325881958, "learning_rate": 2.4710157695243242e-05, "loss": 0.8095, "step": 16614 }, { "epoch": 3.0696896487641703, "grad_norm": 1.2021206617355347, "learning_rate": 2.4700543554835697e-05, "loss": 1.1148, "step": 16615 }, { "epoch": 3.069875487827541, "grad_norm": 0.7989461421966553, "learning_rate": 2.469093102156579e-05, "loss": 0.8558, "step": 16616 }, { "epoch": 3.0700613268909125, "grad_norm": 0.9222882986068726, "learning_rate": 2.468132009563876e-05, "loss": 0.732, "step": 
16617 }, { "epoch": 3.0702471659542834, "grad_norm": 0.9197480082511902, "learning_rate": 2.4671710777259703e-05, "loss": 0.8164, "step": 16618 }, { "epoch": 3.0704330050176547, "grad_norm": 0.8911687731742859, "learning_rate": 2.466210306663368e-05, "loss": 0.9387, "step": 16619 }, { "epoch": 3.070618844081026, "grad_norm": 0.8025472164154053, "learning_rate": 2.46524969639658e-05, "loss": 0.7469, "step": 16620 }, { "epoch": 3.070804683144397, "grad_norm": 0.9530259966850281, "learning_rate": 2.4642892469461088e-05, "loss": 1.0753, "step": 16621 }, { "epoch": 3.0709905222077682, "grad_norm": 0.8317549228668213, "learning_rate": 2.4633289583324503e-05, "loss": 0.8232, "step": 16622 }, { "epoch": 3.071176361271139, "grad_norm": 1.006614327430725, "learning_rate": 2.4623688305761005e-05, "loss": 0.8355, "step": 16623 }, { "epoch": 3.0713622003345105, "grad_norm": 0.7379666566848755, "learning_rate": 2.4614088636975518e-05, "loss": 0.6482, "step": 16624 }, { "epoch": 3.0715480393978813, "grad_norm": 1.028715968132019, "learning_rate": 2.4604490577172967e-05, "loss": 0.7598, "step": 16625 }, { "epoch": 3.0717338784612527, "grad_norm": 0.7389920353889465, "learning_rate": 2.459489412655819e-05, "loss": 0.6548, "step": 16626 }, { "epoch": 3.0719197175246236, "grad_norm": 0.9339531064033508, "learning_rate": 2.458529928533596e-05, "loss": 0.8555, "step": 16627 }, { "epoch": 3.072105556587995, "grad_norm": 1.1104316711425781, "learning_rate": 2.4575706053711134e-05, "loss": 1.0854, "step": 16628 }, { "epoch": 3.0722913956513658, "grad_norm": 0.7918559312820435, "learning_rate": 2.4566114431888432e-05, "loss": 0.7041, "step": 16629 }, { "epoch": 3.072477234714737, "grad_norm": 0.9732975959777832, "learning_rate": 2.455652442007257e-05, "loss": 0.6959, "step": 16630 }, { "epoch": 3.072663073778108, "grad_norm": 0.9053291082382202, "learning_rate": 2.454693601846819e-05, "loss": 0.8479, "step": 16631 }, { "epoch": 3.0728489128414793, "grad_norm": 0.800689160823822, 
"learning_rate": 2.4537349227280014e-05, "loss": 0.8431, "step": 16632 }, { "epoch": 3.07303475190485, "grad_norm": 0.9349857568740845, "learning_rate": 2.4527764046712587e-05, "loss": 0.8068, "step": 16633 }, { "epoch": 3.0732205909682215, "grad_norm": 1.96149742603302, "learning_rate": 2.4518180476970555e-05, "loss": 1.4771, "step": 16634 }, { "epoch": 3.073406430031593, "grad_norm": 0.8421056270599365, "learning_rate": 2.450859851825842e-05, "loss": 0.865, "step": 16635 }, { "epoch": 3.0735922690949637, "grad_norm": 0.8871978521347046, "learning_rate": 2.4499018170780673e-05, "loss": 0.8629, "step": 16636 }, { "epoch": 3.073778108158335, "grad_norm": 0.8812477588653564, "learning_rate": 2.4489439434741855e-05, "loss": 0.9481, "step": 16637 }, { "epoch": 3.073963947221706, "grad_norm": 0.890427827835083, "learning_rate": 2.447986231034636e-05, "loss": 0.8507, "step": 16638 }, { "epoch": 3.0741497862850773, "grad_norm": 0.9563420414924622, "learning_rate": 2.447028679779859e-05, "loss": 1.0243, "step": 16639 }, { "epoch": 3.074335625348448, "grad_norm": 0.8652082681655884, "learning_rate": 2.446071289730295e-05, "loss": 0.7468, "step": 16640 }, { "epoch": 3.0745214644118195, "grad_norm": 0.9163973331451416, "learning_rate": 2.445114060906377e-05, "loss": 0.8505, "step": 16641 }, { "epoch": 3.0747073034751904, "grad_norm": 0.8107301592826843, "learning_rate": 2.4441569933285345e-05, "loss": 0.6438, "step": 16642 }, { "epoch": 3.0748931425385617, "grad_norm": 0.8502079844474792, "learning_rate": 2.443200087017192e-05, "loss": 0.7834, "step": 16643 }, { "epoch": 3.0750789816019326, "grad_norm": 1.0325417518615723, "learning_rate": 2.442243341992776e-05, "loss": 0.802, "step": 16644 }, { "epoch": 3.075264820665304, "grad_norm": 0.7983143925666809, "learning_rate": 2.4412867582757083e-05, "loss": 0.8331, "step": 16645 }, { "epoch": 3.075450659728675, "grad_norm": 0.9426150321960449, "learning_rate": 2.4403303358864037e-05, "loss": 0.8964, "step": 16646 }, { "epoch": 
3.075636498792046, "grad_norm": 1.0253843069076538, "learning_rate": 2.439374074845272e-05, "loss": 0.907, "step": 16647 }, { "epoch": 3.075822337855417, "grad_norm": 0.8924049139022827, "learning_rate": 2.43841797517273e-05, "loss": 0.9502, "step": 16648 }, { "epoch": 3.0760081769187884, "grad_norm": 1.0296672582626343, "learning_rate": 2.43746203688918e-05, "loss": 0.9336, "step": 16649 }, { "epoch": 3.0761940159821592, "grad_norm": 1.3383376598358154, "learning_rate": 2.4365062600150234e-05, "loss": 0.8846, "step": 16650 }, { "epoch": 3.0763798550455306, "grad_norm": 0.789011538028717, "learning_rate": 2.43555064457066e-05, "loss": 0.8331, "step": 16651 }, { "epoch": 3.076565694108902, "grad_norm": 0.9009502530097961, "learning_rate": 2.4345951905764897e-05, "loss": 0.7493, "step": 16652 }, { "epoch": 3.076751533172273, "grad_norm": 0.8864755034446716, "learning_rate": 2.4336398980529017e-05, "loss": 0.8703, "step": 16653 }, { "epoch": 3.076937372235644, "grad_norm": 0.9883394837379456, "learning_rate": 2.4326847670202823e-05, "loss": 0.9677, "step": 16654 }, { "epoch": 3.077123211299015, "grad_norm": 1.1275808811187744, "learning_rate": 2.431729797499025e-05, "loss": 0.8323, "step": 16655 }, { "epoch": 3.0773090503623863, "grad_norm": 0.9039409756660461, "learning_rate": 2.430774989509503e-05, "loss": 0.9529, "step": 16656 }, { "epoch": 3.077494889425757, "grad_norm": 0.9433819651603699, "learning_rate": 2.4298203430721033e-05, "loss": 0.8014, "step": 16657 }, { "epoch": 3.0776807284891285, "grad_norm": 0.8646581172943115, "learning_rate": 2.4288658582071977e-05, "loss": 0.6687, "step": 16658 }, { "epoch": 3.0778665675524994, "grad_norm": 0.8743041753768921, "learning_rate": 2.4279115349351543e-05, "loss": 0.8719, "step": 16659 }, { "epoch": 3.0780524066158708, "grad_norm": 0.8597197532653809, "learning_rate": 2.4269573732763475e-05, "loss": 0.8298, "step": 16660 }, { "epoch": 3.0782382456792416, "grad_norm": 0.7918018102645874, "learning_rate": 
2.42600337325114e-05, "loss": 0.8337, "step": 16661 }, { "epoch": 3.078424084742613, "grad_norm": 0.7828530669212341, "learning_rate": 2.425049534879894e-05, "loss": 0.6693, "step": 16662 }, { "epoch": 3.078609923805984, "grad_norm": 0.894785463809967, "learning_rate": 2.424095858182962e-05, "loss": 0.8944, "step": 16663 }, { "epoch": 3.078795762869355, "grad_norm": 1.0368988513946533, "learning_rate": 2.4231423431807076e-05, "loss": 0.819, "step": 16664 }, { "epoch": 3.078981601932726, "grad_norm": 0.82077956199646, "learning_rate": 2.4221889898934734e-05, "loss": 0.6261, "step": 16665 }, { "epoch": 3.0791674409960974, "grad_norm": 0.8031370639801025, "learning_rate": 2.4212357983416146e-05, "loss": 0.9246, "step": 16666 }, { "epoch": 3.0793532800594683, "grad_norm": 0.8558308482170105, "learning_rate": 2.420282768545469e-05, "loss": 1.0473, "step": 16667 }, { "epoch": 3.0795391191228396, "grad_norm": 0.8579029440879822, "learning_rate": 2.4193299005253832e-05, "loss": 0.8786, "step": 16668 }, { "epoch": 3.079724958186211, "grad_norm": 0.8373103737831116, "learning_rate": 2.418377194301692e-05, "loss": 0.7825, "step": 16669 }, { "epoch": 3.079910797249582, "grad_norm": 0.8694421052932739, "learning_rate": 2.4174246498947284e-05, "loss": 0.7338, "step": 16670 }, { "epoch": 3.080096636312953, "grad_norm": 0.9413045644760132, "learning_rate": 2.4164722673248207e-05, "loss": 0.8307, "step": 16671 }, { "epoch": 3.080282475376324, "grad_norm": 0.713191568851471, "learning_rate": 2.415520046612302e-05, "loss": 0.5577, "step": 16672 }, { "epoch": 3.0804683144396954, "grad_norm": 0.9494543671607971, "learning_rate": 2.4145679877774928e-05, "loss": 0.7002, "step": 16673 }, { "epoch": 3.0806541535030663, "grad_norm": 1.0250353813171387, "learning_rate": 2.4136160908407113e-05, "loss": 0.8437, "step": 16674 }, { "epoch": 3.0808399925664376, "grad_norm": 0.9267086982727051, "learning_rate": 2.412664355822274e-05, "loss": 0.7667, "step": 16675 }, { "epoch": 3.0810258316298085, 
"grad_norm": 0.8907784223556519, "learning_rate": 2.4117127827424958e-05, "loss": 0.6792, "step": 16676 }, { "epoch": 3.08121167069318, "grad_norm": 0.7296506762504578, "learning_rate": 2.4107613716216882e-05, "loss": 0.9244, "step": 16677 }, { "epoch": 3.0813975097565507, "grad_norm": 0.789673388004303, "learning_rate": 2.409810122480155e-05, "loss": 0.6373, "step": 16678 }, { "epoch": 3.081583348819922, "grad_norm": 0.9214822053909302, "learning_rate": 2.408859035338198e-05, "loss": 0.8431, "step": 16679 }, { "epoch": 3.081769187883293, "grad_norm": 0.8458185791969299, "learning_rate": 2.4079081102161195e-05, "loss": 0.8627, "step": 16680 }, { "epoch": 3.0819550269466642, "grad_norm": 0.8897072076797485, "learning_rate": 2.4069573471342143e-05, "loss": 0.7229, "step": 16681 }, { "epoch": 3.082140866010035, "grad_norm": 0.9710602164268494, "learning_rate": 2.4060067461127755e-05, "loss": 0.5108, "step": 16682 }, { "epoch": 3.0823267050734064, "grad_norm": 0.9847256541252136, "learning_rate": 2.4050563071720867e-05, "loss": 0.623, "step": 16683 }, { "epoch": 3.0825125441367778, "grad_norm": 0.8316210508346558, "learning_rate": 2.4041060303324402e-05, "loss": 0.7586, "step": 16684 }, { "epoch": 3.0826983832001487, "grad_norm": 1.0670188665390015, "learning_rate": 2.403155915614117e-05, "loss": 0.8198, "step": 16685 }, { "epoch": 3.08288422226352, "grad_norm": 0.89921635389328, "learning_rate": 2.4022059630373896e-05, "loss": 0.8845, "step": 16686 }, { "epoch": 3.083070061326891, "grad_norm": 0.8962400555610657, "learning_rate": 2.401256172622538e-05, "loss": 0.9516, "step": 16687 }, { "epoch": 3.083255900390262, "grad_norm": 0.8482634425163269, "learning_rate": 2.4003065443898365e-05, "loss": 0.8793, "step": 16688 }, { "epoch": 3.083441739453633, "grad_norm": 1.0588843822479248, "learning_rate": 2.399357078359551e-05, "loss": 0.8523, "step": 16689 }, { "epoch": 3.0836275785170044, "grad_norm": 0.8351184725761414, "learning_rate": 2.398407774551944e-05, "loss": 
0.8478, "step": 16690 }, { "epoch": 3.0838134175803753, "grad_norm": 0.8612925410270691, "learning_rate": 2.397458632987276e-05, "loss": 0.7712, "step": 16691 }, { "epoch": 3.0839992566437466, "grad_norm": 1.1683576107025146, "learning_rate": 2.39650965368581e-05, "loss": 0.8221, "step": 16692 }, { "epoch": 3.0841850957071175, "grad_norm": 0.9445899128913879, "learning_rate": 2.3955608366677974e-05, "loss": 0.8417, "step": 16693 }, { "epoch": 3.084370934770489, "grad_norm": 0.690139651298523, "learning_rate": 2.3946121819534885e-05, "loss": 0.6781, "step": 16694 }, { "epoch": 3.0845567738338597, "grad_norm": 0.7465941905975342, "learning_rate": 2.3936636895631292e-05, "loss": 0.7303, "step": 16695 }, { "epoch": 3.084742612897231, "grad_norm": 1.1478382349014282, "learning_rate": 2.392715359516965e-05, "loss": 0.7927, "step": 16696 }, { "epoch": 3.084928451960602, "grad_norm": 0.8253434300422668, "learning_rate": 2.391767191835239e-05, "loss": 0.7372, "step": 16697 }, { "epoch": 3.0851142910239733, "grad_norm": 0.8607159852981567, "learning_rate": 2.3908191865381868e-05, "loss": 1.0916, "step": 16698 }, { "epoch": 3.085300130087344, "grad_norm": 0.8785241842269897, "learning_rate": 2.3898713436460375e-05, "loss": 0.7048, "step": 16699 }, { "epoch": 3.0854859691507155, "grad_norm": 1.2133129835128784, "learning_rate": 2.388923663179029e-05, "loss": 1.0641, "step": 16700 }, { "epoch": 3.0856718082140864, "grad_norm": 0.7245477437973022, "learning_rate": 2.3879761451573835e-05, "loss": 0.797, "step": 16701 }, { "epoch": 3.0858576472774577, "grad_norm": 0.95304274559021, "learning_rate": 2.3870287896013232e-05, "loss": 0.6126, "step": 16702 }, { "epoch": 3.086043486340829, "grad_norm": 0.9752274751663208, "learning_rate": 2.3860815965310668e-05, "loss": 0.9041, "step": 16703 }, { "epoch": 3.0862293254042, "grad_norm": 0.9736945629119873, "learning_rate": 2.3851345659668368e-05, "loss": 0.8335, "step": 16704 }, { "epoch": 3.0864151644675712, "grad_norm": 
1.0160160064697266, "learning_rate": 2.38418769792884e-05, "loss": 1.1237, "step": 16705 }, { "epoch": 3.086601003530942, "grad_norm": 0.8461440205574036, "learning_rate": 2.383240992437289e-05, "loss": 0.6576, "step": 16706 }, { "epoch": 3.0867868425943135, "grad_norm": 0.9372764825820923, "learning_rate": 2.382294449512381e-05, "loss": 0.8287, "step": 16707 }, { "epoch": 3.0869726816576843, "grad_norm": 1.056541085243225, "learning_rate": 2.3813480691743328e-05, "loss": 0.8241, "step": 16708 }, { "epoch": 3.0871585207210557, "grad_norm": 0.9410994648933411, "learning_rate": 2.3804018514433345e-05, "loss": 0.963, "step": 16709 }, { "epoch": 3.0873443597844266, "grad_norm": 1.1573671102523804, "learning_rate": 2.3794557963395836e-05, "loss": 0.7073, "step": 16710 }, { "epoch": 3.087530198847798, "grad_norm": 0.9475613236427307, "learning_rate": 2.378509903883268e-05, "loss": 0.8321, "step": 16711 }, { "epoch": 3.0877160379111688, "grad_norm": 1.05289626121521, "learning_rate": 2.377564174094583e-05, "loss": 0.8266, "step": 16712 }, { "epoch": 3.08790187697454, "grad_norm": 0.9490292072296143, "learning_rate": 2.3766186069937103e-05, "loss": 0.9654, "step": 16713 }, { "epoch": 3.088087716037911, "grad_norm": 0.788969874382019, "learning_rate": 2.37567320260083e-05, "loss": 0.8875, "step": 16714 }, { "epoch": 3.0882735551012823, "grad_norm": 0.9245768189430237, "learning_rate": 2.3747279609361196e-05, "loss": 0.642, "step": 16715 }, { "epoch": 3.088459394164653, "grad_norm": 0.9661019444465637, "learning_rate": 2.3737828820197583e-05, "loss": 0.8747, "step": 16716 }, { "epoch": 3.0886452332280245, "grad_norm": 0.8161077499389648, "learning_rate": 2.3728379658719102e-05, "loss": 0.8601, "step": 16717 }, { "epoch": 3.088831072291396, "grad_norm": 0.9887533187866211, "learning_rate": 2.3718932125127514e-05, "loss": 0.7007, "step": 16718 }, { "epoch": 3.0890169113547667, "grad_norm": 0.9117093086242676, "learning_rate": 2.3709486219624377e-05, "loss": 0.8345, "step": 
16719 }, { "epoch": 3.089202750418138, "grad_norm": 0.8773633241653442, "learning_rate": 2.3700041942411367e-05, "loss": 0.8651, "step": 16720 }, { "epoch": 3.089388589481509, "grad_norm": 0.9503114819526672, "learning_rate": 2.369059929369003e-05, "loss": 0.9321, "step": 16721 }, { "epoch": 3.0895744285448803, "grad_norm": 0.8335660696029663, "learning_rate": 2.3681158273661896e-05, "loss": 0.5858, "step": 16722 }, { "epoch": 3.089760267608251, "grad_norm": 0.8890185952186584, "learning_rate": 2.3671718882528437e-05, "loss": 0.7505, "step": 16723 }, { "epoch": 3.0899461066716225, "grad_norm": 0.9524630904197693, "learning_rate": 2.3662281120491182e-05, "loss": 0.6554, "step": 16724 }, { "epoch": 3.0901319457349934, "grad_norm": 0.8818069696426392, "learning_rate": 2.3652844987751542e-05, "loss": 0.7038, "step": 16725 }, { "epoch": 3.0903177847983647, "grad_norm": 1.1302887201309204, "learning_rate": 2.3643410484510908e-05, "loss": 1.0679, "step": 16726 }, { "epoch": 3.0905036238617356, "grad_norm": 0.8042498230934143, "learning_rate": 2.3633977610970616e-05, "loss": 0.886, "step": 16727 }, { "epoch": 3.090689462925107, "grad_norm": 1.2275042533874512, "learning_rate": 2.3624546367332022e-05, "loss": 0.6698, "step": 16728 }, { "epoch": 3.090875301988478, "grad_norm": 0.8624935150146484, "learning_rate": 2.361511675379645e-05, "loss": 0.9283, "step": 16729 }, { "epoch": 3.091061141051849, "grad_norm": 0.8328248262405396, "learning_rate": 2.3605688770565127e-05, "loss": 0.7019, "step": 16730 }, { "epoch": 3.09124698011522, "grad_norm": 1.3361327648162842, "learning_rate": 2.3596262417839255e-05, "loss": 0.9344, "step": 16731 }, { "epoch": 3.0914328191785914, "grad_norm": 1.0902270078659058, "learning_rate": 2.3586837695820064e-05, "loss": 0.822, "step": 16732 }, { "epoch": 3.0916186582419622, "grad_norm": 0.9357127547264099, "learning_rate": 2.3577414604708704e-05, "loss": 0.7635, "step": 16733 }, { "epoch": 3.0918044973053336, "grad_norm": 0.9868106245994568, 
"learning_rate": 2.3567993144706278e-05, "loss": 0.9043, "step": 16734 }, { "epoch": 3.091990336368705, "grad_norm": 1.0099672079086304, "learning_rate": 2.3558573316013845e-05, "loss": 0.8065, "step": 16735 }, { "epoch": 3.092176175432076, "grad_norm": 0.8899115920066833, "learning_rate": 2.354915511883252e-05, "loss": 0.6263, "step": 16736 }, { "epoch": 3.092362014495447, "grad_norm": 0.9453414678573608, "learning_rate": 2.353973855336329e-05, "loss": 0.7882, "step": 16737 }, { "epoch": 3.092547853558818, "grad_norm": 0.9318817257881165, "learning_rate": 2.3530323619807083e-05, "loss": 0.9171, "step": 16738 }, { "epoch": 3.0927336926221893, "grad_norm": 1.0196037292480469, "learning_rate": 2.3520910318364907e-05, "loss": 1.0691, "step": 16739 }, { "epoch": 3.09291953168556, "grad_norm": 0.9235552549362183, "learning_rate": 2.3511498649237674e-05, "loss": 1.0074, "step": 16740 }, { "epoch": 3.0931053707489315, "grad_norm": 0.9589453935623169, "learning_rate": 2.350208861262625e-05, "loss": 0.8637, "step": 16741 }, { "epoch": 3.0932912098123024, "grad_norm": 1.0189878940582275, "learning_rate": 2.3492680208731464e-05, "loss": 0.8796, "step": 16742 }, { "epoch": 3.0934770488756738, "grad_norm": 0.9348618388175964, "learning_rate": 2.3483273437754107e-05, "loss": 0.9578, "step": 16743 }, { "epoch": 3.0936628879390446, "grad_norm": 0.9557995200157166, "learning_rate": 2.3473868299894998e-05, "loss": 0.7415, "step": 16744 }, { "epoch": 3.093848727002416, "grad_norm": 0.7726553678512573, "learning_rate": 2.3464464795354834e-05, "loss": 0.7415, "step": 16745 }, { "epoch": 3.094034566065787, "grad_norm": 0.8676804304122925, "learning_rate": 2.3455062924334337e-05, "loss": 0.9098, "step": 16746 }, { "epoch": 3.094220405129158, "grad_norm": 1.062719464302063, "learning_rate": 2.3445662687034144e-05, "loss": 0.8672, "step": 16747 }, { "epoch": 3.094406244192529, "grad_norm": 0.8634195327758789, "learning_rate": 2.3436264083654925e-05, "loss": 0.7621, "step": 16748 }, { 
"epoch": 3.0945920832559004, "grad_norm": 0.8746082782745361, "learning_rate": 2.342686711439723e-05, "loss": 0.9445, "step": 16749 }, { "epoch": 3.0947779223192713, "grad_norm": 0.9249411821365356, "learning_rate": 2.3417471779461685e-05, "loss": 0.9334, "step": 16750 }, { "epoch": 3.0949637613826426, "grad_norm": 0.9423879981040955, "learning_rate": 2.3408078079048767e-05, "loss": 1.103, "step": 16751 }, { "epoch": 3.095149600446014, "grad_norm": 0.8189881443977356, "learning_rate": 2.3398686013358996e-05, "loss": 0.848, "step": 16752 }, { "epoch": 3.095335439509385, "grad_norm": 1.0123745203018188, "learning_rate": 2.3389295582592828e-05, "loss": 0.7669, "step": 16753 }, { "epoch": 3.095521278572756, "grad_norm": 0.9926055669784546, "learning_rate": 2.3379906786950677e-05, "loss": 0.793, "step": 16754 }, { "epoch": 3.095707117636127, "grad_norm": 0.7395652532577515, "learning_rate": 2.3370519626632902e-05, "loss": 0.7581, "step": 16755 }, { "epoch": 3.0958929566994984, "grad_norm": 0.826623797416687, "learning_rate": 2.336113410183991e-05, "loss": 0.7201, "step": 16756 }, { "epoch": 3.0960787957628693, "grad_norm": 0.7846941947937012, "learning_rate": 2.3351750212771995e-05, "loss": 0.7227, "step": 16757 }, { "epoch": 3.0962646348262406, "grad_norm": 0.9971020817756653, "learning_rate": 2.3342367959629408e-05, "loss": 0.9792, "step": 16758 }, { "epoch": 3.0964504738896115, "grad_norm": 1.027601718902588, "learning_rate": 2.3332987342612423e-05, "loss": 0.8237, "step": 16759 }, { "epoch": 3.096636312952983, "grad_norm": 0.7158558964729309, "learning_rate": 2.332360836192129e-05, "loss": 0.7387, "step": 16760 }, { "epoch": 3.0968221520163537, "grad_norm": 0.8798509240150452, "learning_rate": 2.331423101775616e-05, "loss": 0.8709, "step": 16761 }, { "epoch": 3.097007991079725, "grad_norm": 0.866066575050354, "learning_rate": 2.3304855310317166e-05, "loss": 0.7117, "step": 16762 }, { "epoch": 3.097193830143096, "grad_norm": 0.8405870795249939, "learning_rate": 
2.3295481239804384e-05, "loss": 0.739, "step": 16763 }, { "epoch": 3.0973796692064672, "grad_norm": 1.0137507915496826, "learning_rate": 2.328610880641797e-05, "loss": 1.0033, "step": 16764 }, { "epoch": 3.097565508269838, "grad_norm": 0.8602619171142578, "learning_rate": 2.3276738010357903e-05, "loss": 0.8401, "step": 16765 }, { "epoch": 3.0977513473332094, "grad_norm": 1.2083443403244019, "learning_rate": 2.3267368851824212e-05, "loss": 0.8599, "step": 16766 }, { "epoch": 3.0979371863965808, "grad_norm": 1.0404247045516968, "learning_rate": 2.325800133101682e-05, "loss": 0.7361, "step": 16767 }, { "epoch": 3.0981230254599517, "grad_norm": 0.777709424495697, "learning_rate": 2.3248635448135725e-05, "loss": 0.7952, "step": 16768 }, { "epoch": 3.098308864523323, "grad_norm": 0.9634824991226196, "learning_rate": 2.3239271203380797e-05, "loss": 0.8092, "step": 16769 }, { "epoch": 3.098494703586694, "grad_norm": 0.8834127187728882, "learning_rate": 2.3229908596951865e-05, "loss": 0.9975, "step": 16770 }, { "epoch": 3.098680542650065, "grad_norm": 1.0853376388549805, "learning_rate": 2.3220547629048796e-05, "loss": 0.9208, "step": 16771 }, { "epoch": 3.098866381713436, "grad_norm": 0.8186860084533691, "learning_rate": 2.32111882998714e-05, "loss": 0.8202, "step": 16772 }, { "epoch": 3.0990522207768074, "grad_norm": 0.86734539270401, "learning_rate": 2.3201830609619425e-05, "loss": 0.8744, "step": 16773 }, { "epoch": 3.0992380598401783, "grad_norm": 1.0710790157318115, "learning_rate": 2.319247455849257e-05, "loss": 0.8189, "step": 16774 }, { "epoch": 3.0994238989035496, "grad_norm": 1.0556211471557617, "learning_rate": 2.3183120146690528e-05, "loss": 0.7788, "step": 16775 }, { "epoch": 3.0996097379669205, "grad_norm": 1.0326217412948608, "learning_rate": 2.317376737441298e-05, "loss": 0.8049, "step": 16776 }, { "epoch": 3.099795577030292, "grad_norm": 1.3107826709747314, "learning_rate": 2.3164416241859533e-05, "loss": 1.2447, "step": 16777 }, { "epoch": 
3.0999814160936627, "grad_norm": 0.8404430150985718, "learning_rate": 2.3155066749229737e-05, "loss": 0.6428, "step": 16778 }, { "epoch": 3.100167255157034, "grad_norm": 0.903167188167572, "learning_rate": 2.31457188967232e-05, "loss": 0.8984, "step": 16779 }, { "epoch": 3.100353094220405, "grad_norm": 0.7048614025115967, "learning_rate": 2.3136372684539374e-05, "loss": 0.6329, "step": 16780 }, { "epoch": 3.1005389332837763, "grad_norm": 1.206588864326477, "learning_rate": 2.3127028112877803e-05, "loss": 1.0657, "step": 16781 }, { "epoch": 3.100724772347147, "grad_norm": 0.9022656679153442, "learning_rate": 2.31176851819379e-05, "loss": 0.847, "step": 16782 }, { "epoch": 3.1009106114105185, "grad_norm": 1.3397260904312134, "learning_rate": 2.3108343891919037e-05, "loss": 1.1737, "step": 16783 }, { "epoch": 3.10109645047389, "grad_norm": 0.836466372013092, "learning_rate": 2.309900424302065e-05, "loss": 0.751, "step": 16784 }, { "epoch": 3.1012822895372607, "grad_norm": 1.0850869417190552, "learning_rate": 2.3089666235442054e-05, "loss": 0.8397, "step": 16785 }, { "epoch": 3.101468128600632, "grad_norm": 0.8306275010108948, "learning_rate": 2.3080329869382544e-05, "loss": 0.7889, "step": 16786 }, { "epoch": 3.101653967664003, "grad_norm": 0.8751552700996399, "learning_rate": 2.3070995145041364e-05, "loss": 0.6941, "step": 16787 }, { "epoch": 3.1018398067273742, "grad_norm": 1.119346022605896, "learning_rate": 2.3061662062617804e-05, "loss": 0.9556, "step": 16788 }, { "epoch": 3.102025645790745, "grad_norm": 1.1482000350952148, "learning_rate": 2.3052330622311046e-05, "loss": 0.8685, "step": 16789 }, { "epoch": 3.1022114848541165, "grad_norm": 0.9722250699996948, "learning_rate": 2.3043000824320216e-05, "loss": 0.9658, "step": 16790 }, { "epoch": 3.1023973239174873, "grad_norm": 0.9512292146682739, "learning_rate": 2.3033672668844465e-05, "loss": 1.0272, "step": 16791 }, { "epoch": 3.1025831629808587, "grad_norm": 0.9728047847747803, "learning_rate": 
2.302434615608291e-05, "loss": 0.9121, "step": 16792 }, { "epoch": 3.1027690020442296, "grad_norm": 0.9288976192474365, "learning_rate": 2.3015021286234605e-05, "loss": 0.9073, "step": 16793 }, { "epoch": 3.102954841107601, "grad_norm": 0.9850062727928162, "learning_rate": 2.300569805949855e-05, "loss": 0.7848, "step": 16794 }, { "epoch": 3.1031406801709718, "grad_norm": 0.9823880791664124, "learning_rate": 2.2996376476073723e-05, "loss": 0.9452, "step": 16795 }, { "epoch": 3.103326519234343, "grad_norm": 0.9922701716423035, "learning_rate": 2.298705653615911e-05, "loss": 1.1013, "step": 16796 }, { "epoch": 3.103512358297714, "grad_norm": 1.0710746049880981, "learning_rate": 2.297773823995363e-05, "loss": 0.7522, "step": 16797 }, { "epoch": 3.1036981973610853, "grad_norm": 0.9887570738792419, "learning_rate": 2.296842158765612e-05, "loss": 0.5986, "step": 16798 }, { "epoch": 3.103884036424456, "grad_norm": 0.8032146096229553, "learning_rate": 2.2959106579465483e-05, "loss": 0.7118, "step": 16799 }, { "epoch": 3.1040698754878275, "grad_norm": 1.0194767713546753, "learning_rate": 2.2949793215580516e-05, "loss": 0.7177, "step": 16800 }, { "epoch": 3.104255714551199, "grad_norm": 0.9036667346954346, "learning_rate": 2.294048149619995e-05, "loss": 0.9459, "step": 16801 }, { "epoch": 3.1044415536145697, "grad_norm": 0.8096848130226135, "learning_rate": 2.29311714215226e-05, "loss": 0.6868, "step": 16802 }, { "epoch": 3.104627392677941, "grad_norm": 0.8234491348266602, "learning_rate": 2.292186299174712e-05, "loss": 0.7727, "step": 16803 }, { "epoch": 3.104813231741312, "grad_norm": 0.9342586994171143, "learning_rate": 2.291255620707222e-05, "loss": 0.7721, "step": 16804 }, { "epoch": 3.1049990708046833, "grad_norm": 0.8771103620529175, "learning_rate": 2.290325106769653e-05, "loss": 1.2882, "step": 16805 }, { "epoch": 3.105184909868054, "grad_norm": 0.98863285779953, "learning_rate": 2.289394757381864e-05, "loss": 0.8628, "step": 16806 }, { "epoch": 3.1053707489314255, 
"grad_norm": 0.9666051268577576, "learning_rate": 2.28846457256371e-05, "loss": 0.9621, "step": 16807 }, { "epoch": 3.1055565879947964, "grad_norm": 0.9185341000556946, "learning_rate": 2.2875345523350478e-05, "loss": 0.7908, "step": 16808 }, { "epoch": 3.1057424270581677, "grad_norm": 0.9975274801254272, "learning_rate": 2.2866046967157252e-05, "loss": 0.8881, "step": 16809 }, { "epoch": 3.1059282661215386, "grad_norm": 1.0821888446807861, "learning_rate": 2.285675005725586e-05, "loss": 0.8016, "step": 16810 }, { "epoch": 3.10611410518491, "grad_norm": 0.9405296444892883, "learning_rate": 2.2847454793844793e-05, "loss": 0.8053, "step": 16811 }, { "epoch": 3.106299944248281, "grad_norm": 1.0025142431259155, "learning_rate": 2.2838161177122363e-05, "loss": 0.9043, "step": 16812 }, { "epoch": 3.106485783311652, "grad_norm": 0.8691514730453491, "learning_rate": 2.2828869207287007e-05, "loss": 1.0407, "step": 16813 }, { "epoch": 3.106671622375023, "grad_norm": 0.8702809810638428, "learning_rate": 2.281957888453701e-05, "loss": 0.8272, "step": 16814 }, { "epoch": 3.1068574614383944, "grad_norm": 0.7858754992485046, "learning_rate": 2.281029020907062e-05, "loss": 0.6867, "step": 16815 }, { "epoch": 3.1070433005017657, "grad_norm": 0.9576261043548584, "learning_rate": 2.2801003181086146e-05, "loss": 0.8368, "step": 16816 }, { "epoch": 3.1072291395651366, "grad_norm": 0.8192430734634399, "learning_rate": 2.2791717800781774e-05, "loss": 0.7159, "step": 16817 }, { "epoch": 3.107414978628508, "grad_norm": 0.8357921838760376, "learning_rate": 2.2782434068355674e-05, "loss": 0.5564, "step": 16818 }, { "epoch": 3.107600817691879, "grad_norm": 1.038155436515808, "learning_rate": 2.2773151984006035e-05, "loss": 0.7805, "step": 16819 }, { "epoch": 3.10778665675525, "grad_norm": 1.601139783859253, "learning_rate": 2.2763871547930948e-05, "loss": 1.2576, "step": 16820 }, { "epoch": 3.107972495818621, "grad_norm": 0.8837090134620667, "learning_rate": 2.2754592760328475e-05, "loss": 
0.8866, "step": 16821 }, { "epoch": 3.1081583348819923, "grad_norm": 0.8853998184204102, "learning_rate": 2.2745315621396623e-05, "loss": 0.7798, "step": 16822 }, { "epoch": 3.108344173945363, "grad_norm": 0.9195745587348938, "learning_rate": 2.2736040131333448e-05, "loss": 0.7439, "step": 16823 }, { "epoch": 3.1085300130087345, "grad_norm": 0.9202739000320435, "learning_rate": 2.2726766290336922e-05, "loss": 0.6378, "step": 16824 }, { "epoch": 3.1087158520721054, "grad_norm": 0.9971712827682495, "learning_rate": 2.2717494098604973e-05, "loss": 1.0663, "step": 16825 }, { "epoch": 3.1089016911354768, "grad_norm": 1.095689296722412, "learning_rate": 2.2708223556335485e-05, "loss": 1.0218, "step": 16826 }, { "epoch": 3.1090875301988476, "grad_norm": 1.0428909063339233, "learning_rate": 2.26989546637263e-05, "loss": 0.938, "step": 16827 }, { "epoch": 3.109273369262219, "grad_norm": 0.8788793087005615, "learning_rate": 2.2689687420975302e-05, "loss": 0.8181, "step": 16828 }, { "epoch": 3.10945920832559, "grad_norm": 0.9384439587593079, "learning_rate": 2.2680421828280263e-05, "loss": 0.8732, "step": 16829 }, { "epoch": 3.109645047388961, "grad_norm": 0.9256169199943542, "learning_rate": 2.26711578858389e-05, "loss": 0.8838, "step": 16830 }, { "epoch": 3.109830886452332, "grad_norm": 0.985909104347229, "learning_rate": 2.2661895593849003e-05, "loss": 0.8377, "step": 16831 }, { "epoch": 3.1100167255157034, "grad_norm": 1.1542880535125732, "learning_rate": 2.2652634952508224e-05, "loss": 0.8399, "step": 16832 }, { "epoch": 3.1102025645790747, "grad_norm": 0.7717205882072449, "learning_rate": 2.264337596201419e-05, "loss": 0.7881, "step": 16833 }, { "epoch": 3.1103884036424456, "grad_norm": 0.8536641597747803, "learning_rate": 2.263411862256457e-05, "loss": 0.6673, "step": 16834 }, { "epoch": 3.110574242705817, "grad_norm": 1.0915049314498901, "learning_rate": 2.2624862934356894e-05, "loss": 0.7309, "step": 16835 }, { "epoch": 3.110760081769188, "grad_norm": 
0.7397167086601257, "learning_rate": 2.261560889758878e-05, "loss": 0.507, "step": 16836 }, { "epoch": 3.110945920832559, "grad_norm": 0.8940251469612122, "learning_rate": 2.260635651245768e-05, "loss": 0.8155, "step": 16837 }, { "epoch": 3.11113175989593, "grad_norm": 0.883128821849823, "learning_rate": 2.2597105779161076e-05, "loss": 0.9584, "step": 16838 }, { "epoch": 3.1113175989593014, "grad_norm": 0.8234800696372986, "learning_rate": 2.2587856697896436e-05, "loss": 0.783, "step": 16839 }, { "epoch": 3.1115034380226723, "grad_norm": 0.8749162554740906, "learning_rate": 2.257860926886115e-05, "loss": 0.7674, "step": 16840 }, { "epoch": 3.1116892770860436, "grad_norm": 0.8481289744377136, "learning_rate": 2.2569363492252594e-05, "loss": 0.7863, "step": 16841 }, { "epoch": 3.1118751161494145, "grad_norm": 0.9204713702201843, "learning_rate": 2.2560119368268072e-05, "loss": 0.8807, "step": 16842 }, { "epoch": 3.112060955212786, "grad_norm": 0.9275779724121094, "learning_rate": 2.2550876897104913e-05, "loss": 0.9018, "step": 16843 }, { "epoch": 3.1122467942761567, "grad_norm": 0.9116693139076233, "learning_rate": 2.2541636078960394e-05, "loss": 0.9594, "step": 16844 }, { "epoch": 3.112432633339528, "grad_norm": 0.8437976241111755, "learning_rate": 2.253239691403174e-05, "loss": 0.7266, "step": 16845 }, { "epoch": 3.112618472402899, "grad_norm": 0.8511406183242798, "learning_rate": 2.2523159402516124e-05, "loss": 0.7824, "step": 16846 }, { "epoch": 3.1128043114662702, "grad_norm": 0.8179444670677185, "learning_rate": 2.2513923544610693e-05, "loss": 0.529, "step": 16847 }, { "epoch": 3.112990150529641, "grad_norm": 1.014588475227356, "learning_rate": 2.250468934051262e-05, "loss": 1.0015, "step": 16848 }, { "epoch": 3.1131759895930124, "grad_norm": 1.113190770149231, "learning_rate": 2.2495456790418967e-05, "loss": 1.1612, "step": 16849 }, { "epoch": 3.113361828656384, "grad_norm": 1.0497996807098389, "learning_rate": 2.248622589452676e-05, "loss": 0.8133, "step": 
16850 }, { "epoch": 3.1135476677197547, "grad_norm": 0.8338691592216492, "learning_rate": 2.2476996653033065e-05, "loss": 0.655, "step": 16851 }, { "epoch": 3.113733506783126, "grad_norm": 0.8171826004981995, "learning_rate": 2.2467769066134835e-05, "loss": 0.6691, "step": 16852 }, { "epoch": 3.113919345846497, "grad_norm": 0.7760305404663086, "learning_rate": 2.245854313402904e-05, "loss": 0.5202, "step": 16853 }, { "epoch": 3.114105184909868, "grad_norm": 0.8941469192504883, "learning_rate": 2.2449318856912538e-05, "loss": 0.8038, "step": 16854 }, { "epoch": 3.114291023973239, "grad_norm": 0.7653095722198486, "learning_rate": 2.2440096234982245e-05, "loss": 0.8399, "step": 16855 }, { "epoch": 3.1144768630366104, "grad_norm": 0.9781475067138672, "learning_rate": 2.2430875268435025e-05, "loss": 0.7465, "step": 16856 }, { "epoch": 3.1146627020999813, "grad_norm": 0.8678989410400391, "learning_rate": 2.242165595746766e-05, "loss": 0.8347, "step": 16857 }, { "epoch": 3.1148485411633526, "grad_norm": 0.842769980430603, "learning_rate": 2.2412438302276894e-05, "loss": 0.6127, "step": 16858 }, { "epoch": 3.1150343802267235, "grad_norm": 0.9742424488067627, "learning_rate": 2.240322230305951e-05, "loss": 0.8599, "step": 16859 }, { "epoch": 3.115220219290095, "grad_norm": 0.8025090098381042, "learning_rate": 2.2394007960012186e-05, "loss": 0.6261, "step": 16860 }, { "epoch": 3.1154060583534657, "grad_norm": 0.927635908126831, "learning_rate": 2.2384795273331582e-05, "loss": 0.8964, "step": 16861 }, { "epoch": 3.115591897416837, "grad_norm": 0.9228578805923462, "learning_rate": 2.237558424321431e-05, "loss": 0.7664, "step": 16862 }, { "epoch": 3.115777736480208, "grad_norm": 1.0590260028839111, "learning_rate": 2.2366374869856998e-05, "loss": 0.8339, "step": 16863 }, { "epoch": 3.1159635755435793, "grad_norm": 0.9382837414741516, "learning_rate": 2.2357167153456173e-05, "loss": 0.8449, "step": 16864 }, { "epoch": 3.1161494146069506, "grad_norm": 0.8885865807533264, 
"learning_rate": 2.2347961094208404e-05, "loss": 0.7328, "step": 16865 }, { "epoch": 3.1163352536703215, "grad_norm": 0.8921540975570679, "learning_rate": 2.2338756692310148e-05, "loss": 0.6661, "step": 16866 }, { "epoch": 3.116521092733693, "grad_norm": 0.9339958429336548, "learning_rate": 2.2329553947957826e-05, "loss": 0.8159, "step": 16867 }, { "epoch": 3.1167069317970637, "grad_norm": 1.01011061668396, "learning_rate": 2.2320352861347915e-05, "loss": 0.9934, "step": 16868 }, { "epoch": 3.116892770860435, "grad_norm": 0.9793814420700073, "learning_rate": 2.2311153432676768e-05, "loss": 0.6934, "step": 16869 }, { "epoch": 3.117078609923806, "grad_norm": 0.7937256097793579, "learning_rate": 2.2301955662140706e-05, "loss": 0.6465, "step": 16870 }, { "epoch": 3.1172644489871772, "grad_norm": 0.9259080290794373, "learning_rate": 2.2292759549936092e-05, "loss": 0.7638, "step": 16871 }, { "epoch": 3.117450288050548, "grad_norm": 0.7967198491096497, "learning_rate": 2.2283565096259174e-05, "loss": 0.7027, "step": 16872 }, { "epoch": 3.1176361271139195, "grad_norm": 0.7589902281761169, "learning_rate": 2.2274372301306202e-05, "loss": 0.7964, "step": 16873 }, { "epoch": 3.1178219661772903, "grad_norm": 0.9229668378829956, "learning_rate": 2.226518116527333e-05, "loss": 0.9169, "step": 16874 }, { "epoch": 3.1180078052406617, "grad_norm": 1.1118543148040771, "learning_rate": 2.225599168835677e-05, "loss": 0.982, "step": 16875 }, { "epoch": 3.1181936443040326, "grad_norm": 0.8196911811828613, "learning_rate": 2.2246803870752685e-05, "loss": 0.9871, "step": 16876 }, { "epoch": 3.118379483367404, "grad_norm": 0.851613461971283, "learning_rate": 2.2237617712657145e-05, "loss": 0.8474, "step": 16877 }, { "epoch": 3.1185653224307748, "grad_norm": 0.8437772989273071, "learning_rate": 2.2228433214266175e-05, "loss": 0.7872, "step": 16878 }, { "epoch": 3.118751161494146, "grad_norm": 0.9893875122070312, "learning_rate": 2.221925037577587e-05, "loss": 0.9883, "step": 16879 }, { 
"epoch": 3.118937000557517, "grad_norm": 1.1424741744995117, "learning_rate": 2.2210069197382178e-05, "loss": 1.1362, "step": 16880 }, { "epoch": 3.1191228396208883, "grad_norm": 1.070717692375183, "learning_rate": 2.2200889679281077e-05, "loss": 0.7183, "step": 16881 }, { "epoch": 3.119308678684259, "grad_norm": 0.8586819767951965, "learning_rate": 2.2191711821668447e-05, "loss": 0.9298, "step": 16882 }, { "epoch": 3.1194945177476305, "grad_norm": 0.9052451848983765, "learning_rate": 2.218253562474023e-05, "loss": 0.9157, "step": 16883 }, { "epoch": 3.119680356811002, "grad_norm": 0.8883751034736633, "learning_rate": 2.2173361088692247e-05, "loss": 0.5857, "step": 16884 }, { "epoch": 3.1198661958743727, "grad_norm": 0.9195517897605896, "learning_rate": 2.2164188213720295e-05, "loss": 0.7607, "step": 16885 }, { "epoch": 3.120052034937744, "grad_norm": 0.9243937730789185, "learning_rate": 2.2155017000020194e-05, "loss": 0.9328, "step": 16886 }, { "epoch": 3.120237874001115, "grad_norm": 0.8645699620246887, "learning_rate": 2.2145847447787636e-05, "loss": 0.8224, "step": 16887 }, { "epoch": 3.1204237130644863, "grad_norm": 1.0877633094787598, "learning_rate": 2.21366795572184e-05, "loss": 0.6701, "step": 16888 }, { "epoch": 3.120609552127857, "grad_norm": 0.8657917976379395, "learning_rate": 2.2127513328508122e-05, "loss": 0.7049, "step": 16889 }, { "epoch": 3.1207953911912285, "grad_norm": 0.8166100978851318, "learning_rate": 2.2118348761852403e-05, "loss": 0.8629, "step": 16890 }, { "epoch": 3.1209812302545994, "grad_norm": 0.7946850657463074, "learning_rate": 2.2109185857446903e-05, "loss": 0.6853, "step": 16891 }, { "epoch": 3.1211670693179707, "grad_norm": 0.9389623403549194, "learning_rate": 2.2100024615487168e-05, "loss": 0.8606, "step": 16892 }, { "epoch": 3.1213529083813416, "grad_norm": 0.6917771697044373, "learning_rate": 2.209086503616873e-05, "loss": 0.5993, "step": 16893 }, { "epoch": 3.121538747444713, "grad_norm": 0.8145844340324402, "learning_rate": 
2.2081707119687055e-05, "loss": 0.6598, "step": 16894 }, { "epoch": 3.121724586508084, "grad_norm": 1.6039071083068848, "learning_rate": 2.207255086623765e-05, "loss": 1.1735, "step": 16895 }, { "epoch": 3.121910425571455, "grad_norm": 0.9803751111030579, "learning_rate": 2.206339627601589e-05, "loss": 0.884, "step": 16896 }, { "epoch": 3.122096264634826, "grad_norm": 0.9029519557952881, "learning_rate": 2.2054243349217217e-05, "loss": 0.7225, "step": 16897 }, { "epoch": 3.1222821036981974, "grad_norm": 0.8601951003074646, "learning_rate": 2.2045092086036946e-05, "loss": 0.8944, "step": 16898 }, { "epoch": 3.1224679427615687, "grad_norm": 0.8614652156829834, "learning_rate": 2.2035942486670425e-05, "loss": 0.6491, "step": 16899 }, { "epoch": 3.1226537818249396, "grad_norm": 0.9597631692886353, "learning_rate": 2.2026794551312923e-05, "loss": 0.9077, "step": 16900 }, { "epoch": 3.122839620888311, "grad_norm": 1.0509092807769775, "learning_rate": 2.2017648280159676e-05, "loss": 0.8838, "step": 16901 }, { "epoch": 3.123025459951682, "grad_norm": 0.8761752247810364, "learning_rate": 2.200850367340589e-05, "loss": 0.7991, "step": 16902 }, { "epoch": 3.123211299015053, "grad_norm": 0.8011770844459534, "learning_rate": 2.1999360731246764e-05, "loss": 0.891, "step": 16903 }, { "epoch": 3.123397138078424, "grad_norm": 0.8152168393135071, "learning_rate": 2.199021945387745e-05, "loss": 0.5233, "step": 16904 }, { "epoch": 3.1235829771417953, "grad_norm": 0.9296904802322388, "learning_rate": 2.1981079841493014e-05, "loss": 0.7773, "step": 16905 }, { "epoch": 3.123768816205166, "grad_norm": 0.8891332745552063, "learning_rate": 2.1971941894288517e-05, "loss": 0.7992, "step": 16906 }, { "epoch": 3.1239546552685376, "grad_norm": 1.0827486515045166, "learning_rate": 2.1962805612459024e-05, "loss": 0.7503, "step": 16907 }, { "epoch": 3.1241404943319084, "grad_norm": 0.8078093528747559, "learning_rate": 2.195367099619956e-05, "loss": 0.8623, "step": 16908 }, { "epoch": 
3.1243263333952798, "grad_norm": 0.8417673110961914, "learning_rate": 2.1944538045705055e-05, "loss": 0.8114, "step": 16909 }, { "epoch": 3.1245121724586506, "grad_norm": 1.1821131706237793, "learning_rate": 2.1935406761170396e-05, "loss": 0.8338, "step": 16910 }, { "epoch": 3.124698011522022, "grad_norm": 0.895492434501648, "learning_rate": 2.1926277142790552e-05, "loss": 0.9698, "step": 16911 }, { "epoch": 3.124883850585393, "grad_norm": 1.0384591817855835, "learning_rate": 2.1917149190760346e-05, "loss": 1.0607, "step": 16912 }, { "epoch": 3.125069689648764, "grad_norm": 0.7608576416969299, "learning_rate": 2.1908022905274595e-05, "loss": 0.6077, "step": 16913 }, { "epoch": 3.1252555287121355, "grad_norm": 0.9334760904312134, "learning_rate": 2.1898898286528047e-05, "loss": 0.7666, "step": 16914 }, { "epoch": 3.1254413677755064, "grad_norm": 1.024980068206787, "learning_rate": 2.1889775334715525e-05, "loss": 0.5776, "step": 16915 }, { "epoch": 3.1256272068388777, "grad_norm": 0.8938941955566406, "learning_rate": 2.18806540500317e-05, "loss": 0.8932, "step": 16916 }, { "epoch": 3.1258130459022486, "grad_norm": 1.1975553035736084, "learning_rate": 2.1871534432671238e-05, "loss": 0.8648, "step": 16917 }, { "epoch": 3.12599888496562, "grad_norm": 0.8598381280899048, "learning_rate": 2.1862416482828783e-05, "loss": 0.9019, "step": 16918 }, { "epoch": 3.126184724028991, "grad_norm": 1.0694983005523682, "learning_rate": 2.1853300200699e-05, "loss": 0.7902, "step": 16919 }, { "epoch": 3.126370563092362, "grad_norm": 0.959589421749115, "learning_rate": 2.1844185586476416e-05, "loss": 0.9651, "step": 16920 }, { "epoch": 3.126556402155733, "grad_norm": 0.8493366241455078, "learning_rate": 2.183507264035557e-05, "loss": 0.6951, "step": 16921 }, { "epoch": 3.1267422412191044, "grad_norm": 0.893376350402832, "learning_rate": 2.1825961362530932e-05, "loss": 1.0166, "step": 16922 }, { "epoch": 3.1269280802824753, "grad_norm": 0.9279434084892273, "learning_rate": 
2.181685175319702e-05, "loss": 0.6794, "step": 16923 }, { "epoch": 3.1271139193458466, "grad_norm": 0.9398536682128906, "learning_rate": 2.1807743812548253e-05, "loss": 0.6859, "step": 16924 }, { "epoch": 3.1272997584092175, "grad_norm": 0.8415723443031311, "learning_rate": 2.1798637540779e-05, "loss": 0.729, "step": 16925 }, { "epoch": 3.127485597472589, "grad_norm": 1.1050769090652466, "learning_rate": 2.178953293808361e-05, "loss": 0.869, "step": 16926 }, { "epoch": 3.1276714365359597, "grad_norm": 1.0179059505462646, "learning_rate": 2.1780430004656416e-05, "loss": 0.9382, "step": 16927 }, { "epoch": 3.127857275599331, "grad_norm": 0.912026047706604, "learning_rate": 2.1771328740691745e-05, "loss": 0.8454, "step": 16928 }, { "epoch": 3.128043114662702, "grad_norm": 1.1212213039398193, "learning_rate": 2.1762229146383816e-05, "loss": 0.7195, "step": 16929 }, { "epoch": 3.1282289537260732, "grad_norm": 0.9466208219528198, "learning_rate": 2.1753131221926825e-05, "loss": 0.976, "step": 16930 }, { "epoch": 3.128414792789444, "grad_norm": 0.9594152569770813, "learning_rate": 2.1744034967514992e-05, "loss": 0.6365, "step": 16931 }, { "epoch": 3.1286006318528155, "grad_norm": 0.9360522627830505, "learning_rate": 2.173494038334244e-05, "loss": 0.9949, "step": 16932 }, { "epoch": 3.128786470916187, "grad_norm": 0.9982490539550781, "learning_rate": 2.172584746960328e-05, "loss": 0.4391, "step": 16933 }, { "epoch": 3.1289723099795577, "grad_norm": 0.9104912877082825, "learning_rate": 2.1716756226491543e-05, "loss": 0.7869, "step": 16934 }, { "epoch": 3.129158149042929, "grad_norm": 0.8269588351249695, "learning_rate": 2.1707666654201342e-05, "loss": 0.796, "step": 16935 }, { "epoch": 3.1293439881063, "grad_norm": 1.1801178455352783, "learning_rate": 2.1698578752926634e-05, "loss": 0.8755, "step": 16936 }, { "epoch": 3.129529827169671, "grad_norm": 0.9644780158996582, "learning_rate": 2.1689492522861398e-05, "loss": 0.9328, "step": 16937 }, { "epoch": 3.129715666233042, 
"grad_norm": 0.8413048982620239, "learning_rate": 2.168040796419951e-05, "loss": 0.7658, "step": 16938 }, { "epoch": 3.1299015052964134, "grad_norm": 0.9241469502449036, "learning_rate": 2.1671325077134963e-05, "loss": 0.8426, "step": 16939 }, { "epoch": 3.1300873443597843, "grad_norm": 0.9627590179443359, "learning_rate": 2.166224386186155e-05, "loss": 0.9674, "step": 16940 }, { "epoch": 3.1302731834231556, "grad_norm": 0.9975087642669678, "learning_rate": 2.1653164318573126e-05, "loss": 0.7909, "step": 16941 }, { "epoch": 3.1304590224865265, "grad_norm": 0.9133538603782654, "learning_rate": 2.164408644746343e-05, "loss": 1.1582, "step": 16942 }, { "epoch": 3.130644861549898, "grad_norm": 0.8557292222976685, "learning_rate": 2.1635010248726274e-05, "loss": 0.8368, "step": 16943 }, { "epoch": 3.1308307006132687, "grad_norm": 0.9679376482963562, "learning_rate": 2.1625935722555335e-05, "loss": 0.7238, "step": 16944 }, { "epoch": 3.13101653967664, "grad_norm": 1.0886197090148926, "learning_rate": 2.1616862869144304e-05, "loss": 0.7671, "step": 16945 }, { "epoch": 3.131202378740011, "grad_norm": 0.8300682902336121, "learning_rate": 2.1607791688686808e-05, "loss": 0.7886, "step": 16946 }, { "epoch": 3.1313882178033823, "grad_norm": 0.9571086168289185, "learning_rate": 2.1598722181376497e-05, "loss": 0.829, "step": 16947 }, { "epoch": 3.1315740568667536, "grad_norm": 0.9584956169128418, "learning_rate": 2.1589654347406886e-05, "loss": 0.8305, "step": 16948 }, { "epoch": 3.1317598959301245, "grad_norm": 0.9124837517738342, "learning_rate": 2.1580588186971575e-05, "loss": 0.7435, "step": 16949 }, { "epoch": 3.131945734993496, "grad_norm": 0.8917635679244995, "learning_rate": 2.1571523700264006e-05, "loss": 0.8684, "step": 16950 }, { "epoch": 3.1321315740568667, "grad_norm": 0.9225831031799316, "learning_rate": 2.15624608874777e-05, "loss": 0.537, "step": 16951 }, { "epoch": 3.132317413120238, "grad_norm": 0.991547703742981, "learning_rate": 2.155339974880607e-05, "loss": 
0.9213, "step": 16952 }, { "epoch": 3.132503252183609, "grad_norm": 0.9788913726806641, "learning_rate": 2.15443402844425e-05, "loss": 0.8236, "step": 16953 }, { "epoch": 3.1326890912469803, "grad_norm": 0.7579008340835571, "learning_rate": 2.1535282494580323e-05, "loss": 1.0362, "step": 16954 }, { "epoch": 3.132874930310351, "grad_norm": 0.9785481691360474, "learning_rate": 2.1526226379412906e-05, "loss": 0.8357, "step": 16955 }, { "epoch": 3.1330607693737225, "grad_norm": 0.907016932964325, "learning_rate": 2.1517171939133542e-05, "loss": 0.767, "step": 16956 }, { "epoch": 3.1332466084370934, "grad_norm": 0.8444396257400513, "learning_rate": 2.1508119173935448e-05, "loss": 0.8928, "step": 16957 }, { "epoch": 3.1334324475004647, "grad_norm": 1.1882593631744385, "learning_rate": 2.149906808401183e-05, "loss": 0.9047, "step": 16958 }, { "epoch": 3.1336182865638356, "grad_norm": 0.8932169675827026, "learning_rate": 2.149001866955589e-05, "loss": 0.7651, "step": 16959 }, { "epoch": 3.133804125627207, "grad_norm": 0.9666036367416382, "learning_rate": 2.148097093076079e-05, "loss": 0.6123, "step": 16960 }, { "epoch": 3.133989964690578, "grad_norm": 0.9772616028785706, "learning_rate": 2.1471924867819636e-05, "loss": 0.8967, "step": 16961 }, { "epoch": 3.134175803753949, "grad_norm": 0.933224081993103, "learning_rate": 2.1462880480925452e-05, "loss": 0.9886, "step": 16962 }, { "epoch": 3.13436164281732, "grad_norm": 0.9614982008934021, "learning_rate": 2.1453837770271334e-05, "loss": 0.7811, "step": 16963 }, { "epoch": 3.1345474818806913, "grad_norm": 0.9009011387825012, "learning_rate": 2.1444796736050266e-05, "loss": 0.9589, "step": 16964 }, { "epoch": 3.134733320944062, "grad_norm": 1.1222648620605469, "learning_rate": 2.14357573784552e-05, "loss": 0.773, "step": 16965 }, { "epoch": 3.1349191600074335, "grad_norm": 0.9041205048561096, "learning_rate": 2.1426719697679054e-05, "loss": 0.7157, "step": 16966 }, { "epoch": 3.135104999070805, "grad_norm": 
0.8128951191902161, "learning_rate": 2.1417683693914747e-05, "loss": 0.7409, "step": 16967 }, { "epoch": 3.1352908381341758, "grad_norm": 0.8408454060554504, "learning_rate": 2.1408649367355148e-05, "loss": 0.5803, "step": 16968 }, { "epoch": 3.135476677197547, "grad_norm": 0.951663613319397, "learning_rate": 2.1399616718193017e-05, "loss": 0.7976, "step": 16969 }, { "epoch": 3.135662516260918, "grad_norm": 1.1752151250839233, "learning_rate": 2.1390585746621193e-05, "loss": 1.0018, "step": 16970 }, { "epoch": 3.1358483553242893, "grad_norm": 0.8627622127532959, "learning_rate": 2.138155645283244e-05, "loss": 0.8013, "step": 16971 }, { "epoch": 3.13603419438766, "grad_norm": 0.999226450920105, "learning_rate": 2.1372528837019455e-05, "loss": 1.0242, "step": 16972 }, { "epoch": 3.1362200334510315, "grad_norm": 1.0154447555541992, "learning_rate": 2.136350289937491e-05, "loss": 1.0382, "step": 16973 }, { "epoch": 3.1364058725144024, "grad_norm": 0.9527133703231812, "learning_rate": 2.1354478640091424e-05, "loss": 0.7691, "step": 16974 }, { "epoch": 3.1365917115777737, "grad_norm": 0.778480052947998, "learning_rate": 2.134545605936166e-05, "loss": 0.7514, "step": 16975 }, { "epoch": 3.1367775506411446, "grad_norm": 1.0286147594451904, "learning_rate": 2.1336435157378143e-05, "loss": 0.6675, "step": 16976 }, { "epoch": 3.136963389704516, "grad_norm": 0.9593778252601624, "learning_rate": 2.1327415934333438e-05, "loss": 0.8536, "step": 16977 }, { "epoch": 3.137149228767887, "grad_norm": 0.7049160003662109, "learning_rate": 2.1318398390420013e-05, "loss": 0.6347, "step": 16978 }, { "epoch": 3.137335067831258, "grad_norm": 1.0583542585372925, "learning_rate": 2.130938252583037e-05, "loss": 0.9364, "step": 16979 }, { "epoch": 3.137520906894629, "grad_norm": 0.9129087328910828, "learning_rate": 2.130036834075688e-05, "loss": 0.9682, "step": 16980 }, { "epoch": 3.1377067459580004, "grad_norm": 0.6961577534675598, "learning_rate": 2.1291355835392025e-05, "loss": 0.5642, 
"step": 16981 }, { "epoch": 3.1378925850213717, "grad_norm": 0.7811499834060669, "learning_rate": 2.128234500992806e-05, "loss": 0.5969, "step": 16982 }, { "epoch": 3.1380784240847426, "grad_norm": 0.8767191767692566, "learning_rate": 2.12733358645574e-05, "loss": 0.7998, "step": 16983 }, { "epoch": 3.138264263148114, "grad_norm": 0.809659481048584, "learning_rate": 2.1264328399472277e-05, "loss": 0.7883, "step": 16984 }, { "epoch": 3.138450102211485, "grad_norm": 0.8992799520492554, "learning_rate": 2.1255322614864947e-05, "loss": 0.8595, "step": 16985 }, { "epoch": 3.138635941274856, "grad_norm": 0.8708093762397766, "learning_rate": 2.124631851092761e-05, "loss": 0.848, "step": 16986 }, { "epoch": 3.138821780338227, "grad_norm": 0.9375101327896118, "learning_rate": 2.1237316087852466e-05, "loss": 0.8506, "step": 16987 }, { "epoch": 3.1390076194015983, "grad_norm": 0.8981173634529114, "learning_rate": 2.1228315345831662e-05, "loss": 0.8373, "step": 16988 }, { "epoch": 3.139193458464969, "grad_norm": 0.8532535433769226, "learning_rate": 2.1219316285057255e-05, "loss": 0.6353, "step": 16989 }, { "epoch": 3.1393792975283406, "grad_norm": 0.9816616177558899, "learning_rate": 2.1210318905721348e-05, "loss": 1.1142, "step": 16990 }, { "epoch": 3.1395651365917114, "grad_norm": 0.8044140934944153, "learning_rate": 2.120132320801601e-05, "loss": 0.6628, "step": 16991 }, { "epoch": 3.1397509756550828, "grad_norm": 0.943625271320343, "learning_rate": 2.1192329192133198e-05, "loss": 0.6296, "step": 16992 }, { "epoch": 3.1399368147184537, "grad_norm": 1.0095001459121704, "learning_rate": 2.118333685826489e-05, "loss": 1.1645, "step": 16993 }, { "epoch": 3.140122653781825, "grad_norm": 0.8924473524093628, "learning_rate": 2.1174346206602968e-05, "loss": 0.8996, "step": 16994 }, { "epoch": 3.140308492845196, "grad_norm": 0.93332839012146, "learning_rate": 2.116535723733938e-05, "loss": 1.076, "step": 16995 }, { "epoch": 3.140494331908567, "grad_norm": 0.9304823279380798, 
"learning_rate": 2.1156369950665956e-05, "loss": 0.8909, "step": 16996 }, { "epoch": 3.1406801709719385, "grad_norm": 0.8623441457748413, "learning_rate": 2.1147384346774523e-05, "loss": 0.611, "step": 16997 }, { "epoch": 3.1408660100353094, "grad_norm": 0.9617223143577576, "learning_rate": 2.113840042585682e-05, "loss": 0.9052, "step": 16998 }, { "epoch": 3.1410518490986807, "grad_norm": 0.8871750831604004, "learning_rate": 2.1129418188104656e-05, "loss": 0.6957, "step": 16999 }, { "epoch": 3.1412376881620516, "grad_norm": 0.8519712686538696, "learning_rate": 2.1120437633709712e-05, "loss": 0.5776, "step": 17000 }, { "epoch": 3.141423527225423, "grad_norm": 0.8367720246315002, "learning_rate": 2.1111458762863633e-05, "loss": 0.6709, "step": 17001 }, { "epoch": 3.141609366288794, "grad_norm": 0.8938384056091309, "learning_rate": 2.1102481575758094e-05, "loss": 0.784, "step": 17002 }, { "epoch": 3.141795205352165, "grad_norm": 1.1250386238098145, "learning_rate": 2.1093506072584724e-05, "loss": 0.9777, "step": 17003 }, { "epoch": 3.141981044415536, "grad_norm": 0.8990409970283508, "learning_rate": 2.108453225353505e-05, "loss": 0.9568, "step": 17004 }, { "epoch": 3.1421668834789074, "grad_norm": 0.9447553157806396, "learning_rate": 2.107556011880062e-05, "loss": 0.6788, "step": 17005 }, { "epoch": 3.1423527225422783, "grad_norm": 0.8468141555786133, "learning_rate": 2.106658966857288e-05, "loss": 0.776, "step": 17006 }, { "epoch": 3.1425385616056496, "grad_norm": 1.0220496654510498, "learning_rate": 2.105762090304336e-05, "loss": 0.6239, "step": 17007 }, { "epoch": 3.1427244006690205, "grad_norm": 1.0547794103622437, "learning_rate": 2.1048653822403454e-05, "loss": 0.9287, "step": 17008 }, { "epoch": 3.142910239732392, "grad_norm": 0.8928568959236145, "learning_rate": 2.103968842684452e-05, "loss": 0.768, "step": 17009 }, { "epoch": 3.1430960787957627, "grad_norm": 0.9178768992424011, "learning_rate": 2.1030724716557958e-05, "loss": 0.7972, "step": 17010 }, { 
"epoch": 3.143281917859134, "grad_norm": 0.9317730665206909, "learning_rate": 2.1021762691735035e-05, "loss": 0.8405, "step": 17011 }, { "epoch": 3.143467756922505, "grad_norm": 0.9283566474914551, "learning_rate": 2.1012802352567085e-05, "loss": 0.7425, "step": 17012 }, { "epoch": 3.1436535959858762, "grad_norm": 0.7513983845710754, "learning_rate": 2.1003843699245317e-05, "loss": 0.6545, "step": 17013 }, { "epoch": 3.143839435049247, "grad_norm": 0.9660999774932861, "learning_rate": 2.099488673196093e-05, "loss": 0.7218, "step": 17014 }, { "epoch": 3.1440252741126185, "grad_norm": 0.6714902520179749, "learning_rate": 2.0985931450905118e-05, "loss": 0.4998, "step": 17015 }, { "epoch": 3.14421111317599, "grad_norm": 1.0693867206573486, "learning_rate": 2.0976977856269008e-05, "loss": 0.8455, "step": 17016 }, { "epoch": 3.1443969522393607, "grad_norm": 0.9602853059768677, "learning_rate": 2.0968025948243696e-05, "loss": 0.8069, "step": 17017 }, { "epoch": 3.144582791302732, "grad_norm": 0.8815193772315979, "learning_rate": 2.095907572702023e-05, "loss": 0.9785, "step": 17018 }, { "epoch": 3.144768630366103, "grad_norm": 0.8699519038200378, "learning_rate": 2.095012719278966e-05, "loss": 0.7088, "step": 17019 }, { "epoch": 3.144954469429474, "grad_norm": 0.7912164926528931, "learning_rate": 2.0941180345742983e-05, "loss": 0.5679, "step": 17020 }, { "epoch": 3.145140308492845, "grad_norm": 1.0096666812896729, "learning_rate": 2.0932235186071114e-05, "loss": 0.8969, "step": 17021 }, { "epoch": 3.1453261475562164, "grad_norm": 0.7588926553726196, "learning_rate": 2.0923291713965e-05, "loss": 0.7405, "step": 17022 }, { "epoch": 3.1455119866195873, "grad_norm": 0.8650400638580322, "learning_rate": 2.091434992961555e-05, "loss": 0.7048, "step": 17023 }, { "epoch": 3.1456978256829586, "grad_norm": 1.0307903289794922, "learning_rate": 2.0905409833213574e-05, "loss": 0.8026, "step": 17024 }, { "epoch": 3.1458836647463295, "grad_norm": 1.0307273864746094, "learning_rate": 
2.0896471424949904e-05, "loss": 0.8717, "step": 17025 }, { "epoch": 3.146069503809701, "grad_norm": 1.038767695426941, "learning_rate": 2.0887534705015276e-05, "loss": 0.8306, "step": 17026 }, { "epoch": 3.1462553428730717, "grad_norm": 0.9466065764427185, "learning_rate": 2.0878599673600486e-05, "loss": 0.8489, "step": 17027 }, { "epoch": 3.146441181936443, "grad_norm": 0.9983720183372498, "learning_rate": 2.086966633089621e-05, "loss": 0.8301, "step": 17028 }, { "epoch": 3.146627020999814, "grad_norm": 0.836397647857666, "learning_rate": 2.0860734677093074e-05, "loss": 0.7008, "step": 17029 }, { "epoch": 3.1468128600631853, "grad_norm": 0.8518416881561279, "learning_rate": 2.0851804712381783e-05, "loss": 0.8584, "step": 17030 }, { "epoch": 3.1469986991265566, "grad_norm": 0.8676844835281372, "learning_rate": 2.0842876436952897e-05, "loss": 0.648, "step": 17031 }, { "epoch": 3.1471845381899275, "grad_norm": 0.8333600163459778, "learning_rate": 2.0833949850996948e-05, "loss": 0.8164, "step": 17032 }, { "epoch": 3.147370377253299, "grad_norm": 0.8571373820304871, "learning_rate": 2.082502495470451e-05, "loss": 0.6943, "step": 17033 }, { "epoch": 3.1475562163166697, "grad_norm": 0.9203354716300964, "learning_rate": 2.0816101748266014e-05, "loss": 0.6767, "step": 17034 }, { "epoch": 3.147742055380041, "grad_norm": 0.9560515284538269, "learning_rate": 2.080718023187198e-05, "loss": 0.7109, "step": 17035 }, { "epoch": 3.147927894443412, "grad_norm": 0.9986955523490906, "learning_rate": 2.0798260405712777e-05, "loss": 0.8818, "step": 17036 }, { "epoch": 3.1481137335067833, "grad_norm": 0.9421307444572449, "learning_rate": 2.0789342269978785e-05, "loss": 0.6323, "step": 17037 }, { "epoch": 3.148299572570154, "grad_norm": 0.9801344871520996, "learning_rate": 2.078042582486033e-05, "loss": 0.8406, "step": 17038 }, { "epoch": 3.1484854116335255, "grad_norm": 0.8806696534156799, "learning_rate": 2.0771511070547768e-05, "loss": 0.7143, "step": 17039 }, { "epoch": 
3.1486712506968964, "grad_norm": 0.9294523000717163, "learning_rate": 2.076259800723134e-05, "loss": 0.8736, "step": 17040 }, { "epoch": 3.1488570897602677, "grad_norm": 0.8931056261062622, "learning_rate": 2.0753686635101244e-05, "loss": 0.7932, "step": 17041 }, { "epoch": 3.1490429288236386, "grad_norm": 0.9533994793891907, "learning_rate": 2.0744776954347745e-05, "loss": 0.8303, "step": 17042 }, { "epoch": 3.14922876788701, "grad_norm": 1.0638116598129272, "learning_rate": 2.0735868965160953e-05, "loss": 1.0265, "step": 17043 }, { "epoch": 3.149414606950381, "grad_norm": 0.8813306093215942, "learning_rate": 2.0726962667731044e-05, "loss": 1.0306, "step": 17044 }, { "epoch": 3.149600446013752, "grad_norm": 0.8358373045921326, "learning_rate": 2.0718058062248068e-05, "loss": 0.6696, "step": 17045 }, { "epoch": 3.1497862850771234, "grad_norm": 0.9361814856529236, "learning_rate": 2.070915514890207e-05, "loss": 0.839, "step": 17046 }, { "epoch": 3.1499721241404943, "grad_norm": 0.9304302334785461, "learning_rate": 2.0700253927883117e-05, "loss": 0.7693, "step": 17047 }, { "epoch": 3.1501579632038657, "grad_norm": 0.9680723547935486, "learning_rate": 2.0691354399381158e-05, "loss": 0.84, "step": 17048 }, { "epoch": 3.1503438022672365, "grad_norm": 0.8648490905761719, "learning_rate": 2.0682456563586118e-05, "loss": 0.6622, "step": 17049 }, { "epoch": 3.150529641330608, "grad_norm": 1.137999415397644, "learning_rate": 2.0673560420687945e-05, "loss": 1.0689, "step": 17050 }, { "epoch": 3.1507154803939788, "grad_norm": 0.9675096869468689, "learning_rate": 2.0664665970876496e-05, "loss": 0.8624, "step": 17051 }, { "epoch": 3.15090131945735, "grad_norm": 0.9901977777481079, "learning_rate": 2.0655773214341612e-05, "loss": 0.8271, "step": 17052 }, { "epoch": 3.151087158520721, "grad_norm": 1.0320415496826172, "learning_rate": 2.0646882151273062e-05, "loss": 0.7143, "step": 17053 }, { "epoch": 3.1512729975840923, "grad_norm": 0.9752329587936401, "learning_rate": 
2.063799278186064e-05, "loss": 0.8849, "step": 17054 }, { "epoch": 3.151458836647463, "grad_norm": 0.7743865251541138, "learning_rate": 2.062910510629409e-05, "loss": 0.7674, "step": 17055 }, { "epoch": 3.1516446757108345, "grad_norm": 0.9031053185462952, "learning_rate": 2.0620219124763086e-05, "loss": 0.9793, "step": 17056 }, { "epoch": 3.1518305147742054, "grad_norm": 0.9149486422538757, "learning_rate": 2.0611334837457287e-05, "loss": 0.857, "step": 17057 }, { "epoch": 3.1520163538375767, "grad_norm": 0.8257530927658081, "learning_rate": 2.0602452244566283e-05, "loss": 0.7835, "step": 17058 }, { "epoch": 3.1522021929009476, "grad_norm": 1.0256621837615967, "learning_rate": 2.0593571346279716e-05, "loss": 0.9413, "step": 17059 }, { "epoch": 3.152388031964319, "grad_norm": 0.8315449357032776, "learning_rate": 2.0584692142787086e-05, "loss": 0.9959, "step": 17060 }, { "epoch": 3.15257387102769, "grad_norm": 0.9501693248748779, "learning_rate": 2.0575814634277914e-05, "loss": 0.6163, "step": 17061 }, { "epoch": 3.152759710091061, "grad_norm": 1.0857973098754883, "learning_rate": 2.0566938820941695e-05, "loss": 0.842, "step": 17062 }, { "epoch": 3.152945549154432, "grad_norm": 0.5966393947601318, "learning_rate": 2.055806470296785e-05, "loss": 0.4318, "step": 17063 }, { "epoch": 3.1531313882178034, "grad_norm": 0.8495082855224609, "learning_rate": 2.054919228054577e-05, "loss": 0.8336, "step": 17064 }, { "epoch": 3.1533172272811747, "grad_norm": 1.0625152587890625, "learning_rate": 2.054032155386486e-05, "loss": 0.8935, "step": 17065 }, { "epoch": 3.1535030663445456, "grad_norm": 0.7919192314147949, "learning_rate": 2.0531452523114402e-05, "loss": 0.9689, "step": 17066 }, { "epoch": 3.153688905407917, "grad_norm": 0.9562473297119141, "learning_rate": 2.0522585188483745e-05, "loss": 0.8727, "step": 17067 }, { "epoch": 3.153874744471288, "grad_norm": 0.9182508587837219, "learning_rate": 2.0513719550162114e-05, "loss": 1.0428, "step": 17068 }, { "epoch": 
3.154060583534659, "grad_norm": 1.3862417936325073, "learning_rate": 2.0504855608338715e-05, "loss": 1.1412, "step": 17069 }, { "epoch": 3.15424642259803, "grad_norm": 0.8006358742713928, "learning_rate": 2.049599336320277e-05, "loss": 0.7885, "step": 17070 }, { "epoch": 3.1544322616614013, "grad_norm": 1.0290948152542114, "learning_rate": 2.0487132814943422e-05, "loss": 0.8757, "step": 17071 }, { "epoch": 3.1546181007247722, "grad_norm": 0.8608807921409607, "learning_rate": 2.0478273963749774e-05, "loss": 0.7615, "step": 17072 }, { "epoch": 3.1548039397881436, "grad_norm": 0.9114319086074829, "learning_rate": 2.0469416809810872e-05, "loss": 0.8203, "step": 17073 }, { "epoch": 3.1549897788515144, "grad_norm": 0.9723644852638245, "learning_rate": 2.0460561353315788e-05, "loss": 0.8033, "step": 17074 }, { "epoch": 3.1551756179148858, "grad_norm": 0.8336568474769592, "learning_rate": 2.0451707594453562e-05, "loss": 0.6984, "step": 17075 }, { "epoch": 3.1553614569782567, "grad_norm": 0.9868106245994568, "learning_rate": 2.0442855533413117e-05, "loss": 0.9259, "step": 17076 }, { "epoch": 3.155547296041628, "grad_norm": 0.9859785437583923, "learning_rate": 2.0434005170383396e-05, "loss": 0.8286, "step": 17077 }, { "epoch": 3.155733135104999, "grad_norm": 0.8500993847846985, "learning_rate": 2.0425156505553277e-05, "loss": 0.8179, "step": 17078 }, { "epoch": 3.15591897416837, "grad_norm": 1.1688470840454102, "learning_rate": 2.0416309539111654e-05, "loss": 0.8003, "step": 17079 }, { "epoch": 3.1561048132317415, "grad_norm": 0.9138656854629517, "learning_rate": 2.0407464271247335e-05, "loss": 0.9918, "step": 17080 }, { "epoch": 3.1562906522951124, "grad_norm": 1.0699207782745361, "learning_rate": 2.0398620702149072e-05, "loss": 0.8225, "step": 17081 }, { "epoch": 3.1564764913584837, "grad_norm": 1.0861259698867798, "learning_rate": 2.038977883200569e-05, "loss": 0.6632, "step": 17082 }, { "epoch": 3.1566623304218546, "grad_norm": 1.0681793689727783, "learning_rate": 
2.0380938661005854e-05, "loss": 0.7384, "step": 17083 }, { "epoch": 3.156848169485226, "grad_norm": 0.9535313248634338, "learning_rate": 2.037210018933825e-05, "loss": 0.7299, "step": 17084 }, { "epoch": 3.157034008548597, "grad_norm": 0.852402925491333, "learning_rate": 2.0363263417191493e-05, "loss": 0.9142, "step": 17085 }, { "epoch": 3.157219847611968, "grad_norm": 1.0568815469741821, "learning_rate": 2.0354428344754217e-05, "loss": 0.6579, "step": 17086 }, { "epoch": 3.157405686675339, "grad_norm": 0.798697292804718, "learning_rate": 2.0345594972215022e-05, "loss": 0.7432, "step": 17087 }, { "epoch": 3.1575915257387104, "grad_norm": 0.9491557478904724, "learning_rate": 2.0336763299762406e-05, "loss": 0.8381, "step": 17088 }, { "epoch": 3.1577773648020813, "grad_norm": 1.0051136016845703, "learning_rate": 2.0327933327584835e-05, "loss": 0.9975, "step": 17089 }, { "epoch": 3.1579632038654526, "grad_norm": 0.9311573505401611, "learning_rate": 2.0319105055870846e-05, "loss": 0.6724, "step": 17090 }, { "epoch": 3.1581490429288235, "grad_norm": 1.0000122785568237, "learning_rate": 2.031027848480881e-05, "loss": 0.8165, "step": 17091 }, { "epoch": 3.158334881992195, "grad_norm": 0.7631297707557678, "learning_rate": 2.030145361458714e-05, "loss": 0.7885, "step": 17092 }, { "epoch": 3.1585207210555657, "grad_norm": 0.937091588973999, "learning_rate": 2.0292630445394133e-05, "loss": 0.9293, "step": 17093 }, { "epoch": 3.158706560118937, "grad_norm": 0.8916681408882141, "learning_rate": 2.0283808977418185e-05, "loss": 0.7998, "step": 17094 }, { "epoch": 3.1588923991823084, "grad_norm": 0.9731296896934509, "learning_rate": 2.0274989210847495e-05, "loss": 0.8251, "step": 17095 }, { "epoch": 3.1590782382456792, "grad_norm": 1.0815353393554688, "learning_rate": 2.0266171145870383e-05, "loss": 0.8091, "step": 17096 }, { "epoch": 3.1592640773090506, "grad_norm": 0.8604963421821594, "learning_rate": 2.025735478267502e-05, "loss": 0.7841, "step": 17097 }, { "epoch": 
3.1594499163724215, "grad_norm": 0.9545714855194092, "learning_rate": 2.0248540121449543e-05, "loss": 0.8389, "step": 17098 }, { "epoch": 3.159635755435793, "grad_norm": 0.8550902009010315, "learning_rate": 2.0239727162382148e-05, "loss": 0.8468, "step": 17099 }, { "epoch": 3.1598215944991637, "grad_norm": 0.9193598628044128, "learning_rate": 2.0230915905660906e-05, "loss": 0.7489, "step": 17100 }, { "epoch": 3.160007433562535, "grad_norm": 1.4833147525787354, "learning_rate": 2.0222106351473845e-05, "loss": 1.2193, "step": 17101 }, { "epoch": 3.160193272625906, "grad_norm": 0.7797554135322571, "learning_rate": 2.0213298500009047e-05, "loss": 0.6539, "step": 17102 }, { "epoch": 3.160379111689277, "grad_norm": 1.5775411128997803, "learning_rate": 2.0204492351454472e-05, "loss": 1.2373, "step": 17103 }, { "epoch": 3.160564950752648, "grad_norm": 0.8735769987106323, "learning_rate": 2.0195687905998073e-05, "loss": 0.7057, "step": 17104 }, { "epoch": 3.1607507898160194, "grad_norm": 0.8621777892112732, "learning_rate": 2.018688516382774e-05, "loss": 0.8648, "step": 17105 }, { "epoch": 3.1609366288793903, "grad_norm": 1.0254769325256348, "learning_rate": 2.017808412513137e-05, "loss": 0.9406, "step": 17106 }, { "epoch": 3.1611224679427616, "grad_norm": 1.0010100603103638, "learning_rate": 2.0169284790096853e-05, "loss": 0.9229, "step": 17107 }, { "epoch": 3.1613083070061325, "grad_norm": 0.906528115272522, "learning_rate": 2.0160487158911955e-05, "loss": 0.6888, "step": 17108 }, { "epoch": 3.161494146069504, "grad_norm": 0.8315296173095703, "learning_rate": 2.0151691231764414e-05, "loss": 0.8592, "step": 17109 }, { "epoch": 3.1616799851328747, "grad_norm": 0.8605505228042603, "learning_rate": 2.0142897008842033e-05, "loss": 0.7481, "step": 17110 }, { "epoch": 3.161865824196246, "grad_norm": 0.9833331108093262, "learning_rate": 2.0134104490332474e-05, "loss": 0.7347, "step": 17111 }, { "epoch": 3.162051663259617, "grad_norm": 0.8601307272911072, "learning_rate": 
2.0125313676423396e-05, "loss": 0.7927, "step": 17112 }, { "epoch": 3.1622375023229883, "grad_norm": 0.8588778972625732, "learning_rate": 2.011652456730241e-05, "loss": 0.7516, "step": 17113 }, { "epoch": 3.1624233413863596, "grad_norm": 0.8486018180847168, "learning_rate": 2.0107737163157138e-05, "loss": 0.8462, "step": 17114 }, { "epoch": 3.1626091804497305, "grad_norm": 0.9136857986450195, "learning_rate": 2.009895146417512e-05, "loss": 0.895, "step": 17115 }, { "epoch": 3.162795019513102, "grad_norm": 1.0777807235717773, "learning_rate": 2.009016747054384e-05, "loss": 0.8649, "step": 17116 }, { "epoch": 3.1629808585764727, "grad_norm": 1.7526211738586426, "learning_rate": 2.008138518245083e-05, "loss": 1.0711, "step": 17117 }, { "epoch": 3.163166697639844, "grad_norm": 0.8413838148117065, "learning_rate": 2.007260460008348e-05, "loss": 0.5849, "step": 17118 }, { "epoch": 3.163352536703215, "grad_norm": 0.8410135507583618, "learning_rate": 2.0063825723629247e-05, "loss": 0.6995, "step": 17119 }, { "epoch": 3.1635383757665863, "grad_norm": 0.9260984659194946, "learning_rate": 2.0055048553275492e-05, "loss": 0.8481, "step": 17120 }, { "epoch": 3.163724214829957, "grad_norm": 0.7520155310630798, "learning_rate": 2.0046273089209487e-05, "loss": 0.4878, "step": 17121 }, { "epoch": 3.1639100538933285, "grad_norm": 1.036642074584961, "learning_rate": 2.0037499331618627e-05, "loss": 0.944, "step": 17122 }, { "epoch": 3.1640958929566994, "grad_norm": 1.0706443786621094, "learning_rate": 2.0028727280690107e-05, "loss": 0.6616, "step": 17123 }, { "epoch": 3.1642817320200707, "grad_norm": 0.9441154599189758, "learning_rate": 2.0019956936611172e-05, "loss": 0.8402, "step": 17124 }, { "epoch": 3.1644675710834416, "grad_norm": 0.9219971299171448, "learning_rate": 2.0011188299568973e-05, "loss": 0.8124, "step": 17125 }, { "epoch": 3.164653410146813, "grad_norm": 1.2289496660232544, "learning_rate": 2.0002421369750723e-05, "loss": 0.9161, "step": 17126 }, { "epoch": 
3.164839249210184, "grad_norm": 0.9382373690605164, "learning_rate": 1.9993656147343486e-05, "loss": 0.9322, "step": 17127 }, { "epoch": 3.165025088273555, "grad_norm": 1.0162302255630493, "learning_rate": 1.998489263253438e-05, "loss": 0.7234, "step": 17128 }, { "epoch": 3.1652109273369264, "grad_norm": 0.8443132638931274, "learning_rate": 1.9976130825510408e-05, "loss": 0.8587, "step": 17129 }, { "epoch": 3.1653967664002973, "grad_norm": 1.1633564233779907, "learning_rate": 1.9967370726458623e-05, "loss": 0.9895, "step": 17130 }, { "epoch": 3.1655826054636687, "grad_norm": 1.0444053411483765, "learning_rate": 1.9958612335565975e-05, "loss": 0.931, "step": 17131 }, { "epoch": 3.1657684445270395, "grad_norm": 1.5743813514709473, "learning_rate": 1.9949855653019377e-05, "loss": 1.0989, "step": 17132 }, { "epoch": 3.165954283590411, "grad_norm": 0.9263360500335693, "learning_rate": 1.994110067900572e-05, "loss": 0.6137, "step": 17133 }, { "epoch": 3.1661401226537818, "grad_norm": 0.979530394077301, "learning_rate": 1.9932347413711905e-05, "loss": 0.9215, "step": 17134 }, { "epoch": 3.166325961717153, "grad_norm": 0.752700924873352, "learning_rate": 1.992359585732474e-05, "loss": 0.6815, "step": 17135 }, { "epoch": 3.166511800780524, "grad_norm": 1.1921418905258179, "learning_rate": 1.9914846010030995e-05, "loss": 0.8916, "step": 17136 }, { "epoch": 3.1666976398438953, "grad_norm": 0.8520069122314453, "learning_rate": 1.9906097872017415e-05, "loss": 0.7906, "step": 17137 }, { "epoch": 3.166883478907266, "grad_norm": 0.8959609866142273, "learning_rate": 1.9897351443470723e-05, "loss": 0.8337, "step": 17138 }, { "epoch": 3.1670693179706375, "grad_norm": 1.0658314228057861, "learning_rate": 1.988860672457763e-05, "loss": 0.8374, "step": 17139 }, { "epoch": 3.1672551570340084, "grad_norm": 0.938890278339386, "learning_rate": 1.9879863715524748e-05, "loss": 0.7187, "step": 17140 }, { "epoch": 3.1674409960973797, "grad_norm": 0.9903023838996887, "learning_rate": 
1.9871122416498657e-05, "loss": 0.7023, "step": 17141 }, { "epoch": 3.1676268351607506, "grad_norm": 1.1366755962371826, "learning_rate": 1.986238282768598e-05, "loss": 0.6451, "step": 17142 }, { "epoch": 3.167812674224122, "grad_norm": 0.9639435410499573, "learning_rate": 1.9853644949273222e-05, "loss": 0.9946, "step": 17143 }, { "epoch": 3.167998513287493, "grad_norm": 0.9725199341773987, "learning_rate": 1.984490878144687e-05, "loss": 1.0356, "step": 17144 }, { "epoch": 3.168184352350864, "grad_norm": 0.8418335318565369, "learning_rate": 1.9836174324393365e-05, "loss": 0.8344, "step": 17145 }, { "epoch": 3.168370191414235, "grad_norm": 0.9876839518547058, "learning_rate": 1.9827441578299176e-05, "loss": 0.7208, "step": 17146 }, { "epoch": 3.1685560304776064, "grad_norm": 0.8322383165359497, "learning_rate": 1.981871054335067e-05, "loss": 0.6992, "step": 17147 }, { "epoch": 3.1687418695409777, "grad_norm": 1.0060124397277832, "learning_rate": 1.9809981219734163e-05, "loss": 0.6777, "step": 17148 }, { "epoch": 3.1689277086043486, "grad_norm": 0.933151364326477, "learning_rate": 1.9801253607635996e-05, "loss": 0.6345, "step": 17149 }, { "epoch": 3.16911354766772, "grad_norm": 1.0822348594665527, "learning_rate": 1.979252770724247e-05, "loss": 0.8977, "step": 17150 }, { "epoch": 3.169299386731091, "grad_norm": 0.8258489370346069, "learning_rate": 1.9783803518739796e-05, "loss": 0.6308, "step": 17151 }, { "epoch": 3.169485225794462, "grad_norm": 1.0155291557312012, "learning_rate": 1.977508104231418e-05, "loss": 0.9677, "step": 17152 }, { "epoch": 3.169671064857833, "grad_norm": 0.9031057953834534, "learning_rate": 1.9766360278151764e-05, "loss": 0.8015, "step": 17153 }, { "epoch": 3.1698569039212043, "grad_norm": 1.0259935855865479, "learning_rate": 1.9757641226438728e-05, "loss": 0.9574, "step": 17154 }, { "epoch": 3.1700427429845752, "grad_norm": 0.8801981210708618, "learning_rate": 1.9748923887361137e-05, "loss": 0.7554, "step": 17155 }, { "epoch": 
3.1702285820479466, "grad_norm": 0.8430247902870178, "learning_rate": 1.974020826110504e-05, "loss": 0.7563, "step": 17156 }, { "epoch": 3.1704144211113174, "grad_norm": 0.9828110337257385, "learning_rate": 1.973149434785645e-05, "loss": 0.8258, "step": 17157 }, { "epoch": 3.1706002601746888, "grad_norm": 0.935191810131073, "learning_rate": 1.9722782147801365e-05, "loss": 0.7948, "step": 17158 }, { "epoch": 3.1707860992380597, "grad_norm": 1.270983338356018, "learning_rate": 1.9714071661125765e-05, "loss": 0.9351, "step": 17159 }, { "epoch": 3.170971938301431, "grad_norm": 0.9522151350975037, "learning_rate": 1.970536288801552e-05, "loss": 0.7877, "step": 17160 }, { "epoch": 3.171157777364802, "grad_norm": 0.7604349255561829, "learning_rate": 1.9696655828656497e-05, "loss": 0.5488, "step": 17161 }, { "epoch": 3.171343616428173, "grad_norm": 1.1275665760040283, "learning_rate": 1.9687950483234575e-05, "loss": 0.7563, "step": 17162 }, { "epoch": 3.1715294554915445, "grad_norm": 1.044658899307251, "learning_rate": 1.967924685193552e-05, "loss": 0.8459, "step": 17163 }, { "epoch": 3.1717152945549154, "grad_norm": 0.8572118282318115, "learning_rate": 1.9670544934945112e-05, "loss": 0.7864, "step": 17164 }, { "epoch": 3.1719011336182867, "grad_norm": 0.9118345975875854, "learning_rate": 1.9661844732449043e-05, "loss": 0.7072, "step": 17165 }, { "epoch": 3.1720869726816576, "grad_norm": 1.1098875999450684, "learning_rate": 1.965314624463306e-05, "loss": 1.0089, "step": 17166 }, { "epoch": 3.172272811745029, "grad_norm": 1.6386604309082031, "learning_rate": 1.964444947168278e-05, "loss": 0.7754, "step": 17167 }, { "epoch": 3.1724586508084, "grad_norm": 0.8848221898078918, "learning_rate": 1.9635754413783835e-05, "loss": 0.7577, "step": 17168 }, { "epoch": 3.172644489871771, "grad_norm": 1.0232877731323242, "learning_rate": 1.9627061071121755e-05, "loss": 0.8765, "step": 17169 }, { "epoch": 3.172830328935142, "grad_norm": 0.9133501648902893, "learning_rate": 
1.961836944388218e-05, "loss": 0.7751, "step": 17170 }, { "epoch": 3.1730161679985134, "grad_norm": 1.5902894735336304, "learning_rate": 1.960967953225057e-05, "loss": 1.5975, "step": 17171 }, { "epoch": 3.1732020070618843, "grad_norm": 0.847976565361023, "learning_rate": 1.96009913364124e-05, "loss": 0.9335, "step": 17172 }, { "epoch": 3.1733878461252556, "grad_norm": 0.9059402942657471, "learning_rate": 1.959230485655307e-05, "loss": 0.8474, "step": 17173 }, { "epoch": 3.1735736851886265, "grad_norm": 0.8859007358551025, "learning_rate": 1.9583620092858025e-05, "loss": 0.7758, "step": 17174 }, { "epoch": 3.173759524251998, "grad_norm": 0.8168259263038635, "learning_rate": 1.957493704551262e-05, "loss": 0.6757, "step": 17175 }, { "epoch": 3.1739453633153687, "grad_norm": 0.82487952709198, "learning_rate": 1.9566255714702165e-05, "loss": 0.5136, "step": 17176 }, { "epoch": 3.17413120237874, "grad_norm": 0.9470896124839783, "learning_rate": 1.9557576100611917e-05, "loss": 0.767, "step": 17177 }, { "epoch": 3.1743170414421114, "grad_norm": 1.0997936725616455, "learning_rate": 1.9548898203427203e-05, "loss": 0.6879, "step": 17178 }, { "epoch": 3.1745028805054822, "grad_norm": 0.9015858173370361, "learning_rate": 1.9540222023333166e-05, "loss": 0.7374, "step": 17179 }, { "epoch": 3.1746887195688536, "grad_norm": 0.8093281388282776, "learning_rate": 1.953154756051504e-05, "loss": 0.5783, "step": 17180 }, { "epoch": 3.1748745586322245, "grad_norm": 1.0351872444152832, "learning_rate": 1.952287481515792e-05, "loss": 0.9301, "step": 17181 }, { "epoch": 3.175060397695596, "grad_norm": 1.540510654449463, "learning_rate": 1.951420378744695e-05, "loss": 1.0627, "step": 17182 }, { "epoch": 3.1752462367589667, "grad_norm": 0.8963640332221985, "learning_rate": 1.950553447756719e-05, "loss": 0.9015, "step": 17183 }, { "epoch": 3.175432075822338, "grad_norm": 0.9509427547454834, "learning_rate": 1.9496866885703657e-05, "loss": 0.6713, "step": 17184 }, { "epoch": 3.175617914885709, 
"grad_norm": 1.0596952438354492, "learning_rate": 1.948820101204133e-05, "loss": 0.9011, "step": 17185 }, { "epoch": 3.17580375394908, "grad_norm": 0.9688878059387207, "learning_rate": 1.9479536856765214e-05, "loss": 0.9906, "step": 17186 }, { "epoch": 3.175989593012451, "grad_norm": 1.1700866222381592, "learning_rate": 1.9470874420060202e-05, "loss": 0.8296, "step": 17187 }, { "epoch": 3.1761754320758224, "grad_norm": 0.825894296169281, "learning_rate": 1.9462213702111177e-05, "loss": 0.8445, "step": 17188 }, { "epoch": 3.1763612711391933, "grad_norm": 0.8722351789474487, "learning_rate": 1.945355470310297e-05, "loss": 0.8353, "step": 17189 }, { "epoch": 3.1765471102025646, "grad_norm": 0.8865044713020325, "learning_rate": 1.944489742322042e-05, "loss": 0.8187, "step": 17190 }, { "epoch": 3.1767329492659355, "grad_norm": 0.9434642791748047, "learning_rate": 1.943624186264832e-05, "loss": 0.8013, "step": 17191 }, { "epoch": 3.176918788329307, "grad_norm": 0.9115262627601624, "learning_rate": 1.9427588021571385e-05, "loss": 0.8178, "step": 17192 }, { "epoch": 3.1771046273926777, "grad_norm": 1.1289523839950562, "learning_rate": 1.9418935900174284e-05, "loss": 0.8025, "step": 17193 }, { "epoch": 3.177290466456049, "grad_norm": 0.7821084260940552, "learning_rate": 1.9410285498641745e-05, "loss": 0.7787, "step": 17194 }, { "epoch": 3.17747630551942, "grad_norm": 0.9180419445037842, "learning_rate": 1.9401636817158365e-05, "loss": 0.8752, "step": 17195 }, { "epoch": 3.1776621445827913, "grad_norm": 1.1209893226623535, "learning_rate": 1.939298985590874e-05, "loss": 0.7798, "step": 17196 }, { "epoch": 3.1778479836461626, "grad_norm": 0.9026002883911133, "learning_rate": 1.9384344615077386e-05, "loss": 0.9728, "step": 17197 }, { "epoch": 3.1780338227095335, "grad_norm": 0.9963973760604858, "learning_rate": 1.9375701094848873e-05, "loss": 0.8824, "step": 17198 }, { "epoch": 3.178219661772905, "grad_norm": 0.9234552383422852, "learning_rate": 1.9367059295407674e-05, "loss": 
0.7089, "step": 17199 }, { "epoch": 3.1784055008362757, "grad_norm": 0.892646849155426, "learning_rate": 1.9358419216938195e-05, "loss": 0.5419, "step": 17200 }, { "epoch": 3.178591339899647, "grad_norm": 0.9919077157974243, "learning_rate": 1.9349780859624865e-05, "loss": 0.8166, "step": 17201 }, { "epoch": 3.178777178963018, "grad_norm": 1.040787935256958, "learning_rate": 1.9341144223652098e-05, "loss": 0.9313, "step": 17202 }, { "epoch": 3.1789630180263893, "grad_norm": 1.034578800201416, "learning_rate": 1.9332509309204183e-05, "loss": 1.0048, "step": 17203 }, { "epoch": 3.17914885708976, "grad_norm": 1.0032830238342285, "learning_rate": 1.9323876116465435e-05, "loss": 0.9563, "step": 17204 }, { "epoch": 3.1793346961531315, "grad_norm": 1.0224652290344238, "learning_rate": 1.9315244645620067e-05, "loss": 1.0332, "step": 17205 }, { "epoch": 3.1795205352165024, "grad_norm": 0.95107501745224, "learning_rate": 1.9306614896852383e-05, "loss": 0.7288, "step": 17206 }, { "epoch": 3.1797063742798737, "grad_norm": 0.8130441904067993, "learning_rate": 1.929798687034652e-05, "loss": 0.8187, "step": 17207 }, { "epoch": 3.1798922133432446, "grad_norm": 1.0477732419967651, "learning_rate": 1.928936056628663e-05, "loss": 0.858, "step": 17208 }, { "epoch": 3.180078052406616, "grad_norm": 0.8282469511032104, "learning_rate": 1.928073598485681e-05, "loss": 0.858, "step": 17209 }, { "epoch": 3.180263891469987, "grad_norm": 0.975362241268158, "learning_rate": 1.9272113126241198e-05, "loss": 0.8719, "step": 17210 }, { "epoch": 3.180449730533358, "grad_norm": 1.0730140209197998, "learning_rate": 1.926349199062376e-05, "loss": 1.0322, "step": 17211 }, { "epoch": 3.1806355695967294, "grad_norm": 0.8283026814460754, "learning_rate": 1.9254872578188567e-05, "loss": 0.9431, "step": 17212 }, { "epoch": 3.1808214086601003, "grad_norm": 1.0434376001358032, "learning_rate": 1.924625488911953e-05, "loss": 0.9381, "step": 17213 }, { "epoch": 3.1810072477234717, "grad_norm": 
0.8632153868675232, "learning_rate": 1.923763892360062e-05, "loss": 0.9113, "step": 17214 }, { "epoch": 3.1811930867868425, "grad_norm": 0.7788403630256653, "learning_rate": 1.9229024681815722e-05, "loss": 0.7617, "step": 17215 }, { "epoch": 3.181378925850214, "grad_norm": 0.9866867065429688, "learning_rate": 1.9220412163948688e-05, "loss": 0.84, "step": 17216 }, { "epoch": 3.1815647649135848, "grad_norm": 0.8431900143623352, "learning_rate": 1.9211801370183292e-05, "loss": 0.7343, "step": 17217 }, { "epoch": 3.181750603976956, "grad_norm": 1.0417009592056274, "learning_rate": 1.92031923007034e-05, "loss": 0.6948, "step": 17218 }, { "epoch": 3.181936443040327, "grad_norm": 0.8335167169570923, "learning_rate": 1.9194584955692706e-05, "loss": 0.6104, "step": 17219 }, { "epoch": 3.1821222821036983, "grad_norm": 0.8310338258743286, "learning_rate": 1.9185979335334904e-05, "loss": 0.5485, "step": 17220 }, { "epoch": 3.182308121167069, "grad_norm": 0.9344164729118347, "learning_rate": 1.9177375439813684e-05, "loss": 0.9334, "step": 17221 }, { "epoch": 3.1824939602304405, "grad_norm": 0.9417360424995422, "learning_rate": 1.9168773269312724e-05, "loss": 0.7755, "step": 17222 }, { "epoch": 3.1826797992938114, "grad_norm": 1.168516755104065, "learning_rate": 1.9160172824015586e-05, "loss": 0.9884, "step": 17223 }, { "epoch": 3.1828656383571827, "grad_norm": 0.8864932060241699, "learning_rate": 1.9151574104105828e-05, "loss": 1.0201, "step": 17224 }, { "epoch": 3.1830514774205536, "grad_norm": 0.8255242705345154, "learning_rate": 1.914297710976696e-05, "loss": 0.8236, "step": 17225 }, { "epoch": 3.183237316483925, "grad_norm": 0.8763002157211304, "learning_rate": 1.9134381841182503e-05, "loss": 0.6465, "step": 17226 }, { "epoch": 3.1834231555472963, "grad_norm": 0.9340523481369019, "learning_rate": 1.9125788298535908e-05, "loss": 0.6152, "step": 17227 }, { "epoch": 3.183608994610667, "grad_norm": 1.1670637130737305, "learning_rate": 1.911719648201057e-05, "loss": 0.8589, 
"step": 17228 }, { "epoch": 3.1837948336740385, "grad_norm": 0.7700712084770203, "learning_rate": 1.910860639178984e-05, "loss": 0.8393, "step": 17229 }, { "epoch": 3.1839806727374094, "grad_norm": 1.044851541519165, "learning_rate": 1.9100018028057132e-05, "loss": 0.8374, "step": 17230 }, { "epoch": 3.1841665118007807, "grad_norm": 1.1065137386322021, "learning_rate": 1.9091431390995697e-05, "loss": 0.7924, "step": 17231 }, { "epoch": 3.1843523508641516, "grad_norm": 1.4392445087432861, "learning_rate": 1.9082846480788795e-05, "loss": 0.6219, "step": 17232 }, { "epoch": 3.184538189927523, "grad_norm": 1.009527325630188, "learning_rate": 1.9074263297619676e-05, "loss": 0.8973, "step": 17233 }, { "epoch": 3.184724028990894, "grad_norm": 0.8259090185165405, "learning_rate": 1.9065681841671558e-05, "loss": 0.7303, "step": 17234 }, { "epoch": 3.184909868054265, "grad_norm": 0.8061724305152893, "learning_rate": 1.905710211312757e-05, "loss": 0.7925, "step": 17235 }, { "epoch": 3.185095707117636, "grad_norm": 0.8986761569976807, "learning_rate": 1.904852411217083e-05, "loss": 0.7326, "step": 17236 }, { "epoch": 3.1852815461810073, "grad_norm": 0.910462498664856, "learning_rate": 1.9039947838984405e-05, "loss": 0.9161, "step": 17237 }, { "epoch": 3.1854673852443782, "grad_norm": 0.901995062828064, "learning_rate": 1.9031373293751377e-05, "loss": 0.6487, "step": 17238 }, { "epoch": 3.1856532243077496, "grad_norm": 0.8945409655570984, "learning_rate": 1.9022800476654744e-05, "loss": 0.8433, "step": 17239 }, { "epoch": 3.1858390633711204, "grad_norm": 0.8279741406440735, "learning_rate": 1.9014229387877448e-05, "loss": 0.7384, "step": 17240 }, { "epoch": 3.1860249024344918, "grad_norm": 0.9453520774841309, "learning_rate": 1.900566002760248e-05, "loss": 0.7548, "step": 17241 }, { "epoch": 3.1862107414978627, "grad_norm": 0.8394404053688049, "learning_rate": 1.8997092396012673e-05, "loss": 0.831, "step": 17242 }, { "epoch": 3.186396580561234, "grad_norm": 0.7381008863449097, 
"learning_rate": 1.898852649329095e-05, "loss": 0.661, "step": 17243 }, { "epoch": 3.186582419624605, "grad_norm": 0.7975539565086365, "learning_rate": 1.8979962319620104e-05, "loss": 0.739, "step": 17244 }, { "epoch": 3.186768258687976, "grad_norm": 0.9062146544456482, "learning_rate": 1.8971399875182905e-05, "loss": 0.7843, "step": 17245 }, { "epoch": 3.1869540977513475, "grad_norm": 0.8561675548553467, "learning_rate": 1.896283916016216e-05, "loss": 0.6319, "step": 17246 }, { "epoch": 3.1871399368147184, "grad_norm": 1.153652310371399, "learning_rate": 1.8954280174740537e-05, "loss": 0.8982, "step": 17247 }, { "epoch": 3.1873257758780897, "grad_norm": 0.8147569298744202, "learning_rate": 1.8945722919100717e-05, "loss": 0.6174, "step": 17248 }, { "epoch": 3.1875116149414606, "grad_norm": 0.8925120234489441, "learning_rate": 1.8937167393425335e-05, "loss": 0.8004, "step": 17249 }, { "epoch": 3.187697454004832, "grad_norm": 0.9293339848518372, "learning_rate": 1.8928613597897026e-05, "loss": 0.9017, "step": 17250 }, { "epoch": 3.187883293068203, "grad_norm": 0.9332923293113708, "learning_rate": 1.892006153269833e-05, "loss": 0.9528, "step": 17251 }, { "epoch": 3.188069132131574, "grad_norm": 0.9275184273719788, "learning_rate": 1.8911511198011766e-05, "loss": 1.0236, "step": 17252 }, { "epoch": 3.188254971194945, "grad_norm": 0.8863673210144043, "learning_rate": 1.8902962594019836e-05, "loss": 0.9379, "step": 17253 }, { "epoch": 3.1884408102583164, "grad_norm": 0.7908885478973389, "learning_rate": 1.8894415720905023e-05, "loss": 0.8286, "step": 17254 }, { "epoch": 3.1886266493216873, "grad_norm": 0.9230900406837463, "learning_rate": 1.888587057884973e-05, "loss": 0.9145, "step": 17255 }, { "epoch": 3.1888124883850586, "grad_norm": 0.8644905090332031, "learning_rate": 1.887732716803633e-05, "loss": 0.7901, "step": 17256 }, { "epoch": 3.1889983274484295, "grad_norm": 0.8076888918876648, "learning_rate": 1.8868785488647145e-05, "loss": 0.7581, "step": 17257 }, { 
"epoch": 3.189184166511801, "grad_norm": 0.994051456451416, "learning_rate": 1.886024554086453e-05, "loss": 0.7661, "step": 17258 }, { "epoch": 3.1893700055751717, "grad_norm": 0.8983469605445862, "learning_rate": 1.885170732487074e-05, "loss": 0.6752, "step": 17259 }, { "epoch": 3.189555844638543, "grad_norm": 0.7368454337120056, "learning_rate": 1.8843170840847968e-05, "loss": 0.6503, "step": 17260 }, { "epoch": 3.1897416837019144, "grad_norm": 0.9642093777656555, "learning_rate": 1.8834636088978476e-05, "loss": 0.8498, "step": 17261 }, { "epoch": 3.1899275227652852, "grad_norm": 0.9174643158912659, "learning_rate": 1.882610306944438e-05, "loss": 0.9256, "step": 17262 }, { "epoch": 3.1901133618286566, "grad_norm": 0.9920444488525391, "learning_rate": 1.88175717824278e-05, "loss": 0.6953, "step": 17263 }, { "epoch": 3.1902992008920275, "grad_norm": 0.9611275792121887, "learning_rate": 1.8809042228110852e-05, "loss": 0.7977, "step": 17264 }, { "epoch": 3.190485039955399, "grad_norm": 1.028159737586975, "learning_rate": 1.880051440667555e-05, "loss": 1.0349, "step": 17265 }, { "epoch": 3.1906708790187697, "grad_norm": 0.9539807438850403, "learning_rate": 1.8791988318303943e-05, "loss": 0.6513, "step": 17266 }, { "epoch": 3.190856718082141, "grad_norm": 0.9756878018379211, "learning_rate": 1.8783463963177993e-05, "loss": 0.6905, "step": 17267 }, { "epoch": 3.191042557145512, "grad_norm": 1.1659778356552124, "learning_rate": 1.8774941341479623e-05, "loss": 0.9444, "step": 17268 }, { "epoch": 3.191228396208883, "grad_norm": 0.9837881922721863, "learning_rate": 1.876642045339073e-05, "loss": 0.9958, "step": 17269 }, { "epoch": 3.191414235272254, "grad_norm": 1.0949634313583374, "learning_rate": 1.8757901299093207e-05, "loss": 0.939, "step": 17270 }, { "epoch": 3.1916000743356254, "grad_norm": 1.1174333095550537, "learning_rate": 1.8749383878768867e-05, "loss": 0.7475, "step": 17271 }, { "epoch": 3.1917859133989963, "grad_norm": 0.8240452408790588, "learning_rate": 
1.874086819259947e-05, "loss": 0.9406, "step": 17272 }, { "epoch": 3.1919717524623676, "grad_norm": 1.0166107416152954, "learning_rate": 1.8732354240766813e-05, "loss": 0.9246, "step": 17273 }, { "epoch": 3.1921575915257385, "grad_norm": 0.9158414602279663, "learning_rate": 1.8723842023452577e-05, "loss": 0.8681, "step": 17274 }, { "epoch": 3.19234343058911, "grad_norm": 0.9893122911453247, "learning_rate": 1.8715331540838487e-05, "loss": 0.821, "step": 17275 }, { "epoch": 3.192529269652481, "grad_norm": 0.8084926009178162, "learning_rate": 1.870682279310615e-05, "loss": 0.6835, "step": 17276 }, { "epoch": 3.192715108715852, "grad_norm": 1.0281102657318115, "learning_rate": 1.8698315780437148e-05, "loss": 0.7458, "step": 17277 }, { "epoch": 3.1929009477792234, "grad_norm": 0.8300309181213379, "learning_rate": 1.8689810503013107e-05, "loss": 0.6879, "step": 17278 }, { "epoch": 3.1930867868425943, "grad_norm": 0.9366108179092407, "learning_rate": 1.868130696101552e-05, "loss": 0.7827, "step": 17279 }, { "epoch": 3.1932726259059656, "grad_norm": 0.8351370096206665, "learning_rate": 1.867280515462586e-05, "loss": 0.971, "step": 17280 }, { "epoch": 3.1934584649693365, "grad_norm": 0.9481558799743652, "learning_rate": 1.8664305084025645e-05, "loss": 0.5461, "step": 17281 }, { "epoch": 3.193644304032708, "grad_norm": 0.8852013945579529, "learning_rate": 1.8655806749396254e-05, "loss": 0.8311, "step": 17282 }, { "epoch": 3.1938301430960787, "grad_norm": 0.8930564522743225, "learning_rate": 1.8647310150919083e-05, "loss": 0.6868, "step": 17283 }, { "epoch": 3.19401598215945, "grad_norm": 0.9694168567657471, "learning_rate": 1.8638815288775436e-05, "loss": 0.8869, "step": 17284 }, { "epoch": 3.194201821222821, "grad_norm": 1.0066895484924316, "learning_rate": 1.8630322163146664e-05, "loss": 0.6339, "step": 17285 }, { "epoch": 3.1943876602861923, "grad_norm": 1.0214611291885376, "learning_rate": 1.8621830774214044e-05, "loss": 0.6687, "step": 17286 }, { "epoch": 
3.194573499349563, "grad_norm": 1.0388553142547607, "learning_rate": 1.8613341122158813e-05, "loss": 0.7142, "step": 17287 }, { "epoch": 3.1947593384129345, "grad_norm": 0.9885718822479248, "learning_rate": 1.860485320716213e-05, "loss": 0.8672, "step": 17288 }, { "epoch": 3.1949451774763054, "grad_norm": 1.0320250988006592, "learning_rate": 1.859636702940516e-05, "loss": 0.7929, "step": 17289 }, { "epoch": 3.1951310165396767, "grad_norm": 1.0271234512329102, "learning_rate": 1.8587882589069073e-05, "loss": 0.7322, "step": 17290 }, { "epoch": 3.1953168556030476, "grad_norm": 0.719939112663269, "learning_rate": 1.8579399886334914e-05, "loss": 0.6652, "step": 17291 }, { "epoch": 3.195502694666419, "grad_norm": 0.9527273178100586, "learning_rate": 1.857091892138372e-05, "loss": 0.7538, "step": 17292 }, { "epoch": 3.19568853372979, "grad_norm": 0.912845253944397, "learning_rate": 1.856243969439655e-05, "loss": 0.9217, "step": 17293 }, { "epoch": 3.195874372793161, "grad_norm": 1.1026488542556763, "learning_rate": 1.8553962205554352e-05, "loss": 0.8682, "step": 17294 }, { "epoch": 3.1960602118565324, "grad_norm": 0.809587299823761, "learning_rate": 1.854548645503803e-05, "loss": 0.6614, "step": 17295 }, { "epoch": 3.1962460509199033, "grad_norm": 0.9222704768180847, "learning_rate": 1.8537012443028557e-05, "loss": 0.9814, "step": 17296 }, { "epoch": 3.1964318899832747, "grad_norm": 0.8720242381095886, "learning_rate": 1.852854016970672e-05, "loss": 0.8118, "step": 17297 }, { "epoch": 3.1966177290466455, "grad_norm": 0.8971609473228455, "learning_rate": 1.8520069635253414e-05, "loss": 0.7165, "step": 17298 }, { "epoch": 3.196803568110017, "grad_norm": 1.086275339126587, "learning_rate": 1.8511600839849397e-05, "loss": 0.9747, "step": 17299 }, { "epoch": 3.1969894071733878, "grad_norm": 0.993175745010376, "learning_rate": 1.8503133783675397e-05, "loss": 0.9135, "step": 17300 }, { "epoch": 3.197175246236759, "grad_norm": 1.087994933128357, "learning_rate": 
1.8494668466912167e-05, "loss": 0.8, "step": 17301 }, { "epoch": 3.19736108530013, "grad_norm": 0.9241663813591003, "learning_rate": 1.8486204889740376e-05, "loss": 0.8348, "step": 17302 }, { "epoch": 3.1975469243635013, "grad_norm": 1.007936954498291, "learning_rate": 1.8477743052340668e-05, "loss": 0.7418, "step": 17303 }, { "epoch": 3.197732763426872, "grad_norm": 1.0289450883865356, "learning_rate": 1.8469282954893598e-05, "loss": 0.7169, "step": 17304 }, { "epoch": 3.1979186024902435, "grad_norm": 0.8627272248268127, "learning_rate": 1.8460824597579773e-05, "loss": 0.8952, "step": 17305 }, { "epoch": 3.1981044415536144, "grad_norm": 0.815715491771698, "learning_rate": 1.8452367980579756e-05, "loss": 0.8715, "step": 17306 }, { "epoch": 3.1982902806169857, "grad_norm": 1.161781907081604, "learning_rate": 1.8443913104073983e-05, "loss": 0.7697, "step": 17307 }, { "epoch": 3.1984761196803566, "grad_norm": 0.9430105686187744, "learning_rate": 1.8435459968242953e-05, "loss": 0.6636, "step": 17308 }, { "epoch": 3.198661958743728, "grad_norm": 1.0547893047332764, "learning_rate": 1.8427008573267013e-05, "loss": 0.7107, "step": 17309 }, { "epoch": 3.1988477978070993, "grad_norm": 0.8477012515068054, "learning_rate": 1.8418558919326635e-05, "loss": 1.013, "step": 17310 }, { "epoch": 3.19903363687047, "grad_norm": 0.9253641366958618, "learning_rate": 1.8410111006602105e-05, "loss": 0.6009, "step": 17311 }, { "epoch": 3.1992194759338415, "grad_norm": 1.069978952407837, "learning_rate": 1.8401664835273713e-05, "loss": 1.046, "step": 17312 }, { "epoch": 3.1994053149972124, "grad_norm": 0.9249023795127869, "learning_rate": 1.8393220405521794e-05, "loss": 0.8473, "step": 17313 }, { "epoch": 3.1995911540605837, "grad_norm": 0.9044826030731201, "learning_rate": 1.8384777717526537e-05, "loss": 0.7975, "step": 17314 }, { "epoch": 3.1997769931239546, "grad_norm": 0.8663088083267212, "learning_rate": 1.837633677146814e-05, "loss": 0.6825, "step": 17315 }, { "epoch": 
3.199962832187326, "grad_norm": 1.0946561098098755, "learning_rate": 1.8367897567526738e-05, "loss": 0.994, "step": 17316 }, { "epoch": 3.200148671250697, "grad_norm": 1.0189578533172607, "learning_rate": 1.8359460105882476e-05, "loss": 0.9894, "step": 17317 }, { "epoch": 3.200334510314068, "grad_norm": 0.870308518409729, "learning_rate": 1.8351024386715464e-05, "loss": 0.8479, "step": 17318 }, { "epoch": 3.200520349377439, "grad_norm": 1.003595232963562, "learning_rate": 1.834259041020572e-05, "loss": 0.9592, "step": 17319 }, { "epoch": 3.2007061884408103, "grad_norm": 0.8877385258674622, "learning_rate": 1.8334158176533233e-05, "loss": 0.8161, "step": 17320 }, { "epoch": 3.2008920275041812, "grad_norm": 0.989646852016449, "learning_rate": 1.8325727685878014e-05, "loss": 0.7566, "step": 17321 }, { "epoch": 3.2010778665675526, "grad_norm": 0.937359631061554, "learning_rate": 1.831729893841998e-05, "loss": 0.918, "step": 17322 }, { "epoch": 3.2012637056309234, "grad_norm": 0.9940277338027954, "learning_rate": 1.8308871934339033e-05, "loss": 0.7984, "step": 17323 }, { "epoch": 3.2014495446942948, "grad_norm": 0.8262761235237122, "learning_rate": 1.8300446673815e-05, "loss": 0.6444, "step": 17324 }, { "epoch": 3.2016353837576657, "grad_norm": 0.8894662857055664, "learning_rate": 1.829202315702775e-05, "loss": 0.8029, "step": 17325 }, { "epoch": 3.201821222821037, "grad_norm": 1.0533615350723267, "learning_rate": 1.8283601384157023e-05, "loss": 0.7401, "step": 17326 }, { "epoch": 3.202007061884408, "grad_norm": 0.8021373152732849, "learning_rate": 1.827518135538263e-05, "loss": 0.9263, "step": 17327 }, { "epoch": 3.202192900947779, "grad_norm": 0.7998637557029724, "learning_rate": 1.826676307088424e-05, "loss": 0.555, "step": 17328 }, { "epoch": 3.2023787400111505, "grad_norm": 0.956844687461853, "learning_rate": 1.8258346530841508e-05, "loss": 1.0466, "step": 17329 }, { "epoch": 3.2025645790745214, "grad_norm": 1.0209529399871826, "learning_rate": 
1.824993173543411e-05, "loss": 0.8735, "step": 17330 }, { "epoch": 3.2027504181378927, "grad_norm": 1.0398613214492798, "learning_rate": 1.824151868484164e-05, "loss": 0.7566, "step": 17331 }, { "epoch": 3.2029362572012636, "grad_norm": 0.9832069277763367, "learning_rate": 1.823310737924363e-05, "loss": 0.7192, "step": 17332 }, { "epoch": 3.203122096264635, "grad_norm": 0.9637802243232727, "learning_rate": 1.8224697818819646e-05, "loss": 0.6565, "step": 17333 }, { "epoch": 3.203307935328006, "grad_norm": 0.9825659990310669, "learning_rate": 1.8216290003749147e-05, "loss": 0.7692, "step": 17334 }, { "epoch": 3.203493774391377, "grad_norm": 0.9774386882781982, "learning_rate": 1.82078839342116e-05, "loss": 0.9013, "step": 17335 }, { "epoch": 3.203679613454748, "grad_norm": 0.9933068752288818, "learning_rate": 1.8199479610386382e-05, "loss": 0.8352, "step": 17336 }, { "epoch": 3.2038654525181194, "grad_norm": 0.9934918880462646, "learning_rate": 1.8191077032452898e-05, "loss": 0.6283, "step": 17337 }, { "epoch": 3.2040512915814903, "grad_norm": 0.9922161102294922, "learning_rate": 1.818267620059051e-05, "loss": 0.859, "step": 17338 }, { "epoch": 3.2042371306448616, "grad_norm": 0.8654912114143372, "learning_rate": 1.81742771149785e-05, "loss": 0.8402, "step": 17339 }, { "epoch": 3.2044229697082325, "grad_norm": 0.9573314189910889, "learning_rate": 1.816587977579609e-05, "loss": 1.0281, "step": 17340 }, { "epoch": 3.204608808771604, "grad_norm": 1.1613332033157349, "learning_rate": 1.8157484183222574e-05, "loss": 0.9246, "step": 17341 }, { "epoch": 3.2047946478349747, "grad_norm": 0.8110087513923645, "learning_rate": 1.8149090337437114e-05, "loss": 0.778, "step": 17342 }, { "epoch": 3.204980486898346, "grad_norm": 1.1747585535049438, "learning_rate": 1.8140698238618846e-05, "loss": 0.89, "step": 17343 }, { "epoch": 3.2051663259617174, "grad_norm": 0.8087934851646423, "learning_rate": 1.8132307886946886e-05, "loss": 0.7475, "step": 17344 }, { "epoch": 
3.2053521650250882, "grad_norm": 0.8710451722145081, "learning_rate": 1.8123919282600342e-05, "loss": 0.7769, "step": 17345 }, { "epoch": 3.2055380040884596, "grad_norm": 0.9005964994430542, "learning_rate": 1.811553242575824e-05, "loss": 0.7139, "step": 17346 }, { "epoch": 3.2057238431518305, "grad_norm": 0.7418885827064514, "learning_rate": 1.810714731659955e-05, "loss": 0.8709, "step": 17347 }, { "epoch": 3.205909682215202, "grad_norm": 1.1336352825164795, "learning_rate": 1.80987639553033e-05, "loss": 1.1367, "step": 17348 }, { "epoch": 3.2060955212785727, "grad_norm": 0.9179961085319519, "learning_rate": 1.809038234204835e-05, "loss": 0.9771, "step": 17349 }, { "epoch": 3.206281360341944, "grad_norm": 0.9118735194206238, "learning_rate": 1.808200247701366e-05, "loss": 0.7601, "step": 17350 }, { "epoch": 3.206467199405315, "grad_norm": 1.0031778812408447, "learning_rate": 1.807362436037804e-05, "loss": 1.1395, "step": 17351 }, { "epoch": 3.206653038468686, "grad_norm": 0.9841191172599792, "learning_rate": 1.80652479923203e-05, "loss": 0.8276, "step": 17352 }, { "epoch": 3.206838877532057, "grad_norm": 0.9167536497116089, "learning_rate": 1.8056873373019255e-05, "loss": 0.9635, "step": 17353 }, { "epoch": 3.2070247165954284, "grad_norm": 1.0898497104644775, "learning_rate": 1.8048500502653632e-05, "loss": 0.706, "step": 17354 }, { "epoch": 3.2072105556587993, "grad_norm": 1.0086370706558228, "learning_rate": 1.8040129381402137e-05, "loss": 0.7848, "step": 17355 }, { "epoch": 3.2073963947221706, "grad_norm": 0.8157899975776672, "learning_rate": 1.80317600094434e-05, "loss": 0.7784, "step": 17356 }, { "epoch": 3.2075822337855415, "grad_norm": 0.9179068803787231, "learning_rate": 1.8023392386956105e-05, "loss": 1.042, "step": 17357 }, { "epoch": 3.207768072848913, "grad_norm": 0.9932705163955688, "learning_rate": 1.8015026514118794e-05, "loss": 0.6414, "step": 17358 }, { "epoch": 3.207953911912284, "grad_norm": 1.0313844680786133, "learning_rate": 
1.8006662391110085e-05, "loss": 0.9211, "step": 17359 }, { "epoch": 3.208139750975655, "grad_norm": 0.9287518858909607, "learning_rate": 1.7998300018108426e-05, "loss": 0.8664, "step": 17360 }, { "epoch": 3.2083255900390264, "grad_norm": 1.0484979152679443, "learning_rate": 1.7989939395292365e-05, "loss": 0.7927, "step": 17361 }, { "epoch": 3.2085114291023973, "grad_norm": 0.7547088265419006, "learning_rate": 1.7981580522840312e-05, "loss": 0.7856, "step": 17362 }, { "epoch": 3.2086972681657686, "grad_norm": 0.8366743922233582, "learning_rate": 1.797322340093067e-05, "loss": 0.8571, "step": 17363 }, { "epoch": 3.2088831072291395, "grad_norm": 0.9730438590049744, "learning_rate": 1.79648680297418e-05, "loss": 0.907, "step": 17364 }, { "epoch": 3.209068946292511, "grad_norm": 1.0750919580459595, "learning_rate": 1.7956514409452052e-05, "loss": 0.9299, "step": 17365 }, { "epoch": 3.2092547853558817, "grad_norm": 0.958261251449585, "learning_rate": 1.7948162540239723e-05, "loss": 0.759, "step": 17366 }, { "epoch": 3.209440624419253, "grad_norm": 0.8561491370201111, "learning_rate": 1.7939812422283055e-05, "loss": 0.5556, "step": 17367 }, { "epoch": 3.209626463482624, "grad_norm": 1.0215051174163818, "learning_rate": 1.7931464055760207e-05, "loss": 0.9857, "step": 17368 }, { "epoch": 3.2098123025459953, "grad_norm": 1.0156500339508057, "learning_rate": 1.792311744084949e-05, "loss": 0.7026, "step": 17369 }, { "epoch": 3.209998141609366, "grad_norm": 0.9816356301307678, "learning_rate": 1.7914772577728966e-05, "loss": 0.9134, "step": 17370 }, { "epoch": 3.2101839806727375, "grad_norm": 1.0151969194412231, "learning_rate": 1.7906429466576767e-05, "loss": 0.8126, "step": 17371 }, { "epoch": 3.2103698197361084, "grad_norm": 0.9710429906845093, "learning_rate": 1.7898088107570932e-05, "loss": 0.8628, "step": 17372 }, { "epoch": 3.2105556587994797, "grad_norm": 1.1953200101852417, "learning_rate": 1.7889748500889536e-05, "loss": 0.7935, "step": 17373 }, { "epoch": 
3.2107414978628506, "grad_norm": 0.781575620174408, "learning_rate": 1.788141064671055e-05, "loss": 0.9018, "step": 17374 }, { "epoch": 3.210927336926222, "grad_norm": 0.9112656712532043, "learning_rate": 1.7873074545211932e-05, "loss": 0.7, "step": 17375 }, { "epoch": 3.211113175989593, "grad_norm": 1.0575007200241089, "learning_rate": 1.786474019657157e-05, "loss": 0.9902, "step": 17376 }, { "epoch": 3.211299015052964, "grad_norm": 0.6355597376823425, "learning_rate": 1.7856407600967416e-05, "loss": 0.4051, "step": 17377 }, { "epoch": 3.2114848541163354, "grad_norm": 0.8127633929252625, "learning_rate": 1.784807675857727e-05, "loss": 0.7989, "step": 17378 }, { "epoch": 3.2116706931797063, "grad_norm": 0.8742978572845459, "learning_rate": 1.783974766957893e-05, "loss": 0.7368, "step": 17379 }, { "epoch": 3.2118565322430777, "grad_norm": 0.8669548630714417, "learning_rate": 1.7831420334150183e-05, "loss": 0.782, "step": 17380 }, { "epoch": 3.2120423713064485, "grad_norm": 0.8861702680587769, "learning_rate": 1.7823094752468782e-05, "loss": 0.8799, "step": 17381 }, { "epoch": 3.21222821036982, "grad_norm": 0.9047160744667053, "learning_rate": 1.78147709247124e-05, "loss": 0.9262, "step": 17382 }, { "epoch": 3.2124140494331908, "grad_norm": 0.8661258220672607, "learning_rate": 1.78064488510587e-05, "loss": 0.5885, "step": 17383 }, { "epoch": 3.212599888496562, "grad_norm": 0.955339789390564, "learning_rate": 1.7798128531685276e-05, "loss": 1.0327, "step": 17384 }, { "epoch": 3.212785727559933, "grad_norm": 0.9303874373435974, "learning_rate": 1.7789809966769753e-05, "loss": 0.4796, "step": 17385 }, { "epoch": 3.2129715666233043, "grad_norm": 0.7944729328155518, "learning_rate": 1.778149315648967e-05, "loss": 0.8049, "step": 17386 }, { "epoch": 3.213157405686675, "grad_norm": 1.2434570789337158, "learning_rate": 1.7773178101022514e-05, "loss": 0.9347, "step": 17387 }, { "epoch": 3.2133432447500465, "grad_norm": 0.9937906861305237, "learning_rate": 
1.7764864800545744e-05, "loss": 0.9397, "step": 17388 }, { "epoch": 3.2135290838134174, "grad_norm": 1.1380114555358887, "learning_rate": 1.7756553255236807e-05, "loss": 0.7016, "step": 17389 }, { "epoch": 3.2137149228767887, "grad_norm": 0.9934018850326538, "learning_rate": 1.774824346527314e-05, "loss": 0.7752, "step": 17390 }, { "epoch": 3.2139007619401596, "grad_norm": 1.2391068935394287, "learning_rate": 1.7739935430832055e-05, "loss": 0.8902, "step": 17391 }, { "epoch": 3.214086601003531, "grad_norm": 0.8846229314804077, "learning_rate": 1.773162915209087e-05, "loss": 0.8731, "step": 17392 }, { "epoch": 3.2142724400669023, "grad_norm": 0.8115828633308411, "learning_rate": 1.77233246292269e-05, "loss": 0.7271, "step": 17393 }, { "epoch": 3.214458279130273, "grad_norm": 0.9347731471061707, "learning_rate": 1.771502186241738e-05, "loss": 0.7558, "step": 17394 }, { "epoch": 3.2146441181936445, "grad_norm": 1.0829119682312012, "learning_rate": 1.770672085183951e-05, "loss": 0.7962, "step": 17395 }, { "epoch": 3.2148299572570154, "grad_norm": 0.953552782535553, "learning_rate": 1.7698421597670433e-05, "loss": 1.0499, "step": 17396 }, { "epoch": 3.2150157963203867, "grad_norm": 0.8669760227203369, "learning_rate": 1.7690124100087335e-05, "loss": 0.8136, "step": 17397 }, { "epoch": 3.2152016353837576, "grad_norm": 0.8127499222755432, "learning_rate": 1.7681828359267294e-05, "loss": 0.9346, "step": 17398 }, { "epoch": 3.215387474447129, "grad_norm": 0.8423436880111694, "learning_rate": 1.7673534375387356e-05, "loss": 0.7928, "step": 17399 }, { "epoch": 3.2155733135105, "grad_norm": 0.8691553473472595, "learning_rate": 1.7665242148624517e-05, "loss": 0.8103, "step": 17400 }, { "epoch": 3.215759152573871, "grad_norm": 1.0253678560256958, "learning_rate": 1.765695167915582e-05, "loss": 0.8424, "step": 17401 }, { "epoch": 3.215944991637242, "grad_norm": 0.8192505240440369, "learning_rate": 1.7648662967158203e-05, "loss": 0.7175, "step": 17402 }, { "epoch": 
3.2161308307006133, "grad_norm": 0.875633716583252, "learning_rate": 1.7640376012808536e-05, "loss": 0.953, "step": 17403 }, { "epoch": 3.2163166697639842, "grad_norm": 0.8881807923316956, "learning_rate": 1.7632090816283696e-05, "loss": 0.7815, "step": 17404 }, { "epoch": 3.2165025088273556, "grad_norm": 0.9027812480926514, "learning_rate": 1.7623807377760548e-05, "loss": 0.8665, "step": 17405 }, { "epoch": 3.2166883478907264, "grad_norm": 0.943454921245575, "learning_rate": 1.761552569741587e-05, "loss": 0.823, "step": 17406 }, { "epoch": 3.2168741869540978, "grad_norm": 1.0618342161178589, "learning_rate": 1.7607245775426418e-05, "loss": 0.7578, "step": 17407 }, { "epoch": 3.217060026017469, "grad_norm": 0.7595953941345215, "learning_rate": 1.7598967611968885e-05, "loss": 0.8006, "step": 17408 }, { "epoch": 3.21724586508084, "grad_norm": 0.9064504504203796, "learning_rate": 1.759069120722001e-05, "loss": 0.7277, "step": 17409 }, { "epoch": 3.2174317041442113, "grad_norm": 0.959622859954834, "learning_rate": 1.7582416561356395e-05, "loss": 0.8413, "step": 17410 }, { "epoch": 3.217617543207582, "grad_norm": 1.2936711311340332, "learning_rate": 1.7574143674554677e-05, "loss": 0.6811, "step": 17411 }, { "epoch": 3.2178033822709535, "grad_norm": 0.8030010461807251, "learning_rate": 1.756587254699139e-05, "loss": 0.762, "step": 17412 }, { "epoch": 3.2179892213343244, "grad_norm": 1.1652907133102417, "learning_rate": 1.755760317884313e-05, "loss": 0.8683, "step": 17413 }, { "epoch": 3.2181750603976957, "grad_norm": 0.7912318110466003, "learning_rate": 1.7549335570286342e-05, "loss": 0.6673, "step": 17414 }, { "epoch": 3.2183608994610666, "grad_norm": 0.8837103247642517, "learning_rate": 1.7541069721497493e-05, "loss": 0.9807, "step": 17415 }, { "epoch": 3.218546738524438, "grad_norm": 0.8473705053329468, "learning_rate": 1.7532805632652984e-05, "loss": 0.7719, "step": 17416 }, { "epoch": 3.218732577587809, "grad_norm": 0.7680140733718872, "learning_rate": 
1.752454330392924e-05, "loss": 0.7889, "step": 17417 }, { "epoch": 3.21891841665118, "grad_norm": 0.7964486479759216, "learning_rate": 1.751628273550259e-05, "loss": 0.6018, "step": 17418 }, { "epoch": 3.219104255714551, "grad_norm": 0.9775687456130981, "learning_rate": 1.7508023927549333e-05, "loss": 0.8298, "step": 17419 }, { "epoch": 3.2192900947779224, "grad_norm": 1.0651310682296753, "learning_rate": 1.7499766880245728e-05, "loss": 0.6224, "step": 17420 }, { "epoch": 3.2194759338412933, "grad_norm": 0.9536681175231934, "learning_rate": 1.7491511593768016e-05, "loss": 0.7594, "step": 17421 }, { "epoch": 3.2196617729046646, "grad_norm": 0.8498244881629944, "learning_rate": 1.748325806829242e-05, "loss": 0.8059, "step": 17422 }, { "epoch": 3.2198476119680355, "grad_norm": 0.96226567029953, "learning_rate": 1.7475006303995077e-05, "loss": 0.9998, "step": 17423 }, { "epoch": 3.220033451031407, "grad_norm": 0.9402596354484558, "learning_rate": 1.746675630105209e-05, "loss": 0.9033, "step": 17424 }, { "epoch": 3.2202192900947777, "grad_norm": 0.9709116220474243, "learning_rate": 1.7458508059639577e-05, "loss": 0.7782, "step": 17425 }, { "epoch": 3.220405129158149, "grad_norm": 1.0609785318374634, "learning_rate": 1.7450261579933557e-05, "loss": 0.7978, "step": 17426 }, { "epoch": 3.2205909682215204, "grad_norm": 0.8852887153625488, "learning_rate": 1.7442016862110056e-05, "loss": 0.8358, "step": 17427 }, { "epoch": 3.2207768072848912, "grad_norm": 0.8573838472366333, "learning_rate": 1.7433773906344998e-05, "loss": 0.7084, "step": 17428 }, { "epoch": 3.2209626463482626, "grad_norm": 0.9779657125473022, "learning_rate": 1.742553271281436e-05, "loss": 0.7715, "step": 17429 }, { "epoch": 3.2211484854116335, "grad_norm": 0.8632031083106995, "learning_rate": 1.7417293281694035e-05, "loss": 0.7354, "step": 17430 }, { "epoch": 3.221334324475005, "grad_norm": 1.1131991147994995, "learning_rate": 1.740905561315984e-05, "loss": 0.807, "step": 17431 }, { "epoch": 
3.2215201635383757, "grad_norm": 0.8839072585105896, "learning_rate": 1.740081970738763e-05, "loss": 0.7896, "step": 17432 }, { "epoch": 3.221706002601747, "grad_norm": 0.9753618836402893, "learning_rate": 1.7392585564553188e-05, "loss": 0.6865, "step": 17433 }, { "epoch": 3.221891841665118, "grad_norm": 0.8987646698951721, "learning_rate": 1.738435318483226e-05, "loss": 0.9049, "step": 17434 }, { "epoch": 3.222077680728489, "grad_norm": 0.9076732397079468, "learning_rate": 1.7376122568400532e-05, "loss": 0.9078, "step": 17435 }, { "epoch": 3.22226351979186, "grad_norm": 0.8945940732955933, "learning_rate": 1.7367893715433647e-05, "loss": 0.8435, "step": 17436 }, { "epoch": 3.2224493588552314, "grad_norm": 0.8051013350486755, "learning_rate": 1.7359666626107306e-05, "loss": 0.6429, "step": 17437 }, { "epoch": 3.2226351979186023, "grad_norm": 0.9417064785957336, "learning_rate": 1.735144130059706e-05, "loss": 0.8148, "step": 17438 }, { "epoch": 3.2228210369819736, "grad_norm": 0.9133759140968323, "learning_rate": 1.7343217739078466e-05, "loss": 1.0047, "step": 17439 }, { "epoch": 3.2230068760453445, "grad_norm": 0.8629543781280518, "learning_rate": 1.7334995941727018e-05, "loss": 0.8391, "step": 17440 }, { "epoch": 3.223192715108716, "grad_norm": 0.8090866208076477, "learning_rate": 1.732677590871825e-05, "loss": 0.8694, "step": 17441 }, { "epoch": 3.223378554172087, "grad_norm": 1.018536925315857, "learning_rate": 1.7318557640227562e-05, "loss": 0.6775, "step": 17442 }, { "epoch": 3.223564393235458, "grad_norm": 0.8848704695701599, "learning_rate": 1.7310341136430385e-05, "loss": 0.8948, "step": 17443 }, { "epoch": 3.2237502322988294, "grad_norm": 1.021011471748352, "learning_rate": 1.730212639750206e-05, "loss": 0.8119, "step": 17444 }, { "epoch": 3.2239360713622003, "grad_norm": 0.9703376889228821, "learning_rate": 1.7293913423617958e-05, "loss": 0.9911, "step": 17445 }, { "epoch": 3.2241219104255716, "grad_norm": 0.9496825337409973, "learning_rate": 
1.7285702214953335e-05, "loss": 0.7845, "step": 17446 }, { "epoch": 3.2243077494889425, "grad_norm": 1.0321695804595947, "learning_rate": 1.7277492771683458e-05, "loss": 0.888, "step": 17447 }, { "epoch": 3.224493588552314, "grad_norm": 0.8181531429290771, "learning_rate": 1.7269285093983523e-05, "loss": 0.6862, "step": 17448 }, { "epoch": 3.2246794276156847, "grad_norm": 0.8088932037353516, "learning_rate": 1.7261079182028738e-05, "loss": 0.6089, "step": 17449 }, { "epoch": 3.224865266679056, "grad_norm": 0.8229671120643616, "learning_rate": 1.7252875035994242e-05, "loss": 0.8199, "step": 17450 }, { "epoch": 3.225051105742427, "grad_norm": 2.0220842361450195, "learning_rate": 1.7244672656055106e-05, "loss": 1.2741, "step": 17451 }, { "epoch": 3.2252369448057983, "grad_norm": 0.7622953057289124, "learning_rate": 1.723647204238641e-05, "loss": 0.8076, "step": 17452 }, { "epoch": 3.225422783869169, "grad_norm": 0.7097654342651367, "learning_rate": 1.7228273195163213e-05, "loss": 0.4847, "step": 17453 }, { "epoch": 3.2256086229325405, "grad_norm": 0.9323017597198486, "learning_rate": 1.722007611456049e-05, "loss": 0.8992, "step": 17454 }, { "epoch": 3.2257944619959114, "grad_norm": 0.8673163056373596, "learning_rate": 1.7211880800753187e-05, "loss": 0.7807, "step": 17455 }, { "epoch": 3.2259803010592827, "grad_norm": 0.8620083332061768, "learning_rate": 1.720368725391619e-05, "loss": 0.8554, "step": 17456 }, { "epoch": 3.226166140122654, "grad_norm": 1.004608154296875, "learning_rate": 1.719549547422443e-05, "loss": 0.6373, "step": 17457 }, { "epoch": 3.226351979186025, "grad_norm": 0.9062421321868896, "learning_rate": 1.718730546185272e-05, "loss": 0.8553, "step": 17458 }, { "epoch": 3.2265378182493962, "grad_norm": 0.8143224716186523, "learning_rate": 1.717911721697586e-05, "loss": 0.6425, "step": 17459 }, { "epoch": 3.226723657312767, "grad_norm": 1.049385905265808, "learning_rate": 1.71709307397686e-05, "loss": 0.8966, "step": 17460 }, { "epoch": 
3.2269094963761384, "grad_norm": 0.9719651937484741, "learning_rate": 1.7162746030405708e-05, "loss": 1.0292, "step": 17461 }, { "epoch": 3.2270953354395093, "grad_norm": 0.8390783071517944, "learning_rate": 1.7154563089061837e-05, "loss": 0.7897, "step": 17462 }, { "epoch": 3.2272811745028807, "grad_norm": 1.06975519657135, "learning_rate": 1.7146381915911624e-05, "loss": 0.9167, "step": 17463 }, { "epoch": 3.2274670135662515, "grad_norm": 0.895122766494751, "learning_rate": 1.713820251112972e-05, "loss": 0.8312, "step": 17464 }, { "epoch": 3.227652852629623, "grad_norm": 0.9486971497535706, "learning_rate": 1.7130024874890705e-05, "loss": 0.9377, "step": 17465 }, { "epoch": 3.2278386916929938, "grad_norm": 0.9098740220069885, "learning_rate": 1.7121849007369095e-05, "loss": 0.9408, "step": 17466 }, { "epoch": 3.228024530756365, "grad_norm": 0.9361106157302856, "learning_rate": 1.7113674908739396e-05, "loss": 0.5457, "step": 17467 }, { "epoch": 3.228210369819736, "grad_norm": 0.8573850989341736, "learning_rate": 1.7105502579176036e-05, "loss": 0.7005, "step": 17468 }, { "epoch": 3.2283962088831073, "grad_norm": 1.0105928182601929, "learning_rate": 1.7097332018853507e-05, "loss": 0.6945, "step": 17469 }, { "epoch": 3.228582047946478, "grad_norm": 0.8553135991096497, "learning_rate": 1.708916322794615e-05, "loss": 0.6846, "step": 17470 }, { "epoch": 3.2287678870098495, "grad_norm": 0.8401904702186584, "learning_rate": 1.7080996206628307e-05, "loss": 0.8056, "step": 17471 }, { "epoch": 3.2289537260732204, "grad_norm": 0.9462745785713196, "learning_rate": 1.7072830955074326e-05, "loss": 0.9097, "step": 17472 }, { "epoch": 3.2291395651365917, "grad_norm": 0.9162221550941467, "learning_rate": 1.706466747345843e-05, "loss": 0.7836, "step": 17473 }, { "epoch": 3.2293254041999626, "grad_norm": 0.9128598570823669, "learning_rate": 1.705650576195491e-05, "loss": 0.8026, "step": 17474 }, { "epoch": 3.229511243263334, "grad_norm": 0.8670196533203125, "learning_rate": 
1.704834582073794e-05, "loss": 0.7359, "step": 17475 }, { "epoch": 3.2296970823267053, "grad_norm": 0.8910917639732361, "learning_rate": 1.7040187649981653e-05, "loss": 0.7666, "step": 17476 }, { "epoch": 3.229882921390076, "grad_norm": 0.9486824870109558, "learning_rate": 1.7032031249860225e-05, "loss": 0.7557, "step": 17477 }, { "epoch": 3.2300687604534475, "grad_norm": 1.0442936420440674, "learning_rate": 1.7023876620547707e-05, "loss": 0.67, "step": 17478 }, { "epoch": 3.2302545995168184, "grad_norm": 0.8497734665870667, "learning_rate": 1.701572376221815e-05, "loss": 0.7017, "step": 17479 }, { "epoch": 3.2304404385801897, "grad_norm": 0.8752564787864685, "learning_rate": 1.7007572675045536e-05, "loss": 0.6539, "step": 17480 }, { "epoch": 3.2306262776435606, "grad_norm": 1.0192726850509644, "learning_rate": 1.6999423359203893e-05, "loss": 0.6568, "step": 17481 }, { "epoch": 3.230812116706932, "grad_norm": 0.9239850640296936, "learning_rate": 1.6991275814867125e-05, "loss": 0.7994, "step": 17482 }, { "epoch": 3.230997955770303, "grad_norm": 0.8536369800567627, "learning_rate": 1.6983130042209094e-05, "loss": 0.7733, "step": 17483 }, { "epoch": 3.231183794833674, "grad_norm": 1.1056362390518188, "learning_rate": 1.6974986041403683e-05, "loss": 0.9172, "step": 17484 }, { "epoch": 3.231369633897045, "grad_norm": 1.0050475597381592, "learning_rate": 1.6966843812624754e-05, "loss": 0.5457, "step": 17485 }, { "epoch": 3.2315554729604163, "grad_norm": 0.9066400527954102, "learning_rate": 1.6958703356046056e-05, "loss": 0.9269, "step": 17486 }, { "epoch": 3.2317413120237872, "grad_norm": 1.073598861694336, "learning_rate": 1.6950564671841328e-05, "loss": 0.935, "step": 17487 }, { "epoch": 3.2319271510871586, "grad_norm": 0.8215804100036621, "learning_rate": 1.6942427760184243e-05, "loss": 0.6632, "step": 17488 }, { "epoch": 3.2321129901505294, "grad_norm": 0.8021205067634583, "learning_rate": 1.693429262124854e-05, "loss": 0.6346, "step": 17489 }, { "epoch": 
3.2322988292139008, "grad_norm": 0.8880865573883057, "learning_rate": 1.6926159255207817e-05, "loss": 0.7718, "step": 17490 }, { "epoch": 3.232484668277272, "grad_norm": 0.8654438257217407, "learning_rate": 1.6918027662235637e-05, "loss": 0.6455, "step": 17491 }, { "epoch": 3.232670507340643, "grad_norm": 0.9712738990783691, "learning_rate": 1.6909897842505608e-05, "loss": 0.7434, "step": 17492 }, { "epoch": 3.2328563464040143, "grad_norm": 1.083753228187561, "learning_rate": 1.6901769796191214e-05, "loss": 0.9017, "step": 17493 }, { "epoch": 3.233042185467385, "grad_norm": 0.8520892262458801, "learning_rate": 1.689364352346592e-05, "loss": 0.8221, "step": 17494 }, { "epoch": 3.2332280245307565, "grad_norm": 0.869002103805542, "learning_rate": 1.6885519024503215e-05, "loss": 0.6924, "step": 17495 }, { "epoch": 3.2334138635941274, "grad_norm": 1.0325137376785278, "learning_rate": 1.6877396299476445e-05, "loss": 0.8872, "step": 17496 }, { "epoch": 3.2335997026574987, "grad_norm": 0.8123409152030945, "learning_rate": 1.6869275348559045e-05, "loss": 0.5695, "step": 17497 }, { "epoch": 3.2337855417208696, "grad_norm": 0.9622693061828613, "learning_rate": 1.6861156171924297e-05, "loss": 0.8806, "step": 17498 }, { "epoch": 3.2337855417208696, "eval_loss": 1.0438145399093628, "eval_runtime": 23.1525, "eval_samples_per_second": 47.165, "eval_steps_per_second": 23.583, "step": 17498 }, { "epoch": 3.233971380784241, "grad_norm": 0.9132378101348877, "learning_rate": 1.6853038769745467e-05, "loss": 0.7751, "step": 17499 }, { "epoch": 3.234157219847612, "grad_norm": 0.8262106776237488, "learning_rate": 1.6844923142195878e-05, "loss": 0.8341, "step": 17500 }, { "epoch": 3.234343058910983, "grad_norm": 0.8283272385597229, "learning_rate": 1.6836809289448698e-05, "loss": 0.9478, "step": 17501 }, { "epoch": 3.234528897974354, "grad_norm": 1.0779932737350464, "learning_rate": 1.6828697211677103e-05, "loss": 0.9412, "step": 17502 }, { "epoch": 3.2347147370377254, "grad_norm": 
0.8511762619018555, "learning_rate": 1.6820586909054215e-05, "loss": 0.6303, "step": 17503 }, { "epoch": 3.2349005761010963, "grad_norm": 0.8621987104415894, "learning_rate": 1.6812478381753194e-05, "loss": 0.8246, "step": 17504 }, { "epoch": 3.2350864151644676, "grad_norm": 1.0734256505966187, "learning_rate": 1.6804371629947034e-05, "loss": 0.878, "step": 17505 }, { "epoch": 3.2352722542278385, "grad_norm": 0.904697597026825, "learning_rate": 1.6796266653808812e-05, "loss": 0.8808, "step": 17506 }, { "epoch": 3.23545809329121, "grad_norm": 0.940464973449707, "learning_rate": 1.6788163453511508e-05, "loss": 0.8067, "step": 17507 }, { "epoch": 3.2356439323545807, "grad_norm": 1.1474518775939941, "learning_rate": 1.678006202922803e-05, "loss": 0.8774, "step": 17508 }, { "epoch": 3.235829771417952, "grad_norm": 0.840255081653595, "learning_rate": 1.6771962381131335e-05, "loss": 0.8111, "step": 17509 }, { "epoch": 3.2360156104813234, "grad_norm": 1.0624665021896362, "learning_rate": 1.6763864509394277e-05, "loss": 0.7288, "step": 17510 }, { "epoch": 3.2362014495446942, "grad_norm": 0.8624823689460754, "learning_rate": 1.6755768414189664e-05, "loss": 0.9116, "step": 17511 }, { "epoch": 3.2363872886080656, "grad_norm": 1.5082919597625732, "learning_rate": 1.6747674095690357e-05, "loss": 1.1363, "step": 17512 }, { "epoch": 3.2365731276714365, "grad_norm": 0.9853875041007996, "learning_rate": 1.6739581554069072e-05, "loss": 0.7867, "step": 17513 }, { "epoch": 3.236758966734808, "grad_norm": 0.960754930973053, "learning_rate": 1.673149078949855e-05, "loss": 0.8707, "step": 17514 }, { "epoch": 3.2369448057981787, "grad_norm": 1.0832768678665161, "learning_rate": 1.672340180215143e-05, "loss": 0.7316, "step": 17515 }, { "epoch": 3.23713064486155, "grad_norm": 0.9456191658973694, "learning_rate": 1.6715314592200382e-05, "loss": 0.8601, "step": 17516 }, { "epoch": 3.237316483924921, "grad_norm": 0.8804882168769836, "learning_rate": 1.6707229159818062e-05, "loss": 0.6816, 
"step": 17517 }, { "epoch": 3.237502322988292, "grad_norm": 0.7851766347885132, "learning_rate": 1.6699145505176983e-05, "loss": 0.7597, "step": 17518 }, { "epoch": 3.237688162051663, "grad_norm": 0.9217889308929443, "learning_rate": 1.6691063628449677e-05, "loss": 1.0376, "step": 17519 }, { "epoch": 3.2378740011150344, "grad_norm": 1.2963614463806152, "learning_rate": 1.6682983529808683e-05, "loss": 0.8681, "step": 17520 }, { "epoch": 3.2380598401784053, "grad_norm": 0.9279267191886902, "learning_rate": 1.667490520942643e-05, "loss": 0.8838, "step": 17521 }, { "epoch": 3.2382456792417766, "grad_norm": 0.8906137347221375, "learning_rate": 1.666682866747532e-05, "loss": 0.8667, "step": 17522 }, { "epoch": 3.2384315183051475, "grad_norm": 0.7467948198318481, "learning_rate": 1.6658753904127734e-05, "loss": 0.6173, "step": 17523 }, { "epoch": 3.238617357368519, "grad_norm": 0.9630430936813354, "learning_rate": 1.665068091955604e-05, "loss": 0.82, "step": 17524 }, { "epoch": 3.23880319643189, "grad_norm": 1.0588127374649048, "learning_rate": 1.6642609713932535e-05, "loss": 0.9261, "step": 17525 }, { "epoch": 3.238989035495261, "grad_norm": 1.1427159309387207, "learning_rate": 1.6634540287429457e-05, "loss": 0.7037, "step": 17526 }, { "epoch": 3.2391748745586324, "grad_norm": 1.0751985311508179, "learning_rate": 1.6626472640219083e-05, "loss": 0.9218, "step": 17527 }, { "epoch": 3.2393607136220033, "grad_norm": 0.9019919633865356, "learning_rate": 1.661840677247354e-05, "loss": 0.8532, "step": 17528 }, { "epoch": 3.2395465526853746, "grad_norm": 1.1597665548324585, "learning_rate": 1.6610342684365044e-05, "loss": 0.8841, "step": 17529 }, { "epoch": 3.2397323917487455, "grad_norm": 0.9410503506660461, "learning_rate": 1.6602280376065682e-05, "loss": 0.84, "step": 17530 }, { "epoch": 3.239918230812117, "grad_norm": 0.9693671464920044, "learning_rate": 1.6594219847747504e-05, "loss": 0.8363, "step": 17531 }, { "epoch": 3.2401040698754877, "grad_norm": 0.992178201675415, 
"learning_rate": 1.6586161099582608e-05, "loss": 0.9105, "step": 17532 }, { "epoch": 3.240289908938859, "grad_norm": 1.1470305919647217, "learning_rate": 1.6578104131742945e-05, "loss": 0.8765, "step": 17533 }, { "epoch": 3.24047574800223, "grad_norm": 0.852066695690155, "learning_rate": 1.6570048944400496e-05, "loss": 0.5751, "step": 17534 }, { "epoch": 3.2406615870656013, "grad_norm": 1.0298662185668945, "learning_rate": 1.656199553772716e-05, "loss": 0.7357, "step": 17535 }, { "epoch": 3.240847426128972, "grad_norm": 0.9861373901367188, "learning_rate": 1.6553943911894843e-05, "loss": 0.9414, "step": 17536 }, { "epoch": 3.2410332651923435, "grad_norm": 0.864341676235199, "learning_rate": 1.6545894067075418e-05, "loss": 0.8659, "step": 17537 }, { "epoch": 3.2412191042557144, "grad_norm": 0.9241873621940613, "learning_rate": 1.653784600344067e-05, "loss": 0.794, "step": 17538 }, { "epoch": 3.2414049433190857, "grad_norm": 0.9814783930778503, "learning_rate": 1.6529799721162343e-05, "loss": 0.849, "step": 17539 }, { "epoch": 3.241590782382457, "grad_norm": 0.9415474534034729, "learning_rate": 1.652175522041224e-05, "loss": 0.9087, "step": 17540 }, { "epoch": 3.241776621445828, "grad_norm": 0.9838879704475403, "learning_rate": 1.6513712501362e-05, "loss": 0.8529, "step": 17541 }, { "epoch": 3.2419624605091992, "grad_norm": 0.9883627891540527, "learning_rate": 1.650567156418331e-05, "loss": 1.0156, "step": 17542 }, { "epoch": 3.24214829957257, "grad_norm": 0.9032547473907471, "learning_rate": 1.649763240904775e-05, "loss": 0.8906, "step": 17543 }, { "epoch": 3.2423341386359414, "grad_norm": 0.9878135919570923, "learning_rate": 1.6489595036126946e-05, "loss": 0.7668, "step": 17544 }, { "epoch": 3.2425199776993123, "grad_norm": 0.9744989275932312, "learning_rate": 1.6481559445592442e-05, "loss": 0.9409, "step": 17545 }, { "epoch": 3.2427058167626837, "grad_norm": 1.0206999778747559, "learning_rate": 1.6473525637615716e-05, "loss": 0.8726, "step": 17546 }, { "epoch": 
3.2428916558260545, "grad_norm": 1.1411030292510986, "learning_rate": 1.6465493612368233e-05, "loss": 0.9333, "step": 17547 }, { "epoch": 3.243077494889426, "grad_norm": 1.3796285390853882, "learning_rate": 1.6457463370021432e-05, "loss": 0.8776, "step": 17548 }, { "epoch": 3.2432633339527968, "grad_norm": 0.7899224162101746, "learning_rate": 1.644943491074674e-05, "loss": 0.7336, "step": 17549 }, { "epoch": 3.243449173016168, "grad_norm": 0.810294508934021, "learning_rate": 1.6441408234715483e-05, "loss": 0.7711, "step": 17550 }, { "epoch": 3.243635012079539, "grad_norm": 0.9372333288192749, "learning_rate": 1.643338334209894e-05, "loss": 0.9952, "step": 17551 }, { "epoch": 3.2438208511429103, "grad_norm": 0.8857443332672119, "learning_rate": 1.6425360233068455e-05, "loss": 1.014, "step": 17552 }, { "epoch": 3.244006690206281, "grad_norm": 1.0264416933059692, "learning_rate": 1.6417338907795232e-05, "loss": 0.8121, "step": 17553 }, { "epoch": 3.2441925292696525, "grad_norm": 0.9513406157493591, "learning_rate": 1.6409319366450483e-05, "loss": 0.761, "step": 17554 }, { "epoch": 3.2443783683330234, "grad_norm": 1.558884620666504, "learning_rate": 1.6401301609205335e-05, "loss": 1.2663, "step": 17555 }, { "epoch": 3.2445642073963947, "grad_norm": 0.9729040265083313, "learning_rate": 1.639328563623097e-05, "loss": 0.6958, "step": 17556 }, { "epoch": 3.2447500464597656, "grad_norm": 0.9089725017547607, "learning_rate": 1.6385271447698425e-05, "loss": 0.7511, "step": 17557 }, { "epoch": 3.244935885523137, "grad_norm": 0.9211695790290833, "learning_rate": 1.637725904377879e-05, "loss": 0.8387, "step": 17558 }, { "epoch": 3.2451217245865083, "grad_norm": 0.8523728847503662, "learning_rate": 1.6369248424643035e-05, "loss": 0.7116, "step": 17559 }, { "epoch": 3.245307563649879, "grad_norm": 1.1124895811080933, "learning_rate": 1.6361239590462184e-05, "loss": 0.9035, "step": 17560 }, { "epoch": 3.2454934027132505, "grad_norm": 1.123880386352539, "learning_rate": 
1.6353232541407148e-05, "loss": 0.8926, "step": 17561 }, { "epoch": 3.2456792417766214, "grad_norm": 0.828389048576355, "learning_rate": 1.6345227277648812e-05, "loss": 0.6941, "step": 17562 }, { "epoch": 3.2458650808399927, "grad_norm": 0.9504824280738831, "learning_rate": 1.6337223799358026e-05, "loss": 0.7694, "step": 17563 }, { "epoch": 3.2460509199033636, "grad_norm": 0.8812553882598877, "learning_rate": 1.6329222106705643e-05, "loss": 0.6938, "step": 17564 }, { "epoch": 3.246236758966735, "grad_norm": 1.429211974143982, "learning_rate": 1.632122219986244e-05, "loss": 0.8115, "step": 17565 }, { "epoch": 3.246422598030106, "grad_norm": 1.1453877687454224, "learning_rate": 1.6313224078999145e-05, "loss": 0.8998, "step": 17566 }, { "epoch": 3.246608437093477, "grad_norm": 0.8724762797355652, "learning_rate": 1.6305227744286443e-05, "loss": 0.8391, "step": 17567 }, { "epoch": 3.246794276156848, "grad_norm": 0.8664789199829102, "learning_rate": 1.629723319589502e-05, "loss": 0.8601, "step": 17568 }, { "epoch": 3.2469801152202193, "grad_norm": 0.8182777762413025, "learning_rate": 1.6289240433995555e-05, "loss": 0.7966, "step": 17569 }, { "epoch": 3.2471659542835902, "grad_norm": 0.8636091947555542, "learning_rate": 1.6281249458758597e-05, "loss": 0.8471, "step": 17570 }, { "epoch": 3.2473517933469616, "grad_norm": 0.9435926675796509, "learning_rate": 1.6273260270354674e-05, "loss": 0.8945, "step": 17571 }, { "epoch": 3.2475376324103324, "grad_norm": 0.7587558627128601, "learning_rate": 1.626527286895435e-05, "loss": 0.6551, "step": 17572 }, { "epoch": 3.2477234714737038, "grad_norm": 0.8410842418670654, "learning_rate": 1.625728725472809e-05, "loss": 0.7667, "step": 17573 }, { "epoch": 3.247909310537075, "grad_norm": 0.9617728590965271, "learning_rate": 1.624930342784633e-05, "loss": 0.6956, "step": 17574 }, { "epoch": 3.248095149600446, "grad_norm": 0.8479878902435303, "learning_rate": 1.6241321388479424e-05, "loss": 0.8241, "step": 17575 }, { "epoch": 
3.2482809886638173, "grad_norm": 1.0032156705856323, "learning_rate": 1.6233341136797818e-05, "loss": 0.6864, "step": 17576 }, { "epoch": 3.248466827727188, "grad_norm": 0.8712114095687866, "learning_rate": 1.6225362672971788e-05, "loss": 0.883, "step": 17577 }, { "epoch": 3.2486526667905595, "grad_norm": 0.7898356318473816, "learning_rate": 1.6217385997171608e-05, "loss": 0.7023, "step": 17578 }, { "epoch": 3.2488385058539304, "grad_norm": 0.8623623847961426, "learning_rate": 1.620941110956754e-05, "loss": 0.8952, "step": 17579 }, { "epoch": 3.2490243449173017, "grad_norm": 0.857649028301239, "learning_rate": 1.620143801032984e-05, "loss": 0.7604, "step": 17580 }, { "epoch": 3.2492101839806726, "grad_norm": 1.2372016906738281, "learning_rate": 1.6193466699628622e-05, "loss": 1.1232, "step": 17581 }, { "epoch": 3.249396023044044, "grad_norm": 0.9473984241485596, "learning_rate": 1.6185497177634056e-05, "loss": 0.843, "step": 17582 }, { "epoch": 3.249581862107415, "grad_norm": 0.815261960029602, "learning_rate": 1.6177529444516194e-05, "loss": 0.7389, "step": 17583 }, { "epoch": 3.249767701170786, "grad_norm": 0.9083401560783386, "learning_rate": 1.6169563500445138e-05, "loss": 0.7564, "step": 17584 }, { "epoch": 3.249953540234157, "grad_norm": 1.1427556276321411, "learning_rate": 1.6161599345590894e-05, "loss": 0.8388, "step": 17585 }, { "epoch": 3.2501393792975284, "grad_norm": 0.8198236227035522, "learning_rate": 1.6153636980123442e-05, "loss": 0.8777, "step": 17586 }, { "epoch": 3.2503252183608993, "grad_norm": 0.9134224653244019, "learning_rate": 1.6145676404212696e-05, "loss": 0.846, "step": 17587 }, { "epoch": 3.2505110574242706, "grad_norm": 0.9644818305969238, "learning_rate": 1.6137717618028614e-05, "loss": 0.7818, "step": 17588 }, { "epoch": 3.250696896487642, "grad_norm": 0.9705414175987244, "learning_rate": 1.6129760621741008e-05, "loss": 0.8388, "step": 17589 }, { "epoch": 3.250882735551013, "grad_norm": 1.0678094625473022, "learning_rate": 
1.6121805415519763e-05, "loss": 0.8321, "step": 17590 }, { "epoch": 3.2510685746143837, "grad_norm": 0.8345924615859985, "learning_rate": 1.6113851999534613e-05, "loss": 0.7277, "step": 17591 }, { "epoch": 3.251254413677755, "grad_norm": 1.1566282510757446, "learning_rate": 1.6105900373955364e-05, "loss": 0.8567, "step": 17592 }, { "epoch": 3.2514402527411264, "grad_norm": 0.8294490575790405, "learning_rate": 1.60979505389517e-05, "loss": 0.8072, "step": 17593 }, { "epoch": 3.2516260918044972, "grad_norm": 1.0158021450042725, "learning_rate": 1.60900024946933e-05, "loss": 0.9375, "step": 17594 }, { "epoch": 3.2518119308678686, "grad_norm": 0.9069027900695801, "learning_rate": 1.6082056241349786e-05, "loss": 0.9329, "step": 17595 }, { "epoch": 3.2519977699312395, "grad_norm": 0.9600441455841064, "learning_rate": 1.6074111779090784e-05, "loss": 0.8963, "step": 17596 }, { "epoch": 3.252183608994611, "grad_norm": 1.4393479824066162, "learning_rate": 1.606616910808586e-05, "loss": 0.9299, "step": 17597 }, { "epoch": 3.2523694480579817, "grad_norm": 0.8488100171089172, "learning_rate": 1.6058228228504503e-05, "loss": 0.7866, "step": 17598 }, { "epoch": 3.252555287121353, "grad_norm": 0.9428489804267883, "learning_rate": 1.6050289140516174e-05, "loss": 0.8406, "step": 17599 }, { "epoch": 3.252741126184724, "grad_norm": 0.8999878168106079, "learning_rate": 1.6042351844290404e-05, "loss": 0.7001, "step": 17600 }, { "epoch": 3.252926965248095, "grad_norm": 0.9167279005050659, "learning_rate": 1.6034416339996562e-05, "loss": 0.754, "step": 17601 }, { "epoch": 3.253112804311466, "grad_norm": 0.9463081359863281, "learning_rate": 1.602648262780402e-05, "loss": 0.7938, "step": 17602 }, { "epoch": 3.2532986433748374, "grad_norm": 0.8923173546791077, "learning_rate": 1.6018550707882062e-05, "loss": 1.0319, "step": 17603 }, { "epoch": 3.2534844824382088, "grad_norm": 0.8322475552558899, "learning_rate": 1.6010620580400047e-05, "loss": 0.7353, "step": 17604 }, { "epoch": 
3.2536703215015796, "grad_norm": 0.8827752470970154, "learning_rate": 1.6002692245527218e-05, "loss": 0.7999, "step": 17605 }, { "epoch": 3.2538561605649505, "grad_norm": 0.9471423029899597, "learning_rate": 1.5994765703432767e-05, "loss": 0.7182, "step": 17606 }, { "epoch": 3.254041999628322, "grad_norm": 0.9410121440887451, "learning_rate": 1.5986840954285865e-05, "loss": 0.939, "step": 17607 }, { "epoch": 3.254227838691693, "grad_norm": 0.9756559729576111, "learning_rate": 1.597891799825568e-05, "loss": 0.8744, "step": 17608 }, { "epoch": 3.254413677755064, "grad_norm": 1.0734498500823975, "learning_rate": 1.597099683551132e-05, "loss": 0.8687, "step": 17609 }, { "epoch": 3.2545995168184354, "grad_norm": 1.6099714040756226, "learning_rate": 1.5963077466221808e-05, "loss": 0.9598, "step": 17610 }, { "epoch": 3.2547853558818063, "grad_norm": 0.8606815338134766, "learning_rate": 1.595515989055618e-05, "loss": 0.8517, "step": 17611 }, { "epoch": 3.2549711949451776, "grad_norm": 0.8491156697273254, "learning_rate": 1.5947244108683478e-05, "loss": 0.7487, "step": 17612 }, { "epoch": 3.2551570340085485, "grad_norm": 0.8932136297225952, "learning_rate": 1.59393301207726e-05, "loss": 0.8061, "step": 17613 }, { "epoch": 3.25534287307192, "grad_norm": 0.9115806221961975, "learning_rate": 1.5931417926992476e-05, "loss": 0.7714, "step": 17614 }, { "epoch": 3.2555287121352907, "grad_norm": 0.9725075960159302, "learning_rate": 1.592350752751194e-05, "loss": 0.9278, "step": 17615 }, { "epoch": 3.255714551198662, "grad_norm": 0.9991533160209656, "learning_rate": 1.5915598922499875e-05, "loss": 0.6981, "step": 17616 }, { "epoch": 3.255900390262033, "grad_norm": 0.925247311592102, "learning_rate": 1.590769211212506e-05, "loss": 0.8375, "step": 17617 }, { "epoch": 3.2560862293254043, "grad_norm": 1.1035805940628052, "learning_rate": 1.589978709655625e-05, "loss": 0.8186, "step": 17618 }, { "epoch": 3.256272068388775, "grad_norm": 0.928608238697052, "learning_rate": 
1.5891883875962134e-05, "loss": 0.854, "step": 17619 }, { "epoch": 3.2564579074521465, "grad_norm": 0.9559894800186157, "learning_rate": 1.5883982450511427e-05, "loss": 1.0359, "step": 17620 }, { "epoch": 3.2566437465155174, "grad_norm": 1.0768336057662964, "learning_rate": 1.5876082820372785e-05, "loss": 0.623, "step": 17621 }, { "epoch": 3.2568295855788887, "grad_norm": 0.9776822924613953, "learning_rate": 1.5868184985714797e-05, "loss": 0.8205, "step": 17622 }, { "epoch": 3.25701542464226, "grad_norm": 0.8623474836349487, "learning_rate": 1.5860288946706003e-05, "loss": 0.5707, "step": 17623 }, { "epoch": 3.257201263705631, "grad_norm": 0.9123068451881409, "learning_rate": 1.5852394703514972e-05, "loss": 0.8048, "step": 17624 }, { "epoch": 3.2573871027690022, "grad_norm": 1.2158424854278564, "learning_rate": 1.5844502256310167e-05, "loss": 0.8853, "step": 17625 }, { "epoch": 3.257572941832373, "grad_norm": 0.7986825108528137, "learning_rate": 1.5836611605260054e-05, "loss": 0.7819, "step": 17626 }, { "epoch": 3.2577587808957444, "grad_norm": 1.1961933374404907, "learning_rate": 1.5828722750533008e-05, "loss": 0.9998, "step": 17627 }, { "epoch": 3.2579446199591153, "grad_norm": 1.1025460958480835, "learning_rate": 1.5820835692297455e-05, "loss": 0.6594, "step": 17628 }, { "epoch": 3.2581304590224867, "grad_norm": 1.039323091506958, "learning_rate": 1.5812950430721708e-05, "loss": 0.885, "step": 17629 }, { "epoch": 3.2583162980858575, "grad_norm": 0.849334716796875, "learning_rate": 1.5805066965974058e-05, "loss": 0.7472, "step": 17630 }, { "epoch": 3.258502137149229, "grad_norm": 0.9839600920677185, "learning_rate": 1.5797185298222727e-05, "loss": 0.8077, "step": 17631 }, { "epoch": 3.2586879762125998, "grad_norm": 0.8936685919761658, "learning_rate": 1.5789305427636027e-05, "loss": 0.7506, "step": 17632 }, { "epoch": 3.258873815275971, "grad_norm": 1.0029278993606567, "learning_rate": 1.5781427354382083e-05, "loss": 0.7903, "step": 17633 }, { "epoch": 
3.259059654339342, "grad_norm": 1.0882457494735718, "learning_rate": 1.5773551078629056e-05, "loss": 0.7335, "step": 17634 }, { "epoch": 3.2592454934027133, "grad_norm": 0.8651626706123352, "learning_rate": 1.576567660054502e-05, "loss": 0.8915, "step": 17635 }, { "epoch": 3.259431332466084, "grad_norm": 1.1022568941116333, "learning_rate": 1.5757803920298087e-05, "loss": 0.8194, "step": 17636 }, { "epoch": 3.2596171715294555, "grad_norm": 0.8831328749656677, "learning_rate": 1.5749933038056265e-05, "loss": 1.0054, "step": 17637 }, { "epoch": 3.259803010592827, "grad_norm": 0.910919725894928, "learning_rate": 1.5742063953987528e-05, "loss": 0.8578, "step": 17638 }, { "epoch": 3.2599888496561977, "grad_norm": 0.8606796264648438, "learning_rate": 1.573419666825984e-05, "loss": 0.6577, "step": 17639 }, { "epoch": 3.2601746887195686, "grad_norm": 0.8369166851043701, "learning_rate": 1.5726331181041133e-05, "loss": 0.7804, "step": 17640 }, { "epoch": 3.26036052778294, "grad_norm": 0.8461362719535828, "learning_rate": 1.5718467492499245e-05, "loss": 0.7175, "step": 17641 }, { "epoch": 3.2605463668463113, "grad_norm": 1.1136691570281982, "learning_rate": 1.5710605602802065e-05, "loss": 0.9438, "step": 17642 }, { "epoch": 3.260732205909682, "grad_norm": 0.7843376994132996, "learning_rate": 1.5702745512117324e-05, "loss": 0.7915, "step": 17643 }, { "epoch": 3.2609180449730535, "grad_norm": 0.9372939467430115, "learning_rate": 1.5694887220612854e-05, "loss": 0.7961, "step": 17644 }, { "epoch": 3.2611038840364244, "grad_norm": 0.9335784316062927, "learning_rate": 1.5687030728456344e-05, "loss": 0.831, "step": 17645 }, { "epoch": 3.2612897230997957, "grad_norm": 0.8994518518447876, "learning_rate": 1.567917603581547e-05, "loss": 0.6857, "step": 17646 }, { "epoch": 3.2614755621631666, "grad_norm": 1.0384033918380737, "learning_rate": 1.5671323142857863e-05, "loss": 0.8729, "step": 17647 }, { "epoch": 3.261661401226538, "grad_norm": 0.9693853855133057, "learning_rate": 
1.566347204975117e-05, "loss": 1.0206, "step": 17648 }, { "epoch": 3.261847240289909, "grad_norm": 0.943858802318573, "learning_rate": 1.5655622756662948e-05, "loss": 0.8711, "step": 17649 }, { "epoch": 3.26203307935328, "grad_norm": 0.8209655284881592, "learning_rate": 1.5647775263760687e-05, "loss": 0.8619, "step": 17650 }, { "epoch": 3.262218918416651, "grad_norm": 0.8807152509689331, "learning_rate": 1.563992957121193e-05, "loss": 0.8328, "step": 17651 }, { "epoch": 3.2624047574800223, "grad_norm": 0.8838180303573608, "learning_rate": 1.5632085679184093e-05, "loss": 0.5347, "step": 17652 }, { "epoch": 3.2625905965433932, "grad_norm": 0.8919572234153748, "learning_rate": 1.5624243587844622e-05, "loss": 1.106, "step": 17653 }, { "epoch": 3.2627764356067646, "grad_norm": 0.8996062278747559, "learning_rate": 1.561640329736088e-05, "loss": 0.8418, "step": 17654 }, { "epoch": 3.2629622746701354, "grad_norm": 0.9466404914855957, "learning_rate": 1.5608564807900184e-05, "loss": 1.0323, "step": 17655 }, { "epoch": 3.2631481137335068, "grad_norm": 1.0202292203903198, "learning_rate": 1.560072811962987e-05, "loss": 0.9618, "step": 17656 }, { "epoch": 3.263333952796878, "grad_norm": 0.9965801239013672, "learning_rate": 1.5592893232717175e-05, "loss": 0.7936, "step": 17657 }, { "epoch": 3.263519791860249, "grad_norm": 0.7998269200325012, "learning_rate": 1.558506014732932e-05, "loss": 0.7537, "step": 17658 }, { "epoch": 3.2637056309236203, "grad_norm": 0.9901534914970398, "learning_rate": 1.5577228863633486e-05, "loss": 0.7394, "step": 17659 }, { "epoch": 3.263891469986991, "grad_norm": 0.8547446727752686, "learning_rate": 1.5569399381796846e-05, "loss": 0.6628, "step": 17660 }, { "epoch": 3.2640773090503625, "grad_norm": 1.1768836975097656, "learning_rate": 1.5561571701986477e-05, "loss": 0.7231, "step": 17661 }, { "epoch": 3.2642631481137334, "grad_norm": 0.9869349598884583, "learning_rate": 1.555374582436945e-05, "loss": 0.6661, "step": 17662 }, { "epoch": 
3.2644489871771047, "grad_norm": 1.0868933200836182, "learning_rate": 1.554592174911279e-05, "loss": 0.8784, "step": 17663 }, { "epoch": 3.2646348262404756, "grad_norm": 0.8081589341163635, "learning_rate": 1.5538099476383527e-05, "loss": 0.6805, "step": 17664 }, { "epoch": 3.264820665303847, "grad_norm": 0.9574329257011414, "learning_rate": 1.5530279006348592e-05, "loss": 0.8514, "step": 17665 }, { "epoch": 3.265006504367218, "grad_norm": 0.883391797542572, "learning_rate": 1.5522460339174892e-05, "loss": 0.8386, "step": 17666 }, { "epoch": 3.265192343430589, "grad_norm": 0.9138352274894714, "learning_rate": 1.551464347502929e-05, "loss": 0.8067, "step": 17667 }, { "epoch": 3.26537818249396, "grad_norm": 0.8999261260032654, "learning_rate": 1.5506828414078655e-05, "loss": 0.8958, "step": 17668 }, { "epoch": 3.2655640215573314, "grad_norm": 1.483202338218689, "learning_rate": 1.5499015156489784e-05, "loss": 1.3382, "step": 17669 }, { "epoch": 3.2657498606207023, "grad_norm": 0.9040524959564209, "learning_rate": 1.549120370242939e-05, "loss": 0.9632, "step": 17670 }, { "epoch": 3.2659356996840736, "grad_norm": 0.7727354168891907, "learning_rate": 1.548339405206426e-05, "loss": 0.7169, "step": 17671 }, { "epoch": 3.266121538747445, "grad_norm": 0.8451464772224426, "learning_rate": 1.5475586205561042e-05, "loss": 0.7764, "step": 17672 }, { "epoch": 3.266307377810816, "grad_norm": 0.8612467646598816, "learning_rate": 1.5467780163086365e-05, "loss": 0.9192, "step": 17673 }, { "epoch": 3.266493216874187, "grad_norm": 0.7170829772949219, "learning_rate": 1.5459975924806878e-05, "loss": 0.438, "step": 17674 }, { "epoch": 3.266679055937558, "grad_norm": 0.9012123942375183, "learning_rate": 1.545217349088911e-05, "loss": 0.7104, "step": 17675 }, { "epoch": 3.2668648950009294, "grad_norm": 0.8574148416519165, "learning_rate": 1.5444372861499634e-05, "loss": 0.8781, "step": 17676 }, { "epoch": 3.2670507340643002, "grad_norm": 1.2092663049697876, "learning_rate": 
1.543657403680492e-05, "loss": 0.9431, "step": 17677 }, { "epoch": 3.2672365731276716, "grad_norm": 0.9819631576538086, "learning_rate": 1.5428777016971407e-05, "loss": 0.7433, "step": 17678 }, { "epoch": 3.2674224121910425, "grad_norm": 0.8612439632415771, "learning_rate": 1.5420981802165502e-05, "loss": 1.0082, "step": 17679 }, { "epoch": 3.267608251254414, "grad_norm": 1.0462713241577148, "learning_rate": 1.5413188392553612e-05, "loss": 0.8537, "step": 17680 }, { "epoch": 3.2677940903177847, "grad_norm": 0.8656796813011169, "learning_rate": 1.5405396788302064e-05, "loss": 0.8139, "step": 17681 }, { "epoch": 3.267979929381156, "grad_norm": 1.0041792392730713, "learning_rate": 1.539760698957713e-05, "loss": 0.6557, "step": 17682 }, { "epoch": 3.268165768444527, "grad_norm": 1.0172066688537598, "learning_rate": 1.538981899654508e-05, "loss": 0.9778, "step": 17683 }, { "epoch": 3.268351607507898, "grad_norm": 0.901366651058197, "learning_rate": 1.5382032809372182e-05, "loss": 0.7569, "step": 17684 }, { "epoch": 3.268537446571269, "grad_norm": 0.8727717399597168, "learning_rate": 1.537424842822458e-05, "loss": 0.7546, "step": 17685 }, { "epoch": 3.2687232856346404, "grad_norm": 0.9215835928916931, "learning_rate": 1.5366465853268418e-05, "loss": 0.763, "step": 17686 }, { "epoch": 3.2689091246980118, "grad_norm": 0.9440967440605164, "learning_rate": 1.5358685084669788e-05, "loss": 0.8188, "step": 17687 }, { "epoch": 3.2690949637613826, "grad_norm": 0.9095939993858337, "learning_rate": 1.53509061225948e-05, "loss": 0.9214, "step": 17688 }, { "epoch": 3.2692808028247535, "grad_norm": 1.0717787742614746, "learning_rate": 1.5343128967209452e-05, "loss": 1.0171, "step": 17689 }, { "epoch": 3.269466641888125, "grad_norm": 1.1467723846435547, "learning_rate": 1.5335353618679716e-05, "loss": 0.8464, "step": 17690 }, { "epoch": 3.269652480951496, "grad_norm": 0.6764289140701294, "learning_rate": 1.5327580077171587e-05, "loss": 0.4945, "step": 17691 }, { "epoch": 
3.269838320014867, "grad_norm": 1.0119014978408813, "learning_rate": 1.5319808342850962e-05, "loss": 0.7477, "step": 17692 }, { "epoch": 3.2700241590782384, "grad_norm": 0.9415217638015747, "learning_rate": 1.5312038415883713e-05, "loss": 0.8812, "step": 17693 }, { "epoch": 3.2702099981416093, "grad_norm": 1.0114682912826538, "learning_rate": 1.5304270296435643e-05, "loss": 0.9665, "step": 17694 }, { "epoch": 3.2703958372049806, "grad_norm": 0.8940282464027405, "learning_rate": 1.5296503984672582e-05, "loss": 0.9342, "step": 17695 }, { "epoch": 3.2705816762683515, "grad_norm": 1.0693212747573853, "learning_rate": 1.5288739480760317e-05, "loss": 0.81, "step": 17696 }, { "epoch": 3.270767515331723, "grad_norm": 1.014376163482666, "learning_rate": 1.528097678486453e-05, "loss": 0.8339, "step": 17697 }, { "epoch": 3.2709533543950937, "grad_norm": 0.9386370182037354, "learning_rate": 1.5273215897150917e-05, "loss": 0.7358, "step": 17698 }, { "epoch": 3.271139193458465, "grad_norm": 1.4261482954025269, "learning_rate": 1.5265456817785084e-05, "loss": 1.314, "step": 17699 }, { "epoch": 3.271325032521836, "grad_norm": 1.1061711311340332, "learning_rate": 1.5257699546932691e-05, "loss": 0.8965, "step": 17700 }, { "epoch": 3.2715108715852073, "grad_norm": 1.0086822509765625, "learning_rate": 1.5249944084759272e-05, "loss": 0.8394, "step": 17701 }, { "epoch": 3.271696710648578, "grad_norm": 0.904879093170166, "learning_rate": 1.5242190431430347e-05, "loss": 0.8432, "step": 17702 }, { "epoch": 3.2718825497119495, "grad_norm": 1.4062803983688354, "learning_rate": 1.5234438587111433e-05, "loss": 0.6721, "step": 17703 }, { "epoch": 3.2720683887753204, "grad_norm": 0.9530239105224609, "learning_rate": 1.5226688551967937e-05, "loss": 0.7427, "step": 17704 }, { "epoch": 3.2722542278386917, "grad_norm": 1.1069839000701904, "learning_rate": 1.521894032616532e-05, "loss": 0.848, "step": 17705 }, { "epoch": 3.272440066902063, "grad_norm": 0.9540766477584839, "learning_rate": 
1.5211193909868936e-05, "loss": 0.8806, "step": 17706 }, { "epoch": 3.272625905965434, "grad_norm": 0.9043301939964294, "learning_rate": 1.5203449303244099e-05, "loss": 0.7207, "step": 17707 }, { "epoch": 3.2728117450288052, "grad_norm": 1.205649733543396, "learning_rate": 1.5195706506456131e-05, "loss": 0.8976, "step": 17708 }, { "epoch": 3.272997584092176, "grad_norm": 0.8512119054794312, "learning_rate": 1.5187965519670289e-05, "loss": 0.8219, "step": 17709 }, { "epoch": 3.2731834231555474, "grad_norm": 1.1259433031082153, "learning_rate": 1.518022634305175e-05, "loss": 0.7307, "step": 17710 }, { "epoch": 3.2733692622189183, "grad_norm": 0.9172109961509705, "learning_rate": 1.5172488976765741e-05, "loss": 0.868, "step": 17711 }, { "epoch": 3.2735551012822897, "grad_norm": 0.8422805070877075, "learning_rate": 1.5164753420977395e-05, "loss": 0.9534, "step": 17712 }, { "epoch": 3.2737409403456605, "grad_norm": 0.9498412013053894, "learning_rate": 1.5157019675851803e-05, "loss": 0.763, "step": 17713 }, { "epoch": 3.273926779409032, "grad_norm": 0.9260938763618469, "learning_rate": 1.5149287741553996e-05, "loss": 0.8064, "step": 17714 }, { "epoch": 3.2741126184724028, "grad_norm": 0.8831002712249756, "learning_rate": 1.5141557618249036e-05, "loss": 0.8015, "step": 17715 }, { "epoch": 3.274298457535774, "grad_norm": 1.01204252243042, "learning_rate": 1.5133829306101932e-05, "loss": 0.6571, "step": 17716 }, { "epoch": 3.274484296599145, "grad_norm": 1.1708745956420898, "learning_rate": 1.5126102805277609e-05, "loss": 0.5718, "step": 17717 }, { "epoch": 3.2746701356625163, "grad_norm": 0.9031075835227966, "learning_rate": 1.5118378115940968e-05, "loss": 0.6955, "step": 17718 }, { "epoch": 3.274855974725887, "grad_norm": 0.9439001083374023, "learning_rate": 1.5110655238256866e-05, "loss": 0.8054, "step": 17719 }, { "epoch": 3.2750418137892585, "grad_norm": 0.7462693452835083, "learning_rate": 1.510293417239017e-05, "loss": 0.5632, "step": 17720 }, { "epoch": 
3.27522765285263, "grad_norm": 1.0460938215255737, "learning_rate": 1.509521491850565e-05, "loss": 0.7548, "step": 17721 }, { "epoch": 3.2754134919160007, "grad_norm": 0.8902409076690674, "learning_rate": 1.5087497476768054e-05, "loss": 0.6419, "step": 17722 }, { "epoch": 3.2755993309793716, "grad_norm": 0.8912127017974854, "learning_rate": 1.5079781847342123e-05, "loss": 0.8265, "step": 17723 }, { "epoch": 3.275785170042743, "grad_norm": 1.0797715187072754, "learning_rate": 1.5072068030392516e-05, "loss": 0.8039, "step": 17724 }, { "epoch": 3.2759710091061143, "grad_norm": 0.9005500674247742, "learning_rate": 1.5064356026083858e-05, "loss": 0.8328, "step": 17725 }, { "epoch": 3.276156848169485, "grad_norm": 1.153085470199585, "learning_rate": 1.5056645834580784e-05, "loss": 0.8964, "step": 17726 }, { "epoch": 3.2763426872328565, "grad_norm": 0.981286346912384, "learning_rate": 1.5048937456047818e-05, "loss": 0.7963, "step": 17727 }, { "epoch": 3.2765285262962274, "grad_norm": 0.9868923425674438, "learning_rate": 1.5041230890649526e-05, "loss": 0.8792, "step": 17728 }, { "epoch": 3.2767143653595987, "grad_norm": 0.8343085646629333, "learning_rate": 1.5033526138550359e-05, "loss": 0.858, "step": 17729 }, { "epoch": 3.2769002044229696, "grad_norm": 0.832370936870575, "learning_rate": 1.5025823199914747e-05, "loss": 0.6926, "step": 17730 }, { "epoch": 3.277086043486341, "grad_norm": 0.901152491569519, "learning_rate": 1.5018122074907136e-05, "loss": 0.8358, "step": 17731 }, { "epoch": 3.277271882549712, "grad_norm": 0.8559943437576294, "learning_rate": 1.5010422763691878e-05, "loss": 0.5558, "step": 17732 }, { "epoch": 3.277457721613083, "grad_norm": 0.8962408304214478, "learning_rate": 1.5002725266433303e-05, "loss": 0.7495, "step": 17733 }, { "epoch": 3.277643560676454, "grad_norm": 0.9491870999336243, "learning_rate": 1.4995029583295672e-05, "loss": 0.7545, "step": 17734 }, { "epoch": 3.2778293997398253, "grad_norm": 0.9699558615684509, "learning_rate": 
1.4987335714443274e-05, "loss": 0.8994, "step": 17735 }, { "epoch": 3.2780152388031967, "grad_norm": 0.9728564023971558, "learning_rate": 1.49796436600403e-05, "loss": 0.8333, "step": 17736 }, { "epoch": 3.2782010778665676, "grad_norm": 0.984086275100708, "learning_rate": 1.497195342025094e-05, "loss": 0.8111, "step": 17737 }, { "epoch": 3.2783869169299384, "grad_norm": 1.0738487243652344, "learning_rate": 1.496426499523933e-05, "loss": 1.048, "step": 17738 }, { "epoch": 3.2785727559933098, "grad_norm": 1.0294325351715088, "learning_rate": 1.4956578385169528e-05, "loss": 0.7576, "step": 17739 }, { "epoch": 3.278758595056681, "grad_norm": 0.9861834645271301, "learning_rate": 1.4948893590205637e-05, "loss": 0.7033, "step": 17740 }, { "epoch": 3.278944434120052, "grad_norm": 1.0322951078414917, "learning_rate": 1.494121061051167e-05, "loss": 0.6938, "step": 17741 }, { "epoch": 3.2791302731834233, "grad_norm": 0.7800756692886353, "learning_rate": 1.4933529446251571e-05, "loss": 0.636, "step": 17742 }, { "epoch": 3.279316112246794, "grad_norm": 0.9137340784072876, "learning_rate": 1.4925850097589323e-05, "loss": 0.8715, "step": 17743 }, { "epoch": 3.2795019513101655, "grad_norm": 0.8566145896911621, "learning_rate": 1.4918172564688815e-05, "loss": 0.7664, "step": 17744 }, { "epoch": 3.2796877903735364, "grad_norm": 0.8737754821777344, "learning_rate": 1.4910496847713906e-05, "loss": 0.7809, "step": 17745 }, { "epoch": 3.2798736294369077, "grad_norm": 0.9414018988609314, "learning_rate": 1.4902822946828398e-05, "loss": 0.7845, "step": 17746 }, { "epoch": 3.2800594685002786, "grad_norm": 0.845181405544281, "learning_rate": 1.4895150862196105e-05, "loss": 0.8409, "step": 17747 }, { "epoch": 3.28024530756365, "grad_norm": 0.9034122228622437, "learning_rate": 1.4887480593980797e-05, "loss": 0.8416, "step": 17748 }, { "epoch": 3.280431146627021, "grad_norm": 0.8876949548721313, "learning_rate": 1.487981214234615e-05, "loss": 0.9379, "step": 17749 }, { "epoch": 
3.280616985690392, "grad_norm": 0.8697424530982971, "learning_rate": 1.4872145507455825e-05, "loss": 0.713, "step": 17750 }, { "epoch": 3.280802824753763, "grad_norm": 0.8058562278747559, "learning_rate": 1.486448068947348e-05, "loss": 0.7265, "step": 17751 }, { "epoch": 3.2809886638171344, "grad_norm": 0.9465721249580383, "learning_rate": 1.4856817688562708e-05, "loss": 0.6551, "step": 17752 }, { "epoch": 3.2811745028805053, "grad_norm": 1.1225144863128662, "learning_rate": 1.4849156504887053e-05, "loss": 0.8225, "step": 17753 }, { "epoch": 3.2813603419438766, "grad_norm": 1.046244740486145, "learning_rate": 1.4841497138609995e-05, "loss": 0.7533, "step": 17754 }, { "epoch": 3.281546181007248, "grad_norm": 0.9831907749176025, "learning_rate": 1.4833839589895071e-05, "loss": 0.7502, "step": 17755 }, { "epoch": 3.281732020070619, "grad_norm": 0.998605489730835, "learning_rate": 1.4826183858905685e-05, "loss": 0.7527, "step": 17756 }, { "epoch": 3.28191785913399, "grad_norm": 0.9687653183937073, "learning_rate": 1.4818529945805227e-05, "loss": 0.7545, "step": 17757 }, { "epoch": 3.282103698197361, "grad_norm": 1.0033705234527588, "learning_rate": 1.4810877850757088e-05, "loss": 0.9128, "step": 17758 }, { "epoch": 3.2822895372607324, "grad_norm": 0.9830369353294373, "learning_rate": 1.4803227573924539e-05, "loss": 0.8042, "step": 17759 }, { "epoch": 3.2824753763241032, "grad_norm": 0.9130050539970398, "learning_rate": 1.4795579115470925e-05, "loss": 0.8025, "step": 17760 }, { "epoch": 3.2826612153874746, "grad_norm": 0.6415044069290161, "learning_rate": 1.4787932475559462e-05, "loss": 0.3121, "step": 17761 }, { "epoch": 3.2828470544508455, "grad_norm": 0.8551591634750366, "learning_rate": 1.478028765435332e-05, "loss": 0.8739, "step": 17762 }, { "epoch": 3.283032893514217, "grad_norm": 1.0814300775527954, "learning_rate": 1.477264465201572e-05, "loss": 0.8861, "step": 17763 }, { "epoch": 3.2832187325775877, "grad_norm": 0.9738458395004272, "learning_rate": 
1.476500346870977e-05, "loss": 0.8235, "step": 17764 }, { "epoch": 3.283404571640959, "grad_norm": 0.9195381999015808, "learning_rate": 1.4757364104598547e-05, "loss": 0.9812, "step": 17765 }, { "epoch": 3.28359041070433, "grad_norm": 0.8763140439987183, "learning_rate": 1.4749726559845078e-05, "loss": 0.9033, "step": 17766 }, { "epoch": 3.283776249767701, "grad_norm": 0.9802149534225464, "learning_rate": 1.474209083461241e-05, "loss": 0.9211, "step": 17767 }, { "epoch": 3.283962088831072, "grad_norm": 2.1971049308776855, "learning_rate": 1.4734456929063523e-05, "loss": 1.2841, "step": 17768 }, { "epoch": 3.2841479278944434, "grad_norm": 1.1380846500396729, "learning_rate": 1.472682484336133e-05, "loss": 1.0558, "step": 17769 }, { "epoch": 3.2843337669578148, "grad_norm": 0.9155475497245789, "learning_rate": 1.4719194577668704e-05, "loss": 0.8186, "step": 17770 }, { "epoch": 3.2845196060211856, "grad_norm": 0.9848891496658325, "learning_rate": 1.4711566132148536e-05, "loss": 0.7564, "step": 17771 }, { "epoch": 3.2847054450845565, "grad_norm": 0.9461994171142578, "learning_rate": 1.4703939506963637e-05, "loss": 0.8314, "step": 17772 }, { "epoch": 3.284891284147928, "grad_norm": 1.004651665687561, "learning_rate": 1.469631470227678e-05, "loss": 0.8203, "step": 17773 }, { "epoch": 3.285077123211299, "grad_norm": 0.7294480800628662, "learning_rate": 1.4688691718250669e-05, "loss": 0.7021, "step": 17774 }, { "epoch": 3.28526296227467, "grad_norm": 1.2479922771453857, "learning_rate": 1.468107055504806e-05, "loss": 1.0149, "step": 17775 }, { "epoch": 3.2854488013380414, "grad_norm": 0.9774560332298279, "learning_rate": 1.4673451212831579e-05, "loss": 0.8686, "step": 17776 }, { "epoch": 3.2856346404014123, "grad_norm": 1.1198142766952515, "learning_rate": 1.4665833691763864e-05, "loss": 0.8014, "step": 17777 }, { "epoch": 3.2858204794647836, "grad_norm": 1.036876916885376, "learning_rate": 1.4658217992007462e-05, "loss": 0.7655, "step": 17778 }, { "epoch": 
3.2860063185281545, "grad_norm": 1.0837442874908447, "learning_rate": 1.4650604113724953e-05, "loss": 0.9843, "step": 17779 }, { "epoch": 3.286192157591526, "grad_norm": 0.9364877343177795, "learning_rate": 1.4642992057078853e-05, "loss": 0.739, "step": 17780 }, { "epoch": 3.2863779966548967, "grad_norm": 1.0207990407943726, "learning_rate": 1.4635381822231597e-05, "loss": 0.8071, "step": 17781 }, { "epoch": 3.286563835718268, "grad_norm": 0.8080578446388245, "learning_rate": 1.4627773409345613e-05, "loss": 0.8426, "step": 17782 }, { "epoch": 3.286749674781639, "grad_norm": 0.7918444871902466, "learning_rate": 1.4620166818583324e-05, "loss": 0.6927, "step": 17783 }, { "epoch": 3.2869355138450103, "grad_norm": 1.3243615627288818, "learning_rate": 1.4612562050107048e-05, "loss": 0.9785, "step": 17784 }, { "epoch": 3.2871213529083816, "grad_norm": 0.8511014580726624, "learning_rate": 1.4604959104079119e-05, "loss": 0.7664, "step": 17785 }, { "epoch": 3.2873071919717525, "grad_norm": 1.721552848815918, "learning_rate": 1.4597357980661764e-05, "loss": 0.8782, "step": 17786 }, { "epoch": 3.2874930310351234, "grad_norm": 1.078242301940918, "learning_rate": 1.4589758680017263e-05, "loss": 0.8383, "step": 17787 }, { "epoch": 3.2876788700984947, "grad_norm": 0.9365386962890625, "learning_rate": 1.4582161202307776e-05, "loss": 0.6878, "step": 17788 }, { "epoch": 3.287864709161866, "grad_norm": 0.6306977272033691, "learning_rate": 1.4574565547695495e-05, "loss": 0.4308, "step": 17789 }, { "epoch": 3.288050548225237, "grad_norm": 0.8376436829566956, "learning_rate": 1.4566971716342493e-05, "loss": 0.9278, "step": 17790 }, { "epoch": 3.2882363872886082, "grad_norm": 0.8800222873687744, "learning_rate": 1.4559379708410904e-05, "loss": 0.7149, "step": 17791 }, { "epoch": 3.288422226351979, "grad_norm": 0.9601709842681885, "learning_rate": 1.455178952406272e-05, "loss": 0.9445, "step": 17792 }, { "epoch": 3.2886080654153504, "grad_norm": 1.3182393312454224, "learning_rate": 
1.4544201163459959e-05, "loss": 0.7908, "step": 17793 }, { "epoch": 3.2887939044787213, "grad_norm": 0.8644195795059204, "learning_rate": 1.4536614626764556e-05, "loss": 0.5513, "step": 17794 }, { "epoch": 3.2889797435420927, "grad_norm": 1.2175333499908447, "learning_rate": 1.452902991413847e-05, "loss": 0.7819, "step": 17795 }, { "epoch": 3.2891655826054635, "grad_norm": 0.7996311187744141, "learning_rate": 1.4521447025743572e-05, "loss": 0.9284, "step": 17796 }, { "epoch": 3.289351421668835, "grad_norm": 0.7490044236183167, "learning_rate": 1.4513865961741702e-05, "loss": 0.5596, "step": 17797 }, { "epoch": 3.2895372607322058, "grad_norm": 1.0699917078018188, "learning_rate": 1.4506286722294627e-05, "loss": 0.6434, "step": 17798 }, { "epoch": 3.289723099795577, "grad_norm": 0.8811341524124146, "learning_rate": 1.4498709307564162e-05, "loss": 0.932, "step": 17799 }, { "epoch": 3.289908938858948, "grad_norm": 0.9763567447662354, "learning_rate": 1.4491133717712036e-05, "loss": 0.6995, "step": 17800 }, { "epoch": 3.2900947779223193, "grad_norm": 0.79994797706604, "learning_rate": 1.4483559952899929e-05, "loss": 0.7669, "step": 17801 }, { "epoch": 3.29028061698569, "grad_norm": 0.8885137438774109, "learning_rate": 1.4475988013289454e-05, "loss": 0.9014, "step": 17802 }, { "epoch": 3.2904664560490615, "grad_norm": 1.0276567935943604, "learning_rate": 1.4468417899042275e-05, "loss": 0.8182, "step": 17803 }, { "epoch": 3.290652295112433, "grad_norm": 0.9631934762001038, "learning_rate": 1.4460849610319937e-05, "loss": 0.7452, "step": 17804 }, { "epoch": 3.2908381341758037, "grad_norm": 1.0075114965438843, "learning_rate": 1.4453283147283969e-05, "loss": 0.7062, "step": 17805 }, { "epoch": 3.291023973239175, "grad_norm": 0.9544219970703125, "learning_rate": 1.4445718510095852e-05, "loss": 0.7634, "step": 17806 }, { "epoch": 3.291209812302546, "grad_norm": 0.8006826043128967, "learning_rate": 1.4438155698917077e-05, "loss": 0.7816, "step": 17807 }, { "epoch": 
3.2913956513659173, "grad_norm": 0.8424093127250671, "learning_rate": 1.443059471390904e-05, "loss": 0.8006, "step": 17808 }, { "epoch": 3.291581490429288, "grad_norm": 0.9989455342292786, "learning_rate": 1.4423035555233088e-05, "loss": 0.6723, "step": 17809 }, { "epoch": 3.2917673294926595, "grad_norm": 1.9413286447525024, "learning_rate": 1.4415478223050593e-05, "loss": 1.1197, "step": 17810 }, { "epoch": 3.2919531685560304, "grad_norm": 1.029294729232788, "learning_rate": 1.440792271752287e-05, "loss": 1.0826, "step": 17811 }, { "epoch": 3.2921390076194017, "grad_norm": 0.8019607663154602, "learning_rate": 1.4400369038811145e-05, "loss": 0.7248, "step": 17812 }, { "epoch": 3.2923248466827726, "grad_norm": 1.039353370666504, "learning_rate": 1.4392817187076658e-05, "loss": 1.0597, "step": 17813 }, { "epoch": 3.292510685746144, "grad_norm": 0.9127644300460815, "learning_rate": 1.4385267162480554e-05, "loss": 0.771, "step": 17814 }, { "epoch": 3.292696524809515, "grad_norm": 0.8197526931762695, "learning_rate": 1.4377718965184017e-05, "loss": 0.7415, "step": 17815 }, { "epoch": 3.292882363872886, "grad_norm": 1.0656996965408325, "learning_rate": 1.437017259534814e-05, "loss": 1.028, "step": 17816 }, { "epoch": 3.293068202936257, "grad_norm": 1.1229791641235352, "learning_rate": 1.4362628053133986e-05, "loss": 0.7646, "step": 17817 }, { "epoch": 3.2932540419996283, "grad_norm": 0.8567220568656921, "learning_rate": 1.4355085338702556e-05, "loss": 0.8046, "step": 17818 }, { "epoch": 3.2934398810629997, "grad_norm": 0.9808992743492126, "learning_rate": 1.4347544452214868e-05, "loss": 0.8, "step": 17819 }, { "epoch": 3.2936257201263706, "grad_norm": 0.7619749903678894, "learning_rate": 1.4340005393831845e-05, "loss": 0.6574, "step": 17820 }, { "epoch": 3.2938115591897414, "grad_norm": 1.034301519393921, "learning_rate": 1.433246816371443e-05, "loss": 0.9074, "step": 17821 }, { "epoch": 3.2939973982531128, "grad_norm": 0.8320847153663635, "learning_rate": 
1.4324932762023447e-05, "loss": 0.728, "step": 17822 }, { "epoch": 3.294183237316484, "grad_norm": 0.8412254452705383, "learning_rate": 1.4317399188919767e-05, "loss": 0.6114, "step": 17823 }, { "epoch": 3.294369076379855, "grad_norm": 1.0957362651824951, "learning_rate": 1.4309867444564173e-05, "loss": 0.8073, "step": 17824 }, { "epoch": 3.2945549154432263, "grad_norm": 0.9523847699165344, "learning_rate": 1.4302337529117404e-05, "loss": 0.6516, "step": 17825 }, { "epoch": 3.294740754506597, "grad_norm": 1.0757582187652588, "learning_rate": 1.4294809442740153e-05, "loss": 0.9843, "step": 17826 }, { "epoch": 3.2949265935699685, "grad_norm": 1.077390193939209, "learning_rate": 1.4287283185593148e-05, "loss": 0.9113, "step": 17827 }, { "epoch": 3.2951124326333394, "grad_norm": 0.9525008797645569, "learning_rate": 1.4279758757836992e-05, "loss": 0.9461, "step": 17828 }, { "epoch": 3.2952982716967107, "grad_norm": 0.9313654899597168, "learning_rate": 1.427223615963228e-05, "loss": 0.7244, "step": 17829 }, { "epoch": 3.2954841107600816, "grad_norm": 0.8749706149101257, "learning_rate": 1.4264715391139527e-05, "loss": 0.8138, "step": 17830 }, { "epoch": 3.295669949823453, "grad_norm": 0.92448890209198, "learning_rate": 1.425719645251934e-05, "loss": 0.8129, "step": 17831 }, { "epoch": 3.295855788886824, "grad_norm": 1.0686804056167603, "learning_rate": 1.4249679343932154e-05, "loss": 0.7959, "step": 17832 }, { "epoch": 3.296041627950195, "grad_norm": 0.9681421518325806, "learning_rate": 1.424216406553841e-05, "loss": 1.0192, "step": 17833 }, { "epoch": 3.296227467013566, "grad_norm": 0.9680215120315552, "learning_rate": 1.423465061749849e-05, "loss": 1.0388, "step": 17834 }, { "epoch": 3.2964133060769374, "grad_norm": 1.0418201684951782, "learning_rate": 1.42271389999728e-05, "loss": 0.923, "step": 17835 }, { "epoch": 3.2965991451403083, "grad_norm": 1.0079830884933472, "learning_rate": 1.421962921312162e-05, "loss": 0.7838, "step": 17836 }, { "epoch": 
3.2967849842036796, "grad_norm": 0.782057523727417, "learning_rate": 1.421212125710526e-05, "loss": 0.8256, "step": 17837 }, { "epoch": 3.296970823267051, "grad_norm": 0.8839608430862427, "learning_rate": 1.4204615132083931e-05, "loss": 0.5899, "step": 17838 }, { "epoch": 3.297156662330422, "grad_norm": 1.164730429649353, "learning_rate": 1.4197110838217886e-05, "loss": 1.0913, "step": 17839 }, { "epoch": 3.297342501393793, "grad_norm": 0.8485312461853027, "learning_rate": 1.4189608375667263e-05, "loss": 0.8149, "step": 17840 }, { "epoch": 3.297528340457164, "grad_norm": 0.94539874792099, "learning_rate": 1.4182107744592166e-05, "loss": 0.8123, "step": 17841 }, { "epoch": 3.2977141795205354, "grad_norm": 0.8088021278381348, "learning_rate": 1.4174608945152712e-05, "loss": 0.7983, "step": 17842 }, { "epoch": 3.2979000185839062, "grad_norm": 0.8575440049171448, "learning_rate": 1.4167111977508973e-05, "loss": 0.949, "step": 17843 }, { "epoch": 3.2980858576472776, "grad_norm": 0.7935901284217834, "learning_rate": 1.4159616841820933e-05, "loss": 0.542, "step": 17844 }, { "epoch": 3.2982716967106485, "grad_norm": 1.159449577331543, "learning_rate": 1.4152123538248563e-05, "loss": 1.146, "step": 17845 }, { "epoch": 3.29845753577402, "grad_norm": 1.0020778179168701, "learning_rate": 1.4144632066951779e-05, "loss": 0.702, "step": 17846 }, { "epoch": 3.2986433748373907, "grad_norm": 0.9826176762580872, "learning_rate": 1.4137142428090511e-05, "loss": 0.8788, "step": 17847 }, { "epoch": 3.298829213900762, "grad_norm": 0.8390106558799744, "learning_rate": 1.4129654621824583e-05, "loss": 0.7019, "step": 17848 }, { "epoch": 3.299015052964133, "grad_norm": 0.8387085795402527, "learning_rate": 1.4122168648313816e-05, "loss": 0.6126, "step": 17849 }, { "epoch": 3.299200892027504, "grad_norm": 0.884889543056488, "learning_rate": 1.411468450771798e-05, "loss": 0.7499, "step": 17850 }, { "epoch": 3.299386731090875, "grad_norm": 0.8823387026786804, "learning_rate": 
1.4107202200196801e-05, "loss": 0.6584, "step": 17851 }, { "epoch": 3.2995725701542464, "grad_norm": 0.9210385084152222, "learning_rate": 1.409972172591002e-05, "loss": 0.7476, "step": 17852 }, { "epoch": 3.2997584092176178, "grad_norm": 0.9333329200744629, "learning_rate": 1.4092243085017276e-05, "loss": 0.8189, "step": 17853 }, { "epoch": 3.2999442482809886, "grad_norm": 0.8348424434661865, "learning_rate": 1.4084766277678152e-05, "loss": 0.741, "step": 17854 }, { "epoch": 3.30013008734436, "grad_norm": 1.09077787399292, "learning_rate": 1.4077291304052275e-05, "loss": 0.625, "step": 17855 }, { "epoch": 3.300315926407731, "grad_norm": 0.8905661702156067, "learning_rate": 1.4069818164299165e-05, "loss": 0.6849, "step": 17856 }, { "epoch": 3.300501765471102, "grad_norm": 0.8002813458442688, "learning_rate": 1.4062346858578335e-05, "loss": 0.7506, "step": 17857 }, { "epoch": 3.300687604534473, "grad_norm": 1.1196472644805908, "learning_rate": 1.4054877387049203e-05, "loss": 0.9203, "step": 17858 }, { "epoch": 3.3008734435978444, "grad_norm": 1.0638034343719482, "learning_rate": 1.4047409749871255e-05, "loss": 0.99, "step": 17859 }, { "epoch": 3.3010592826612153, "grad_norm": 0.917509913444519, "learning_rate": 1.4039943947203838e-05, "loss": 0.6758, "step": 17860 }, { "epoch": 3.3012451217245866, "grad_norm": 0.9278843998908997, "learning_rate": 1.4032479979206282e-05, "loss": 0.9307, "step": 17861 }, { "epoch": 3.3014309607879575, "grad_norm": 0.9846850037574768, "learning_rate": 1.4025017846037924e-05, "loss": 0.8342, "step": 17862 }, { "epoch": 3.301616799851329, "grad_norm": 0.9571072459220886, "learning_rate": 1.401755754785804e-05, "loss": 1.0341, "step": 17863 }, { "epoch": 3.3018026389146997, "grad_norm": 1.0230777263641357, "learning_rate": 1.4010099084825834e-05, "loss": 0.9664, "step": 17864 }, { "epoch": 3.301988477978071, "grad_norm": 1.0085383653640747, "learning_rate": 1.4002642457100502e-05, "loss": 1.0487, "step": 17865 }, { "epoch": 
3.302174317041442, "grad_norm": 0.8759761452674866, "learning_rate": 1.3995187664841158e-05, "loss": 0.8395, "step": 17866 }, { "epoch": 3.3023601561048133, "grad_norm": 0.9805045127868652, "learning_rate": 1.398773470820698e-05, "loss": 0.7442, "step": 17867 }, { "epoch": 3.3025459951681846, "grad_norm": 0.8271703124046326, "learning_rate": 1.398028358735699e-05, "loss": 0.5299, "step": 17868 }, { "epoch": 3.3027318342315555, "grad_norm": 0.9561894536018372, "learning_rate": 1.3972834302450233e-05, "loss": 0.8613, "step": 17869 }, { "epoch": 3.3029176732949264, "grad_norm": 0.9207913875579834, "learning_rate": 1.396538685364568e-05, "loss": 1.0006, "step": 17870 }, { "epoch": 3.3031035123582977, "grad_norm": 1.1078895330429077, "learning_rate": 1.3957941241102324e-05, "loss": 0.9432, "step": 17871 }, { "epoch": 3.303289351421669, "grad_norm": 0.8856802582740784, "learning_rate": 1.395049746497904e-05, "loss": 0.8406, "step": 17872 }, { "epoch": 3.30347519048504, "grad_norm": 0.97230464220047, "learning_rate": 1.3943055525434745e-05, "loss": 0.9904, "step": 17873 }, { "epoch": 3.3036610295484112, "grad_norm": 0.9858556985855103, "learning_rate": 1.3935615422628223e-05, "loss": 0.8607, "step": 17874 }, { "epoch": 3.303846868611782, "grad_norm": 0.9251289367675781, "learning_rate": 1.3928177156718314e-05, "loss": 0.7276, "step": 17875 }, { "epoch": 3.3040327076751534, "grad_norm": 1.1109967231750488, "learning_rate": 1.3920740727863768e-05, "loss": 0.8898, "step": 17876 }, { "epoch": 3.3042185467385243, "grad_norm": 0.7776536345481873, "learning_rate": 1.3913306136223292e-05, "loss": 0.7946, "step": 17877 }, { "epoch": 3.3044043858018957, "grad_norm": 1.0654406547546387, "learning_rate": 1.3905873381955536e-05, "loss": 0.9813, "step": 17878 }, { "epoch": 3.3045902248652665, "grad_norm": 0.8269950151443481, "learning_rate": 1.3898442465219198e-05, "loss": 0.8146, "step": 17879 }, { "epoch": 3.304776063928638, "grad_norm": 0.957031786441803, "learning_rate": 
1.3891013386172846e-05, "loss": 0.7226, "step": 17880 }, { "epoch": 3.3049619029920088, "grad_norm": 1.144073724746704, "learning_rate": 1.3883586144975026e-05, "loss": 0.7355, "step": 17881 }, { "epoch": 3.30514774205538, "grad_norm": 0.9513867497444153, "learning_rate": 1.3876160741784294e-05, "loss": 0.9539, "step": 17882 }, { "epoch": 3.305333581118751, "grad_norm": 0.991651177406311, "learning_rate": 1.3868737176759106e-05, "loss": 0.89, "step": 17883 }, { "epoch": 3.3055194201821223, "grad_norm": 1.1039535999298096, "learning_rate": 1.386131545005792e-05, "loss": 0.899, "step": 17884 }, { "epoch": 3.305705259245493, "grad_norm": 0.7326856851577759, "learning_rate": 1.3853895561839147e-05, "loss": 0.7802, "step": 17885 }, { "epoch": 3.3058910983088645, "grad_norm": 0.93720942735672, "learning_rate": 1.3846477512261124e-05, "loss": 0.6857, "step": 17886 }, { "epoch": 3.306076937372236, "grad_norm": 0.8614287972450256, "learning_rate": 1.3839061301482203e-05, "loss": 0.7416, "step": 17887 }, { "epoch": 3.3062627764356067, "grad_norm": 0.9519493579864502, "learning_rate": 1.3831646929660668e-05, "loss": 0.9447, "step": 17888 }, { "epoch": 3.306448615498978, "grad_norm": 0.8550201058387756, "learning_rate": 1.382423439695476e-05, "loss": 0.8227, "step": 17889 }, { "epoch": 3.306634454562349, "grad_norm": 0.94979327917099, "learning_rate": 1.3816823703522663e-05, "loss": 0.9126, "step": 17890 }, { "epoch": 3.3068202936257203, "grad_norm": 1.3942769765853882, "learning_rate": 1.3809414849522584e-05, "loss": 0.785, "step": 17891 }, { "epoch": 3.307006132689091, "grad_norm": 1.0408034324645996, "learning_rate": 1.3802007835112641e-05, "loss": 0.9444, "step": 17892 }, { "epoch": 3.3071919717524625, "grad_norm": 1.0535863637924194, "learning_rate": 1.3794602660450895e-05, "loss": 1.0558, "step": 17893 }, { "epoch": 3.3073778108158334, "grad_norm": 0.7811908721923828, "learning_rate": 1.3787199325695422e-05, "loss": 0.6766, "step": 17894 }, { "epoch": 3.3075636498792047, 
"grad_norm": 0.8901042938232422, "learning_rate": 1.3779797831004248e-05, "loss": 0.7227, "step": 17895 }, { "epoch": 3.3077494889425756, "grad_norm": 0.8555505871772766, "learning_rate": 1.3772398176535339e-05, "loss": 0.6997, "step": 17896 }, { "epoch": 3.307935328005947, "grad_norm": 0.9381975531578064, "learning_rate": 1.3765000362446612e-05, "loss": 0.8737, "step": 17897 }, { "epoch": 3.308121167069318, "grad_norm": 1.1561797857284546, "learning_rate": 1.3757604388895951e-05, "loss": 0.843, "step": 17898 }, { "epoch": 3.308307006132689, "grad_norm": 1.1110519170761108, "learning_rate": 1.3750210256041241e-05, "loss": 0.8357, "step": 17899 }, { "epoch": 3.30849284519606, "grad_norm": 1.0905356407165527, "learning_rate": 1.374281796404029e-05, "loss": 0.7613, "step": 17900 }, { "epoch": 3.3086786842594313, "grad_norm": 0.903249204158783, "learning_rate": 1.3735427513050836e-05, "loss": 0.6294, "step": 17901 }, { "epoch": 3.3088645233228027, "grad_norm": 1.348289132118225, "learning_rate": 1.3728038903230666e-05, "loss": 0.9079, "step": 17902 }, { "epoch": 3.3090503623861736, "grad_norm": 0.9120255708694458, "learning_rate": 1.3720652134737466e-05, "loss": 0.9507, "step": 17903 }, { "epoch": 3.3092362014495444, "grad_norm": 0.9518958330154419, "learning_rate": 1.3713267207728863e-05, "loss": 0.9064, "step": 17904 }, { "epoch": 3.3094220405129158, "grad_norm": 0.8687372207641602, "learning_rate": 1.3705884122362512e-05, "loss": 0.9743, "step": 17905 }, { "epoch": 3.309607879576287, "grad_norm": 0.9766872525215149, "learning_rate": 1.3698502878795961e-05, "loss": 0.7682, "step": 17906 }, { "epoch": 3.309793718639658, "grad_norm": 0.859889566898346, "learning_rate": 1.3691123477186785e-05, "loss": 0.7043, "step": 17907 }, { "epoch": 3.3099795577030293, "grad_norm": 1.0203168392181396, "learning_rate": 1.368374591769247e-05, "loss": 0.7855, "step": 17908 }, { "epoch": 3.3101653967664, "grad_norm": 0.8277616500854492, "learning_rate": 1.3676370200470479e-05, "loss": 
0.7292, "step": 17909 }, { "epoch": 3.3103512358297715, "grad_norm": 0.981030285358429, "learning_rate": 1.3668996325678196e-05, "loss": 0.727, "step": 17910 }, { "epoch": 3.3105370748931424, "grad_norm": 0.9350651502609253, "learning_rate": 1.3661624293473063e-05, "loss": 0.8273, "step": 17911 }, { "epoch": 3.3107229139565137, "grad_norm": 0.9313578605651855, "learning_rate": 1.3654254104012398e-05, "loss": 0.8877, "step": 17912 }, { "epoch": 3.3109087530198846, "grad_norm": 0.9104059934616089, "learning_rate": 1.3646885757453488e-05, "loss": 0.7371, "step": 17913 }, { "epoch": 3.311094592083256, "grad_norm": 0.984149694442749, "learning_rate": 1.3639519253953604e-05, "loss": 1.0117, "step": 17914 }, { "epoch": 3.311280431146627, "grad_norm": 1.1708226203918457, "learning_rate": 1.363215459367001e-05, "loss": 0.7616, "step": 17915 }, { "epoch": 3.311466270209998, "grad_norm": 0.8822725415229797, "learning_rate": 1.3624791776759849e-05, "loss": 0.8812, "step": 17916 }, { "epoch": 3.3116521092733695, "grad_norm": 1.0658091306686401, "learning_rate": 1.3617430803380293e-05, "loss": 1.0615, "step": 17917 }, { "epoch": 3.3118379483367404, "grad_norm": 1.0318050384521484, "learning_rate": 1.3610071673688418e-05, "loss": 1.0045, "step": 17918 }, { "epoch": 3.3120237874001113, "grad_norm": 1.094164490699768, "learning_rate": 1.360271438784133e-05, "loss": 0.8698, "step": 17919 }, { "epoch": 3.3122096264634826, "grad_norm": 1.0132442712783813, "learning_rate": 1.359535894599604e-05, "loss": 0.983, "step": 17920 }, { "epoch": 3.312395465526854, "grad_norm": 0.9011918902397156, "learning_rate": 1.3588005348309517e-05, "loss": 0.7316, "step": 17921 }, { "epoch": 3.312581304590225, "grad_norm": 1.044669508934021, "learning_rate": 1.3580653594938752e-05, "loss": 0.8474, "step": 17922 }, { "epoch": 3.312767143653596, "grad_norm": 0.9390353560447693, "learning_rate": 1.3573303686040628e-05, "loss": 1.1222, "step": 17923 }, { "epoch": 3.312952982716967, "grad_norm": 
0.7880856990814209, "learning_rate": 1.356595562177203e-05, "loss": 0.5984, "step": 17924 }, { "epoch": 3.3131388217803384, "grad_norm": 1.1461987495422363, "learning_rate": 1.3558609402289768e-05, "loss": 0.7476, "step": 17925 }, { "epoch": 3.3133246608437092, "grad_norm": 0.7984837889671326, "learning_rate": 1.3551265027750637e-05, "loss": 0.6918, "step": 17926 }, { "epoch": 3.3135104999070806, "grad_norm": 0.9764217734336853, "learning_rate": 1.3543922498311424e-05, "loss": 0.8359, "step": 17927 }, { "epoch": 3.3136963389704515, "grad_norm": 0.8988882303237915, "learning_rate": 1.3536581814128824e-05, "loss": 0.7925, "step": 17928 }, { "epoch": 3.313882178033823, "grad_norm": 0.8989973068237305, "learning_rate": 1.3529242975359514e-05, "loss": 0.7593, "step": 17929 }, { "epoch": 3.3140680170971937, "grad_norm": 0.8710231184959412, "learning_rate": 1.3521905982160088e-05, "loss": 0.7564, "step": 17930 }, { "epoch": 3.314253856160565, "grad_norm": 0.914422333240509, "learning_rate": 1.3514570834687202e-05, "loss": 0.7843, "step": 17931 }, { "epoch": 3.314439695223936, "grad_norm": 0.8551025390625, "learning_rate": 1.3507237533097395e-05, "loss": 0.8548, "step": 17932 }, { "epoch": 3.314625534287307, "grad_norm": 1.2345638275146484, "learning_rate": 1.3499906077547143e-05, "loss": 0.7326, "step": 17933 }, { "epoch": 3.314811373350678, "grad_norm": 0.9380419850349426, "learning_rate": 1.3492576468192985e-05, "loss": 1.0185, "step": 17934 }, { "epoch": 3.3149972124140494, "grad_norm": 0.9171674847602844, "learning_rate": 1.3485248705191311e-05, "loss": 0.7686, "step": 17935 }, { "epoch": 3.3151830514774208, "grad_norm": 0.7556156516075134, "learning_rate": 1.347792278869855e-05, "loss": 0.7326, "step": 17936 }, { "epoch": 3.3153688905407916, "grad_norm": 0.8587283492088318, "learning_rate": 1.3470598718871052e-05, "loss": 1.0934, "step": 17937 }, { "epoch": 3.315554729604163, "grad_norm": 0.9215811491012573, "learning_rate": 1.3463276495865118e-05, "loss": 0.5984, 
"step": 17938 }, { "epoch": 3.315740568667534, "grad_norm": 1.0269794464111328, "learning_rate": 1.3455956119837055e-05, "loss": 0.7353, "step": 17939 }, { "epoch": 3.315926407730905, "grad_norm": 0.8657709360122681, "learning_rate": 1.3448637590943103e-05, "loss": 0.9945, "step": 17940 }, { "epoch": 3.316112246794276, "grad_norm": 1.0170204639434814, "learning_rate": 1.3441320909339428e-05, "loss": 1.0004, "step": 17941 }, { "epoch": 3.3162980858576474, "grad_norm": 0.9796534776687622, "learning_rate": 1.3434006075182238e-05, "loss": 0.7462, "step": 17942 }, { "epoch": 3.3164839249210183, "grad_norm": 0.8594641089439392, "learning_rate": 1.342669308862764e-05, "loss": 0.6974, "step": 17943 }, { "epoch": 3.3166697639843896, "grad_norm": 1.0251960754394531, "learning_rate": 1.341938194983171e-05, "loss": 0.7658, "step": 17944 }, { "epoch": 3.3168556030477605, "grad_norm": 0.8630359172821045, "learning_rate": 1.341207265895047e-05, "loss": 0.9239, "step": 17945 }, { "epoch": 3.317041442111132, "grad_norm": 1.0533076524734497, "learning_rate": 1.3404765216139947e-05, "loss": 0.9937, "step": 17946 }, { "epoch": 3.3172272811745027, "grad_norm": 0.8733925223350525, "learning_rate": 1.339745962155613e-05, "loss": 0.9519, "step": 17947 }, { "epoch": 3.317413120237874, "grad_norm": 1.035334587097168, "learning_rate": 1.3390155875354927e-05, "loss": 0.6359, "step": 17948 }, { "epoch": 3.317598959301245, "grad_norm": 0.7760814428329468, "learning_rate": 1.3382853977692211e-05, "loss": 0.6602, "step": 17949 }, { "epoch": 3.3177847983646163, "grad_norm": 0.8502481579780579, "learning_rate": 1.3375553928723816e-05, "loss": 0.8501, "step": 17950 }, { "epoch": 3.3179706374279876, "grad_norm": 0.7824277877807617, "learning_rate": 1.3368255728605594e-05, "loss": 0.8327, "step": 17951 }, { "epoch": 3.3181564764913585, "grad_norm": 0.9095959663391113, "learning_rate": 1.3360959377493287e-05, "loss": 0.7192, "step": 17952 }, { "epoch": 3.3183423155547294, "grad_norm": 
0.9373075366020203, "learning_rate": 1.3353664875542605e-05, "loss": 0.7329, "step": 17953 }, { "epoch": 3.3185281546181007, "grad_norm": 1.0542707443237305, "learning_rate": 1.3346372222909265e-05, "loss": 0.7624, "step": 17954 }, { "epoch": 3.318713993681472, "grad_norm": 0.8361062407493591, "learning_rate": 1.3339081419748922e-05, "loss": 0.8864, "step": 17955 }, { "epoch": 3.318899832744843, "grad_norm": 0.8348095417022705, "learning_rate": 1.333179246621713e-05, "loss": 0.8173, "step": 17956 }, { "epoch": 3.3190856718082142, "grad_norm": 0.9309622645378113, "learning_rate": 1.3324505362469542e-05, "loss": 0.7088, "step": 17957 }, { "epoch": 3.319271510871585, "grad_norm": 1.0715322494506836, "learning_rate": 1.331722010866161e-05, "loss": 0.7816, "step": 17958 }, { "epoch": 3.3194573499349564, "grad_norm": 0.9678760766983032, "learning_rate": 1.3309936704948877e-05, "loss": 0.8219, "step": 17959 }, { "epoch": 3.3196431889983273, "grad_norm": 1.3653862476348877, "learning_rate": 1.3302655151486798e-05, "loss": 0.8307, "step": 17960 }, { "epoch": 3.3198290280616987, "grad_norm": 1.0239980220794678, "learning_rate": 1.3295375448430724e-05, "loss": 0.8326, "step": 17961 }, { "epoch": 3.3200148671250695, "grad_norm": 0.8559108376502991, "learning_rate": 1.3288097595936111e-05, "loss": 0.6935, "step": 17962 }, { "epoch": 3.320200706188441, "grad_norm": 0.8458294868469238, "learning_rate": 1.3280821594158243e-05, "loss": 0.8023, "step": 17963 }, { "epoch": 3.3203865452518118, "grad_norm": 0.9620524644851685, "learning_rate": 1.327354744325242e-05, "loss": 0.8157, "step": 17964 }, { "epoch": 3.320572384315183, "grad_norm": 0.8043466806411743, "learning_rate": 1.3266275143373874e-05, "loss": 0.6708, "step": 17965 }, { "epoch": 3.3207582233785544, "grad_norm": 0.9349076151847839, "learning_rate": 1.3259004694677869e-05, "loss": 0.9326, "step": 17966 }, { "epoch": 3.3209440624419253, "grad_norm": 1.0523160696029663, "learning_rate": 1.3251736097319534e-05, "loss": 
0.7398, "step": 17967 }, { "epoch": 3.321129901505296, "grad_norm": 1.1118494272232056, "learning_rate": 1.3244469351454048e-05, "loss": 0.809, "step": 17968 }, { "epoch": 3.3213157405686675, "grad_norm": 0.8730397820472717, "learning_rate": 1.3237204457236484e-05, "loss": 0.6698, "step": 17969 }, { "epoch": 3.321501579632039, "grad_norm": 1.2297297716140747, "learning_rate": 1.3229941414821878e-05, "loss": 0.8201, "step": 17970 }, { "epoch": 3.3216874186954097, "grad_norm": 1.0545740127563477, "learning_rate": 1.3222680224365292e-05, "loss": 0.8432, "step": 17971 }, { "epoch": 3.321873257758781, "grad_norm": 0.8901224136352539, "learning_rate": 1.321542088602168e-05, "loss": 0.8768, "step": 17972 }, { "epoch": 3.322059096822152, "grad_norm": 0.859428882598877, "learning_rate": 1.3208163399945961e-05, "loss": 0.6758, "step": 17973 }, { "epoch": 3.3222449358855233, "grad_norm": 1.1241716146469116, "learning_rate": 1.320090776629308e-05, "loss": 0.8622, "step": 17974 }, { "epoch": 3.322430774948894, "grad_norm": 0.9695386290550232, "learning_rate": 1.3193653985217858e-05, "loss": 0.7271, "step": 17975 }, { "epoch": 3.3226166140122655, "grad_norm": 0.9608593583106995, "learning_rate": 1.3186402056875135e-05, "loss": 0.6767, "step": 17976 }, { "epoch": 3.3228024530756364, "grad_norm": 0.9435471892356873, "learning_rate": 1.3179151981419658e-05, "loss": 0.7687, "step": 17977 }, { "epoch": 3.3229882921390077, "grad_norm": 1.1941502094268799, "learning_rate": 1.3171903759006188e-05, "loss": 1.0315, "step": 17978 }, { "epoch": 3.3231741312023786, "grad_norm": 1.0207300186157227, "learning_rate": 1.3164657389789458e-05, "loss": 0.6687, "step": 17979 }, { "epoch": 3.32335997026575, "grad_norm": 0.8957182765007019, "learning_rate": 1.3157412873924092e-05, "loss": 0.8879, "step": 17980 }, { "epoch": 3.323545809329121, "grad_norm": 0.877617597579956, "learning_rate": 1.3150170211564705e-05, "loss": 0.728, "step": 17981 }, { "epoch": 3.323731648392492, "grad_norm": 
0.9174977540969849, "learning_rate": 1.314292940286591e-05, "loss": 0.672, "step": 17982 }, { "epoch": 3.323917487455863, "grad_norm": 1.834579348564148, "learning_rate": 1.313569044798224e-05, "loss": 1.1762, "step": 17983 }, { "epoch": 3.3241033265192343, "grad_norm": 1.1315218210220337, "learning_rate": 1.3128453347068192e-05, "loss": 0.8405, "step": 17984 }, { "epoch": 3.3242891655826057, "grad_norm": 1.1871623992919922, "learning_rate": 1.31212181002782e-05, "loss": 0.7738, "step": 17985 }, { "epoch": 3.3244750046459766, "grad_norm": 0.9807666540145874, "learning_rate": 1.3113984707766746e-05, "loss": 0.7332, "step": 17986 }, { "epoch": 3.324660843709348, "grad_norm": 0.9335218667984009, "learning_rate": 1.310675316968819e-05, "loss": 0.8143, "step": 17987 }, { "epoch": 3.3248466827727188, "grad_norm": 1.12467622756958, "learning_rate": 1.3099523486196853e-05, "loss": 0.8869, "step": 17988 }, { "epoch": 3.32503252183609, "grad_norm": 0.9656354188919067, "learning_rate": 1.3092295657447073e-05, "loss": 0.8738, "step": 17989 }, { "epoch": 3.325218360899461, "grad_norm": 1.1582108736038208, "learning_rate": 1.3085069683593076e-05, "loss": 0.8042, "step": 17990 }, { "epoch": 3.3254041999628323, "grad_norm": 0.8076069951057434, "learning_rate": 1.3077845564789148e-05, "loss": 0.6007, "step": 17991 }, { "epoch": 3.325590039026203, "grad_norm": 0.9304507374763489, "learning_rate": 1.3070623301189433e-05, "loss": 0.8243, "step": 17992 }, { "epoch": 3.3257758780895745, "grad_norm": 0.9668257832527161, "learning_rate": 1.3063402892948074e-05, "loss": 0.7761, "step": 17993 }, { "epoch": 3.3259617171529454, "grad_norm": 0.9958893060684204, "learning_rate": 1.3056184340219213e-05, "loss": 0.9935, "step": 17994 }, { "epoch": 3.3261475562163167, "grad_norm": 0.8639600872993469, "learning_rate": 1.3048967643156884e-05, "loss": 0.8225, "step": 17995 }, { "epoch": 3.3263333952796876, "grad_norm": 0.9271656274795532, "learning_rate": 1.304175280191514e-05, "loss": 0.8033, 
"step": 17996 }, { "epoch": 3.326519234343059, "grad_norm": 0.9010143280029297, "learning_rate": 1.3034539816647928e-05, "loss": 0.8416, "step": 17997 }, { "epoch": 3.32670507340643, "grad_norm": 1.003106713294983, "learning_rate": 1.3027328687509221e-05, "loss": 0.7771, "step": 17998 }, { "epoch": 3.326890912469801, "grad_norm": 0.7881788015365601, "learning_rate": 1.3020119414652965e-05, "loss": 0.8576, "step": 17999 }, { "epoch": 3.3270767515331725, "grad_norm": 1.155503511428833, "learning_rate": 1.3012911998232991e-05, "loss": 0.807, "step": 18000 }, { "epoch": 3.3272625905965434, "grad_norm": 0.9561105966567993, "learning_rate": 1.300570643840311e-05, "loss": 0.8275, "step": 18001 }, { "epoch": 3.3274484296599143, "grad_norm": 0.8585792183876038, "learning_rate": 1.2998502735317153e-05, "loss": 0.7732, "step": 18002 }, { "epoch": 3.3276342687232856, "grad_norm": 0.8768472075462341, "learning_rate": 1.2991300889128866e-05, "loss": 0.7609, "step": 18003 }, { "epoch": 3.327820107786657, "grad_norm": 0.9516651630401611, "learning_rate": 1.2984100899991947e-05, "loss": 0.7306, "step": 18004 }, { "epoch": 3.328005946850028, "grad_norm": 0.9200682044029236, "learning_rate": 1.297690276806004e-05, "loss": 0.8011, "step": 18005 }, { "epoch": 3.328191785913399, "grad_norm": 1.0520631074905396, "learning_rate": 1.2969706493486833e-05, "loss": 0.8373, "step": 18006 }, { "epoch": 3.32837762497677, "grad_norm": 0.8613325357437134, "learning_rate": 1.2962512076425892e-05, "loss": 0.6305, "step": 18007 }, { "epoch": 3.3285634640401414, "grad_norm": 0.7984744310379028, "learning_rate": 1.2955319517030762e-05, "loss": 0.605, "step": 18008 }, { "epoch": 3.3287493031035122, "grad_norm": 0.8289118409156799, "learning_rate": 1.2948128815454951e-05, "loss": 0.9193, "step": 18009 }, { "epoch": 3.3289351421668836, "grad_norm": 0.7974891662597656, "learning_rate": 1.294093997185194e-05, "loss": 0.5449, "step": 18010 }, { "epoch": 3.3291209812302545, "grad_norm": 0.9495866894721985, 
"learning_rate": 1.293375298637518e-05, "loss": 0.8973, "step": 18011 }, { "epoch": 3.329306820293626, "grad_norm": 0.9522069096565247, "learning_rate": 1.2926567859178052e-05, "loss": 0.7878, "step": 18012 }, { "epoch": 3.3294926593569967, "grad_norm": 1.059888243675232, "learning_rate": 1.2919384590413897e-05, "loss": 0.7148, "step": 18013 }, { "epoch": 3.329678498420368, "grad_norm": 0.8780260682106018, "learning_rate": 1.291220318023606e-05, "loss": 0.8033, "step": 18014 }, { "epoch": 3.329864337483739, "grad_norm": 1.0775411128997803, "learning_rate": 1.2905023628797797e-05, "loss": 0.8126, "step": 18015 }, { "epoch": 3.33005017654711, "grad_norm": 0.9820963740348816, "learning_rate": 1.2897845936252339e-05, "loss": 0.927, "step": 18016 }, { "epoch": 3.330236015610481, "grad_norm": 0.8544705510139465, "learning_rate": 1.2890670102752867e-05, "loss": 0.7791, "step": 18017 }, { "epoch": 3.3304218546738524, "grad_norm": 1.0104566812515259, "learning_rate": 1.2883496128452577e-05, "loss": 0.83, "step": 18018 }, { "epoch": 3.3306076937372238, "grad_norm": 0.9168938398361206, "learning_rate": 1.2876324013504547e-05, "loss": 0.8342, "step": 18019 }, { "epoch": 3.3307935328005946, "grad_norm": 1.260501503944397, "learning_rate": 1.286915375806188e-05, "loss": 0.7581, "step": 18020 }, { "epoch": 3.330979371863966, "grad_norm": 1.1076463460922241, "learning_rate": 1.2861985362277595e-05, "loss": 0.8086, "step": 18021 }, { "epoch": 3.331165210927337, "grad_norm": 1.2231957912445068, "learning_rate": 1.2854818826304715e-05, "loss": 0.778, "step": 18022 }, { "epoch": 3.331351049990708, "grad_norm": 0.8688556551933289, "learning_rate": 1.2847654150296184e-05, "loss": 0.8561, "step": 18023 }, { "epoch": 3.331536889054079, "grad_norm": 1.01069974899292, "learning_rate": 1.2840491334404913e-05, "loss": 1.0374, "step": 18024 }, { "epoch": 3.3317227281174504, "grad_norm": 0.9949778318405151, "learning_rate": 1.2833330378783759e-05, "loss": 0.9732, "step": 18025 }, { "epoch": 
3.3319085671808213, "grad_norm": 0.9987459778785706, "learning_rate": 1.282617128358561e-05, "loss": 0.84, "step": 18026 }, { "epoch": 3.3320944062441926, "grad_norm": 0.8804039359092712, "learning_rate": 1.281901404896323e-05, "loss": 0.8324, "step": 18027 }, { "epoch": 3.3322802453075635, "grad_norm": 0.6644346714019775, "learning_rate": 1.2811858675069387e-05, "loss": 0.7908, "step": 18028 }, { "epoch": 3.332466084370935, "grad_norm": 0.868604302406311, "learning_rate": 1.2804705162056785e-05, "loss": 0.6634, "step": 18029 }, { "epoch": 3.3326519234343057, "grad_norm": 0.9592475891113281, "learning_rate": 1.279755351007812e-05, "loss": 0.757, "step": 18030 }, { "epoch": 3.332837762497677, "grad_norm": 0.9952224493026733, "learning_rate": 1.2790403719286049e-05, "loss": 1.0982, "step": 18031 }, { "epoch": 3.333023601561048, "grad_norm": 0.8424680829048157, "learning_rate": 1.2783255789833148e-05, "loss": 0.6902, "step": 18032 }, { "epoch": 3.3332094406244193, "grad_norm": 0.9481412172317505, "learning_rate": 1.2776109721871965e-05, "loss": 0.8379, "step": 18033 }, { "epoch": 3.3333952796877906, "grad_norm": 0.9572385549545288, "learning_rate": 1.2768965515555065e-05, "loss": 1.0206, "step": 18034 }, { "epoch": 3.3335811187511615, "grad_norm": 0.8761714696884155, "learning_rate": 1.2761823171034892e-05, "loss": 0.9961, "step": 18035 }, { "epoch": 3.333766957814533, "grad_norm": 0.9750075340270996, "learning_rate": 1.2754682688463903e-05, "loss": 0.8697, "step": 18036 }, { "epoch": 3.3339527968779037, "grad_norm": 0.8760260343551636, "learning_rate": 1.2747544067994465e-05, "loss": 0.8723, "step": 18037 }, { "epoch": 3.334138635941275, "grad_norm": 0.9728489518165588, "learning_rate": 1.274040730977899e-05, "loss": 0.8646, "step": 18038 }, { "epoch": 3.334324475004646, "grad_norm": 0.8067655563354492, "learning_rate": 1.2733272413969776e-05, "loss": 0.6259, "step": 18039 }, { "epoch": 3.3345103140680172, "grad_norm": 0.9222126007080078, "learning_rate": 
1.2726139380719094e-05, "loss": 0.8291, "step": 18040 }, { "epoch": 3.334696153131388, "grad_norm": 1.1046346426010132, "learning_rate": 1.2719008210179195e-05, "loss": 0.777, "step": 18041 }, { "epoch": 3.3348819921947594, "grad_norm": 0.820184588432312, "learning_rate": 1.2711878902502305e-05, "loss": 0.7441, "step": 18042 }, { "epoch": 3.3350678312581303, "grad_norm": 0.8473408222198486, "learning_rate": 1.270475145784057e-05, "loss": 0.7787, "step": 18043 }, { "epoch": 3.3352536703215017, "grad_norm": 0.6779477596282959, "learning_rate": 1.2697625876346108e-05, "loss": 0.5674, "step": 18044 }, { "epoch": 3.3354395093848725, "grad_norm": 1.0007576942443848, "learning_rate": 1.269050215817099e-05, "loss": 0.9149, "step": 18045 }, { "epoch": 3.335625348448244, "grad_norm": 0.9065996408462524, "learning_rate": 1.2683380303467285e-05, "loss": 0.8983, "step": 18046 }, { "epoch": 3.3358111875116148, "grad_norm": 0.835256040096283, "learning_rate": 1.2676260312386989e-05, "loss": 1.0209, "step": 18047 }, { "epoch": 3.335997026574986, "grad_norm": 0.9279325008392334, "learning_rate": 1.2669142185082073e-05, "loss": 0.9567, "step": 18048 }, { "epoch": 3.3361828656383574, "grad_norm": 1.4079657793045044, "learning_rate": 1.2662025921704423e-05, "loss": 0.8364, "step": 18049 }, { "epoch": 3.3363687047017283, "grad_norm": 0.8377646803855896, "learning_rate": 1.2654911522405966e-05, "loss": 0.7861, "step": 18050 }, { "epoch": 3.336554543765099, "grad_norm": 0.9112058281898499, "learning_rate": 1.2647798987338521e-05, "loss": 0.9795, "step": 18051 }, { "epoch": 3.3367403828284705, "grad_norm": 0.88719242811203, "learning_rate": 1.2640688316653926e-05, "loss": 0.7678, "step": 18052 }, { "epoch": 3.336926221891842, "grad_norm": 0.9839407801628113, "learning_rate": 1.26335795105039e-05, "loss": 0.8179, "step": 18053 }, { "epoch": 3.3371120609552127, "grad_norm": 0.7015066742897034, "learning_rate": 1.2626472569040204e-05, "loss": 0.6559, "step": 18054 }, { "epoch": 
3.337297900018584, "grad_norm": 0.9736129641532898, "learning_rate": 1.2619367492414524e-05, "loss": 0.6797, "step": 18055 }, { "epoch": 3.337483739081955, "grad_norm": 1.0731183290481567, "learning_rate": 1.2612264280778496e-05, "loss": 0.8349, "step": 18056 }, { "epoch": 3.3376695781453263, "grad_norm": 1.0453765392303467, "learning_rate": 1.2605162934283699e-05, "loss": 0.7079, "step": 18057 }, { "epoch": 3.337855417208697, "grad_norm": 1.546590805053711, "learning_rate": 1.2598063453081743e-05, "loss": 1.1752, "step": 18058 }, { "epoch": 3.3380412562720685, "grad_norm": 0.7495214343070984, "learning_rate": 1.2590965837324131e-05, "loss": 0.5539, "step": 18059 }, { "epoch": 3.3382270953354394, "grad_norm": 0.9308766722679138, "learning_rate": 1.2583870087162353e-05, "loss": 0.9218, "step": 18060 }, { "epoch": 3.3384129343988107, "grad_norm": 1.0387533903121948, "learning_rate": 1.257677620274782e-05, "loss": 0.9143, "step": 18061 }, { "epoch": 3.3385987734621816, "grad_norm": 0.8280618190765381, "learning_rate": 1.2569684184232011e-05, "loss": 0.8469, "step": 18062 }, { "epoch": 3.338784612525553, "grad_norm": 0.9430438280105591, "learning_rate": 1.2562594031766262e-05, "loss": 0.8007, "step": 18063 }, { "epoch": 3.338970451588924, "grad_norm": 0.8449985980987549, "learning_rate": 1.2555505745501894e-05, "loss": 0.6529, "step": 18064 }, { "epoch": 3.339156290652295, "grad_norm": 0.9311802983283997, "learning_rate": 1.2548419325590177e-05, "loss": 0.8376, "step": 18065 }, { "epoch": 3.339342129715666, "grad_norm": 0.9691295623779297, "learning_rate": 1.254133477218239e-05, "loss": 0.9887, "step": 18066 }, { "epoch": 3.3395279687790373, "grad_norm": 0.9618540406227112, "learning_rate": 1.2534252085429732e-05, "loss": 0.9186, "step": 18067 }, { "epoch": 3.3397138078424087, "grad_norm": 0.947231650352478, "learning_rate": 1.2527171265483361e-05, "loss": 0.9473, "step": 18068 }, { "epoch": 3.3398996469057796, "grad_norm": 0.9473240971565247, "learning_rate": 
1.2520092312494402e-05, "loss": 1.0109, "step": 18069 }, { "epoch": 3.340085485969151, "grad_norm": 0.8536008596420288, "learning_rate": 1.2513015226613956e-05, "loss": 0.8339, "step": 18070 }, { "epoch": 3.3402713250325218, "grad_norm": 0.732178270816803, "learning_rate": 1.2505940007993078e-05, "loss": 0.695, "step": 18071 }, { "epoch": 3.340457164095893, "grad_norm": 0.906110405921936, "learning_rate": 1.2498866656782737e-05, "loss": 0.9163, "step": 18072 }, { "epoch": 3.340643003159264, "grad_norm": 0.906360387802124, "learning_rate": 1.2491795173133935e-05, "loss": 0.6139, "step": 18073 }, { "epoch": 3.3408288422226353, "grad_norm": 0.890641987323761, "learning_rate": 1.2484725557197608e-05, "loss": 0.7674, "step": 18074 }, { "epoch": 3.341014681286006, "grad_norm": 1.048957347869873, "learning_rate": 1.2477657809124631e-05, "loss": 0.7132, "step": 18075 }, { "epoch": 3.3412005203493775, "grad_norm": 1.1851656436920166, "learning_rate": 1.2470591929065866e-05, "loss": 0.8471, "step": 18076 }, { "epoch": 3.3413863594127484, "grad_norm": 1.098007082939148, "learning_rate": 1.246352791717208e-05, "loss": 1.1545, "step": 18077 }, { "epoch": 3.3415721984761197, "grad_norm": 0.9009913802146912, "learning_rate": 1.2456465773594094e-05, "loss": 0.6493, "step": 18078 }, { "epoch": 3.3417580375394906, "grad_norm": 0.9068120718002319, "learning_rate": 1.2449405498482613e-05, "loss": 0.6948, "step": 18079 }, { "epoch": 3.341943876602862, "grad_norm": 0.9160207509994507, "learning_rate": 1.2442347091988338e-05, "loss": 0.8363, "step": 18080 }, { "epoch": 3.342129715666233, "grad_norm": 0.9870322942733765, "learning_rate": 1.2435290554261881e-05, "loss": 0.8309, "step": 18081 }, { "epoch": 3.342315554729604, "grad_norm": 0.8470276594161987, "learning_rate": 1.2428235885453875e-05, "loss": 0.7559, "step": 18082 }, { "epoch": 3.3425013937929755, "grad_norm": 0.9534688591957092, "learning_rate": 1.2421183085714927e-05, "loss": 0.906, "step": 18083 }, { "epoch": 
3.3426872328563464, "grad_norm": 0.8364575505256653, "learning_rate": 1.2414132155195524e-05, "loss": 0.9157, "step": 18084 }, { "epoch": 3.3428730719197173, "grad_norm": 0.9537498354911804, "learning_rate": 1.2407083094046157e-05, "loss": 0.7473, "step": 18085 }, { "epoch": 3.3430589109830886, "grad_norm": 1.1054604053497314, "learning_rate": 1.2400035902417295e-05, "loss": 1.2257, "step": 18086 }, { "epoch": 3.34324475004646, "grad_norm": 0.9021813273429871, "learning_rate": 1.2392990580459352e-05, "loss": 0.9291, "step": 18087 }, { "epoch": 3.343430589109831, "grad_norm": 1.1228984594345093, "learning_rate": 1.2385947128322672e-05, "loss": 0.9025, "step": 18088 }, { "epoch": 3.343616428173202, "grad_norm": 0.9197649359703064, "learning_rate": 1.2378905546157583e-05, "loss": 0.698, "step": 18089 }, { "epoch": 3.343802267236573, "grad_norm": 0.9107482433319092, "learning_rate": 1.2371865834114416e-05, "loss": 0.8834, "step": 18090 }, { "epoch": 3.3439881062999444, "grad_norm": 0.8883651494979858, "learning_rate": 1.2364827992343397e-05, "loss": 0.8465, "step": 18091 }, { "epoch": 3.3441739453633152, "grad_norm": 1.1927982568740845, "learning_rate": 1.2357792020994718e-05, "loss": 0.6891, "step": 18092 }, { "epoch": 3.3443597844266866, "grad_norm": 0.6591570973396301, "learning_rate": 1.2350757920218558e-05, "loss": 0.4652, "step": 18093 }, { "epoch": 3.3445456234900575, "grad_norm": 0.9535413384437561, "learning_rate": 1.2343725690165087e-05, "loss": 0.8278, "step": 18094 }, { "epoch": 3.344731462553429, "grad_norm": 0.8922446966171265, "learning_rate": 1.233669533098436e-05, "loss": 0.8075, "step": 18095 }, { "epoch": 3.3449173016167997, "grad_norm": 0.9422911405563354, "learning_rate": 1.2329666842826438e-05, "loss": 1.0, "step": 18096 }, { "epoch": 3.345103140680171, "grad_norm": 0.8566906452178955, "learning_rate": 1.2322640225841309e-05, "loss": 0.6979, "step": 18097 }, { "epoch": 3.3452889797435423, "grad_norm": 1.1618760824203491, "learning_rate": 
1.2315615480178989e-05, "loss": 0.804, "step": 18098 }, { "epoch": 3.345474818806913, "grad_norm": 1.0303587913513184, "learning_rate": 1.2308592605989377e-05, "loss": 0.8608, "step": 18099 }, { "epoch": 3.345660657870284, "grad_norm": 1.1015151739120483, "learning_rate": 1.2301571603422379e-05, "loss": 0.8997, "step": 18100 }, { "epoch": 3.3458464969336554, "grad_norm": 0.9106659889221191, "learning_rate": 1.2294552472627808e-05, "loss": 0.7916, "step": 18101 }, { "epoch": 3.3460323359970268, "grad_norm": 1.1678972244262695, "learning_rate": 1.2287535213755542e-05, "loss": 0.9502, "step": 18102 }, { "epoch": 3.3462181750603976, "grad_norm": 0.9588977694511414, "learning_rate": 1.2280519826955284e-05, "loss": 0.7856, "step": 18103 }, { "epoch": 3.346404014123769, "grad_norm": 0.8328583836555481, "learning_rate": 1.2273506312376815e-05, "loss": 0.6244, "step": 18104 }, { "epoch": 3.34658985318714, "grad_norm": 0.9519017338752747, "learning_rate": 1.2266494670169792e-05, "loss": 0.8703, "step": 18105 }, { "epoch": 3.346775692250511, "grad_norm": 0.9912613034248352, "learning_rate": 1.2259484900483909e-05, "loss": 0.9899, "step": 18106 }, { "epoch": 3.346961531313882, "grad_norm": 0.9581605195999146, "learning_rate": 1.2252477003468743e-05, "loss": 0.8696, "step": 18107 }, { "epoch": 3.3471473703772534, "grad_norm": 0.8546910285949707, "learning_rate": 1.2245470979273887e-05, "loss": 0.6573, "step": 18108 }, { "epoch": 3.3473332094406243, "grad_norm": 0.848886251449585, "learning_rate": 1.2238466828048833e-05, "loss": 0.7496, "step": 18109 }, { "epoch": 3.3475190485039956, "grad_norm": 0.9090949892997742, "learning_rate": 1.2231464549943117e-05, "loss": 0.9007, "step": 18110 }, { "epoch": 3.3477048875673665, "grad_norm": 1.0155179500579834, "learning_rate": 1.2224464145106174e-05, "loss": 0.7334, "step": 18111 }, { "epoch": 3.347890726630738, "grad_norm": 1.1983330249786377, "learning_rate": 1.2217465613687395e-05, "loss": 1.0167, "step": 18112 }, { "epoch": 
3.3480765656941087, "grad_norm": 0.9881544709205627, "learning_rate": 1.2210468955836196e-05, "loss": 0.805, "step": 18113 }, { "epoch": 3.34826240475748, "grad_norm": 0.9015958309173584, "learning_rate": 1.2203474171701867e-05, "loss": 0.8233, "step": 18114 }, { "epoch": 3.348448243820851, "grad_norm": 0.9489085078239441, "learning_rate": 1.2196481261433735e-05, "loss": 0.9338, "step": 18115 }, { "epoch": 3.3486340828842223, "grad_norm": 0.889735758304596, "learning_rate": 1.2189490225181022e-05, "loss": 0.8285, "step": 18116 }, { "epoch": 3.3488199219475936, "grad_norm": 0.9477465748786926, "learning_rate": 1.2182501063092943e-05, "loss": 0.9435, "step": 18117 }, { "epoch": 3.3490057610109645, "grad_norm": 0.956580400466919, "learning_rate": 1.2175513775318692e-05, "loss": 1.0321, "step": 18118 }, { "epoch": 3.349191600074336, "grad_norm": 0.8784446120262146, "learning_rate": 1.2168528362007393e-05, "loss": 0.8943, "step": 18119 }, { "epoch": 3.3493774391377067, "grad_norm": 1.0098607540130615, "learning_rate": 1.2161544823308124e-05, "loss": 0.8295, "step": 18120 }, { "epoch": 3.349563278201078, "grad_norm": 0.890578031539917, "learning_rate": 1.2154563159369924e-05, "loss": 0.6808, "step": 18121 }, { "epoch": 3.349749117264449, "grad_norm": 1.0231043100357056, "learning_rate": 1.2147583370341853e-05, "loss": 0.7606, "step": 18122 }, { "epoch": 3.3499349563278202, "grad_norm": 0.873323380947113, "learning_rate": 1.2140605456372855e-05, "loss": 0.8282, "step": 18123 }, { "epoch": 3.350120795391191, "grad_norm": 0.8876305222511292, "learning_rate": 1.2133629417611835e-05, "loss": 0.6846, "step": 18124 }, { "epoch": 3.3503066344545624, "grad_norm": 0.851346492767334, "learning_rate": 1.2126655254207709e-05, "loss": 0.922, "step": 18125 }, { "epoch": 3.3504924735179333, "grad_norm": 1.0348562002182007, "learning_rate": 1.2119682966309353e-05, "loss": 0.8517, "step": 18126 }, { "epoch": 3.3506783125813047, "grad_norm": 0.8237027525901794, "learning_rate": 
1.2112712554065553e-05, "loss": 0.712, "step": 18127 }, { "epoch": 3.3508641516446755, "grad_norm": 0.8335827589035034, "learning_rate": 1.2105744017625075e-05, "loss": 0.6588, "step": 18128 }, { "epoch": 3.351049990708047, "grad_norm": 0.8928906321525574, "learning_rate": 1.2098777357136648e-05, "loss": 0.8155, "step": 18129 }, { "epoch": 3.3512358297714178, "grad_norm": 1.0455178022384644, "learning_rate": 1.2091812572748994e-05, "loss": 0.7419, "step": 18130 }, { "epoch": 3.351421668834789, "grad_norm": 1.686511754989624, "learning_rate": 1.2084849664610742e-05, "loss": 1.2921, "step": 18131 }, { "epoch": 3.3516075078981604, "grad_norm": 0.8682252764701843, "learning_rate": 1.2077888632870493e-05, "loss": 0.7061, "step": 18132 }, { "epoch": 3.3517933469615313, "grad_norm": 0.859376847743988, "learning_rate": 1.207092947767684e-05, "loss": 0.853, "step": 18133 }, { "epoch": 3.351979186024902, "grad_norm": 0.7160893082618713, "learning_rate": 1.2063972199178309e-05, "loss": 0.5596, "step": 18134 }, { "epoch": 3.3521650250882735, "grad_norm": 0.8916295170783997, "learning_rate": 1.2057016797523369e-05, "loss": 0.7075, "step": 18135 }, { "epoch": 3.352350864151645, "grad_norm": 0.7931239008903503, "learning_rate": 1.20500632728605e-05, "loss": 0.6814, "step": 18136 }, { "epoch": 3.3525367032150157, "grad_norm": 0.9768849015235901, "learning_rate": 1.20431116253381e-05, "loss": 0.638, "step": 18137 }, { "epoch": 3.352722542278387, "grad_norm": 1.1019235849380493, "learning_rate": 1.2036161855104544e-05, "loss": 0.8028, "step": 18138 }, { "epoch": 3.352908381341758, "grad_norm": 0.9690166711807251, "learning_rate": 1.2029213962308172e-05, "loss": 0.7347, "step": 18139 }, { "epoch": 3.3530942204051293, "grad_norm": 1.0368572473526, "learning_rate": 1.2022267947097266e-05, "loss": 1.0224, "step": 18140 }, { "epoch": 3.3532800594685, "grad_norm": 0.9114643931388855, "learning_rate": 1.2015323809620049e-05, "loss": 0.65, "step": 18141 }, { "epoch": 3.3534658985318715, 
"grad_norm": 0.9931135177612305, "learning_rate": 1.2008381550024773e-05, "loss": 0.6699, "step": 18142 }, { "epoch": 3.3536517375952424, "grad_norm": 0.930069625377655, "learning_rate": 1.2001441168459604e-05, "loss": 0.646, "step": 18143 }, { "epoch": 3.3538375766586137, "grad_norm": 0.9435836672782898, "learning_rate": 1.1994502665072637e-05, "loss": 0.7175, "step": 18144 }, { "epoch": 3.3540234157219846, "grad_norm": 0.7887563109397888, "learning_rate": 1.1987566040011989e-05, "loss": 0.6377, "step": 18145 }, { "epoch": 3.354209254785356, "grad_norm": 0.8834825158119202, "learning_rate": 1.1980631293425726e-05, "loss": 0.8163, "step": 18146 }, { "epoch": 3.3543950938487272, "grad_norm": 0.8089690208435059, "learning_rate": 1.1973698425461832e-05, "loss": 0.7909, "step": 18147 }, { "epoch": 3.354580932912098, "grad_norm": 0.8597369194030762, "learning_rate": 1.19667674362683e-05, "loss": 0.5808, "step": 18148 }, { "epoch": 3.354766771975469, "grad_norm": 1.174746036529541, "learning_rate": 1.1959838325993023e-05, "loss": 0.9286, "step": 18149 }, { "epoch": 3.3549526110388403, "grad_norm": 1.0257681608200073, "learning_rate": 1.1952911094783926e-05, "loss": 0.8775, "step": 18150 }, { "epoch": 3.3551384501022117, "grad_norm": 1.017487645149231, "learning_rate": 1.1945985742788856e-05, "loss": 0.775, "step": 18151 }, { "epoch": 3.3553242891655826, "grad_norm": 1.312432885169983, "learning_rate": 1.1939062270155588e-05, "loss": 0.8469, "step": 18152 }, { "epoch": 3.355510128228954, "grad_norm": 1.1309226751327515, "learning_rate": 1.1932140677031944e-05, "loss": 0.9789, "step": 18153 }, { "epoch": 3.3556959672923248, "grad_norm": 0.9536949396133423, "learning_rate": 1.192522096356562e-05, "loss": 0.9253, "step": 18154 }, { "epoch": 3.355881806355696, "grad_norm": 1.0114742517471313, "learning_rate": 1.1918303129904317e-05, "loss": 0.7059, "step": 18155 }, { "epoch": 3.356067645419067, "grad_norm": 0.9156493544578552, "learning_rate": 1.1911387176195654e-05, "loss": 
0.7234, "step": 18156 }, { "epoch": 3.3562534844824383, "grad_norm": 0.9517843723297119, "learning_rate": 1.1904473102587266e-05, "loss": 0.7238, "step": 18157 }, { "epoch": 3.356439323545809, "grad_norm": 0.9302890300750732, "learning_rate": 1.1897560909226746e-05, "loss": 0.9865, "step": 18158 }, { "epoch": 3.3566251626091805, "grad_norm": 1.1713268756866455, "learning_rate": 1.1890650596261599e-05, "loss": 0.9072, "step": 18159 }, { "epoch": 3.3568110016725514, "grad_norm": 0.9015994071960449, "learning_rate": 1.1883742163839307e-05, "loss": 0.7619, "step": 18160 }, { "epoch": 3.3569968407359227, "grad_norm": 1.0113537311553955, "learning_rate": 1.1876835612107306e-05, "loss": 0.7482, "step": 18161 }, { "epoch": 3.3571826797992936, "grad_norm": 0.8868255615234375, "learning_rate": 1.1869930941213048e-05, "loss": 0.9041, "step": 18162 }, { "epoch": 3.357368518862665, "grad_norm": 0.7899235486984253, "learning_rate": 1.1863028151303879e-05, "loss": 0.8324, "step": 18163 }, { "epoch": 3.357554357926036, "grad_norm": 1.072371006011963, "learning_rate": 1.1856127242527094e-05, "loss": 0.9066, "step": 18164 }, { "epoch": 3.357740196989407, "grad_norm": 0.8462903499603271, "learning_rate": 1.1849228215030039e-05, "loss": 0.5531, "step": 18165 }, { "epoch": 3.3579260360527785, "grad_norm": 0.9083438515663147, "learning_rate": 1.1842331068959922e-05, "loss": 0.813, "step": 18166 }, { "epoch": 3.3581118751161494, "grad_norm": 0.9696852564811707, "learning_rate": 1.1835435804463967e-05, "loss": 0.7097, "step": 18167 }, { "epoch": 3.3582977141795207, "grad_norm": 0.8321973085403442, "learning_rate": 1.1828542421689348e-05, "loss": 0.5627, "step": 18168 }, { "epoch": 3.3584835532428916, "grad_norm": 1.0764424800872803, "learning_rate": 1.1821650920783166e-05, "loss": 0.8361, "step": 18169 }, { "epoch": 3.358669392306263, "grad_norm": 0.9155271649360657, "learning_rate": 1.1814761301892542e-05, "loss": 0.8987, "step": 18170 }, { "epoch": 3.358855231369634, "grad_norm": 
0.9456757307052612, "learning_rate": 1.1807873565164506e-05, "loss": 0.7765, "step": 18171 }, { "epoch": 3.359041070433005, "grad_norm": 0.9458122849464417, "learning_rate": 1.1800987710746048e-05, "loss": 0.6491, "step": 18172 }, { "epoch": 3.359226909496376, "grad_norm": 0.9681454300880432, "learning_rate": 1.1794103738784169e-05, "loss": 0.8975, "step": 18173 }, { "epoch": 3.3594127485597474, "grad_norm": 1.0836167335510254, "learning_rate": 1.1787221649425772e-05, "loss": 0.9195, "step": 18174 }, { "epoch": 3.3595985876231182, "grad_norm": 0.8586798310279846, "learning_rate": 1.1780341442817754e-05, "loss": 0.7228, "step": 18175 }, { "epoch": 3.3597844266864896, "grad_norm": 0.9736157655715942, "learning_rate": 1.1773463119106943e-05, "loss": 0.7443, "step": 18176 }, { "epoch": 3.3599702657498605, "grad_norm": 0.9830484390258789, "learning_rate": 1.176658667844015e-05, "loss": 0.8453, "step": 18177 }, { "epoch": 3.360156104813232, "grad_norm": 0.8664001226425171, "learning_rate": 1.1759712120964184e-05, "loss": 0.6916, "step": 18178 }, { "epoch": 3.3603419438766027, "grad_norm": 0.768081784248352, "learning_rate": 1.1752839446825725e-05, "loss": 0.7665, "step": 18179 }, { "epoch": 3.360527782939974, "grad_norm": 0.8800269961357117, "learning_rate": 1.1745968656171469e-05, "loss": 0.5951, "step": 18180 }, { "epoch": 3.3607136220033453, "grad_norm": 0.9124451875686646, "learning_rate": 1.173909974914804e-05, "loss": 0.785, "step": 18181 }, { "epoch": 3.360899461066716, "grad_norm": 0.9192720055580139, "learning_rate": 1.1732232725902093e-05, "loss": 0.7034, "step": 18182 }, { "epoch": 3.361085300130087, "grad_norm": 1.033612608909607, "learning_rate": 1.1725367586580161e-05, "loss": 0.9732, "step": 18183 }, { "epoch": 3.3612711391934584, "grad_norm": 1.0083500146865845, "learning_rate": 1.171850433132875e-05, "loss": 0.7537, "step": 18184 }, { "epoch": 3.3614569782568298, "grad_norm": 0.8668590784072876, "learning_rate": 1.171164296029439e-05, "loss": 0.823, 
"step": 18185 }, { "epoch": 3.3616428173202006, "grad_norm": 0.9430564641952515, "learning_rate": 1.1704783473623504e-05, "loss": 0.8356, "step": 18186 }, { "epoch": 3.361828656383572, "grad_norm": 1.1226727962493896, "learning_rate": 1.1697925871462467e-05, "loss": 0.6784, "step": 18187 }, { "epoch": 3.362014495446943, "grad_norm": 1.08354651927948, "learning_rate": 1.1691070153957695e-05, "loss": 0.724, "step": 18188 }, { "epoch": 3.362200334510314, "grad_norm": 0.9508259892463684, "learning_rate": 1.168421632125547e-05, "loss": 0.8168, "step": 18189 }, { "epoch": 3.362386173573685, "grad_norm": 1.0835859775543213, "learning_rate": 1.1677364373502108e-05, "loss": 0.8825, "step": 18190 }, { "epoch": 3.3625720126370564, "grad_norm": 1.0884180068969727, "learning_rate": 1.1670514310843839e-05, "loss": 0.9111, "step": 18191 }, { "epoch": 3.3627578517004273, "grad_norm": 0.9678394794464111, "learning_rate": 1.1663666133426832e-05, "loss": 0.74, "step": 18192 }, { "epoch": 3.3629436907637986, "grad_norm": 0.9171245098114014, "learning_rate": 1.1656819841397315e-05, "loss": 0.6997, "step": 18193 }, { "epoch": 3.3631295298271695, "grad_norm": 0.9149906635284424, "learning_rate": 1.164997543490136e-05, "loss": 0.7581, "step": 18194 }, { "epoch": 3.363315368890541, "grad_norm": 1.517656922340393, "learning_rate": 1.1643132914085076e-05, "loss": 1.0754, "step": 18195 }, { "epoch": 3.3635012079539117, "grad_norm": 0.8923678994178772, "learning_rate": 1.1636292279094473e-05, "loss": 0.7914, "step": 18196 }, { "epoch": 3.363687047017283, "grad_norm": 0.9691092371940613, "learning_rate": 1.1629453530075584e-05, "loss": 1.1266, "step": 18197 }, { "epoch": 3.363872886080654, "grad_norm": 0.9492999911308289, "learning_rate": 1.1622616667174346e-05, "loss": 0.914, "step": 18198 }, { "epoch": 3.3640587251440253, "grad_norm": 0.9340925812721252, "learning_rate": 1.161578169053672e-05, "loss": 0.7787, "step": 18199 }, { "epoch": 3.3642445642073966, "grad_norm": 0.809636652469635, 
"learning_rate": 1.1608948600308545e-05, "loss": 0.7348, "step": 18200 }, { "epoch": 3.3644304032707675, "grad_norm": 0.9379822015762329, "learning_rate": 1.160211739663567e-05, "loss": 0.8477, "step": 18201 }, { "epoch": 3.364616242334139, "grad_norm": 1.0020337104797363, "learning_rate": 1.1595288079663912e-05, "loss": 0.8806, "step": 18202 }, { "epoch": 3.3648020813975097, "grad_norm": 0.783037006855011, "learning_rate": 1.1588460649539035e-05, "loss": 0.7428, "step": 18203 }, { "epoch": 3.364987920460881, "grad_norm": 0.8872162103652954, "learning_rate": 1.1581635106406718e-05, "loss": 0.8652, "step": 18204 }, { "epoch": 3.365173759524252, "grad_norm": 0.9861376881599426, "learning_rate": 1.1574811450412682e-05, "loss": 0.7183, "step": 18205 }, { "epoch": 3.3653595985876232, "grad_norm": 0.9283874034881592, "learning_rate": 1.1567989681702563e-05, "loss": 0.8004, "step": 18206 }, { "epoch": 3.365545437650994, "grad_norm": 0.8397024273872375, "learning_rate": 1.1561169800421944e-05, "loss": 0.8229, "step": 18207 }, { "epoch": 3.3657312767143654, "grad_norm": 0.9359708428382874, "learning_rate": 1.1554351806716358e-05, "loss": 0.7928, "step": 18208 }, { "epoch": 3.3659171157777363, "grad_norm": 1.1154730319976807, "learning_rate": 1.1547535700731372e-05, "loss": 0.7391, "step": 18209 }, { "epoch": 3.3661029548411077, "grad_norm": 0.8465034365653992, "learning_rate": 1.1540721482612448e-05, "loss": 0.7974, "step": 18210 }, { "epoch": 3.3662887939044785, "grad_norm": 0.9470143914222717, "learning_rate": 1.153390915250504e-05, "loss": 0.9165, "step": 18211 }, { "epoch": 3.36647463296785, "grad_norm": 0.9602369666099548, "learning_rate": 1.1527098710554496e-05, "loss": 0.8292, "step": 18212 }, { "epoch": 3.3666604720312208, "grad_norm": 0.9672537446022034, "learning_rate": 1.1520290156906221e-05, "loss": 0.7527, "step": 18213 }, { "epoch": 3.366846311094592, "grad_norm": 1.0739892721176147, "learning_rate": 1.1513483491705524e-05, "loss": 0.8147, "step": 18214 }, { 
"epoch": 3.3670321501579634, "grad_norm": 1.022194504737854, "learning_rate": 1.1506678715097673e-05, "loss": 0.7284, "step": 18215 }, { "epoch": 3.3672179892213343, "grad_norm": 1.012466549873352, "learning_rate": 1.149987582722788e-05, "loss": 0.9523, "step": 18216 }, { "epoch": 3.3674038282847056, "grad_norm": 0.8380947113037109, "learning_rate": 1.149307482824138e-05, "loss": 0.4953, "step": 18217 }, { "epoch": 3.3675896673480765, "grad_norm": 0.9516050815582275, "learning_rate": 1.1486275718283323e-05, "loss": 0.9402, "step": 18218 }, { "epoch": 3.367775506411448, "grad_norm": 0.9570687413215637, "learning_rate": 1.1479478497498797e-05, "loss": 0.7767, "step": 18219 }, { "epoch": 3.3679613454748187, "grad_norm": 1.1169215440750122, "learning_rate": 1.1472683166032904e-05, "loss": 0.9841, "step": 18220 }, { "epoch": 3.36814718453819, "grad_norm": 0.8415247797966003, "learning_rate": 1.1465889724030654e-05, "loss": 0.8199, "step": 18221 }, { "epoch": 3.368333023601561, "grad_norm": 0.9767362475395203, "learning_rate": 1.1459098171637084e-05, "loss": 0.6967, "step": 18222 }, { "epoch": 3.3685188626649323, "grad_norm": 1.0055066347122192, "learning_rate": 1.1452308508997111e-05, "loss": 0.9089, "step": 18223 }, { "epoch": 3.368704701728303, "grad_norm": 1.0055992603302002, "learning_rate": 1.1445520736255644e-05, "loss": 0.9448, "step": 18224 }, { "epoch": 3.3688905407916745, "grad_norm": 0.8618344068527222, "learning_rate": 1.1438734853557597e-05, "loss": 0.7672, "step": 18225 }, { "epoch": 3.3690763798550454, "grad_norm": 0.911933958530426, "learning_rate": 1.143195086104777e-05, "loss": 0.8706, "step": 18226 }, { "epoch": 3.3692622189184167, "grad_norm": 1.140630841255188, "learning_rate": 1.1425168758870964e-05, "loss": 0.9779, "step": 18227 }, { "epoch": 3.3694480579817876, "grad_norm": 1.1386034488677979, "learning_rate": 1.141838854717191e-05, "loss": 0.9669, "step": 18228 }, { "epoch": 3.369633897045159, "grad_norm": 0.9301897287368774, "learning_rate": 
1.1411610226095349e-05, "loss": 0.641, "step": 18229 }, { "epoch": 3.3698197361085303, "grad_norm": 0.8807746767997742, "learning_rate": 1.1404833795785951e-05, "loss": 0.6094, "step": 18230 }, { "epoch": 3.370005575171901, "grad_norm": 0.8215765357017517, "learning_rate": 1.139805925638835e-05, "loss": 0.8526, "step": 18231 }, { "epoch": 3.370191414235272, "grad_norm": 0.923136293888092, "learning_rate": 1.1391286608047103e-05, "loss": 0.6587, "step": 18232 }, { "epoch": 3.3703772532986433, "grad_norm": 0.9220919013023376, "learning_rate": 1.1384515850906807e-05, "loss": 0.9753, "step": 18233 }, { "epoch": 3.3705630923620147, "grad_norm": 0.8931311964988708, "learning_rate": 1.1377746985111947e-05, "loss": 0.8369, "step": 18234 }, { "epoch": 3.3707489314253856, "grad_norm": 0.9474433660507202, "learning_rate": 1.1370980010806997e-05, "loss": 0.8126, "step": 18235 }, { "epoch": 3.370934770488757, "grad_norm": 0.9365830421447754, "learning_rate": 1.1364214928136362e-05, "loss": 0.8838, "step": 18236 }, { "epoch": 3.371120609552128, "grad_norm": 0.9512014389038086, "learning_rate": 1.1357451737244474e-05, "loss": 0.7424, "step": 18237 }, { "epoch": 3.371306448615499, "grad_norm": 1.041359782218933, "learning_rate": 1.135069043827567e-05, "loss": 0.6137, "step": 18238 }, { "epoch": 3.37149228767887, "grad_norm": 1.0264954566955566, "learning_rate": 1.1343931031374244e-05, "loss": 0.8591, "step": 18239 }, { "epoch": 3.3716781267422413, "grad_norm": 0.995843231678009, "learning_rate": 1.1337173516684451e-05, "loss": 0.9076, "step": 18240 }, { "epoch": 3.371863965805612, "grad_norm": 0.7283825874328613, "learning_rate": 1.1330417894350531e-05, "loss": 0.598, "step": 18241 }, { "epoch": 3.3720498048689835, "grad_norm": 0.9453611969947815, "learning_rate": 1.1323664164516712e-05, "loss": 0.8658, "step": 18242 }, { "epoch": 3.3722356439323544, "grad_norm": 0.9562330842018127, "learning_rate": 1.13169123273271e-05, "loss": 0.8286, "step": 18243 }, { "epoch": 
3.3724214829957257, "grad_norm": 0.9837996363639832, "learning_rate": 1.1310162382925782e-05, "loss": 0.8294, "step": 18244 }, { "epoch": 3.3726073220590966, "grad_norm": 0.8887550830841064, "learning_rate": 1.1303414331456875e-05, "loss": 0.6426, "step": 18245 }, { "epoch": 3.372793161122468, "grad_norm": 1.0126186609268188, "learning_rate": 1.1296668173064385e-05, "loss": 0.6124, "step": 18246 }, { "epoch": 3.372979000185839, "grad_norm": 1.0040444135665894, "learning_rate": 1.1289923907892286e-05, "loss": 0.8762, "step": 18247 }, { "epoch": 3.37316483924921, "grad_norm": 1.069229245185852, "learning_rate": 1.1283181536084508e-05, "loss": 0.7736, "step": 18248 }, { "epoch": 3.3733506783125815, "grad_norm": 0.7893381118774414, "learning_rate": 1.1276441057785003e-05, "loss": 0.654, "step": 18249 }, { "epoch": 3.3735365173759524, "grad_norm": 1.025433897972107, "learning_rate": 1.1269702473137578e-05, "loss": 0.8712, "step": 18250 }, { "epoch": 3.3737223564393237, "grad_norm": 1.0625967979431152, "learning_rate": 1.1262965782286116e-05, "loss": 0.794, "step": 18251 }, { "epoch": 3.3739081955026946, "grad_norm": 0.9125096201896667, "learning_rate": 1.1256230985374339e-05, "loss": 0.6924, "step": 18252 }, { "epoch": 3.374094034566066, "grad_norm": 1.0169774293899536, "learning_rate": 1.124949808254605e-05, "loss": 0.783, "step": 18253 }, { "epoch": 3.374279873629437, "grad_norm": 0.9269421696662903, "learning_rate": 1.1242767073944926e-05, "loss": 0.9538, "step": 18254 }, { "epoch": 3.374465712692808, "grad_norm": 0.9135279655456543, "learning_rate": 1.1236037959714618e-05, "loss": 0.8072, "step": 18255 }, { "epoch": 3.374651551756179, "grad_norm": 0.8569638133049011, "learning_rate": 1.1229310739998733e-05, "loss": 0.7048, "step": 18256 }, { "epoch": 3.3748373908195504, "grad_norm": 1.1444765329360962, "learning_rate": 1.1222585414940901e-05, "loss": 0.815, "step": 18257 }, { "epoch": 3.3750232298829212, "grad_norm": 1.0449762344360352, "learning_rate": 
1.1215861984684628e-05, "loss": 0.6666, "step": 18258 }, { "epoch": 3.3752090689462926, "grad_norm": 0.8725976943969727, "learning_rate": 1.1209140449373423e-05, "loss": 0.7466, "step": 18259 }, { "epoch": 3.3753949080096635, "grad_norm": 0.9362976551055908, "learning_rate": 1.1202420809150726e-05, "loss": 0.7889, "step": 18260 }, { "epoch": 3.375580747073035, "grad_norm": 0.8548526167869568, "learning_rate": 1.119570306415998e-05, "loss": 0.7122, "step": 18261 }, { "epoch": 3.3757665861364057, "grad_norm": 1.083701252937317, "learning_rate": 1.1188987214544565e-05, "loss": 0.8872, "step": 18262 }, { "epoch": 3.375952425199777, "grad_norm": 0.9338276982307434, "learning_rate": 1.1182273260447817e-05, "loss": 0.8801, "step": 18263 }, { "epoch": 3.3761382642631483, "grad_norm": 0.8710282444953918, "learning_rate": 1.1175561202013018e-05, "loss": 0.6658, "step": 18264 }, { "epoch": 3.376324103326519, "grad_norm": 0.90129154920578, "learning_rate": 1.1168851039383444e-05, "loss": 0.7791, "step": 18265 }, { "epoch": 3.37650994238989, "grad_norm": 1.1231645345687866, "learning_rate": 1.116214277270231e-05, "loss": 0.9588, "step": 18266 }, { "epoch": 3.3766957814532614, "grad_norm": 0.8895449042320251, "learning_rate": 1.1155436402112785e-05, "loss": 0.9344, "step": 18267 }, { "epoch": 3.3768816205166328, "grad_norm": 1.054827094078064, "learning_rate": 1.1148731927757983e-05, "loss": 0.8257, "step": 18268 }, { "epoch": 3.3770674595800036, "grad_norm": 0.7896285057067871, "learning_rate": 1.1142029349781046e-05, "loss": 0.7109, "step": 18269 }, { "epoch": 3.377253298643375, "grad_norm": 0.9272920489311218, "learning_rate": 1.1135328668325018e-05, "loss": 0.8157, "step": 18270 }, { "epoch": 3.377439137706746, "grad_norm": 0.9161113500595093, "learning_rate": 1.112862988353287e-05, "loss": 1.0153, "step": 18271 }, { "epoch": 3.377624976770117, "grad_norm": 0.7954372763633728, "learning_rate": 1.1121932995547601e-05, "loss": 0.7849, "step": 18272 }, { "epoch": 
3.377810815833488, "grad_norm": 0.9211541414260864, "learning_rate": 1.1115238004512186e-05, "loss": 0.887, "step": 18273 }, { "epoch": 3.3779966548968594, "grad_norm": 0.8511643409729004, "learning_rate": 1.1108544910569474e-05, "loss": 0.9017, "step": 18274 }, { "epoch": 3.3781824939602303, "grad_norm": 1.4660279750823975, "learning_rate": 1.1101853713862332e-05, "loss": 1.1466, "step": 18275 }, { "epoch": 3.3783683330236016, "grad_norm": 0.8548551201820374, "learning_rate": 1.1095164414533543e-05, "loss": 0.9776, "step": 18276 }, { "epoch": 3.3785541720869725, "grad_norm": 0.8459223508834839, "learning_rate": 1.1088477012725917e-05, "loss": 0.7552, "step": 18277 }, { "epoch": 3.378740011150344, "grad_norm": 1.1502907276153564, "learning_rate": 1.1081791508582185e-05, "loss": 0.8632, "step": 18278 }, { "epoch": 3.378925850213715, "grad_norm": 0.996711790561676, "learning_rate": 1.1075107902245019e-05, "loss": 0.9776, "step": 18279 }, { "epoch": 3.379111689277086, "grad_norm": 0.917891800403595, "learning_rate": 1.1068426193857051e-05, "loss": 0.807, "step": 18280 }, { "epoch": 3.379297528340457, "grad_norm": 0.8591600060462952, "learning_rate": 1.1061746383560934e-05, "loss": 0.9063, "step": 18281 }, { "epoch": 3.3794833674038283, "grad_norm": 0.9120370745658875, "learning_rate": 1.1055068471499197e-05, "loss": 0.8746, "step": 18282 }, { "epoch": 3.3796692064671996, "grad_norm": 0.9349958896636963, "learning_rate": 1.1048392457814405e-05, "loss": 0.4698, "step": 18283 }, { "epoch": 3.3798550455305705, "grad_norm": 1.1120960712432861, "learning_rate": 1.104171834264901e-05, "loss": 0.874, "step": 18284 }, { "epoch": 3.380040884593942, "grad_norm": 0.9761961698532104, "learning_rate": 1.1035046126145499e-05, "loss": 0.7462, "step": 18285 }, { "epoch": 3.3802267236573127, "grad_norm": 1.0015671253204346, "learning_rate": 1.1028375808446256e-05, "loss": 0.8401, "step": 18286 }, { "epoch": 3.380412562720684, "grad_norm": 0.8225238919258118, "learning_rate": 
1.102170738969366e-05, "loss": 0.743, "step": 18287 }, { "epoch": 3.380598401784055, "grad_norm": 1.0311946868896484, "learning_rate": 1.1015040870029992e-05, "loss": 0.8776, "step": 18288 }, { "epoch": 3.3807842408474262, "grad_norm": 1.012428641319275, "learning_rate": 1.1008376249597596e-05, "loss": 0.8297, "step": 18289 }, { "epoch": 3.380970079910797, "grad_norm": 1.0129367113113403, "learning_rate": 1.1001713528538704e-05, "loss": 1.0106, "step": 18290 }, { "epoch": 3.3811559189741685, "grad_norm": 0.9160915613174438, "learning_rate": 1.0995052706995502e-05, "loss": 0.6437, "step": 18291 }, { "epoch": 3.3813417580375393, "grad_norm": 0.973933756351471, "learning_rate": 1.0988393785110118e-05, "loss": 0.885, "step": 18292 }, { "epoch": 3.3815275971009107, "grad_norm": 0.8929466605186462, "learning_rate": 1.0981736763024764e-05, "loss": 0.7792, "step": 18293 }, { "epoch": 3.3817134361642815, "grad_norm": 1.1814781427383423, "learning_rate": 1.097508164088148e-05, "loss": 0.692, "step": 18294 }, { "epoch": 3.381899275227653, "grad_norm": 0.9687650203704834, "learning_rate": 1.0968428418822296e-05, "loss": 0.7951, "step": 18295 }, { "epoch": 3.3820851142910238, "grad_norm": 0.8729159235954285, "learning_rate": 1.0961777096989212e-05, "loss": 0.7703, "step": 18296 }, { "epoch": 3.382270953354395, "grad_norm": 0.712313711643219, "learning_rate": 1.0955127675524214e-05, "loss": 0.7491, "step": 18297 }, { "epoch": 3.3824567924177664, "grad_norm": 0.8763603568077087, "learning_rate": 1.0948480154569207e-05, "loss": 0.7241, "step": 18298 }, { "epoch": 3.3826426314811373, "grad_norm": 0.9360529780387878, "learning_rate": 1.0941834534266083e-05, "loss": 0.7995, "step": 18299 }, { "epoch": 3.3828284705445086, "grad_norm": 1.0026065111160278, "learning_rate": 1.0935190814756646e-05, "loss": 0.8343, "step": 18300 }, { "epoch": 3.3830143096078795, "grad_norm": 0.8706707954406738, "learning_rate": 1.092854899618273e-05, "loss": 0.6912, "step": 18301 }, { "epoch": 
3.383200148671251, "grad_norm": 0.9272733330726624, "learning_rate": 1.0921909078686099e-05, "loss": 0.9772, "step": 18302 }, { "epoch": 3.3833859877346217, "grad_norm": 0.9085229635238647, "learning_rate": 1.0915271062408428e-05, "loss": 0.8226, "step": 18303 }, { "epoch": 3.383571826797993, "grad_norm": 0.8816424012184143, "learning_rate": 1.0908634947491415e-05, "loss": 0.813, "step": 18304 }, { "epoch": 3.383757665861364, "grad_norm": 0.9400708079338074, "learning_rate": 1.0902000734076723e-05, "loss": 0.7601, "step": 18305 }, { "epoch": 3.3839435049247353, "grad_norm": 0.9915024638175964, "learning_rate": 1.089536842230594e-05, "loss": 0.7915, "step": 18306 }, { "epoch": 3.384129343988106, "grad_norm": 0.8487621545791626, "learning_rate": 1.0888738012320598e-05, "loss": 0.7124, "step": 18307 }, { "epoch": 3.3843151830514775, "grad_norm": 0.9537492394447327, "learning_rate": 1.0882109504262195e-05, "loss": 0.5165, "step": 18308 }, { "epoch": 3.3845010221148484, "grad_norm": 0.9375237822532654, "learning_rate": 1.0875482898272261e-05, "loss": 0.7454, "step": 18309 }, { "epoch": 3.3846868611782197, "grad_norm": 0.8832110166549683, "learning_rate": 1.0868858194492204e-05, "loss": 0.679, "step": 18310 }, { "epoch": 3.3848727002415906, "grad_norm": 0.9369748830795288, "learning_rate": 1.0862235393063413e-05, "loss": 0.7653, "step": 18311 }, { "epoch": 3.385058539304962, "grad_norm": 1.932416558265686, "learning_rate": 1.085561449412722e-05, "loss": 1.2103, "step": 18312 }, { "epoch": 3.3852443783683333, "grad_norm": 0.9251443147659302, "learning_rate": 1.0848995497824955e-05, "loss": 0.8801, "step": 18313 }, { "epoch": 3.385430217431704, "grad_norm": 1.2950562238693237, "learning_rate": 1.0842378404297904e-05, "loss": 0.8168, "step": 18314 }, { "epoch": 3.385616056495075, "grad_norm": 0.8824008703231812, "learning_rate": 1.08357632136873e-05, "loss": 0.7471, "step": 18315 }, { "epoch": 3.3858018955584464, "grad_norm": 1.043350100517273, "learning_rate": 
1.0829149926134285e-05, "loss": 1.1583, "step": 18316 }, { "epoch": 3.3859877346218177, "grad_norm": 0.9486417770385742, "learning_rate": 1.082253854178007e-05, "loss": 0.7595, "step": 18317 }, { "epoch": 3.3861735736851886, "grad_norm": 0.7901325225830078, "learning_rate": 1.0815929060765739e-05, "loss": 0.7761, "step": 18318 }, { "epoch": 3.38635941274856, "grad_norm": 0.8888583779335022, "learning_rate": 1.0809321483232348e-05, "loss": 0.7806, "step": 18319 }, { "epoch": 3.386545251811931, "grad_norm": 0.9182053208351135, "learning_rate": 1.0802715809320918e-05, "loss": 0.9955, "step": 18320 }, { "epoch": 3.386731090875302, "grad_norm": 1.0127581357955933, "learning_rate": 1.0796112039172468e-05, "loss": 0.98, "step": 18321 }, { "epoch": 3.386916929938673, "grad_norm": 1.1698704957962036, "learning_rate": 1.0789510172927918e-05, "loss": 0.7972, "step": 18322 }, { "epoch": 3.3871027690020443, "grad_norm": 0.9562602043151855, "learning_rate": 1.078291021072817e-05, "loss": 0.8788, "step": 18323 }, { "epoch": 3.387288608065415, "grad_norm": 0.9870921969413757, "learning_rate": 1.0776312152714108e-05, "loss": 0.8564, "step": 18324 }, { "epoch": 3.3874744471287865, "grad_norm": 0.949755847454071, "learning_rate": 1.0769715999026564e-05, "loss": 0.7915, "step": 18325 }, { "epoch": 3.3876602861921574, "grad_norm": 0.9226321578025818, "learning_rate": 1.0763121749806304e-05, "loss": 0.8564, "step": 18326 }, { "epoch": 3.3878461252555288, "grad_norm": 1.2454584836959839, "learning_rate": 1.075652940519407e-05, "loss": 0.8097, "step": 18327 }, { "epoch": 3.3880319643189, "grad_norm": 0.9788869023323059, "learning_rate": 1.0749938965330552e-05, "loss": 0.9494, "step": 18328 }, { "epoch": 3.388217803382271, "grad_norm": 0.7627193927764893, "learning_rate": 1.0743350430356447e-05, "loss": 0.6081, "step": 18329 }, { "epoch": 3.388403642445642, "grad_norm": 1.7664850950241089, "learning_rate": 1.0736763800412365e-05, "loss": 0.8987, "step": 18330 }, { "epoch": 
3.388589481509013, "grad_norm": 0.9788818955421448, "learning_rate": 1.0730179075638868e-05, "loss": 0.7628, "step": 18331 }, { "epoch": 3.3887753205723845, "grad_norm": 0.9811448454856873, "learning_rate": 1.0723596256176483e-05, "loss": 0.8474, "step": 18332 }, { "epoch": 3.3889611596357554, "grad_norm": 0.8933404684066772, "learning_rate": 1.071701534216576e-05, "loss": 0.8238, "step": 18333 }, { "epoch": 3.3891469986991267, "grad_norm": 0.9329827427864075, "learning_rate": 1.0710436333747109e-05, "loss": 0.841, "step": 18334 }, { "epoch": 3.3893328377624976, "grad_norm": 0.7232084274291992, "learning_rate": 1.0703859231060975e-05, "loss": 0.6748, "step": 18335 }, { "epoch": 3.389518676825869, "grad_norm": 1.0448474884033203, "learning_rate": 1.0697284034247724e-05, "loss": 0.8262, "step": 18336 }, { "epoch": 3.38970451588924, "grad_norm": 0.8937177062034607, "learning_rate": 1.0690710743447707e-05, "loss": 0.9501, "step": 18337 }, { "epoch": 3.389890354952611, "grad_norm": 0.8887295126914978, "learning_rate": 1.0684139358801205e-05, "loss": 0.7002, "step": 18338 }, { "epoch": 3.390076194015982, "grad_norm": 0.675439178943634, "learning_rate": 1.067756988044848e-05, "loss": 0.4412, "step": 18339 }, { "epoch": 3.3902620330793534, "grad_norm": null, "learning_rate": 1.067756988044848e-05, "loss": 0.8879, "step": 18340 }, { "epoch": 3.3904478721427243, "grad_norm": 0.8766264319419861, "learning_rate": 1.067100230852972e-05, "loss": 0.787, "step": 18341 }, { "epoch": 3.3906337112060956, "grad_norm": 0.9330973029136658, "learning_rate": 1.0664436643185149e-05, "loss": 0.7302, "step": 18342 }, { "epoch": 3.3908195502694665, "grad_norm": 0.7809070348739624, "learning_rate": 1.0657872884554865e-05, "loss": 0.584, "step": 18343 }, { "epoch": 3.391005389332838, "grad_norm": 0.8240885138511658, "learning_rate": 1.0651311032778955e-05, "loss": 0.7813, "step": 18344 }, { "epoch": 3.3911912283962087, "grad_norm": 1.007537603378296, "learning_rate": 1.0644751087997495e-05, 
"loss": 0.9514, "step": 18345 }, { "epoch": 3.39137706745958, "grad_norm": 0.9993891716003418, "learning_rate": 1.0638193050350464e-05, "loss": 0.8986, "step": 18346 }, { "epoch": 3.3915629065229513, "grad_norm": 0.8804618120193481, "learning_rate": 1.0631636919977871e-05, "loss": 0.9539, "step": 18347 }, { "epoch": 3.3917487455863222, "grad_norm": 1.138956069946289, "learning_rate": 1.0625082697019628e-05, "loss": 0.9374, "step": 18348 }, { "epoch": 3.3919345846496936, "grad_norm": 1.0949984788894653, "learning_rate": 1.0618530381615599e-05, "loss": 0.8375, "step": 18349 }, { "epoch": 3.3921204237130644, "grad_norm": 0.9565363526344299, "learning_rate": 1.0611979973905672e-05, "loss": 0.7164, "step": 18350 }, { "epoch": 3.3923062627764358, "grad_norm": 1.3193212747573853, "learning_rate": 1.0605431474029637e-05, "loss": 0.8225, "step": 18351 }, { "epoch": 3.3924921018398067, "grad_norm": 0.884672224521637, "learning_rate": 1.0598884882127258e-05, "loss": 0.6619, "step": 18352 }, { "epoch": 3.392677940903178, "grad_norm": 1.015413522720337, "learning_rate": 1.0592340198338258e-05, "loss": 0.8915, "step": 18353 }, { "epoch": 3.392863779966549, "grad_norm": 0.8938660025596619, "learning_rate": 1.0585797422802335e-05, "loss": 0.8037, "step": 18354 }, { "epoch": 3.39304961902992, "grad_norm": 0.8355761170387268, "learning_rate": 1.0579256555659123e-05, "loss": 0.4946, "step": 18355 }, { "epoch": 3.393235458093291, "grad_norm": 0.8579012155532837, "learning_rate": 1.057271759704821e-05, "loss": 0.8542, "step": 18356 }, { "epoch": 3.3934212971566624, "grad_norm": 1.0483804941177368, "learning_rate": 1.0566180547109173e-05, "loss": 0.8588, "step": 18357 }, { "epoch": 3.3936071362200333, "grad_norm": 0.9812692403793335, "learning_rate": 1.0559645405981566e-05, "loss": 0.8812, "step": 18358 }, { "epoch": 3.3937929752834046, "grad_norm": 1.053178071975708, "learning_rate": 1.0553112173804835e-05, "loss": 0.9585, "step": 18359 }, { "epoch": 3.3939788143467755, "grad_norm": 
0.8852431178092957, "learning_rate": 1.0546580850718435e-05, "loss": 0.9329, "step": 18360 }, { "epoch": 3.394164653410147, "grad_norm": 1.1948872804641724, "learning_rate": 1.054005143686173e-05, "loss": 0.9735, "step": 18361 }, { "epoch": 3.394350492473518, "grad_norm": 1.1322181224822998, "learning_rate": 1.0533523932374134e-05, "loss": 0.8999, "step": 18362 }, { "epoch": 3.394536331536889, "grad_norm": 0.8780480027198792, "learning_rate": 1.0526998337394933e-05, "loss": 0.9141, "step": 18363 }, { "epoch": 3.39472217060026, "grad_norm": 0.8715663552284241, "learning_rate": 1.0520474652063394e-05, "loss": 0.6362, "step": 18364 }, { "epoch": 3.3949080096636313, "grad_norm": 0.8854778409004211, "learning_rate": 1.0513952876518796e-05, "loss": 0.9201, "step": 18365 }, { "epoch": 3.3950938487270026, "grad_norm": 0.9006483554840088, "learning_rate": 1.0507433010900303e-05, "loss": 0.6453, "step": 18366 }, { "epoch": 3.3952796877903735, "grad_norm": 1.0067654848098755, "learning_rate": 1.050091505534706e-05, "loss": 0.8112, "step": 18367 }, { "epoch": 3.395465526853745, "grad_norm": 1.026990294456482, "learning_rate": 1.0494399009998212e-05, "loss": 0.6235, "step": 18368 }, { "epoch": 3.3956513659171157, "grad_norm": 0.9172264933586121, "learning_rate": 1.0487884874992803e-05, "loss": 0.7043, "step": 18369 }, { "epoch": 3.395837204980487, "grad_norm": 0.9033785462379456, "learning_rate": 1.0481372650469912e-05, "loss": 0.665, "step": 18370 }, { "epoch": 3.396023044043858, "grad_norm": 0.8452712297439575, "learning_rate": 1.0474862336568492e-05, "loss": 0.7495, "step": 18371 }, { "epoch": 3.3962088831072292, "grad_norm": 0.9229526519775391, "learning_rate": 1.04683539334275e-05, "loss": 0.7547, "step": 18372 }, { "epoch": 3.3963947221706, "grad_norm": 1.1461782455444336, "learning_rate": 1.0461847441185834e-05, "loss": 1.07, "step": 18373 }, { "epoch": 3.3965805612339715, "grad_norm": 1.0893611907958984, "learning_rate": 1.0455342859982409e-05, "loss": 0.8133, "step": 
18374 }, { "epoch": 3.3967664002973423, "grad_norm": 1.0343410968780518, "learning_rate": 1.0448840189956022e-05, "loss": 0.9544, "step": 18375 }, { "epoch": 3.3969522393607137, "grad_norm": 0.784121036529541, "learning_rate": 1.0442339431245441e-05, "loss": 0.7844, "step": 18376 }, { "epoch": 3.3971380784240846, "grad_norm": 1.0187186002731323, "learning_rate": 1.0435840583989443e-05, "loss": 0.8634, "step": 18377 }, { "epoch": 3.397323917487456, "grad_norm": 0.8501676917076111, "learning_rate": 1.0429343648326739e-05, "loss": 0.7817, "step": 18378 }, { "epoch": 3.3975097565508268, "grad_norm": 0.8218252062797546, "learning_rate": 1.0422848624395998e-05, "loss": 0.7286, "step": 18379 }, { "epoch": 3.397695595614198, "grad_norm": 1.3224713802337646, "learning_rate": 1.041635551233583e-05, "loss": 1.24, "step": 18380 }, { "epoch": 3.3978814346775694, "grad_norm": 1.0980743169784546, "learning_rate": 1.04098643122848e-05, "loss": 0.7951, "step": 18381 }, { "epoch": 3.3980672737409403, "grad_norm": 1.0464022159576416, "learning_rate": 1.040337502438149e-05, "loss": 0.9525, "step": 18382 }, { "epoch": 3.3982531128043116, "grad_norm": 0.9397351741790771, "learning_rate": 1.0396887648764386e-05, "loss": 0.9203, "step": 18383 }, { "epoch": 3.3984389518676825, "grad_norm": 1.1105705499649048, "learning_rate": 1.0390402185571934e-05, "loss": 0.9589, "step": 18384 }, { "epoch": 3.398624790931054, "grad_norm": 1.1369887590408325, "learning_rate": 1.038391863494259e-05, "loss": 0.764, "step": 18385 }, { "epoch": 3.3988106299944247, "grad_norm": 1.0813019275665283, "learning_rate": 1.0377436997014711e-05, "loss": 0.8825, "step": 18386 }, { "epoch": 3.398996469057796, "grad_norm": 0.9052541255950928, "learning_rate": 1.0370957271926652e-05, "loss": 0.6729, "step": 18387 }, { "epoch": 3.399182308121167, "grad_norm": 1.7850995063781738, "learning_rate": 1.0364479459816667e-05, "loss": 1.1384, "step": 18388 }, { "epoch": 3.3993681471845383, "grad_norm": 0.8072879314422607, 
"learning_rate": 1.035800356082306e-05, "loss": 0.6948, "step": 18389 }, { "epoch": 3.399553986247909, "grad_norm": 0.8056290745735168, "learning_rate": 1.0351529575084051e-05, "loss": 0.4928, "step": 18390 }, { "epoch": 3.3997398253112805, "grad_norm": 0.8751406073570251, "learning_rate": 1.0345057502737798e-05, "loss": 0.732, "step": 18391 }, { "epoch": 3.3999256643746514, "grad_norm": 0.9247086048126221, "learning_rate": 1.0338587343922435e-05, "loss": 0.7336, "step": 18392 }, { "epoch": 3.4001115034380227, "grad_norm": 0.8679309487342834, "learning_rate": 1.033211909877605e-05, "loss": 0.7248, "step": 18393 }, { "epoch": 3.4002973425013936, "grad_norm": 0.9449750185012817, "learning_rate": 1.0325652767436711e-05, "loss": 1.1002, "step": 18394 }, { "epoch": 3.400483181564765, "grad_norm": 0.9762586951255798, "learning_rate": 1.0319188350042442e-05, "loss": 0.7576, "step": 18395 }, { "epoch": 3.4006690206281363, "grad_norm": 1.1907624006271362, "learning_rate": 1.0312725846731175e-05, "loss": 1.0439, "step": 18396 }, { "epoch": 3.400854859691507, "grad_norm": 0.8823482394218445, "learning_rate": 1.0306265257640879e-05, "loss": 0.851, "step": 18397 }, { "epoch": 3.4010406987548785, "grad_norm": 0.9875620603561401, "learning_rate": 1.029980658290941e-05, "loss": 0.8812, "step": 18398 }, { "epoch": 3.4012265378182494, "grad_norm": 1.1810065507888794, "learning_rate": 1.029334982267467e-05, "loss": 0.8582, "step": 18399 }, { "epoch": 3.4014123768816207, "grad_norm": 1.1070986986160278, "learning_rate": 1.0286894977074424e-05, "loss": 0.969, "step": 18400 }, { "epoch": 3.4015982159449916, "grad_norm": 0.8918006420135498, "learning_rate": 1.0280442046246441e-05, "loss": 0.7951, "step": 18401 }, { "epoch": 3.401784055008363, "grad_norm": 1.1689788103103638, "learning_rate": 1.0273991030328468e-05, "loss": 0.9026, "step": 18402 }, { "epoch": 3.401969894071734, "grad_norm": 0.9085296988487244, "learning_rate": 1.0267541929458179e-05, "loss": 0.817, "step": 18403 }, { 
"epoch": 3.402155733135105, "grad_norm": 0.7045767307281494, "learning_rate": 1.0261094743773203e-05, "loss": 0.5753, "step": 18404 }, { "epoch": 3.402341572198476, "grad_norm": 0.9169483184814453, "learning_rate": 1.0254649473411192e-05, "loss": 0.7561, "step": 18405 }, { "epoch": 3.4025274112618473, "grad_norm": 0.9103830456733704, "learning_rate": 1.0248206118509663e-05, "loss": 0.6749, "step": 18406 }, { "epoch": 3.402713250325218, "grad_norm": 1.016221523284912, "learning_rate": 1.0241764679206168e-05, "loss": 1.0526, "step": 18407 }, { "epoch": 3.4028990893885895, "grad_norm": 1.1982204914093018, "learning_rate": 1.0235325155638142e-05, "loss": 0.5418, "step": 18408 }, { "epoch": 3.4030849284519604, "grad_norm": 0.8669753074645996, "learning_rate": 1.0228887547943066e-05, "loss": 0.8356, "step": 18409 }, { "epoch": 3.4032707675153318, "grad_norm": 0.9314633011817932, "learning_rate": 1.0222451856258352e-05, "loss": 0.7528, "step": 18410 }, { "epoch": 3.403456606578703, "grad_norm": 0.9003772139549255, "learning_rate": 1.0216018080721335e-05, "loss": 0.6818, "step": 18411 }, { "epoch": 3.403642445642074, "grad_norm": 1.2028591632843018, "learning_rate": 1.0209586221469335e-05, "loss": 0.8116, "step": 18412 }, { "epoch": 3.403828284705445, "grad_norm": 0.9273260831832886, "learning_rate": 1.0203156278639625e-05, "loss": 0.5253, "step": 18413 }, { "epoch": 3.404014123768816, "grad_norm": 1.0306023359298706, "learning_rate": 1.0196728252369447e-05, "loss": 1.005, "step": 18414 }, { "epoch": 3.4041999628321875, "grad_norm": 1.029457688331604, "learning_rate": 1.0190302142796004e-05, "loss": 0.7985, "step": 18415 }, { "epoch": 3.4043858018955584, "grad_norm": 0.8798620700836182, "learning_rate": 1.0183877950056431e-05, "loss": 0.7899, "step": 18416 }, { "epoch": 3.4045716409589297, "grad_norm": 1.1171305179595947, "learning_rate": 1.0177455674287861e-05, "loss": 0.8113, "step": 18417 }, { "epoch": 3.4047574800223006, "grad_norm": 0.9819057583808899, 
"learning_rate": 1.0171035315627363e-05, "loss": 1.0677, "step": 18418 }, { "epoch": 3.404943319085672, "grad_norm": 0.8564400672912598, "learning_rate": 1.016461687421194e-05, "loss": 0.6969, "step": 18419 }, { "epoch": 3.405129158149043, "grad_norm": 0.968268871307373, "learning_rate": 1.0158200350178627e-05, "loss": 0.8084, "step": 18420 }, { "epoch": 3.405314997212414, "grad_norm": 0.8687854409217834, "learning_rate": 1.0151785743664333e-05, "loss": 0.8461, "step": 18421 }, { "epoch": 3.405500836275785, "grad_norm": 1.0560195446014404, "learning_rate": 1.0145373054806007e-05, "loss": 0.5994, "step": 18422 }, { "epoch": 3.4056866753391564, "grad_norm": 0.7632457613945007, "learning_rate": 1.0138962283740494e-05, "loss": 0.8112, "step": 18423 }, { "epoch": 3.4058725144025273, "grad_norm": 1.2036640644073486, "learning_rate": 1.0132553430604608e-05, "loss": 0.7384, "step": 18424 }, { "epoch": 3.4060583534658986, "grad_norm": 1.1761847734451294, "learning_rate": 1.012614649553517e-05, "loss": 0.8114, "step": 18425 }, { "epoch": 3.4062441925292695, "grad_norm": 1.1434109210968018, "learning_rate": 1.011974147866891e-05, "loss": 0.8595, "step": 18426 }, { "epoch": 3.406430031592641, "grad_norm": 0.9771609902381897, "learning_rate": 1.0113338380142513e-05, "loss": 0.9144, "step": 18427 }, { "epoch": 3.4066158706560117, "grad_norm": 0.854061484336853, "learning_rate": 1.0106937200092648e-05, "loss": 0.5983, "step": 18428 }, { "epoch": 3.406801709719383, "grad_norm": 0.9027602076530457, "learning_rate": 1.0100537938655963e-05, "loss": 0.8257, "step": 18429 }, { "epoch": 3.4069875487827543, "grad_norm": 0.917335569858551, "learning_rate": 1.0094140595969004e-05, "loss": 0.8791, "step": 18430 }, { "epoch": 3.4071733878461252, "grad_norm": 1.032515287399292, "learning_rate": 1.0087745172168338e-05, "loss": 0.7861, "step": 18431 }, { "epoch": 3.4073592269094966, "grad_norm": 0.8733752965927124, "learning_rate": 1.008135166739047e-05, "loss": 0.9026, "step": 18432 }, { 
"epoch": 3.4075450659728674, "grad_norm": 1.133104681968689, "learning_rate": 1.0074960081771822e-05, "loss": 0.981, "step": 18433 }, { "epoch": 3.4077309050362388, "grad_norm": 0.8792244791984558, "learning_rate": 1.0068570415448852e-05, "loss": 0.7714, "step": 18434 }, { "epoch": 3.4079167440996097, "grad_norm": 1.0705727338790894, "learning_rate": 1.0062182668557907e-05, "loss": 0.9903, "step": 18435 }, { "epoch": 3.408102583162981, "grad_norm": 1.041522741317749, "learning_rate": 1.0055796841235332e-05, "loss": 0.9209, "step": 18436 }, { "epoch": 3.408288422226352, "grad_norm": 0.8988785743713379, "learning_rate": 1.004941293361743e-05, "loss": 0.6943, "step": 18437 }, { "epoch": 3.408474261289723, "grad_norm": 0.9060993790626526, "learning_rate": 1.0043030945840447e-05, "loss": 0.7478, "step": 18438 }, { "epoch": 3.408660100353094, "grad_norm": 0.9467904567718506, "learning_rate": 1.0036650878040599e-05, "loss": 0.8402, "step": 18439 }, { "epoch": 3.4088459394164654, "grad_norm": 1.0410497188568115, "learning_rate": 1.003027273035404e-05, "loss": 0.9122, "step": 18440 }, { "epoch": 3.4090317784798363, "grad_norm": 0.9075198769569397, "learning_rate": 1.0023896502916908e-05, "loss": 0.9351, "step": 18441 }, { "epoch": 3.4092176175432076, "grad_norm": 0.8967174887657166, "learning_rate": 1.0017522195865326e-05, "loss": 0.8257, "step": 18442 }, { "epoch": 3.4094034566065785, "grad_norm": 0.8995972871780396, "learning_rate": 1.0011149809335318e-05, "loss": 0.646, "step": 18443 }, { "epoch": 3.40958929566995, "grad_norm": 0.8520565032958984, "learning_rate": 1.0004779343462867e-05, "loss": 0.7053, "step": 18444 }, { "epoch": 3.409775134733321, "grad_norm": 1.0125555992126465, "learning_rate": 9.998410798383984e-06, "loss": 0.8133, "step": 18445 }, { "epoch": 3.409960973796692, "grad_norm": 0.9866090416908264, "learning_rate": 9.992044174234561e-06, "loss": 0.9799, "step": 18446 }, { "epoch": 3.410146812860063, "grad_norm": 1.2013903856277466, "learning_rate": 
9.98567947115051e-06, "loss": 0.8826, "step": 18447 }, { "epoch": 3.4103326519234343, "grad_norm": 0.846396803855896, "learning_rate": 9.979316689267636e-06, "loss": 0.8524, "step": 18448 }, { "epoch": 3.4105184909868056, "grad_norm": 0.9855815768241882, "learning_rate": 9.972955828721787e-06, "loss": 0.7695, "step": 18449 }, { "epoch": 3.4107043300501765, "grad_norm": 0.931355357170105, "learning_rate": 9.966596889648706e-06, "loss": 0.7244, "step": 18450 }, { "epoch": 3.410890169113548, "grad_norm": 0.9406512379646301, "learning_rate": 9.960239872184085e-06, "loss": 0.8765, "step": 18451 }, { "epoch": 3.4110760081769187, "grad_norm": 0.9696619510650635, "learning_rate": 9.953884776463652e-06, "loss": 1.0083, "step": 18452 }, { "epoch": 3.41126184724029, "grad_norm": 1.0051896572113037, "learning_rate": 9.947531602622995e-06, "loss": 0.6334, "step": 18453 }, { "epoch": 3.411447686303661, "grad_norm": 1.5537279844284058, "learning_rate": 9.941180350797774e-06, "loss": 1.2872, "step": 18454 }, { "epoch": 3.4116335253670322, "grad_norm": 0.8657042980194092, "learning_rate": 9.934831021123502e-06, "loss": 0.808, "step": 18455 }, { "epoch": 3.411819364430403, "grad_norm": 0.9618478417396545, "learning_rate": 9.928483613735684e-06, "loss": 0.7162, "step": 18456 }, { "epoch": 3.4120052034937745, "grad_norm": 0.9999720454216003, "learning_rate": 9.922138128769831e-06, "loss": 0.9676, "step": 18457 }, { "epoch": 3.4121910425571453, "grad_norm": 0.8654031157493591, "learning_rate": 9.91579456636137e-06, "loss": 0.8429, "step": 18458 }, { "epoch": 3.4123768816205167, "grad_norm": 0.9619836807250977, "learning_rate": 9.90945292664568e-06, "loss": 0.5053, "step": 18459 }, { "epoch": 3.412562720683888, "grad_norm": 0.8627220988273621, "learning_rate": 9.903113209758096e-06, "loss": 0.7697, "step": 18460 }, { "epoch": 3.412748559747259, "grad_norm": 0.8880993127822876, "learning_rate": 9.896775415833947e-06, "loss": 0.5013, "step": 18461 }, { "epoch": 3.4129343988106298, 
"grad_norm": 0.9004693031311035, "learning_rate": 9.890439545008523e-06, "loss": 0.8842, "step": 18462 }, { "epoch": 3.413120237874001, "grad_norm": 1.0259151458740234, "learning_rate": 9.884105597417026e-06, "loss": 0.8893, "step": 18463 }, { "epoch": 3.4133060769373724, "grad_norm": 0.954877495765686, "learning_rate": 9.877773573194637e-06, "loss": 0.8357, "step": 18464 }, { "epoch": 3.4134919160007433, "grad_norm": 1.0130378007888794, "learning_rate": 9.871443472476538e-06, "loss": 0.8433, "step": 18465 }, { "epoch": 3.4136777550641146, "grad_norm": 0.9062729477882385, "learning_rate": 9.865115295397808e-06, "loss": 0.878, "step": 18466 }, { "epoch": 3.4138635941274855, "grad_norm": 0.9700537323951721, "learning_rate": 9.858789042093508e-06, "loss": 0.9453, "step": 18467 }, { "epoch": 3.414049433190857, "grad_norm": 1.3633286952972412, "learning_rate": 9.85246471269866e-06, "loss": 0.9084, "step": 18468 }, { "epoch": 3.4142352722542277, "grad_norm": 0.8684259653091431, "learning_rate": 9.84614230734826e-06, "loss": 0.7808, "step": 18469 }, { "epoch": 3.414421111317599, "grad_norm": 1.0092612504959106, "learning_rate": 9.839821826177254e-06, "loss": 0.7104, "step": 18470 }, { "epoch": 3.41460695038097, "grad_norm": 0.8173282146453857, "learning_rate": 9.83350326932052e-06, "loss": 0.7987, "step": 18471 }, { "epoch": 3.4147927894443413, "grad_norm": 1.0052686929702759, "learning_rate": 9.827186636912911e-06, "loss": 0.791, "step": 18472 }, { "epoch": 3.414978628507712, "grad_norm": 1.0594308376312256, "learning_rate": 9.820871929089271e-06, "loss": 0.8965, "step": 18473 }, { "epoch": 3.4151644675710835, "grad_norm": 1.091590166091919, "learning_rate": 9.81455914598437e-06, "loss": 0.9273, "step": 18474 }, { "epoch": 3.4153503066344544, "grad_norm": 1.0309139490127563, "learning_rate": 9.808248287732947e-06, "loss": 0.7162, "step": 18475 }, { "epoch": 3.4155361456978257, "grad_norm": 0.715144693851471, "learning_rate": 9.80193935446967e-06, "loss": 0.8441, "step": 
18476 }, { "epoch": 3.4157219847611966, "grad_norm": 1.0665366649627686, "learning_rate": 9.795632346329232e-06, "loss": 0.6637, "step": 18477 }, { "epoch": 3.415907823824568, "grad_norm": 0.8957089185714722, "learning_rate": 9.789327263446213e-06, "loss": 0.8185, "step": 18478 }, { "epoch": 3.4160936628879393, "grad_norm": 1.088594675064087, "learning_rate": 9.783024105955207e-06, "loss": 0.6706, "step": 18479 }, { "epoch": 3.41627950195131, "grad_norm": 1.17194402217865, "learning_rate": 9.776722873990719e-06, "loss": 0.8527, "step": 18480 }, { "epoch": 3.4164653410146815, "grad_norm": 0.8662586808204651, "learning_rate": 9.77042356768726e-06, "loss": 0.8623, "step": 18481 }, { "epoch": 3.4166511800780524, "grad_norm": 1.0301966667175293, "learning_rate": 9.764126187179256e-06, "loss": 0.8051, "step": 18482 }, { "epoch": 3.4168370191414237, "grad_norm": 0.891620934009552, "learning_rate": 9.757830732601136e-06, "loss": 0.7495, "step": 18483 }, { "epoch": 3.4170228582047946, "grad_norm": 0.9465243816375732, "learning_rate": 9.751537204087258e-06, "loss": 0.6785, "step": 18484 }, { "epoch": 3.417208697268166, "grad_norm": 1.1204577684402466, "learning_rate": 9.745245601771946e-06, "loss": 0.84, "step": 18485 }, { "epoch": 3.417394536331537, "grad_norm": 0.9050443172454834, "learning_rate": 9.738955925789494e-06, "loss": 0.9034, "step": 18486 }, { "epoch": 3.417580375394908, "grad_norm": 0.9914594888687134, "learning_rate": 9.73266817627413e-06, "loss": 0.9183, "step": 18487 }, { "epoch": 3.417766214458279, "grad_norm": 0.9951683878898621, "learning_rate": 9.726382353360041e-06, "loss": 0.8975, "step": 18488 }, { "epoch": 3.4179520535216503, "grad_norm": 1.047358751296997, "learning_rate": 9.720098457181425e-06, "loss": 0.7869, "step": 18489 }, { "epoch": 3.418137892585021, "grad_norm": 1.073513388633728, "learning_rate": 9.713816487872374e-06, "loss": 0.8416, "step": 18490 }, { "epoch": 3.4183237316483925, "grad_norm": 1.0014564990997314, "learning_rate": 
9.707536445566977e-06, "loss": 0.9485, "step": 18491 }, { "epoch": 3.4185095707117634, "grad_norm": 0.8295140266418457, "learning_rate": 9.701258330399255e-06, "loss": 0.7742, "step": 18492 }, { "epoch": 3.4186954097751348, "grad_norm": 1.0205795764923096, "learning_rate": 9.694982142503205e-06, "loss": 0.9397, "step": 18493 }, { "epoch": 3.418881248838506, "grad_norm": 1.060734748840332, "learning_rate": 9.688707882012814e-06, "loss": 0.7891, "step": 18494 }, { "epoch": 3.419067087901877, "grad_norm": 1.210156798362732, "learning_rate": 9.682435549061974e-06, "loss": 0.812, "step": 18495 }, { "epoch": 3.419252926965248, "grad_norm": 0.9137862920761108, "learning_rate": 9.676165143784544e-06, "loss": 0.6196, "step": 18496 }, { "epoch": 3.419438766028619, "grad_norm": 1.0834940671920776, "learning_rate": 9.669896666314371e-06, "loss": 0.8834, "step": 18497 }, { "epoch": 3.4196246050919905, "grad_norm": 1.0085303783416748, "learning_rate": 9.66363011678525e-06, "loss": 0.7188, "step": 18498 }, { "epoch": 3.4198104441553614, "grad_norm": 0.9292789697647095, "learning_rate": 9.657365495330916e-06, "loss": 1.0473, "step": 18499 }, { "epoch": 3.4199962832187327, "grad_norm": 0.8212962746620178, "learning_rate": 9.651102802085066e-06, "loss": 0.5599, "step": 18500 }, { "epoch": 3.4201821222821036, "grad_norm": 0.9323970079421997, "learning_rate": 9.6448420371814e-06, "loss": 0.7903, "step": 18501 }, { "epoch": 3.420367961345475, "grad_norm": 0.8843657970428467, "learning_rate": 9.638583200753525e-06, "loss": 0.9521, "step": 18502 }, { "epoch": 3.420553800408846, "grad_norm": 1.1132733821868896, "learning_rate": 9.632326292935012e-06, "loss": 0.586, "step": 18503 }, { "epoch": 3.420739639472217, "grad_norm": 0.9576237797737122, "learning_rate": 9.626071313859409e-06, "loss": 0.9634, "step": 18504 }, { "epoch": 3.420925478535588, "grad_norm": 0.9085745811462402, "learning_rate": 9.619818263660252e-06, "loss": 0.819, "step": 18505 }, { "epoch": 3.4211113175989594, 
"grad_norm": 0.9066290855407715, "learning_rate": 9.613567142470969e-06, "loss": 0.6965, "step": 18506 }, { "epoch": 3.4212971566623303, "grad_norm": 0.9321801662445068, "learning_rate": 9.607317950424987e-06, "loss": 0.7295, "step": 18507 }, { "epoch": 3.4214829957257016, "grad_norm": 0.9430164694786072, "learning_rate": 9.601070687655667e-06, "loss": 0.6397, "step": 18508 }, { "epoch": 3.421668834789073, "grad_norm": 0.9664836525917053, "learning_rate": 9.594825354296377e-06, "loss": 0.841, "step": 18509 }, { "epoch": 3.421854673852444, "grad_norm": 0.9440019130706787, "learning_rate": 9.588581950480401e-06, "loss": 1.039, "step": 18510 }, { "epoch": 3.4220405129158147, "grad_norm": 1.0869544744491577, "learning_rate": 9.58234047634099e-06, "loss": 0.8229, "step": 18511 }, { "epoch": 3.422226351979186, "grad_norm": 0.9242025017738342, "learning_rate": 9.576100932011344e-06, "loss": 0.7676, "step": 18512 }, { "epoch": 3.4224121910425573, "grad_norm": 0.9432842135429382, "learning_rate": 9.569863317624662e-06, "loss": 0.6781, "step": 18513 }, { "epoch": 3.4225980301059282, "grad_norm": 0.8574406504631042, "learning_rate": 9.563627633314054e-06, "loss": 0.6487, "step": 18514 }, { "epoch": 3.4227838691692996, "grad_norm": 0.942700982093811, "learning_rate": 9.557393879212629e-06, "loss": 0.7327, "step": 18515 }, { "epoch": 3.4229697082326704, "grad_norm": 0.9813656806945801, "learning_rate": 9.551162055453411e-06, "loss": 0.8764, "step": 18516 }, { "epoch": 3.4231555472960418, "grad_norm": 0.7930585145950317, "learning_rate": 9.54493216216944e-06, "loss": 0.6155, "step": 18517 }, { "epoch": 3.4233413863594127, "grad_norm": 1.2245550155639648, "learning_rate": 9.538704199493664e-06, "loss": 0.813, "step": 18518 }, { "epoch": 3.423527225422784, "grad_norm": 0.8570970892906189, "learning_rate": 9.532478167559011e-06, "loss": 0.922, "step": 18519 }, { "epoch": 3.423713064486155, "grad_norm": 0.8624559640884399, "learning_rate": 9.52625406649834e-06, "loss": 0.6445, 
"step": 18520 }, { "epoch": 3.423898903549526, "grad_norm": 0.8528590798377991, "learning_rate": 9.520031896444536e-06, "loss": 0.9415, "step": 18521 }, { "epoch": 3.424084742612897, "grad_norm": 0.9589774012565613, "learning_rate": 9.513811657530391e-06, "loss": 0.9034, "step": 18522 }, { "epoch": 3.4242705816762684, "grad_norm": 0.9136031270027161, "learning_rate": 9.507593349888644e-06, "loss": 0.8908, "step": 18523 }, { "epoch": 3.4244564207396393, "grad_norm": 1.0127958059310913, "learning_rate": 9.501376973651999e-06, "loss": 0.6507, "step": 18524 }, { "epoch": 3.4246422598030106, "grad_norm": 0.8139373064041138, "learning_rate": 9.495162528953195e-06, "loss": 0.6984, "step": 18525 }, { "epoch": 3.4248280988663815, "grad_norm": 0.9890860319137573, "learning_rate": 9.488950015924824e-06, "loss": 0.777, "step": 18526 }, { "epoch": 3.425013937929753, "grad_norm": 1.1747925281524658, "learning_rate": 9.482739434699506e-06, "loss": 0.9121, "step": 18527 }, { "epoch": 3.425199776993124, "grad_norm": 1.0490977764129639, "learning_rate": 9.476530785409754e-06, "loss": 0.9954, "step": 18528 }, { "epoch": 3.425385616056495, "grad_norm": 1.0559828281402588, "learning_rate": 9.47032406818813e-06, "loss": 0.741, "step": 18529 }, { "epoch": 3.4255714551198664, "grad_norm": 2.027252674102783, "learning_rate": 9.464119283167072e-06, "loss": 1.3585, "step": 18530 }, { "epoch": 3.4257572941832373, "grad_norm": 0.9632944464683533, "learning_rate": 9.45791643047904e-06, "loss": 0.8285, "step": 18531 }, { "epoch": 3.4259431332466086, "grad_norm": 1.0093849897384644, "learning_rate": 9.451715510256377e-06, "loss": 0.9482, "step": 18532 }, { "epoch": 3.4261289723099795, "grad_norm": 0.9287329912185669, "learning_rate": 9.445516522631482e-06, "loss": 0.8109, "step": 18533 }, { "epoch": 3.426314811373351, "grad_norm": 0.957906186580658, "learning_rate": 9.43931946773663e-06, "loss": 0.7097, "step": 18534 }, { "epoch": 3.4265006504367217, "grad_norm": 1.0049121379852295, 
"learning_rate": 9.433124345704092e-06, "loss": 0.8023, "step": 18535 }, { "epoch": 3.426686489500093, "grad_norm": 0.9933798313140869, "learning_rate": 9.426931156666085e-06, "loss": 0.7093, "step": 18536 }, { "epoch": 3.426872328563464, "grad_norm": 1.0770821571350098, "learning_rate": 9.420739900754815e-06, "loss": 0.7999, "step": 18537 }, { "epoch": 3.4270581676268352, "grad_norm": 0.9103144407272339, "learning_rate": 9.414550578102422e-06, "loss": 0.7976, "step": 18538 }, { "epoch": 3.427244006690206, "grad_norm": 0.9013404250144958, "learning_rate": 9.408363188840996e-06, "loss": 0.8276, "step": 18539 }, { "epoch": 3.4274298457535775, "grad_norm": 0.9676746726036072, "learning_rate": 9.402177733102579e-06, "loss": 0.539, "step": 18540 }, { "epoch": 3.4276156848169483, "grad_norm": 0.9252790808677673, "learning_rate": 9.39599421101922e-06, "loss": 0.7271, "step": 18541 }, { "epoch": 3.4278015238803197, "grad_norm": 1.0059067010879517, "learning_rate": 9.389812622722882e-06, "loss": 0.8074, "step": 18542 }, { "epoch": 3.427987362943691, "grad_norm": 1.0993884801864624, "learning_rate": 9.383632968345501e-06, "loss": 0.809, "step": 18543 }, { "epoch": 3.428173202007062, "grad_norm": 0.864134669303894, "learning_rate": 9.377455248018963e-06, "loss": 0.8721, "step": 18544 }, { "epoch": 3.4283590410704328, "grad_norm": 0.9246398210525513, "learning_rate": 9.371279461875115e-06, "loss": 0.6925, "step": 18545 }, { "epoch": 3.428544880133804, "grad_norm": 0.9502807259559631, "learning_rate": 9.365105610045798e-06, "loss": 1.0406, "step": 18546 }, { "epoch": 3.4287307191971754, "grad_norm": 0.8488006591796875, "learning_rate": 9.358933692662775e-06, "loss": 0.8352, "step": 18547 }, { "epoch": 3.4289165582605463, "grad_norm": 1.1370174884796143, "learning_rate": 9.352763709857749e-06, "loss": 0.913, "step": 18548 }, { "epoch": 3.4291023973239176, "grad_norm": 0.9592936635017395, "learning_rate": 9.346595661762436e-06, "loss": 0.7774, "step": 18549 }, { "epoch": 
3.4292882363872885, "grad_norm": 0.841438353061676, "learning_rate": 9.340429548508468e-06, "loss": 0.6929, "step": 18550 }, { "epoch": 3.42947407545066, "grad_norm": 0.9155181050300598, "learning_rate": 9.334265370227458e-06, "loss": 0.7457, "step": 18551 }, { "epoch": 3.4296599145140307, "grad_norm": 0.9380632638931274, "learning_rate": 9.328103127050947e-06, "loss": 0.9594, "step": 18552 }, { "epoch": 3.429845753577402, "grad_norm": 1.0478308200836182, "learning_rate": 9.321942819110496e-06, "loss": 0.9126, "step": 18553 }, { "epoch": 3.430031592640773, "grad_norm": 0.9147072434425354, "learning_rate": 9.315784446537568e-06, "loss": 0.8514, "step": 18554 }, { "epoch": 3.4302174317041443, "grad_norm": 0.9005574584007263, "learning_rate": 9.309628009463579e-06, "loss": 0.749, "step": 18555 }, { "epoch": 3.430403270767515, "grad_norm": 0.9355246424674988, "learning_rate": 9.303473508019944e-06, "loss": 0.7584, "step": 18556 }, { "epoch": 3.4305891098308865, "grad_norm": 2.22025465965271, "learning_rate": 9.29732094233805e-06, "loss": 1.1015, "step": 18557 }, { "epoch": 3.4307749488942574, "grad_norm": 1.0061955451965332, "learning_rate": 9.29117031254919e-06, "loss": 0.715, "step": 18558 }, { "epoch": 3.4309607879576287, "grad_norm": 1.1127967834472656, "learning_rate": 9.285021618784628e-06, "loss": 0.947, "step": 18559 }, { "epoch": 3.4311466270209996, "grad_norm": 0.9937306642532349, "learning_rate": 9.278874861175601e-06, "loss": 0.9385, "step": 18560 }, { "epoch": 3.431332466084371, "grad_norm": 0.901160478591919, "learning_rate": 9.272730039853317e-06, "loss": 0.8884, "step": 18561 }, { "epoch": 3.4315183051477423, "grad_norm": 1.075750708580017, "learning_rate": 9.266587154948914e-06, "loss": 0.931, "step": 18562 }, { "epoch": 3.431704144211113, "grad_norm": 0.9136353135108948, "learning_rate": 9.260446206593509e-06, "loss": 0.8377, "step": 18563 }, { "epoch": 3.4318899832744845, "grad_norm": 1.0850541591644287, "learning_rate": 9.254307194918144e-06, 
"loss": 0.8653, "step": 18564 }, { "epoch": 3.4320758223378554, "grad_norm": 1.0017285346984863, "learning_rate": 9.248170120053878e-06, "loss": 0.7573, "step": 18565 }, { "epoch": 3.4322616614012267, "grad_norm": 0.9335682988166809, "learning_rate": 9.242034982131675e-06, "loss": 0.9018, "step": 18566 }, { "epoch": 3.4324475004645976, "grad_norm": 0.8629546165466309, "learning_rate": 9.235901781282496e-06, "loss": 0.4799, "step": 18567 }, { "epoch": 3.432633339527969, "grad_norm": 1.1791781187057495, "learning_rate": 9.229770517637227e-06, "loss": 0.7093, "step": 18568 }, { "epoch": 3.43281917859134, "grad_norm": 0.9566700458526611, "learning_rate": 9.22364119132676e-06, "loss": 0.7499, "step": 18569 }, { "epoch": 3.433005017654711, "grad_norm": 1.1813853979110718, "learning_rate": 9.21751380248188e-06, "loss": 0.8975, "step": 18570 }, { "epoch": 3.433190856718082, "grad_norm": 1.0684021711349487, "learning_rate": 9.211388351233396e-06, "loss": 0.747, "step": 18571 }, { "epoch": 3.4333766957814533, "grad_norm": 0.9259321689605713, "learning_rate": 9.205264837711991e-06, "loss": 0.8761, "step": 18572 }, { "epoch": 3.433562534844824, "grad_norm": 1.0204839706420898, "learning_rate": 9.199143262048427e-06, "loss": 0.8364, "step": 18573 }, { "epoch": 3.4337483739081955, "grad_norm": 0.8068413734436035, "learning_rate": 9.193023624373332e-06, "loss": 0.5637, "step": 18574 }, { "epoch": 3.4339342129715664, "grad_norm": 0.8738575577735901, "learning_rate": 9.186905924817301e-06, "loss": 0.7658, "step": 18575 }, { "epoch": 3.4341200520349378, "grad_norm": 0.9638357758522034, "learning_rate": 9.180790163510933e-06, "loss": 0.8261, "step": 18576 }, { "epoch": 3.434305891098309, "grad_norm": 1.031435251235962, "learning_rate": 9.174676340584721e-06, "loss": 0.9145, "step": 18577 }, { "epoch": 3.43449173016168, "grad_norm": 0.877661406993866, "learning_rate": 9.168564456169215e-06, "loss": 0.7629, "step": 18578 }, { "epoch": 3.4346775692250513, "grad_norm": 
0.9631086587905884, "learning_rate": 9.162454510394813e-06, "loss": 0.7251, "step": 18579 }, { "epoch": 3.434863408288422, "grad_norm": 1.0152180194854736, "learning_rate": 9.156346503391921e-06, "loss": 0.791, "step": 18580 }, { "epoch": 3.4350492473517935, "grad_norm": 0.9161962866783142, "learning_rate": 9.150240435290947e-06, "loss": 0.8774, "step": 18581 }, { "epoch": 3.4352350864151644, "grad_norm": 0.8945319056510925, "learning_rate": 9.144136306222185e-06, "loss": 0.8238, "step": 18582 }, { "epoch": 3.4354209254785357, "grad_norm": 0.8093655109405518, "learning_rate": 9.138034116315907e-06, "loss": 0.6699, "step": 18583 }, { "epoch": 3.4356067645419066, "grad_norm": 1.2264208793640137, "learning_rate": 9.131933865702358e-06, "loss": 0.9794, "step": 18584 }, { "epoch": 3.435792603605278, "grad_norm": 0.7184557318687439, "learning_rate": 9.125835554511753e-06, "loss": 0.54, "step": 18585 }, { "epoch": 3.435978442668649, "grad_norm": 0.8314578533172607, "learning_rate": 9.119739182874254e-06, "loss": 0.7339, "step": 18586 }, { "epoch": 3.43616428173202, "grad_norm": 0.8828555345535278, "learning_rate": 9.113644750919936e-06, "loss": 0.9658, "step": 18587 }, { "epoch": 3.436350120795391, "grad_norm": 1.0641796588897705, "learning_rate": 9.107552258778907e-06, "loss": 0.7089, "step": 18588 }, { "epoch": 3.4365359598587624, "grad_norm": 0.8992449641227722, "learning_rate": 9.101461706581216e-06, "loss": 0.8167, "step": 18589 }, { "epoch": 3.4367217989221333, "grad_norm": 1.0069942474365234, "learning_rate": 9.095373094456839e-06, "loss": 0.704, "step": 18590 }, { "epoch": 3.4369076379855046, "grad_norm": 0.9273150563240051, "learning_rate": 9.089286422535726e-06, "loss": 0.8596, "step": 18591 }, { "epoch": 3.437093477048876, "grad_norm": 0.8703062534332275, "learning_rate": 9.083201690947763e-06, "loss": 0.7614, "step": 18592 }, { "epoch": 3.437279316112247, "grad_norm": 0.9495863914489746, "learning_rate": 9.077118899822857e-06, "loss": 0.8382, "step": 18593 }, 
{ "epoch": 3.4374651551756177, "grad_norm": 0.9941915273666382, "learning_rate": 9.071038049290825e-06, "loss": 0.9582, "step": 18594 }, { "epoch": 3.437650994238989, "grad_norm": 1.2743667364120483, "learning_rate": 9.064959139481422e-06, "loss": 0.9139, "step": 18595 }, { "epoch": 3.4378368333023603, "grad_norm": 0.9535473585128784, "learning_rate": 9.05888217052443e-06, "loss": 0.8414, "step": 18596 }, { "epoch": 3.4380226723657312, "grad_norm": 0.8331275582313538, "learning_rate": 9.052807142549546e-06, "loss": 0.7422, "step": 18597 }, { "epoch": 3.4382085114291026, "grad_norm": 0.8911383748054504, "learning_rate": 9.046734055686401e-06, "loss": 0.8685, "step": 18598 }, { "epoch": 3.4383943504924734, "grad_norm": 1.0376087427139282, "learning_rate": 9.040662910064646e-06, "loss": 0.7465, "step": 18599 }, { "epoch": 3.4385801895558448, "grad_norm": 1.1285665035247803, "learning_rate": 9.034593705813843e-06, "loss": 1.2301, "step": 18600 }, { "epoch": 3.4387660286192157, "grad_norm": 0.9236640334129333, "learning_rate": 9.028526443063546e-06, "loss": 0.9906, "step": 18601 }, { "epoch": 3.438951867682587, "grad_norm": 1.0847108364105225, "learning_rate": 9.022461121943238e-06, "loss": 0.8161, "step": 18602 }, { "epoch": 3.439137706745958, "grad_norm": 0.729643702507019, "learning_rate": 9.016397742582373e-06, "loss": 0.3817, "step": 18603 }, { "epoch": 3.439323545809329, "grad_norm": 0.861388623714447, "learning_rate": 9.010336305110345e-06, "loss": 0.9105, "step": 18604 }, { "epoch": 3.4395093848727, "grad_norm": 0.6577388048171997, "learning_rate": 9.004276809656564e-06, "loss": 0.4741, "step": 18605 }, { "epoch": 3.4396952239360714, "grad_norm": 0.8057047724723816, "learning_rate": 8.998219256350349e-06, "loss": 0.6355, "step": 18606 }, { "epoch": 3.4398810629994423, "grad_norm": 0.9502322673797607, "learning_rate": 8.99216364532095e-06, "loss": 0.9192, "step": 18607 }, { "epoch": 3.4400669020628136, "grad_norm": 1.1867955923080444, "learning_rate": 
8.986109976697643e-06, "loss": 0.8475, "step": 18608 }, { "epoch": 3.4402527411261845, "grad_norm": 0.9152840971946716, "learning_rate": 8.980058250609647e-06, "loss": 0.8698, "step": 18609 }, { "epoch": 3.440438580189556, "grad_norm": 0.9248976707458496, "learning_rate": 8.974008467186124e-06, "loss": 0.8996, "step": 18610 }, { "epoch": 3.440624419252927, "grad_norm": 0.7468507289886475, "learning_rate": 8.967960626556172e-06, "loss": 0.8113, "step": 18611 }, { "epoch": 3.440810258316298, "grad_norm": 1.05239737033844, "learning_rate": 8.961914728848864e-06, "loss": 0.8636, "step": 18612 }, { "epoch": 3.4409960973796694, "grad_norm": 1.0198965072631836, "learning_rate": 8.955870774193287e-06, "loss": 0.9243, "step": 18613 }, { "epoch": 3.4411819364430403, "grad_norm": 0.8686797618865967, "learning_rate": 8.949828762718404e-06, "loss": 0.6556, "step": 18614 }, { "epoch": 3.4413677755064116, "grad_norm": 1.007763385772705, "learning_rate": 8.943788694553157e-06, "loss": 0.9191, "step": 18615 }, { "epoch": 3.4415536145697825, "grad_norm": 1.1565308570861816, "learning_rate": 8.937750569826508e-06, "loss": 0.7586, "step": 18616 }, { "epoch": 3.441739453633154, "grad_norm": 0.7900993227958679, "learning_rate": 8.93171438866729e-06, "loss": 0.5349, "step": 18617 }, { "epoch": 3.4419252926965247, "grad_norm": 0.9284295439720154, "learning_rate": 8.925680151204363e-06, "loss": 0.8183, "step": 18618 }, { "epoch": 3.442111131759896, "grad_norm": 0.9950578808784485, "learning_rate": 8.919647857566482e-06, "loss": 1.0031, "step": 18619 }, { "epoch": 3.442296970823267, "grad_norm": 1.1575837135314941, "learning_rate": 8.91361750788241e-06, "loss": 0.9175, "step": 18620 }, { "epoch": 3.4424828098866382, "grad_norm": 0.8874562382698059, "learning_rate": 8.90758910228089e-06, "loss": 0.8789, "step": 18621 }, { "epoch": 3.442668648950009, "grad_norm": 0.893882691860199, "learning_rate": 8.901562640890548e-06, "loss": 0.6872, "step": 18622 }, { "epoch": 3.4428544880133805, 
"grad_norm": 0.8668039441108704, "learning_rate": 8.89553812384003e-06, "loss": 0.7776, "step": 18623 }, { "epoch": 3.4430403270767513, "grad_norm": 0.844916820526123, "learning_rate": 8.889515551257888e-06, "loss": 0.8515, "step": 18624 }, { "epoch": 3.4432261661401227, "grad_norm": 0.9719852209091187, "learning_rate": 8.883494923272707e-06, "loss": 0.7908, "step": 18625 }, { "epoch": 3.443412005203494, "grad_norm": 0.9443371891975403, "learning_rate": 8.87747624001296e-06, "loss": 0.8065, "step": 18626 }, { "epoch": 3.443597844266865, "grad_norm": 1.1390016078948975, "learning_rate": 8.871459501607093e-06, "loss": 0.8421, "step": 18627 }, { "epoch": 3.4437836833302358, "grad_norm": 0.8440651297569275, "learning_rate": 8.865444708183556e-06, "loss": 0.7339, "step": 18628 }, { "epoch": 3.443969522393607, "grad_norm": 0.9557360410690308, "learning_rate": 8.859431859870692e-06, "loss": 0.7506, "step": 18629 }, { "epoch": 3.4441553614569784, "grad_norm": 0.8384804129600525, "learning_rate": 8.853420956796866e-06, "loss": 0.6606, "step": 18630 }, { "epoch": 3.4443412005203493, "grad_norm": 1.0732804536819458, "learning_rate": 8.84741199909036e-06, "loss": 0.8602, "step": 18631 }, { "epoch": 3.4445270395837206, "grad_norm": 0.9016420245170593, "learning_rate": 8.841404986879409e-06, "loss": 0.973, "step": 18632 }, { "epoch": 3.4447128786470915, "grad_norm": 0.9232279062271118, "learning_rate": 8.835399920292241e-06, "loss": 0.7959, "step": 18633 }, { "epoch": 3.444898717710463, "grad_norm": 0.9467220306396484, "learning_rate": 8.829396799457024e-06, "loss": 0.9368, "step": 18634 }, { "epoch": 3.4450845567738337, "grad_norm": 0.7945659160614014, "learning_rate": 8.823395624501863e-06, "loss": 0.7663, "step": 18635 }, { "epoch": 3.445270395837205, "grad_norm": 0.8220109343528748, "learning_rate": 8.817396395554866e-06, "loss": 0.5273, "step": 18636 }, { "epoch": 3.445456234900576, "grad_norm": 0.8508674502372742, "learning_rate": 8.811399112744067e-06, "loss": 0.8074, 
"step": 18637 }, { "epoch": 3.4456420739639473, "grad_norm": 1.1290472745895386, "learning_rate": 8.805403776197474e-06, "loss": 0.984, "step": 18638 }, { "epoch": 3.445827913027318, "grad_norm": 0.9419941306114197, "learning_rate": 8.799410386043016e-06, "loss": 0.7817, "step": 18639 }, { "epoch": 3.4460137520906895, "grad_norm": 0.9528607130050659, "learning_rate": 8.79341894240865e-06, "loss": 0.9257, "step": 18640 }, { "epoch": 3.446199591154061, "grad_norm": 0.969680666923523, "learning_rate": 8.787429445422246e-06, "loss": 0.9073, "step": 18641 }, { "epoch": 3.4463854302174317, "grad_norm": 0.8936437368392944, "learning_rate": 8.78144189521164e-06, "loss": 0.787, "step": 18642 }, { "epoch": 3.4465712692808026, "grad_norm": 0.7957565188407898, "learning_rate": 8.775456291904615e-06, "loss": 0.7421, "step": 18643 }, { "epoch": 3.446757108344174, "grad_norm": 0.9845880270004272, "learning_rate": 8.769472635628905e-06, "loss": 0.8581, "step": 18644 }, { "epoch": 3.4469429474075453, "grad_norm": 1.0251277685165405, "learning_rate": 8.763490926512274e-06, "loss": 0.7913, "step": 18645 }, { "epoch": 3.447128786470916, "grad_norm": 1.0311559438705444, "learning_rate": 8.757511164682353e-06, "loss": 0.8396, "step": 18646 }, { "epoch": 3.4473146255342875, "grad_norm": 1.0363506078720093, "learning_rate": 8.751533350266761e-06, "loss": 0.8461, "step": 18647 }, { "epoch": 3.4475004645976584, "grad_norm": 1.0235755443572998, "learning_rate": 8.745557483393118e-06, "loss": 0.8727, "step": 18648 }, { "epoch": 3.4476863036610297, "grad_norm": 0.8560394644737244, "learning_rate": 8.739583564188958e-06, "loss": 0.613, "step": 18649 }, { "epoch": 3.4478721427244006, "grad_norm": 0.9190382957458496, "learning_rate": 8.733611592781755e-06, "loss": 0.9174, "step": 18650 }, { "epoch": 3.448057981787772, "grad_norm": 1.0788168907165527, "learning_rate": 8.727641569299006e-06, "loss": 0.8025, "step": 18651 }, { "epoch": 3.448243820851143, "grad_norm": 0.9430216550827026, 
"learning_rate": 8.72167349386811e-06, "loss": 0.777, "step": 18652 }, { "epoch": 3.448429659914514, "grad_norm": 0.917182207107544, "learning_rate": 8.715707366616466e-06, "loss": 0.9633, "step": 18653 }, { "epoch": 3.448615498977885, "grad_norm": 0.9732248783111572, "learning_rate": 8.709743187671393e-06, "loss": 0.6324, "step": 18654 }, { "epoch": 3.4488013380412563, "grad_norm": 0.9532489776611328, "learning_rate": 8.70378095716018e-06, "loss": 0.7928, "step": 18655 }, { "epoch": 3.448987177104627, "grad_norm": 0.8786229491233826, "learning_rate": 8.697820675210111e-06, "loss": 0.6679, "step": 18656 }, { "epoch": 3.4491730161679985, "grad_norm": 1.0036118030548096, "learning_rate": 8.691862341948376e-06, "loss": 0.8445, "step": 18657 }, { "epoch": 3.4493588552313694, "grad_norm": 1.0232371091842651, "learning_rate": 8.685905957502149e-06, "loss": 1.0843, "step": 18658 }, { "epoch": 3.4495446942947408, "grad_norm": 1.1256111860275269, "learning_rate": 8.679951521998542e-06, "loss": 0.8726, "step": 18659 }, { "epoch": 3.449730533358112, "grad_norm": 0.9262232184410095, "learning_rate": 8.673999035564673e-06, "loss": 0.9612, "step": 18660 }, { "epoch": 3.449916372421483, "grad_norm": 0.8881259560585022, "learning_rate": 8.668048498327552e-06, "loss": 0.7052, "step": 18661 }, { "epoch": 3.4501022114848543, "grad_norm": 0.9636815190315247, "learning_rate": 8.662099910414222e-06, "loss": 0.9367, "step": 18662 }, { "epoch": 3.450288050548225, "grad_norm": 1.1980029344558716, "learning_rate": 8.656153271951639e-06, "loss": 0.8395, "step": 18663 }, { "epoch": 3.4504738896115965, "grad_norm": 0.8954828381538391, "learning_rate": 8.650208583066689e-06, "loss": 0.8351, "step": 18664 }, { "epoch": 3.4506597286749674, "grad_norm": 1.0028259754180908, "learning_rate": 8.64426584388629e-06, "loss": 0.777, "step": 18665 }, { "epoch": 3.4508455677383387, "grad_norm": 0.953440248966217, "learning_rate": 8.638325054537255e-06, "loss": 0.7337, "step": 18666 }, { "epoch": 
3.4510314068017096, "grad_norm": 0.9255670309066772, "learning_rate": 8.632386215146381e-06, "loss": 0.8076, "step": 18667 }, { "epoch": 3.451217245865081, "grad_norm": 0.9749565720558167, "learning_rate": 8.626449325840447e-06, "loss": 0.8783, "step": 18668 }, { "epoch": 3.451403084928452, "grad_norm": 0.9131873846054077, "learning_rate": 8.620514386746148e-06, "loss": 0.723, "step": 18669 }, { "epoch": 3.451588923991823, "grad_norm": 0.960011899471283, "learning_rate": 8.614581397990162e-06, "loss": 0.9202, "step": 18670 }, { "epoch": 3.451774763055194, "grad_norm": 1.0307480096817017, "learning_rate": 8.608650359699088e-06, "loss": 0.7317, "step": 18671 }, { "epoch": 3.4519606021185654, "grad_norm": 0.8767074346542358, "learning_rate": 8.602721271999536e-06, "loss": 1.0208, "step": 18672 }, { "epoch": 3.4521464411819363, "grad_norm": 0.9889634847640991, "learning_rate": 8.59679413501807e-06, "loss": 0.7734, "step": 18673 }, { "epoch": 3.4523322802453076, "grad_norm": 1.1916424036026, "learning_rate": 8.590868948881192e-06, "loss": 1.0509, "step": 18674 }, { "epoch": 3.452518119308679, "grad_norm": 1.2274255752563477, "learning_rate": 8.58494571371532e-06, "loss": 0.8215, "step": 18675 }, { "epoch": 3.45270395837205, "grad_norm": 1.0868438482284546, "learning_rate": 8.579024429646932e-06, "loss": 0.9448, "step": 18676 }, { "epoch": 3.4528897974354207, "grad_norm": 1.3764984607696533, "learning_rate": 8.57310509680238e-06, "loss": 1.0186, "step": 18677 }, { "epoch": 3.453075636498792, "grad_norm": 0.9937775731086731, "learning_rate": 8.567187715308012e-06, "loss": 1.0688, "step": 18678 }, { "epoch": 3.4532614755621633, "grad_norm": 1.1719824075698853, "learning_rate": 8.561272285290101e-06, "loss": 0.7771, "step": 18679 }, { "epoch": 3.4534473146255342, "grad_norm": 1.0099252462387085, "learning_rate": 8.555358806874925e-06, "loss": 0.6463, "step": 18680 }, { "epoch": 3.4536331536889056, "grad_norm": 1.1827915906906128, "learning_rate": 8.549447280188694e-06, 
"loss": 0.693, "step": 18681 }, { "epoch": 3.4538189927522764, "grad_norm": 0.9828644394874573, "learning_rate": 8.543537705357563e-06, "loss": 0.8288, "step": 18682 }, { "epoch": 3.4540048318156478, "grad_norm": 0.9095243811607361, "learning_rate": 8.537630082507687e-06, "loss": 0.7863, "step": 18683 }, { "epoch": 3.4541906708790187, "grad_norm": 1.0734424591064453, "learning_rate": 8.531724411765119e-06, "loss": 0.8124, "step": 18684 }, { "epoch": 3.45437650994239, "grad_norm": 1.265278697013855, "learning_rate": 8.525820693255949e-06, "loss": 1.0153, "step": 18685 }, { "epoch": 3.454562349005761, "grad_norm": 0.8495627045631409, "learning_rate": 8.519918927106164e-06, "loss": 0.6581, "step": 18686 }, { "epoch": 3.454748188069132, "grad_norm": 1.0464892387390137, "learning_rate": 8.514019113441695e-06, "loss": 0.894, "step": 18687 }, { "epoch": 3.454934027132503, "grad_norm": 0.9293912053108215, "learning_rate": 8.508121252388523e-06, "loss": 0.8371, "step": 18688 }, { "epoch": 3.4551198661958744, "grad_norm": 0.9331597685813904, "learning_rate": 8.50222534407249e-06, "loss": 0.8066, "step": 18689 }, { "epoch": 3.4553057052592457, "grad_norm": 0.7989200353622437, "learning_rate": 8.496331388619438e-06, "loss": 0.83, "step": 18690 }, { "epoch": 3.4554915443226166, "grad_norm": 0.8642352819442749, "learning_rate": 8.490439386155147e-06, "loss": 0.7559, "step": 18691 }, { "epoch": 3.4556773833859875, "grad_norm": 0.9246635437011719, "learning_rate": 8.484549336805403e-06, "loss": 0.7547, "step": 18692 }, { "epoch": 3.455863222449359, "grad_norm": 0.8502155542373657, "learning_rate": 8.478661240695917e-06, "loss": 0.6164, "step": 18693 }, { "epoch": 3.45604906151273, "grad_norm": 1.032968521118164, "learning_rate": 8.472775097952346e-06, "loss": 0.8578, "step": 18694 }, { "epoch": 3.456234900576101, "grad_norm": 0.8894579410552979, "learning_rate": 8.46689090870031e-06, "loss": 0.9053, "step": 18695 }, { "epoch": 3.4564207396394724, "grad_norm": 0.8251888751983643, 
"learning_rate": 8.461008673065429e-06, "loss": 0.7271, "step": 18696 }, { "epoch": 3.4566065787028433, "grad_norm": 0.974197506904602, "learning_rate": 8.455128391173228e-06, "loss": 0.9299, "step": 18697 }, { "epoch": 3.4567924177662146, "grad_norm": 0.6621809601783752, "learning_rate": 8.449250063149217e-06, "loss": 0.2961, "step": 18698 }, { "epoch": 3.4569782568295855, "grad_norm": 0.9626237750053406, "learning_rate": 8.443373689118827e-06, "loss": 1.0561, "step": 18699 }, { "epoch": 3.457164095892957, "grad_norm": 0.8487881422042847, "learning_rate": 8.437499269207539e-06, "loss": 0.781, "step": 18700 }, { "epoch": 3.4573499349563277, "grad_norm": 1.360926628112793, "learning_rate": 8.431626803540694e-06, "loss": 1.249, "step": 18701 }, { "epoch": 3.457535774019699, "grad_norm": 0.9287676811218262, "learning_rate": 8.425756292243647e-06, "loss": 0.9681, "step": 18702 }, { "epoch": 3.45772161308307, "grad_norm": 1.0354424715042114, "learning_rate": 8.419887735441633e-06, "loss": 0.9981, "step": 18703 }, { "epoch": 3.4579074521464412, "grad_norm": 0.9920165538787842, "learning_rate": 8.414021133260007e-06, "loss": 0.7668, "step": 18704 }, { "epoch": 3.458093291209812, "grad_norm": 0.9180377125740051, "learning_rate": 8.408156485823926e-06, "loss": 0.7742, "step": 18705 }, { "epoch": 3.4582791302731835, "grad_norm": 1.3005715608596802, "learning_rate": 8.402293793258576e-06, "loss": 0.9009, "step": 18706 }, { "epoch": 3.4584649693365543, "grad_norm": 0.9481584429740906, "learning_rate": 8.396433055689045e-06, "loss": 0.9433, "step": 18707 }, { "epoch": 3.4586508083999257, "grad_norm": 1.0840070247650146, "learning_rate": 8.390574273240482e-06, "loss": 0.8461, "step": 18708 }, { "epoch": 3.458836647463297, "grad_norm": 0.9308413863182068, "learning_rate": 8.384717446037893e-06, "loss": 0.8624, "step": 18709 }, { "epoch": 3.459022486526668, "grad_norm": 0.9320765137672424, "learning_rate": 8.378862574206303e-06, "loss": 0.814, "step": 18710 }, { "epoch": 
3.459208325590039, "grad_norm": 0.953338086605072, "learning_rate": 8.373009657870635e-06, "loss": 0.7742, "step": 18711 }, { "epoch": 3.45939416465341, "grad_norm": 0.8855268359184265, "learning_rate": 8.367158697155863e-06, "loss": 0.8255, "step": 18712 }, { "epoch": 3.4595800037167814, "grad_norm": 0.9158693552017212, "learning_rate": 8.361309692186814e-06, "loss": 0.982, "step": 18713 }, { "epoch": 3.4597658427801523, "grad_norm": 0.9135979413986206, "learning_rate": 8.355462643088373e-06, "loss": 0.8785, "step": 18714 }, { "epoch": 3.4599516818435236, "grad_norm": 0.9471499919891357, "learning_rate": 8.3496175499853e-06, "loss": 0.9211, "step": 18715 }, { "epoch": 3.4601375209068945, "grad_norm": 0.8170753717422485, "learning_rate": 8.343774413002381e-06, "loss": 0.6915, "step": 18716 }, { "epoch": 3.460323359970266, "grad_norm": 0.8711549639701843, "learning_rate": 8.337933232264305e-06, "loss": 0.7263, "step": 18717 }, { "epoch": 3.4605091990336367, "grad_norm": 0.8051249980926514, "learning_rate": 8.332094007895741e-06, "loss": 0.6193, "step": 18718 }, { "epoch": 3.460695038097008, "grad_norm": 0.9760210514068604, "learning_rate": 8.32625674002131e-06, "loss": 0.8647, "step": 18719 }, { "epoch": 3.460880877160379, "grad_norm": 0.9609522223472595, "learning_rate": 8.320421428765635e-06, "loss": 0.7761, "step": 18720 }, { "epoch": 3.4610667162237503, "grad_norm": 1.2400734424591064, "learning_rate": 8.314588074253226e-06, "loss": 0.635, "step": 18721 }, { "epoch": 3.461252555287121, "grad_norm": null, "learning_rate": 8.314588074253226e-06, "loss": 1.1963, "step": 18722 }, { "epoch": 3.4614383943504925, "grad_norm": 1.0367873907089233, "learning_rate": 8.308756676608597e-06, "loss": 0.9893, "step": 18723 }, { "epoch": 3.461624233413864, "grad_norm": 1.1819725036621094, "learning_rate": 8.302927235956193e-06, "loss": 0.7899, "step": 18724 }, { "epoch": 3.4618100724772347, "grad_norm": 0.9547193050384521, "learning_rate": 8.297099752420446e-06, "loss": 
0.8314, "step": 18725 }, { "epoch": 3.4619959115406056, "grad_norm": 1.0257132053375244, "learning_rate": 8.291274226125756e-06, "loss": 0.8587, "step": 18726 }, { "epoch": 3.462181750603977, "grad_norm": 0.8922994136810303, "learning_rate": 8.285450657196437e-06, "loss": 0.8294, "step": 18727 }, { "epoch": 3.4623675896673483, "grad_norm": 0.8677076697349548, "learning_rate": 8.279629045756764e-06, "loss": 0.7083, "step": 18728 }, { "epoch": 3.462553428730719, "grad_norm": 0.9443361759185791, "learning_rate": 8.273809391931042e-06, "loss": 0.9847, "step": 18729 }, { "epoch": 3.4627392677940905, "grad_norm": 0.8874073028564453, "learning_rate": 8.267991695843436e-06, "loss": 0.5835, "step": 18730 }, { "epoch": 3.4629251068574614, "grad_norm": 1.1664199829101562, "learning_rate": 8.262175957618135e-06, "loss": 0.9179, "step": 18731 }, { "epoch": 3.4631109459208327, "grad_norm": 0.8360967040061951, "learning_rate": 8.25636217737924e-06, "loss": 0.7487, "step": 18732 }, { "epoch": 3.4632967849842036, "grad_norm": 1.129758596420288, "learning_rate": 8.250550355250875e-06, "loss": 0.9754, "step": 18733 }, { "epoch": 3.463482624047575, "grad_norm": 1.4248270988464355, "learning_rate": 8.244740491357062e-06, "loss": 1.1269, "step": 18734 }, { "epoch": 3.463668463110946, "grad_norm": 0.9525567889213562, "learning_rate": 8.23893258582179e-06, "loss": 0.9248, "step": 18735 }, { "epoch": 3.463854302174317, "grad_norm": 1.058853030204773, "learning_rate": 8.23312663876904e-06, "loss": 0.7135, "step": 18736 }, { "epoch": 3.464040141237688, "grad_norm": 0.9106456637382507, "learning_rate": 8.227322650322734e-06, "loss": 0.6708, "step": 18737 }, { "epoch": 3.4642259803010593, "grad_norm": 0.8357923030853271, "learning_rate": 8.221520620606738e-06, "loss": 0.8337, "step": 18738 }, { "epoch": 3.46441181936443, "grad_norm": 1.2621804475784302, "learning_rate": 8.215720549744887e-06, "loss": 0.939, "step": 18739 }, { "epoch": 3.4645976584278015, "grad_norm": 0.8909697532653809, 
"learning_rate": 8.20992243786095e-06, "loss": 0.7573, "step": 18740 }, { "epoch": 3.4647834974911724, "grad_norm": 0.9684772491455078, "learning_rate": 8.204126285078728e-06, "loss": 0.8583, "step": 18741 }, { "epoch": 3.4649693365545438, "grad_norm": 1.0464305877685547, "learning_rate": 8.198332091521898e-06, "loss": 0.8594, "step": 18742 }, { "epoch": 3.465155175617915, "grad_norm": 0.8310005068778992, "learning_rate": 8.19253985731413e-06, "loss": 0.8416, "step": 18743 }, { "epoch": 3.465341014681286, "grad_norm": 0.8563743829727173, "learning_rate": 8.186749582579034e-06, "loss": 0.8199, "step": 18744 }, { "epoch": 3.4655268537446573, "grad_norm": 0.8466929197311401, "learning_rate": 8.180961267440223e-06, "loss": 0.8748, "step": 18745 }, { "epoch": 3.465712692808028, "grad_norm": 0.9952386617660522, "learning_rate": 8.175174912021211e-06, "loss": 0.7719, "step": 18746 }, { "epoch": 3.4658985318713995, "grad_norm": 0.9680238366127014, "learning_rate": 8.16939051644554e-06, "loss": 0.8767, "step": 18747 }, { "epoch": 3.4660843709347704, "grad_norm": 0.9317744374275208, "learning_rate": 8.163608080836616e-06, "loss": 0.9406, "step": 18748 }, { "epoch": 3.4662702099981417, "grad_norm": 0.9056304693222046, "learning_rate": 8.157827605317892e-06, "loss": 0.7743, "step": 18749 }, { "epoch": 3.4664560490615126, "grad_norm": 0.8400526642799377, "learning_rate": 8.152049090012736e-06, "loss": 0.89, "step": 18750 }, { "epoch": 3.466641888124884, "grad_norm": 0.905646026134491, "learning_rate": 8.146272535044475e-06, "loss": 1.0205, "step": 18751 }, { "epoch": 3.466827727188255, "grad_norm": 0.8235187530517578, "learning_rate": 8.140497940536385e-06, "loss": 0.5697, "step": 18752 }, { "epoch": 3.467013566251626, "grad_norm": 1.1538528203964233, "learning_rate": 8.134725306611734e-06, "loss": 0.9183, "step": 18753 }, { "epoch": 3.467199405314997, "grad_norm": 0.8710090517997742, "learning_rate": 8.128954633393737e-06, "loss": 0.8054, "step": 18754 }, { "epoch": 
3.4673852443783684, "grad_norm": 1.0354934930801392, "learning_rate": 8.123185921005538e-06, "loss": 0.9725, "step": 18755 }, { "epoch": 3.4675710834417393, "grad_norm": 1.0618163347244263, "learning_rate": 8.11741916957024e-06, "loss": 0.973, "step": 18756 }, { "epoch": 3.4677569225051106, "grad_norm": 0.6448819637298584, "learning_rate": 8.111654379210987e-06, "loss": 0.5416, "step": 18757 }, { "epoch": 3.467942761568482, "grad_norm": 1.0582246780395508, "learning_rate": 8.105891550050782e-06, "loss": 0.9356, "step": 18758 }, { "epoch": 3.468128600631853, "grad_norm": 0.9536944031715393, "learning_rate": 8.100130682212625e-06, "loss": 0.7974, "step": 18759 }, { "epoch": 3.468314439695224, "grad_norm": 0.9796311259269714, "learning_rate": 8.094371775819464e-06, "loss": 0.7147, "step": 18760 }, { "epoch": 3.468500278758595, "grad_norm": 0.9044296145439148, "learning_rate": 8.088614830994223e-06, "loss": 0.8041, "step": 18761 }, { "epoch": 3.4686861178219663, "grad_norm": 0.8856521844863892, "learning_rate": 8.08285984785978e-06, "loss": 0.8417, "step": 18762 }, { "epoch": 3.4688719568853372, "grad_norm": 0.8639485239982605, "learning_rate": 8.07710682653896e-06, "loss": 0.6279, "step": 18763 }, { "epoch": 3.4690577959487086, "grad_norm": 0.8745250701904297, "learning_rate": 8.07135576715453e-06, "loss": 0.8151, "step": 18764 }, { "epoch": 3.4692436350120794, "grad_norm": 0.9135953187942505, "learning_rate": 8.065606669829273e-06, "loss": 0.827, "step": 18765 }, { "epoch": 3.4694294740754508, "grad_norm": 0.9876458048820496, "learning_rate": 8.059859534685877e-06, "loss": 0.7074, "step": 18766 }, { "epoch": 3.4696153131388217, "grad_norm": 0.9676094055175781, "learning_rate": 8.054114361846988e-06, "loss": 0.9162, "step": 18767 }, { "epoch": 3.469801152202193, "grad_norm": 1.0216379165649414, "learning_rate": 8.04837115143523e-06, "loss": 0.6916, "step": 18768 }, { "epoch": 3.469986991265564, "grad_norm": 1.0474780797958374, "learning_rate": 8.042629903573217e-06, 
"loss": 0.7252, "step": 18769 }, { "epoch": 3.470172830328935, "grad_norm": 1.016576886177063, "learning_rate": 8.036890618383474e-06, "loss": 1.1245, "step": 18770 }, { "epoch": 3.470358669392306, "grad_norm": 1.0590362548828125, "learning_rate": 8.03115329598848e-06, "loss": 0.8428, "step": 18771 }, { "epoch": 3.4705445084556774, "grad_norm": 0.9816322922706604, "learning_rate": 8.025417936510671e-06, "loss": 0.8735, "step": 18772 }, { "epoch": 3.4707303475190487, "grad_norm": 0.9632866382598877, "learning_rate": 8.019684540072503e-06, "loss": 0.9617, "step": 18773 }, { "epoch": 3.4709161865824196, "grad_norm": 0.9751099944114685, "learning_rate": 8.013953106796324e-06, "loss": 0.7521, "step": 18774 }, { "epoch": 3.4711020256457905, "grad_norm": 0.8639714121818542, "learning_rate": 8.008223636804457e-06, "loss": 0.8057, "step": 18775 }, { "epoch": 3.471287864709162, "grad_norm": 1.7861194610595703, "learning_rate": 8.002496130219184e-06, "loss": 0.6409, "step": 18776 }, { "epoch": 3.471473703772533, "grad_norm": 0.8952376842498779, "learning_rate": 7.99677058716275e-06, "loss": 0.7854, "step": 18777 }, { "epoch": 3.471659542835904, "grad_norm": 0.9320510625839233, "learning_rate": 7.991047007757379e-06, "loss": 0.7999, "step": 18778 }, { "epoch": 3.4718453818992754, "grad_norm": 1.1861048936843872, "learning_rate": 7.98532539212522e-06, "loss": 0.9218, "step": 18779 }, { "epoch": 3.4720312209626463, "grad_norm": 0.8894674777984619, "learning_rate": 7.979605740388375e-06, "loss": 0.7517, "step": 18780 }, { "epoch": 3.4722170600260176, "grad_norm": 1.066564679145813, "learning_rate": 7.973888052668943e-06, "loss": 0.9006, "step": 18781 }, { "epoch": 3.4724028990893885, "grad_norm": 0.9083952307701111, "learning_rate": 7.968172329088952e-06, "loss": 0.8833, "step": 18782 }, { "epoch": 3.47258873815276, "grad_norm": 0.8635721206665039, "learning_rate": 7.962458569770382e-06, "loss": 0.896, "step": 18783 }, { "epoch": 3.4727745772161307, "grad_norm": 
0.8096431493759155, "learning_rate": 7.956746774835178e-06, "loss": 0.7672, "step": 18784 }, { "epoch": 3.472960416279502, "grad_norm": 0.7971764802932739, "learning_rate": 7.951036944405287e-06, "loss": 0.7884, "step": 18785 }, { "epoch": 3.473146255342873, "grad_norm": 1.084967851638794, "learning_rate": 7.945329078602538e-06, "loss": 0.9731, "step": 18786 }, { "epoch": 3.4733320944062442, "grad_norm": 0.8574565052986145, "learning_rate": 7.93962317754876e-06, "loss": 0.7691, "step": 18787 }, { "epoch": 3.473517933469615, "grad_norm": 1.0025873184204102, "learning_rate": 7.933919241365729e-06, "loss": 0.8392, "step": 18788 }, { "epoch": 3.4737037725329865, "grad_norm": 1.0289613008499146, "learning_rate": 7.92821727017523e-06, "loss": 0.7774, "step": 18789 }, { "epoch": 3.4738896115963573, "grad_norm": 0.9610695838928223, "learning_rate": 7.922517264098927e-06, "loss": 0.7482, "step": 18790 }, { "epoch": 3.4740754506597287, "grad_norm": 0.8945107460021973, "learning_rate": 7.916819223258487e-06, "loss": 0.8455, "step": 18791 }, { "epoch": 3.4742612897231, "grad_norm": 0.8954695463180542, "learning_rate": 7.911123147775502e-06, "loss": 0.8472, "step": 18792 }, { "epoch": 3.474447128786471, "grad_norm": 1.206834077835083, "learning_rate": 7.905429037771572e-06, "loss": 0.8113, "step": 18793 }, { "epoch": 3.474632967849842, "grad_norm": 0.7065415978431702, "learning_rate": 7.899736893368226e-06, "loss": 0.4913, "step": 18794 }, { "epoch": 3.474818806913213, "grad_norm": 0.9732913374900818, "learning_rate": 7.894046714686943e-06, "loss": 0.6407, "step": 18795 }, { "epoch": 3.4750046459765844, "grad_norm": 0.9556186199188232, "learning_rate": 7.888358501849146e-06, "loss": 0.8798, "step": 18796 }, { "epoch": 3.4751904850399553, "grad_norm": 1.121410608291626, "learning_rate": 7.882672254976297e-06, "loss": 0.8895, "step": 18797 }, { "epoch": 3.4753763241033266, "grad_norm": 0.926697850227356, "learning_rate": 7.876987974189708e-06, "loss": 0.9799, "step": 18798 }, { 
"epoch": 3.4755621631666975, "grad_norm": 1.3014369010925293, "learning_rate": 7.871305659610739e-06, "loss": 0.9644, "step": 18799 }, { "epoch": 3.475748002230069, "grad_norm": 0.9880854487419128, "learning_rate": 7.865625311360636e-06, "loss": 0.8778, "step": 18800 }, { "epoch": 3.4759338412934397, "grad_norm": 0.7500266432762146, "learning_rate": 7.859946929560658e-06, "loss": 0.6615, "step": 18801 }, { "epoch": 3.476119680356811, "grad_norm": 0.9513152837753296, "learning_rate": 7.854270514332007e-06, "loss": 0.8724, "step": 18802 }, { "epoch": 3.476305519420182, "grad_norm": 0.945457935333252, "learning_rate": 7.84859606579582e-06, "loss": 0.821, "step": 18803 }, { "epoch": 3.4764913584835533, "grad_norm": 1.0324273109436035, "learning_rate": 7.842923584073192e-06, "loss": 0.8328, "step": 18804 }, { "epoch": 3.476677197546924, "grad_norm": 1.029531478881836, "learning_rate": 7.837253069285234e-06, "loss": 0.9378, "step": 18805 }, { "epoch": 3.4768630366102955, "grad_norm": 0.9380819201469421, "learning_rate": 7.831584521552949e-06, "loss": 0.8479, "step": 18806 }, { "epoch": 3.477048875673667, "grad_norm": 0.8309686779975891, "learning_rate": 7.825917940997308e-06, "loss": 0.7903, "step": 18807 }, { "epoch": 3.4772347147370377, "grad_norm": 1.0270386934280396, "learning_rate": 7.82025332773928e-06, "loss": 0.7759, "step": 18808 }, { "epoch": 3.4774205538004086, "grad_norm": 1.0845005512237549, "learning_rate": 7.814590681899758e-06, "loss": 0.897, "step": 18809 }, { "epoch": 3.47760639286378, "grad_norm": 0.9255852103233337, "learning_rate": 7.808930003599601e-06, "loss": 0.8083, "step": 18810 }, { "epoch": 3.4777922319271513, "grad_norm": 0.8624998927116394, "learning_rate": 7.803271292959636e-06, "loss": 0.6886, "step": 18811 }, { "epoch": 3.477978070990522, "grad_norm": 0.8667799830436707, "learning_rate": 7.797614550100618e-06, "loss": 0.7746, "step": 18812 }, { "epoch": 3.4781639100538935, "grad_norm": 1.1160162687301636, "learning_rate": 
7.791959775143298e-06, "loss": 0.8162, "step": 18813 }, { "epoch": 3.4783497491172644, "grad_norm": 0.8530917167663574, "learning_rate": 7.786306968208368e-06, "loss": 0.552, "step": 18814 }, { "epoch": 3.4785355881806357, "grad_norm": 0.7741609811782837, "learning_rate": 7.780656129416464e-06, "loss": 0.8718, "step": 18815 }, { "epoch": 3.4787214272440066, "grad_norm": 0.9647586941719055, "learning_rate": 7.77500725888819e-06, "loss": 0.6975, "step": 18816 }, { "epoch": 3.478907266307378, "grad_norm": 0.8000401258468628, "learning_rate": 7.769360356744137e-06, "loss": 0.7548, "step": 18817 }, { "epoch": 3.479093105370749, "grad_norm": 1.4719072580337524, "learning_rate": 7.763715423104811e-06, "loss": 1.1186, "step": 18818 }, { "epoch": 3.47927894443412, "grad_norm": 0.9751482605934143, "learning_rate": 7.758072458090681e-06, "loss": 0.8613, "step": 18819 }, { "epoch": 3.479464783497491, "grad_norm": 0.8374261856079102, "learning_rate": 7.752431461822196e-06, "loss": 0.7657, "step": 18820 }, { "epoch": 3.4796506225608623, "grad_norm": 0.846748948097229, "learning_rate": 7.746792434419781e-06, "loss": 0.6049, "step": 18821 }, { "epoch": 3.4798364616242337, "grad_norm": 0.8745054602622986, "learning_rate": 7.741155376003773e-06, "loss": 0.8999, "step": 18822 }, { "epoch": 3.4800223006876045, "grad_norm": 0.958818256855011, "learning_rate": 7.735520286694475e-06, "loss": 0.8278, "step": 18823 }, { "epoch": 3.4802081397509754, "grad_norm": 0.9613667726516724, "learning_rate": 7.729887166612138e-06, "loss": 0.7098, "step": 18824 }, { "epoch": 3.4803939788143468, "grad_norm": 0.9327418208122253, "learning_rate": 7.724256015877051e-06, "loss": 0.8451, "step": 18825 }, { "epoch": 3.480579817877718, "grad_norm": 0.917344868183136, "learning_rate": 7.718626834609354e-06, "loss": 0.9286, "step": 18826 }, { "epoch": 3.480765656941089, "grad_norm": 0.8673303127288818, "learning_rate": 7.712999622929196e-06, "loss": 0.4841, "step": 18827 }, { "epoch": 3.4809514960044603, 
"grad_norm": 0.8560457229614258, "learning_rate": 7.707374380956711e-06, "loss": 0.6967, "step": 18828 }, { "epoch": 3.481137335067831, "grad_norm": 1.1645073890686035, "learning_rate": 7.701751108811938e-06, "loss": 0.753, "step": 18829 }, { "epoch": 3.4813231741312025, "grad_norm": 1.1489976644515991, "learning_rate": 7.696129806614871e-06, "loss": 0.8405, "step": 18830 }, { "epoch": 3.4815090131945734, "grad_norm": 0.9245211482048035, "learning_rate": 7.690510474485535e-06, "loss": 0.6869, "step": 18831 }, { "epoch": 3.4816948522579447, "grad_norm": 0.9011597633361816, "learning_rate": 7.684893112543834e-06, "loss": 0.8236, "step": 18832 }, { "epoch": 3.4818806913213156, "grad_norm": 1.0039126873016357, "learning_rate": 7.679277720909684e-06, "loss": 0.5301, "step": 18833 }, { "epoch": 3.482066530384687, "grad_norm": 0.9441542029380798, "learning_rate": 7.67366429970292e-06, "loss": 0.9177, "step": 18834 }, { "epoch": 3.482252369448058, "grad_norm": 0.8128690123558044, "learning_rate": 7.66805284904335e-06, "loss": 0.6621, "step": 18835 }, { "epoch": 3.482438208511429, "grad_norm": 0.8651259541511536, "learning_rate": 7.662443369050731e-06, "loss": 0.7271, "step": 18836 }, { "epoch": 3.4826240475748, "grad_norm": 0.8950327634811401, "learning_rate": 7.656835859844814e-06, "loss": 0.8193, "step": 18837 }, { "epoch": 3.4828098866381714, "grad_norm": 0.9409818649291992, "learning_rate": 7.651230321545278e-06, "loss": 0.868, "step": 18838 }, { "epoch": 3.4829957257015423, "grad_norm": 0.961976170539856, "learning_rate": 7.64562675427173e-06, "loss": 0.9764, "step": 18839 }, { "epoch": 3.4831815647649136, "grad_norm": 1.0851531028747559, "learning_rate": 7.640025158143782e-06, "loss": 0.6041, "step": 18840 }, { "epoch": 3.483367403828285, "grad_norm": 0.9040879011154175, "learning_rate": 7.634425533281031e-06, "loss": 0.8333, "step": 18841 }, { "epoch": 3.483553242891656, "grad_norm": 0.8291379809379578, "learning_rate": 7.628827879802958e-06, "loss": 0.749, "step": 
18842 }, { "epoch": 3.483739081955027, "grad_norm": 0.9865771532058716, "learning_rate": 7.6232321978290444e-06, "loss": 0.9675, "step": 18843 }, { "epoch": 3.483924921018398, "grad_norm": 1.0167065858840942, "learning_rate": 7.617638487478696e-06, "loss": 0.9132, "step": 18844 }, { "epoch": 3.483924921018398, "eval_loss": 1.044190764427185, "eval_runtime": 23.1506, "eval_samples_per_second": 47.169, "eval_steps_per_second": 23.585, "step": 18844 }, { "epoch": 3.4841107600817693, "grad_norm": 1.0542774200439453, "learning_rate": 7.612046748871327e-06, "loss": 1.1246, "step": 18845 }, { "epoch": 3.4842965991451402, "grad_norm": 0.9042951464653015, "learning_rate": 7.606456982126287e-06, "loss": 0.8144, "step": 18846 }, { "epoch": 3.4844824382085116, "grad_norm": 1.0759397745132446, "learning_rate": 7.6008691873628465e-06, "loss": 0.7357, "step": 18847 }, { "epoch": 3.4846682772718824, "grad_norm": 0.8770076632499695, "learning_rate": 7.595283364700301e-06, "loss": 0.9929, "step": 18848 }, { "epoch": 3.4848541163352538, "grad_norm": 1.0161012411117554, "learning_rate": 7.589699514257875e-06, "loss": 0.7426, "step": 18849 }, { "epoch": 3.4850399553986247, "grad_norm": 0.9345296025276184, "learning_rate": 7.58411763615472e-06, "loss": 0.8618, "step": 18850 }, { "epoch": 3.485225794461996, "grad_norm": 0.8955548405647278, "learning_rate": 7.57853773050996e-06, "loss": 0.7533, "step": 18851 }, { "epoch": 3.485411633525367, "grad_norm": 0.8949218988418579, "learning_rate": 7.5729597974427246e-06, "loss": 0.6548, "step": 18852 }, { "epoch": 3.485597472588738, "grad_norm": 1.1832979917526245, "learning_rate": 7.567383837072062e-06, "loss": 0.9166, "step": 18853 }, { "epoch": 3.485783311652109, "grad_norm": 0.9716928601264954, "learning_rate": 7.561809849516965e-06, "loss": 0.8581, "step": 18854 }, { "epoch": 3.4859691507154804, "grad_norm": 0.9576780796051025, "learning_rate": 7.556237834896396e-06, "loss": 0.811, "step": 18855 }, { "epoch": 3.4861549897788517, "grad_norm": 
0.9692073464393616, "learning_rate": 7.550667793329303e-06, "loss": 0.9054, "step": 18856 }, { "epoch": 3.4863408288422226, "grad_norm": 0.7223288416862488, "learning_rate": 7.545099724934557e-06, "loss": 0.5999, "step": 18857 }, { "epoch": 3.4865266679055935, "grad_norm": 1.0299596786499023, "learning_rate": 7.539533629831008e-06, "loss": 0.806, "step": 18858 }, { "epoch": 3.486712506968965, "grad_norm": 0.9294703006744385, "learning_rate": 7.533969508137418e-06, "loss": 0.945, "step": 18859 }, { "epoch": 3.486898346032336, "grad_norm": 0.9691776633262634, "learning_rate": 7.528407359972589e-06, "loss": 0.8303, "step": 18860 }, { "epoch": 3.487084185095707, "grad_norm": 0.9316863417625427, "learning_rate": 7.522847185455195e-06, "loss": 0.7473, "step": 18861 }, { "epoch": 3.4872700241590784, "grad_norm": 0.923642098903656, "learning_rate": 7.517288984703963e-06, "loss": 0.9339, "step": 18862 }, { "epoch": 3.4874558632224493, "grad_norm": 0.9262492656707764, "learning_rate": 7.511732757837476e-06, "loss": 0.811, "step": 18863 }, { "epoch": 3.4876417022858206, "grad_norm": 1.005344271659851, "learning_rate": 7.506178504974326e-06, "loss": 0.8925, "step": 18864 }, { "epoch": 3.4878275413491915, "grad_norm": 0.959369957447052, "learning_rate": 7.500626226233087e-06, "loss": 0.8635, "step": 18865 }, { "epoch": 3.488013380412563, "grad_norm": 1.0304522514343262, "learning_rate": 7.495075921732253e-06, "loss": 0.7596, "step": 18866 }, { "epoch": 3.4881992194759337, "grad_norm": 1.2714794874191284, "learning_rate": 7.4895275915902505e-06, "loss": 1.1942, "step": 18867 }, { "epoch": 3.488385058539305, "grad_norm": 1.0587759017944336, "learning_rate": 7.4839812359255525e-06, "loss": 0.994, "step": 18868 }, { "epoch": 3.488570897602676, "grad_norm": 1.0070648193359375, "learning_rate": 7.478436854856507e-06, "loss": 0.8278, "step": 18869 }, { "epoch": 3.4887567366660472, "grad_norm": 1.0866297483444214, "learning_rate": 7.472894448501455e-06, "loss": 1.0012, "step": 18870 }, 
{ "epoch": 3.4889425757294186, "grad_norm": 0.9275298714637756, "learning_rate": 7.467354016978667e-06, "loss": 0.8685, "step": 18871 }, { "epoch": 3.4891284147927895, "grad_norm": 0.974448025226593, "learning_rate": 7.461815560406416e-06, "loss": 0.8151, "step": 18872 }, { "epoch": 3.4893142538561603, "grad_norm": 1.1058423519134521, "learning_rate": 7.456279078902928e-06, "loss": 0.8733, "step": 18873 }, { "epoch": 3.4895000929195317, "grad_norm": 0.964985191822052, "learning_rate": 7.450744572586354e-06, "loss": 0.6893, "step": 18874 }, { "epoch": 3.489685931982903, "grad_norm": 0.8219852447509766, "learning_rate": 7.4452120415748e-06, "loss": 0.6868, "step": 18875 }, { "epoch": 3.489871771046274, "grad_norm": 0.8574902415275574, "learning_rate": 7.439681485986383e-06, "loss": 0.7591, "step": 18876 }, { "epoch": 3.490057610109645, "grad_norm": 0.7997888326644897, "learning_rate": 7.434152905939118e-06, "loss": 0.8704, "step": 18877 }, { "epoch": 3.490243449173016, "grad_norm": 0.9168525338172913, "learning_rate": 7.428626301551012e-06, "loss": 1.0332, "step": 18878 }, { "epoch": 3.4904292882363874, "grad_norm": 0.937621533870697, "learning_rate": 7.423101672940003e-06, "loss": 0.7234, "step": 18879 }, { "epoch": 3.4906151272997583, "grad_norm": 0.8390883207321167, "learning_rate": 7.4175790202240415e-06, "loss": 0.7647, "step": 18880 }, { "epoch": 3.4908009663631296, "grad_norm": 0.9851588606834412, "learning_rate": 7.412058343520967e-06, "loss": 0.8092, "step": 18881 }, { "epoch": 3.4909868054265005, "grad_norm": 0.9515487551689148, "learning_rate": 7.406539642948606e-06, "loss": 0.8454, "step": 18882 }, { "epoch": 3.491172644489872, "grad_norm": 1.0104717016220093, "learning_rate": 7.40102291862479e-06, "loss": 0.8593, "step": 18883 }, { "epoch": 3.4913584835532427, "grad_norm": 0.7935540676116943, "learning_rate": 7.395508170667198e-06, "loss": 0.6452, "step": 18884 }, { "epoch": 3.491544322616614, "grad_norm": 0.9558344483375549, "learning_rate": 
7.389995399193595e-06, "loss": 0.8742, "step": 18885 }, { "epoch": 3.491730161679985, "grad_norm": 0.9744873642921448, "learning_rate": 7.384484604321617e-06, "loss": 0.818, "step": 18886 }, { "epoch": 3.4919160007433563, "grad_norm": 1.1060720682144165, "learning_rate": 7.378975786168863e-06, "loss": 0.9289, "step": 18887 }, { "epoch": 3.492101839806727, "grad_norm": 0.9581206440925598, "learning_rate": 7.373468944852946e-06, "loss": 1.0307, "step": 18888 }, { "epoch": 3.4922876788700985, "grad_norm": 0.9559915661811829, "learning_rate": 7.367964080491374e-06, "loss": 0.8827, "step": 18889 }, { "epoch": 3.49247351793347, "grad_norm": 1.1436219215393066, "learning_rate": 7.362461193201642e-06, "loss": 0.9701, "step": 18890 }, { "epoch": 3.4926593569968407, "grad_norm": 1.0252236127853394, "learning_rate": 7.356960283101199e-06, "loss": 0.7517, "step": 18891 }, { "epoch": 3.492845196060212, "grad_norm": 0.9899161458015442, "learning_rate": 7.351461350307465e-06, "loss": 0.85, "step": 18892 }, { "epoch": 3.493031035123583, "grad_norm": 1.1004449129104614, "learning_rate": 7.345964394937787e-06, "loss": 0.8956, "step": 18893 }, { "epoch": 3.4932168741869543, "grad_norm": 1.0823179483413696, "learning_rate": 7.340469417109508e-06, "loss": 0.9652, "step": 18894 }, { "epoch": 3.493402713250325, "grad_norm": 0.96690833568573, "learning_rate": 7.334976416939887e-06, "loss": 0.887, "step": 18895 }, { "epoch": 3.4935885523136965, "grad_norm": 0.9663548469543457, "learning_rate": 7.329485394546187e-06, "loss": 0.9396, "step": 18896 }, { "epoch": 3.4937743913770674, "grad_norm": 0.9076710939407349, "learning_rate": 7.323996350045592e-06, "loss": 0.7719, "step": 18897 }, { "epoch": 3.4939602304404387, "grad_norm": 1.0663554668426514, "learning_rate": 7.318509283555264e-06, "loss": 0.9178, "step": 18898 }, { "epoch": 3.4941460695038096, "grad_norm": 1.2956310510635376, "learning_rate": 7.3130241951922865e-06, "loss": 1.1085, "step": 18899 }, { "epoch": 3.494331908567181, 
"grad_norm": 0.8738195300102234, "learning_rate": 7.307541085073766e-06, "loss": 0.9171, "step": 18900 }, { "epoch": 3.494517747630552, "grad_norm": 0.9473695755004883, "learning_rate": 7.30205995331672e-06, "loss": 0.7803, "step": 18901 }, { "epoch": 3.494703586693923, "grad_norm": 0.9984051585197449, "learning_rate": 7.296580800038122e-06, "loss": 0.9812, "step": 18902 }, { "epoch": 3.494889425757294, "grad_norm": 1.0475997924804688, "learning_rate": 7.291103625354911e-06, "loss": 0.9325, "step": 18903 }, { "epoch": 3.4950752648206653, "grad_norm": 0.9377017021179199, "learning_rate": 7.285628429383984e-06, "loss": 0.7597, "step": 18904 }, { "epoch": 3.4952611038840367, "grad_norm": 0.8637597560882568, "learning_rate": 7.2801552122422454e-06, "loss": 0.5853, "step": 18905 }, { "epoch": 3.4954469429474075, "grad_norm": 0.8163447976112366, "learning_rate": 7.2746839740464704e-06, "loss": 0.6579, "step": 18906 }, { "epoch": 3.4956327820107784, "grad_norm": 1.0498013496398926, "learning_rate": 7.269214714913431e-06, "loss": 0.8269, "step": 18907 }, { "epoch": 3.4958186210741498, "grad_norm": 1.0026522874832153, "learning_rate": 7.2637474349598886e-06, "loss": 0.7059, "step": 18908 }, { "epoch": 3.496004460137521, "grad_norm": 1.001114010810852, "learning_rate": 7.258282134302519e-06, "loss": 0.9834, "step": 18909 }, { "epoch": 3.496190299200892, "grad_norm": 0.9880086779594421, "learning_rate": 7.25281881305796e-06, "loss": 0.8499, "step": 18910 }, { "epoch": 3.4963761382642633, "grad_norm": 1.004437804222107, "learning_rate": 7.2473574713428085e-06, "loss": 0.6975, "step": 18911 }, { "epoch": 3.496561977327634, "grad_norm": 0.7537514567375183, "learning_rate": 7.241898109273659e-06, "loss": 0.6349, "step": 18912 }, { "epoch": 3.4967478163910055, "grad_norm": 1.0492370128631592, "learning_rate": 7.236440726967009e-06, "loss": 1.0626, "step": 18913 }, { "epoch": 3.4969336554543764, "grad_norm": 2.7229769229888916, "learning_rate": 7.230985324539341e-06, "loss": 
1.1309, "step": 18914 }, { "epoch": 3.4971194945177477, "grad_norm": 1.1682634353637695, "learning_rate": 7.225531902107075e-06, "loss": 1.0752, "step": 18915 }, { "epoch": 3.4973053335811186, "grad_norm": 1.3416955471038818, "learning_rate": 7.220080459786649e-06, "loss": 0.8116, "step": 18916 }, { "epoch": 3.49749117264449, "grad_norm": 0.9258460402488708, "learning_rate": 7.214630997694393e-06, "loss": 0.993, "step": 18917 }, { "epoch": 3.497677011707861, "grad_norm": 0.7989770174026489, "learning_rate": 7.209183515946605e-06, "loss": 0.5692, "step": 18918 }, { "epoch": 3.497862850771232, "grad_norm": 0.8964726328849792, "learning_rate": 7.203738014659545e-06, "loss": 0.7662, "step": 18919 }, { "epoch": 3.498048689834603, "grad_norm": 1.1176609992980957, "learning_rate": 7.198294493949475e-06, "loss": 0.8345, "step": 18920 }, { "epoch": 3.4982345288979744, "grad_norm": 0.9257598519325256, "learning_rate": 7.1928529539325495e-06, "loss": 0.7902, "step": 18921 }, { "epoch": 3.4984203679613453, "grad_norm": 0.97118079662323, "learning_rate": 7.187413394724907e-06, "loss": 0.7702, "step": 18922 }, { "epoch": 3.4986062070247166, "grad_norm": 0.9934542179107666, "learning_rate": 7.181975816442643e-06, "loss": 0.8258, "step": 18923 }, { "epoch": 3.498792046088088, "grad_norm": 0.9483210444450378, "learning_rate": 7.176540219201833e-06, "loss": 0.8938, "step": 18924 }, { "epoch": 3.498977885151459, "grad_norm": 0.8880845904350281, "learning_rate": 7.171106603118482e-06, "loss": 0.5926, "step": 18925 }, { "epoch": 3.49916372421483, "grad_norm": 0.8391692638397217, "learning_rate": 7.165674968308567e-06, "loss": 0.9354, "step": 18926 }, { "epoch": 3.499349563278201, "grad_norm": 0.9406927824020386, "learning_rate": 7.160245314887992e-06, "loss": 0.6715, "step": 18927 }, { "epoch": 3.4995354023415723, "grad_norm": 0.9885954856872559, "learning_rate": 7.1548176429726775e-06, "loss": 0.807, "step": 18928 }, { "epoch": 3.4997212414049432, "grad_norm": 1.1389963626861572, 
"learning_rate": 7.149391952678452e-06, "loss": 0.8965, "step": 18929 }, { "epoch": 3.4999070804683146, "grad_norm": 1.0039417743682861, "learning_rate": 7.143968244121113e-06, "loss": 0.7828, "step": 18930 }, { "epoch": 3.5000929195316854, "grad_norm": 0.9714101552963257, "learning_rate": 7.138546517416411e-06, "loss": 0.8252, "step": 18931 }, { "epoch": 3.5002787585950568, "grad_norm": 1.0853351354599, "learning_rate": 7.1331267726800985e-06, "loss": 0.6779, "step": 18932 }, { "epoch": 3.5004645976584277, "grad_norm": 0.8246802687644958, "learning_rate": 7.127709010027827e-06, "loss": 0.8654, "step": 18933 }, { "epoch": 3.500650436721799, "grad_norm": 0.7620457410812378, "learning_rate": 7.122293229575228e-06, "loss": 0.665, "step": 18934 }, { "epoch": 3.5008362757851703, "grad_norm": 0.8930896520614624, "learning_rate": 7.116879431437862e-06, "loss": 0.8025, "step": 18935 }, { "epoch": 3.501022114848541, "grad_norm": 1.221191644668579, "learning_rate": 7.111467615731349e-06, "loss": 1.017, "step": 18936 }, { "epoch": 3.501207953911912, "grad_norm": 0.8264509439468384, "learning_rate": 7.106057782571152e-06, "loss": 0.9468, "step": 18937 }, { "epoch": 3.5013937929752834, "grad_norm": 0.8465451002120972, "learning_rate": 7.1006499320727336e-06, "loss": 0.5654, "step": 18938 }, { "epoch": 3.5015796320386547, "grad_norm": 0.8156012296676636, "learning_rate": 7.095244064351514e-06, "loss": 0.6718, "step": 18939 }, { "epoch": 3.5017654711020256, "grad_norm": 1.0571731328964233, "learning_rate": 7.089840179522888e-06, "loss": 0.9362, "step": 18940 }, { "epoch": 3.5019513101653965, "grad_norm": 0.9062555432319641, "learning_rate": 7.084438277702188e-06, "loss": 0.8661, "step": 18941 }, { "epoch": 3.502137149228768, "grad_norm": 1.0284250974655151, "learning_rate": 7.079038359004708e-06, "loss": 0.9107, "step": 18942 }, { "epoch": 3.502322988292139, "grad_norm": 1.419744610786438, "learning_rate": 7.073640423545669e-06, "loss": 1.0237, "step": 18943 }, { "epoch": 
3.50250882735551, "grad_norm": 0.9171438813209534, "learning_rate": 7.068244471440333e-06, "loss": 0.8541, "step": 18944 }, { "epoch": 3.5026946664188814, "grad_norm": 0.8876679539680481, "learning_rate": 7.06285050280382e-06, "loss": 0.7288, "step": 18945 }, { "epoch": 3.5028805054822523, "grad_norm": 0.9148408770561218, "learning_rate": 7.057458517751292e-06, "loss": 0.8729, "step": 18946 }, { "epoch": 3.5030663445456236, "grad_norm": 0.8968338966369629, "learning_rate": 7.052068516397803e-06, "loss": 0.8159, "step": 18947 }, { "epoch": 3.5032521836089945, "grad_norm": 0.8965976238250732, "learning_rate": 7.046680498858415e-06, "loss": 0.6007, "step": 18948 }, { "epoch": 3.503438022672366, "grad_norm": 1.0043259859085083, "learning_rate": 7.041294465248127e-06, "loss": 0.8904, "step": 18949 }, { "epoch": 3.5036238617357367, "grad_norm": 0.9085475206375122, "learning_rate": 7.035910415681879e-06, "loss": 0.7296, "step": 18950 }, { "epoch": 3.503809700799108, "grad_norm": 1.0071899890899658, "learning_rate": 7.030528350274579e-06, "loss": 0.9354, "step": 18951 }, { "epoch": 3.503995539862479, "grad_norm": 0.9521633982658386, "learning_rate": 7.0251482691411245e-06, "loss": 0.9424, "step": 18952 }, { "epoch": 3.5041813789258502, "grad_norm": 0.7816515564918518, "learning_rate": 7.019770172396334e-06, "loss": 0.7411, "step": 18953 }, { "epoch": 3.5043672179892216, "grad_norm": 0.8777903318405151, "learning_rate": 7.014394060154983e-06, "loss": 0.7773, "step": 18954 }, { "epoch": 3.5045530570525925, "grad_norm": 0.9105899930000305, "learning_rate": 7.009019932531802e-06, "loss": 1.0029, "step": 18955 }, { "epoch": 3.5047388961159633, "grad_norm": 1.0930300951004028, "learning_rate": 7.0036477896415095e-06, "loss": 1.1189, "step": 18956 }, { "epoch": 3.5049247351793347, "grad_norm": 0.8531712293624878, "learning_rate": 6.998277631598793e-06, "loss": 0.7549, "step": 18957 }, { "epoch": 3.505110574242706, "grad_norm": 0.9285844564437866, "learning_rate": 
6.992909458518237e-06, "loss": 0.6884, "step": 18958 }, { "epoch": 3.505296413306077, "grad_norm": 1.1396297216415405, "learning_rate": 6.987543270514407e-06, "loss": 0.7258, "step": 18959 }, { "epoch": 3.505482252369448, "grad_norm": 0.9273241758346558, "learning_rate": 6.982179067701866e-06, "loss": 0.9317, "step": 18960 }, { "epoch": 3.505668091432819, "grad_norm": 0.8453611135482788, "learning_rate": 6.976816850195089e-06, "loss": 0.6724, "step": 18961 }, { "epoch": 3.5058539304961904, "grad_norm": 1.0250625610351562, "learning_rate": 6.9714566181085185e-06, "loss": 0.9586, "step": 18962 }, { "epoch": 3.5060397695595613, "grad_norm": 0.8976748585700989, "learning_rate": 6.966098371556551e-06, "loss": 0.9126, "step": 18963 }, { "epoch": 3.5062256086229326, "grad_norm": 1.01888108253479, "learning_rate": 6.960742110653573e-06, "loss": 0.6516, "step": 18964 }, { "epoch": 3.5064114476863035, "grad_norm": 0.8228797912597656, "learning_rate": 6.9553878355138936e-06, "loss": 0.7013, "step": 18965 }, { "epoch": 3.506597286749675, "grad_norm": 0.8661003112792969, "learning_rate": 6.950035546251777e-06, "loss": 0.7522, "step": 18966 }, { "epoch": 3.5067831258130457, "grad_norm": 0.9044394493103027, "learning_rate": 6.944685242981463e-06, "loss": 1.0133, "step": 18967 }, { "epoch": 3.506968964876417, "grad_norm": 0.8815124034881592, "learning_rate": 6.939336925817186e-06, "loss": 0.8539, "step": 18968 }, { "epoch": 3.5071548039397884, "grad_norm": 0.9001720547676086, "learning_rate": 6.9339905948730525e-06, "loss": 0.5971, "step": 18969 }, { "epoch": 3.5073406430031593, "grad_norm": 1.0033185482025146, "learning_rate": 6.928646250263182e-06, "loss": 0.7974, "step": 18970 }, { "epoch": 3.50752648206653, "grad_norm": 0.9565051794052124, "learning_rate": 6.923303892101629e-06, "loss": 0.9197, "step": 18971 }, { "epoch": 3.5077123211299015, "grad_norm": 1.0655335187911987, "learning_rate": 6.917963520502435e-06, "loss": 0.9777, "step": 18972 }, { "epoch": 3.507898160193273, 
"grad_norm": 0.938111424446106, "learning_rate": 6.9126251355795864e-06, "loss": 0.7424, "step": 18973 }, { "epoch": 3.5080839992566437, "grad_norm": 1.0866252183914185, "learning_rate": 6.907288737447004e-06, "loss": 0.7386, "step": 18974 }, { "epoch": 3.5082698383200146, "grad_norm": 0.9407966136932373, "learning_rate": 6.901954326218573e-06, "loss": 0.9777, "step": 18975 }, { "epoch": 3.508455677383386, "grad_norm": 1.230141043663025, "learning_rate": 6.8966219020081826e-06, "loss": 0.9592, "step": 18976 }, { "epoch": 3.5086415164467573, "grad_norm": 0.9053949117660522, "learning_rate": 6.891291464929606e-06, "loss": 0.7945, "step": 18977 }, { "epoch": 3.508827355510128, "grad_norm": 0.8104222416877747, "learning_rate": 6.885963015096652e-06, "loss": 0.9587, "step": 18978 }, { "epoch": 3.5090131945734995, "grad_norm": 0.9656167030334473, "learning_rate": 6.88063655262301e-06, "loss": 0.6239, "step": 18979 }, { "epoch": 3.5091990336368704, "grad_norm": 1.8394811153411865, "learning_rate": 6.875312077622398e-06, "loss": 1.1899, "step": 18980 }, { "epoch": 3.5093848727002417, "grad_norm": 0.9846952557563782, "learning_rate": 6.869989590208448e-06, "loss": 0.9301, "step": 18981 }, { "epoch": 3.5095707117636126, "grad_norm": 1.0243431329727173, "learning_rate": 6.864669090494746e-06, "loss": 0.7457, "step": 18982 }, { "epoch": 3.509756550826984, "grad_norm": 0.9600151181221008, "learning_rate": 6.8593505785948365e-06, "loss": 0.7291, "step": 18983 }, { "epoch": 3.5099423898903552, "grad_norm": 0.9003852009773254, "learning_rate": 6.854034054622271e-06, "loss": 0.8666, "step": 18984 }, { "epoch": 3.510128228953726, "grad_norm": 1.0153359174728394, "learning_rate": 6.848719518690505e-06, "loss": 0.826, "step": 18985 }, { "epoch": 3.510314068017097, "grad_norm": 0.861562967300415, "learning_rate": 6.843406970912969e-06, "loss": 0.6989, "step": 18986 }, { "epoch": 3.5104999070804683, "grad_norm": 1.1449443101882935, "learning_rate": 6.838096411403017e-06, "loss": 0.7711, 
"step": 18987 }, { "epoch": 3.5106857461438397, "grad_norm": 0.8476724028587341, "learning_rate": 6.8327878402740465e-06, "loss": 0.8695, "step": 18988 }, { "epoch": 3.5108715852072105, "grad_norm": 0.8885052800178528, "learning_rate": 6.827481257639345e-06, "loss": 0.8422, "step": 18989 }, { "epoch": 3.5110574242705814, "grad_norm": 0.90379798412323, "learning_rate": 6.822176663612168e-06, "loss": 0.8569, "step": 18990 }, { "epoch": 3.5112432633339528, "grad_norm": 0.9090231657028198, "learning_rate": 6.816874058305711e-06, "loss": 0.7485, "step": 18991 }, { "epoch": 3.511429102397324, "grad_norm": 0.8142905831336975, "learning_rate": 6.8115734418331965e-06, "loss": 0.7595, "step": 18992 }, { "epoch": 3.511614941460695, "grad_norm": 0.9143938422203064, "learning_rate": 6.806274814307712e-06, "loss": 0.6598, "step": 18993 }, { "epoch": 3.5118007805240663, "grad_norm": 0.9759804606437683, "learning_rate": 6.800978175842377e-06, "loss": 0.7634, "step": 18994 }, { "epoch": 3.511986619587437, "grad_norm": 0.9622668623924255, "learning_rate": 6.7956835265502e-06, "loss": 0.5872, "step": 18995 }, { "epoch": 3.5121724586508085, "grad_norm": 0.9753997325897217, "learning_rate": 6.790390866544238e-06, "loss": 0.687, "step": 18996 }, { "epoch": 3.5123582977141794, "grad_norm": 1.0855896472930908, "learning_rate": 6.785100195937422e-06, "loss": 0.5035, "step": 18997 }, { "epoch": 3.5125441367775507, "grad_norm": 1.1820878982543945, "learning_rate": 6.7798115148426715e-06, "loss": 0.9115, "step": 18998 }, { "epoch": 3.5127299758409216, "grad_norm": 0.9932574033737183, "learning_rate": 6.774524823372863e-06, "loss": 0.8128, "step": 18999 }, { "epoch": 3.512915814904293, "grad_norm": 0.9725932478904724, "learning_rate": 6.769240121640863e-06, "loss": 0.8454, "step": 19000 }, { "epoch": 3.513101653967664, "grad_norm": 1.1076157093048096, "learning_rate": 6.763957409759447e-06, "loss": 0.7596, "step": 19001 }, { "epoch": 3.513287493031035, "grad_norm": 0.8583747744560242, 
"learning_rate": 6.758676687841348e-06, "loss": 0.8204, "step": 19002 }, { "epoch": 3.5134733320944065, "grad_norm": 0.8928229808807373, "learning_rate": 6.753397955999274e-06, "loss": 0.8113, "step": 19003 }, { "epoch": 3.5136591711577774, "grad_norm": 0.9273632764816284, "learning_rate": 6.748121214345926e-06, "loss": 0.8261, "step": 19004 }, { "epoch": 3.5138450102211483, "grad_norm": 0.8253338932991028, "learning_rate": 6.742846462993901e-06, "loss": 0.6899, "step": 19005 }, { "epoch": 3.5140308492845196, "grad_norm": 0.9060574769973755, "learning_rate": 6.737573702055766e-06, "loss": 0.8283, "step": 19006 }, { "epoch": 3.514216688347891, "grad_norm": 0.8999438881874084, "learning_rate": 6.732302931644085e-06, "loss": 0.888, "step": 19007 }, { "epoch": 3.514402527411262, "grad_norm": 1.105154037475586, "learning_rate": 6.727034151871336e-06, "loss": 0.7137, "step": 19008 }, { "epoch": 3.514588366474633, "grad_norm": 0.865492045879364, "learning_rate": 6.721767362849996e-06, "loss": 0.7081, "step": 19009 }, { "epoch": 3.514774205538004, "grad_norm": 0.9106673002243042, "learning_rate": 6.716502564692462e-06, "loss": 0.739, "step": 19010 }, { "epoch": 3.5149600446013753, "grad_norm": 0.8903344869613647, "learning_rate": 6.711239757511079e-06, "loss": 0.8402, "step": 19011 }, { "epoch": 3.5151458836647462, "grad_norm": 1.0108816623687744, "learning_rate": 6.705978941418212e-06, "loss": 0.8127, "step": 19012 }, { "epoch": 3.5153317227281176, "grad_norm": 0.9158901572227478, "learning_rate": 6.700720116526116e-06, "loss": 0.8192, "step": 19013 }, { "epoch": 3.5155175617914884, "grad_norm": 0.9774060845375061, "learning_rate": 6.695463282947057e-06, "loss": 0.7544, "step": 19014 }, { "epoch": 3.5157034008548598, "grad_norm": 1.0111356973648071, "learning_rate": 6.690208440793189e-06, "loss": 0.9235, "step": 19015 }, { "epoch": 3.5158892399182307, "grad_norm": 1.0181794166564941, "learning_rate": 6.684955590176711e-06, "loss": 0.676, "step": 19016 }, { "epoch": 
3.516075078981602, "grad_norm": 1.300851583480835, "learning_rate": 6.679704731209724e-06, "loss": 1.1365, "step": 19017 }, { "epoch": 3.5162609180449733, "grad_norm": 0.9330505728721619, "learning_rate": 6.674455864004292e-06, "loss": 0.8658, "step": 19018 }, { "epoch": 3.516446757108344, "grad_norm": 1.028049111366272, "learning_rate": 6.669208988672426e-06, "loss": 0.8605, "step": 19019 }, { "epoch": 3.516632596171715, "grad_norm": 1.0538666248321533, "learning_rate": 6.663964105326159e-06, "loss": 0.859, "step": 19020 }, { "epoch": 3.5168184352350864, "grad_norm": 0.8396360874176025, "learning_rate": 6.658721214077412e-06, "loss": 0.7899, "step": 19021 }, { "epoch": 3.5170042742984577, "grad_norm": 1.0336717367172241, "learning_rate": 6.653480315038074e-06, "loss": 0.8283, "step": 19022 }, { "epoch": 3.5171901133618286, "grad_norm": 0.9230265021324158, "learning_rate": 6.648241408319988e-06, "loss": 0.7298, "step": 19023 }, { "epoch": 3.5173759524251995, "grad_norm": 0.952717125415802, "learning_rate": 6.643004494035021e-06, "loss": 0.8616, "step": 19024 }, { "epoch": 3.517561791488571, "grad_norm": 0.93483567237854, "learning_rate": 6.637769572294905e-06, "loss": 0.8873, "step": 19025 }, { "epoch": 3.517747630551942, "grad_norm": 0.948124349117279, "learning_rate": 6.632536643211373e-06, "loss": 0.7508, "step": 19026 }, { "epoch": 3.517933469615313, "grad_norm": 1.176931619644165, "learning_rate": 6.6273057068961255e-06, "loss": 0.7108, "step": 19027 }, { "epoch": 3.5181193086786844, "grad_norm": 1.1013628244400024, "learning_rate": 6.622076763460816e-06, "loss": 1.1076, "step": 19028 }, { "epoch": 3.5183051477420553, "grad_norm": 1.0013099908828735, "learning_rate": 6.616849813017012e-06, "loss": 0.8936, "step": 19029 }, { "epoch": 3.5184909868054266, "grad_norm": 0.9172407984733582, "learning_rate": 6.611624855676313e-06, "loss": 0.7041, "step": 19030 }, { "epoch": 3.5186768258687975, "grad_norm": 0.9969894886016846, "learning_rate": 6.6064018915502065e-06, 
"loss": 0.5888, "step": 19031 }, { "epoch": 3.518862664932169, "grad_norm": 1.0015980005264282, "learning_rate": 6.601180920750205e-06, "loss": 0.7944, "step": 19032 }, { "epoch": 3.5190485039955397, "grad_norm": 0.9136693477630615, "learning_rate": 6.595961943387707e-06, "loss": 0.8752, "step": 19033 }, { "epoch": 3.519234343058911, "grad_norm": 0.9295935034751892, "learning_rate": 6.5907449595741244e-06, "loss": 0.7903, "step": 19034 }, { "epoch": 3.519420182122282, "grad_norm": 1.0148817300796509, "learning_rate": 6.5855299694207675e-06, "loss": 0.8275, "step": 19035 }, { "epoch": 3.5196060211856532, "grad_norm": 0.8606860041618347, "learning_rate": 6.5803169730389914e-06, "loss": 0.6784, "step": 19036 }, { "epoch": 3.5197918602490246, "grad_norm": 0.8806768655776978, "learning_rate": 6.5751059705400295e-06, "loss": 0.6457, "step": 19037 }, { "epoch": 3.5199776993123955, "grad_norm": 0.9651481509208679, "learning_rate": 6.569896962035094e-06, "loss": 0.7849, "step": 19038 }, { "epoch": 3.5201635383757663, "grad_norm": 1.0954023599624634, "learning_rate": 6.564689947635383e-06, "loss": 0.7911, "step": 19039 }, { "epoch": 3.5203493774391377, "grad_norm": 0.8198789358139038, "learning_rate": 6.55948492745202e-06, "loss": 0.5841, "step": 19040 }, { "epoch": 3.520535216502509, "grad_norm": 0.9690950512886047, "learning_rate": 6.554281901596104e-06, "loss": 0.6836, "step": 19041 }, { "epoch": 3.52072105556588, "grad_norm": 0.977009117603302, "learning_rate": 6.549080870178692e-06, "loss": 0.6822, "step": 19042 }, { "epoch": 3.520906894629251, "grad_norm": 0.9366753101348877, "learning_rate": 6.543881833310761e-06, "loss": 0.9865, "step": 19043 }, { "epoch": 3.521092733692622, "grad_norm": 1.1933904886245728, "learning_rate": 6.538684791103311e-06, "loss": 1.0158, "step": 19044 }, { "epoch": 3.5212785727559934, "grad_norm": 0.9818659424781799, "learning_rate": 6.5334897436672535e-06, "loss": 0.7215, "step": 19045 }, { "epoch": 3.5214644118193643, "grad_norm": 
0.9957472085952759, "learning_rate": 6.528296691113445e-06, "loss": 0.7268, "step": 19046 }, { "epoch": 3.5216502508827356, "grad_norm": 1.0240875482559204, "learning_rate": 6.523105633552751e-06, "loss": 0.7408, "step": 19047 }, { "epoch": 3.5218360899461065, "grad_norm": 0.9679796695709229, "learning_rate": 6.51791657109595e-06, "loss": 0.9828, "step": 19048 }, { "epoch": 3.522021929009478, "grad_norm": 1.1154346466064453, "learning_rate": 6.5127295038538095e-06, "loss": 0.9706, "step": 19049 }, { "epoch": 3.5222077680728487, "grad_norm": 0.858024537563324, "learning_rate": 6.507544431937007e-06, "loss": 0.8874, "step": 19050 }, { "epoch": 3.52239360713622, "grad_norm": 1.121969223022461, "learning_rate": 6.5023613554562216e-06, "loss": 0.7596, "step": 19051 }, { "epoch": 3.5225794461995914, "grad_norm": 0.8989073634147644, "learning_rate": 6.497180274522107e-06, "loss": 0.8389, "step": 19052 }, { "epoch": 3.5227652852629623, "grad_norm": 0.9460635185241699, "learning_rate": 6.492001189245223e-06, "loss": 1.0833, "step": 19053 }, { "epoch": 3.522951124326333, "grad_norm": 0.9267661571502686, "learning_rate": 6.48682409973611e-06, "loss": 1.0331, "step": 19054 }, { "epoch": 3.5231369633897045, "grad_norm": 0.9577577710151672, "learning_rate": 6.48164900610524e-06, "loss": 0.9354, "step": 19055 }, { "epoch": 3.523322802453076, "grad_norm": 1.011656641960144, "learning_rate": 6.4764759084631085e-06, "loss": 0.809, "step": 19056 }, { "epoch": 3.5235086415164467, "grad_norm": 0.9175121784210205, "learning_rate": 6.471304806920109e-06, "loss": 0.7586, "step": 19057 }, { "epoch": 3.5236944805798176, "grad_norm": 1.0583686828613281, "learning_rate": 6.466135701586584e-06, "loss": 0.8551, "step": 19058 }, { "epoch": 3.523880319643189, "grad_norm": 1.392242431640625, "learning_rate": 6.460968592572892e-06, "loss": 0.7784, "step": 19059 }, { "epoch": 3.5240661587065603, "grad_norm": 1.0272305011749268, "learning_rate": 6.45580347998932e-06, "loss": 0.8824, "step": 19060 }, 
{ "epoch": 3.524251997769931, "grad_norm": 0.9726600646972656, "learning_rate": 6.45064036394607e-06, "loss": 1.2702, "step": 19061 }, { "epoch": 3.5244378368333025, "grad_norm": 1.0088410377502441, "learning_rate": 6.445479244553376e-06, "loss": 0.8307, "step": 19062 }, { "epoch": 3.5246236758966734, "grad_norm": 1.0037113428115845, "learning_rate": 6.440320121921372e-06, "loss": 0.8856, "step": 19063 }, { "epoch": 3.5248095149600447, "grad_norm": 1.1829400062561035, "learning_rate": 6.435162996160204e-06, "loss": 1.0169, "step": 19064 }, { "epoch": 3.5249953540234156, "grad_norm": 0.8123091459274292, "learning_rate": 6.430007867379905e-06, "loss": 0.8608, "step": 19065 }, { "epoch": 3.525181193086787, "grad_norm": 1.1172996759414673, "learning_rate": 6.4248547356905105e-06, "loss": 0.8055, "step": 19066 }, { "epoch": 3.5253670321501582, "grad_norm": 0.9727967381477356, "learning_rate": 6.41970360120202e-06, "loss": 0.8744, "step": 19067 }, { "epoch": 3.525552871213529, "grad_norm": 1.1461392641067505, "learning_rate": 6.4145544640243585e-06, "loss": 0.973, "step": 19068 }, { "epoch": 3.5257387102769, "grad_norm": 1.0670104026794434, "learning_rate": 6.409407324267447e-06, "loss": 0.8479, "step": 19069 }, { "epoch": 3.5259245493402713, "grad_norm": 1.036465048789978, "learning_rate": 6.404262182041099e-06, "loss": 0.7001, "step": 19070 }, { "epoch": 3.5261103884036427, "grad_norm": 1.0140867233276367, "learning_rate": 6.39911903745517e-06, "loss": 0.8693, "step": 19071 }, { "epoch": 3.5262962274670135, "grad_norm": 0.7492370009422302, "learning_rate": 6.3939778906194294e-06, "loss": 0.6441, "step": 19072 }, { "epoch": 3.5264820665303844, "grad_norm": 0.8034688234329224, "learning_rate": 6.388838741643599e-06, "loss": 0.6989, "step": 19073 }, { "epoch": 3.5266679055937558, "grad_norm": 0.9098494052886963, "learning_rate": 6.383701590637359e-06, "loss": 1.0181, "step": 19074 }, { "epoch": 3.526853744657127, "grad_norm": 1.013979196548462, "learning_rate": 
6.3785664377103425e-06, "loss": 1.0134, "step": 19075 }, { "epoch": 3.527039583720498, "grad_norm": 0.8733248114585876, "learning_rate": 6.3734332829721745e-06, "loss": 0.7987, "step": 19076 }, { "epoch": 3.5272254227838693, "grad_norm": 0.9233043193817139, "learning_rate": 6.36830212653241e-06, "loss": 0.8204, "step": 19077 }, { "epoch": 3.52741126184724, "grad_norm": 0.9699963927268982, "learning_rate": 6.363172968500541e-06, "loss": 0.8159, "step": 19078 }, { "epoch": 3.5275971009106115, "grad_norm": 0.8718108534812927, "learning_rate": 6.3580458089860794e-06, "loss": 0.8537, "step": 19079 }, { "epoch": 3.5277829399739824, "grad_norm": 0.9551896452903748, "learning_rate": 6.352920648098426e-06, "loss": 0.787, "step": 19080 }, { "epoch": 3.5279687790373537, "grad_norm": 0.8582215309143066, "learning_rate": 6.3477974859469825e-06, "loss": 0.7686, "step": 19081 }, { "epoch": 3.5281546181007246, "grad_norm": 1.047671914100647, "learning_rate": 6.342676322641072e-06, "loss": 0.76, "step": 19082 }, { "epoch": 3.528340457164096, "grad_norm": 0.7875006198883057, "learning_rate": 6.337557158290009e-06, "loss": 0.8089, "step": 19083 }, { "epoch": 3.528526296227467, "grad_norm": 1.0197948217391968, "learning_rate": 6.33243999300307e-06, "loss": 1.2163, "step": 19084 }, { "epoch": 3.528712135290838, "grad_norm": 0.9266148805618286, "learning_rate": 6.327324826889469e-06, "loss": 0.7697, "step": 19085 }, { "epoch": 3.5288979743542095, "grad_norm": 1.0414260625839233, "learning_rate": 6.322211660058342e-06, "loss": 0.8559, "step": 19086 }, { "epoch": 3.5290838134175804, "grad_norm": 1.0009326934814453, "learning_rate": 6.317100492618877e-06, "loss": 0.9016, "step": 19087 }, { "epoch": 3.5292696524809513, "grad_norm": 0.91349196434021, "learning_rate": 6.311991324680133e-06, "loss": 0.6186, "step": 19088 }, { "epoch": 3.5294554915443226, "grad_norm": 0.8203105926513672, "learning_rate": 6.306884156351156e-06, "loss": 0.8004, "step": 19089 }, { "epoch": 3.529641330607694, 
"grad_norm": 0.9701465964317322, "learning_rate": 6.301778987740936e-06, "loss": 0.8254, "step": 19090 }, { "epoch": 3.529827169671065, "grad_norm": 1.039881706237793, "learning_rate": 6.296675818958475e-06, "loss": 0.7108, "step": 19091 }, { "epoch": 3.530013008734436, "grad_norm": 0.9743512272834778, "learning_rate": 6.291574650112641e-06, "loss": 0.8026, "step": 19092 }, { "epoch": 3.530198847797807, "grad_norm": 0.9358527064323425, "learning_rate": 6.28647548131236e-06, "loss": 0.9723, "step": 19093 }, { "epoch": 3.5303846868611783, "grad_norm": 0.8311519622802734, "learning_rate": 6.281378312666442e-06, "loss": 0.7873, "step": 19094 }, { "epoch": 3.5305705259245492, "grad_norm": 0.8295403122901917, "learning_rate": 6.276283144283656e-06, "loss": 0.7648, "step": 19095 }, { "epoch": 3.5307563649879206, "grad_norm": 0.9160231947898865, "learning_rate": 6.271189976272785e-06, "loss": 0.8451, "step": 19096 }, { "epoch": 3.5309422040512914, "grad_norm": 1.2153279781341553, "learning_rate": 6.266098808742516e-06, "loss": 0.8933, "step": 19097 }, { "epoch": 3.5311280431146628, "grad_norm": 0.8746144771575928, "learning_rate": 6.261009641801507e-06, "loss": 0.7959, "step": 19098 }, { "epoch": 3.5313138821780337, "grad_norm": 0.825903594493866, "learning_rate": 6.2559224755583955e-06, "loss": 0.6744, "step": 19099 }, { "epoch": 3.531499721241405, "grad_norm": 0.8564138412475586, "learning_rate": 6.250837310121738e-06, "loss": 0.7711, "step": 19100 }, { "epoch": 3.5316855603047763, "grad_norm": 0.9014472365379333, "learning_rate": 6.245754145600091e-06, "loss": 0.9041, "step": 19101 }, { "epoch": 3.531871399368147, "grad_norm": 0.9554387927055359, "learning_rate": 6.240672982101914e-06, "loss": 0.8498, "step": 19102 }, { "epoch": 3.532057238431518, "grad_norm": 0.8042546510696411, "learning_rate": 6.235593819735674e-06, "loss": 0.8419, "step": 19103 }, { "epoch": 3.5322430774948894, "grad_norm": 0.9801455736160278, "learning_rate": 6.230516658609798e-06, "loss": 0.834, 
"step": 19104 }, { "epoch": 3.5324289165582607, "grad_norm": 0.9968674778938293, "learning_rate": 6.225441498832629e-06, "loss": 0.9294, "step": 19105 }, { "epoch": 3.5326147556216316, "grad_norm": 0.9890447854995728, "learning_rate": 6.220368340512461e-06, "loss": 0.7123, "step": 19106 }, { "epoch": 3.5328005946850025, "grad_norm": 0.8202847242355347, "learning_rate": 6.215297183757629e-06, "loss": 0.5153, "step": 19107 }, { "epoch": 3.532986433748374, "grad_norm": 0.8917155265808105, "learning_rate": 6.2102280286763235e-06, "loss": 0.9371, "step": 19108 }, { "epoch": 3.533172272811745, "grad_norm": 0.9823048114776611, "learning_rate": 6.2051608753767585e-06, "loss": 0.7532, "step": 19109 }, { "epoch": 3.533358111875116, "grad_norm": 0.8202247023582458, "learning_rate": 6.200095723967058e-06, "loss": 0.5955, "step": 19110 }, { "epoch": 3.5335439509384874, "grad_norm": 0.9594755172729492, "learning_rate": 6.1950325745553705e-06, "loss": 0.8521, "step": 19111 }, { "epoch": 3.5337297900018583, "grad_norm": 1.0144023895263672, "learning_rate": 6.18997142724973e-06, "loss": 0.7749, "step": 19112 }, { "epoch": 3.5339156290652296, "grad_norm": 1.067853331565857, "learning_rate": 6.1849122821581505e-06, "loss": 0.873, "step": 19113 }, { "epoch": 3.5341014681286005, "grad_norm": 1.0349130630493164, "learning_rate": 6.179855139388635e-06, "loss": 0.821, "step": 19114 }, { "epoch": 3.534287307191972, "grad_norm": 0.8972457051277161, "learning_rate": 6.174799999049097e-06, "loss": 0.7264, "step": 19115 }, { "epoch": 3.534473146255343, "grad_norm": 0.8936841487884521, "learning_rate": 6.169746861247461e-06, "loss": 0.6736, "step": 19116 }, { "epoch": 3.534658985318714, "grad_norm": 1.215692400932312, "learning_rate": 6.164695726091563e-06, "loss": 0.8523, "step": 19117 }, { "epoch": 3.534844824382085, "grad_norm": 0.904807448387146, "learning_rate": 6.1596465936891835e-06, "loss": 0.884, "step": 19118 }, { "epoch": 3.5350306634454562, "grad_norm": 1.222141146659851, 
"learning_rate": 6.154599464148136e-06, "loss": 1.0758, "step": 19119 }, { "epoch": 3.5352165025088276, "grad_norm": 0.9937120079994202, "learning_rate": 6.149554337576113e-06, "loss": 0.8717, "step": 19120 }, { "epoch": 3.5354023415721985, "grad_norm": 1.0441583395004272, "learning_rate": 6.144511214080795e-06, "loss": 0.7194, "step": 19121 }, { "epoch": 3.5355881806355693, "grad_norm": 0.931191086769104, "learning_rate": 6.139470093769806e-06, "loss": 0.6677, "step": 19122 }, { "epoch": 3.5357740196989407, "grad_norm": 0.9039041996002197, "learning_rate": 6.134430976750783e-06, "loss": 0.827, "step": 19123 }, { "epoch": 3.535959858762312, "grad_norm": 0.9199702739715576, "learning_rate": 6.129393863131228e-06, "loss": 0.7714, "step": 19124 }, { "epoch": 3.536145697825683, "grad_norm": 0.9820542335510254, "learning_rate": 6.124358753018689e-06, "loss": 0.8069, "step": 19125 }, { "epoch": 3.536331536889054, "grad_norm": 0.9555262923240662, "learning_rate": 6.119325646520591e-06, "loss": 0.5713, "step": 19126 }, { "epoch": 3.536517375952425, "grad_norm": 1.0285457372665405, "learning_rate": 6.114294543744414e-06, "loss": 0.7669, "step": 19127 }, { "epoch": 3.5367032150157964, "grad_norm": 0.9669595956802368, "learning_rate": 6.1092654447974965e-06, "loss": 0.8578, "step": 19128 }, { "epoch": 3.5368890540791673, "grad_norm": 0.911249041557312, "learning_rate": 6.104238349787183e-06, "loss": 0.6561, "step": 19129 }, { "epoch": 3.5370748931425386, "grad_norm": 0.8711757659912109, "learning_rate": 6.0992132588207576e-06, "loss": 0.4448, "step": 19130 }, { "epoch": 3.5372607322059095, "grad_norm": 0.8461050987243652, "learning_rate": 6.0941901720054985e-06, "loss": 0.8608, "step": 19131 }, { "epoch": 3.537446571269281, "grad_norm": 0.9975296258926392, "learning_rate": 6.089169089448599e-06, "loss": 0.8095, "step": 19132 }, { "epoch": 3.5376324103326517, "grad_norm": 0.9165214896202087, "learning_rate": 6.084150011257239e-06, "loss": 0.7625, "step": 19133 }, { "epoch": 
3.537818249396023, "grad_norm": 1.0196541547775269, "learning_rate": 6.079132937538501e-06, "loss": 0.6671, "step": 19134 }, { "epoch": 3.5380040884593944, "grad_norm": 0.9346649050712585, "learning_rate": 6.074117868399509e-06, "loss": 0.8232, "step": 19135 }, { "epoch": 3.5381899275227653, "grad_norm": 0.9664354920387268, "learning_rate": 6.069104803947301e-06, "loss": 0.771, "step": 19136 }, { "epoch": 3.538375766586136, "grad_norm": 1.1240217685699463, "learning_rate": 6.064093744288845e-06, "loss": 0.8951, "step": 19137 }, { "epoch": 3.5385616056495075, "grad_norm": 1.1051549911499023, "learning_rate": 6.059084689531113e-06, "loss": 0.8819, "step": 19138 }, { "epoch": 3.538747444712879, "grad_norm": 0.8375083804130554, "learning_rate": 6.0540776397810085e-06, "loss": 0.6488, "step": 19139 }, { "epoch": 3.5389332837762497, "grad_norm": 0.900913655757904, "learning_rate": 6.049072595145411e-06, "loss": 0.9445, "step": 19140 }, { "epoch": 3.539119122839621, "grad_norm": 1.3367409706115723, "learning_rate": 6.044069555731124e-06, "loss": 0.9208, "step": 19141 }, { "epoch": 3.539304961902992, "grad_norm": 1.1735326051712036, "learning_rate": 6.03906852164492e-06, "loss": 1.0493, "step": 19142 }, { "epoch": 3.5394908009663633, "grad_norm": 0.918788731098175, "learning_rate": 6.034069492993577e-06, "loss": 0.7548, "step": 19143 }, { "epoch": 3.539676640029734, "grad_norm": 0.8883483409881592, "learning_rate": 6.0290724698837675e-06, "loss": 0.8226, "step": 19144 }, { "epoch": 3.5398624790931055, "grad_norm": 0.931384265422821, "learning_rate": 6.024077452422128e-06, "loss": 0.779, "step": 19145 }, { "epoch": 3.5400483181564764, "grad_norm": 0.8548913598060608, "learning_rate": 6.019084440715295e-06, "loss": 0.6444, "step": 19146 }, { "epoch": 3.5402341572198477, "grad_norm": 1.0747677087783813, "learning_rate": 6.014093434869828e-06, "loss": 0.8846, "step": 19147 }, { "epoch": 3.5404199962832186, "grad_norm": 1.1103484630584717, "learning_rate": 
6.009104434992263e-06, "loss": 0.8992, "step": 19148 }, { "epoch": 3.54060583534659, "grad_norm": 1.0426290035247803, "learning_rate": 6.004117441189061e-06, "loss": 0.9561, "step": 19149 }, { "epoch": 3.5407916744099612, "grad_norm": 1.2314473390579224, "learning_rate": 5.999132453566658e-06, "loss": 1.0631, "step": 19150 }, { "epoch": 3.540977513473332, "grad_norm": 0.7971338629722595, "learning_rate": 5.994149472231469e-06, "loss": 0.6471, "step": 19151 }, { "epoch": 3.541163352536703, "grad_norm": 1.167417287826538, "learning_rate": 5.989168497289843e-06, "loss": 0.823, "step": 19152 }, { "epoch": 3.5413491916000743, "grad_norm": 0.9355377554893494, "learning_rate": 5.984189528848095e-06, "loss": 1.0002, "step": 19153 }, { "epoch": 3.5415350306634457, "grad_norm": 0.8810675740242004, "learning_rate": 5.97921256701246e-06, "loss": 0.8025, "step": 19154 }, { "epoch": 3.5417208697268165, "grad_norm": 0.9732177257537842, "learning_rate": 5.974237611889189e-06, "loss": 1.0295, "step": 19155 }, { "epoch": 3.5419067087901874, "grad_norm": 0.8081285357475281, "learning_rate": 5.969264663584473e-06, "loss": 0.3682, "step": 19156 }, { "epoch": 3.5420925478535588, "grad_norm": 0.852075457572937, "learning_rate": 5.9642937222044396e-06, "loss": 0.7055, "step": 19157 }, { "epoch": 3.54227838691693, "grad_norm": 0.8370651602745056, "learning_rate": 5.95932478785517e-06, "loss": 0.7749, "step": 19158 }, { "epoch": 3.542464225980301, "grad_norm": 1.0717309713363647, "learning_rate": 5.954357860642745e-06, "loss": 0.906, "step": 19159 }, { "epoch": 3.5426500650436723, "grad_norm": 1.0112475156784058, "learning_rate": 5.9493929406731705e-06, "loss": 0.9781, "step": 19160 }, { "epoch": 3.542835904107043, "grad_norm": 1.2165931463241577, "learning_rate": 5.944430028052405e-06, "loss": 0.8503, "step": 19161 }, { "epoch": 3.5430217431704145, "grad_norm": 0.9976071119308472, "learning_rate": 5.939469122886354e-06, "loss": 0.7948, "step": 19162 }, { "epoch": 3.5432075822337854, 
"grad_norm": 0.9213830232620239, "learning_rate": 5.934510225280943e-06, "loss": 1.0859, "step": 19163 }, { "epoch": 3.5433934212971567, "grad_norm": 0.8859527111053467, "learning_rate": 5.929553335341986e-06, "loss": 1.0171, "step": 19164 }, { "epoch": 3.543579260360528, "grad_norm": 0.9154654145240784, "learning_rate": 5.9245984531752784e-06, "loss": 0.5721, "step": 19165 }, { "epoch": 3.543765099423899, "grad_norm": 0.8688372373580933, "learning_rate": 5.919645578886545e-06, "loss": 0.7146, "step": 19166 }, { "epoch": 3.54395093848727, "grad_norm": 0.8358892798423767, "learning_rate": 5.91469471258157e-06, "loss": 0.8985, "step": 19167 }, { "epoch": 3.544136777550641, "grad_norm": 1.0070583820343018, "learning_rate": 5.9097458543659666e-06, "loss": 0.9028, "step": 19168 }, { "epoch": 3.5443226166140125, "grad_norm": 1.22250235080719, "learning_rate": 5.904799004345385e-06, "loss": 1.0275, "step": 19169 }, { "epoch": 3.5445084556773834, "grad_norm": 0.8911054730415344, "learning_rate": 5.899854162625362e-06, "loss": 0.8251, "step": 19170 }, { "epoch": 3.5446942947407543, "grad_norm": 1.0135678052902222, "learning_rate": 5.894911329311503e-06, "loss": 0.9109, "step": 19171 }, { "epoch": 3.5448801338041256, "grad_norm": 0.9569916725158691, "learning_rate": 5.889970504509257e-06, "loss": 0.9177, "step": 19172 }, { "epoch": 3.545065972867497, "grad_norm": 1.3330678939819336, "learning_rate": 5.8850316883241055e-06, "loss": 0.9676, "step": 19173 }, { "epoch": 3.545251811930868, "grad_norm": 0.8885575532913208, "learning_rate": 5.88009488086142e-06, "loss": 0.7955, "step": 19174 }, { "epoch": 3.545437650994239, "grad_norm": 0.9844068288803101, "learning_rate": 5.875160082226605e-06, "loss": 1.0337, "step": 19175 }, { "epoch": 3.54562349005761, "grad_norm": 0.8716814517974854, "learning_rate": 5.870227292524966e-06, "loss": 0.7287, "step": 19176 }, { "epoch": 3.5458093291209813, "grad_norm": 1.133697509765625, "learning_rate": 5.865296511861806e-06, "loss": 0.9023, 
"step": 19177 }, { "epoch": 3.5459951681843522, "grad_norm": 0.9876354932785034, "learning_rate": 5.860367740342332e-06, "loss": 0.8509, "step": 19178 }, { "epoch": 3.5461810072477236, "grad_norm": 1.0449570417404175, "learning_rate": 5.855440978071769e-06, "loss": 1.0393, "step": 19179 }, { "epoch": 3.5463668463110944, "grad_norm": 1.3608499765396118, "learning_rate": 5.850516225155267e-06, "loss": 0.8978, "step": 19180 }, { "epoch": 3.5465526853744658, "grad_norm": 0.9593074321746826, "learning_rate": 5.8455934816979305e-06, "loss": 0.6206, "step": 19181 }, { "epoch": 3.5467385244378367, "grad_norm": 0.9247384071350098, "learning_rate": 5.840672747804809e-06, "loss": 0.8468, "step": 19182 }, { "epoch": 3.546924363501208, "grad_norm": 0.8710890412330627, "learning_rate": 5.835754023580964e-06, "loss": 1.0445, "step": 19183 }, { "epoch": 3.5471102025645793, "grad_norm": 0.9194334149360657, "learning_rate": 5.8308373091313425e-06, "loss": 0.8244, "step": 19184 }, { "epoch": 3.54729604162795, "grad_norm": 0.7997312545776367, "learning_rate": 5.825922604560908e-06, "loss": 0.7945, "step": 19185 }, { "epoch": 3.547481880691321, "grad_norm": 1.069359540939331, "learning_rate": 5.8210099099745284e-06, "loss": 0.9528, "step": 19186 }, { "epoch": 3.5476677197546924, "grad_norm": 0.9582682251930237, "learning_rate": 5.816099225477079e-06, "loss": 0.9763, "step": 19187 }, { "epoch": 3.5478535588180637, "grad_norm": 0.8646688461303711, "learning_rate": 5.811190551173373e-06, "loss": 0.8443, "step": 19188 }, { "epoch": 3.5480393978814346, "grad_norm": 0.9127521514892578, "learning_rate": 5.806283887168185e-06, "loss": 0.7724, "step": 19189 }, { "epoch": 3.548225236944806, "grad_norm": 0.779879093170166, "learning_rate": 5.801379233566195e-06, "loss": 0.8479, "step": 19190 }, { "epoch": 3.548411076008177, "grad_norm": 0.8569574356079102, "learning_rate": 5.796476590472133e-06, "loss": 0.7031, "step": 19191 }, { "epoch": 3.548596915071548, "grad_norm": 1.0802698135375977, 
"learning_rate": 5.791575957990614e-06, "loss": 0.8848, "step": 19192 }, { "epoch": 3.548782754134919, "grad_norm": 1.2723355293273926, "learning_rate": 5.7866773362262425e-06, "loss": 1.2078, "step": 19193 }, { "epoch": 3.5489685931982904, "grad_norm": 1.1142456531524658, "learning_rate": 5.781780725283558e-06, "loss": 0.8916, "step": 19194 }, { "epoch": 3.5491544322616613, "grad_norm": 0.7921794652938843, "learning_rate": 5.776886125267089e-06, "loss": 0.6711, "step": 19195 }, { "epoch": 3.5493402713250326, "grad_norm": 0.925909161567688, "learning_rate": 5.771993536281284e-06, "loss": 0.9102, "step": 19196 }, { "epoch": 3.5495261103884035, "grad_norm": 0.9324236512184143, "learning_rate": 5.76710295843057e-06, "loss": 0.8768, "step": 19197 }, { "epoch": 3.549711949451775, "grad_norm": 1.0298572778701782, "learning_rate": 5.7622143918193314e-06, "loss": 0.7518, "step": 19198 }, { "epoch": 3.549897788515146, "grad_norm": 0.8958404660224915, "learning_rate": 5.757327836551929e-06, "loss": 0.6492, "step": 19199 }, { "epoch": 3.550083627578517, "grad_norm": 1.1485790014266968, "learning_rate": 5.7524432927326235e-06, "loss": 0.7827, "step": 19200 }, { "epoch": 3.550269466641888, "grad_norm": 0.872681736946106, "learning_rate": 5.747560760465687e-06, "loss": 0.8896, "step": 19201 }, { "epoch": 3.5504553057052592, "grad_norm": 0.8319202065467834, "learning_rate": 5.742680239855314e-06, "loss": 0.5662, "step": 19202 }, { "epoch": 3.5506411447686306, "grad_norm": 0.9280526638031006, "learning_rate": 5.737801731005688e-06, "loss": 0.7522, "step": 19203 }, { "epoch": 3.5508269838320015, "grad_norm": 1.0687966346740723, "learning_rate": 5.732925234020925e-06, "loss": 0.9029, "step": 19204 }, { "epoch": 3.5510128228953723, "grad_norm": 0.9236042499542236, "learning_rate": 5.7280507490050985e-06, "loss": 0.8034, "step": 19205 }, { "epoch": 3.5511986619587437, "grad_norm": 0.9122721552848816, "learning_rate": 5.723178276062247e-06, "loss": 0.7589, "step": 19206 }, { "epoch": 
3.551384501022115, "grad_norm": 0.86166912317276, "learning_rate": 5.718307815296375e-06, "loss": 0.8254, "step": 19207 }, { "epoch": 3.551570340085486, "grad_norm": 1.1353827714920044, "learning_rate": 5.713439366811413e-06, "loss": 0.6789, "step": 19208 }, { "epoch": 3.551756179148857, "grad_norm": 0.8476354479789734, "learning_rate": 5.708572930711309e-06, "loss": 0.6649, "step": 19209 }, { "epoch": 3.551942018212228, "grad_norm": 0.8318846225738525, "learning_rate": 5.703708507099881e-06, "loss": 0.7236, "step": 19210 }, { "epoch": 3.5521278572755994, "grad_norm": 0.9746939539909363, "learning_rate": 5.698846096081001e-06, "loss": 0.8217, "step": 19211 }, { "epoch": 3.5523136963389703, "grad_norm": 0.9182035326957703, "learning_rate": 5.693985697758419e-06, "loss": 0.7409, "step": 19212 }, { "epoch": 3.5524995354023416, "grad_norm": 1.098073959350586, "learning_rate": 5.689127312235876e-06, "loss": 0.8893, "step": 19213 }, { "epoch": 3.5526853744657125, "grad_norm": 0.9553743600845337, "learning_rate": 5.684270939617054e-06, "loss": 0.8132, "step": 19214 }, { "epoch": 3.552871213529084, "grad_norm": 1.0023797750473022, "learning_rate": 5.679416580005626e-06, "loss": 0.8384, "step": 19215 }, { "epoch": 3.5530570525924547, "grad_norm": 0.8118361830711365, "learning_rate": 5.6745642335051976e-06, "loss": 0.6801, "step": 19216 }, { "epoch": 3.553242891655826, "grad_norm": 1.0483431816101074, "learning_rate": 5.669713900219309e-06, "loss": 0.9402, "step": 19217 }, { "epoch": 3.5534287307191974, "grad_norm": 0.924910843372345, "learning_rate": 5.664865580251499e-06, "loss": 0.7887, "step": 19218 }, { "epoch": 3.5536145697825683, "grad_norm": 0.9264012575149536, "learning_rate": 5.660019273705264e-06, "loss": 0.9293, "step": 19219 }, { "epoch": 3.553800408845939, "grad_norm": 0.9317333102226257, "learning_rate": 5.65517498068402e-06, "loss": 0.9858, "step": 19220 }, { "epoch": 3.5539862479093105, "grad_norm": 0.9699829816818237, "learning_rate": 
5.6503327012911725e-06, "loss": 0.8917, "step": 19221 }, { "epoch": 3.554172086972682, "grad_norm": 0.7067593336105347, "learning_rate": 5.64549243563004e-06, "loss": 0.5219, "step": 19222 }, { "epoch": 3.5543579260360527, "grad_norm": 0.9304884076118469, "learning_rate": 5.640654183803962e-06, "loss": 0.6241, "step": 19223 }, { "epoch": 3.554543765099424, "grad_norm": 0.8964663147926331, "learning_rate": 5.635817945916211e-06, "loss": 0.7789, "step": 19224 }, { "epoch": 3.554729604162795, "grad_norm": 0.8794653415679932, "learning_rate": 5.630983722069972e-06, "loss": 0.763, "step": 19225 }, { "epoch": 3.5549154432261663, "grad_norm": 0.8196675181388855, "learning_rate": 5.626151512368438e-06, "loss": 0.9579, "step": 19226 }, { "epoch": 3.555101282289537, "grad_norm": 0.9369693398475647, "learning_rate": 5.621321316914763e-06, "loss": 0.8541, "step": 19227 }, { "epoch": 3.5552871213529085, "grad_norm": 0.9243240356445312, "learning_rate": 5.616493135812018e-06, "loss": 0.7995, "step": 19228 }, { "epoch": 3.5554729604162794, "grad_norm": 0.8889716863632202, "learning_rate": 5.611666969163243e-06, "loss": 0.8044, "step": 19229 }, { "epoch": 3.5556587994796507, "grad_norm": 0.8678849935531616, "learning_rate": 5.606842817071467e-06, "loss": 0.8267, "step": 19230 }, { "epoch": 3.5558446385430216, "grad_norm": 0.9801349639892578, "learning_rate": 5.602020679639663e-06, "loss": 0.8801, "step": 19231 }, { "epoch": 3.556030477606393, "grad_norm": 0.8349409699440002, "learning_rate": 5.597200556970727e-06, "loss": 0.8689, "step": 19232 }, { "epoch": 3.5562163166697642, "grad_norm": 0.9708721041679382, "learning_rate": 5.592382449167543e-06, "loss": 0.7498, "step": 19233 }, { "epoch": 3.556402155733135, "grad_norm": 0.9165699481964111, "learning_rate": 5.58756635633293e-06, "loss": 0.892, "step": 19234 }, { "epoch": 3.556587994796506, "grad_norm": 0.9796352982521057, "learning_rate": 5.5827522785697136e-06, "loss": 0.7933, "step": 19235 }, { "epoch": 3.5567738338598773, 
"grad_norm": 0.8175069093704224, "learning_rate": 5.577940215980626e-06, "loss": 0.8168, "step": 19236 }, { "epoch": 3.5569596729232487, "grad_norm": 1.0237641334533691, "learning_rate": 5.57313016866835e-06, "loss": 0.8044, "step": 19237 }, { "epoch": 3.5571455119866195, "grad_norm": 0.9125300645828247, "learning_rate": 5.568322136735582e-06, "loss": 0.6959, "step": 19238 }, { "epoch": 3.5573313510499904, "grad_norm": 1.0325636863708496, "learning_rate": 5.5635161202849175e-06, "loss": 0.8413, "step": 19239 }, { "epoch": 3.5575171901133618, "grad_norm": 0.9485664963722229, "learning_rate": 5.558712119418952e-06, "loss": 0.7329, "step": 19240 }, { "epoch": 3.557703029176733, "grad_norm": 0.8749877214431763, "learning_rate": 5.553910134240203e-06, "loss": 0.8431, "step": 19241 }, { "epoch": 3.557888868240104, "grad_norm": 1.127166986465454, "learning_rate": 5.549110164851157e-06, "loss": 0.8702, "step": 19242 }, { "epoch": 3.5580747073034753, "grad_norm": 0.9633110761642456, "learning_rate": 5.544312211354286e-06, "loss": 0.9324, "step": 19243 }, { "epoch": 3.558260546366846, "grad_norm": 0.9466096758842468, "learning_rate": 5.539516273851986e-06, "loss": 0.7709, "step": 19244 }, { "epoch": 3.5584463854302175, "grad_norm": 0.8669358491897583, "learning_rate": 5.534722352446598e-06, "loss": 0.8215, "step": 19245 }, { "epoch": 3.5586322244935884, "grad_norm": 0.8715214133262634, "learning_rate": 5.529930447240439e-06, "loss": 0.6906, "step": 19246 }, { "epoch": 3.5588180635569597, "grad_norm": 1.3729928731918335, "learning_rate": 5.525140558335817e-06, "loss": 1.1958, "step": 19247 }, { "epoch": 3.559003902620331, "grad_norm": 0.9086440801620483, "learning_rate": 5.52035268583494e-06, "loss": 0.7287, "step": 19248 }, { "epoch": 3.559189741683702, "grad_norm": 1.0862767696380615, "learning_rate": 5.515566829839991e-06, "loss": 0.7094, "step": 19249 }, { "epoch": 3.559375580747073, "grad_norm": 0.8084904551506042, "learning_rate": 5.510782990453123e-06, "loss": 0.9409, 
"step": 19250 }, { "epoch": 3.559561419810444, "grad_norm": 0.960584819316864, "learning_rate": 5.506001167776464e-06, "loss": 0.6924, "step": 19251 }, { "epoch": 3.5597472588738155, "grad_norm": 1.1462523937225342, "learning_rate": 5.501221361912046e-06, "loss": 1.0633, "step": 19252 }, { "epoch": 3.5599330979371864, "grad_norm": 0.9685887098312378, "learning_rate": 5.496443572961895e-06, "loss": 0.7463, "step": 19253 }, { "epoch": 3.5601189370005573, "grad_norm": 0.8634170889854431, "learning_rate": 5.491667801027955e-06, "loss": 0.9771, "step": 19254 }, { "epoch": 3.5603047760639286, "grad_norm": 0.8998491168022156, "learning_rate": 5.4868940462122094e-06, "loss": 0.8428, "step": 19255 }, { "epoch": 3.5604906151273, "grad_norm": 0.7958370447158813, "learning_rate": 5.482122308616522e-06, "loss": 0.6143, "step": 19256 }, { "epoch": 3.560676454190671, "grad_norm": 1.019345998764038, "learning_rate": 5.47735258834271e-06, "loss": 0.7629, "step": 19257 }, { "epoch": 3.560862293254042, "grad_norm": 0.9218160510063171, "learning_rate": 5.472584885492616e-06, "loss": 0.8334, "step": 19258 }, { "epoch": 3.561048132317413, "grad_norm": 1.104111909866333, "learning_rate": 5.467819200167989e-06, "loss": 0.9566, "step": 19259 }, { "epoch": 3.5612339713807843, "grad_norm": 0.9276748299598694, "learning_rate": 5.463055532470518e-06, "loss": 0.7049, "step": 19260 }, { "epoch": 3.5614198104441552, "grad_norm": 0.9067643880844116, "learning_rate": 5.458293882501908e-06, "loss": 0.8014, "step": 19261 }, { "epoch": 3.5616056495075266, "grad_norm": 1.0602048635482788, "learning_rate": 5.4535342503637675e-06, "loss": 0.8069, "step": 19262 }, { "epoch": 3.5617914885708974, "grad_norm": 0.9947314858436584, "learning_rate": 5.448776636157693e-06, "loss": 0.8717, "step": 19263 }, { "epoch": 3.5619773276342688, "grad_norm": 1.2373080253601074, "learning_rate": 5.444021039985236e-06, "loss": 0.8202, "step": 19264 }, { "epoch": 3.5621631666976397, "grad_norm": 0.9831281304359436, 
"learning_rate": 5.439267461947883e-06, "loss": 0.7609, "step": 19265 }, { "epoch": 3.562349005761011, "grad_norm": 0.9348733425140381, "learning_rate": 5.434515902147075e-06, "loss": 0.7215, "step": 19266 }, { "epoch": 3.5625348448243823, "grad_norm": 0.9554674029350281, "learning_rate": 5.429766360684252e-06, "loss": 0.8535, "step": 19267 }, { "epoch": 3.562720683887753, "grad_norm": 1.0535904169082642, "learning_rate": 5.425018837660789e-06, "loss": 1.0426, "step": 19268 }, { "epoch": 3.562906522951124, "grad_norm": 1.0819015502929688, "learning_rate": 5.420273333177983e-06, "loss": 0.8682, "step": 19269 }, { "epoch": 3.5630923620144954, "grad_norm": 1.0718095302581787, "learning_rate": 5.415529847337153e-06, "loss": 1.1112, "step": 19270 }, { "epoch": 3.5632782010778667, "grad_norm": 0.8572877049446106, "learning_rate": 5.410788380239507e-06, "loss": 0.9122, "step": 19271 }, { "epoch": 3.5634640401412376, "grad_norm": 1.0377191305160522, "learning_rate": 5.406048931986285e-06, "loss": 0.4833, "step": 19272 }, { "epoch": 3.563649879204609, "grad_norm": 1.1077818870544434, "learning_rate": 5.401311502678608e-06, "loss": 0.7051, "step": 19273 }, { "epoch": 3.56383571826798, "grad_norm": 0.8639101386070251, "learning_rate": 5.396576092417593e-06, "loss": 0.7289, "step": 19274 }, { "epoch": 3.564021557331351, "grad_norm": 1.079602837562561, "learning_rate": 5.391842701304328e-06, "loss": 0.6417, "step": 19275 }, { "epoch": 3.564207396394722, "grad_norm": 0.9743924736976624, "learning_rate": 5.38711132943982e-06, "loss": 0.9016, "step": 19276 }, { "epoch": 3.5643932354580934, "grad_norm": 1.0895919799804688, "learning_rate": 5.382381976925044e-06, "loss": 0.8508, "step": 19277 }, { "epoch": 3.5645790745214643, "grad_norm": 0.9121440052986145, "learning_rate": 5.377654643860974e-06, "loss": 0.6967, "step": 19278 }, { "epoch": 3.5647649135848356, "grad_norm": 1.0038836002349854, "learning_rate": 5.3729293303484864e-06, "loss": 0.9408, "step": 19279 }, { "epoch": 
3.5649507526482065, "grad_norm": 0.9119470119476318, "learning_rate": 5.368206036488443e-06, "loss": 0.8108, "step": 19280 }, { "epoch": 3.565136591711578, "grad_norm": 0.9499068856239319, "learning_rate": 5.3634847623816205e-06, "loss": 0.5967, "step": 19281 }, { "epoch": 3.565322430774949, "grad_norm": 0.8114598393440247, "learning_rate": 5.358765508128816e-06, "loss": 0.6871, "step": 19282 }, { "epoch": 3.56550826983832, "grad_norm": 1.0090092420578003, "learning_rate": 5.35404827383077e-06, "loss": 0.9781, "step": 19283 }, { "epoch": 3.565694108901691, "grad_norm": 0.961084246635437, "learning_rate": 5.349333059588146e-06, "loss": 0.8388, "step": 19284 }, { "epoch": 3.5658799479650622, "grad_norm": 0.9709075093269348, "learning_rate": 5.3446198655015765e-06, "loss": 0.9357, "step": 19285 }, { "epoch": 3.5660657870284336, "grad_norm": 0.8814340233802795, "learning_rate": 5.339908691671647e-06, "loss": 0.7196, "step": 19286 }, { "epoch": 3.5662516260918045, "grad_norm": 1.1103253364562988, "learning_rate": 5.335199538198932e-06, "loss": 0.884, "step": 19287 }, { "epoch": 3.5664374651551753, "grad_norm": 0.9542502164840698, "learning_rate": 5.33049240518394e-06, "loss": 0.8492, "step": 19288 }, { "epoch": 3.5666233042185467, "grad_norm": 1.0562374591827393, "learning_rate": 5.325787292727113e-06, "loss": 0.7911, "step": 19289 }, { "epoch": 3.566809143281918, "grad_norm": 1.0418407917022705, "learning_rate": 5.3210842009288945e-06, "loss": 0.6607, "step": 19290 }, { "epoch": 3.566994982345289, "grad_norm": 0.7913893461227417, "learning_rate": 5.316383129889668e-06, "loss": 0.6634, "step": 19291 }, { "epoch": 3.56718082140866, "grad_norm": 0.8778683543205261, "learning_rate": 5.311684079709745e-06, "loss": 0.7286, "step": 19292 }, { "epoch": 3.567366660472031, "grad_norm": 1.1600593328475952, "learning_rate": 5.306987050489442e-06, "loss": 0.741, "step": 19293 }, { "epoch": 3.5675524995354024, "grad_norm": 0.8523426055908203, "learning_rate": 5.302292042328982e-06, 
"loss": 0.8115, "step": 19294 }, { "epoch": 3.5677383385987733, "grad_norm": 0.8197619318962097, "learning_rate": 5.297599055328617e-06, "loss": 0.8362, "step": 19295 }, { "epoch": 3.5679241776621446, "grad_norm": 1.0496013164520264, "learning_rate": 5.292908089588478e-06, "loss": 0.8323, "step": 19296 }, { "epoch": 3.568110016725516, "grad_norm": 0.9278433322906494, "learning_rate": 5.2882191452086834e-06, "loss": 0.8625, "step": 19297 }, { "epoch": 3.568295855788887, "grad_norm": 1.8732656240463257, "learning_rate": 5.283532222289322e-06, "loss": 1.3532, "step": 19298 }, { "epoch": 3.5684816948522577, "grad_norm": 0.8991494178771973, "learning_rate": 5.278847320930436e-06, "loss": 0.6995, "step": 19299 }, { "epoch": 3.568667533915629, "grad_norm": 1.0465878248214722, "learning_rate": 5.274164441231999e-06, "loss": 1.0019, "step": 19300 }, { "epoch": 3.5688533729790004, "grad_norm": 0.9910255670547485, "learning_rate": 5.269483583293966e-06, "loss": 0.8754, "step": 19301 }, { "epoch": 3.5690392120423713, "grad_norm": 0.9528656005859375, "learning_rate": 5.264804747216223e-06, "loss": 0.7644, "step": 19302 }, { "epoch": 3.569225051105742, "grad_norm": 0.8942814469337463, "learning_rate": 5.26012793309868e-06, "loss": 0.952, "step": 19303 }, { "epoch": 3.5694108901691135, "grad_norm": 1.0535019636154175, "learning_rate": 5.255453141041111e-06, "loss": 1.0923, "step": 19304 }, { "epoch": 3.569596729232485, "grad_norm": 1.106765627861023, "learning_rate": 5.250780371143315e-06, "loss": 0.7056, "step": 19305 }, { "epoch": 3.5697825682958557, "grad_norm": 0.9234157204627991, "learning_rate": 5.246109623505002e-06, "loss": 0.8472, "step": 19306 }, { "epoch": 3.569968407359227, "grad_norm": 1.0718744993209839, "learning_rate": 5.241440898225891e-06, "loss": 1.0636, "step": 19307 }, { "epoch": 3.570154246422598, "grad_norm": 0.8861552476882935, "learning_rate": 5.236774195405614e-06, "loss": 0.8399, "step": 19308 }, { "epoch": 3.5703400854859693, "grad_norm": 
0.9062960743904114, "learning_rate": 5.232109515143746e-06, "loss": 0.6951, "step": 19309 }, { "epoch": 3.57052592454934, "grad_norm": 1.8155157566070557, "learning_rate": 5.227446857539897e-06, "loss": 1.4572, "step": 19310 }, { "epoch": 3.5707117636127115, "grad_norm": 0.938761830329895, "learning_rate": 5.222786222693554e-06, "loss": 0.7112, "step": 19311 }, { "epoch": 3.5708976026760824, "grad_norm": 0.9258983135223389, "learning_rate": 5.218127610704194e-06, "loss": 0.6857, "step": 19312 }, { "epoch": 3.5710834417394537, "grad_norm": 0.8706833720207214, "learning_rate": 5.213471021671235e-06, "loss": 0.622, "step": 19313 }, { "epoch": 3.5712692808028246, "grad_norm": 0.9127697348594666, "learning_rate": 5.208816455694077e-06, "loss": 0.7618, "step": 19314 }, { "epoch": 3.571455119866196, "grad_norm": 0.7951032519340515, "learning_rate": 5.204163912872073e-06, "loss": 0.8307, "step": 19315 }, { "epoch": 3.5716409589295672, "grad_norm": 0.9423043727874756, "learning_rate": 5.1995133933045225e-06, "loss": 0.4945, "step": 19316 }, { "epoch": 3.571826797992938, "grad_norm": 0.9046568870544434, "learning_rate": 5.194864897090656e-06, "loss": 0.8827, "step": 19317 }, { "epoch": 3.572012637056309, "grad_norm": 0.9737492203712463, "learning_rate": 5.190218424329718e-06, "loss": 0.7078, "step": 19318 }, { "epoch": 3.5721984761196803, "grad_norm": 0.9893245697021484, "learning_rate": 5.185573975120872e-06, "loss": 0.8561, "step": 19319 }, { "epoch": 3.5723843151830517, "grad_norm": 0.8789792060852051, "learning_rate": 5.1809315495632394e-06, "loss": 0.713, "step": 19320 }, { "epoch": 3.5725701542464225, "grad_norm": 0.9256452918052673, "learning_rate": 5.1762911477558965e-06, "loss": 0.8957, "step": 19321 }, { "epoch": 3.572755993309794, "grad_norm": 0.8583182692527771, "learning_rate": 5.171652769797908e-06, "loss": 0.8656, "step": 19322 }, { "epoch": 3.5729418323731648, "grad_norm": 1.0559611320495605, "learning_rate": 5.167016415788239e-06, "loss": 0.982, "step": 
19323 }, { "epoch": 3.573127671436536, "grad_norm": 1.0091091394424438, "learning_rate": 5.162382085825878e-06, "loss": 1.0239, "step": 19324 }, { "epoch": 3.573313510499907, "grad_norm": 0.9123789668083191, "learning_rate": 5.157749780009735e-06, "loss": 0.5609, "step": 19325 }, { "epoch": 3.5734993495632783, "grad_norm": 0.9494024515151978, "learning_rate": 5.153119498438641e-06, "loss": 0.6981, "step": 19326 }, { "epoch": 3.573685188626649, "grad_norm": 0.9123292565345764, "learning_rate": 5.148491241211473e-06, "loss": 0.7819, "step": 19327 }, { "epoch": 3.5738710276900205, "grad_norm": 0.8726586699485779, "learning_rate": 5.143865008426973e-06, "loss": 0.7431, "step": 19328 }, { "epoch": 3.5740568667533914, "grad_norm": 1.0286791324615479, "learning_rate": 5.139240800183897e-06, "loss": 0.9894, "step": 19329 }, { "epoch": 3.5742427058167627, "grad_norm": 0.9350249767303467, "learning_rate": 5.134618616580944e-06, "loss": 0.8188, "step": 19330 }, { "epoch": 3.574428544880134, "grad_norm": 1.010162591934204, "learning_rate": 5.129998457716756e-06, "loss": 0.8046, "step": 19331 }, { "epoch": 3.574614383943505, "grad_norm": 0.9536052346229553, "learning_rate": 5.125380323689954e-06, "loss": 0.7729, "step": 19332 }, { "epoch": 3.574800223006876, "grad_norm": 0.8379716277122498, "learning_rate": 5.120764214599083e-06, "loss": 0.9196, "step": 19333 }, { "epoch": 3.574986062070247, "grad_norm": 1.0392776727676392, "learning_rate": 5.116150130542685e-06, "loss": 0.8505, "step": 19334 }, { "epoch": 3.5751719011336185, "grad_norm": 0.7669402360916138, "learning_rate": 5.111538071619249e-06, "loss": 0.61, "step": 19335 }, { "epoch": 3.5753577401969894, "grad_norm": 0.9669473171234131, "learning_rate": 5.106928037927195e-06, "loss": 0.9057, "step": 19336 }, { "epoch": 3.5755435792603603, "grad_norm": 1.1792256832122803, "learning_rate": 5.1023200295649e-06, "loss": 0.9097, "step": 19337 }, { "epoch": 3.5757294183237316, "grad_norm": 0.934672474861145, "learning_rate": 
5.097714046630753e-06, "loss": 1.0523, "step": 19338 }, { "epoch": 3.575915257387103, "grad_norm": 0.9194068908691406, "learning_rate": 5.09311008922303e-06, "loss": 0.8906, "step": 19339 }, { "epoch": 3.576101096450474, "grad_norm": 0.9827641248703003, "learning_rate": 5.0885081574400195e-06, "loss": 0.985, "step": 19340 }, { "epoch": 3.576286935513845, "grad_norm": 1.0495479106903076, "learning_rate": 5.08390825137991e-06, "loss": 0.8759, "step": 19341 }, { "epoch": 3.576472774577216, "grad_norm": 1.0074039697647095, "learning_rate": 5.0793103711409e-06, "loss": 0.6792, "step": 19342 }, { "epoch": 3.5766586136405873, "grad_norm": 0.9039619565010071, "learning_rate": 5.074714516821133e-06, "loss": 0.6722, "step": 19343 }, { "epoch": 3.5768444527039582, "grad_norm": 1.0133075714111328, "learning_rate": 5.070120688518665e-06, "loss": 0.8542, "step": 19344 }, { "epoch": 3.5770302917673296, "grad_norm": 0.8256039023399353, "learning_rate": 5.065528886331583e-06, "loss": 0.6006, "step": 19345 }, { "epoch": 3.5772161308307004, "grad_norm": 1.1857333183288574, "learning_rate": 5.060939110357854e-06, "loss": 1.0767, "step": 19346 }, { "epoch": 3.5774019698940718, "grad_norm": 0.97385573387146, "learning_rate": 5.056351360695477e-06, "loss": 0.816, "step": 19347 }, { "epoch": 3.5775878089574427, "grad_norm": 0.8637109398841858, "learning_rate": 5.051765637442351e-06, "loss": 0.9806, "step": 19348 }, { "epoch": 3.577773648020814, "grad_norm": 0.7983254790306091, "learning_rate": 5.047181940696333e-06, "loss": 0.5248, "step": 19349 }, { "epoch": 3.5779594870841853, "grad_norm": 0.8218321204185486, "learning_rate": 5.042600270555287e-06, "loss": 0.8291, "step": 19350 }, { "epoch": 3.578145326147556, "grad_norm": 0.9524140954017639, "learning_rate": 5.03802062711698e-06, "loss": 0.8874, "step": 19351 }, { "epoch": 3.578331165210927, "grad_norm": 0.8764885067939758, "learning_rate": 5.0334430104791664e-06, "loss": 0.8199, "step": 19352 }, { "epoch": 3.5785170042742984, 
"grad_norm": 0.986173689365387, "learning_rate": 5.028867420739524e-06, "loss": 0.7779, "step": 19353 }, { "epoch": 3.5787028433376697, "grad_norm": 1.0108602046966553, "learning_rate": 5.0242938579957435e-06, "loss": 0.7467, "step": 19354 }, { "epoch": 3.5788886824010406, "grad_norm": 0.970950186252594, "learning_rate": 5.0197223223454106e-06, "loss": 0.9147, "step": 19355 }, { "epoch": 3.579074521464412, "grad_norm": 0.9849465489387512, "learning_rate": 5.015152813886137e-06, "loss": 0.9148, "step": 19356 }, { "epoch": 3.579260360527783, "grad_norm": 0.9472989439964294, "learning_rate": 5.0105853327154e-06, "loss": 0.8742, "step": 19357 }, { "epoch": 3.579446199591154, "grad_norm": 0.8969159722328186, "learning_rate": 5.0060198789307345e-06, "loss": 0.7965, "step": 19358 }, { "epoch": 3.579632038654525, "grad_norm": 1.0213388204574585, "learning_rate": 5.001456452629549e-06, "loss": 0.7155, "step": 19359 }, { "epoch": 3.5798178777178964, "grad_norm": 0.7637826204299927, "learning_rate": 4.996895053909256e-06, "loss": 0.5367, "step": 19360 }, { "epoch": 3.5800037167812673, "grad_norm": 1.0223419666290283, "learning_rate": 4.992335682867188e-06, "loss": 0.7434, "step": 19361 }, { "epoch": 3.5801895558446386, "grad_norm": 1.0513916015625, "learning_rate": 4.9877783396006906e-06, "loss": 0.7295, "step": 19362 }, { "epoch": 3.5803753949080095, "grad_norm": 0.9246789216995239, "learning_rate": 4.983223024207029e-06, "loss": 0.6569, "step": 19363 }, { "epoch": 3.580561233971381, "grad_norm": 1.0383719205856323, "learning_rate": 4.978669736783404e-06, "loss": 0.8964, "step": 19364 }, { "epoch": 3.580747073034752, "grad_norm": 0.8949868679046631, "learning_rate": 4.974118477426992e-06, "loss": 0.8108, "step": 19365 }, { "epoch": 3.580932912098123, "grad_norm": 0.9759200811386108, "learning_rate": 4.969569246234962e-06, "loss": 0.7496, "step": 19366 }, { "epoch": 3.581118751161494, "grad_norm": 0.9014949202537537, "learning_rate": 4.965022043304413e-06, "loss": 0.7466, 
"step": 19367 }, { "epoch": 3.5813045902248652, "grad_norm": 0.8902834057807922, "learning_rate": 4.9604768687323664e-06, "loss": 0.954, "step": 19368 }, { "epoch": 3.5814904292882366, "grad_norm": 0.8954604268074036, "learning_rate": 4.955933722615847e-06, "loss": 0.6673, "step": 19369 }, { "epoch": 3.5816762683516075, "grad_norm": 0.9591830372810364, "learning_rate": 4.95139260505183e-06, "loss": 0.8093, "step": 19370 }, { "epoch": 3.581862107414979, "grad_norm": 0.8783988356590271, "learning_rate": 4.946853516137229e-06, "loss": 0.6318, "step": 19371 }, { "epoch": 3.5820479464783497, "grad_norm": 1.4477580785751343, "learning_rate": 4.94231645596892e-06, "loss": 1.3058, "step": 19372 }, { "epoch": 3.582233785541721, "grad_norm": 1.1015466451644897, "learning_rate": 4.937781424643728e-06, "loss": 0.8462, "step": 19373 }, { "epoch": 3.582419624605092, "grad_norm": 0.9507879614830017, "learning_rate": 4.933248422258463e-06, "loss": 0.8298, "step": 19374 }, { "epoch": 3.582605463668463, "grad_norm": 0.9847450852394104, "learning_rate": 4.928717448909881e-06, "loss": 0.9353, "step": 19375 }, { "epoch": 3.582791302731834, "grad_norm": 0.8292127251625061, "learning_rate": 4.92418850469466e-06, "loss": 0.8207, "step": 19376 }, { "epoch": 3.5829771417952054, "grad_norm": 0.8937145471572876, "learning_rate": 4.919661589709479e-06, "loss": 0.7023, "step": 19377 }, { "epoch": 3.5831629808585763, "grad_norm": 1.0915123224258423, "learning_rate": 4.915136704050971e-06, "loss": 0.7531, "step": 19378 }, { "epoch": 3.5833488199219476, "grad_norm": 0.9832667112350464, "learning_rate": 4.910613847815693e-06, "loss": 0.8312, "step": 19379 }, { "epoch": 3.583534658985319, "grad_norm": 1.0424070358276367, "learning_rate": 4.906093021100189e-06, "loss": 0.7697, "step": 19380 }, { "epoch": 3.58372049804869, "grad_norm": 0.8419145941734314, "learning_rate": 4.901574224000927e-06, "loss": 0.8657, "step": 19381 }, { "epoch": 3.5839063371120607, "grad_norm": 0.8672407865524292, 
"learning_rate": 4.897057456614373e-06, "loss": 0.8676, "step": 19382 }, { "epoch": 3.584092176175432, "grad_norm": 0.8903321623802185, "learning_rate": 4.89254271903693e-06, "loss": 0.7165, "step": 19383 }, { "epoch": 3.5842780152388034, "grad_norm": 0.9509225487709045, "learning_rate": 4.888030011364953e-06, "loss": 0.6107, "step": 19384 }, { "epoch": 3.5844638543021743, "grad_norm": 1.4456324577331543, "learning_rate": 4.883519333694742e-06, "loss": 0.9916, "step": 19385 }, { "epoch": 3.584649693365545, "grad_norm": 1.193587064743042, "learning_rate": 4.879010686122587e-06, "loss": 0.8602, "step": 19386 }, { "epoch": 3.5848355324289165, "grad_norm": 1.0764076709747314, "learning_rate": 4.874504068744723e-06, "loss": 0.6382, "step": 19387 }, { "epoch": 3.585021371492288, "grad_norm": 0.8120892643928528, "learning_rate": 4.869999481657339e-06, "loss": 0.8553, "step": 19388 }, { "epoch": 3.5852072105556587, "grad_norm": 0.7945622205734253, "learning_rate": 4.865496924956536e-06, "loss": 0.5361, "step": 19389 }, { "epoch": 3.58539304961903, "grad_norm": 1.0395240783691406, "learning_rate": 4.860996398738471e-06, "loss": 0.9424, "step": 19390 }, { "epoch": 3.585578888682401, "grad_norm": 1.0619295835494995, "learning_rate": 4.856497903099166e-06, "loss": 0.7907, "step": 19391 }, { "epoch": 3.5857647277457723, "grad_norm": 0.8295533657073975, "learning_rate": 4.852001438134646e-06, "loss": 0.7913, "step": 19392 }, { "epoch": 3.585950566809143, "grad_norm": 1.1208479404449463, "learning_rate": 4.847507003940854e-06, "loss": 0.8687, "step": 19393 }, { "epoch": 3.5861364058725145, "grad_norm": 0.8541909456253052, "learning_rate": 4.84301460061376e-06, "loss": 0.7109, "step": 19394 }, { "epoch": 3.5863222449358854, "grad_norm": 1.2070188522338867, "learning_rate": 4.83852422824922e-06, "loss": 0.8644, "step": 19395 }, { "epoch": 3.5865080839992567, "grad_norm": 0.9395968317985535, "learning_rate": 4.834035886943067e-06, "loss": 0.7369, "step": 19396 }, { "epoch": 
3.5866939230626276, "grad_norm": 0.9339488744735718, "learning_rate": 4.829549576791093e-06, "loss": 0.6236, "step": 19397 }, { "epoch": 3.586879762125999, "grad_norm": 1.0937880277633667, "learning_rate": 4.825065297889075e-06, "loss": 0.8, "step": 19398 }, { "epoch": 3.5870656011893702, "grad_norm": 1.1339138746261597, "learning_rate": 4.8205830503327274e-06, "loss": 0.7031, "step": 19399 }, { "epoch": 3.587251440252741, "grad_norm": 0.8786293268203735, "learning_rate": 4.816102834217695e-06, "loss": 0.7978, "step": 19400 }, { "epoch": 3.587437279316112, "grad_norm": 0.8842145204544067, "learning_rate": 4.81162464963959e-06, "loss": 0.7576, "step": 19401 }, { "epoch": 3.5876231183794833, "grad_norm": 1.0113906860351562, "learning_rate": 4.807148496694025e-06, "loss": 0.8677, "step": 19402 }, { "epoch": 3.5878089574428547, "grad_norm": 0.8485032320022583, "learning_rate": 4.8026743754765125e-06, "loss": 0.9075, "step": 19403 }, { "epoch": 3.5879947965062255, "grad_norm": 1.0245674848556519, "learning_rate": 4.798202286082554e-06, "loss": 0.8218, "step": 19404 }, { "epoch": 3.588180635569597, "grad_norm": 0.878632664680481, "learning_rate": 4.7937322286075725e-06, "loss": 0.8435, "step": 19405 }, { "epoch": 3.5883664746329678, "grad_norm": 0.9851412773132324, "learning_rate": 4.789264203147026e-06, "loss": 0.8201, "step": 19406 }, { "epoch": 3.588552313696339, "grad_norm": 0.9448543190956116, "learning_rate": 4.784798209796215e-06, "loss": 0.6644, "step": 19407 }, { "epoch": 3.58873815275971, "grad_norm": 1.1279706954956055, "learning_rate": 4.78033424865052e-06, "loss": 0.9009, "step": 19408 }, { "epoch": 3.5889239918230813, "grad_norm": 0.9069414138793945, "learning_rate": 4.775872319805164e-06, "loss": 0.7876, "step": 19409 }, { "epoch": 3.589109830886452, "grad_norm": 0.9666802287101746, "learning_rate": 4.771412423355415e-06, "loss": 0.8229, "step": 19410 }, { "epoch": 3.5892956699498235, "grad_norm": 1.0100266933441162, "learning_rate": 4.766954559396452e-06, 
"loss": 0.9738, "step": 19411 }, { "epoch": 3.5894815090131944, "grad_norm": 1.088109016418457, "learning_rate": 4.762498728023423e-06, "loss": 0.752, "step": 19412 }, { "epoch": 3.5896673480765657, "grad_norm": 1.0191460847854614, "learning_rate": 4.7580449293314066e-06, "loss": 0.8325, "step": 19413 }, { "epoch": 3.589853187139937, "grad_norm": 0.9894083142280579, "learning_rate": 4.753593163415493e-06, "loss": 0.9898, "step": 19414 }, { "epoch": 3.590039026203308, "grad_norm": 0.876889169216156, "learning_rate": 4.749143430370684e-06, "loss": 0.671, "step": 19415 }, { "epoch": 3.590224865266679, "grad_norm": 0.7905625700950623, "learning_rate": 4.744695730291948e-06, "loss": 0.4096, "step": 19416 }, { "epoch": 3.59041070433005, "grad_norm": 0.8803296089172363, "learning_rate": 4.74025006327421e-06, "loss": 0.5567, "step": 19417 }, { "epoch": 3.5905965433934215, "grad_norm": 0.9684118032455444, "learning_rate": 4.735806429412359e-06, "loss": 0.8737, "step": 19418 }, { "epoch": 3.5907823824567924, "grad_norm": 0.9677191376686096, "learning_rate": 4.731364828801243e-06, "loss": 0.7487, "step": 19419 }, { "epoch": 3.5909682215201633, "grad_norm": 0.936278760433197, "learning_rate": 4.726925261535675e-06, "loss": 0.7198, "step": 19420 }, { "epoch": 3.5911540605835346, "grad_norm": 1.0864543914794922, "learning_rate": 4.722487727710368e-06, "loss": 1.0318, "step": 19421 }, { "epoch": 3.591339899646906, "grad_norm": 1.2860759496688843, "learning_rate": 4.718052227420078e-06, "loss": 0.631, "step": 19422 }, { "epoch": 3.591525738710277, "grad_norm": 0.8440675735473633, "learning_rate": 4.713618760759442e-06, "loss": 0.6534, "step": 19423 }, { "epoch": 3.591711577773648, "grad_norm": 0.9321073293685913, "learning_rate": 4.709187327823106e-06, "loss": 0.714, "step": 19424 }, { "epoch": 3.591897416837019, "grad_norm": 0.8669753074645996, "learning_rate": 4.704757928705617e-06, "loss": 0.834, "step": 19425 }, { "epoch": 3.5920832559003903, "grad_norm": 1.121113657951355, 
"learning_rate": 4.700330563501565e-06, "loss": 0.8264, "step": 19426 }, { "epoch": 3.5922690949637612, "grad_norm": 0.8920173645019531, "learning_rate": 4.6959052323054085e-06, "loss": 1.0234, "step": 19427 }, { "epoch": 3.5924549340271326, "grad_norm": 0.9285154938697815, "learning_rate": 4.691481935211584e-06, "loss": 0.8498, "step": 19428 }, { "epoch": 3.592640773090504, "grad_norm": 1.2579076290130615, "learning_rate": 4.687060672314536e-06, "loss": 1.0118, "step": 19429 }, { "epoch": 3.5928266121538748, "grad_norm": 0.8487569689750671, "learning_rate": 4.682641443708624e-06, "loss": 0.7867, "step": 19430 }, { "epoch": 3.5930124512172457, "grad_norm": 0.9112158417701721, "learning_rate": 4.67822424948815e-06, "loss": 0.6424, "step": 19431 }, { "epoch": 3.593198290280617, "grad_norm": 0.9790871143341064, "learning_rate": 4.673809089747416e-06, "loss": 0.7442, "step": 19432 }, { "epoch": 3.5933841293439883, "grad_norm": 1.0553319454193115, "learning_rate": 4.669395964580614e-06, "loss": 0.8304, "step": 19433 }, { "epoch": 3.593569968407359, "grad_norm": 0.9264032244682312, "learning_rate": 4.6649848740819794e-06, "loss": 0.7993, "step": 19434 }, { "epoch": 3.59375580747073, "grad_norm": 0.8428817391395569, "learning_rate": 4.660575818345647e-06, "loss": 0.7858, "step": 19435 }, { "epoch": 3.5939416465341014, "grad_norm": 0.8501147627830505, "learning_rate": 4.6561687974657095e-06, "loss": 0.8255, "step": 19436 }, { "epoch": 3.5941274855974727, "grad_norm": 0.9591187834739685, "learning_rate": 4.6517638115362136e-06, "loss": 0.7297, "step": 19437 }, { "epoch": 3.5943133246608436, "grad_norm": 0.8795649409294128, "learning_rate": 4.647360860651217e-06, "loss": 0.6012, "step": 19438 }, { "epoch": 3.594499163724215, "grad_norm": 1.1258068084716797, "learning_rate": 4.642959944904656e-06, "loss": 0.8224, "step": 19439 }, { "epoch": 3.594685002787586, "grad_norm": 0.9716440439224243, "learning_rate": 4.63856106439049e-06, "loss": 0.9072, "step": 19440 }, { "epoch": 
3.594870841850957, "grad_norm": 0.8310915231704712, "learning_rate": 4.634164219202575e-06, "loss": 0.7235, "step": 19441 }, { "epoch": 3.595056680914328, "grad_norm": 0.8197675943374634, "learning_rate": 4.629769409434781e-06, "loss": 0.7642, "step": 19442 }, { "epoch": 3.5952425199776994, "grad_norm": 1.115140438079834, "learning_rate": 4.625376635180901e-06, "loss": 0.9677, "step": 19443 }, { "epoch": 3.5954283590410703, "grad_norm": 0.995252251625061, "learning_rate": 4.620985896534679e-06, "loss": 0.7594, "step": 19444 }, { "epoch": 3.5956141981044416, "grad_norm": 1.0043798685073853, "learning_rate": 4.616597193589833e-06, "loss": 0.7948, "step": 19445 }, { "epoch": 3.5958000371678125, "grad_norm": 0.8082947731018066, "learning_rate": 4.612210526440053e-06, "loss": 0.6672, "step": 19446 }, { "epoch": 3.595985876231184, "grad_norm": 0.9716810584068298, "learning_rate": 4.607825895178941e-06, "loss": 1.0794, "step": 19447 }, { "epoch": 3.596171715294555, "grad_norm": 0.8218140602111816, "learning_rate": 4.603443299900068e-06, "loss": 0.7711, "step": 19448 }, { "epoch": 3.596357554357926, "grad_norm": 1.154445767402649, "learning_rate": 4.599062740696991e-06, "loss": 0.9637, "step": 19449 }, { "epoch": 3.596543393421297, "grad_norm": 0.8171733021736145, "learning_rate": 4.594684217663225e-06, "loss": 0.6243, "step": 19450 }, { "epoch": 3.5967292324846682, "grad_norm": 1.1623929738998413, "learning_rate": 4.590307730892196e-06, "loss": 0.4765, "step": 19451 }, { "epoch": 3.5969150715480396, "grad_norm": 0.8249045014381409, "learning_rate": 4.585933280477328e-06, "loss": 0.8474, "step": 19452 }, { "epoch": 3.5971009106114105, "grad_norm": 0.8545265197753906, "learning_rate": 4.581560866511958e-06, "loss": 0.6475, "step": 19453 }, { "epoch": 3.597286749674782, "grad_norm": 1.0757006406784058, "learning_rate": 4.577190489089445e-06, "loss": 0.8369, "step": 19454 }, { "epoch": 3.5974725887381527, "grad_norm": 0.8231223821640015, "learning_rate": 
4.572822148303035e-06, "loss": 0.6575, "step": 19455 }, { "epoch": 3.597658427801524, "grad_norm": 0.9437882900238037, "learning_rate": 4.568455844245989e-06, "loss": 0.9076, "step": 19456 }, { "epoch": 3.597844266864895, "grad_norm": 1.02458655834198, "learning_rate": 4.564091577011465e-06, "loss": 0.7054, "step": 19457 }, { "epoch": 3.598030105928266, "grad_norm": 0.8490471243858337, "learning_rate": 4.559729346692654e-06, "loss": 0.8083, "step": 19458 }, { "epoch": 3.598215944991637, "grad_norm": 1.1624078750610352, "learning_rate": 4.5553691533826385e-06, "loss": 0.8191, "step": 19459 }, { "epoch": 3.5984017840550084, "grad_norm": 0.7939742803573608, "learning_rate": 4.551010997174454e-06, "loss": 0.6867, "step": 19460 }, { "epoch": 3.5985876231183793, "grad_norm": 0.9577316045761108, "learning_rate": 4.54665487816115e-06, "loss": 0.9839, "step": 19461 }, { "epoch": 3.5987734621817506, "grad_norm": 1.0471750497817993, "learning_rate": 4.542300796435706e-06, "loss": 0.8577, "step": 19462 }, { "epoch": 3.598959301245122, "grad_norm": 1.0368680953979492, "learning_rate": 4.537948752091037e-06, "loss": 0.901, "step": 19463 }, { "epoch": 3.599145140308493, "grad_norm": 1.006668210029602, "learning_rate": 4.5335987452200355e-06, "loss": 0.9178, "step": 19464 }, { "epoch": 3.5993309793718637, "grad_norm": 1.0635803937911987, "learning_rate": 4.529250775915528e-06, "loss": 0.6381, "step": 19465 }, { "epoch": 3.599516818435235, "grad_norm": 0.9568383693695068, "learning_rate": 4.524904844270339e-06, "loss": 0.95, "step": 19466 }, { "epoch": 3.5997026574986064, "grad_norm": 1.078012228012085, "learning_rate": 4.520560950377217e-06, "loss": 0.8683, "step": 19467 }, { "epoch": 3.5998884965619773, "grad_norm": 0.9089167714118958, "learning_rate": 4.5162190943288665e-06, "loss": 0.8525, "step": 19468 }, { "epoch": 3.600074335625348, "grad_norm": 1.0891344547271729, "learning_rate": 4.5118792762179676e-06, "loss": 0.9373, "step": 19469 }, { "epoch": 3.6002601746887195, 
"grad_norm": 0.9258513450622559, "learning_rate": 4.507541496137125e-06, "loss": 0.5951, "step": 19470 }, { "epoch": 3.600446013752091, "grad_norm": 0.9756423830986023, "learning_rate": 4.503205754178952e-06, "loss": 0.9625, "step": 19471 }, { "epoch": 3.6006318528154617, "grad_norm": 1.035539150238037, "learning_rate": 4.498872050435976e-06, "loss": 0.7596, "step": 19472 }, { "epoch": 3.600817691878833, "grad_norm": 1.0090545415878296, "learning_rate": 4.494540385000679e-06, "loss": 0.8671, "step": 19473 }, { "epoch": 3.601003530942204, "grad_norm": 0.8952997326850891, "learning_rate": 4.490210757965529e-06, "loss": 0.7329, "step": 19474 }, { "epoch": 3.6011893700055753, "grad_norm": 0.9174466133117676, "learning_rate": 4.485883169422933e-06, "loss": 0.8245, "step": 19475 }, { "epoch": 3.601375209068946, "grad_norm": 1.4027634859085083, "learning_rate": 4.481557619465249e-06, "loss": 0.7972, "step": 19476 }, { "epoch": 3.6015610481323175, "grad_norm": 1.035872459411621, "learning_rate": 4.477234108184791e-06, "loss": 0.9884, "step": 19477 }, { "epoch": 3.601746887195689, "grad_norm": 0.9323967695236206, "learning_rate": 4.472912635673865e-06, "loss": 0.7008, "step": 19478 }, { "epoch": 3.6019327262590597, "grad_norm": 0.8735101222991943, "learning_rate": 4.468593202024685e-06, "loss": 0.7825, "step": 19479 }, { "epoch": 3.6021185653224306, "grad_norm": 0.9623664617538452, "learning_rate": 4.464275807329432e-06, "loss": 0.8998, "step": 19480 }, { "epoch": 3.602304404385802, "grad_norm": 1.0125689506530762, "learning_rate": 4.459960451680268e-06, "loss": 1.0694, "step": 19481 }, { "epoch": 3.6024902434491732, "grad_norm": 0.981284499168396, "learning_rate": 4.4556471351692945e-06, "loss": 1.1182, "step": 19482 }, { "epoch": 3.602676082512544, "grad_norm": 0.9122440218925476, "learning_rate": 4.451335857888583e-06, "loss": 0.9736, "step": 19483 }, { "epoch": 3.602861921575915, "grad_norm": 1.042223334312439, "learning_rate": 4.447026619930139e-06, "loss": 0.7919, 
"step": 19484 }, { "epoch": 3.6030477606392863, "grad_norm": 0.9088339805603027, "learning_rate": 4.442719421385922e-06, "loss": 0.7814, "step": 19485 }, { "epoch": 3.6032335997026577, "grad_norm": 1.1977900266647339, "learning_rate": 4.43841426234789e-06, "loss": 0.7343, "step": 19486 }, { "epoch": 3.6034194387660285, "grad_norm": 1.3704267740249634, "learning_rate": 4.4341111429079156e-06, "loss": 1.1858, "step": 19487 }, { "epoch": 3.6036052778294, "grad_norm": 0.9473437666893005, "learning_rate": 4.429810063157835e-06, "loss": 0.7812, "step": 19488 }, { "epoch": 3.6037911168927708, "grad_norm": 1.0082486867904663, "learning_rate": 4.425511023189455e-06, "loss": 0.7871, "step": 19489 }, { "epoch": 3.603976955956142, "grad_norm": 1.0087922811508179, "learning_rate": 4.421214023094533e-06, "loss": 0.8876, "step": 19490 }, { "epoch": 3.604162795019513, "grad_norm": 0.7733488082885742, "learning_rate": 4.416919062964764e-06, "loss": 0.6503, "step": 19491 }, { "epoch": 3.6043486340828843, "grad_norm": 0.8714876770973206, "learning_rate": 4.412626142891851e-06, "loss": 0.5623, "step": 19492 }, { "epoch": 3.604534473146255, "grad_norm": 0.8953797221183777, "learning_rate": 4.408335262967378e-06, "loss": 0.7119, "step": 19493 }, { "epoch": 3.6047203122096265, "grad_norm": 0.7247197031974792, "learning_rate": 4.404046423282959e-06, "loss": 0.5571, "step": 19494 }, { "epoch": 3.6049061512729974, "grad_norm": 1.0153875350952148, "learning_rate": 4.399759623930122e-06, "loss": 0.8047, "step": 19495 }, { "epoch": 3.6050919903363687, "grad_norm": 0.9947652220726013, "learning_rate": 4.3954748650003704e-06, "loss": 0.9466, "step": 19496 }, { "epoch": 3.60527782939974, "grad_norm": 0.8497185707092285, "learning_rate": 4.3911921465851105e-06, "loss": 0.8645, "step": 19497 }, { "epoch": 3.605463668463111, "grad_norm": 0.9185053110122681, "learning_rate": 4.386911468775812e-06, "loss": 0.6431, "step": 19498 }, { "epoch": 3.605649507526482, "grad_norm": 0.8739264011383057, 
"learning_rate": 4.382632831663802e-06, "loss": 0.7773, "step": 19499 }, { "epoch": 3.605835346589853, "grad_norm": 0.8473137617111206, "learning_rate": 4.378356235340397e-06, "loss": 0.7008, "step": 19500 }, { "epoch": 3.6060211856532245, "grad_norm": 1.0435789823532104, "learning_rate": 4.374081679896902e-06, "loss": 0.8217, "step": 19501 }, { "epoch": 3.6062070247165954, "grad_norm": 0.8870427012443542, "learning_rate": 4.36980916542451e-06, "loss": 0.8054, "step": 19502 }, { "epoch": 3.6063928637799667, "grad_norm": 0.993805468082428, "learning_rate": 4.365538692014459e-06, "loss": 0.7849, "step": 19503 }, { "epoch": 3.6065787028433376, "grad_norm": 1.0244243144989014, "learning_rate": 4.361270259757855e-06, "loss": 0.8614, "step": 19504 }, { "epoch": 3.606764541906709, "grad_norm": 0.9650846719741821, "learning_rate": 4.3570038687458125e-06, "loss": 0.769, "step": 19505 }, { "epoch": 3.60695038097008, "grad_norm": 0.8885460495948792, "learning_rate": 4.352739519069404e-06, "loss": 0.7117, "step": 19506 }, { "epoch": 3.607136220033451, "grad_norm": 1.1355706453323364, "learning_rate": 4.348477210819624e-06, "loss": 1.001, "step": 19507 }, { "epoch": 3.607322059096822, "grad_norm": 0.9262754917144775, "learning_rate": 4.344216944087454e-06, "loss": 1.0028, "step": 19508 }, { "epoch": 3.6075078981601933, "grad_norm": 0.9142513275146484, "learning_rate": 4.339958718963821e-06, "loss": 0.7278, "step": 19509 }, { "epoch": 3.6076937372235642, "grad_norm": 1.113861322402954, "learning_rate": 4.335702535539621e-06, "loss": 0.9307, "step": 19510 }, { "epoch": 3.6078795762869356, "grad_norm": 0.9880869388580322, "learning_rate": 4.331448393905679e-06, "loss": 0.6321, "step": 19511 }, { "epoch": 3.608065415350307, "grad_norm": 1.4004838466644287, "learning_rate": 4.32719629415278e-06, "loss": 0.9643, "step": 19512 }, { "epoch": 3.6082512544136778, "grad_norm": 0.9977883696556091, "learning_rate": 4.322946236371705e-06, "loss": 0.7023, "step": 19513 }, { "epoch": 
3.6084370934770487, "grad_norm": 0.9743738174438477, "learning_rate": 4.318698220653161e-06, "loss": 0.7202, "step": 19514 }, { "epoch": 3.60862293254042, "grad_norm": 1.4286434650421143, "learning_rate": 4.314452247087808e-06, "loss": 0.6513, "step": 19515 }, { "epoch": 3.6088087716037913, "grad_norm": 0.9633848667144775, "learning_rate": 4.310208315766273e-06, "loss": 0.8454, "step": 19516 }, { "epoch": 3.608994610667162, "grad_norm": 1.1255860328674316, "learning_rate": 4.305966426779118e-06, "loss": 0.9287, "step": 19517 }, { "epoch": 3.609180449730533, "grad_norm": 1.0300997495651245, "learning_rate": 4.301726580216902e-06, "loss": 0.8876, "step": 19518 }, { "epoch": 3.6093662887939044, "grad_norm": 0.9136091470718384, "learning_rate": 4.2974887761701e-06, "loss": 0.6974, "step": 19519 }, { "epoch": 3.6095521278572757, "grad_norm": 0.8567022681236267, "learning_rate": 4.293253014729171e-06, "loss": 0.9059, "step": 19520 }, { "epoch": 3.6097379669206466, "grad_norm": 1.0704967975616455, "learning_rate": 4.28901929598452e-06, "loss": 0.5962, "step": 19521 }, { "epoch": 3.609923805984018, "grad_norm": 0.9951958060264587, "learning_rate": 4.284787620026498e-06, "loss": 0.5911, "step": 19522 }, { "epoch": 3.610109645047389, "grad_norm": 0.8182536363601685, "learning_rate": 4.280557986945433e-06, "loss": 0.7981, "step": 19523 }, { "epoch": 3.61029548411076, "grad_norm": 0.8957955837249756, "learning_rate": 4.276330396831596e-06, "loss": 0.8166, "step": 19524 }, { "epoch": 3.610481323174131, "grad_norm": 0.9633662700653076, "learning_rate": 4.272104849775216e-06, "loss": 0.8607, "step": 19525 }, { "epoch": 3.6106671622375024, "grad_norm": 1.0764795541763306, "learning_rate": 4.2678813458664866e-06, "loss": 0.7152, "step": 19526 }, { "epoch": 3.6108530013008733, "grad_norm": 0.7728953957557678, "learning_rate": 4.263659885195559e-06, "loss": 0.5237, "step": 19527 }, { "epoch": 3.6110388403642446, "grad_norm": 0.8335964679718018, "learning_rate": 
4.2594404678524935e-06, "loss": 0.6697, "step": 19528 }, { "epoch": 3.6112246794276155, "grad_norm": 0.9022690653800964, "learning_rate": 4.255223093927396e-06, "loss": 0.4068, "step": 19529 }, { "epoch": 3.611410518490987, "grad_norm": 1.0087932348251343, "learning_rate": 4.251007763510262e-06, "loss": 0.7562, "step": 19530 }, { "epoch": 3.611596357554358, "grad_norm": 0.8287792205810547, "learning_rate": 4.24679447669104e-06, "loss": 0.7704, "step": 19531 }, { "epoch": 3.611782196617729, "grad_norm": 1.1251407861709595, "learning_rate": 4.242583233559671e-06, "loss": 0.6351, "step": 19532 }, { "epoch": 3.6119680356811, "grad_norm": 1.1125820875167847, "learning_rate": 4.238374034206028e-06, "loss": 0.8003, "step": 19533 }, { "epoch": 3.6121538747444712, "grad_norm": 0.8124536275863647, "learning_rate": 4.234166878719981e-06, "loss": 0.7484, "step": 19534 }, { "epoch": 3.6123397138078426, "grad_norm": 0.8514935970306396, "learning_rate": 4.229961767191293e-06, "loss": 0.8515, "step": 19535 }, { "epoch": 3.6125255528712135, "grad_norm": 1.0911568403244019, "learning_rate": 4.225758699709725e-06, "loss": 1.0046, "step": 19536 }, { "epoch": 3.612711391934585, "grad_norm": 0.823409378528595, "learning_rate": 4.2215576763649735e-06, "loss": 0.459, "step": 19537 }, { "epoch": 3.6128972309979557, "grad_norm": 0.8221675157546997, "learning_rate": 4.217358697246709e-06, "loss": 0.6495, "step": 19538 }, { "epoch": 3.613083070061327, "grad_norm": 0.8527970910072327, "learning_rate": 4.213161762444573e-06, "loss": 0.8859, "step": 19539 }, { "epoch": 3.613268909124698, "grad_norm": 1.006277322769165, "learning_rate": 4.208966872048092e-06, "loss": 0.6568, "step": 19540 }, { "epoch": 3.613454748188069, "grad_norm": 1.1168917417526245, "learning_rate": 4.204774026146851e-06, "loss": 1.0532, "step": 19541 }, { "epoch": 3.61364058725144, "grad_norm": 0.8510649800300598, "learning_rate": 4.200583224830324e-06, "loss": 0.8008, "step": 19542 }, { "epoch": 3.6138264263148114, 
"grad_norm": 0.9765340089797974, "learning_rate": 4.196394468187936e-06, "loss": 1.0101, "step": 19543 }, { "epoch": 3.6140122653781823, "grad_norm": 0.9470708966255188, "learning_rate": 4.192207756309097e-06, "loss": 0.7003, "step": 19544 }, { "epoch": 3.6141981044415536, "grad_norm": 1.0259088277816772, "learning_rate": 4.188023089283177e-06, "loss": 0.9788, "step": 19545 }, { "epoch": 3.614383943504925, "grad_norm": 0.8592630624771118, "learning_rate": 4.183840467199485e-06, "loss": 0.762, "step": 19546 }, { "epoch": 3.614569782568296, "grad_norm": 1.0951875448226929, "learning_rate": 4.179659890147302e-06, "loss": 1.1296, "step": 19547 }, { "epoch": 3.6147556216316667, "grad_norm": 0.8168365359306335, "learning_rate": 4.175481358215827e-06, "loss": 0.7834, "step": 19548 }, { "epoch": 3.614941460695038, "grad_norm": 1.0954785346984863, "learning_rate": 4.171304871494264e-06, "loss": 0.6931, "step": 19549 }, { "epoch": 3.6151272997584094, "grad_norm": 1.0510135889053345, "learning_rate": 4.167130430071764e-06, "loss": 1.1022, "step": 19550 }, { "epoch": 3.6153131388217803, "grad_norm": 1.0180712938308716, "learning_rate": 4.1629580340374005e-06, "loss": 1.0358, "step": 19551 }, { "epoch": 3.6154989778851516, "grad_norm": 1.0190739631652832, "learning_rate": 4.1587876834802255e-06, "loss": 0.7687, "step": 19552 }, { "epoch": 3.6156848169485225, "grad_norm": 1.0596504211425781, "learning_rate": 4.154619378489266e-06, "loss": 0.9051, "step": 19553 }, { "epoch": 3.615870656011894, "grad_norm": 0.8554338812828064, "learning_rate": 4.150453119153475e-06, "loss": 0.8167, "step": 19554 }, { "epoch": 3.6160564950752647, "grad_norm": 0.8878720998764038, "learning_rate": 4.14628890556178e-06, "loss": 0.9358, "step": 19555 }, { "epoch": 3.616242334138636, "grad_norm": 1.0696130990982056, "learning_rate": 4.142126737803065e-06, "loss": 0.862, "step": 19556 }, { "epoch": 3.616428173202007, "grad_norm": 1.079859733581543, "learning_rate": 4.137966615966138e-06, "loss": 0.9542, 
"step": 19557 }, { "epoch": 3.6166140122653783, "grad_norm": 0.9978886246681213, "learning_rate": 4.1338085401398275e-06, "loss": 1.1051, "step": 19558 }, { "epoch": 3.616799851328749, "grad_norm": 0.9419093132019043, "learning_rate": 4.129652510412851e-06, "loss": 0.87, "step": 19559 }, { "epoch": 3.6169856903921205, "grad_norm": 0.9488292336463928, "learning_rate": 4.125498526873917e-06, "loss": 0.9075, "step": 19560 }, { "epoch": 3.617171529455492, "grad_norm": 0.9746842980384827, "learning_rate": 4.121346589611697e-06, "loss": 0.7477, "step": 19561 }, { "epoch": 3.6173573685188627, "grad_norm": 0.992132842540741, "learning_rate": 4.117196698714798e-06, "loss": 0.7586, "step": 19562 }, { "epoch": 3.6175432075822336, "grad_norm": 0.9009767174720764, "learning_rate": 4.113048854271795e-06, "loss": 1.0266, "step": 19563 }, { "epoch": 3.617729046645605, "grad_norm": 0.7765776515007019, "learning_rate": 4.108903056371216e-06, "loss": 0.6627, "step": 19564 }, { "epoch": 3.6179148857089762, "grad_norm": 1.0591367483139038, "learning_rate": 4.104759305101525e-06, "loss": 0.918, "step": 19565 }, { "epoch": 3.618100724772347, "grad_norm": 1.149461030960083, "learning_rate": 4.100617600551216e-06, "loss": 0.6963, "step": 19566 }, { "epoch": 3.618286563835718, "grad_norm": 0.8625674247741699, "learning_rate": 4.096477942808641e-06, "loss": 0.7483, "step": 19567 }, { "epoch": 3.6184724028990893, "grad_norm": 0.8393241763114929, "learning_rate": 4.092340331962152e-06, "loss": 0.9917, "step": 19568 }, { "epoch": 3.6186582419624607, "grad_norm": 0.8998364806175232, "learning_rate": 4.088204768100101e-06, "loss": 0.6585, "step": 19569 }, { "epoch": 3.6188440810258315, "grad_norm": 0.9522582292556763, "learning_rate": 4.0840712513107175e-06, "loss": 0.7368, "step": 19570 }, { "epoch": 3.619029920089203, "grad_norm": 0.8371335864067078, "learning_rate": 4.07993978168224e-06, "loss": 0.6592, "step": 19571 }, { "epoch": 3.6192157591525738, "grad_norm": 0.9628044962882996, 
"learning_rate": 4.075810359302823e-06, "loss": 1.0382, "step": 19572 }, { "epoch": 3.619401598215945, "grad_norm": 1.0300853252410889, "learning_rate": 4.071682984260638e-06, "loss": 0.798, "step": 19573 }, { "epoch": 3.619587437279316, "grad_norm": 0.90743088722229, "learning_rate": 4.0675576566437615e-06, "loss": 0.7294, "step": 19574 }, { "epoch": 3.6197732763426873, "grad_norm": 1.2764816284179688, "learning_rate": 4.063434376540232e-06, "loss": 0.7791, "step": 19575 }, { "epoch": 3.619959115406058, "grad_norm": 1.1036268472671509, "learning_rate": 4.059313144038069e-06, "loss": 0.7762, "step": 19576 }, { "epoch": 3.6201449544694295, "grad_norm": 0.9103646874427795, "learning_rate": 4.055193959225212e-06, "loss": 0.8537, "step": 19577 }, { "epoch": 3.6203307935328004, "grad_norm": 0.8981128334999084, "learning_rate": 4.051076822189603e-06, "loss": 0.9564, "step": 19578 }, { "epoch": 3.6205166325961717, "grad_norm": 0.9731377363204956, "learning_rate": 4.046961733019117e-06, "loss": 0.6327, "step": 19579 }, { "epoch": 3.620702471659543, "grad_norm": 0.83604496717453, "learning_rate": 4.042848691801548e-06, "loss": 0.6543, "step": 19580 }, { "epoch": 3.620888310722914, "grad_norm": 0.8671492338180542, "learning_rate": 4.0387376986247174e-06, "loss": 0.8174, "step": 19581 }, { "epoch": 3.621074149786285, "grad_norm": 0.9103574752807617, "learning_rate": 4.0346287535763524e-06, "loss": 0.6509, "step": 19582 }, { "epoch": 3.621259988849656, "grad_norm": 0.9837188124656677, "learning_rate": 4.030521856744163e-06, "loss": 0.7643, "step": 19583 }, { "epoch": 3.6214458279130275, "grad_norm": 0.8985093235969543, "learning_rate": 4.026417008215766e-06, "loss": 0.8411, "step": 19584 }, { "epoch": 3.6216316669763984, "grad_norm": 0.9094043374061584, "learning_rate": 4.022314208078826e-06, "loss": 0.7546, "step": 19585 }, { "epoch": 3.6218175060397697, "grad_norm": 1.068396806716919, "learning_rate": 4.018213456420872e-06, "loss": 0.8532, "step": 19586 }, { "epoch": 
3.6220033451031406, "grad_norm": 0.9189227819442749, "learning_rate": 4.014114753329446e-06, "loss": 0.6418, "step": 19587 }, { "epoch": 3.622189184166512, "grad_norm": 1.0001705884933472, "learning_rate": 4.01001809889201e-06, "loss": 0.8016, "step": 19588 }, { "epoch": 3.622375023229883, "grad_norm": 1.0853298902511597, "learning_rate": 4.0059234931960284e-06, "loss": 0.9409, "step": 19589 }, { "epoch": 3.622560862293254, "grad_norm": 1.1793755292892456, "learning_rate": 4.001830936328877e-06, "loss": 1.1449, "step": 19590 }, { "epoch": 3.622746701356625, "grad_norm": 0.8038926124572754, "learning_rate": 3.997740428377905e-06, "loss": 0.7745, "step": 19591 }, { "epoch": 3.6229325404199964, "grad_norm": 0.9299508929252625, "learning_rate": 3.993651969430412e-06, "loss": 0.9194, "step": 19592 }, { "epoch": 3.6231183794833672, "grad_norm": 1.0494447946548462, "learning_rate": 3.989565559573672e-06, "loss": 0.9838, "step": 19593 }, { "epoch": 3.6233042185467386, "grad_norm": 1.0491498708724976, "learning_rate": 3.985481198894891e-06, "loss": 0.926, "step": 19594 }, { "epoch": 3.62349005761011, "grad_norm": 1.0643278360366821, "learning_rate": 3.981398887481247e-06, "loss": 0.6892, "step": 19595 }, { "epoch": 3.623675896673481, "grad_norm": 0.9665752053260803, "learning_rate": 3.977318625419857e-06, "loss": 0.994, "step": 19596 }, { "epoch": 3.6238617357368517, "grad_norm": 0.9783822298049927, "learning_rate": 3.973240412797819e-06, "loss": 0.7812, "step": 19597 }, { "epoch": 3.624047574800223, "grad_norm": 0.9810153245925903, "learning_rate": 3.969164249702184e-06, "loss": 0.7105, "step": 19598 }, { "epoch": 3.6242334138635943, "grad_norm": 0.8744558095932007, "learning_rate": 3.965090136219951e-06, "loss": 1.0382, "step": 19599 }, { "epoch": 3.624419252926965, "grad_norm": 0.9780319333076477, "learning_rate": 3.96101807243805e-06, "loss": 0.9367, "step": 19600 }, { "epoch": 3.624605091990336, "grad_norm": 0.8120416402816772, "learning_rate": 3.9569480584434216e-06, 
"loss": 0.8283, "step": 19601 }, { "epoch": 3.6247909310537074, "grad_norm": 0.9405677914619446, "learning_rate": 3.952880094322919e-06, "loss": 0.7167, "step": 19602 }, { "epoch": 3.6249767701170788, "grad_norm": 0.970422625541687, "learning_rate": 3.9488141801633736e-06, "loss": 0.7446, "step": 19603 }, { "epoch": 3.6251626091804496, "grad_norm": 0.9250800609588623, "learning_rate": 3.944750316051537e-06, "loss": 0.6638, "step": 19604 }, { "epoch": 3.625348448243821, "grad_norm": 0.8559107184410095, "learning_rate": 3.940688502074186e-06, "loss": 0.81, "step": 19605 }, { "epoch": 3.625534287307192, "grad_norm": 0.9691064953804016, "learning_rate": 3.9366287383179935e-06, "loss": 0.9659, "step": 19606 }, { "epoch": 3.625720126370563, "grad_norm": 1.0005972385406494, "learning_rate": 3.932571024869591e-06, "loss": 0.9558, "step": 19607 }, { "epoch": 3.625905965433934, "grad_norm": 0.8913658261299133, "learning_rate": 3.9285153618156104e-06, "loss": 0.8656, "step": 19608 }, { "epoch": 3.6260918044973054, "grad_norm": 0.9526522755622864, "learning_rate": 3.924461749242614e-06, "loss": 0.79, "step": 19609 }, { "epoch": 3.6262776435606767, "grad_norm": 0.9458757638931274, "learning_rate": 3.920410187237111e-06, "loss": 0.8057, "step": 19610 }, { "epoch": 3.6264634826240476, "grad_norm": 0.8560062050819397, "learning_rate": 3.916360675885566e-06, "loss": 0.7014, "step": 19611 }, { "epoch": 3.6266493216874185, "grad_norm": 0.9556618332862854, "learning_rate": 3.91231321527441e-06, "loss": 0.7079, "step": 19612 }, { "epoch": 3.62683516075079, "grad_norm": 0.9822280406951904, "learning_rate": 3.908267805490051e-06, "loss": 0.9576, "step": 19613 }, { "epoch": 3.627020999814161, "grad_norm": 1.2236249446868896, "learning_rate": 3.904224446618809e-06, "loss": 0.9071, "step": 19614 }, { "epoch": 3.627206838877532, "grad_norm": 0.9653648138046265, "learning_rate": 3.900183138746993e-06, "loss": 0.7818, "step": 19615 }, { "epoch": 3.627392677940903, "grad_norm": 
0.9901843667030334, "learning_rate": 3.8961438819608345e-06, "loss": 0.8232, "step": 19616 }, { "epoch": 3.6275785170042743, "grad_norm": 0.931734561920166, "learning_rate": 3.892106676346574e-06, "loss": 0.7768, "step": 19617 }, { "epoch": 3.6277643560676456, "grad_norm": 1.1918833255767822, "learning_rate": 3.888071521990378e-06, "loss": 0.7921, "step": 19618 }, { "epoch": 3.6279501951310165, "grad_norm": 1.5211327075958252, "learning_rate": 3.884038418978364e-06, "loss": 1.1896, "step": 19619 }, { "epoch": 3.628136034194388, "grad_norm": 1.0748412609100342, "learning_rate": 3.880007367396588e-06, "loss": 0.7944, "step": 19620 }, { "epoch": 3.6283218732577587, "grad_norm": 1.0167418718338013, "learning_rate": 3.875978367331124e-06, "loss": 0.8971, "step": 19621 }, { "epoch": 3.62850771232113, "grad_norm": 0.8878464698791504, "learning_rate": 3.871951418867936e-06, "loss": 0.8168, "step": 19622 }, { "epoch": 3.628693551384501, "grad_norm": 0.9061787128448486, "learning_rate": 3.867926522092991e-06, "loss": 0.6172, "step": 19623 }, { "epoch": 3.628879390447872, "grad_norm": 1.3221555948257446, "learning_rate": 3.863903677092173e-06, "loss": 0.7848, "step": 19624 }, { "epoch": 3.629065229511243, "grad_norm": 0.8736843466758728, "learning_rate": 3.859882883951371e-06, "loss": 0.7202, "step": 19625 }, { "epoch": 3.6292510685746144, "grad_norm": 1.0111111402511597, "learning_rate": 3.855864142756382e-06, "loss": 0.9331, "step": 19626 }, { "epoch": 3.6294369076379853, "grad_norm": 0.9184491038322449, "learning_rate": 3.8518474535929826e-06, "loss": 0.6702, "step": 19627 }, { "epoch": 3.6296227467013567, "grad_norm": 1.0071487426757812, "learning_rate": 3.847832816546881e-06, "loss": 0.7365, "step": 19628 }, { "epoch": 3.629808585764728, "grad_norm": 1.061942458152771, "learning_rate": 3.8438202317037986e-06, "loss": 0.8551, "step": 19629 }, { "epoch": 3.629994424828099, "grad_norm": 0.9342822432518005, "learning_rate": 3.8398096991493774e-06, "loss": 0.7583, "step": 
19630 }, { "epoch": 3.6301802638914697, "grad_norm": 0.8872012495994568, "learning_rate": 3.835801218969193e-06, "loss": 0.7427, "step": 19631 }, { "epoch": 3.630366102954841, "grad_norm": 1.1290273666381836, "learning_rate": 3.8317947912487886e-06, "loss": 0.7622, "step": 19632 }, { "epoch": 3.6305519420182124, "grad_norm": 0.9274680614471436, "learning_rate": 3.827790416073718e-06, "loss": 0.742, "step": 19633 }, { "epoch": 3.6307377810815833, "grad_norm": 0.8616178035736084, "learning_rate": 3.823788093529412e-06, "loss": 0.9817, "step": 19634 }, { "epoch": 3.6309236201449546, "grad_norm": 1.890286922454834, "learning_rate": 3.819787823701315e-06, "loss": 1.2465, "step": 19635 }, { "epoch": 3.6311094592083255, "grad_norm": 1.0613394975662231, "learning_rate": 3.815789606674769e-06, "loss": 0.7922, "step": 19636 }, { "epoch": 3.631295298271697, "grad_norm": 0.8788023591041565, "learning_rate": 3.8117934425351607e-06, "loss": 0.916, "step": 19637 }, { "epoch": 3.6314811373350677, "grad_norm": 1.0239942073822021, "learning_rate": 3.807799331367734e-06, "loss": 0.9387, "step": 19638 }, { "epoch": 3.631666976398439, "grad_norm": 1.172844648361206, "learning_rate": 3.8038072732577758e-06, "loss": 0.8622, "step": 19639 }, { "epoch": 3.63185281546181, "grad_norm": 0.8852987885475159, "learning_rate": 3.799817268290462e-06, "loss": 0.6051, "step": 19640 }, { "epoch": 3.6320386545251813, "grad_norm": 0.9584529399871826, "learning_rate": 3.7958293165509697e-06, "loss": 0.7786, "step": 19641 }, { "epoch": 3.632224493588552, "grad_norm": 1.499760627746582, "learning_rate": 3.791843418124408e-06, "loss": 0.8859, "step": 19642 }, { "epoch": 3.6324103326519235, "grad_norm": 0.9277597069740295, "learning_rate": 3.787859573095853e-06, "loss": 0.8963, "step": 19643 }, { "epoch": 3.632596171715295, "grad_norm": 0.8993749022483826, "learning_rate": 3.7838777815503047e-06, "loss": 0.8914, "step": 19644 }, { "epoch": 3.6327820107786657, "grad_norm": 1.0153673887252808, 
"learning_rate": 3.779898043572794e-06, "loss": 0.6897, "step": 19645 }, { "epoch": 3.6329678498420366, "grad_norm": 0.9434956312179565, "learning_rate": 3.7759203592482416e-06, "loss": 0.9235, "step": 19646 }, { "epoch": 3.633153688905408, "grad_norm": 0.9439644813537598, "learning_rate": 3.771944728661536e-06, "loss": 0.8132, "step": 19647 }, { "epoch": 3.6333395279687792, "grad_norm": 1.0923913717269897, "learning_rate": 3.767971151897531e-06, "loss": 1.0037, "step": 19648 }, { "epoch": 3.63352536703215, "grad_norm": 1.04251229763031, "learning_rate": 3.763999629041026e-06, "loss": 0.8628, "step": 19649 }, { "epoch": 3.633711206095521, "grad_norm": 0.9401928782463074, "learning_rate": 3.7600301601768305e-06, "loss": 0.9765, "step": 19650 }, { "epoch": 3.6338970451588923, "grad_norm": 0.8512163758277893, "learning_rate": 3.7560627453896324e-06, "loss": 0.6523, "step": 19651 }, { "epoch": 3.6340828842222637, "grad_norm": 1.129056453704834, "learning_rate": 3.7520973847640973e-06, "loss": 0.8549, "step": 19652 }, { "epoch": 3.6342687232856346, "grad_norm": 0.9493163824081421, "learning_rate": 3.7481340783848904e-06, "loss": 0.7761, "step": 19653 }, { "epoch": 3.634454562349006, "grad_norm": 0.8012655973434448, "learning_rate": 3.7441728263365783e-06, "loss": 0.6869, "step": 19654 }, { "epoch": 3.6346404014123768, "grad_norm": 0.9964479207992554, "learning_rate": 3.740213628703726e-06, "loss": 0.7729, "step": 19655 }, { "epoch": 3.634826240475748, "grad_norm": 0.9677879810333252, "learning_rate": 3.7362564855708106e-06, "loss": 0.8043, "step": 19656 }, { "epoch": 3.635012079539119, "grad_norm": 1.0772374868392944, "learning_rate": 3.73230139702232e-06, "loss": 0.875, "step": 19657 }, { "epoch": 3.6351979186024903, "grad_norm": 0.9329453110694885, "learning_rate": 3.728348363142653e-06, "loss": 0.824, "step": 19658 }, { "epoch": 3.6353837576658616, "grad_norm": 1.1014840602874756, "learning_rate": 3.724397384016165e-06, "loss": 0.6482, "step": 19659 }, { "epoch": 
3.6355695967292325, "grad_norm": 0.9583537578582764, "learning_rate": 3.7204484597271995e-06, "loss": 0.6224, "step": 19660 }, { "epoch": 3.6357554357926034, "grad_norm": 0.9297904968261719, "learning_rate": 3.7165015903600555e-06, "loss": 0.8584, "step": 19661 }, { "epoch": 3.6359412748559747, "grad_norm": 0.7306504845619202, "learning_rate": 3.7125567759989545e-06, "loss": 0.3111, "step": 19662 }, { "epoch": 3.636127113919346, "grad_norm": 0.9969332814216614, "learning_rate": 3.708614016728096e-06, "loss": 0.6899, "step": 19663 }, { "epoch": 3.636312952982717, "grad_norm": 1.0395742654800415, "learning_rate": 3.7046733126316126e-06, "loss": 0.8837, "step": 19664 }, { "epoch": 3.636498792046088, "grad_norm": 0.9263941645622253, "learning_rate": 3.700734663793637e-06, "loss": 0.7839, "step": 19665 }, { "epoch": 3.636684631109459, "grad_norm": 0.901239812374115, "learning_rate": 3.696798070298224e-06, "loss": 0.7272, "step": 19666 }, { "epoch": 3.6368704701728305, "grad_norm": 0.8450802564620972, "learning_rate": 3.692863532229407e-06, "loss": 0.8683, "step": 19667 }, { "epoch": 3.6370563092362014, "grad_norm": 1.290610432624817, "learning_rate": 3.6889310496711184e-06, "loss": 0.7479, "step": 19668 }, { "epoch": 3.6372421482995727, "grad_norm": 0.8974242806434631, "learning_rate": 3.6850006227073464e-06, "loss": 1.0162, "step": 19669 }, { "epoch": 3.6374279873629436, "grad_norm": 0.8204767107963562, "learning_rate": 3.6810722514219355e-06, "loss": 0.8115, "step": 19670 }, { "epoch": 3.637613826426315, "grad_norm": 0.8401081562042236, "learning_rate": 3.6771459358987513e-06, "loss": 0.6104, "step": 19671 }, { "epoch": 3.637799665489686, "grad_norm": 0.9978303909301758, "learning_rate": 3.673221676221594e-06, "loss": 0.8078, "step": 19672 }, { "epoch": 3.637985504553057, "grad_norm": 0.9485687613487244, "learning_rate": 3.669299472474219e-06, "loss": 0.8662, "step": 19673 }, { "epoch": 3.638171343616428, "grad_norm": 1.2545994520187378, "learning_rate": 
3.665379324740337e-06, "loss": 0.9099, "step": 19674 }, { "epoch": 3.6383571826797994, "grad_norm": 0.8143329620361328, "learning_rate": 3.6614612331036136e-06, "loss": 0.8263, "step": 19675 }, { "epoch": 3.6385430217431702, "grad_norm": 0.7915576100349426, "learning_rate": 3.6575451976476715e-06, "loss": 0.7497, "step": 19676 }, { "epoch": 3.6387288608065416, "grad_norm": 0.7817553877830505, "learning_rate": 3.6536312184560993e-06, "loss": 0.7649, "step": 19677 }, { "epoch": 3.638914699869913, "grad_norm": 1.7373839616775513, "learning_rate": 3.6497192956124414e-06, "loss": 1.3472, "step": 19678 }, { "epoch": 3.639100538933284, "grad_norm": 0.966449499130249, "learning_rate": 3.645809429200164e-06, "loss": 0.736, "step": 19679 }, { "epoch": 3.6392863779966547, "grad_norm": 1.2942341566085815, "learning_rate": 3.6419016193027455e-06, "loss": 0.7483, "step": 19680 }, { "epoch": 3.639472217060026, "grad_norm": 1.1573847532272339, "learning_rate": 3.637995866003574e-06, "loss": 0.7297, "step": 19681 }, { "epoch": 3.6396580561233973, "grad_norm": 1.0986378192901611, "learning_rate": 3.6340921693860276e-06, "loss": 1.0598, "step": 19682 }, { "epoch": 3.639843895186768, "grad_norm": 1.030543565750122, "learning_rate": 3.630190529533417e-06, "loss": 0.864, "step": 19683 }, { "epoch": 3.6400297342501395, "grad_norm": 0.9853333830833435, "learning_rate": 3.626290946528987e-06, "loss": 0.6901, "step": 19684 }, { "epoch": 3.6402155733135104, "grad_norm": 1.2523033618927002, "learning_rate": 3.622393420456016e-06, "loss": 0.5345, "step": 19685 }, { "epoch": 3.6404014123768818, "grad_norm": 0.8894228935241699, "learning_rate": 3.618497951397659e-06, "loss": 0.8965, "step": 19686 }, { "epoch": 3.6405872514402526, "grad_norm": 1.0515977144241333, "learning_rate": 3.614604539437072e-06, "loss": 0.8215, "step": 19687 }, { "epoch": 3.640773090503624, "grad_norm": 0.8954871892929077, "learning_rate": 3.6107131846573327e-06, "loss": 0.8407, "step": 19688 }, { "epoch": 
3.640958929566995, "grad_norm": 0.9122246503829956, "learning_rate": 3.6068238871415306e-06, "loss": 1.0491, "step": 19689 }, { "epoch": 3.641144768630366, "grad_norm": 0.9619038701057434, "learning_rate": 3.6029366469726433e-06, "loss": 0.8329, "step": 19690 }, { "epoch": 3.641330607693737, "grad_norm": 1.3435996770858765, "learning_rate": 3.599051464233638e-06, "loss": 1.2128, "step": 19691 }, { "epoch": 3.6415164467571084, "grad_norm": 0.9356776475906372, "learning_rate": 3.5951683390074487e-06, "loss": 0.9509, "step": 19692 }, { "epoch": 3.6417022858204797, "grad_norm": 1.0035337209701538, "learning_rate": 3.5912872713769753e-06, "loss": 0.694, "step": 19693 }, { "epoch": 3.6418881248838506, "grad_norm": 0.9547224044799805, "learning_rate": 3.5874082614250182e-06, "loss": 1.0477, "step": 19694 }, { "epoch": 3.6420739639472215, "grad_norm": 0.8454679846763611, "learning_rate": 3.5835313092343893e-06, "loss": 0.9012, "step": 19695 }, { "epoch": 3.642259803010593, "grad_norm": 0.962364673614502, "learning_rate": 3.579656414887811e-06, "loss": 0.7182, "step": 19696 }, { "epoch": 3.642445642073964, "grad_norm": 0.9948057532310486, "learning_rate": 3.575783578468017e-06, "loss": 0.8841, "step": 19697 }, { "epoch": 3.642631481137335, "grad_norm": 0.8334029912948608, "learning_rate": 3.5719128000576528e-06, "loss": 0.7499, "step": 19698 }, { "epoch": 3.642817320200706, "grad_norm": 0.984076976776123, "learning_rate": 3.56804407973933e-06, "loss": 0.8517, "step": 19699 }, { "epoch": 3.6430031592640773, "grad_norm": 1.1743614673614502, "learning_rate": 3.5641774175956156e-06, "loss": 1.0066, "step": 19700 }, { "epoch": 3.6431889983274486, "grad_norm": 0.9171334505081177, "learning_rate": 3.5603128137090435e-06, "loss": 0.903, "step": 19701 }, { "epoch": 3.6433748373908195, "grad_norm": 0.8026492595672607, "learning_rate": 3.5564502681621037e-06, "loss": 0.7917, "step": 19702 }, { "epoch": 3.643560676454191, "grad_norm": 1.0340454578399658, "learning_rate": 
3.55258978103723e-06, "loss": 0.8378, "step": 19703 }, { "epoch": 3.6437465155175617, "grad_norm": 0.9348717927932739, "learning_rate": 3.548731352416812e-06, "loss": 0.7884, "step": 19704 }, { "epoch": 3.643932354580933, "grad_norm": 0.9344277381896973, "learning_rate": 3.5448749823832174e-06, "loss": 0.7056, "step": 19705 }, { "epoch": 3.644118193644304, "grad_norm": 0.9544888138771057, "learning_rate": 3.5410206710187353e-06, "loss": 0.8964, "step": 19706 }, { "epoch": 3.6443040327076752, "grad_norm": 1.1061723232269287, "learning_rate": 3.537168418405634e-06, "loss": 0.851, "step": 19707 }, { "epoch": 3.644489871771046, "grad_norm": 0.9542726874351501, "learning_rate": 3.5333182246261367e-06, "loss": 0.8178, "step": 19708 }, { "epoch": 3.6446757108344174, "grad_norm": 0.9229307174682617, "learning_rate": 3.529470089762421e-06, "loss": 0.5897, "step": 19709 }, { "epoch": 3.6448615498977883, "grad_norm": 1.1158232688903809, "learning_rate": 3.5256240138966224e-06, "loss": 0.8865, "step": 19710 }, { "epoch": 3.6450473889611597, "grad_norm": 0.9804997444152832, "learning_rate": 3.5217799971107966e-06, "loss": 0.8154, "step": 19711 }, { "epoch": 3.645233228024531, "grad_norm": 0.9301099181175232, "learning_rate": 3.5179380394870345e-06, "loss": 0.9157, "step": 19712 }, { "epoch": 3.645419067087902, "grad_norm": 1.1942648887634277, "learning_rate": 3.514098141107314e-06, "loss": 0.8761, "step": 19713 }, { "epoch": 3.6456049061512728, "grad_norm": 1.1579983234405518, "learning_rate": 3.5102603020535916e-06, "loss": 0.8602, "step": 19714 }, { "epoch": 3.645790745214644, "grad_norm": 0.972847044467926, "learning_rate": 3.506424522407781e-06, "loss": 0.6708, "step": 19715 }, { "epoch": 3.6459765842780154, "grad_norm": 1.035806655883789, "learning_rate": 3.5025908022517374e-06, "loss": 0.9466, "step": 19716 }, { "epoch": 3.6461624233413863, "grad_norm": 0.9446421265602112, "learning_rate": 3.498759141667318e-06, "loss": 0.9707, "step": 19717 }, { "epoch": 
3.6463482624047576, "grad_norm": 1.0214331150054932, "learning_rate": 3.494929540736269e-06, "loss": 0.8919, "step": 19718 }, { "epoch": 3.6465341014681285, "grad_norm": 1.0789176225662231, "learning_rate": 3.491101999540336e-06, "loss": 0.812, "step": 19719 }, { "epoch": 3.6467199405315, "grad_norm": 0.8678821921348572, "learning_rate": 3.48727651816122e-06, "loss": 1.0487, "step": 19720 }, { "epoch": 3.6469057795948707, "grad_norm": 1.0004359483718872, "learning_rate": 3.4834530966805667e-06, "loss": 0.8156, "step": 19721 }, { "epoch": 3.647091618658242, "grad_norm": 0.9830246567726135, "learning_rate": 3.4796317351799777e-06, "loss": 0.5935, "step": 19722 }, { "epoch": 3.647277457721613, "grad_norm": 1.0413470268249512, "learning_rate": 3.47581243374101e-06, "loss": 0.7936, "step": 19723 }, { "epoch": 3.6474632967849843, "grad_norm": 0.975347638130188, "learning_rate": 3.4719951924451767e-06, "loss": 1.0661, "step": 19724 }, { "epoch": 3.647649135848355, "grad_norm": 1.0321831703186035, "learning_rate": 3.4681800113739783e-06, "loss": 0.734, "step": 19725 }, { "epoch": 3.6478349749117265, "grad_norm": 1.9831745624542236, "learning_rate": 3.4643668906088056e-06, "loss": 1.3494, "step": 19726 }, { "epoch": 3.648020813975098, "grad_norm": 0.9179535508155823, "learning_rate": 3.4605558302310715e-06, "loss": 0.8206, "step": 19727 }, { "epoch": 3.6482066530384687, "grad_norm": 0.9104013442993164, "learning_rate": 3.4567468303220886e-06, "loss": 0.7554, "step": 19728 }, { "epoch": 3.6483924921018396, "grad_norm": 0.994432806968689, "learning_rate": 3.4529398909631803e-06, "loss": 0.8357, "step": 19729 }, { "epoch": 3.648578331165211, "grad_norm": 0.914111316204071, "learning_rate": 3.449135012235594e-06, "loss": 0.7204, "step": 19730 }, { "epoch": 3.6487641702285822, "grad_norm": 0.9353268146514893, "learning_rate": 3.4453321942205186e-06, "loss": 0.8731, "step": 19731 }, { "epoch": 3.648950009291953, "grad_norm": 0.8555099964141846, "learning_rate": 
3.4415314369991458e-06, "loss": 0.9585, "step": 19732 }, { "epoch": 3.6491358483553245, "grad_norm": 0.9863101243972778, "learning_rate": 3.437732740652566e-06, "loss": 0.8062, "step": 19733 }, { "epoch": 3.6493216874186953, "grad_norm": 0.9781383872032166, "learning_rate": 3.4339361052618923e-06, "loss": 0.7702, "step": 19734 }, { "epoch": 3.6495075264820667, "grad_norm": 0.9102252721786499, "learning_rate": 3.4301415309081376e-06, "loss": 0.8075, "step": 19735 }, { "epoch": 3.6496933655454376, "grad_norm": 0.8706068992614746, "learning_rate": 3.426349017672281e-06, "loss": 0.7584, "step": 19736 }, { "epoch": 3.649879204608809, "grad_norm": 1.0591397285461426, "learning_rate": 3.422558565635281e-06, "loss": 0.7617, "step": 19737 }, { "epoch": 3.6500650436721798, "grad_norm": 0.8889842629432678, "learning_rate": 3.418770174878039e-06, "loss": 0.5825, "step": 19738 }, { "epoch": 3.650250882735551, "grad_norm": 1.0698341131210327, "learning_rate": 3.414983845481401e-06, "loss": 0.7684, "step": 19739 }, { "epoch": 3.650436721798922, "grad_norm": 0.9470606446266174, "learning_rate": 3.411199577526192e-06, "loss": 0.9922, "step": 19740 }, { "epoch": 3.6506225608622933, "grad_norm": 0.9126014113426208, "learning_rate": 3.40741737109318e-06, "loss": 0.8621, "step": 19741 }, { "epoch": 3.6508083999256646, "grad_norm": 1.010872483253479, "learning_rate": 3.4036372262630787e-06, "loss": 0.7708, "step": 19742 }, { "epoch": 3.6509942389890355, "grad_norm": 1.04227614402771, "learning_rate": 3.3998591431165682e-06, "loss": 0.8666, "step": 19743 }, { "epoch": 3.6511800780524064, "grad_norm": 1.3853538036346436, "learning_rate": 3.3960831217342836e-06, "loss": 0.7814, "step": 19744 }, { "epoch": 3.6513659171157777, "grad_norm": 0.9948869347572327, "learning_rate": 3.3923091621968493e-06, "loss": 0.6507, "step": 19745 }, { "epoch": 3.651551756179149, "grad_norm": 1.059181809425354, "learning_rate": 3.388537264584779e-06, "loss": 0.9442, "step": 19746 }, { "epoch": 
3.65173759524252, "grad_norm": 0.9141784906387329, "learning_rate": 3.384767428978597e-06, "loss": 0.6792, "step": 19747 }, { "epoch": 3.651923434305891, "grad_norm": 0.9963765740394592, "learning_rate": 3.3809996554587497e-06, "loss": 0.8995, "step": 19748 }, { "epoch": 3.652109273369262, "grad_norm": 1.1455966234207153, "learning_rate": 3.377233944105662e-06, "loss": 0.8618, "step": 19749 }, { "epoch": 3.6522951124326335, "grad_norm": 0.9624447822570801, "learning_rate": 3.3734702949997034e-06, "loss": 0.975, "step": 19750 }, { "epoch": 3.6524809514960044, "grad_norm": 1.3912914991378784, "learning_rate": 3.3697087082211975e-06, "loss": 1.3396, "step": 19751 }, { "epoch": 3.6526667905593757, "grad_norm": 1.043426752090454, "learning_rate": 3.3659491838504364e-06, "loss": 0.8956, "step": 19752 }, { "epoch": 3.6528526296227466, "grad_norm": 0.8907557725906372, "learning_rate": 3.3621917219676666e-06, "loss": 0.7101, "step": 19753 }, { "epoch": 3.653038468686118, "grad_norm": 1.01949942111969, "learning_rate": 3.358436322653069e-06, "loss": 0.392, "step": 19754 }, { "epoch": 3.653224307749489, "grad_norm": 1.0524967908859253, "learning_rate": 3.3546829859868123e-06, "loss": 0.855, "step": 19755 }, { "epoch": 3.65341014681286, "grad_norm": 0.8573651313781738, "learning_rate": 3.350931712048988e-06, "loss": 0.8067, "step": 19756 }, { "epoch": 3.653595985876231, "grad_norm": 1.0655800104141235, "learning_rate": 3.3471825009196766e-06, "loss": 0.6145, "step": 19757 }, { "epoch": 3.6537818249396024, "grad_norm": 0.8858900666236877, "learning_rate": 3.3434353526789032e-06, "loss": 0.906, "step": 19758 }, { "epoch": 3.6539676640029732, "grad_norm": 1.1333801746368408, "learning_rate": 3.3396902674066033e-06, "loss": 1.0519, "step": 19759 }, { "epoch": 3.6541535030663446, "grad_norm": 1.1849747896194458, "learning_rate": 3.335947245182769e-06, "loss": 1.061, "step": 19760 }, { "epoch": 3.654339342129716, "grad_norm": 0.9107005596160889, "learning_rate": 
3.332206286087247e-06, "loss": 0.7066, "step": 19761 }, { "epoch": 3.654525181193087, "grad_norm": 0.9740599393844604, "learning_rate": 3.3284673901998964e-06, "loss": 0.6674, "step": 19762 }, { "epoch": 3.6547110202564577, "grad_norm": 0.8133429884910583, "learning_rate": 3.324730557600497e-06, "loss": 0.6233, "step": 19763 }, { "epoch": 3.654896859319829, "grad_norm": 0.9082481861114502, "learning_rate": 3.3209957883688304e-06, "loss": 0.9023, "step": 19764 }, { "epoch": 3.6550826983832003, "grad_norm": 0.939697802066803, "learning_rate": 3.3172630825846095e-06, "loss": 0.7883, "step": 19765 }, { "epoch": 3.655268537446571, "grad_norm": 0.9083552360534668, "learning_rate": 3.3135324403274935e-06, "loss": 1.0788, "step": 19766 }, { "epoch": 3.6554543765099425, "grad_norm": 0.9659096598625183, "learning_rate": 3.3098038616771075e-06, "loss": 0.869, "step": 19767 }, { "epoch": 3.6556402155733134, "grad_norm": 1.592389702796936, "learning_rate": 3.306077346713021e-06, "loss": 1.145, "step": 19768 }, { "epoch": 3.6558260546366848, "grad_norm": 1.1005619764328003, "learning_rate": 3.302352895514793e-06, "loss": 0.9374, "step": 19769 }, { "epoch": 3.6560118937000556, "grad_norm": 0.9759562611579895, "learning_rate": 3.298630508161904e-06, "loss": 0.893, "step": 19770 }, { "epoch": 3.656197732763427, "grad_norm": 0.9161255359649658, "learning_rate": 3.2949101847337792e-06, "loss": 0.7266, "step": 19771 }, { "epoch": 3.656383571826798, "grad_norm": 0.9906998872756958, "learning_rate": 3.2911919253098664e-06, "loss": 0.859, "step": 19772 }, { "epoch": 3.656569410890169, "grad_norm": 0.9362435936927795, "learning_rate": 3.2874757299694913e-06, "loss": 1.032, "step": 19773 }, { "epoch": 3.65675524995354, "grad_norm": 1.1768964529037476, "learning_rate": 3.2837615987919902e-06, "loss": 0.7615, "step": 19774 }, { "epoch": 3.6569410890169114, "grad_norm": 0.9677229523658752, "learning_rate": 3.2800495318566105e-06, "loss": 0.7366, "step": 19775 }, { "epoch": 3.6571269280802827, 
"grad_norm": 1.0703089237213135, "learning_rate": 3.2763395292426004e-06, "loss": 0.8577, "step": 19776 }, { "epoch": 3.6573127671436536, "grad_norm": 0.8955932855606079, "learning_rate": 3.2726315910291516e-06, "loss": 0.6111, "step": 19777 }, { "epoch": 3.6574986062070245, "grad_norm": 1.3545371294021606, "learning_rate": 3.268925717295379e-06, "loss": 0.8886, "step": 19778 }, { "epoch": 3.657684445270396, "grad_norm": 0.9368428587913513, "learning_rate": 3.265221908120386e-06, "loss": 0.8056, "step": 19779 }, { "epoch": 3.657870284333767, "grad_norm": 0.8998544812202454, "learning_rate": 3.2615201635832428e-06, "loss": 0.7344, "step": 19780 }, { "epoch": 3.658056123397138, "grad_norm": 0.9378226399421692, "learning_rate": 3.257820483762941e-06, "loss": 0.9107, "step": 19781 }, { "epoch": 3.658241962460509, "grad_norm": 1.0062965154647827, "learning_rate": 3.2541228687384406e-06, "loss": 1.3038, "step": 19782 }, { "epoch": 3.6584278015238803, "grad_norm": 0.8791436553001404, "learning_rate": 3.2504273185886447e-06, "loss": 0.9039, "step": 19783 }, { "epoch": 3.6586136405872516, "grad_norm": 0.869947075843811, "learning_rate": 3.246733833392479e-06, "loss": 0.8601, "step": 19784 }, { "epoch": 3.6587994796506225, "grad_norm": 0.8931410312652588, "learning_rate": 3.2430424132287253e-06, "loss": 0.9305, "step": 19785 }, { "epoch": 3.658985318713994, "grad_norm": 0.9086544513702393, "learning_rate": 3.2393530581761977e-06, "loss": 0.9764, "step": 19786 }, { "epoch": 3.6591711577773647, "grad_norm": 0.839087188243866, "learning_rate": 3.235665768313634e-06, "loss": 0.8387, "step": 19787 }, { "epoch": 3.659356996840736, "grad_norm": 1.050937533378601, "learning_rate": 3.231980543719715e-06, "loss": 0.814, "step": 19788 }, { "epoch": 3.659542835904107, "grad_norm": 1.0313888788223267, "learning_rate": 3.2282973844731224e-06, "loss": 0.7997, "step": 19789 }, { "epoch": 3.6597286749674782, "grad_norm": 1.1018502712249756, "learning_rate": 3.22461629065246e-06, "loss": 
1.0423, "step": 19790 }, { "epoch": 3.6599145140308496, "grad_norm": 1.0087827444076538, "learning_rate": 3.2209372623362765e-06, "loss": 0.8628, "step": 19791 }, { "epoch": 3.6601003530942204, "grad_norm": 0.9059208035469055, "learning_rate": 3.21726029960312e-06, "loss": 0.7809, "step": 19792 }, { "epoch": 3.6602861921575913, "grad_norm": 0.99634850025177, "learning_rate": 3.2135854025314495e-06, "loss": 0.8574, "step": 19793 }, { "epoch": 3.6604720312209627, "grad_norm": 1.172855019569397, "learning_rate": 3.2099125711997134e-06, "loss": 0.6191, "step": 19794 }, { "epoch": 3.660657870284334, "grad_norm": 0.9979076981544495, "learning_rate": 3.2062418056862833e-06, "loss": 1.0289, "step": 19795 }, { "epoch": 3.660843709347705, "grad_norm": 0.9946637153625488, "learning_rate": 3.2025731060695174e-06, "loss": 0.8253, "step": 19796 }, { "epoch": 3.6610295484110758, "grad_norm": 0.9385978579521179, "learning_rate": 3.198906472427732e-06, "loss": 0.9922, "step": 19797 }, { "epoch": 3.661215387474447, "grad_norm": 0.7791514992713928, "learning_rate": 3.1952419048391635e-06, "loss": 0.6416, "step": 19798 }, { "epoch": 3.6614012265378184, "grad_norm": 1.0495576858520508, "learning_rate": 3.191579403382028e-06, "loss": 0.8827, "step": 19799 }, { "epoch": 3.6615870656011893, "grad_norm": 1.1099753379821777, "learning_rate": 3.187918968134507e-06, "loss": 0.9144, "step": 19800 }, { "epoch": 3.6617729046645606, "grad_norm": 1.0003912448883057, "learning_rate": 3.1842605991747154e-06, "loss": 0.7915, "step": 19801 }, { "epoch": 3.6619587437279315, "grad_norm": 0.793580949306488, "learning_rate": 3.180604296580747e-06, "loss": 0.8196, "step": 19802 }, { "epoch": 3.662144582791303, "grad_norm": 0.8666147589683533, "learning_rate": 3.1769500604306167e-06, "loss": 0.7983, "step": 19803 }, { "epoch": 3.6623304218546737, "grad_norm": 1.5029810667037964, "learning_rate": 3.1732978908023405e-06, "loss": 1.3051, "step": 19804 }, { "epoch": 3.662516260918045, "grad_norm": 
0.959281861782074, "learning_rate": 3.169647787773866e-06, "loss": 0.7916, "step": 19805 }, { "epoch": 3.662702099981416, "grad_norm": 1.0968326330184937, "learning_rate": 3.1659997514230768e-06, "loss": 0.9021, "step": 19806 }, { "epoch": 3.6628879390447873, "grad_norm": 1.0215089321136475, "learning_rate": 3.162353781827865e-06, "loss": 0.8865, "step": 19807 }, { "epoch": 3.663073778108158, "grad_norm": 1.0578625202178955, "learning_rate": 3.158709879066013e-06, "loss": 0.8077, "step": 19808 }, { "epoch": 3.6632596171715295, "grad_norm": 1.015753149986267, "learning_rate": 3.155068043215326e-06, "loss": 0.8311, "step": 19809 }, { "epoch": 3.663445456234901, "grad_norm": 0.956145703792572, "learning_rate": 3.1514282743535184e-06, "loss": 0.8834, "step": 19810 }, { "epoch": 3.6636312952982717, "grad_norm": 0.9006800055503845, "learning_rate": 3.1477905725582623e-06, "loss": 0.7183, "step": 19811 }, { "epoch": 3.6638171343616426, "grad_norm": 0.8815486431121826, "learning_rate": 3.144154937907229e-06, "loss": 0.785, "step": 19812 }, { "epoch": 3.664002973425014, "grad_norm": 0.9718546867370605, "learning_rate": 3.1405213704780002e-06, "loss": 0.7752, "step": 19813 }, { "epoch": 3.6641888124883852, "grad_norm": 0.9407109022140503, "learning_rate": 3.1368898703481143e-06, "loss": 0.7941, "step": 19814 }, { "epoch": 3.664374651551756, "grad_norm": 0.9799614548683167, "learning_rate": 3.133260437595087e-06, "loss": 0.8844, "step": 19815 }, { "epoch": 3.6645604906151275, "grad_norm": 1.2716703414916992, "learning_rate": 3.1296330722963897e-06, "loss": 0.7302, "step": 19816 }, { "epoch": 3.6647463296784983, "grad_norm": 0.8386351466178894, "learning_rate": 3.126007774529438e-06, "loss": 0.7081, "step": 19817 }, { "epoch": 3.6649321687418697, "grad_norm": 1.0708590745925903, "learning_rate": 3.1223845443716037e-06, "loss": 0.9288, "step": 19818 }, { "epoch": 3.6651180078052406, "grad_norm": 1.1155301332473755, "learning_rate": 3.118763381900225e-06, "loss": 0.998, "step": 
19819 }, { "epoch": 3.665303846868612, "grad_norm": 0.9290077090263367, "learning_rate": 3.1151442871925952e-06, "loss": 0.5783, "step": 19820 }, { "epoch": 3.6654896859319828, "grad_norm": 0.8495419025421143, "learning_rate": 3.1115272603259416e-06, "loss": 0.7152, "step": 19821 }, { "epoch": 3.665675524995354, "grad_norm": 0.8739228844642639, "learning_rate": 3.10791230137748e-06, "loss": 0.6829, "step": 19822 }, { "epoch": 3.665861364058725, "grad_norm": 0.8517270088195801, "learning_rate": 3.1042994104243494e-06, "loss": 0.9774, "step": 19823 }, { "epoch": 3.6660472031220963, "grad_norm": 1.1146289110183716, "learning_rate": 3.100688587543665e-06, "loss": 1.083, "step": 19824 }, { "epoch": 3.6662330421854676, "grad_norm": 0.885505735874176, "learning_rate": 3.0970798328125105e-06, "loss": 0.6914, "step": 19825 }, { "epoch": 3.6664188812488385, "grad_norm": 0.7472629547119141, "learning_rate": 3.0934731463078904e-06, "loss": 0.7286, "step": 19826 }, { "epoch": 3.6666047203122094, "grad_norm": 0.8759147524833679, "learning_rate": 3.089868528106776e-06, "loss": 0.6962, "step": 19827 }, { "epoch": 3.6667905593755807, "grad_norm": 0.9485564231872559, "learning_rate": 3.086265978286107e-06, "loss": 0.9121, "step": 19828 }, { "epoch": 3.666976398438952, "grad_norm": 1.0498046875, "learning_rate": 3.0826654969227987e-06, "loss": 0.788, "step": 19829 }, { "epoch": 3.667162237502323, "grad_norm": 1.0976457595825195, "learning_rate": 3.0790670840936788e-06, "loss": 0.6969, "step": 19830 }, { "epoch": 3.667348076565694, "grad_norm": 0.9646531939506531, "learning_rate": 3.075470739875541e-06, "loss": 0.9687, "step": 19831 }, { "epoch": 3.667533915629065, "grad_norm": 0.9509338736534119, "learning_rate": 3.0718764643451582e-06, "loss": 0.8766, "step": 19832 }, { "epoch": 3.6677197546924365, "grad_norm": 1.0596628189086914, "learning_rate": 3.068284257579235e-06, "loss": 0.8185, "step": 19833 }, { "epoch": 3.6679055937558074, "grad_norm": 0.8546445965766907, "learning_rate": 
3.064694119654443e-06, "loss": 0.6911, "step": 19834 }, { "epoch": 3.6680914328191787, "grad_norm": 0.8668458461761475, "learning_rate": 3.061106050647389e-06, "loss": 0.7985, "step": 19835 }, { "epoch": 3.6682772718825496, "grad_norm": 0.9155436754226685, "learning_rate": 3.057520050634699e-06, "loss": 0.8708, "step": 19836 }, { "epoch": 3.668463110945921, "grad_norm": 0.913185715675354, "learning_rate": 3.0539361196928682e-06, "loss": 0.7792, "step": 19837 }, { "epoch": 3.668648950009292, "grad_norm": 1.0407506227493286, "learning_rate": 3.050354257898402e-06, "loss": 0.7501, "step": 19838 }, { "epoch": 3.668834789072663, "grad_norm": 1.0762405395507812, "learning_rate": 3.0467744653277617e-06, "loss": 0.8153, "step": 19839 }, { "epoch": 3.6690206281360345, "grad_norm": 0.9334627389907837, "learning_rate": 3.0431967420573414e-06, "loss": 0.8077, "step": 19840 }, { "epoch": 3.6692064671994054, "grad_norm": 1.1207897663116455, "learning_rate": 3.0396210881635023e-06, "loss": 0.8184, "step": 19841 }, { "epoch": 3.6693923062627762, "grad_norm": 1.1474028825759888, "learning_rate": 3.036047503722561e-06, "loss": 0.945, "step": 19842 }, { "epoch": 3.6695781453261476, "grad_norm": 0.8330485820770264, "learning_rate": 3.032475988810779e-06, "loss": 0.7401, "step": 19843 }, { "epoch": 3.669763984389519, "grad_norm": 0.9342332482337952, "learning_rate": 3.028906543504406e-06, "loss": 0.731, "step": 19844 }, { "epoch": 3.66994982345289, "grad_norm": 0.9211150407791138, "learning_rate": 3.025339167879615e-06, "loss": 0.7433, "step": 19845 }, { "epoch": 3.6701356625162607, "grad_norm": 0.8624177575111389, "learning_rate": 3.021773862012545e-06, "loss": 0.8936, "step": 19846 }, { "epoch": 3.670321501579632, "grad_norm": 0.8853157758712769, "learning_rate": 3.0182106259792676e-06, "loss": 0.9055, "step": 19847 }, { "epoch": 3.6705073406430033, "grad_norm": 0.8142578601837158, "learning_rate": 3.0146494598558674e-06, "loss": 0.6743, "step": 19848 }, { "epoch": 3.670693179706374, 
"grad_norm": 0.9826507568359375, "learning_rate": 3.0110903637183497e-06, "loss": 0.7625, "step": 19849 }, { "epoch": 3.6708790187697455, "grad_norm": 0.9327687621116638, "learning_rate": 3.007533337642676e-06, "loss": 0.7594, "step": 19850 }, { "epoch": 3.6710648578331164, "grad_norm": 0.9218081831932068, "learning_rate": 3.0039783817047418e-06, "loss": 0.7935, "step": 19851 }, { "epoch": 3.6712506968964878, "grad_norm": 1.0486152172088623, "learning_rate": 3.000425495980441e-06, "loss": 0.8762, "step": 19852 }, { "epoch": 3.6714365359598586, "grad_norm": 0.8621472120285034, "learning_rate": 2.996874680545603e-06, "loss": 0.8295, "step": 19853 }, { "epoch": 3.67162237502323, "grad_norm": 1.0741668939590454, "learning_rate": 2.99332593547601e-06, "loss": 0.9637, "step": 19854 }, { "epoch": 3.671808214086601, "grad_norm": 0.8611621856689453, "learning_rate": 2.989779260847403e-06, "loss": 0.7141, "step": 19855 }, { "epoch": 3.671994053149972, "grad_norm": 0.8329762816429138, "learning_rate": 2.986234656735487e-06, "loss": 0.8697, "step": 19856 }, { "epoch": 3.672179892213343, "grad_norm": 0.9581859707832336, "learning_rate": 2.982692123215913e-06, "loss": 0.9079, "step": 19857 }, { "epoch": 3.6723657312767144, "grad_norm": 0.9887629151344299, "learning_rate": 2.9791516603642765e-06, "loss": 0.8446, "step": 19858 }, { "epoch": 3.6725515703400857, "grad_norm": 0.9503438472747803, "learning_rate": 2.9756132682561387e-06, "loss": 0.9235, "step": 19859 }, { "epoch": 3.6727374094034566, "grad_norm": 0.898719847202301, "learning_rate": 2.9720769469670617e-06, "loss": 0.7815, "step": 19860 }, { "epoch": 3.6729232484668275, "grad_norm": 0.975048303604126, "learning_rate": 2.968542696572485e-06, "loss": 0.8249, "step": 19861 }, { "epoch": 3.673109087530199, "grad_norm": 0.8516594171524048, "learning_rate": 2.9650105171478593e-06, "loss": 0.7859, "step": 19862 }, { "epoch": 3.67329492659357, "grad_norm": 1.0244548320770264, "learning_rate": 2.961480408768558e-06, "loss": 
0.8145, "step": 19863 }, { "epoch": 3.673480765656941, "grad_norm": 0.9714505672454834, "learning_rate": 2.957952371509931e-06, "loss": 0.9863, "step": 19864 }, { "epoch": 3.6736666047203124, "grad_norm": 0.9386559724807739, "learning_rate": 2.954426405447297e-06, "loss": 0.6725, "step": 19865 }, { "epoch": 3.6738524437836833, "grad_norm": 0.9028299450874329, "learning_rate": 2.9509025106558843e-06, "loss": 0.9663, "step": 19866 }, { "epoch": 3.6740382828470546, "grad_norm": 1.3612467050552368, "learning_rate": 2.9473806872108988e-06, "loss": 0.951, "step": 19867 }, { "epoch": 3.6742241219104255, "grad_norm": 0.9654171466827393, "learning_rate": 2.9438609351875366e-06, "loss": 0.9266, "step": 19868 }, { "epoch": 3.674409960973797, "grad_norm": 0.8086720705032349, "learning_rate": 2.9403432546609043e-06, "loss": 0.7396, "step": 19869 }, { "epoch": 3.6745958000371677, "grad_norm": 0.915579617023468, "learning_rate": 2.936827645706086e-06, "loss": 0.6547, "step": 19870 }, { "epoch": 3.674781639100539, "grad_norm": 0.8911121487617493, "learning_rate": 2.933314108398111e-06, "loss": 0.7598, "step": 19871 }, { "epoch": 3.67496747816391, "grad_norm": 0.9755234718322754, "learning_rate": 2.929802642811985e-06, "loss": 0.9399, "step": 19872 }, { "epoch": 3.6751533172272812, "grad_norm": 0.9139371514320374, "learning_rate": 2.926293249022638e-06, "loss": 0.9042, "step": 19873 }, { "epoch": 3.6753391562906526, "grad_norm": 1.0513211488723755, "learning_rate": 2.9227859271049763e-06, "loss": 1.0011, "step": 19874 }, { "epoch": 3.6755249953540234, "grad_norm": 0.884815514087677, "learning_rate": 2.919280677133851e-06, "loss": 0.7407, "step": 19875 }, { "epoch": 3.6757108344173943, "grad_norm": 0.8955864906311035, "learning_rate": 2.9157774991840915e-06, "loss": 0.8038, "step": 19876 }, { "epoch": 3.6758966734807657, "grad_norm": 1.3236089944839478, "learning_rate": 2.9122763933304486e-06, "loss": 1.0079, "step": 19877 }, { "epoch": 3.676082512544137, "grad_norm": 
0.8637065291404724, "learning_rate": 2.908777359647674e-06, "loss": 0.7272, "step": 19878 }, { "epoch": 3.676268351607508, "grad_norm": 1.0538060665130615, "learning_rate": 2.9052803982104083e-06, "loss": 0.6253, "step": 19879 }, { "epoch": 3.6764541906708788, "grad_norm": 1.1004332304000854, "learning_rate": 2.901785509093313e-06, "loss": 0.816, "step": 19880 }, { "epoch": 3.67664002973425, "grad_norm": 1.302672266960144, "learning_rate": 2.8982926923709962e-06, "loss": 1.2928, "step": 19881 }, { "epoch": 3.6768258687976214, "grad_norm": 0.9969615936279297, "learning_rate": 2.8948019481179757e-06, "loss": 0.9255, "step": 19882 }, { "epoch": 3.6770117078609923, "grad_norm": 0.9608601927757263, "learning_rate": 2.8913132764087693e-06, "loss": 0.8857, "step": 19883 }, { "epoch": 3.6771975469243636, "grad_norm": 0.8944962024688721, "learning_rate": 2.8878266773178396e-06, "loss": 0.9644, "step": 19884 }, { "epoch": 3.6773833859877345, "grad_norm": 1.051511526107788, "learning_rate": 2.8843421509196057e-06, "loss": 1.0011, "step": 19885 }, { "epoch": 3.677569225051106, "grad_norm": 0.9785056710243225, "learning_rate": 2.8808596972884296e-06, "loss": 0.949, "step": 19886 }, { "epoch": 3.6777550641144767, "grad_norm": 0.9483036398887634, "learning_rate": 2.8773793164986298e-06, "loss": 0.8208, "step": 19887 }, { "epoch": 3.677940903177848, "grad_norm": 1.0405542850494385, "learning_rate": 2.873901008624502e-06, "loss": 0.7559, "step": 19888 }, { "epoch": 3.678126742241219, "grad_norm": 1.360510230064392, "learning_rate": 2.870424773740288e-06, "loss": 0.9153, "step": 19889 }, { "epoch": 3.6783125813045903, "grad_norm": 0.8900576829910278, "learning_rate": 2.8669506119201716e-06, "loss": 0.9084, "step": 19890 }, { "epoch": 3.678498420367961, "grad_norm": 0.9248895049095154, "learning_rate": 2.8634785232382944e-06, "loss": 0.9498, "step": 19891 }, { "epoch": 3.6786842594313325, "grad_norm": 1.0271313190460205, "learning_rate": 2.8600085077687965e-06, "loss": 0.7387, 
"step": 19892 }, { "epoch": 3.678870098494704, "grad_norm": 0.8631148934364319, "learning_rate": 2.8565405655857192e-06, "loss": 0.8698, "step": 19893 }, { "epoch": 3.6790559375580747, "grad_norm": 0.9825854897499084, "learning_rate": 2.8530746967630695e-06, "loss": 0.7327, "step": 19894 }, { "epoch": 3.6792417766214456, "grad_norm": 0.8682910203933716, "learning_rate": 2.8496109013748217e-06, "loss": 0.6029, "step": 19895 }, { "epoch": 3.679427615684817, "grad_norm": 0.9333560466766357, "learning_rate": 2.846149179494928e-06, "loss": 0.7099, "step": 19896 }, { "epoch": 3.6796134547481882, "grad_norm": 1.0085049867630005, "learning_rate": 2.8426895311972513e-06, "loss": 0.6731, "step": 19897 }, { "epoch": 3.679799293811559, "grad_norm": 0.9532813429832458, "learning_rate": 2.8392319565556436e-06, "loss": 0.9296, "step": 19898 }, { "epoch": 3.6799851328749305, "grad_norm": 0.8818166851997375, "learning_rate": 2.835776455643879e-06, "loss": 0.7739, "step": 19899 }, { "epoch": 3.6801709719383013, "grad_norm": 0.970205545425415, "learning_rate": 2.8323230285357327e-06, "loss": 0.7891, "step": 19900 }, { "epoch": 3.6803568110016727, "grad_norm": 0.8429179787635803, "learning_rate": 2.8288716753049005e-06, "loss": 0.7411, "step": 19901 }, { "epoch": 3.6805426500650436, "grad_norm": 1.096352219581604, "learning_rate": 2.825422396025068e-06, "loss": 0.872, "step": 19902 }, { "epoch": 3.680728489128415, "grad_norm": 0.8698795437812805, "learning_rate": 2.82197519076981e-06, "loss": 0.7923, "step": 19903 }, { "epoch": 3.6809143281917858, "grad_norm": 0.8768172264099121, "learning_rate": 2.818530059612745e-06, "loss": 0.8951, "step": 19904 }, { "epoch": 3.681100167255157, "grad_norm": 0.9791592359542847, "learning_rate": 2.8150870026273922e-06, "loss": 0.6247, "step": 19905 }, { "epoch": 3.681286006318528, "grad_norm": 0.9714826941490173, "learning_rate": 2.8116460198872264e-06, "loss": 1.048, "step": 19906 }, { "epoch": 3.6814718453818993, "grad_norm": 0.9335835576057434, 
"learning_rate": 2.8082071114656772e-06, "loss": 0.7909, "step": 19907 }, { "epoch": 3.6816576844452706, "grad_norm": 0.97763991355896, "learning_rate": 2.8047702774361863e-06, "loss": 0.9408, "step": 19908 }, { "epoch": 3.6818435235086415, "grad_norm": 0.8466770052909851, "learning_rate": 2.8013355178720612e-06, "loss": 0.8742, "step": 19909 }, { "epoch": 3.6820293625720124, "grad_norm": 1.0056238174438477, "learning_rate": 2.7979028328466328e-06, "loss": 0.7738, "step": 19910 }, { "epoch": 3.6822152016353837, "grad_norm": 0.8779562711715698, "learning_rate": 2.794472222433153e-06, "loss": 0.8267, "step": 19911 }, { "epoch": 3.682401040698755, "grad_norm": 0.9409157633781433, "learning_rate": 2.791043686704875e-06, "loss": 0.9228, "step": 19912 }, { "epoch": 3.682586879762126, "grad_norm": 0.95030277967453, "learning_rate": 2.78761722573494e-06, "loss": 0.8736, "step": 19913 }, { "epoch": 3.6827727188254973, "grad_norm": 1.142154335975647, "learning_rate": 2.7841928395965e-06, "loss": 0.902, "step": 19914 }, { "epoch": 3.682958557888868, "grad_norm": 0.8686795234680176, "learning_rate": 2.7807705283626194e-06, "loss": 0.8466, "step": 19915 }, { "epoch": 3.6831443969522395, "grad_norm": 1.1532697677612305, "learning_rate": 2.7773502921063733e-06, "loss": 0.8168, "step": 19916 }, { "epoch": 3.6833302360156104, "grad_norm": 1.1650819778442383, "learning_rate": 2.773932130900736e-06, "loss": 0.758, "step": 19917 }, { "epoch": 3.6835160750789817, "grad_norm": 0.8981561064720154, "learning_rate": 2.770516044818683e-06, "loss": 0.7541, "step": 19918 }, { "epoch": 3.6837019141423526, "grad_norm": 1.029568076133728, "learning_rate": 2.7671020339330888e-06, "loss": 0.7395, "step": 19919 }, { "epoch": 3.683887753205724, "grad_norm": 1.0341814756393433, "learning_rate": 2.7636900983168625e-06, "loss": 0.8342, "step": 19920 }, { "epoch": 3.684073592269095, "grad_norm": 1.0072674751281738, "learning_rate": 2.7602802380428007e-06, "loss": 0.882, "step": 19921 }, { "epoch": 
3.684259431332466, "grad_norm": 0.8727888464927673, "learning_rate": 2.756872453183679e-06, "loss": 0.7647, "step": 19922 }, { "epoch": 3.6844452703958375, "grad_norm": 0.9565852284431458, "learning_rate": 2.7534667438122387e-06, "loss": 0.6707, "step": 19923 }, { "epoch": 3.6846311094592084, "grad_norm": 0.9715271592140198, "learning_rate": 2.7500631100011888e-06, "loss": 0.8839, "step": 19924 }, { "epoch": 3.6848169485225792, "grad_norm": 0.8984237909317017, "learning_rate": 2.7466615518231486e-06, "loss": 0.8218, "step": 19925 }, { "epoch": 3.6850027875859506, "grad_norm": 1.039727807044983, "learning_rate": 2.743262069350716e-06, "loss": 0.9666, "step": 19926 }, { "epoch": 3.685188626649322, "grad_norm": 1.0353304147720337, "learning_rate": 2.739864662656455e-06, "loss": 0.7697, "step": 19927 }, { "epoch": 3.685374465712693, "grad_norm": 1.000001311302185, "learning_rate": 2.7364693318128744e-06, "loss": 0.7935, "step": 19928 }, { "epoch": 3.6855603047760637, "grad_norm": 0.905925452709198, "learning_rate": 2.7330760768924603e-06, "loss": 0.7919, "step": 19929 }, { "epoch": 3.685746143839435, "grad_norm": 0.8545135855674744, "learning_rate": 2.7296848979675992e-06, "loss": 0.7409, "step": 19930 }, { "epoch": 3.6859319829028063, "grad_norm": 1.0508830547332764, "learning_rate": 2.7262957951107116e-06, "loss": 0.7794, "step": 19931 }, { "epoch": 3.686117821966177, "grad_norm": 0.8892425894737244, "learning_rate": 2.7229087683940946e-06, "loss": 0.5947, "step": 19932 }, { "epoch": 3.6863036610295485, "grad_norm": 1.0860157012939453, "learning_rate": 2.7195238178900684e-06, "loss": 0.9729, "step": 19933 }, { "epoch": 3.6864895000929194, "grad_norm": 0.9292671084403992, "learning_rate": 2.716140943670864e-06, "loss": 0.9533, "step": 19934 }, { "epoch": 3.6866753391562908, "grad_norm": 1.120851993560791, "learning_rate": 2.712760145808679e-06, "loss": 0.9393, "step": 19935 }, { "epoch": 3.6868611782196616, "grad_norm": 1.0049084424972534, "learning_rate": 
2.70938142437569e-06, "loss": 0.8023, "step": 19936 }, { "epoch": 3.687047017283033, "grad_norm": 1.0591238737106323, "learning_rate": 2.7060047794439936e-06, "loss": 0.7781, "step": 19937 }, { "epoch": 3.687232856346404, "grad_norm": 1.1058365106582642, "learning_rate": 2.702630211085655e-06, "loss": 0.8144, "step": 19938 }, { "epoch": 3.687418695409775, "grad_norm": 0.868482768535614, "learning_rate": 2.6992577193727053e-06, "loss": 0.8758, "step": 19939 }, { "epoch": 3.687604534473146, "grad_norm": 0.762561023235321, "learning_rate": 2.6958873043771317e-06, "loss": 0.7921, "step": 19940 }, { "epoch": 3.6877903735365174, "grad_norm": 1.0790410041809082, "learning_rate": 2.692518966170865e-06, "loss": 0.9765, "step": 19941 }, { "epoch": 3.6879762125998887, "grad_norm": 0.9293431639671326, "learning_rate": 2.6891527048257813e-06, "loss": 0.9429, "step": 19942 }, { "epoch": 3.6881620516632596, "grad_norm": 1.1230645179748535, "learning_rate": 2.6857885204137346e-06, "loss": 0.7974, "step": 19943 }, { "epoch": 3.6883478907266305, "grad_norm": 0.8476539850234985, "learning_rate": 2.6824264130065555e-06, "loss": 0.7633, "step": 19944 }, { "epoch": 3.688533729790002, "grad_norm": 0.98152756690979, "learning_rate": 2.6790663826759765e-06, "loss": 0.9192, "step": 19945 }, { "epoch": 3.688719568853373, "grad_norm": 1.035069227218628, "learning_rate": 2.6757084294937173e-06, "loss": 0.8501, "step": 19946 }, { "epoch": 3.688905407916744, "grad_norm": 1.1366145610809326, "learning_rate": 2.672352553531432e-06, "loss": 0.7742, "step": 19947 }, { "epoch": 3.6890912469801154, "grad_norm": 0.9123889803886414, "learning_rate": 2.668998754860774e-06, "loss": 0.8288, "step": 19948 }, { "epoch": 3.6892770860434863, "grad_norm": 0.8435447812080383, "learning_rate": 2.6656470335533092e-06, "loss": 0.6986, "step": 19949 }, { "epoch": 3.6894629251068576, "grad_norm": 0.9356799721717834, "learning_rate": 2.662297389680568e-06, "loss": 0.857, "step": 19950 }, { "epoch": 3.6896487641702285, 
"grad_norm": 0.993013322353363, "learning_rate": 2.658949823314061e-06, "loss": 0.9632, "step": 19951 }, { "epoch": 3.6898346032336, "grad_norm": 1.127468466758728, "learning_rate": 2.65560433452523e-06, "loss": 0.7315, "step": 19952 }, { "epoch": 3.6900204422969707, "grad_norm": 0.842962384223938, "learning_rate": 2.6522609233854633e-06, "loss": 0.7186, "step": 19953 }, { "epoch": 3.690206281360342, "grad_norm": 1.0940351486206055, "learning_rate": 2.6489195899661368e-06, "loss": 1.2196, "step": 19954 }, { "epoch": 3.690392120423713, "grad_norm": 0.8041806221008301, "learning_rate": 2.6455803343385486e-06, "loss": 0.71, "step": 19955 }, { "epoch": 3.6905779594870842, "grad_norm": 0.8925176858901978, "learning_rate": 2.642243156573998e-06, "loss": 0.7767, "step": 19956 }, { "epoch": 3.6907637985504556, "grad_norm": 1.1546437740325928, "learning_rate": 2.638908056743694e-06, "loss": 0.8426, "step": 19957 }, { "epoch": 3.6909496376138264, "grad_norm": 0.921238899230957, "learning_rate": 2.6355750349188136e-06, "loss": 0.629, "step": 19958 }, { "epoch": 3.6911354766771973, "grad_norm": 0.866148054599762, "learning_rate": 2.632244091170499e-06, "loss": 0.6771, "step": 19959 }, { "epoch": 3.6913213157405687, "grad_norm": 0.864345371723175, "learning_rate": 2.6289152255698503e-06, "loss": 0.6718, "step": 19960 }, { "epoch": 3.69150715480394, "grad_norm": 0.9704990983009338, "learning_rate": 2.6255884381879093e-06, "loss": 0.9381, "step": 19961 }, { "epoch": 3.691692993867311, "grad_norm": 0.9350848197937012, "learning_rate": 2.6222637290956642e-06, "loss": 0.7384, "step": 19962 }, { "epoch": 3.6918788329306818, "grad_norm": 0.8249576091766357, "learning_rate": 2.618941098364114e-06, "loss": 0.6296, "step": 19963 }, { "epoch": 3.692064671994053, "grad_norm": 1.0667258501052856, "learning_rate": 2.6156205460641346e-06, "loss": 0.7731, "step": 19964 }, { "epoch": 3.6922505110574244, "grad_norm": 1.0333415269851685, "learning_rate": 2.612302072266637e-06, "loss": 0.7406, 
"step": 19965 }, { "epoch": 3.6924363501207953, "grad_norm": 1.2386226654052734, "learning_rate": 2.6089856770424194e-06, "loss": 0.7804, "step": 19966 }, { "epoch": 3.6926221891841666, "grad_norm": 0.9404177069664001, "learning_rate": 2.6056713604622697e-06, "loss": 0.6818, "step": 19967 }, { "epoch": 3.6928080282475375, "grad_norm": 0.8880127668380737, "learning_rate": 2.6023591225969312e-06, "loss": 0.6698, "step": 19968 }, { "epoch": 3.692993867310909, "grad_norm": 0.8711450099945068, "learning_rate": 2.5990489635171033e-06, "loss": 0.9517, "step": 19969 }, { "epoch": 3.6931797063742797, "grad_norm": 1.015639305114746, "learning_rate": 2.5957408832934183e-06, "loss": 0.7335, "step": 19970 }, { "epoch": 3.693365545437651, "grad_norm": 0.8139245510101318, "learning_rate": 2.592434881996497e-06, "loss": 0.6054, "step": 19971 }, { "epoch": 3.6935513845010224, "grad_norm": 0.989398181438446, "learning_rate": 2.5891309596968948e-06, "loss": 1.0038, "step": 19972 }, { "epoch": 3.6937372235643933, "grad_norm": 0.9275818467140198, "learning_rate": 2.5858291164651325e-06, "loss": 0.7925, "step": 19973 }, { "epoch": 3.693923062627764, "grad_norm": 0.8881398439407349, "learning_rate": 2.5825293523716654e-06, "loss": 1.083, "step": 19974 }, { "epoch": 3.6941089016911355, "grad_norm": 0.8966253995895386, "learning_rate": 2.5792316674869364e-06, "loss": 0.6862, "step": 19975 }, { "epoch": 3.694294740754507, "grad_norm": 1.009162425994873, "learning_rate": 2.5759360618813454e-06, "loss": 0.8876, "step": 19976 }, { "epoch": 3.6944805798178777, "grad_norm": 1.0653289556503296, "learning_rate": 2.5726425356252026e-06, "loss": 0.6619, "step": 19977 }, { "epoch": 3.6946664188812486, "grad_norm": 0.9822260141372681, "learning_rate": 2.5693510887888074e-06, "loss": 0.9509, "step": 19978 }, { "epoch": 3.69485225794462, "grad_norm": 1.2793655395507812, "learning_rate": 2.5660617214424145e-06, "loss": 1.1533, "step": 19979 }, { "epoch": 3.6950380970079912, "grad_norm": 
0.8063891530036926, "learning_rate": 2.5627744336562343e-06, "loss": 0.6872, "step": 19980 }, { "epoch": 3.695223936071362, "grad_norm": 0.9094991087913513, "learning_rate": 2.5594892255004334e-06, "loss": 0.5784, "step": 19981 }, { "epoch": 3.6954097751347335, "grad_norm": 0.9532362818717957, "learning_rate": 2.5562060970450996e-06, "loss": 0.6367, "step": 19982 }, { "epoch": 3.6955956141981043, "grad_norm": 0.9102233052253723, "learning_rate": 2.552925048360344e-06, "loss": 0.6689, "step": 19983 }, { "epoch": 3.6957814532614757, "grad_norm": 0.8929124474525452, "learning_rate": 2.549646079516177e-06, "loss": 0.8239, "step": 19984 }, { "epoch": 3.6959672923248466, "grad_norm": 0.9813836216926575, "learning_rate": 2.546369190582576e-06, "loss": 0.7966, "step": 19985 }, { "epoch": 3.696153131388218, "grad_norm": 0.991256833076477, "learning_rate": 2.543094381629496e-06, "loss": 0.7203, "step": 19986 }, { "epoch": 3.6963389704515888, "grad_norm": 0.861219048500061, "learning_rate": 2.539821652726804e-06, "loss": 0.5936, "step": 19987 }, { "epoch": 3.69652480951496, "grad_norm": 0.8657010197639465, "learning_rate": 2.5365510039443873e-06, "loss": 0.7177, "step": 19988 }, { "epoch": 3.696710648578331, "grad_norm": 0.9564704895019531, "learning_rate": 2.5332824353520357e-06, "loss": 0.9188, "step": 19989 }, { "epoch": 3.6968964876417023, "grad_norm": 0.907672643661499, "learning_rate": 2.530015947019504e-06, "loss": 0.8229, "step": 19990 }, { "epoch": 3.6970823267050736, "grad_norm": 0.9323121309280396, "learning_rate": 2.5267515390165254e-06, "loss": 0.501, "step": 19991 }, { "epoch": 3.6972681657684445, "grad_norm": 0.9874093532562256, "learning_rate": 2.523489211412755e-06, "loss": 0.9714, "step": 19992 }, { "epoch": 3.6974540048318154, "grad_norm": 0.9057678580284119, "learning_rate": 2.5202289642778375e-06, "loss": 0.8565, "step": 19993 }, { "epoch": 3.6976398438951867, "grad_norm": 0.8049215078353882, "learning_rate": 2.5169707976813504e-06, "loss": 0.6827, 
"step": 19994 }, { "epoch": 3.697825682958558, "grad_norm": 1.047194004058838, "learning_rate": 2.513714711692827e-06, "loss": 1.1513, "step": 19995 }, { "epoch": 3.698011522021929, "grad_norm": 0.8550397157669067, "learning_rate": 2.510460706381779e-06, "loss": 0.9733, "step": 19996 }, { "epoch": 3.6981973610853003, "grad_norm": 0.9241202473640442, "learning_rate": 2.5072087818176382e-06, "loss": 0.8653, "step": 19997 }, { "epoch": 3.698383200148671, "grad_norm": 1.0444940328598022, "learning_rate": 2.503958938069839e-06, "loss": 0.7391, "step": 19998 }, { "epoch": 3.6985690392120425, "grad_norm": 0.9220775961875916, "learning_rate": 2.500711175207704e-06, "loss": 0.8743, "step": 19999 }, { "epoch": 3.6987548782754134, "grad_norm": 0.9342935681343079, "learning_rate": 2.497465493300588e-06, "loss": 0.7478, "step": 20000 }, { "epoch": 3.6989407173387847, "grad_norm": 0.8913558721542358, "learning_rate": 2.4942218924177473e-06, "loss": 0.8953, "step": 20001 }, { "epoch": 3.6991265564021556, "grad_norm": 0.9742237329483032, "learning_rate": 2.4909803726284043e-06, "loss": 0.8836, "step": 20002 }, { "epoch": 3.699312395465527, "grad_norm": 0.8685630559921265, "learning_rate": 2.48774093400177e-06, "loss": 0.559, "step": 20003 }, { "epoch": 3.699498234528898, "grad_norm": 1.0585001707077026, "learning_rate": 2.484503576606967e-06, "loss": 0.8954, "step": 20004 }, { "epoch": 3.699684073592269, "grad_norm": 0.8607193827629089, "learning_rate": 2.4812683005130843e-06, "loss": 0.7518, "step": 20005 }, { "epoch": 3.6998699126556405, "grad_norm": 0.8997200727462769, "learning_rate": 2.478035105789178e-06, "loss": 0.5457, "step": 20006 }, { "epoch": 3.7000557517190114, "grad_norm": 0.9212594032287598, "learning_rate": 2.474803992504249e-06, "loss": 0.8139, "step": 20007 }, { "epoch": 3.7002415907823822, "grad_norm": 0.991801917552948, "learning_rate": 2.4715749607272853e-06, "loss": 0.7639, "step": 20008 }, { "epoch": 3.7004274298457536, "grad_norm": 0.9359737038612366, 
"learning_rate": 2.4683480105271883e-06, "loss": 0.8758, "step": 20009 }, { "epoch": 3.700613268909125, "grad_norm": 0.8309842348098755, "learning_rate": 2.465123141972825e-06, "loss": 0.7622, "step": 20010 }, { "epoch": 3.700799107972496, "grad_norm": 0.5806534886360168, "learning_rate": 2.461900355133029e-06, "loss": 0.323, "step": 20011 }, { "epoch": 3.7009849470358667, "grad_norm": 1.1498020887374878, "learning_rate": 2.4586796500766005e-06, "loss": 0.8802, "step": 20012 }, { "epoch": 3.701170786099238, "grad_norm": 0.8998548984527588, "learning_rate": 2.4554610268722634e-06, "loss": 0.8204, "step": 20013 }, { "epoch": 3.7013566251626093, "grad_norm": 0.9816725850105286, "learning_rate": 2.452244485588706e-06, "loss": 0.765, "step": 20014 }, { "epoch": 3.70154246422598, "grad_norm": 1.1572517156600952, "learning_rate": 2.4490300262946074e-06, "loss": 0.9171, "step": 20015 }, { "epoch": 3.7017283032893515, "grad_norm": 0.889315128326416, "learning_rate": 2.445817649058546e-06, "loss": 0.8954, "step": 20016 }, { "epoch": 3.7019141423527224, "grad_norm": 1.7878528833389282, "learning_rate": 2.4426073539491e-06, "loss": 1.1884, "step": 20017 }, { "epoch": 3.7020999814160938, "grad_norm": 1.295170545578003, "learning_rate": 2.4393991410347928e-06, "loss": 0.7628, "step": 20018 }, { "epoch": 3.7022858204794646, "grad_norm": 1.06532621383667, "learning_rate": 2.4361930103840802e-06, "loss": 0.9315, "step": 20019 }, { "epoch": 3.702471659542836, "grad_norm": 0.8872401118278503, "learning_rate": 2.4329889620654077e-06, "loss": 0.6206, "step": 20020 }, { "epoch": 3.7026574986062073, "grad_norm": 1.11594820022583, "learning_rate": 2.429786996147154e-06, "loss": 1.1982, "step": 20021 }, { "epoch": 3.702843337669578, "grad_norm": 1.0438435077667236, "learning_rate": 2.4265871126976536e-06, "loss": 0.9589, "step": 20022 }, { "epoch": 3.703029176732949, "grad_norm": 0.9034283757209778, "learning_rate": 2.423389311785207e-06, "loss": 0.7611, "step": 20023 }, { "epoch": 
3.7032150157963204, "grad_norm": 1.0138309001922607, "learning_rate": 2.4201935934780816e-06, "loss": 0.6974, "step": 20024 }, { "epoch": 3.7034008548596917, "grad_norm": 1.1490155458450317, "learning_rate": 2.4169999578444678e-06, "loss": 0.8325, "step": 20025 }, { "epoch": 3.7035866939230626, "grad_norm": 0.8348366618156433, "learning_rate": 2.413808404952511e-06, "loss": 0.7004, "step": 20026 }, { "epoch": 3.7037725329864335, "grad_norm": 1.0912022590637207, "learning_rate": 2.4106189348703565e-06, "loss": 0.6626, "step": 20027 }, { "epoch": 3.703958372049805, "grad_norm": 0.8689257502555847, "learning_rate": 2.4074315476660836e-06, "loss": 0.6285, "step": 20028 }, { "epoch": 3.704144211113176, "grad_norm": 1.1265016794204712, "learning_rate": 2.404246243407704e-06, "loss": 0.6572, "step": 20029 }, { "epoch": 3.704330050176547, "grad_norm": 1.0007442235946655, "learning_rate": 2.401063022163197e-06, "loss": 0.7885, "step": 20030 }, { "epoch": 3.7045158892399184, "grad_norm": 0.8556223511695862, "learning_rate": 2.39788188400053e-06, "loss": 0.6905, "step": 20031 }, { "epoch": 3.7047017283032893, "grad_norm": 0.7509297728538513, "learning_rate": 2.3947028289875716e-06, "loss": 0.6629, "step": 20032 }, { "epoch": 3.7048875673666606, "grad_norm": 0.8223177194595337, "learning_rate": 2.3915258571921896e-06, "loss": 0.5922, "step": 20033 }, { "epoch": 3.7050734064300315, "grad_norm": 0.8994002938270569, "learning_rate": 2.388350968682185e-06, "loss": 0.8186, "step": 20034 }, { "epoch": 3.705259245493403, "grad_norm": 0.9853032827377319, "learning_rate": 2.3851781635253147e-06, "loss": 0.8247, "step": 20035 }, { "epoch": 3.7054450845567737, "grad_norm": 0.8897237181663513, "learning_rate": 2.3820074417893136e-06, "loss": 0.7914, "step": 20036 }, { "epoch": 3.705630923620145, "grad_norm": 0.9126240015029907, "learning_rate": 2.3788388035418273e-06, "loss": 0.6382, "step": 20037 }, { "epoch": 3.705816762683516, "grad_norm": 1.0882678031921387, "learning_rate": 
2.3756722488505135e-06, "loss": 0.8311, "step": 20038 }, { "epoch": 3.7060026017468872, "grad_norm": 1.1550171375274658, "learning_rate": 2.3725077777829395e-06, "loss": 0.8201, "step": 20039 }, { "epoch": 3.7061884408102586, "grad_norm": 1.1292064189910889, "learning_rate": 2.369345390406663e-06, "loss": 0.8463, "step": 20040 }, { "epoch": 3.7063742798736294, "grad_norm": 0.9061990976333618, "learning_rate": 2.3661850867891634e-06, "loss": 0.6411, "step": 20041 }, { "epoch": 3.7065601189370003, "grad_norm": 1.173477053642273, "learning_rate": 2.363026866997886e-06, "loss": 0.7937, "step": 20042 }, { "epoch": 3.7067459580003717, "grad_norm": 1.0434966087341309, "learning_rate": 2.3598707311002555e-06, "loss": 0.8296, "step": 20043 }, { "epoch": 3.706931797063743, "grad_norm": 0.9530367851257324, "learning_rate": 2.3567166791636284e-06, "loss": 0.9408, "step": 20044 }, { "epoch": 3.707117636127114, "grad_norm": 0.7768343091011047, "learning_rate": 2.3535647112553294e-06, "loss": 0.8813, "step": 20045 }, { "epoch": 3.707303475190485, "grad_norm": 1.1541277170181274, "learning_rate": 2.350414827442604e-06, "loss": 0.7388, "step": 20046 }, { "epoch": 3.707489314253856, "grad_norm": 0.9838178753852844, "learning_rate": 2.347267027792721e-06, "loss": 0.5767, "step": 20047 }, { "epoch": 3.7076751533172274, "grad_norm": 0.8992997407913208, "learning_rate": 2.3441213123728266e-06, "loss": 0.7926, "step": 20048 }, { "epoch": 3.7078609923805983, "grad_norm": 1.0437636375427246, "learning_rate": 2.3409776812500894e-06, "loss": 1.0169, "step": 20049 }, { "epoch": 3.7080468314439696, "grad_norm": 0.8232665657997131, "learning_rate": 2.337836134491589e-06, "loss": 0.7918, "step": 20050 }, { "epoch": 3.7082326705073405, "grad_norm": 1.0069401264190674, "learning_rate": 2.3346966721643825e-06, "loss": 0.9552, "step": 20051 }, { "epoch": 3.708418509570712, "grad_norm": 0.9937763810157776, "learning_rate": 2.3315592943354837e-06, "loss": 0.8517, "step": 20052 }, { "epoch": 
3.7086043486340827, "grad_norm": 0.9887884259223938, "learning_rate": 2.3284240010718383e-06, "loss": 0.8548, "step": 20053 }, { "epoch": 3.708790187697454, "grad_norm": 1.0063434839248657, "learning_rate": 2.325290792440371e-06, "loss": 0.7341, "step": 20054 }, { "epoch": 3.7089760267608254, "grad_norm": 0.9228911995887756, "learning_rate": 2.3221596685079616e-06, "loss": 0.936, "step": 20055 }, { "epoch": 3.7091618658241963, "grad_norm": 0.9210600852966309, "learning_rate": 2.319030629341423e-06, "loss": 0.7458, "step": 20056 }, { "epoch": 3.709347704887567, "grad_norm": 0.8243858218193054, "learning_rate": 2.315903675007558e-06, "loss": 0.9547, "step": 20057 }, { "epoch": 3.7095335439509385, "grad_norm": 1.0575931072235107, "learning_rate": 2.3127788055730794e-06, "loss": 0.9452, "step": 20058 }, { "epoch": 3.70971938301431, "grad_norm": 1.033123254776001, "learning_rate": 2.309656021104711e-06, "loss": 0.7252, "step": 20059 }, { "epoch": 3.7099052220776807, "grad_norm": 1.0101604461669922, "learning_rate": 2.30653532166909e-06, "loss": 0.6904, "step": 20060 }, { "epoch": 3.7100910611410516, "grad_norm": 0.787567675113678, "learning_rate": 2.3034167073328284e-06, "loss": 0.5783, "step": 20061 }, { "epoch": 3.710276900204423, "grad_norm": 1.0227032899856567, "learning_rate": 2.3003001781624734e-06, "loss": 0.7656, "step": 20062 }, { "epoch": 3.7104627392677942, "grad_norm": 0.8273500204086304, "learning_rate": 2.2971857342245606e-06, "loss": 0.7409, "step": 20063 }, { "epoch": 3.710648578331165, "grad_norm": 1.0066863298416138, "learning_rate": 2.2940733755855592e-06, "loss": 0.93, "step": 20064 }, { "epoch": 3.7108344173945365, "grad_norm": 0.9400318264961243, "learning_rate": 2.2909631023118826e-06, "loss": 0.9107, "step": 20065 }, { "epoch": 3.7110202564579073, "grad_norm": 1.2339752912521362, "learning_rate": 2.2878549144699223e-06, "loss": 0.8317, "step": 20066 }, { "epoch": 3.7112060955212787, "grad_norm": 0.9648247957229614, "learning_rate": 
2.2847488121260253e-06, "loss": 0.8801, "step": 20067 }, { "epoch": 3.7113919345846496, "grad_norm": 0.9962890148162842, "learning_rate": 2.2816447953464715e-06, "loss": 0.7403, "step": 20068 }, { "epoch": 3.711577773648021, "grad_norm": 0.9467740654945374, "learning_rate": 2.278542864197519e-06, "loss": 0.8098, "step": 20069 }, { "epoch": 3.7117636127113918, "grad_norm": 0.9877055287361145, "learning_rate": 2.27544301874536e-06, "loss": 0.6955, "step": 20070 }, { "epoch": 3.711949451774763, "grad_norm": 1.4038006067276, "learning_rate": 2.2723452590561856e-06, "loss": 0.8247, "step": 20071 }, { "epoch": 3.712135290838134, "grad_norm": 1.0121270418167114, "learning_rate": 2.2692495851960983e-06, "loss": 0.95, "step": 20072 }, { "epoch": 3.7123211299015053, "grad_norm": 0.9226126074790955, "learning_rate": 2.2661559972311565e-06, "loss": 0.8994, "step": 20073 }, { "epoch": 3.7125069689648766, "grad_norm": 0.7741632461547852, "learning_rate": 2.263064495227396e-06, "loss": 0.6216, "step": 20074 }, { "epoch": 3.7126928080282475, "grad_norm": 0.8833262324333191, "learning_rate": 2.259975079250798e-06, "loss": 0.6881, "step": 20075 }, { "epoch": 3.7128786470916184, "grad_norm": 1.0069153308868408, "learning_rate": 2.256887749367309e-06, "loss": 0.751, "step": 20076 }, { "epoch": 3.7130644861549897, "grad_norm": 0.9459255337715149, "learning_rate": 2.2538025056428214e-06, "loss": 0.8701, "step": 20077 }, { "epoch": 3.713250325218361, "grad_norm": 0.8940929770469666, "learning_rate": 2.250719348143171e-06, "loss": 0.9173, "step": 20078 }, { "epoch": 3.713436164281732, "grad_norm": 0.834430992603302, "learning_rate": 2.247638276934172e-06, "loss": 0.6886, "step": 20079 }, { "epoch": 3.7136220033451033, "grad_norm": 0.9722082018852234, "learning_rate": 2.2445592920815826e-06, "loss": 0.9061, "step": 20080 }, { "epoch": 3.713807842408474, "grad_norm": 0.9196001887321472, "learning_rate": 2.2414823936511287e-06, "loss": 0.7362, "step": 20081 }, { "epoch": 3.7139936814718455, 
"grad_norm": 1.0167895555496216, "learning_rate": 2.238407581708468e-06, "loss": 0.7281, "step": 20082 }, { "epoch": 3.7141795205352164, "grad_norm": 1.020185112953186, "learning_rate": 2.2353348563192266e-06, "loss": 0.7961, "step": 20083 }, { "epoch": 3.7143653595985877, "grad_norm": 1.0760143995285034, "learning_rate": 2.2322642175490073e-06, "loss": 0.9359, "step": 20084 }, { "epoch": 3.7145511986619586, "grad_norm": 1.1122297048568726, "learning_rate": 2.229195665463324e-06, "loss": 0.7231, "step": 20085 }, { "epoch": 3.71473703772533, "grad_norm": 0.9687815308570862, "learning_rate": 2.226129200127669e-06, "loss": 1.0986, "step": 20086 }, { "epoch": 3.714922876788701, "grad_norm": 1.0281693935394287, "learning_rate": 2.2230648216075125e-06, "loss": 0.7868, "step": 20087 }, { "epoch": 3.715108715852072, "grad_norm": 0.90626060962677, "learning_rate": 2.220002529968246e-06, "loss": 0.9467, "step": 20088 }, { "epoch": 3.7152945549154435, "grad_norm": 1.0133919715881348, "learning_rate": 2.2169423252752175e-06, "loss": 0.5898, "step": 20089 }, { "epoch": 3.7154803939788144, "grad_norm": 0.8916006088256836, "learning_rate": 2.213884207593742e-06, "loss": 0.8046, "step": 20090 }, { "epoch": 3.7156662330421852, "grad_norm": 0.7658432126045227, "learning_rate": 2.2108281769891217e-06, "loss": 0.5187, "step": 20091 }, { "epoch": 3.7158520721055566, "grad_norm": 0.937510073184967, "learning_rate": 2.2077742335265604e-06, "loss": 0.8989, "step": 20092 }, { "epoch": 3.716037911168928, "grad_norm": 0.9862101674079895, "learning_rate": 2.20472237727124e-06, "loss": 0.7326, "step": 20093 }, { "epoch": 3.716223750232299, "grad_norm": 0.9088574647903442, "learning_rate": 2.2016726082882743e-06, "loss": 0.8046, "step": 20094 }, { "epoch": 3.71640958929567, "grad_norm": 0.9740971326828003, "learning_rate": 2.1986249266428006e-06, "loss": 0.7832, "step": 20095 }, { "epoch": 3.716595428359041, "grad_norm": 0.8323392271995544, "learning_rate": 2.1955793323998442e-06, "loss": 
0.6632, "step": 20096 }, { "epoch": 3.7167812674224123, "grad_norm": 0.8543899059295654, "learning_rate": 2.192535825624409e-06, "loss": 0.8198, "step": 20097 }, { "epoch": 3.716967106485783, "grad_norm": 0.9209011793136597, "learning_rate": 2.1894944063814314e-06, "loss": 0.7359, "step": 20098 }, { "epoch": 3.7171529455491545, "grad_norm": 0.9761630296707153, "learning_rate": 2.1864550747358713e-06, "loss": 0.7975, "step": 20099 }, { "epoch": 3.7173387846125254, "grad_norm": 0.9385524392127991, "learning_rate": 2.1834178307525543e-06, "loss": 0.876, "step": 20100 }, { "epoch": 3.7175246236758968, "grad_norm": 1.1285103559494019, "learning_rate": 2.1803826744963286e-06, "loss": 0.8112, "step": 20101 }, { "epoch": 3.7177104627392676, "grad_norm": 0.8879675269126892, "learning_rate": 2.1773496060319755e-06, "loss": 0.8473, "step": 20102 }, { "epoch": 3.717896301802639, "grad_norm": 0.9821565747261047, "learning_rate": 2.174318625424221e-06, "loss": 0.7657, "step": 20103 }, { "epoch": 3.7180821408660103, "grad_norm": 0.8077647089958191, "learning_rate": 2.171289732737758e-06, "loss": 0.7054, "step": 20104 }, { "epoch": 3.718267979929381, "grad_norm": 0.9461015462875366, "learning_rate": 2.1682629280372456e-06, "loss": 0.8369, "step": 20105 }, { "epoch": 3.718453818992752, "grad_norm": 0.8106582760810852, "learning_rate": 2.165238211387255e-06, "loss": 0.5342, "step": 20106 }, { "epoch": 3.7186396580561234, "grad_norm": 0.96129310131073, "learning_rate": 2.1622155828523784e-06, "loss": 0.9539, "step": 20107 }, { "epoch": 3.7188254971194947, "grad_norm": 0.8640586733818054, "learning_rate": 2.15919504249712e-06, "loss": 0.754, "step": 20108 }, { "epoch": 3.7190113361828656, "grad_norm": 1.0741902589797974, "learning_rate": 2.1561765903859275e-06, "loss": 0.5654, "step": 20109 }, { "epoch": 3.7191971752462365, "grad_norm": 0.9164366126060486, "learning_rate": 2.1531602265832397e-06, "loss": 0.7342, "step": 20110 }, { "epoch": 3.719383014309608, "grad_norm": 
1.1637158393859863, "learning_rate": 2.1501459511534374e-06, "loss": 0.9248, "step": 20111 }, { "epoch": 3.719568853372979, "grad_norm": 0.9291113018989563, "learning_rate": 2.147133764160858e-06, "loss": 0.8318, "step": 20112 }, { "epoch": 3.71975469243635, "grad_norm": 0.956951916217804, "learning_rate": 2.144123665669784e-06, "loss": 0.8349, "step": 20113 }, { "epoch": 3.7199405314997214, "grad_norm": 0.8551384806632996, "learning_rate": 2.1411156557444525e-06, "loss": 0.9349, "step": 20114 }, { "epoch": 3.7201263705630923, "grad_norm": 0.9560601115226746, "learning_rate": 2.1381097344490895e-06, "loss": 0.8236, "step": 20115 }, { "epoch": 3.7203122096264636, "grad_norm": 0.7943173050880432, "learning_rate": 2.1351059018478336e-06, "loss": 0.7864, "step": 20116 }, { "epoch": 3.7204980486898345, "grad_norm": 0.8827927708625793, "learning_rate": 2.1321041580047996e-06, "loss": 0.6593, "step": 20117 }, { "epoch": 3.720683887753206, "grad_norm": 0.9819035530090332, "learning_rate": 2.1291045029840474e-06, "loss": 1.1072, "step": 20118 }, { "epoch": 3.7208697268165767, "grad_norm": 0.7115758061408997, "learning_rate": 2.1261069368496033e-06, "loss": 0.5483, "step": 20119 }, { "epoch": 3.721055565879948, "grad_norm": 1.027331829071045, "learning_rate": 2.123111459665461e-06, "loss": 1.0232, "step": 20120 }, { "epoch": 3.721241404943319, "grad_norm": 0.8737844228744507, "learning_rate": 2.120118071495536e-06, "loss": 0.6384, "step": 20121 }, { "epoch": 3.7214272440066902, "grad_norm": 0.831642210483551, "learning_rate": 2.1171267724037105e-06, "loss": 0.6566, "step": 20122 }, { "epoch": 3.7216130830700616, "grad_norm": 0.8471463918685913, "learning_rate": 2.114137562453855e-06, "loss": 0.8067, "step": 20123 }, { "epoch": 3.7217989221334324, "grad_norm": 1.0067334175109863, "learning_rate": 2.1111504417097525e-06, "loss": 0.8335, "step": 20124 }, { "epoch": 3.7219847611968033, "grad_norm": 1.0657848119735718, "learning_rate": 2.1081654102351635e-06, "loss": 0.892, 
"step": 20125 }, { "epoch": 3.7221706002601747, "grad_norm": 0.9801212549209595, "learning_rate": 2.1051824680937802e-06, "loss": 0.7435, "step": 20126 }, { "epoch": 3.722356439323546, "grad_norm": 0.9074841141700745, "learning_rate": 2.1022016153492973e-06, "loss": 0.6944, "step": 20127 }, { "epoch": 3.722542278386917, "grad_norm": 0.9464606642723083, "learning_rate": 2.099222852065319e-06, "loss": 0.8186, "step": 20128 }, { "epoch": 3.722728117450288, "grad_norm": 0.9544642567634583, "learning_rate": 2.0962461783054167e-06, "loss": 0.8442, "step": 20129 }, { "epoch": 3.722913956513659, "grad_norm": 1.5007946491241455, "learning_rate": 2.093271594133128e-06, "loss": 1.1873, "step": 20130 }, { "epoch": 3.7230997955770304, "grad_norm": 1.1086900234222412, "learning_rate": 2.090299099611959e-06, "loss": 0.9498, "step": 20131 }, { "epoch": 3.7232856346404013, "grad_norm": 0.9293392896652222, "learning_rate": 2.087328694805324e-06, "loss": 0.8043, "step": 20132 }, { "epoch": 3.7234714737037726, "grad_norm": 0.9821919202804565, "learning_rate": 2.0843603797766287e-06, "loss": 0.746, "step": 20133 }, { "epoch": 3.7236573127671435, "grad_norm": 0.9458260536193848, "learning_rate": 2.0813941545892337e-06, "loss": 0.6984, "step": 20134 }, { "epoch": 3.723843151830515, "grad_norm": 1.0252138376235962, "learning_rate": 2.0784300193064544e-06, "loss": 0.8409, "step": 20135 }, { "epoch": 3.7240289908938857, "grad_norm": 0.932839035987854, "learning_rate": 2.0754679739915406e-06, "loss": 0.8454, "step": 20136 }, { "epoch": 3.724214829957257, "grad_norm": 0.7695898413658142, "learning_rate": 2.0725080187077305e-06, "loss": 0.6664, "step": 20137 }, { "epoch": 3.7244006690206284, "grad_norm": 1.0405521392822266, "learning_rate": 2.0695501535181626e-06, "loss": 0.8003, "step": 20138 }, { "epoch": 3.7245865080839993, "grad_norm": 1.0741987228393555, "learning_rate": 2.066594378486009e-06, "loss": 0.8593, "step": 20139 }, { "epoch": 3.72477234714737, "grad_norm": 1.0499932765960693, 
"learning_rate": 2.063640693674329e-06, "loss": 0.786, "step": 20140 }, { "epoch": 3.7249581862107415, "grad_norm": 0.9765279293060303, "learning_rate": 2.0606890991461737e-06, "loss": 0.8632, "step": 20141 }, { "epoch": 3.725144025274113, "grad_norm": 1.08168363571167, "learning_rate": 2.057739594964536e-06, "loss": 0.9217, "step": 20142 }, { "epoch": 3.7253298643374837, "grad_norm": 0.9003187417984009, "learning_rate": 2.0547921811923774e-06, "loss": 0.7954, "step": 20143 }, { "epoch": 3.7255157034008546, "grad_norm": 0.9419955611228943, "learning_rate": 2.051846857892603e-06, "loss": 0.5936, "step": 20144 }, { "epoch": 3.725701542464226, "grad_norm": 0.9181314706802368, "learning_rate": 2.0489036251280624e-06, "loss": 0.6663, "step": 20145 }, { "epoch": 3.7258873815275972, "grad_norm": 0.9209869503974915, "learning_rate": 2.045962482961583e-06, "loss": 0.6843, "step": 20146 }, { "epoch": 3.726073220590968, "grad_norm": 1.0010343790054321, "learning_rate": 2.0430234314559482e-06, "loss": 0.9073, "step": 20147 }, { "epoch": 3.7262590596543395, "grad_norm": 0.9539319276809692, "learning_rate": 2.0400864706738744e-06, "loss": 0.8202, "step": 20148 }, { "epoch": 3.7264448987177103, "grad_norm": 1.0110539197921753, "learning_rate": 2.0371516006780443e-06, "loss": 1.0652, "step": 20149 }, { "epoch": 3.7266307377810817, "grad_norm": 1.076846718788147, "learning_rate": 2.0342188215311086e-06, "loss": 0.7156, "step": 20150 }, { "epoch": 3.7268165768444526, "grad_norm": 0.9414911270141602, "learning_rate": 2.03128813329565e-06, "loss": 0.8184, "step": 20151 }, { "epoch": 3.727002415907824, "grad_norm": 0.985910177230835, "learning_rate": 2.02835953603423e-06, "loss": 0.7397, "step": 20152 }, { "epoch": 3.727188254971195, "grad_norm": 0.8706223368644714, "learning_rate": 2.0254330298093425e-06, "loss": 0.6884, "step": 20153 }, { "epoch": 3.727374094034566, "grad_norm": 0.9221065640449524, "learning_rate": 2.0225086146834495e-06, "loss": 0.8807, "step": 20154 }, { "epoch": 
3.727559933097937, "grad_norm": 0.9632253646850586, "learning_rate": 2.019586290718989e-06, "loss": 0.8141, "step": 20155 }, { "epoch": 3.7277457721613083, "grad_norm": 0.9486887454986572, "learning_rate": 2.0166660579783113e-06, "loss": 0.8536, "step": 20156 }, { "epoch": 3.7279316112246796, "grad_norm": 1.1962312459945679, "learning_rate": 2.013747916523756e-06, "loss": 0.8527, "step": 20157 }, { "epoch": 3.7281174502880505, "grad_norm": 0.8917415738105774, "learning_rate": 2.010831866417595e-06, "loss": 0.9358, "step": 20158 }, { "epoch": 3.7283032893514214, "grad_norm": 0.8610925078392029, "learning_rate": 2.0079179077220788e-06, "loss": 0.8847, "step": 20159 }, { "epoch": 3.7284891284147927, "grad_norm": 0.9495860934257507, "learning_rate": 2.005006040499391e-06, "loss": 0.8034, "step": 20160 }, { "epoch": 3.728674967478164, "grad_norm": 1.1175827980041504, "learning_rate": 2.0020962648116704e-06, "loss": 0.766, "step": 20161 }, { "epoch": 3.728860806541535, "grad_norm": 1.092516303062439, "learning_rate": 1.999188580721045e-06, "loss": 0.88, "step": 20162 }, { "epoch": 3.7290466456049063, "grad_norm": 0.9286985993385315, "learning_rate": 1.9962829882895662e-06, "loss": 0.8497, "step": 20163 }, { "epoch": 3.729232484668277, "grad_norm": 1.028407335281372, "learning_rate": 1.993379487579239e-06, "loss": 0.8222, "step": 20164 }, { "epoch": 3.7294183237316485, "grad_norm": 0.7155711650848389, "learning_rate": 1.990478078652047e-06, "loss": 0.5466, "step": 20165 }, { "epoch": 3.7296041627950194, "grad_norm": 1.143075704574585, "learning_rate": 1.987578761569897e-06, "loss": 0.9321, "step": 20166 }, { "epoch": 3.7297900018583907, "grad_norm": 0.830189049243927, "learning_rate": 1.9846815363946947e-06, "loss": 0.7996, "step": 20167 }, { "epoch": 3.7299758409217616, "grad_norm": 0.958895206451416, "learning_rate": 1.981786403188268e-06, "loss": 1.0407, "step": 20168 }, { "epoch": 3.730161679985133, "grad_norm": 0.8601774573326111, "learning_rate": 
1.978893362012402e-06, "loss": 0.9049, "step": 20169 }, { "epoch": 3.730347519048504, "grad_norm": 0.9910159707069397, "learning_rate": 1.9760024129288344e-06, "loss": 0.8518, "step": 20170 }, { "epoch": 3.730533358111875, "grad_norm": 0.9023825526237488, "learning_rate": 1.973113555999284e-06, "loss": 0.77, "step": 20171 }, { "epoch": 3.7307191971752465, "grad_norm": 1.6084246635437012, "learning_rate": 1.970226791285412e-06, "loss": 1.2125, "step": 20172 }, { "epoch": 3.7309050362386174, "grad_norm": 0.9333736896514893, "learning_rate": 1.9673421188488137e-06, "loss": 0.5978, "step": 20173 }, { "epoch": 3.7310908753019882, "grad_norm": 1.0360727310180664, "learning_rate": 1.9644595387510624e-06, "loss": 0.8183, "step": 20174 }, { "epoch": 3.7312767143653596, "grad_norm": 0.8903640508651733, "learning_rate": 1.9615790510536967e-06, "loss": 0.7548, "step": 20175 }, { "epoch": 3.731462553428731, "grad_norm": 0.8818002343177795, "learning_rate": 1.9587006558181795e-06, "loss": 0.9411, "step": 20176 }, { "epoch": 3.731648392492102, "grad_norm": 0.8426865339279175, "learning_rate": 1.955824353105962e-06, "loss": 0.6555, "step": 20177 }, { "epoch": 3.731834231555473, "grad_norm": 1.3011345863342285, "learning_rate": 1.9529501429784158e-06, "loss": 0.7965, "step": 20178 }, { "epoch": 3.732020070618844, "grad_norm": 0.9135447144508362, "learning_rate": 1.9500780254968932e-06, "loss": 0.9401, "step": 20179 }, { "epoch": 3.7322059096822153, "grad_norm": 0.8463492393493652, "learning_rate": 1.9472080007227e-06, "loss": 0.9496, "step": 20180 }, { "epoch": 3.732391748745586, "grad_norm": 0.9751183390617371, "learning_rate": 1.9443400687170763e-06, "loss": 0.8128, "step": 20181 }, { "epoch": 3.7325775878089575, "grad_norm": 1.836098551750183, "learning_rate": 1.9414742295412515e-06, "loss": 1.3495, "step": 20182 }, { "epoch": 3.7327634268723284, "grad_norm": 0.9058772325515747, "learning_rate": 1.9386104832563867e-06, "loss": 0.917, "step": 20183 }, { "epoch": 
3.7329492659356998, "grad_norm": 0.9225811958312988, "learning_rate": 1.9357488299235894e-06, "loss": 0.7237, "step": 20184 }, { "epoch": 3.7331351049990706, "grad_norm": 1.1890093088150024, "learning_rate": 1.9328892696039545e-06, "loss": 0.8495, "step": 20185 }, { "epoch": 3.733320944062442, "grad_norm": 0.877237856388092, "learning_rate": 1.930031802358501e-06, "loss": 0.7467, "step": 20186 }, { "epoch": 3.7335067831258133, "grad_norm": 0.9402475357055664, "learning_rate": 1.927176428248234e-06, "loss": 0.8492, "step": 20187 }, { "epoch": 3.733692622189184, "grad_norm": 0.8594173789024353, "learning_rate": 1.924323147334095e-06, "loss": 0.8439, "step": 20188 }, { "epoch": 3.733878461252555, "grad_norm": 0.876255214214325, "learning_rate": 1.921471959676957e-06, "loss": 0.6615, "step": 20189 }, { "epoch": 3.7340643003159264, "grad_norm": 0.966371476650238, "learning_rate": 1.918622865337694e-06, "loss": 0.7981, "step": 20190 }, { "epoch": 3.7340643003159264, "eval_loss": 1.0422886610031128, "eval_runtime": 23.1336, "eval_samples_per_second": 47.204, "eval_steps_per_second": 23.602, "step": 20190 }, { "epoch": 3.7342501393792977, "grad_norm": 1.0176982879638672, "learning_rate": 1.915775864377123e-06, "loss": 0.9423, "step": 20191 }, { "epoch": 3.7344359784426686, "grad_norm": 1.9772957563400269, "learning_rate": 1.912930956855985e-06, "loss": 1.09, "step": 20192 }, { "epoch": 3.7346218175060395, "grad_norm": 0.8535692095756531, "learning_rate": 1.9100881428350094e-06, "loss": 0.8229, "step": 20193 }, { "epoch": 3.734807656569411, "grad_norm": 0.8185490369796753, "learning_rate": 1.9072474223748805e-06, "loss": 0.9348, "step": 20194 }, { "epoch": 3.734993495632782, "grad_norm": 1.1920720338821411, "learning_rate": 1.9044087955362167e-06, "loss": 0.8847, "step": 20195 }, { "epoch": 3.735179334696153, "grad_norm": 1.0626229047775269, "learning_rate": 1.901572262379614e-06, "loss": 0.9121, "step": 20196 }, { "epoch": 3.7353651737595244, "grad_norm": 
1.131238341331482, "learning_rate": 1.8987378229656016e-06, "loss": 0.76, "step": 20197 }, { "epoch": 3.7355510128228953, "grad_norm": 0.8504828214645386, "learning_rate": 1.895905477354687e-06, "loss": 0.4397, "step": 20198 }, { "epoch": 3.7357368518862666, "grad_norm": 0.991796612739563, "learning_rate": 1.8930752256073103e-06, "loss": 0.8937, "step": 20199 }, { "epoch": 3.7359226909496375, "grad_norm": 0.8266373872756958, "learning_rate": 1.8902470677839013e-06, "loss": 0.7949, "step": 20200 }, { "epoch": 3.736108530013009, "grad_norm": 1.1079061031341553, "learning_rate": 1.8874210039447782e-06, "loss": 0.8597, "step": 20201 }, { "epoch": 3.73629436907638, "grad_norm": 0.7702348232269287, "learning_rate": 1.8845970341503038e-06, "loss": 0.6479, "step": 20202 }, { "epoch": 3.736480208139751, "grad_norm": 1.0181301832199097, "learning_rate": 1.8817751584607413e-06, "loss": 0.5921, "step": 20203 }, { "epoch": 3.736666047203122, "grad_norm": 1.0332307815551758, "learning_rate": 1.8789553769362978e-06, "loss": 0.7162, "step": 20204 }, { "epoch": 3.7368518862664932, "grad_norm": 0.8270226120948792, "learning_rate": 1.8761376896371697e-06, "loss": 0.811, "step": 20205 }, { "epoch": 3.7370377253298646, "grad_norm": 1.0913925170898438, "learning_rate": 1.8733220966234866e-06, "loss": 0.8046, "step": 20206 }, { "epoch": 3.7372235643932354, "grad_norm": 0.9492542743682861, "learning_rate": 1.870508597955367e-06, "loss": 0.8421, "step": 20207 }, { "epoch": 3.7374094034566063, "grad_norm": 1.0270322561264038, "learning_rate": 1.8676971936928412e-06, "loss": 0.8161, "step": 20208 }, { "epoch": 3.7375952425199777, "grad_norm": 1.083715796470642, "learning_rate": 1.864887883895905e-06, "loss": 0.8378, "step": 20209 }, { "epoch": 3.737781081583349, "grad_norm": 1.139499545097351, "learning_rate": 1.862080668624544e-06, "loss": 0.9432, "step": 20210 }, { "epoch": 3.73796692064672, "grad_norm": 0.8852711915969849, "learning_rate": 1.8592755479386549e-06, "loss": 0.731, "step": 
20211 }, { "epoch": 3.738152759710091, "grad_norm": 0.8319820761680603, "learning_rate": 1.8564725218981227e-06, "loss": 0.9048, "step": 20212 }, { "epoch": 3.738338598773462, "grad_norm": 1.0262010097503662, "learning_rate": 1.8536715905627443e-06, "loss": 0.8344, "step": 20213 }, { "epoch": 3.7385244378368334, "grad_norm": 0.8073480725288391, "learning_rate": 1.8508727539923276e-06, "loss": 0.8241, "step": 20214 }, { "epoch": 3.7387102769002043, "grad_norm": 0.7722681164741516, "learning_rate": 1.8480760122466134e-06, "loss": 0.7719, "step": 20215 }, { "epoch": 3.7388961159635756, "grad_norm": 0.8970304727554321, "learning_rate": 1.845281365385265e-06, "loss": 0.7599, "step": 20216 }, { "epoch": 3.7390819550269465, "grad_norm": 0.8711795210838318, "learning_rate": 1.8424888134679574e-06, "loss": 0.8864, "step": 20217 }, { "epoch": 3.739267794090318, "grad_norm": 1.1783910989761353, "learning_rate": 1.8396983565542648e-06, "loss": 0.8427, "step": 20218 }, { "epoch": 3.7394536331536887, "grad_norm": 0.8600555658340454, "learning_rate": 1.8369099947037728e-06, "loss": 0.7282, "step": 20219 }, { "epoch": 3.73963947221706, "grad_norm": 0.8570142984390259, "learning_rate": 1.8341237279759894e-06, "loss": 0.7585, "step": 20220 }, { "epoch": 3.7398253112804314, "grad_norm": 1.1565258502960205, "learning_rate": 1.831339556430356e-06, "loss": 0.9635, "step": 20221 }, { "epoch": 3.7400111503438023, "grad_norm": 1.0019115209579468, "learning_rate": 1.828557480126336e-06, "loss": 0.8266, "step": 20222 }, { "epoch": 3.740196989407173, "grad_norm": 1.0627496242523193, "learning_rate": 1.8257774991232824e-06, "loss": 0.8761, "step": 20223 }, { "epoch": 3.7403828284705445, "grad_norm": 1.1928831338882446, "learning_rate": 1.8229996134805249e-06, "loss": 0.9779, "step": 20224 }, { "epoch": 3.740568667533916, "grad_norm": 0.8400055766105652, "learning_rate": 1.820223823257372e-06, "loss": 0.6205, "step": 20225 }, { "epoch": 3.7407545065972867, "grad_norm": 0.9616957902908325, 
"learning_rate": 1.8174501285130429e-06, "loss": 0.9422, "step": 20226 }, { "epoch": 3.740940345660658, "grad_norm": 1.165166974067688, "learning_rate": 1.8146785293067681e-06, "loss": 0.6762, "step": 20227 }, { "epoch": 3.741126184724029, "grad_norm": 0.9141461253166199, "learning_rate": 1.811909025697689e-06, "loss": 0.7108, "step": 20228 }, { "epoch": 3.7413120237874002, "grad_norm": 0.931006133556366, "learning_rate": 1.8091416177449029e-06, "loss": 0.7751, "step": 20229 }, { "epoch": 3.741497862850771, "grad_norm": 0.9470266103744507, "learning_rate": 1.8063763055074957e-06, "loss": 0.8048, "step": 20230 }, { "epoch": 3.7416837019141425, "grad_norm": 1.2754347324371338, "learning_rate": 1.8036130890444757e-06, "loss": 1.2652, "step": 20231 }, { "epoch": 3.7418695409775133, "grad_norm": 0.8617785573005676, "learning_rate": 1.8008519684148295e-06, "loss": 0.8024, "step": 20232 }, { "epoch": 3.7420553800408847, "grad_norm": 1.0762972831726074, "learning_rate": 1.7980929436774651e-06, "loss": 0.7773, "step": 20233 }, { "epoch": 3.7422412191042556, "grad_norm": 1.0489073991775513, "learning_rate": 1.7953360148913023e-06, "loss": 0.6359, "step": 20234 }, { "epoch": 3.742427058167627, "grad_norm": 0.9695454835891724, "learning_rate": 1.7925811821151606e-06, "loss": 0.6696, "step": 20235 }, { "epoch": 3.742612897230998, "grad_norm": 1.1188420057296753, "learning_rate": 1.7898284454078484e-06, "loss": 0.8205, "step": 20236 }, { "epoch": 3.742798736294369, "grad_norm": 0.9165396690368652, "learning_rate": 1.7870778048280966e-06, "loss": 0.7197, "step": 20237 }, { "epoch": 3.74298457535774, "grad_norm": 0.9933660626411438, "learning_rate": 1.7843292604346362e-06, "loss": 0.7233, "step": 20238 }, { "epoch": 3.7431704144211113, "grad_norm": 0.9684987664222717, "learning_rate": 1.78158281228612e-06, "loss": 0.6795, "step": 20239 }, { "epoch": 3.7433562534844826, "grad_norm": 0.9409486651420593, "learning_rate": 1.778838460441179e-06, "loss": 0.836, "step": 20240 }, { 
"epoch": 3.7435420925478535, "grad_norm": 1.025458812713623, "learning_rate": 1.7760962049583663e-06, "loss": 0.9465, "step": 20241 }, { "epoch": 3.7437279316112244, "grad_norm": 0.9537796974182129, "learning_rate": 1.7733560458962351e-06, "loss": 0.6594, "step": 20242 }, { "epoch": 3.7439137706745957, "grad_norm": 0.884209930896759, "learning_rate": 1.7706179833132497e-06, "loss": 0.7746, "step": 20243 }, { "epoch": 3.744099609737967, "grad_norm": 1.0387382507324219, "learning_rate": 1.7678820172678523e-06, "loss": 0.8117, "step": 20244 }, { "epoch": 3.744285448801338, "grad_norm": 1.0448459386825562, "learning_rate": 1.7651481478184296e-06, "loss": 0.7244, "step": 20245 }, { "epoch": 3.7444712878647093, "grad_norm": 0.8891094326972961, "learning_rate": 1.7624163750233568e-06, "loss": 0.7074, "step": 20246 }, { "epoch": 3.74465712692808, "grad_norm": 0.7286022305488586, "learning_rate": 1.7596866989409211e-06, "loss": 0.5185, "step": 20247 }, { "epoch": 3.7448429659914515, "grad_norm": 1.0601147413253784, "learning_rate": 1.7569591196293867e-06, "loss": 0.6587, "step": 20248 }, { "epoch": 3.7450288050548224, "grad_norm": 0.9401094317436218, "learning_rate": 1.7542336371469513e-06, "loss": 0.7293, "step": 20249 }, { "epoch": 3.7452146441181937, "grad_norm": 1.1351622343063354, "learning_rate": 1.7515102515518245e-06, "loss": 1.0622, "step": 20250 }, { "epoch": 3.7454004831815646, "grad_norm": 0.8532180190086365, "learning_rate": 1.7487889629021037e-06, "loss": 0.6642, "step": 20251 }, { "epoch": 3.745586322244936, "grad_norm": 1.0489792823791504, "learning_rate": 1.746069771255876e-06, "loss": 0.8137, "step": 20252 }, { "epoch": 3.745772161308307, "grad_norm": 0.803139865398407, "learning_rate": 1.7433526766711728e-06, "loss": 0.6271, "step": 20253 }, { "epoch": 3.745958000371678, "grad_norm": 1.0900132656097412, "learning_rate": 1.740637679206003e-06, "loss": 1.0272, "step": 20254 }, { "epoch": 3.7461438394350495, "grad_norm": 0.9377065896987915, "learning_rate": 
1.7379247789182985e-06, "loss": 0.898, "step": 20255 }, { "epoch": 3.7463296784984204, "grad_norm": 1.0326143503189087, "learning_rate": 1.735213975865968e-06, "loss": 0.9932, "step": 20256 }, { "epoch": 3.7465155175617912, "grad_norm": 0.9098765850067139, "learning_rate": 1.7325052701068545e-06, "loss": 0.858, "step": 20257 }, { "epoch": 3.7467013566251626, "grad_norm": 0.9849448800086975, "learning_rate": 1.7297986616987783e-06, "loss": 0.9108, "step": 20258 }, { "epoch": 3.746887195688534, "grad_norm": 0.8679027557373047, "learning_rate": 1.7270941506995265e-06, "loss": 0.721, "step": 20259 }, { "epoch": 3.747073034751905, "grad_norm": 0.8865405321121216, "learning_rate": 1.7243917371668083e-06, "loss": 0.6828, "step": 20260 }, { "epoch": 3.747258873815276, "grad_norm": 0.8816723227500916, "learning_rate": 1.7216914211582891e-06, "loss": 0.9355, "step": 20261 }, { "epoch": 3.747444712878647, "grad_norm": 0.8717049956321716, "learning_rate": 1.7189932027316225e-06, "loss": 0.5854, "step": 20262 }, { "epoch": 3.7476305519420183, "grad_norm": 0.9659672379493713, "learning_rate": 1.7162970819443846e-06, "loss": 0.6806, "step": 20263 }, { "epoch": 3.747816391005389, "grad_norm": 0.8695626258850098, "learning_rate": 1.7136030588541297e-06, "loss": 0.7705, "step": 20264 }, { "epoch": 3.7480022300687605, "grad_norm": 0.7861249446868896, "learning_rate": 1.7109111335183336e-06, "loss": 0.8505, "step": 20265 }, { "epoch": 3.7481880691321314, "grad_norm": 0.9285872578620911, "learning_rate": 1.7082213059944841e-06, "loss": 0.6255, "step": 20266 }, { "epoch": 3.7483739081955028, "grad_norm": 1.0697044134140015, "learning_rate": 1.7055335763399794e-06, "loss": 0.7684, "step": 20267 }, { "epoch": 3.7485597472588736, "grad_norm": 0.9197176694869995, "learning_rate": 1.7028479446121737e-06, "loss": 0.8119, "step": 20268 }, { "epoch": 3.748745586322245, "grad_norm": 0.8771321773529053, "learning_rate": 1.7001644108683878e-06, "loss": 0.6254, "step": 20269 }, { "epoch": 
3.7489314253856163, "grad_norm": 0.8180269002914429, "learning_rate": 1.6974829751659094e-06, "loss": 0.6798, "step": 20270 }, { "epoch": 3.749117264448987, "grad_norm": 0.9938405156135559, "learning_rate": 1.6948036375619702e-06, "loss": 0.8099, "step": 20271 }, { "epoch": 3.749303103512358, "grad_norm": 0.8783755302429199, "learning_rate": 1.6921263981137359e-06, "loss": 0.699, "step": 20272 }, { "epoch": 3.7494889425757294, "grad_norm": 1.013279914855957, "learning_rate": 1.6894512568783716e-06, "loss": 0.6969, "step": 20273 }, { "epoch": 3.7496747816391007, "grad_norm": 0.8796840310096741, "learning_rate": 1.686778213912954e-06, "loss": 0.7338, "step": 20274 }, { "epoch": 3.7498606207024716, "grad_norm": 0.925635039806366, "learning_rate": 1.6841072692745485e-06, "loss": 0.7921, "step": 20275 }, { "epoch": 3.750046459765843, "grad_norm": 1.2063930034637451, "learning_rate": 1.6814384230201541e-06, "loss": 0.8636, "step": 20276 }, { "epoch": 3.750232298829214, "grad_norm": 1.013020634651184, "learning_rate": 1.6787716752067362e-06, "loss": 0.7568, "step": 20277 }, { "epoch": 3.750418137892585, "grad_norm": 0.9455508589744568, "learning_rate": 1.676107025891216e-06, "loss": 0.8089, "step": 20278 }, { "epoch": 3.750603976955956, "grad_norm": 1.0164023637771606, "learning_rate": 1.673444475130448e-06, "loss": 0.8108, "step": 20279 }, { "epoch": 3.7507898160193274, "grad_norm": 1.0061945915222168, "learning_rate": 1.6707840229812866e-06, "loss": 0.8351, "step": 20280 }, { "epoch": 3.7509756550826983, "grad_norm": 1.016903042793274, "learning_rate": 1.6681256695004866e-06, "loss": 0.8434, "step": 20281 }, { "epoch": 3.7511614941460696, "grad_norm": 0.8196696639060974, "learning_rate": 1.6654694147448024e-06, "loss": 0.6653, "step": 20282 }, { "epoch": 3.7513473332094405, "grad_norm": 1.1199551820755005, "learning_rate": 1.6628152587709335e-06, "loss": 1.0246, "step": 20283 }, { "epoch": 3.751533172272812, "grad_norm": 1.1850839853286743, "learning_rate": 
1.660163201635523e-06, "loss": 0.7926, "step": 20284 }, { "epoch": 3.751719011336183, "grad_norm": 0.8495197892189026, "learning_rate": 1.657513243395159e-06, "loss": 0.7317, "step": 20285 }, { "epoch": 3.751904850399554, "grad_norm": 0.982175350189209, "learning_rate": 1.6548653841064187e-06, "loss": 0.7159, "step": 20286 }, { "epoch": 3.752090689462925, "grad_norm": 1.007477879524231, "learning_rate": 1.6522196238258126e-06, "loss": 0.8783, "step": 20287 }, { "epoch": 3.7522765285262962, "grad_norm": 1.0071723461151123, "learning_rate": 1.649575962609795e-06, "loss": 0.6093, "step": 20288 }, { "epoch": 3.7524623675896676, "grad_norm": 0.8891699910163879, "learning_rate": 1.646934400514799e-06, "loss": 0.789, "step": 20289 }, { "epoch": 3.7526482066530384, "grad_norm": 0.9914696216583252, "learning_rate": 1.644294937597224e-06, "loss": 0.9451, "step": 20290 }, { "epoch": 3.7528340457164093, "grad_norm": 0.9179845452308655, "learning_rate": 1.64165757391338e-06, "loss": 0.6384, "step": 20291 }, { "epoch": 3.7530198847797807, "grad_norm": 1.0186740159988403, "learning_rate": 1.6390223095195779e-06, "loss": 0.7442, "step": 20292 }, { "epoch": 3.753205723843152, "grad_norm": 1.0324479341506958, "learning_rate": 1.636389144472028e-06, "loss": 0.8352, "step": 20293 }, { "epoch": 3.753391562906523, "grad_norm": 0.8830369710922241, "learning_rate": 1.6337580788269747e-06, "loss": 0.7471, "step": 20294 }, { "epoch": 3.753577401969894, "grad_norm": 1.063448190689087, "learning_rate": 1.6311291126405393e-06, "loss": 1.0423, "step": 20295 }, { "epoch": 3.753763241033265, "grad_norm": 0.9705498814582825, "learning_rate": 1.6285022459688549e-06, "loss": 0.9218, "step": 20296 }, { "epoch": 3.7539490800966364, "grad_norm": 0.6145190596580505, "learning_rate": 1.6258774788679655e-06, "loss": 0.3206, "step": 20297 }, { "epoch": 3.7541349191600073, "grad_norm": 0.9766961336135864, "learning_rate": 1.623254811393915e-06, "loss": 0.6823, "step": 20298 }, { "epoch": 3.7543207582233786, 
"grad_norm": 0.9116829633712769, "learning_rate": 1.62063424360267e-06, "loss": 0.8332, "step": 20299 }, { "epoch": 3.7545065972867495, "grad_norm": 0.977838933467865, "learning_rate": 1.6180157755501523e-06, "loss": 0.9376, "step": 20300 }, { "epoch": 3.754692436350121, "grad_norm": 1.1162077188491821, "learning_rate": 1.6153994072922506e-06, "loss": 0.8784, "step": 20301 }, { "epoch": 3.7548782754134917, "grad_norm": 0.8749675154685974, "learning_rate": 1.6127851388848315e-06, "loss": 0.7572, "step": 20302 }, { "epoch": 3.755064114476863, "grad_norm": 0.9837542176246643, "learning_rate": 1.610172970383672e-06, "loss": 0.7145, "step": 20303 }, { "epoch": 3.7552499535402344, "grad_norm": 0.8241393566131592, "learning_rate": 1.6075629018445281e-06, "loss": 0.9012, "step": 20304 }, { "epoch": 3.7554357926036053, "grad_norm": 0.8609997630119324, "learning_rate": 1.6049549333230884e-06, "loss": 0.6587, "step": 20305 }, { "epoch": 3.755621631666976, "grad_norm": 1.0367193222045898, "learning_rate": 1.6023490648750528e-06, "loss": 0.9347, "step": 20306 }, { "epoch": 3.7558074707303475, "grad_norm": 0.9241992235183716, "learning_rate": 1.5997452965560101e-06, "loss": 0.7351, "step": 20307 }, { "epoch": 3.755993309793719, "grad_norm": 0.9562812447547913, "learning_rate": 1.597143628421549e-06, "loss": 0.7696, "step": 20308 }, { "epoch": 3.7561791488570897, "grad_norm": 0.8069552779197693, "learning_rate": 1.5945440605271811e-06, "loss": 0.5351, "step": 20309 }, { "epoch": 3.756364987920461, "grad_norm": 0.9494739174842834, "learning_rate": 1.5919465929283949e-06, "loss": 0.8539, "step": 20310 }, { "epoch": 3.756550826983832, "grad_norm": 0.9122812747955322, "learning_rate": 1.5893512256806464e-06, "loss": 0.7785, "step": 20311 }, { "epoch": 3.7567366660472032, "grad_norm": 0.9202660322189331, "learning_rate": 1.5867579588393133e-06, "loss": 0.8463, "step": 20312 }, { "epoch": 3.756922505110574, "grad_norm": 0.9109393358230591, "learning_rate": 1.5841667924597513e-06, 
"loss": 0.8587, "step": 20313 }, { "epoch": 3.7571083441739455, "grad_norm": 1.0824145078659058, "learning_rate": 1.5815777265972608e-06, "loss": 0.816, "step": 20314 }, { "epoch": 3.7572941832373163, "grad_norm": 1.070737361907959, "learning_rate": 1.5789907613070976e-06, "loss": 0.7731, "step": 20315 }, { "epoch": 3.7574800223006877, "grad_norm": 1.022955298423767, "learning_rate": 1.576405896644484e-06, "loss": 0.605, "step": 20316 }, { "epoch": 3.7576658613640586, "grad_norm": 1.0162962675094604, "learning_rate": 1.5738231326645758e-06, "loss": 0.7206, "step": 20317 }, { "epoch": 3.75785170042743, "grad_norm": 1.1087743043899536, "learning_rate": 1.571242469422507e-06, "loss": 0.7641, "step": 20318 }, { "epoch": 3.758037539490801, "grad_norm": 1.0301648378372192, "learning_rate": 1.5686639069733667e-06, "loss": 0.9089, "step": 20319 }, { "epoch": 3.758223378554172, "grad_norm": 1.5919631719589233, "learning_rate": 1.5660874453721774e-06, "loss": 0.8016, "step": 20320 }, { "epoch": 3.758409217617543, "grad_norm": 0.8339594602584839, "learning_rate": 1.5635130846739176e-06, "loss": 0.8136, "step": 20321 }, { "epoch": 3.7585950566809143, "grad_norm": 0.7868873476982117, "learning_rate": 1.5609408249335544e-06, "loss": 0.7063, "step": 20322 }, { "epoch": 3.7587808957442856, "grad_norm": 0.9781759977340698, "learning_rate": 1.5583706662059993e-06, "loss": 0.8121, "step": 20323 }, { "epoch": 3.7589667348076565, "grad_norm": 0.9512813091278076, "learning_rate": 1.5558026085460753e-06, "loss": 0.7352, "step": 20324 }, { "epoch": 3.7591525738710274, "grad_norm": 0.8733657598495483, "learning_rate": 1.553236652008605e-06, "loss": 0.8675, "step": 20325 }, { "epoch": 3.7593384129343987, "grad_norm": 1.2183723449707031, "learning_rate": 1.550672796648356e-06, "loss": 0.8708, "step": 20326 }, { "epoch": 3.75952425199777, "grad_norm": 0.8526677489280701, "learning_rate": 1.5481110425200618e-06, "loss": 0.7549, "step": 20327 }, { "epoch": 3.759710091061141, "grad_norm": 
0.891410768032074, "learning_rate": 1.5455513896783792e-06, "loss": 0.6507, "step": 20328 }, { "epoch": 3.7598959301245123, "grad_norm": 0.956473708152771, "learning_rate": 1.542993838177953e-06, "loss": 0.9669, "step": 20329 }, { "epoch": 3.760081769187883, "grad_norm": 1.0115660429000854, "learning_rate": 1.540438388073362e-06, "loss": 0.7748, "step": 20330 }, { "epoch": 3.7602676082512545, "grad_norm": 0.9662947654724121, "learning_rate": 1.5378850394191402e-06, "loss": 1.0672, "step": 20331 }, { "epoch": 3.7604534473146254, "grad_norm": 0.8652477860450745, "learning_rate": 1.5353337922697996e-06, "loss": 0.7552, "step": 20332 }, { "epoch": 3.7606392863779967, "grad_norm": 0.8552069664001465, "learning_rate": 1.5327846466797856e-06, "loss": 0.8663, "step": 20333 }, { "epoch": 3.760825125441368, "grad_norm": 0.9188373684883118, "learning_rate": 1.5302376027035214e-06, "loss": 0.7211, "step": 20334 }, { "epoch": 3.761010964504739, "grad_norm": 0.9596465229988098, "learning_rate": 1.5276926603953413e-06, "loss": 0.7554, "step": 20335 }, { "epoch": 3.76119680356811, "grad_norm": 0.9444963932037354, "learning_rate": 1.5251498198095793e-06, "loss": 0.6637, "step": 20336 }, { "epoch": 3.761382642631481, "grad_norm": 0.9106894731521606, "learning_rate": 1.5226090810005034e-06, "loss": 0.5993, "step": 20337 }, { "epoch": 3.7615684816948525, "grad_norm": 0.8916698694229126, "learning_rate": 1.520070444022348e-06, "loss": 0.7076, "step": 20338 }, { "epoch": 3.7617543207582234, "grad_norm": 0.8964123725891113, "learning_rate": 1.5175339089292918e-06, "loss": 0.7878, "step": 20339 }, { "epoch": 3.7619401598215942, "grad_norm": 1.3913319110870361, "learning_rate": 1.5149994757754582e-06, "loss": 1.0517, "step": 20340 }, { "epoch": 3.7621259988849656, "grad_norm": 1.08450186252594, "learning_rate": 1.5124671446149596e-06, "loss": 1.087, "step": 20341 }, { "epoch": 3.762311837948337, "grad_norm": 0.7509164214134216, "learning_rate": 1.5099369155018417e-06, "loss": 0.6092, 
"step": 20342 }, { "epoch": 3.762497677011708, "grad_norm": 0.96497642993927, "learning_rate": 1.5074087884901056e-06, "loss": 1.0966, "step": 20343 }, { "epoch": 3.762683516075079, "grad_norm": 0.8731896281242371, "learning_rate": 1.5048827636337082e-06, "loss": 0.8057, "step": 20344 }, { "epoch": 3.76286935513845, "grad_norm": 1.0215585231781006, "learning_rate": 1.502358840986562e-06, "loss": 0.9523, "step": 20345 }, { "epoch": 3.7630551942018213, "grad_norm": 0.8914654850959778, "learning_rate": 1.4998370206025348e-06, "loss": 0.8197, "step": 20346 }, { "epoch": 3.763241033265192, "grad_norm": 1.267439603805542, "learning_rate": 1.4973173025354615e-06, "loss": 1.0239, "step": 20347 }, { "epoch": 3.7634268723285635, "grad_norm": 1.2980300188064575, "learning_rate": 1.49479968683911e-06, "loss": 0.8948, "step": 20348 }, { "epoch": 3.7636127113919344, "grad_norm": 0.9254906177520752, "learning_rate": 1.4922841735672156e-06, "loss": 0.9238, "step": 20349 }, { "epoch": 3.7637985504553058, "grad_norm": 0.9976295828819275, "learning_rate": 1.489770762773468e-06, "loss": 0.8328, "step": 20350 }, { "epoch": 3.7639843895186766, "grad_norm": 0.8693231344223022, "learning_rate": 1.487259454511525e-06, "loss": 0.9526, "step": 20351 }, { "epoch": 3.764170228582048, "grad_norm": 1.0530657768249512, "learning_rate": 1.4847502488349542e-06, "loss": 0.7263, "step": 20352 }, { "epoch": 3.7643560676454193, "grad_norm": 0.9318096041679382, "learning_rate": 1.482243145797335e-06, "loss": 0.9668, "step": 20353 }, { "epoch": 3.76454190670879, "grad_norm": 0.851360559463501, "learning_rate": 1.479738145452181e-06, "loss": 0.8775, "step": 20354 }, { "epoch": 3.764727745772161, "grad_norm": 1.3592497110366821, "learning_rate": 1.4772352478529483e-06, "loss": 1.015, "step": 20355 }, { "epoch": 3.7649135848355324, "grad_norm": 1.0484044551849365, "learning_rate": 1.4747344530530615e-06, "loss": 0.7613, "step": 20356 }, { "epoch": 3.7650994238989037, "grad_norm": 1.0023231506347656, 
"learning_rate": 1.472235761105878e-06, "loss": 0.8241, "step": 20357 }, { "epoch": 3.7652852629622746, "grad_norm": 1.194373369216919, "learning_rate": 1.4697391720647435e-06, "loss": 0.9966, "step": 20358 }, { "epoch": 3.765471102025646, "grad_norm": 0.8711073994636536, "learning_rate": 1.4672446859829492e-06, "loss": 0.7771, "step": 20359 }, { "epoch": 3.765656941089017, "grad_norm": 0.8670382499694824, "learning_rate": 1.46475230291373e-06, "loss": 0.7617, "step": 20360 }, { "epoch": 3.765842780152388, "grad_norm": 0.806544840335846, "learning_rate": 1.4622620229102767e-06, "loss": 0.6538, "step": 20361 }, { "epoch": 3.766028619215759, "grad_norm": 0.8053675889968872, "learning_rate": 1.459773846025747e-06, "loss": 0.5867, "step": 20362 }, { "epoch": 3.7662144582791304, "grad_norm": 0.9925404191017151, "learning_rate": 1.4572877723132316e-06, "loss": 0.93, "step": 20363 }, { "epoch": 3.7664002973425013, "grad_norm": 0.9203683137893677, "learning_rate": 1.4548038018258103e-06, "loss": 0.7603, "step": 20364 }, { "epoch": 3.7665861364058726, "grad_norm": 0.8202987909317017, "learning_rate": 1.4523219346164852e-06, "loss": 0.5888, "step": 20365 }, { "epoch": 3.7667719754692435, "grad_norm": 0.9790657758712769, "learning_rate": 1.449842170738236e-06, "loss": 0.7736, "step": 20366 }, { "epoch": 3.766957814532615, "grad_norm": 1.0166908502578735, "learning_rate": 1.4473645102439981e-06, "loss": 0.9156, "step": 20367 }, { "epoch": 3.767143653595986, "grad_norm": 1.00437331199646, "learning_rate": 1.4448889531866295e-06, "loss": 0.7816, "step": 20368 }, { "epoch": 3.767329492659357, "grad_norm": 0.9228891730308533, "learning_rate": 1.4424154996189765e-06, "loss": 0.7313, "step": 20369 }, { "epoch": 3.767515331722728, "grad_norm": 0.8260902762413025, "learning_rate": 1.4399441495938416e-06, "loss": 0.6848, "step": 20370 }, { "epoch": 3.7677011707860992, "grad_norm": 1.1458709239959717, "learning_rate": 1.43747490316396e-06, "loss": 0.9087, "step": 20371 }, { "epoch": 
3.7678870098494706, "grad_norm": 0.9048810601234436, "learning_rate": 1.4350077603820345e-06, "loss": 0.659, "step": 20372 }, { "epoch": 3.7680728489128414, "grad_norm": 0.9446879029273987, "learning_rate": 1.4325427213007224e-06, "loss": 0.5988, "step": 20373 }, { "epoch": 3.7682586879762123, "grad_norm": 1.0057936906814575, "learning_rate": 1.4300797859726379e-06, "loss": 0.7141, "step": 20374 }, { "epoch": 3.7684445270395837, "grad_norm": 1.0417944192886353, "learning_rate": 1.4276189544503605e-06, "loss": 0.76, "step": 20375 }, { "epoch": 3.768630366102955, "grad_norm": 0.9925251603126526, "learning_rate": 1.4251602267863929e-06, "loss": 0.7363, "step": 20376 }, { "epoch": 3.768816205166326, "grad_norm": 0.9868441820144653, "learning_rate": 1.4227036030332041e-06, "loss": 0.981, "step": 20377 }, { "epoch": 3.769002044229697, "grad_norm": 0.9268509745597839, "learning_rate": 1.4202490832432635e-06, "loss": 0.887, "step": 20378 }, { "epoch": 3.769187883293068, "grad_norm": 0.9644826650619507, "learning_rate": 1.4177966674689292e-06, "loss": 0.7274, "step": 20379 }, { "epoch": 3.7693737223564394, "grad_norm": 0.8194114565849304, "learning_rate": 1.415346355762537e-06, "loss": 0.8135, "step": 20380 }, { "epoch": 3.7695595614198103, "grad_norm": 0.9341859221458435, "learning_rate": 1.4128981481764115e-06, "loss": 0.5956, "step": 20381 }, { "epoch": 3.7697454004831816, "grad_norm": 0.9932578802108765, "learning_rate": 1.4104520447628e-06, "loss": 1.014, "step": 20382 }, { "epoch": 3.769931239546553, "grad_norm": 1.0174542665481567, "learning_rate": 1.4080080455738942e-06, "loss": 0.9121, "step": 20383 }, { "epoch": 3.770117078609924, "grad_norm": 0.8680111765861511, "learning_rate": 1.4055661506618635e-06, "loss": 0.7486, "step": 20384 }, { "epoch": 3.7703029176732947, "grad_norm": 1.0944784879684448, "learning_rate": 1.4031263600788214e-06, "loss": 0.9201, "step": 20385 }, { "epoch": 3.770488756736666, "grad_norm": 0.9446232914924622, "learning_rate": 
1.4006886738768598e-06, "loss": 0.6932, "step": 20386 }, { "epoch": 3.7706745958000374, "grad_norm": 1.0787503719329834, "learning_rate": 1.398253092108004e-06, "loss": 0.9325, "step": 20387 }, { "epoch": 3.7708604348634083, "grad_norm": 0.9129641652107239, "learning_rate": 1.395819614824212e-06, "loss": 0.6115, "step": 20388 }, { "epoch": 3.771046273926779, "grad_norm": 0.9007181525230408, "learning_rate": 1.3933882420774425e-06, "loss": 0.8728, "step": 20389 }, { "epoch": 3.7712321129901505, "grad_norm": 0.9841013550758362, "learning_rate": 1.390958973919587e-06, "loss": 0.9041, "step": 20390 }, { "epoch": 3.771417952053522, "grad_norm": 0.9039537906646729, "learning_rate": 1.3885318104025047e-06, "loss": 0.7337, "step": 20391 }, { "epoch": 3.7716037911168927, "grad_norm": 1.195389747619629, "learning_rate": 1.3861067515779647e-06, "loss": 0.6884, "step": 20392 }, { "epoch": 3.771789630180264, "grad_norm": 0.9455596208572388, "learning_rate": 1.383683797497759e-06, "loss": 0.8562, "step": 20393 }, { "epoch": 3.771975469243635, "grad_norm": 0.9761845469474792, "learning_rate": 1.3812629482135909e-06, "loss": 0.7933, "step": 20394 }, { "epoch": 3.7721613083070062, "grad_norm": 0.9781766533851624, "learning_rate": 1.3788442037771298e-06, "loss": 0.9388, "step": 20395 }, { "epoch": 3.772347147370377, "grad_norm": 0.9669922590255737, "learning_rate": 1.3764275642399904e-06, "loss": 0.9848, "step": 20396 }, { "epoch": 3.7725329864337485, "grad_norm": 1.0537102222442627, "learning_rate": 1.3740130296537646e-06, "loss": 0.8952, "step": 20397 }, { "epoch": 3.7727188254971193, "grad_norm": 0.9086155891418457, "learning_rate": 1.3716006000699888e-06, "loss": 0.7, "step": 20398 }, { "epoch": 3.7729046645604907, "grad_norm": 1.0693638324737549, "learning_rate": 1.3691902755401442e-06, "loss": 0.9063, "step": 20399 }, { "epoch": 3.7730905036238616, "grad_norm": 0.9978718757629395, "learning_rate": 1.3667820561156563e-06, "loss": 0.9654, "step": 20400 }, { "epoch": 
3.773276342687233, "grad_norm": 0.9386802911758423, "learning_rate": 1.3643759418479618e-06, "loss": 0.7988, "step": 20401 }, { "epoch": 3.773462181750604, "grad_norm": 0.9663328528404236, "learning_rate": 1.3619719327883974e-06, "loss": 0.922, "step": 20402 }, { "epoch": 3.773648020813975, "grad_norm": 0.7999980449676514, "learning_rate": 1.3595700289882774e-06, "loss": 0.6153, "step": 20403 }, { "epoch": 3.773833859877346, "grad_norm": 1.1055376529693604, "learning_rate": 1.3571702304988388e-06, "loss": 0.8979, "step": 20404 }, { "epoch": 3.7740196989407173, "grad_norm": 1.031434178352356, "learning_rate": 1.3547725373713405e-06, "loss": 0.7908, "step": 20405 }, { "epoch": 3.7742055380040886, "grad_norm": 0.8867281079292297, "learning_rate": 1.3523769496569417e-06, "loss": 0.9089, "step": 20406 }, { "epoch": 3.7743913770674595, "grad_norm": 0.9022477865219116, "learning_rate": 1.349983467406768e-06, "loss": 0.7944, "step": 20407 }, { "epoch": 3.774577216130831, "grad_norm": 1.0239287614822388, "learning_rate": 1.3475920906719008e-06, "loss": 1.1125, "step": 20408 }, { "epoch": 3.7747630551942017, "grad_norm": 0.8552660346031189, "learning_rate": 1.3452028195033883e-06, "loss": 0.9971, "step": 20409 }, { "epoch": 3.774948894257573, "grad_norm": 0.9268264770507812, "learning_rate": 1.342815653952234e-06, "loss": 0.7839, "step": 20410 }, { "epoch": 3.775134733320944, "grad_norm": 1.1092424392700195, "learning_rate": 1.340430594069364e-06, "loss": 0.9033, "step": 20411 }, { "epoch": 3.7753205723843153, "grad_norm": 0.9117311835289001, "learning_rate": 1.338047639905704e-06, "loss": 0.7707, "step": 20412 }, { "epoch": 3.775506411447686, "grad_norm": 1.024247646331787, "learning_rate": 1.3356667915121025e-06, "loss": 0.8204, "step": 20413 }, { "epoch": 3.7756922505110575, "grad_norm": 0.9615598320960999, "learning_rate": 1.333288048939385e-06, "loss": 0.7454, "step": 20414 }, { "epoch": 3.7758780895744284, "grad_norm": 0.9834153652191162, "learning_rate": 
1.3309114122383004e-06, "loss": 0.8387, "step": 20415 }, { "epoch": 3.7760639286377997, "grad_norm": 1.088229775428772, "learning_rate": 1.3285368814596078e-06, "loss": 0.7928, "step": 20416 }, { "epoch": 3.776249767701171, "grad_norm": 1.0894205570220947, "learning_rate": 1.3261644566539443e-06, "loss": 0.9444, "step": 20417 }, { "epoch": 3.776435606764542, "grad_norm": 0.9492201805114746, "learning_rate": 1.323794137871992e-06, "loss": 0.8596, "step": 20418 }, { "epoch": 3.776621445827913, "grad_norm": 1.5879576206207275, "learning_rate": 1.32142592516431e-06, "loss": 0.8944, "step": 20419 }, { "epoch": 3.776807284891284, "grad_norm": 0.9181216359138489, "learning_rate": 1.3190598185814474e-06, "loss": 0.9116, "step": 20420 }, { "epoch": 3.7769931239546555, "grad_norm": 1.0042266845703125, "learning_rate": 1.3166958181739186e-06, "loss": 1.0265, "step": 20421 }, { "epoch": 3.7771789630180264, "grad_norm": 0.8743492960929871, "learning_rate": 1.3143339239921727e-06, "loss": 0.7216, "step": 20422 }, { "epoch": 3.7773648020813972, "grad_norm": 0.9112885594367981, "learning_rate": 1.3119741360866134e-06, "loss": 0.8286, "step": 20423 }, { "epoch": 3.7775506411447686, "grad_norm": 1.1501792669296265, "learning_rate": 1.3096164545076007e-06, "loss": 0.8404, "step": 20424 }, { "epoch": 3.77773648020814, "grad_norm": 0.8658446073532104, "learning_rate": 1.307260879305483e-06, "loss": 0.6555, "step": 20425 }, { "epoch": 3.777922319271511, "grad_norm": 0.9965296387672424, "learning_rate": 1.3049074105304982e-06, "loss": 0.7141, "step": 20426 }, { "epoch": 3.778108158334882, "grad_norm": 1.0920850038528442, "learning_rate": 1.302556048232917e-06, "loss": 0.79, "step": 20427 }, { "epoch": 3.778293997398253, "grad_norm": 0.9294142127037048, "learning_rate": 1.3002067924628992e-06, "loss": 0.8282, "step": 20428 }, { "epoch": 3.7784798364616243, "grad_norm": 0.8633919954299927, "learning_rate": 1.2978596432705825e-06, "loss": 0.9949, "step": 20429 }, { "epoch": 
3.778665675524995, "grad_norm": 0.9597409963607788, "learning_rate": 1.2955146007060826e-06, "loss": 0.739, "step": 20430 }, { "epoch": 3.7788515145883665, "grad_norm": 1.1199076175689697, "learning_rate": 1.293171664819437e-06, "loss": 0.9229, "step": 20431 }, { "epoch": 3.7790373536517374, "grad_norm": 1.026311993598938, "learning_rate": 1.2908308356606503e-06, "loss": 0.9393, "step": 20432 }, { "epoch": 3.7792231927151088, "grad_norm": 1.0156198740005493, "learning_rate": 1.2884921132796934e-06, "loss": 0.8739, "step": 20433 }, { "epoch": 3.7794090317784796, "grad_norm": 0.968630313873291, "learning_rate": 1.2861554977264712e-06, "loss": 0.8528, "step": 20434 }, { "epoch": 3.779594870841851, "grad_norm": 1.049005389213562, "learning_rate": 1.2838209890508657e-06, "loss": 0.6951, "step": 20435 }, { "epoch": 3.7797807099052223, "grad_norm": 1.0795906782150269, "learning_rate": 1.2814885873026927e-06, "loss": 0.8106, "step": 20436 }, { "epoch": 3.779966548968593, "grad_norm": 0.8436425924301147, "learning_rate": 1.2791582925317348e-06, "loss": 0.7104, "step": 20437 }, { "epoch": 3.780152388031964, "grad_norm": 0.9369443655014038, "learning_rate": 1.2768301047877406e-06, "loss": 0.8956, "step": 20438 }, { "epoch": 3.7803382270953354, "grad_norm": 1.007063865661621, "learning_rate": 1.2745040241203933e-06, "loss": 0.9896, "step": 20439 }, { "epoch": 3.7805240661587067, "grad_norm": 0.9993696808815002, "learning_rate": 1.2721800505793302e-06, "loss": 0.8909, "step": 20440 }, { "epoch": 3.7807099052220776, "grad_norm": 0.9917848706245422, "learning_rate": 1.2698581842141567e-06, "loss": 0.6445, "step": 20441 }, { "epoch": 3.780895744285449, "grad_norm": 1.1443630456924438, "learning_rate": 1.2675384250744437e-06, "loss": 0.6763, "step": 20442 }, { "epoch": 3.78108158334882, "grad_norm": 0.9800666570663452, "learning_rate": 1.2652207732096966e-06, "loss": 0.8198, "step": 20443 }, { "epoch": 3.781267422412191, "grad_norm": 1.080905556678772, "learning_rate": 
1.2629052286693533e-06, "loss": 0.9398, "step": 20444 }, { "epoch": 3.781453261475562, "grad_norm": 0.9579738974571228, "learning_rate": 1.2605917915028742e-06, "loss": 0.6774, "step": 20445 }, { "epoch": 3.7816391005389334, "grad_norm": 1.017539620399475, "learning_rate": 1.25828046175962e-06, "loss": 1.009, "step": 20446 }, { "epoch": 3.7818249396023043, "grad_norm": 0.8087149262428284, "learning_rate": 1.2559712394889179e-06, "loss": 0.6337, "step": 20447 }, { "epoch": 3.7820107786656756, "grad_norm": 0.9919185042381287, "learning_rate": 1.2536641247400616e-06, "loss": 0.9481, "step": 20448 }, { "epoch": 3.7821966177290465, "grad_norm": 0.8533022403717041, "learning_rate": 1.2513591175622785e-06, "loss": 0.8498, "step": 20449 }, { "epoch": 3.782382456792418, "grad_norm": 1.1120437383651733, "learning_rate": 1.249056218004785e-06, "loss": 0.7406, "step": 20450 }, { "epoch": 3.782568295855789, "grad_norm": 0.973164439201355, "learning_rate": 1.2467554261167192e-06, "loss": 0.8322, "step": 20451 }, { "epoch": 3.78275413491916, "grad_norm": 0.8706233501434326, "learning_rate": 1.2444567419471976e-06, "loss": 0.894, "step": 20452 }, { "epoch": 3.782939973982531, "grad_norm": 0.7955963611602783, "learning_rate": 1.2421601655452698e-06, "loss": 0.78, "step": 20453 }, { "epoch": 3.7831258130459022, "grad_norm": 1.0311912298202515, "learning_rate": 1.239865696959963e-06, "loss": 0.8277, "step": 20454 }, { "epoch": 3.7833116521092736, "grad_norm": 0.847622275352478, "learning_rate": 1.2375733362402387e-06, "loss": 0.763, "step": 20455 }, { "epoch": 3.7834974911726444, "grad_norm": 0.9051774740219116, "learning_rate": 1.2352830834350238e-06, "loss": 0.7094, "step": 20456 }, { "epoch": 3.7836833302360158, "grad_norm": 0.8949741125106812, "learning_rate": 1.2329949385932127e-06, "loss": 0.7451, "step": 20457 }, { "epoch": 3.7838691692993867, "grad_norm": 1.043743371963501, "learning_rate": 1.2307089017636332e-06, "loss": 0.7586, "step": 20458 }, { "epoch": 3.784055008362758, 
"grad_norm": 0.8539413809776306, "learning_rate": 1.2284249729950792e-06, "loss": 0.9639, "step": 20459 }, { "epoch": 3.784240847426129, "grad_norm": 0.8604346513748169, "learning_rate": 1.22614315233629e-06, "loss": 0.7509, "step": 20460 }, { "epoch": 3.7844266864895, "grad_norm": 2.567366361618042, "learning_rate": 1.2238634398359706e-06, "loss": 1.1025, "step": 20461 }, { "epoch": 3.784612525552871, "grad_norm": 0.9127428531646729, "learning_rate": 1.2215858355427824e-06, "loss": 0.7166, "step": 20462 }, { "epoch": 3.7847983646162424, "grad_norm": 0.966698944568634, "learning_rate": 1.2193103395053418e-06, "loss": 0.7546, "step": 20463 }, { "epoch": 3.7849842036796133, "grad_norm": 1.4357216358184814, "learning_rate": 1.2170369517721991e-06, "loss": 1.1201, "step": 20464 }, { "epoch": 3.7851700427429846, "grad_norm": 1.5461227893829346, "learning_rate": 1.2147656723918821e-06, "loss": 1.4152, "step": 20465 }, { "epoch": 3.785355881806356, "grad_norm": 1.0198841094970703, "learning_rate": 1.212496501412874e-06, "loss": 0.9104, "step": 20466 }, { "epoch": 3.785541720869727, "grad_norm": 0.8557313084602356, "learning_rate": 1.2102294388836033e-06, "loss": 0.8077, "step": 20467 }, { "epoch": 3.7857275599330977, "grad_norm": 0.818263053894043, "learning_rate": 1.207964484852453e-06, "loss": 0.8271, "step": 20468 }, { "epoch": 3.785913398996469, "grad_norm": 1.010506272315979, "learning_rate": 1.2057016393677623e-06, "loss": 0.7292, "step": 20469 }, { "epoch": 3.7860992380598404, "grad_norm": 1.0275390148162842, "learning_rate": 1.203440902477837e-06, "loss": 0.8306, "step": 20470 }, { "epoch": 3.7862850771232113, "grad_norm": 0.9307306408882141, "learning_rate": 1.2011822742309275e-06, "loss": 0.7356, "step": 20471 }, { "epoch": 3.786470916186582, "grad_norm": 0.8827458024024963, "learning_rate": 1.1989257546752285e-06, "loss": 0.879, "step": 20472 }, { "epoch": 3.7866567552499535, "grad_norm": 0.9304745197296143, "learning_rate": 1.1966713438589128e-06, "loss": 
0.787, "step": 20473 }, { "epoch": 3.786842594313325, "grad_norm": 0.8192884922027588, "learning_rate": 1.1944190418301083e-06, "loss": 0.5985, "step": 20474 }, { "epoch": 3.7870284333766957, "grad_norm": 0.9939797520637512, "learning_rate": 1.1921688486368543e-06, "loss": 0.9617, "step": 20475 }, { "epoch": 3.787214272440067, "grad_norm": 0.9714579582214355, "learning_rate": 1.1899207643272014e-06, "loss": 0.725, "step": 20476 }, { "epoch": 3.787400111503438, "grad_norm": 0.8307166695594788, "learning_rate": 1.1876747889491223e-06, "loss": 0.6712, "step": 20477 }, { "epoch": 3.7875859505668092, "grad_norm": 0.8596906065940857, "learning_rate": 1.1854309225505566e-06, "loss": 0.7358, "step": 20478 }, { "epoch": 3.78777178963018, "grad_norm": 0.865994930267334, "learning_rate": 1.1831891651793992e-06, "loss": 0.898, "step": 20479 }, { "epoch": 3.7879576286935515, "grad_norm": 0.9152920842170715, "learning_rate": 1.1809495168834783e-06, "loss": 0.7202, "step": 20480 }, { "epoch": 3.7881434677569223, "grad_norm": 0.860379159450531, "learning_rate": 1.1787119777106225e-06, "loss": 0.8855, "step": 20481 }, { "epoch": 3.7883293068202937, "grad_norm": 0.9607135057449341, "learning_rate": 1.1764765477085827e-06, "loss": 0.7073, "step": 20482 }, { "epoch": 3.7885151458836646, "grad_norm": 0.8664621710777283, "learning_rate": 1.1742432269250536e-06, "loss": 0.775, "step": 20483 }, { "epoch": 3.788700984947036, "grad_norm": 0.7738723158836365, "learning_rate": 1.1720120154077085e-06, "loss": 0.6682, "step": 20484 }, { "epoch": 3.788886824010407, "grad_norm": 1.108939290046692, "learning_rate": 1.169782913204176e-06, "loss": 0.7816, "step": 20485 }, { "epoch": 3.789072663073778, "grad_norm": 0.9274181723594666, "learning_rate": 1.167555920362029e-06, "loss": 0.9537, "step": 20486 }, { "epoch": 3.789258502137149, "grad_norm": 0.9644546508789062, "learning_rate": 1.1653310369287962e-06, "loss": 0.9466, "step": 20487 }, { "epoch": 3.7894443412005203, "grad_norm": 
0.9580609202384949, "learning_rate": 1.163108262951962e-06, "loss": 0.6057, "step": 20488 }, { "epoch": 3.7896301802638916, "grad_norm": 1.0273137092590332, "learning_rate": 1.1608875984789659e-06, "loss": 0.7508, "step": 20489 }, { "epoch": 3.7898160193272625, "grad_norm": 0.8549194931983948, "learning_rate": 1.1586690435572256e-06, "loss": 0.9798, "step": 20490 }, { "epoch": 3.790001858390634, "grad_norm": 1.169147253036499, "learning_rate": 1.1564525982340702e-06, "loss": 0.831, "step": 20491 }, { "epoch": 3.7901876974540047, "grad_norm": 0.8162899613380432, "learning_rate": 1.154238262556795e-06, "loss": 0.7717, "step": 20492 }, { "epoch": 3.790373536517376, "grad_norm": 1.4703549146652222, "learning_rate": 1.1520260365726954e-06, "loss": 1.0135, "step": 20493 }, { "epoch": 3.790559375580747, "grad_norm": 1.3136372566223145, "learning_rate": 1.1498159203289672e-06, "loss": 0.8336, "step": 20494 }, { "epoch": 3.7907452146441183, "grad_norm": 0.9716363549232483, "learning_rate": 1.1476079138727836e-06, "loss": 0.8506, "step": 20495 }, { "epoch": 3.790931053707489, "grad_norm": 1.0611934661865234, "learning_rate": 1.1454020172512737e-06, "loss": 0.7567, "step": 20496 }, { "epoch": 3.7911168927708605, "grad_norm": 0.9886415600776672, "learning_rate": 1.1431982305115108e-06, "loss": 0.918, "step": 20497 }, { "epoch": 3.7913027318342314, "grad_norm": 0.9046659469604492, "learning_rate": 1.1409965537005463e-06, "loss": 0.829, "step": 20498 }, { "epoch": 3.7914885708976027, "grad_norm": 1.3718743324279785, "learning_rate": 1.1387969868653537e-06, "loss": 0.6962, "step": 20499 }, { "epoch": 3.791674409960974, "grad_norm": 1.355650782585144, "learning_rate": 1.1365995300528841e-06, "loss": 1.0893, "step": 20500 }, { "epoch": 3.791860249024345, "grad_norm": 0.8272162675857544, "learning_rate": 1.134404183310045e-06, "loss": 0.6227, "step": 20501 }, { "epoch": 3.792046088087716, "grad_norm": 0.842643141746521, "learning_rate": 1.1322109466836871e-06, "loss": 0.8684, 
"step": 20502 }, { "epoch": 3.792231927151087, "grad_norm": 0.9802500009536743, "learning_rate": 1.130019820220629e-06, "loss": 0.8027, "step": 20503 }, { "epoch": 3.7924177662144585, "grad_norm": 0.9252774715423584, "learning_rate": 1.1278308039676221e-06, "loss": 0.7099, "step": 20504 }, { "epoch": 3.7926036052778294, "grad_norm": 0.9800246953964233, "learning_rate": 1.1256438979713958e-06, "loss": 0.7664, "step": 20505 }, { "epoch": 3.7927894443412002, "grad_norm": 0.9181430339813232, "learning_rate": 1.1234591022786345e-06, "loss": 0.8109, "step": 20506 }, { "epoch": 3.7929752834045716, "grad_norm": 1.1357393264770508, "learning_rate": 1.121276416935957e-06, "loss": 0.6259, "step": 20507 }, { "epoch": 3.793161122467943, "grad_norm": 1.1219927072525024, "learning_rate": 1.1190958419899478e-06, "loss": 0.8326, "step": 20508 }, { "epoch": 3.793346961531314, "grad_norm": 1.0360108613967896, "learning_rate": 1.1169173774871478e-06, "loss": 1.0121, "step": 20509 }, { "epoch": 3.793532800594685, "grad_norm": 0.9814891219139099, "learning_rate": 1.114741023474053e-06, "loss": 0.716, "step": 20510 }, { "epoch": 3.793718639658056, "grad_norm": 1.0262317657470703, "learning_rate": 1.1125667799971262e-06, "loss": 0.8805, "step": 20511 }, { "epoch": 3.7939044787214273, "grad_norm": 0.9185773134231567, "learning_rate": 1.1103946471027637e-06, "loss": 0.8587, "step": 20512 }, { "epoch": 3.794090317784798, "grad_norm": 0.8239337801933289, "learning_rate": 1.1082246248373285e-06, "loss": 0.9026, "step": 20513 }, { "epoch": 3.7942761568481695, "grad_norm": 1.0945147275924683, "learning_rate": 1.1060567132471279e-06, "loss": 0.9878, "step": 20514 }, { "epoch": 3.794461995911541, "grad_norm": 1.008570909500122, "learning_rate": 1.103890912378447e-06, "loss": 0.9664, "step": 20515 }, { "epoch": 3.7946478349749118, "grad_norm": 0.9568138718605042, "learning_rate": 1.101727222277482e-06, "loss": 0.8537, "step": 20516 }, { "epoch": 3.7948336740382826, "grad_norm": 0.9105019569396973, 
"learning_rate": 1.0995656429904523e-06, "loss": 0.8228, "step": 20517 }, { "epoch": 3.795019513101654, "grad_norm": 0.9967132210731506, "learning_rate": 1.0974061745634646e-06, "loss": 1.0734, "step": 20518 }, { "epoch": 3.7952053521650253, "grad_norm": 1.0719634294509888, "learning_rate": 1.0952488170426268e-06, "loss": 0.7706, "step": 20519 }, { "epoch": 3.795391191228396, "grad_norm": 0.9113902449607849, "learning_rate": 1.0930935704739576e-06, "loss": 0.7723, "step": 20520 }, { "epoch": 3.795577030291767, "grad_norm": 1.1396875381469727, "learning_rate": 1.0909404349034868e-06, "loss": 0.7241, "step": 20521 }, { "epoch": 3.7957628693551384, "grad_norm": 0.885818600654602, "learning_rate": 1.0887894103771668e-06, "loss": 0.7088, "step": 20522 }, { "epoch": 3.7959487084185097, "grad_norm": 0.8519472479820251, "learning_rate": 1.0866404969408938e-06, "loss": 0.8594, "step": 20523 }, { "epoch": 3.7961345474818806, "grad_norm": 0.8347841501235962, "learning_rate": 1.0844936946405203e-06, "loss": 0.7616, "step": 20524 }, { "epoch": 3.796320386545252, "grad_norm": 1.0317564010620117, "learning_rate": 1.0823490035218987e-06, "loss": 0.6993, "step": 20525 }, { "epoch": 3.796506225608623, "grad_norm": 1.1278623342514038, "learning_rate": 1.0802064236307917e-06, "loss": 1.0551, "step": 20526 }, { "epoch": 3.796692064671994, "grad_norm": 0.937333345413208, "learning_rate": 1.0780659550129191e-06, "loss": 0.84, "step": 20527 }, { "epoch": 3.796877903735365, "grad_norm": 0.841964602470398, "learning_rate": 1.075927597713966e-06, "loss": 0.7571, "step": 20528 }, { "epoch": 3.7970637427987364, "grad_norm": 1.0166205167770386, "learning_rate": 1.073791351779596e-06, "loss": 0.8675, "step": 20529 }, { "epoch": 3.7972495818621073, "grad_norm": 1.0265706777572632, "learning_rate": 1.071657217255373e-06, "loss": 0.7984, "step": 20530 }, { "epoch": 3.7974354209254786, "grad_norm": 0.9770160913467407, "learning_rate": 1.069525194186849e-06, "loss": 0.7979, "step": 20531 }, { 
"epoch": 3.7976212599888495, "grad_norm": 0.9987168908119202, "learning_rate": 1.0673952826195432e-06, "loss": 0.9849, "step": 20532 }, { "epoch": 3.797807099052221, "grad_norm": 0.8842899799346924, "learning_rate": 1.0652674825989196e-06, "loss": 0.8466, "step": 20533 }, { "epoch": 3.797992938115592, "grad_norm": 0.9251445531845093, "learning_rate": 1.0631417941703858e-06, "loss": 0.9717, "step": 20534 }, { "epoch": 3.798178777178963, "grad_norm": 1.066029667854309, "learning_rate": 1.0610182173792948e-06, "loss": 0.8857, "step": 20535 }, { "epoch": 3.798364616242334, "grad_norm": 0.9665650129318237, "learning_rate": 1.058896752270988e-06, "loss": 0.9257, "step": 20536 }, { "epoch": 3.7985504553057052, "grad_norm": 1.0575613975524902, "learning_rate": 1.0567773988907514e-06, "loss": 0.8657, "step": 20537 }, { "epoch": 3.7987362943690766, "grad_norm": 0.8713177442550659, "learning_rate": 1.0546601572837933e-06, "loss": 0.8402, "step": 20538 }, { "epoch": 3.7989221334324474, "grad_norm": 1.0530487298965454, "learning_rate": 1.0525450274953218e-06, "loss": 0.6614, "step": 20539 }, { "epoch": 3.7991079724958188, "grad_norm": 1.0554367303848267, "learning_rate": 1.050432009570479e-06, "loss": 0.8807, "step": 20540 }, { "epoch": 3.7992938115591897, "grad_norm": 0.8764644861221313, "learning_rate": 1.0483211035543504e-06, "loss": 0.8133, "step": 20541 }, { "epoch": 3.799479650622561, "grad_norm": 1.0848758220672607, "learning_rate": 1.0462123094920117e-06, "loss": 0.8538, "step": 20542 }, { "epoch": 3.799665489685932, "grad_norm": 0.9339497685432434, "learning_rate": 1.04410562742846e-06, "loss": 0.9361, "step": 20543 }, { "epoch": 3.799851328749303, "grad_norm": 1.0878101587295532, "learning_rate": 1.0420010574086481e-06, "loss": 0.7417, "step": 20544 }, { "epoch": 3.800037167812674, "grad_norm": 0.9283502697944641, "learning_rate": 1.0398985994775069e-06, "loss": 0.8909, "step": 20545 }, { "epoch": 3.8002230068760454, "grad_norm": 0.9685944318771362, "learning_rate": 
1.0377982536799224e-06, "loss": 0.8446, "step": 20546 }, { "epoch": 3.8004088459394163, "grad_norm": 1.0181951522827148, "learning_rate": 1.0357000200606926e-06, "loss": 0.7759, "step": 20547 }, { "epoch": 3.8005946850027876, "grad_norm": 1.517647624015808, "learning_rate": 1.0336038986646258e-06, "loss": 1.0891, "step": 20548 }, { "epoch": 3.800780524066159, "grad_norm": 0.9938087463378906, "learning_rate": 1.0315098895364416e-06, "loss": 0.8461, "step": 20549 }, { "epoch": 3.80096636312953, "grad_norm": 0.9021065831184387, "learning_rate": 1.029417992720849e-06, "loss": 0.7976, "step": 20550 }, { "epoch": 3.8011522021929007, "grad_norm": 0.8116894364356995, "learning_rate": 1.0273282082624901e-06, "loss": 0.8055, "step": 20551 }, { "epoch": 3.801338041256272, "grad_norm": 0.9933568835258484, "learning_rate": 1.0252405362059514e-06, "loss": 0.7171, "step": 20552 }, { "epoch": 3.8015238803196434, "grad_norm": 0.7740741968154907, "learning_rate": 1.0231549765958192e-06, "loss": 0.493, "step": 20553 }, { "epoch": 3.8017097193830143, "grad_norm": 0.8002797365188599, "learning_rate": 1.0210715294765916e-06, "loss": 0.8268, "step": 20554 }, { "epoch": 3.801895558446385, "grad_norm": 0.9246355891227722, "learning_rate": 1.0189901948927438e-06, "loss": 0.8769, "step": 20555 }, { "epoch": 3.8020813975097565, "grad_norm": 1.1914242506027222, "learning_rate": 1.0169109728886849e-06, "loss": 0.9132, "step": 20556 }, { "epoch": 3.802267236573128, "grad_norm": 1.0889278650283813, "learning_rate": 1.0148338635088018e-06, "loss": 0.7392, "step": 20557 }, { "epoch": 3.8024530756364987, "grad_norm": 1.1419200897216797, "learning_rate": 1.0127588667974253e-06, "loss": 0.868, "step": 20558 }, { "epoch": 3.80263891469987, "grad_norm": 0.9914236068725586, "learning_rate": 1.0106859827988424e-06, "loss": 0.8623, "step": 20559 }, { "epoch": 3.802824753763241, "grad_norm": 1.0282906293869019, "learning_rate": 1.0086152115572843e-06, "loss": 0.9744, "step": 20560 }, { "epoch": 
3.8030105928266122, "grad_norm": 1.0758063793182373, "learning_rate": 1.0065465531169715e-06, "loss": 0.8696, "step": 20561 }, { "epoch": 3.803196431889983, "grad_norm": 0.7498318552970886, "learning_rate": 1.0044800075220352e-06, "loss": 0.6165, "step": 20562 }, { "epoch": 3.8033822709533545, "grad_norm": 0.7701108455657959, "learning_rate": 1.0024155748165953e-06, "loss": 0.8269, "step": 20563 }, { "epoch": 3.803568110016726, "grad_norm": 1.0195519924163818, "learning_rate": 1.0003532550446949e-06, "loss": 0.8114, "step": 20564 }, { "epoch": 3.8037539490800967, "grad_norm": 0.8638911247253418, "learning_rate": 9.98293048250376e-07, "loss": 0.6922, "step": 20565 }, { "epoch": 3.8039397881434676, "grad_norm": 1.0542092323303223, "learning_rate": 9.96234954477604e-07, "loss": 0.7888, "step": 20566 }, { "epoch": 3.804125627206839, "grad_norm": 0.9138762950897217, "learning_rate": 9.94178973770299e-07, "loss": 0.7489, "step": 20567 }, { "epoch": 3.80431146627021, "grad_norm": 0.6343844532966614, "learning_rate": 9.921251061723257e-07, "loss": 0.444, "step": 20568 }, { "epoch": 3.804497305333581, "grad_norm": 0.8197211027145386, "learning_rate": 9.900733517275495e-07, "loss": 0.6423, "step": 20569 }, { "epoch": 3.804683144396952, "grad_norm": 0.9333930611610413, "learning_rate": 9.880237104797575e-07, "loss": 0.7554, "step": 20570 }, { "epoch": 3.8048689834603233, "grad_norm": 0.8915857672691345, "learning_rate": 9.8597618247267e-07, "loss": 0.733, "step": 20571 }, { "epoch": 3.8050548225236946, "grad_norm": 0.8643396496772766, "learning_rate": 9.839307677500186e-07, "loss": 0.8303, "step": 20572 }, { "epoch": 3.8052406615870655, "grad_norm": 0.8911091685295105, "learning_rate": 9.818874663554357e-07, "loss": 0.8417, "step": 20573 }, { "epoch": 3.805426500650437, "grad_norm": 0.9695454835891724, "learning_rate": 9.798462783325524e-07, "loss": 0.8411, "step": 20574 }, { "epoch": 3.8056123397138077, "grad_norm": 0.6776549816131592, "learning_rate": 9.778072037249342e-07, 
"loss": 0.4747, "step": 20575 }, { "epoch": 3.805798178777179, "grad_norm": 0.903198778629303, "learning_rate": 9.757702425760685e-07, "loss": 0.7752, "step": 20576 }, { "epoch": 3.80598401784055, "grad_norm": 1.0293703079223633, "learning_rate": 9.737353949294759e-07, "loss": 1.0133, "step": 20577 }, { "epoch": 3.8061698569039213, "grad_norm": 1.0670793056488037, "learning_rate": 9.717026608285552e-07, "loss": 1.1066, "step": 20578 }, { "epoch": 3.806355695967292, "grad_norm": 1.0591223239898682, "learning_rate": 9.696720403167048e-07, "loss": 0.8048, "step": 20579 }, { "epoch": 3.8065415350306635, "grad_norm": 0.9527950286865234, "learning_rate": 9.67643533437257e-07, "loss": 0.8519, "step": 20580 }, { "epoch": 3.8067273740940344, "grad_norm": 1.242479681968689, "learning_rate": 9.656171402335213e-07, "loss": 0.6964, "step": 20581 }, { "epoch": 3.8069132131574057, "grad_norm": 0.9502848982810974, "learning_rate": 9.635928607487298e-07, "loss": 0.9544, "step": 20582 }, { "epoch": 3.807099052220777, "grad_norm": 0.8187753558158875, "learning_rate": 9.615706950260928e-07, "loss": 0.7997, "step": 20583 }, { "epoch": 3.807284891284148, "grad_norm": 1.0020241737365723, "learning_rate": 9.595506431087752e-07, "loss": 0.7883, "step": 20584 }, { "epoch": 3.807470730347519, "grad_norm": 0.8587465882301331, "learning_rate": 9.575327050398875e-07, "loss": 0.9517, "step": 20585 }, { "epoch": 3.80765656941089, "grad_norm": 0.9860709309577942, "learning_rate": 9.55516880862506e-07, "loss": 0.769, "step": 20586 }, { "epoch": 3.8078424084742615, "grad_norm": 0.9654290080070496, "learning_rate": 9.535031706196517e-07, "loss": 0.9548, "step": 20587 }, { "epoch": 3.8080282475376324, "grad_norm": 0.8824122548103333, "learning_rate": 9.514915743542907e-07, "loss": 0.9232, "step": 20588 }, { "epoch": 3.8082140866010037, "grad_norm": 1.4160360097885132, "learning_rate": 9.494820921093772e-07, "loss": 0.8105, "step": 20589 }, { "epoch": 3.8083999256643746, "grad_norm": 
0.8777419924736023, "learning_rate": 9.474747239277993e-07, "loss": 0.7099, "step": 20590 }, { "epoch": 3.808585764727746, "grad_norm": 1.0006147623062134, "learning_rate": 9.45469469852378e-07, "loss": 0.8877, "step": 20591 }, { "epoch": 3.808771603791117, "grad_norm": 1.0253385305404663, "learning_rate": 9.434663299259239e-07, "loss": 0.7588, "step": 20592 }, { "epoch": 3.808957442854488, "grad_norm": 1.6540416479110718, "learning_rate": 9.414653041912025e-07, "loss": 0.6916, "step": 20593 }, { "epoch": 3.809143281917859, "grad_norm": 0.8838813900947571, "learning_rate": 9.39466392690913e-07, "loss": 0.8394, "step": 20594 }, { "epoch": 3.8093291209812303, "grad_norm": 0.9816716909408569, "learning_rate": 9.374695954677104e-07, "loss": 0.7785, "step": 20595 }, { "epoch": 3.809514960044601, "grad_norm": 1.0879133939743042, "learning_rate": 9.35474912564227e-07, "loss": 0.8887, "step": 20596 }, { "epoch": 3.8097007991079725, "grad_norm": 0.8840386271476746, "learning_rate": 9.33482344023029e-07, "loss": 0.7582, "step": 20597 }, { "epoch": 3.809886638171344, "grad_norm": 0.9541440606117249, "learning_rate": 9.314918898866487e-07, "loss": 0.9832, "step": 20598 }, { "epoch": 3.8100724772347148, "grad_norm": 1.02316153049469, "learning_rate": 9.295035501975636e-07, "loss": 0.862, "step": 20599 }, { "epoch": 3.8102583162980856, "grad_norm": 0.8722786903381348, "learning_rate": 9.275173249982061e-07, "loss": 0.8949, "step": 20600 }, { "epoch": 3.810444155361457, "grad_norm": 0.8821304440498352, "learning_rate": 9.25533214330987e-07, "loss": 0.7753, "step": 20601 }, { "epoch": 3.8106299944248283, "grad_norm": 0.8481576442718506, "learning_rate": 9.235512182382388e-07, "loss": 0.7523, "step": 20602 }, { "epoch": 3.810815833488199, "grad_norm": 1.0084933042526245, "learning_rate": 9.215713367622613e-07, "loss": 0.9815, "step": 20603 }, { "epoch": 3.81100167255157, "grad_norm": 0.9282767176628113, "learning_rate": 9.195935699453207e-07, "loss": 0.716, "step": 20604 }, { 
"epoch": 3.8111875116149414, "grad_norm": 1.095992922782898, "learning_rate": 9.176179178296385e-07, "loss": 0.7956, "step": 20605 }, { "epoch": 3.8113733506783127, "grad_norm": 1.1294467449188232, "learning_rate": 9.156443804573588e-07, "loss": 0.8014, "step": 20606 }, { "epoch": 3.8115591897416836, "grad_norm": 1.0914392471313477, "learning_rate": 9.13672957870615e-07, "loss": 1.0108, "step": 20607 }, { "epoch": 3.811745028805055, "grad_norm": 0.9797545075416565, "learning_rate": 9.117036501114839e-07, "loss": 0.7969, "step": 20608 }, { "epoch": 3.811930867868426, "grad_norm": 0.924508273601532, "learning_rate": 9.09736457221999e-07, "loss": 0.8286, "step": 20609 }, { "epoch": 3.812116706931797, "grad_norm": 1.0149329900741577, "learning_rate": 9.077713792441489e-07, "loss": 0.7124, "step": 20610 }, { "epoch": 3.812302545995168, "grad_norm": 1.2102501392364502, "learning_rate": 9.058084162198555e-07, "loss": 1.0784, "step": 20611 }, { "epoch": 3.8124883850585394, "grad_norm": 0.8796945810317993, "learning_rate": 9.038475681910296e-07, "loss": 0.7931, "step": 20612 }, { "epoch": 3.8126742241219103, "grad_norm": 1.0211964845657349, "learning_rate": 9.01888835199538e-07, "loss": 0.7759, "step": 20613 }, { "epoch": 3.8128600631852816, "grad_norm": 0.9216942191123962, "learning_rate": 8.999322172871583e-07, "loss": 0.9624, "step": 20614 }, { "epoch": 3.8130459022486525, "grad_norm": 1.1108382940292358, "learning_rate": 8.97977714495657e-07, "loss": 0.9636, "step": 20615 }, { "epoch": 3.813231741312024, "grad_norm": 1.3374735116958618, "learning_rate": 8.960253268667673e-07, "loss": 0.8246, "step": 20616 }, { "epoch": 3.813417580375395, "grad_norm": 0.8724303245544434, "learning_rate": 8.94075054442145e-07, "loss": 0.6753, "step": 20617 }, { "epoch": 3.813603419438766, "grad_norm": 0.8837342858314514, "learning_rate": 8.921268972634123e-07, "loss": 0.8393, "step": 20618 }, { "epoch": 3.813789258502137, "grad_norm": 0.996241569519043, "learning_rate": 
8.90180855372158e-07, "loss": 0.8471, "step": 20619 }, { "epoch": 3.8139750975655082, "grad_norm": 0.8742111325263977, "learning_rate": 8.882369288099157e-07, "loss": 1.0219, "step": 20620 }, { "epoch": 3.8141609366288796, "grad_norm": 1.0098148584365845, "learning_rate": 8.862951176181744e-07, "loss": 0.9206, "step": 20621 }, { "epoch": 3.8143467756922504, "grad_norm": 0.9189237952232361, "learning_rate": 8.843554218383787e-07, "loss": 0.8208, "step": 20622 }, { "epoch": 3.8145326147556218, "grad_norm": 1.065704345703125, "learning_rate": 8.824178415119177e-07, "loss": 0.6901, "step": 20623 }, { "epoch": 3.8147184538189927, "grad_norm": 0.8512879610061646, "learning_rate": 8.804823766801696e-07, "loss": 0.5791, "step": 20624 }, { "epoch": 3.814904292882364, "grad_norm": 0.9663471579551697, "learning_rate": 8.785490273844121e-07, "loss": 0.8668, "step": 20625 }, { "epoch": 3.815090131945735, "grad_norm": 1.5551700592041016, "learning_rate": 8.766177936659458e-07, "loss": 0.8712, "step": 20626 }, { "epoch": 3.815275971009106, "grad_norm": 1.0716501474380493, "learning_rate": 8.746886755659601e-07, "loss": 0.8028, "step": 20627 }, { "epoch": 3.815461810072477, "grad_norm": 0.8538807034492493, "learning_rate": 8.727616731256327e-07, "loss": 0.803, "step": 20628 }, { "epoch": 3.8156476491358484, "grad_norm": 0.890023410320282, "learning_rate": 8.708367863861089e-07, "loss": 0.6986, "step": 20629 }, { "epoch": 3.8158334881992193, "grad_norm": 0.9053801894187927, "learning_rate": 8.689140153884556e-07, "loss": 0.7898, "step": 20630 }, { "epoch": 3.8160193272625906, "grad_norm": 1.0622429847717285, "learning_rate": 8.66993360173718e-07, "loss": 0.8973, "step": 20631 }, { "epoch": 3.816205166325962, "grad_norm": 0.9308961629867554, "learning_rate": 8.650748207828963e-07, "loss": 0.8658, "step": 20632 }, { "epoch": 3.816391005389333, "grad_norm": 1.0338478088378906, "learning_rate": 8.631583972569246e-07, "loss": 0.8523, "step": 20633 }, { "epoch": 3.8165768444527037, 
"grad_norm": 0.9793767929077148, "learning_rate": 8.612440896367035e-07, "loss": 0.7213, "step": 20634 }, { "epoch": 3.816762683516075, "grad_norm": 0.7624946236610413, "learning_rate": 8.593318979631004e-07, "loss": 0.524, "step": 20635 }, { "epoch": 3.8169485225794464, "grad_norm": 0.9758871793746948, "learning_rate": 8.57421822276927e-07, "loss": 0.7867, "step": 20636 }, { "epoch": 3.8171343616428173, "grad_norm": 0.9500848054885864, "learning_rate": 8.555138626189618e-07, "loss": 0.7514, "step": 20637 }, { "epoch": 3.8173202007061886, "grad_norm": 0.9387444257736206, "learning_rate": 8.536080190299056e-07, "loss": 0.7099, "step": 20638 }, { "epoch": 3.8175060397695595, "grad_norm": 0.9620884656906128, "learning_rate": 8.517042915504481e-07, "loss": 0.9058, "step": 20639 }, { "epoch": 3.817691878832931, "grad_norm": 0.9171242117881775, "learning_rate": 8.498026802212012e-07, "loss": 0.7526, "step": 20640 }, { "epoch": 3.8178777178963017, "grad_norm": 1.1685776710510254, "learning_rate": 8.479031850827879e-07, "loss": 0.9633, "step": 20641 }, { "epoch": 3.818063556959673, "grad_norm": 0.9995948076248169, "learning_rate": 8.460058061757203e-07, "loss": 0.7484, "step": 20642 }, { "epoch": 3.818249396023044, "grad_norm": 0.8692541122436523, "learning_rate": 8.441105435404995e-07, "loss": 0.908, "step": 20643 }, { "epoch": 3.8184352350864152, "grad_norm": 0.9087664484977722, "learning_rate": 8.422173972175818e-07, "loss": 0.8699, "step": 20644 }, { "epoch": 3.818621074149786, "grad_norm": 0.7954351902008057, "learning_rate": 8.403263672473793e-07, "loss": 0.6872, "step": 20645 }, { "epoch": 3.8188069132131575, "grad_norm": 0.9517496824264526, "learning_rate": 8.384374536702377e-07, "loss": 0.7928, "step": 20646 }, { "epoch": 3.818992752276529, "grad_norm": 0.947588324546814, "learning_rate": 8.365506565264913e-07, "loss": 0.8637, "step": 20647 }, { "epoch": 3.8191785913398997, "grad_norm": 1.1128437519073486, "learning_rate": 8.346659758563857e-07, "loss": 0.7051, 
"step": 20648 }, { "epoch": 3.8193644304032706, "grad_norm": 1.1500704288482666, "learning_rate": 8.327834117001665e-07, "loss": 0.9687, "step": 20649 }, { "epoch": 3.819550269466642, "grad_norm": 1.0038901567459106, "learning_rate": 8.309029640980126e-07, "loss": 0.6612, "step": 20650 }, { "epoch": 3.819736108530013, "grad_norm": 0.894903302192688, "learning_rate": 8.290246330900476e-07, "loss": 0.7769, "step": 20651 }, { "epoch": 3.819921947593384, "grad_norm": 1.0159218311309814, "learning_rate": 8.271484187163725e-07, "loss": 0.7865, "step": 20652 }, { "epoch": 3.820107786656755, "grad_norm": 0.9636523127555847, "learning_rate": 8.252743210170222e-07, "loss": 0.5899, "step": 20653 }, { "epoch": 3.8202936257201263, "grad_norm": 0.9794251918792725, "learning_rate": 8.23402340032009e-07, "loss": 0.6804, "step": 20654 }, { "epoch": 3.8204794647834976, "grad_norm": 0.9215170741081238, "learning_rate": 8.215324758012677e-07, "loss": 0.6189, "step": 20655 }, { "epoch": 3.8206653038468685, "grad_norm": 0.836532473564148, "learning_rate": 8.196647283647218e-07, "loss": 0.77, "step": 20656 }, { "epoch": 3.82085114291024, "grad_norm": 0.8718463182449341, "learning_rate": 8.177990977622396e-07, "loss": 0.8012, "step": 20657 }, { "epoch": 3.8210369819736107, "grad_norm": 0.9456227421760559, "learning_rate": 8.159355840336225e-07, "loss": 0.8262, "step": 20658 }, { "epoch": 3.821222821036982, "grad_norm": 0.8164328336715698, "learning_rate": 8.140741872186608e-07, "loss": 0.8653, "step": 20659 }, { "epoch": 3.821408660100353, "grad_norm": 1.0025135278701782, "learning_rate": 8.122149073570673e-07, "loss": 0.7972, "step": 20660 }, { "epoch": 3.8215944991637243, "grad_norm": 0.7859447598457336, "learning_rate": 8.103577444885435e-07, "loss": 0.8528, "step": 20661 }, { "epoch": 3.821780338227095, "grad_norm": 0.8448495864868164, "learning_rate": 8.085026986527133e-07, "loss": 0.6718, "step": 20662 }, { "epoch": 3.8219661772904665, "grad_norm": 0.8926912546157837, 
"learning_rate": 8.066497698891673e-07, "loss": 0.7745, "step": 20663 }, { "epoch": 3.8221520163538374, "grad_norm": 0.9226291179656982, "learning_rate": 8.047989582374738e-07, "loss": 0.8601, "step": 20664 }, { "epoch": 3.8223378554172087, "grad_norm": 0.8001793026924133, "learning_rate": 8.029502637371123e-07, "loss": 0.7786, "step": 20665 }, { "epoch": 3.82252369448058, "grad_norm": 1.0437462329864502, "learning_rate": 8.011036864275512e-07, "loss": 0.9139, "step": 20666 }, { "epoch": 3.822709533543951, "grad_norm": 0.8408721089363098, "learning_rate": 7.992592263481924e-07, "loss": 0.7911, "step": 20667 }, { "epoch": 3.822895372607322, "grad_norm": 0.9707818031311035, "learning_rate": 7.974168835384155e-07, "loss": 0.8485, "step": 20668 }, { "epoch": 3.823081211670693, "grad_norm": 0.9045571684837341, "learning_rate": 7.955766580375335e-07, "loss": 0.8209, "step": 20669 }, { "epoch": 3.8232670507340645, "grad_norm": 0.9762415289878845, "learning_rate": 7.937385498848371e-07, "loss": 0.8957, "step": 20670 }, { "epoch": 3.8234528897974354, "grad_norm": 0.8625732660293579, "learning_rate": 7.919025591195284e-07, "loss": 0.7141, "step": 20671 }, { "epoch": 3.8236387288608067, "grad_norm": 0.9887514114379883, "learning_rate": 7.900686857808204e-07, "loss": 0.8672, "step": 20672 }, { "epoch": 3.8238245679241776, "grad_norm": 1.12395179271698, "learning_rate": 7.882369299078485e-07, "loss": 0.8746, "step": 20673 }, { "epoch": 3.824010406987549, "grad_norm": 0.8853467106819153, "learning_rate": 7.864072915397036e-07, "loss": 0.9147, "step": 20674 }, { "epoch": 3.82419624605092, "grad_norm": 1.0425935983657837, "learning_rate": 7.845797707154434e-07, "loss": 0.8855, "step": 20675 }, { "epoch": 3.824382085114291, "grad_norm": 1.0093079805374146, "learning_rate": 7.827543674740589e-07, "loss": 0.9353, "step": 20676 }, { "epoch": 3.824567924177662, "grad_norm": 1.0275657176971436, "learning_rate": 7.80931081854519e-07, "loss": 0.902, "step": 20677 }, { "epoch": 
3.8247537632410333, "grad_norm": 1.0822455883026123, "learning_rate": 7.79109913895737e-07, "loss": 0.8277, "step": 20678 }, { "epoch": 3.824939602304404, "grad_norm": 0.9282769560813904, "learning_rate": 7.772908636365927e-07, "loss": 0.6831, "step": 20679 }, { "epoch": 3.8251254413677755, "grad_norm": 1.0311388969421387, "learning_rate": 7.754739311158887e-07, "loss": 0.7844, "step": 20680 }, { "epoch": 3.825311280431147, "grad_norm": 1.0050128698349, "learning_rate": 7.736591163724272e-07, "loss": 0.8631, "step": 20681 }, { "epoch": 3.8254971194945178, "grad_norm": 0.9121170043945312, "learning_rate": 7.718464194449326e-07, "loss": 0.8428, "step": 20682 }, { "epoch": 3.8256829585578886, "grad_norm": 1.0084474086761475, "learning_rate": 7.700358403720964e-07, "loss": 0.6792, "step": 20683 }, { "epoch": 3.82586879762126, "grad_norm": 1.1749839782714844, "learning_rate": 7.682273791925543e-07, "loss": 0.7773, "step": 20684 }, { "epoch": 3.8260546366846313, "grad_norm": 0.960232138633728, "learning_rate": 7.664210359449086e-07, "loss": 0.8197, "step": 20685 }, { "epoch": 3.826240475748002, "grad_norm": 0.980080783367157, "learning_rate": 7.646168106677287e-07, "loss": 0.8017, "step": 20686 }, { "epoch": 3.826426314811373, "grad_norm": 0.9059486389160156, "learning_rate": 7.628147033994837e-07, "loss": 0.9325, "step": 20687 }, { "epoch": 3.8266121538747444, "grad_norm": 1.0272023677825928, "learning_rate": 7.610147141786761e-07, "loss": 0.8221, "step": 20688 }, { "epoch": 3.8267979929381157, "grad_norm": 0.9386231303215027, "learning_rate": 7.592168430437086e-07, "loss": 0.7834, "step": 20689 }, { "epoch": 3.8269838320014866, "grad_norm": 0.9377012252807617, "learning_rate": 7.574210900329503e-07, "loss": 0.8522, "step": 20690 }, { "epoch": 3.827169671064858, "grad_norm": 0.9702726006507874, "learning_rate": 7.556274551847264e-07, "loss": 0.8191, "step": 20691 }, { "epoch": 3.827355510128229, "grad_norm": 0.8971997499465942, "learning_rate": 7.538359385373395e-07, 
"loss": 0.6999, "step": 20692 }, { "epoch": 3.8275413491916, "grad_norm": 0.9465587139129639, "learning_rate": 7.520465401290033e-07, "loss": 0.8327, "step": 20693 }, { "epoch": 3.827727188254971, "grad_norm": 1.0489004850387573, "learning_rate": 7.502592599979208e-07, "loss": 0.6599, "step": 20694 }, { "epoch": 3.8279130273183424, "grad_norm": 1.0122950077056885, "learning_rate": 7.48474098182228e-07, "loss": 0.7954, "step": 20695 }, { "epoch": 3.8280988663817137, "grad_norm": 0.8980873227119446, "learning_rate": 7.466910547200279e-07, "loss": 0.7559, "step": 20696 }, { "epoch": 3.8282847054450846, "grad_norm": 0.889599084854126, "learning_rate": 7.44910129649401e-07, "loss": 0.8781, "step": 20697 }, { "epoch": 3.8284705445084555, "grad_norm": 1.1924166679382324, "learning_rate": 7.431313230083281e-07, "loss": 0.9235, "step": 20698 }, { "epoch": 3.828656383571827, "grad_norm": 0.8644866347312927, "learning_rate": 7.413546348347789e-07, "loss": 0.7679, "step": 20699 }, { "epoch": 3.828842222635198, "grad_norm": 1.0182651281356812, "learning_rate": 7.395800651666785e-07, "loss": 0.9471, "step": 20700 }, { "epoch": 3.829028061698569, "grad_norm": 0.887749195098877, "learning_rate": 7.378076140419187e-07, "loss": 0.8044, "step": 20701 }, { "epoch": 3.82921390076194, "grad_norm": 0.9445759654045105, "learning_rate": 7.360372814983141e-07, "loss": 0.7114, "step": 20702 }, { "epoch": 3.8293997398253112, "grad_norm": 1.3595925569534302, "learning_rate": 7.342690675736341e-07, "loss": 0.8659, "step": 20703 }, { "epoch": 3.8295855788886826, "grad_norm": 1.240745186805725, "learning_rate": 7.325029723056487e-07, "loss": 0.8501, "step": 20704 }, { "epoch": 3.8297714179520534, "grad_norm": 1.0776466131210327, "learning_rate": 7.307389957320276e-07, "loss": 1.0036, "step": 20705 }, { "epoch": 3.8299572570154248, "grad_norm": 0.8508837819099426, "learning_rate": 7.289771378904408e-07, "loss": 0.8033, "step": 20706 }, { "epoch": 3.8301430960787957, "grad_norm": 
0.8653962016105652, "learning_rate": 7.272173988184694e-07, "loss": 0.8732, "step": 20707 }, { "epoch": 3.830328935142167, "grad_norm": 0.8358247876167297, "learning_rate": 7.254597785536943e-07, "loss": 0.8679, "step": 20708 }, { "epoch": 3.830514774205538, "grad_norm": 0.983256995677948, "learning_rate": 7.237042771336078e-07, "loss": 1.0119, "step": 20709 }, { "epoch": 3.830700613268909, "grad_norm": 0.9095485806465149, "learning_rate": 7.219508945956909e-07, "loss": 0.7091, "step": 20710 }, { "epoch": 3.83088645233228, "grad_norm": 0.7739748954772949, "learning_rate": 7.201996309773695e-07, "loss": 0.7629, "step": 20711 }, { "epoch": 3.8310722913956514, "grad_norm": 0.9428021907806396, "learning_rate": 7.184504863160135e-07, "loss": 0.7273, "step": 20712 }, { "epoch": 3.8312581304590223, "grad_norm": 0.9342936277389526, "learning_rate": 7.167034606489598e-07, "loss": 0.7602, "step": 20713 }, { "epoch": 3.8314439695223936, "grad_norm": 0.8526061177253723, "learning_rate": 7.149585540134896e-07, "loss": 0.629, "step": 20714 }, { "epoch": 3.831629808585765, "grad_norm": 0.9603124260902405, "learning_rate": 7.132157664468509e-07, "loss": 0.677, "step": 20715 }, { "epoch": 3.831815647649136, "grad_norm": 0.8807042837142944, "learning_rate": 7.114750979862472e-07, "loss": 0.8594, "step": 20716 }, { "epoch": 3.8320014867125067, "grad_norm": 1.1333434581756592, "learning_rate": 7.097365486688157e-07, "loss": 0.8751, "step": 20717 }, { "epoch": 3.832187325775878, "grad_norm": 1.110044240951538, "learning_rate": 7.080001185316599e-07, "loss": 1.0981, "step": 20718 }, { "epoch": 3.8323731648392494, "grad_norm": 0.9268306493759155, "learning_rate": 7.062658076118611e-07, "loss": 0.8715, "step": 20719 }, { "epoch": 3.8325590039026203, "grad_norm": 0.9917341470718384, "learning_rate": 7.045336159464122e-07, "loss": 0.7835, "step": 20720 }, { "epoch": 3.8327448429659916, "grad_norm": 0.9318150281906128, "learning_rate": 7.028035435723058e-07, "loss": 0.7383, "step": 20721 }, 
{ "epoch": 3.8329306820293625, "grad_norm": 0.8739350438117981, "learning_rate": 7.010755905264565e-07, "loss": 0.8592, "step": 20722 }, { "epoch": 3.833116521092734, "grad_norm": 0.9437780380249023, "learning_rate": 6.993497568457352e-07, "loss": 0.9203, "step": 20723 }, { "epoch": 3.8333023601561047, "grad_norm": 0.8102313876152039, "learning_rate": 6.9762604256699e-07, "loss": 0.5722, "step": 20724 }, { "epoch": 3.833488199219476, "grad_norm": 0.8980199098587036, "learning_rate": 6.959044477270138e-07, "loss": 0.7649, "step": 20725 }, { "epoch": 3.833674038282847, "grad_norm": 0.9712707996368408, "learning_rate": 6.941849723625437e-07, "loss": 1.068, "step": 20726 }, { "epoch": 3.8338598773462182, "grad_norm": 1.315290093421936, "learning_rate": 6.924676165102729e-07, "loss": 0.9794, "step": 20727 }, { "epoch": 3.834045716409589, "grad_norm": 0.8967644572257996, "learning_rate": 6.907523802068716e-07, "loss": 0.9972, "step": 20728 }, { "epoch": 3.8342315554729605, "grad_norm": 0.8474511504173279, "learning_rate": 6.890392634889332e-07, "loss": 0.8099, "step": 20729 }, { "epoch": 3.834417394536332, "grad_norm": 0.928673505783081, "learning_rate": 6.873282663930281e-07, "loss": 0.7815, "step": 20730 }, { "epoch": 3.8346032335997027, "grad_norm": 0.9721923470497131, "learning_rate": 6.856193889556717e-07, "loss": 0.6947, "step": 20731 }, { "epoch": 3.8347890726630736, "grad_norm": 1.0192464590072632, "learning_rate": 6.839126312133459e-07, "loss": 0.7105, "step": 20732 }, { "epoch": 3.834974911726445, "grad_norm": 0.8009982705116272, "learning_rate": 6.822079932024661e-07, "loss": 0.7098, "step": 20733 }, { "epoch": 3.835160750789816, "grad_norm": 0.9369624257087708, "learning_rate": 6.805054749594253e-07, "loss": 0.9278, "step": 20734 }, { "epoch": 3.835346589853187, "grad_norm": 0.8212448358535767, "learning_rate": 6.7880507652055e-07, "loss": 0.6242, "step": 20735 }, { "epoch": 3.835532428916558, "grad_norm": 0.9031669497489929, "learning_rate": 
6.771067979221335e-07, "loss": 0.7569, "step": 20736 }, { "epoch": 3.8357182679799293, "grad_norm": 1.0682936906814575, "learning_rate": 6.754106392004467e-07, "loss": 0.8424, "step": 20737 }, { "epoch": 3.8359041070433006, "grad_norm": 1.0823919773101807, "learning_rate": 6.737166003916606e-07, "loss": 0.899, "step": 20738 }, { "epoch": 3.8360899461066715, "grad_norm": 1.1261800527572632, "learning_rate": 6.720246815319354e-07, "loss": 0.8051, "step": 20739 }, { "epoch": 3.836275785170043, "grad_norm": 0.9055033326148987, "learning_rate": 6.703348826573974e-07, "loss": 0.8597, "step": 20740 }, { "epoch": 3.8364616242334137, "grad_norm": 0.9246683716773987, "learning_rate": 6.68647203804107e-07, "loss": 0.7764, "step": 20741 }, { "epoch": 3.836647463296785, "grad_norm": 0.8555891513824463, "learning_rate": 6.669616450080796e-07, "loss": 0.7487, "step": 20742 }, { "epoch": 3.836833302360156, "grad_norm": 0.8516334295272827, "learning_rate": 6.652782063052865e-07, "loss": 0.8647, "step": 20743 }, { "epoch": 3.8370191414235273, "grad_norm": 0.9311104416847229, "learning_rate": 6.635968877316767e-07, "loss": 0.6996, "step": 20744 }, { "epoch": 3.8372049804868986, "grad_norm": 1.1723272800445557, "learning_rate": 6.619176893231215e-07, "loss": 0.9617, "step": 20745 }, { "epoch": 3.8373908195502695, "grad_norm": 1.2130335569381714, "learning_rate": 6.602406111154591e-07, "loss": 1.0183, "step": 20746 }, { "epoch": 3.8375766586136404, "grad_norm": 0.856591522693634, "learning_rate": 6.585656531444828e-07, "loss": 0.7832, "step": 20747 }, { "epoch": 3.8377624976770117, "grad_norm": 0.9378266334533691, "learning_rate": 6.56892815445953e-07, "loss": 0.9207, "step": 20748 }, { "epoch": 3.837948336740383, "grad_norm": 1.0121698379516602, "learning_rate": 6.552220980555635e-07, "loss": 0.906, "step": 20749 }, { "epoch": 3.838134175803754, "grad_norm": 1.2572370767593384, "learning_rate": 6.535535010089744e-07, "loss": 0.5664, "step": 20750 }, { "epoch": 3.838320014867125, 
"grad_norm": 0.9440824389457703, "learning_rate": 6.518870243418018e-07, "loss": 0.7313, "step": 20751 }, { "epoch": 3.838505853930496, "grad_norm": 0.7588942050933838, "learning_rate": 6.502226680896062e-07, "loss": 0.8024, "step": 20752 }, { "epoch": 3.8386916929938675, "grad_norm": 1.0178306102752686, "learning_rate": 6.485604322879257e-07, "loss": 0.8894, "step": 20753 }, { "epoch": 3.8388775320572384, "grad_norm": 0.9385819435119629, "learning_rate": 6.469003169722321e-07, "loss": 0.5797, "step": 20754 }, { "epoch": 3.8390633711206097, "grad_norm": 0.9427735805511475, "learning_rate": 6.452423221779414e-07, "loss": 0.6894, "step": 20755 }, { "epoch": 3.8392492101839806, "grad_norm": 0.9153196215629578, "learning_rate": 6.435864479404696e-07, "loss": 0.6369, "step": 20756 }, { "epoch": 3.839435049247352, "grad_norm": 1.2121299505233765, "learning_rate": 6.419326942951331e-07, "loss": 0.765, "step": 20757 }, { "epoch": 3.839620888310723, "grad_norm": 1.0967673063278198, "learning_rate": 6.40281061277237e-07, "loss": 1.0784, "step": 20758 }, { "epoch": 3.839806727374094, "grad_norm": 0.9232576489448547, "learning_rate": 6.386315489220418e-07, "loss": 0.9331, "step": 20759 }, { "epoch": 3.839992566437465, "grad_norm": 0.9867680072784424, "learning_rate": 6.369841572647306e-07, "loss": 0.7843, "step": 20760 }, { "epoch": 3.8401784055008363, "grad_norm": 0.8611258864402771, "learning_rate": 6.353388863404974e-07, "loss": 0.7441, "step": 20761 }, { "epoch": 3.840364244564207, "grad_norm": 0.837388277053833, "learning_rate": 6.336957361844254e-07, "loss": 0.7768, "step": 20762 }, { "epoch": 3.8405500836275785, "grad_norm": 1.0685689449310303, "learning_rate": 6.320547068315974e-07, "loss": 0.8462, "step": 20763 }, { "epoch": 3.84073592269095, "grad_norm": 1.0068045854568481, "learning_rate": 6.304157983170411e-07, "loss": 0.9256, "step": 20764 }, { "epoch": 3.8409217617543208, "grad_norm": 0.9282239675521851, "learning_rate": 6.287790106757396e-07, "loss": 0.8004, 
"step": 20765 }, { "epoch": 3.8411076008176916, "grad_norm": 1.6104170083999634, "learning_rate": 6.271443439426205e-07, "loss": 0.8226, "step": 20766 }, { "epoch": 3.841293439881063, "grad_norm": 0.8550835847854614, "learning_rate": 6.255117981525782e-07, "loss": 0.7964, "step": 20767 }, { "epoch": 3.8414792789444343, "grad_norm": 1.3087283372879028, "learning_rate": 6.238813733404514e-07, "loss": 1.0183, "step": 20768 }, { "epoch": 3.841665118007805, "grad_norm": 0.9228026270866394, "learning_rate": 6.222530695410344e-07, "loss": 0.6845, "step": 20769 }, { "epoch": 3.8418509570711765, "grad_norm": 1.0402220487594604, "learning_rate": 6.206268867890997e-07, "loss": 0.8275, "step": 20770 }, { "epoch": 3.8420367961345474, "grad_norm": 0.86312335729599, "learning_rate": 6.190028251193303e-07, "loss": 0.6399, "step": 20771 }, { "epoch": 3.8422226351979187, "grad_norm": 0.9186187386512756, "learning_rate": 6.173808845663987e-07, "loss": 0.6913, "step": 20772 }, { "epoch": 3.8424084742612896, "grad_norm": 0.7808519005775452, "learning_rate": 6.157610651649326e-07, "loss": 0.7809, "step": 20773 }, { "epoch": 3.842594313324661, "grad_norm": 0.8238528966903687, "learning_rate": 6.141433669494934e-07, "loss": 0.6946, "step": 20774 }, { "epoch": 3.842780152388032, "grad_norm": 0.8859274983406067, "learning_rate": 6.12527789954609e-07, "loss": 0.7788, "step": 20775 }, { "epoch": 3.842965991451403, "grad_norm": 0.8922582864761353, "learning_rate": 6.109143342147627e-07, "loss": 0.7511, "step": 20776 }, { "epoch": 3.843151830514774, "grad_norm": 0.8180863261222839, "learning_rate": 6.09302999764394e-07, "loss": 0.8641, "step": 20777 }, { "epoch": 3.8433376695781454, "grad_norm": 0.9965726733207703, "learning_rate": 6.076937866378974e-07, "loss": 0.6953, "step": 20778 }, { "epoch": 3.8435235086415167, "grad_norm": 0.9308102130889893, "learning_rate": 6.060866948696009e-07, "loss": 0.6541, "step": 20779 }, { "epoch": 3.8437093477048876, "grad_norm": 0.9346275925636292, 
"learning_rate": 6.044817244938328e-07, "loss": 0.9128, "step": 20780 }, { "epoch": 3.8438951867682585, "grad_norm": 0.8471858501434326, "learning_rate": 6.028788755448211e-07, "loss": 1.1086, "step": 20781 }, { "epoch": 3.84408102583163, "grad_norm": 0.9544927477836609, "learning_rate": 6.012781480567831e-07, "loss": 0.9041, "step": 20782 }, { "epoch": 3.844266864895001, "grad_norm": 0.8701556324958801, "learning_rate": 5.996795420638912e-07, "loss": 0.7266, "step": 20783 }, { "epoch": 3.844452703958372, "grad_norm": 1.0673178434371948, "learning_rate": 5.980830576002738e-07, "loss": 0.9692, "step": 20784 }, { "epoch": 3.844638543021743, "grad_norm": 1.1484763622283936, "learning_rate": 5.964886946999815e-07, "loss": 0.9399, "step": 20785 }, { "epoch": 3.8448243820851142, "grad_norm": 0.7817178964614868, "learning_rate": 5.948964533970647e-07, "loss": 0.8361, "step": 20786 }, { "epoch": 3.8450102211484856, "grad_norm": 1.080405354499817, "learning_rate": 5.933063337254852e-07, "loss": 0.7481, "step": 20787 }, { "epoch": 3.8451960602118564, "grad_norm": 1.027951955795288, "learning_rate": 5.917183357191935e-07, "loss": 0.6884, "step": 20788 }, { "epoch": 3.8453818992752278, "grad_norm": 0.9318289756774902, "learning_rate": 5.90132459412096e-07, "loss": 0.8408, "step": 20789 }, { "epoch": 3.8455677383385987, "grad_norm": 0.8515191078186035, "learning_rate": 5.885487048380101e-07, "loss": 0.7485, "step": 20790 }, { "epoch": 3.84575357740197, "grad_norm": 0.814934253692627, "learning_rate": 5.869670720307641e-07, "loss": 0.7777, "step": 20791 }, { "epoch": 3.845939416465341, "grad_norm": 0.7820918560028076, "learning_rate": 5.853875610241088e-07, "loss": 0.6791, "step": 20792 }, { "epoch": 3.846125255528712, "grad_norm": 1.0850684642791748, "learning_rate": 5.838101718517508e-07, "loss": 0.7751, "step": 20793 }, { "epoch": 3.846311094592083, "grad_norm": 0.90702885389328, "learning_rate": 5.822349045473518e-07, "loss": 1.1127, "step": 20794 }, { "epoch": 
3.8464969336554544, "grad_norm": 0.9162556529045105, "learning_rate": 5.806617591445518e-07, "loss": 0.8811, "step": 20795 }, { "epoch": 3.8466827727188253, "grad_norm": 1.4018508195877075, "learning_rate": 5.790907356769127e-07, "loss": 1.1379, "step": 20796 }, { "epoch": 3.8468686117821966, "grad_norm": 1.215028166770935, "learning_rate": 5.775218341779742e-07, "loss": 0.9514, "step": 20797 }, { "epoch": 3.847054450845568, "grad_norm": 0.8906833529472351, "learning_rate": 5.759550546812098e-07, "loss": 0.8951, "step": 20798 }, { "epoch": 3.847240289908939, "grad_norm": 0.8343894481658936, "learning_rate": 5.743903972200592e-07, "loss": 0.9802, "step": 20799 }, { "epoch": 3.8474261289723097, "grad_norm": 0.9710038304328918, "learning_rate": 5.728278618279404e-07, "loss": 0.8445, "step": 20800 }, { "epoch": 3.847611968035681, "grad_norm": 0.8098582029342651, "learning_rate": 5.712674485381819e-07, "loss": 0.6027, "step": 20801 }, { "epoch": 3.8477978070990524, "grad_norm": 0.9980126023292542, "learning_rate": 5.697091573840796e-07, "loss": 0.8672, "step": 20802 }, { "epoch": 3.8479836461624233, "grad_norm": 0.9283662438392639, "learning_rate": 5.681529883989178e-07, "loss": 0.6205, "step": 20803 }, { "epoch": 3.8481694852257946, "grad_norm": 1.1998335123062134, "learning_rate": 5.665989416159035e-07, "loss": 0.7646, "step": 20804 }, { "epoch": 3.8483553242891655, "grad_norm": 0.8913629055023193, "learning_rate": 5.650470170681876e-07, "loss": 0.7736, "step": 20805 }, { "epoch": 3.848541163352537, "grad_norm": 0.8077051043510437, "learning_rate": 5.634972147889217e-07, "loss": 0.7823, "step": 20806 }, { "epoch": 3.8487270024159077, "grad_norm": 0.9425461888313293, "learning_rate": 5.61949534811157e-07, "loss": 0.793, "step": 20807 }, { "epoch": 3.848912841479279, "grad_norm": 0.9253484606742859, "learning_rate": 5.604039771679337e-07, "loss": 0.917, "step": 20808 }, { "epoch": 3.84909868054265, "grad_norm": 0.9143958687782288, "learning_rate": 5.588605418922476e-07, 
"loss": 0.8883, "step": 20809 }, { "epoch": 3.8492845196060212, "grad_norm": 0.9746808409690857, "learning_rate": 5.573192290170393e-07, "loss": 0.9594, "step": 20810 }, { "epoch": 3.849470358669392, "grad_norm": 0.9156635403633118, "learning_rate": 5.557800385751932e-07, "loss": 0.7471, "step": 20811 }, { "epoch": 3.8496561977327635, "grad_norm": 0.9907944202423096, "learning_rate": 5.542429705995833e-07, "loss": 0.9115, "step": 20812 }, { "epoch": 3.849842036796135, "grad_norm": 1.0210583209991455, "learning_rate": 5.527080251229833e-07, "loss": 0.7514, "step": 20813 }, { "epoch": 3.8500278758595057, "grad_norm": 0.9511533975601196, "learning_rate": 5.51175202178178e-07, "loss": 0.8369, "step": 20814 }, { "epoch": 3.8502137149228766, "grad_norm": 0.9177201390266418, "learning_rate": 5.496445017978857e-07, "loss": 0.9094, "step": 20815 }, { "epoch": 3.850399553986248, "grad_norm": 0.7297847867012024, "learning_rate": 5.481159240147582e-07, "loss": 0.5455, "step": 20816 }, { "epoch": 3.850585393049619, "grad_norm": 1.1877726316452026, "learning_rate": 5.465894688614359e-07, "loss": 1.014, "step": 20817 }, { "epoch": 3.85077123211299, "grad_norm": 0.9924846887588501, "learning_rate": 5.450651363704929e-07, "loss": 0.6386, "step": 20818 }, { "epoch": 3.8509570711763614, "grad_norm": 0.8977491855621338, "learning_rate": 5.435429265744585e-07, "loss": 0.749, "step": 20819 }, { "epoch": 3.8511429102397323, "grad_norm": 0.9722229838371277, "learning_rate": 5.420228395058291e-07, "loss": 0.8737, "step": 20820 }, { "epoch": 3.8513287493031036, "grad_norm": 1.1354950666427612, "learning_rate": 5.405048751970454e-07, "loss": 0.8821, "step": 20821 }, { "epoch": 3.8515145883664745, "grad_norm": 0.8959169983863831, "learning_rate": 5.389890336805037e-07, "loss": 1.0039, "step": 20822 }, { "epoch": 3.851700427429846, "grad_norm": 0.8557883501052856, "learning_rate": 5.374753149885558e-07, "loss": 0.7533, "step": 20823 }, { "epoch": 3.8518862664932167, "grad_norm": 
1.0070464611053467, "learning_rate": 5.359637191535205e-07, "loss": 0.7772, "step": 20824 }, { "epoch": 3.852072105556588, "grad_norm": 0.9542919397354126, "learning_rate": 5.344542462076496e-07, "loss": 0.6336, "step": 20825 }, { "epoch": 3.852257944619959, "grad_norm": 0.988021731376648, "learning_rate": 5.329468961831619e-07, "loss": 0.9381, "step": 20826 }, { "epoch": 3.8524437836833303, "grad_norm": 0.8486770391464233, "learning_rate": 5.314416691122204e-07, "loss": 0.8043, "step": 20827 }, { "epoch": 3.8526296227467016, "grad_norm": 0.847549557685852, "learning_rate": 5.299385650269773e-07, "loss": 1.0205, "step": 20828 }, { "epoch": 3.8528154618100725, "grad_norm": 0.9774998426437378, "learning_rate": 5.284375839594957e-07, "loss": 0.8089, "step": 20829 }, { "epoch": 3.8530013008734434, "grad_norm": 1.0979493856430054, "learning_rate": 5.269387259418168e-07, "loss": 0.7921, "step": 20830 }, { "epoch": 3.8531871399368147, "grad_norm": 0.8957627415657043, "learning_rate": 5.25441991005915e-07, "loss": 0.801, "step": 20831 }, { "epoch": 3.853372979000186, "grad_norm": 0.8519086241722107, "learning_rate": 5.239473791837534e-07, "loss": 0.6381, "step": 20832 }, { "epoch": 3.853558818063557, "grad_norm": 0.9422652721405029, "learning_rate": 5.224548905072402e-07, "loss": 0.876, "step": 20833 }, { "epoch": 3.853744657126928, "grad_norm": 1.0043606758117676, "learning_rate": 5.20964525008194e-07, "loss": 0.8307, "step": 20834 }, { "epoch": 3.853930496190299, "grad_norm": 0.8691344857215881, "learning_rate": 5.194762827184673e-07, "loss": 0.8215, "step": 20835 }, { "epoch": 3.8541163352536705, "grad_norm": 0.9490212202072144, "learning_rate": 5.179901636697904e-07, "loss": 0.7445, "step": 20836 }, { "epoch": 3.8543021743170414, "grad_norm": 1.0769680738449097, "learning_rate": 5.165061678939042e-07, "loss": 1.0122, "step": 20837 }, { "epoch": 3.8544880133804127, "grad_norm": 0.8654699325561523, "learning_rate": 5.150242954224727e-07, "loss": 0.655, "step": 20838 }, { 
"epoch": 3.8546738524437836, "grad_norm": 0.9956970810890198, "learning_rate": 5.135445462871258e-07, "loss": 0.8539, "step": 20839 }, { "epoch": 3.854859691507155, "grad_norm": 0.9140504002571106, "learning_rate": 5.120669205194384e-07, "loss": 0.7781, "step": 20840 }, { "epoch": 3.855045530570526, "grad_norm": 0.7998533248901367, "learning_rate": 5.10591418150963e-07, "loss": 0.7059, "step": 20841 }, { "epoch": 3.855231369633897, "grad_norm": 1.070513129234314, "learning_rate": 5.091180392131745e-07, "loss": 0.7954, "step": 20842 }, { "epoch": 3.855417208697268, "grad_norm": 1.021838665008545, "learning_rate": 5.076467837375254e-07, "loss": 0.9054, "step": 20843 }, { "epoch": 3.8556030477606393, "grad_norm": 0.8108730316162109, "learning_rate": 5.061776517554351e-07, "loss": 0.625, "step": 20844 }, { "epoch": 3.85578888682401, "grad_norm": 1.0125364065170288, "learning_rate": 5.04710643298234e-07, "loss": 0.8624, "step": 20845 }, { "epoch": 3.8559747258873815, "grad_norm": 1.2414863109588623, "learning_rate": 5.032457583972416e-07, "loss": 1.0397, "step": 20846 }, { "epoch": 3.856160564950753, "grad_norm": 0.8799111247062683, "learning_rate": 5.017829970837329e-07, "loss": 0.6313, "step": 20847 }, { "epoch": 3.8563464040141238, "grad_norm": 1.1502881050109863, "learning_rate": 5.003223593889161e-07, "loss": 0.801, "step": 20848 }, { "epoch": 3.8565322430774946, "grad_norm": 0.8884891271591187, "learning_rate": 4.988638453439665e-07, "loss": 0.9028, "step": 20849 }, { "epoch": 3.856718082140866, "grad_norm": 1.1250226497650146, "learning_rate": 4.974074549800256e-07, "loss": 0.8561, "step": 20850 }, { "epoch": 3.8569039212042373, "grad_norm": 0.9480788111686707, "learning_rate": 4.959531883281576e-07, "loss": 0.8672, "step": 20851 }, { "epoch": 3.857089760267608, "grad_norm": 0.9684773683547974, "learning_rate": 4.945010454194265e-07, "loss": 0.9812, "step": 20852 }, { "epoch": 3.8572755993309795, "grad_norm": 0.8317880034446716, "learning_rate": 
4.930510262848076e-07, "loss": 0.6962, "step": 20853 }, { "epoch": 3.8574614383943504, "grad_norm": 1.09212064743042, "learning_rate": 4.916031309552538e-07, "loss": 0.7375, "step": 20854 }, { "epoch": 3.8576472774577217, "grad_norm": 1.0218623876571655, "learning_rate": 4.901573594616626e-07, "loss": 0.7847, "step": 20855 }, { "epoch": 3.8578331165210926, "grad_norm": 1.1442655324935913, "learning_rate": 4.887137118348983e-07, "loss": 0.7224, "step": 20856 }, { "epoch": 3.858018955584464, "grad_norm": 0.874628484249115, "learning_rate": 4.872721881057806e-07, "loss": 0.7401, "step": 20857 }, { "epoch": 3.858204794647835, "grad_norm": 0.837985634803772, "learning_rate": 4.858327883050628e-07, "loss": 0.656, "step": 20858 }, { "epoch": 3.858390633711206, "grad_norm": 0.870708703994751, "learning_rate": 4.843955124634647e-07, "loss": 0.8925, "step": 20859 }, { "epoch": 3.858576472774577, "grad_norm": 1.1414045095443726, "learning_rate": 4.829603606116728e-07, "loss": 0.6571, "step": 20860 }, { "epoch": 3.8587623118379484, "grad_norm": 1.0180896520614624, "learning_rate": 4.815273327803182e-07, "loss": 0.8944, "step": 20861 }, { "epoch": 3.8589481509013197, "grad_norm": 1.1275979280471802, "learning_rate": 4.800964289999654e-07, "loss": 1.0819, "step": 20862 }, { "epoch": 3.8591339899646906, "grad_norm": 0.9097642302513123, "learning_rate": 4.786676493011899e-07, "loss": 0.7079, "step": 20863 }, { "epoch": 3.8593198290280615, "grad_norm": 1.1204291582107544, "learning_rate": 4.772409937144673e-07, "loss": 0.9284, "step": 20864 }, { "epoch": 3.859505668091433, "grad_norm": 1.0713176727294922, "learning_rate": 4.758164622702399e-07, "loss": 1.0045, "step": 20865 }, { "epoch": 3.859691507154804, "grad_norm": 0.8504579663276672, "learning_rate": 4.7439405499891674e-07, "loss": 0.6306, "step": 20866 }, { "epoch": 3.859877346218175, "grad_norm": 1.0859054327011108, "learning_rate": 4.729737719308625e-07, "loss": 0.9759, "step": 20867 }, { "epoch": 3.860063185281546, 
"grad_norm": 0.9498307108879089, "learning_rate": 4.7155561309639716e-07, "loss": 0.7607, "step": 20868 }, { "epoch": 3.8602490243449172, "grad_norm": 0.882290780544281, "learning_rate": 4.7013957852577453e-07, "loss": 0.814, "step": 20869 }, { "epoch": 3.8604348634082886, "grad_norm": 0.8619256615638733, "learning_rate": 4.6872566824922584e-07, "loss": 0.8288, "step": 20870 }, { "epoch": 3.8606207024716594, "grad_norm": 0.9371104836463928, "learning_rate": 4.673138822969159e-07, "loss": 0.7566, "step": 20871 }, { "epoch": 3.860806541535031, "grad_norm": 0.9974023103713989, "learning_rate": 4.659042206989872e-07, "loss": 0.9281, "step": 20872 }, { "epoch": 3.8609923805984017, "grad_norm": 0.9402217268943787, "learning_rate": 4.6449668348553797e-07, "loss": 0.779, "step": 20873 }, { "epoch": 3.861178219661773, "grad_norm": 0.9545169472694397, "learning_rate": 4.6309127068657754e-07, "loss": 1.095, "step": 20874 }, { "epoch": 3.861364058725144, "grad_norm": 0.9999849200248718, "learning_rate": 4.616879823321374e-07, "loss": 0.8332, "step": 20875 }, { "epoch": 3.861549897788515, "grad_norm": 0.811683177947998, "learning_rate": 4.6028681845214915e-07, "loss": 0.7105, "step": 20876 }, { "epoch": 3.8617357368518865, "grad_norm": 0.9631338119506836, "learning_rate": 4.5888777907652227e-07, "loss": 0.9943, "step": 20877 }, { "epoch": 3.8619215759152574, "grad_norm": 0.9109195470809937, "learning_rate": 4.574908642351106e-07, "loss": 0.893, "step": 20878 }, { "epoch": 3.8621074149786283, "grad_norm": 1.31691575050354, "learning_rate": 4.5609607395773466e-07, "loss": 1.0398, "step": 20879 }, { "epoch": 3.8622932540419996, "grad_norm": 0.8035569787025452, "learning_rate": 4.547034082741708e-07, "loss": 0.493, "step": 20880 }, { "epoch": 3.862479093105371, "grad_norm": 0.8431886434555054, "learning_rate": 4.5331286721413956e-07, "loss": 1.0258, "step": 20881 }, { "epoch": 3.862664932168742, "grad_norm": 0.9853758215904236, "learning_rate": 4.519244508073062e-07, "loss": 
0.9369, "step": 20882 }, { "epoch": 3.8628507712321127, "grad_norm": 1.0060194730758667, "learning_rate": 4.5053815908331353e-07, "loss": 0.9941, "step": 20883 }, { "epoch": 3.863036610295484, "grad_norm": 1.0062130689620972, "learning_rate": 4.4915399207176026e-07, "loss": 0.8644, "step": 20884 }, { "epoch": 3.8632224493588554, "grad_norm": 0.9900531768798828, "learning_rate": 4.477719498021782e-07, "loss": 0.9401, "step": 20885 }, { "epoch": 3.8634082884222263, "grad_norm": 0.909517228603363, "learning_rate": 4.463920323040549e-07, "loss": 0.6103, "step": 20886 }, { "epoch": 3.8635941274855976, "grad_norm": 0.970980703830719, "learning_rate": 4.450142396068557e-07, "loss": 1.0352, "step": 20887 }, { "epoch": 3.8637799665489685, "grad_norm": 0.9788309931755066, "learning_rate": 4.4363857173999045e-07, "loss": 0.8198, "step": 20888 }, { "epoch": 3.86396580561234, "grad_norm": 0.9254130125045776, "learning_rate": 4.422650287328134e-07, "loss": 1.0075, "step": 20889 }, { "epoch": 3.8641516446757107, "grad_norm": 0.8469333648681641, "learning_rate": 4.408936106146344e-07, "loss": 0.8859, "step": 20890 }, { "epoch": 3.864337483739082, "grad_norm": 1.055687427520752, "learning_rate": 4.395243174147301e-07, "loss": 0.8075, "step": 20891 }, { "epoch": 3.864523322802453, "grad_norm": 1.0496764183044434, "learning_rate": 4.3815714916233263e-07, "loss": 0.8729, "step": 20892 }, { "epoch": 3.8647091618658242, "grad_norm": 1.0249707698822021, "learning_rate": 4.3679210588661866e-07, "loss": 1.014, "step": 20893 }, { "epoch": 3.864895000929195, "grad_norm": 1.0160168409347534, "learning_rate": 4.3542918761672044e-07, "loss": 0.6459, "step": 20894 }, { "epoch": 3.8650808399925665, "grad_norm": 0.9592434763908386, "learning_rate": 4.340683943817148e-07, "loss": 1.0442, "step": 20895 }, { "epoch": 3.865266679055938, "grad_norm": 0.7899896502494812, "learning_rate": 4.327097262106672e-07, "loss": 0.7655, "step": 20896 }, { "epoch": 3.8654525181193087, "grad_norm": 
1.0063457489013672, "learning_rate": 4.313531831325657e-07, "loss": 0.5508, "step": 20897 }, { "epoch": 3.8656383571826796, "grad_norm": 0.874465823173523, "learning_rate": 4.2999876517636484e-07, "loss": 0.762, "step": 20898 }, { "epoch": 3.865824196246051, "grad_norm": 1.07059645652771, "learning_rate": 4.286464723709638e-07, "loss": 0.7365, "step": 20899 }, { "epoch": 3.866010035309422, "grad_norm": 0.804924488067627, "learning_rate": 4.272963047452394e-07, "loss": 0.7519, "step": 20900 }, { "epoch": 3.866195874372793, "grad_norm": 1.277814507484436, "learning_rate": 4.25948262328002e-07, "loss": 0.5872, "step": 20901 }, { "epoch": 3.8663817134361644, "grad_norm": 0.8175092339515686, "learning_rate": 4.2460234514801743e-07, "loss": 0.6297, "step": 20902 }, { "epoch": 3.8665675524995353, "grad_norm": 0.9114618301391602, "learning_rate": 4.232585532340183e-07, "loss": 0.9771, "step": 20903 }, { "epoch": 3.8667533915629066, "grad_norm": 0.8819079399108887, "learning_rate": 4.2191688661469275e-07, "loss": 0.7006, "step": 20904 }, { "epoch": 3.8669392306262775, "grad_norm": 0.9290643930435181, "learning_rate": 4.2057734531866235e-07, "loss": 0.7299, "step": 20905 }, { "epoch": 3.867125069689649, "grad_norm": 0.9308085441589355, "learning_rate": 4.192399293745264e-07, "loss": 0.9075, "step": 20906 }, { "epoch": 3.8673109087530197, "grad_norm": 0.9288163781166077, "learning_rate": 4.179046388108177e-07, "loss": 0.9751, "step": 20907 }, { "epoch": 3.867496747816391, "grad_norm": 0.9171245098114014, "learning_rate": 4.1657147365605775e-07, "loss": 0.7872, "step": 20908 }, { "epoch": 3.867682586879762, "grad_norm": 0.7444671392440796, "learning_rate": 4.152404339386795e-07, "loss": 0.6115, "step": 20909 }, { "epoch": 3.8678684259431333, "grad_norm": 1.1080163717269897, "learning_rate": 4.1391151968710464e-07, "loss": 0.8745, "step": 20910 }, { "epoch": 3.8680542650065046, "grad_norm": 1.0134642124176025, "learning_rate": 4.1258473092967706e-07, "loss": 0.9199, "step": 
20911 }, { "epoch": 3.8682401040698755, "grad_norm": 1.1440978050231934, "learning_rate": 4.11260067694752e-07, "loss": 0.7557, "step": 20912 }, { "epoch": 3.8684259431332464, "grad_norm": 0.8744987845420837, "learning_rate": 4.0993753001056236e-07, "loss": 0.8767, "step": 20913 }, { "epoch": 3.8686117821966177, "grad_norm": 1.6089298725128174, "learning_rate": 4.086171179053633e-07, "loss": 1.2797, "step": 20914 }, { "epoch": 3.868797621259989, "grad_norm": 0.9238671064376831, "learning_rate": 4.0729883140732115e-07, "loss": 0.8659, "step": 20915 }, { "epoch": 3.86898346032336, "grad_norm": 0.9194700717926025, "learning_rate": 4.059826705445802e-07, "loss": 0.7047, "step": 20916 }, { "epoch": 3.869169299386731, "grad_norm": 0.7992855906486511, "learning_rate": 4.0466863534522893e-07, "loss": 0.8166, "step": 20917 }, { "epoch": 3.869355138450102, "grad_norm": 1.237243890762329, "learning_rate": 4.0335672583731167e-07, "loss": 0.8881, "step": 20918 }, { "epoch": 3.8695409775134735, "grad_norm": 0.99635249376297, "learning_rate": 4.020469420488282e-07, "loss": 0.6967, "step": 20919 }, { "epoch": 3.8697268165768444, "grad_norm": 1.0364038944244385, "learning_rate": 4.0073928400773396e-07, "loss": 0.7512, "step": 20920 }, { "epoch": 3.8699126556402157, "grad_norm": 0.8522542119026184, "learning_rate": 3.9943375174195107e-07, "loss": 0.8132, "step": 20921 }, { "epoch": 3.8700984947035866, "grad_norm": 0.8687629699707031, "learning_rate": 3.981303452793239e-07, "loss": 0.98, "step": 20922 }, { "epoch": 3.870284333766958, "grad_norm": 0.8941859006881714, "learning_rate": 3.968290646476858e-07, "loss": 0.6673, "step": 20923 }, { "epoch": 3.870470172830329, "grad_norm": 0.852587103843689, "learning_rate": 3.9552990987480333e-07, "loss": 0.8243, "step": 20924 }, { "epoch": 3.8706560118937, "grad_norm": 1.0379388332366943, "learning_rate": 3.942328809884099e-07, "loss": 0.8923, "step": 20925 }, { "epoch": 3.8708418509570715, "grad_norm": 0.9077103734016418, "learning_rate": 
3.929379780161724e-07, "loss": 0.7595, "step": 20926 }, { "epoch": 3.8710276900204423, "grad_norm": 0.8946455717086792, "learning_rate": 3.916452009857574e-07, "loss": 0.7433, "step": 20927 }, { "epoch": 3.871213529083813, "grad_norm": 1.0783296823501587, "learning_rate": 3.9035454992473184e-07, "loss": 0.8262, "step": 20928 }, { "epoch": 3.8713993681471845, "grad_norm": 1.0029511451721191, "learning_rate": 3.8906602486066256e-07, "loss": 0.9491, "step": 20929 }, { "epoch": 3.871585207210556, "grad_norm": 1.003530740737915, "learning_rate": 3.8777962582102757e-07, "loss": 0.7677, "step": 20930 }, { "epoch": 3.8717710462739268, "grad_norm": 0.9285503625869751, "learning_rate": 3.8649535283329376e-07, "loss": 0.9863, "step": 20931 }, { "epoch": 3.8719568853372976, "grad_norm": 0.8985755443572998, "learning_rate": 3.852132059248836e-07, "loss": 0.6707, "step": 20932 }, { "epoch": 3.872142724400669, "grad_norm": 0.9956530332565308, "learning_rate": 3.83933185123142e-07, "loss": 0.8795, "step": 20933 }, { "epoch": 3.8723285634640403, "grad_norm": 0.8931170701980591, "learning_rate": 3.826552904554026e-07, "loss": 0.9117, "step": 20934 }, { "epoch": 3.872514402527411, "grad_norm": 0.9893044829368591, "learning_rate": 3.8137952194893244e-07, "loss": 1.0049, "step": 20935 }, { "epoch": 3.8727002415907825, "grad_norm": 0.9650073051452637, "learning_rate": 3.8010587963097646e-07, "loss": 0.8627, "step": 20936 }, { "epoch": 3.8728860806541534, "grad_norm": 0.8656110167503357, "learning_rate": 3.788343635286906e-07, "loss": 0.6881, "step": 20937 }, { "epoch": 3.8730719197175247, "grad_norm": 1.0848356485366821, "learning_rate": 3.7756497366923103e-07, "loss": 0.9858, "step": 20938 }, { "epoch": 3.8732577587808956, "grad_norm": 0.9150021076202393, "learning_rate": 3.762977100796983e-07, "loss": 0.7277, "step": 20939 }, { "epoch": 3.873443597844267, "grad_norm": 1.00950026512146, "learning_rate": 3.7503257278712623e-07, "loss": 0.857, "step": 20940 }, { "epoch": 
3.873629436907638, "grad_norm": 1.1124643087387085, "learning_rate": 3.737695618185155e-07, "loss": 0.7612, "step": 20941 }, { "epoch": 3.873815275971009, "grad_norm": 0.854637086391449, "learning_rate": 3.7250867720082236e-07, "loss": 0.7645, "step": 20942 }, { "epoch": 3.87400111503438, "grad_norm": 0.7971043586730957, "learning_rate": 3.712499189609808e-07, "loss": 0.7094, "step": 20943 }, { "epoch": 3.8741869540977514, "grad_norm": 0.8815410137176514, "learning_rate": 3.6999328712582495e-07, "loss": 0.9448, "step": 20944 }, { "epoch": 3.8743727931611227, "grad_norm": 1.0783112049102783, "learning_rate": 3.687387817221999e-07, "loss": 0.8899, "step": 20945 }, { "epoch": 3.8745586322244936, "grad_norm": 0.8870981931686401, "learning_rate": 3.674864027768621e-07, "loss": 0.5712, "step": 20946 }, { "epoch": 3.8747444712878645, "grad_norm": 0.9387726187705994, "learning_rate": 3.662361503165679e-07, "loss": 0.8228, "step": 20947 }, { "epoch": 3.874930310351236, "grad_norm": 1.0043076276779175, "learning_rate": 3.6498802436797374e-07, "loss": 0.6124, "step": 20948 }, { "epoch": 3.875116149414607, "grad_norm": 1.0434253215789795, "learning_rate": 3.6374202495772505e-07, "loss": 0.8393, "step": 20949 }, { "epoch": 3.875301988477978, "grad_norm": 0.9208751916885376, "learning_rate": 3.6249815211242265e-07, "loss": 0.9704, "step": 20950 }, { "epoch": 3.8754878275413494, "grad_norm": 0.7848434448242188, "learning_rate": 3.612564058586121e-07, "loss": 0.6018, "step": 20951 }, { "epoch": 3.8756736666047202, "grad_norm": 0.9232147932052612, "learning_rate": 3.6001678622280546e-07, "loss": 0.9302, "step": 20952 }, { "epoch": 3.8758595056680916, "grad_norm": 0.7875188589096069, "learning_rate": 3.587792932314371e-07, "loss": 0.6236, "step": 20953 }, { "epoch": 3.8760453447314624, "grad_norm": 0.9610685110092163, "learning_rate": 3.5754392691094154e-07, "loss": 0.9156, "step": 20954 }, { "epoch": 3.876231183794834, "grad_norm": 1.028588056564331, "learning_rate": 
3.5631068728767536e-07, "loss": 1.0818, "step": 20955 }, { "epoch": 3.8764170228582047, "grad_norm": 0.9451630711555481, "learning_rate": 3.5507957438797314e-07, "loss": 0.9733, "step": 20956 }, { "epoch": 3.876602861921576, "grad_norm": 1.1087616682052612, "learning_rate": 3.5385058823809156e-07, "loss": 0.7876, "step": 20957 }, { "epoch": 3.876788700984947, "grad_norm": 0.74570232629776, "learning_rate": 3.526237288642653e-07, "loss": 0.5052, "step": 20958 }, { "epoch": 3.876974540048318, "grad_norm": 1.2114689350128174, "learning_rate": 3.5139899629268445e-07, "loss": 0.929, "step": 20959 }, { "epoch": 3.8771603791116895, "grad_norm": 0.9840768575668335, "learning_rate": 3.501763905494948e-07, "loss": 0.7526, "step": 20960 }, { "epoch": 3.8773462181750604, "grad_norm": 0.9244621992111206, "learning_rate": 3.4895591166077544e-07, "loss": 0.8511, "step": 20961 }, { "epoch": 3.8775320572384313, "grad_norm": 0.8813620209693909, "learning_rate": 3.4773755965258335e-07, "loss": 0.8786, "step": 20962 }, { "epoch": 3.8777178963018026, "grad_norm": 1.096408486366272, "learning_rate": 3.4652133455093106e-07, "loss": 0.773, "step": 20963 }, { "epoch": 3.877903735365174, "grad_norm": 1.1385411024093628, "learning_rate": 3.4530723638177553e-07, "loss": 1.0432, "step": 20964 }, { "epoch": 3.878089574428545, "grad_norm": 0.8550912141799927, "learning_rate": 3.440952651710072e-07, "loss": 0.8016, "step": 20965 }, { "epoch": 3.8782754134919157, "grad_norm": 0.8417757749557495, "learning_rate": 3.4288542094451646e-07, "loss": 0.5985, "step": 20966 }, { "epoch": 3.878461252555287, "grad_norm": 1.0538462400436401, "learning_rate": 3.4167770372811604e-07, "loss": 0.9619, "step": 20967 }, { "epoch": 3.8786470916186584, "grad_norm": 1.0869489908218384, "learning_rate": 3.404721135475852e-07, "loss": 0.7688, "step": 20968 }, { "epoch": 3.8788329306820293, "grad_norm": 2.1352310180664062, "learning_rate": 3.392686504286591e-07, "loss": 1.2084, "step": 20969 }, { "epoch": 
3.8790187697454006, "grad_norm": 1.1426483392715454, "learning_rate": 3.380673143970059e-07, "loss": 0.8039, "step": 20970 }, { "epoch": 3.8792046088087715, "grad_norm": 0.8323079943656921, "learning_rate": 3.368681054782941e-07, "loss": 0.7903, "step": 20971 }, { "epoch": 3.879390447872143, "grad_norm": 0.9386733770370483, "learning_rate": 3.35671023698092e-07, "loss": 0.9971, "step": 20972 }, { "epoch": 3.8795762869355137, "grad_norm": 1.0048978328704834, "learning_rate": 3.3447606908196817e-07, "loss": 0.9458, "step": 20973 }, { "epoch": 3.879762125998885, "grad_norm": 0.8610646724700928, "learning_rate": 3.3328324165541327e-07, "loss": 0.7504, "step": 20974 }, { "epoch": 3.879947965062256, "grad_norm": 0.9040335416793823, "learning_rate": 3.320925414438958e-07, "loss": 0.7795, "step": 20975 }, { "epoch": 3.8801338041256273, "grad_norm": 1.0290958881378174, "learning_rate": 3.309039684728288e-07, "loss": 0.695, "step": 20976 }, { "epoch": 3.880319643188998, "grad_norm": 1.0693012475967407, "learning_rate": 3.297175227675808e-07, "loss": 0.7905, "step": 20977 }, { "epoch": 3.8805054822523695, "grad_norm": 1.0066351890563965, "learning_rate": 3.28533204353465e-07, "loss": 0.7107, "step": 20978 }, { "epoch": 3.880691321315741, "grad_norm": 1.0007990598678589, "learning_rate": 3.2735101325577224e-07, "loss": 0.5716, "step": 20979 }, { "epoch": 3.8808771603791117, "grad_norm": 0.7360565066337585, "learning_rate": 3.2617094949971563e-07, "loss": 0.5082, "step": 20980 }, { "epoch": 3.8810629994424826, "grad_norm": 1.1390726566314697, "learning_rate": 3.2499301311050834e-07, "loss": 0.9744, "step": 20981 }, { "epoch": 3.881248838505854, "grad_norm": 0.8924768567085266, "learning_rate": 3.2381720411325256e-07, "loss": 0.7307, "step": 20982 }, { "epoch": 3.8814346775692252, "grad_norm": 0.9569699764251709, "learning_rate": 3.2264352253308373e-07, "loss": 0.8021, "step": 20983 }, { "epoch": 3.881620516632596, "grad_norm": 0.9881690740585327, "learning_rate": 
3.2147196839503734e-07, "loss": 0.7866, "step": 20984 }, { "epoch": 3.8818063556959674, "grad_norm": 0.9270148873329163, "learning_rate": 3.203025417241157e-07, "loss": 0.8255, "step": 20985 }, { "epoch": 3.8819921947593383, "grad_norm": 0.8654059767723083, "learning_rate": 3.1913524254526537e-07, "loss": 0.9177, "step": 20986 }, { "epoch": 3.8821780338227097, "grad_norm": 1.0542529821395874, "learning_rate": 3.179700708834332e-07, "loss": 0.8161, "step": 20987 }, { "epoch": 3.8823638728860805, "grad_norm": 1.0827463865280151, "learning_rate": 3.168070267634549e-07, "loss": 0.8735, "step": 20988 }, { "epoch": 3.882549711949452, "grad_norm": 0.8836554884910583, "learning_rate": 3.156461102101771e-07, "loss": 0.6402, "step": 20989 }, { "epoch": 3.8827355510128228, "grad_norm": 0.9729933142662048, "learning_rate": 3.1448732124835787e-07, "loss": 0.757, "step": 20990 }, { "epoch": 3.882921390076194, "grad_norm": 0.919170081615448, "learning_rate": 3.133306599027441e-07, "loss": 0.6836, "step": 20991 }, { "epoch": 3.883107229139565, "grad_norm": 0.8769204020500183, "learning_rate": 3.1217612619802715e-07, "loss": 0.6689, "step": 20992 }, { "epoch": 3.8832930682029363, "grad_norm": 0.7491322159767151, "learning_rate": 3.110237201588206e-07, "loss": 0.6114, "step": 20993 }, { "epoch": 3.8834789072663076, "grad_norm": 0.9114632606506348, "learning_rate": 3.098734418097493e-07, "loss": 0.9487, "step": 20994 }, { "epoch": 3.8836647463296785, "grad_norm": 0.8882074356079102, "learning_rate": 3.087252911753602e-07, "loss": 0.8461, "step": 20995 }, { "epoch": 3.8838505853930494, "grad_norm": 1.2083011865615845, "learning_rate": 3.075792682801448e-07, "loss": 0.9698, "step": 20996 }, { "epoch": 3.8840364244564207, "grad_norm": 0.9191475510597229, "learning_rate": 3.0643537314858364e-07, "loss": 0.8748, "step": 20997 }, { "epoch": 3.884222263519792, "grad_norm": 1.1168391704559326, "learning_rate": 3.0529360580505707e-07, "loss": 0.9781, "step": 20998 }, { "epoch": 
3.884408102583163, "grad_norm": 0.9397374987602234, "learning_rate": 3.0415396627396785e-07, "loss": 0.7974, "step": 20999 }, { "epoch": 3.8845939416465343, "grad_norm": 0.8357664346694946, "learning_rate": 3.030164545796299e-07, "loss": 0.7544, "step": 21000 }, { "epoch": 3.884779780709905, "grad_norm": 0.920737087726593, "learning_rate": 3.0188107074632376e-07, "loss": 0.9297, "step": 21001 }, { "epoch": 3.8849656197732765, "grad_norm": 0.9226137399673462, "learning_rate": 3.0074781479826344e-07, "loss": 0.9672, "step": 21002 }, { "epoch": 3.8851514588366474, "grad_norm": 0.9706295132637024, "learning_rate": 2.9961668675965173e-07, "loss": 0.9407, "step": 21003 }, { "epoch": 3.8853372979000187, "grad_norm": 1.1339836120605469, "learning_rate": 2.98487686654636e-07, "loss": 0.8755, "step": 21004 }, { "epoch": 3.8855231369633896, "grad_norm": 0.961578369140625, "learning_rate": 2.9736081450730815e-07, "loss": 0.659, "step": 21005 }, { "epoch": 3.885708976026761, "grad_norm": 0.9136162996292114, "learning_rate": 2.962360703417044e-07, "loss": 0.8642, "step": 21006 }, { "epoch": 3.885894815090132, "grad_norm": 0.9740614295005798, "learning_rate": 2.951134541818501e-07, "loss": 0.8162, "step": 21007 }, { "epoch": 3.886080654153503, "grad_norm": 1.0303674936294556, "learning_rate": 2.939929660517038e-07, "loss": 1.0471, "step": 21008 }, { "epoch": 3.8862664932168745, "grad_norm": 1.094256043434143, "learning_rate": 2.928746059751686e-07, "loss": 0.6487, "step": 21009 }, { "epoch": 3.8864523322802453, "grad_norm": 0.8360663056373596, "learning_rate": 2.9175837397612536e-07, "loss": 0.6802, "step": 21010 }, { "epoch": 3.886638171343616, "grad_norm": 0.9560714960098267, "learning_rate": 2.9064427007838844e-07, "loss": 0.7438, "step": 21011 }, { "epoch": 3.8868240104069876, "grad_norm": 1.2549717426300049, "learning_rate": 2.895322943057499e-07, "loss": 0.7281, "step": 21012 }, { "epoch": 3.887009849470359, "grad_norm": 0.9083113670349121, "learning_rate": 
2.8842244668192406e-07, "loss": 0.5523, "step": 21013 }, { "epoch": 3.8871956885337298, "grad_norm": 1.0238784551620483, "learning_rate": 2.8731472723061424e-07, "loss": 0.8316, "step": 21014 }, { "epoch": 3.8873815275971006, "grad_norm": 0.8489323854446411, "learning_rate": 2.8620913597545707e-07, "loss": 0.7387, "step": 21015 }, { "epoch": 3.887567366660472, "grad_norm": 0.9123976230621338, "learning_rate": 2.8510567294005587e-07, "loss": 0.8579, "step": 21016 }, { "epoch": 3.8877532057238433, "grad_norm": 1.016202688217163, "learning_rate": 2.8400433814794734e-07, "loss": 0.848, "step": 21017 }, { "epoch": 3.887939044787214, "grad_norm": 0.9767157435417175, "learning_rate": 2.829051316226461e-07, "loss": 0.9816, "step": 21018 }, { "epoch": 3.8881248838505855, "grad_norm": 0.9810354113578796, "learning_rate": 2.8180805338762217e-07, "loss": 0.7172, "step": 21019 }, { "epoch": 3.8883107229139564, "grad_norm": 0.8121437430381775, "learning_rate": 2.807131034662791e-07, "loss": 0.7956, "step": 21020 }, { "epoch": 3.8884965619773277, "grad_norm": 1.0200241804122925, "learning_rate": 2.7962028188198706e-07, "loss": 0.6751, "step": 21021 }, { "epoch": 3.8886824010406986, "grad_norm": 1.0330476760864258, "learning_rate": 2.785295886580719e-07, "loss": 0.9685, "step": 21022 }, { "epoch": 3.88886824010407, "grad_norm": 0.8258622288703918, "learning_rate": 2.774410238178149e-07, "loss": 0.682, "step": 21023 }, { "epoch": 3.889054079167441, "grad_norm": 1.0162584781646729, "learning_rate": 2.763545873844531e-07, "loss": 0.7645, "step": 21024 }, { "epoch": 3.889239918230812, "grad_norm": 0.9385042786598206, "learning_rate": 2.752702793811679e-07, "loss": 0.8204, "step": 21025 }, { "epoch": 3.889425757294183, "grad_norm": 0.9334362745285034, "learning_rate": 2.741880998310964e-07, "loss": 0.8146, "step": 21026 }, { "epoch": 3.8896115963575544, "grad_norm": 0.9243485331535339, "learning_rate": 2.731080487573534e-07, "loss": 0.8734, "step": 21027 }, { "epoch": 
3.8897974354209257, "grad_norm": 0.8317142724990845, "learning_rate": 2.7203012618297607e-07, "loss": 0.453, "step": 21028 }, { "epoch": 3.8899832744842966, "grad_norm": 0.8083961009979248, "learning_rate": 2.709543321309793e-07, "loss": 0.51, "step": 21029 }, { "epoch": 3.8901691135476675, "grad_norm": 0.8945092558860779, "learning_rate": 2.6988066662432253e-07, "loss": 0.7041, "step": 21030 }, { "epoch": 3.890354952611039, "grad_norm": 0.7138225436210632, "learning_rate": 2.6880912968590965e-07, "loss": 0.6267, "step": 21031 }, { "epoch": 3.89054079167441, "grad_norm": 0.9707046151161194, "learning_rate": 2.6773972133862237e-07, "loss": 0.8071, "step": 21032 }, { "epoch": 3.890726630737781, "grad_norm": 1.011085867881775, "learning_rate": 2.6667244160528684e-07, "loss": 0.8037, "step": 21033 }, { "epoch": 3.8909124698011524, "grad_norm": 0.9872880578041077, "learning_rate": 2.656072905086848e-07, "loss": 0.9047, "step": 21034 }, { "epoch": 3.8910983088645232, "grad_norm": 1.0062687397003174, "learning_rate": 2.6454426807153154e-07, "loss": 0.8721, "step": 21035 }, { "epoch": 3.8912841479278946, "grad_norm": 0.86174476146698, "learning_rate": 2.634833743165421e-07, "loss": 0.7465, "step": 21036 }, { "epoch": 3.8914699869912655, "grad_norm": 0.9085232019424438, "learning_rate": 2.624246092663318e-07, "loss": 0.7574, "step": 21037 }, { "epoch": 3.891655826054637, "grad_norm": 0.9221695065498352, "learning_rate": 2.613679729435159e-07, "loss": 0.7558, "step": 21038 }, { "epoch": 3.8918416651180077, "grad_norm": 0.7637650370597839, "learning_rate": 2.6031346537064293e-07, "loss": 0.7128, "step": 21039 }, { "epoch": 3.892027504181379, "grad_norm": 0.9544199705123901, "learning_rate": 2.592610865702172e-07, "loss": 0.847, "step": 21040 }, { "epoch": 3.89221334324475, "grad_norm": 1.0526127815246582, "learning_rate": 2.5821083656470956e-07, "loss": 0.8271, "step": 21041 }, { "epoch": 3.892399182308121, "grad_norm": 0.9636459946632385, "learning_rate": 
2.5716271537652437e-07, "loss": 0.7335, "step": 21042 }, { "epoch": 3.8925850213714925, "grad_norm": 0.8651697635650635, "learning_rate": 2.5611672302803257e-07, "loss": 0.8061, "step": 21043 }, { "epoch": 3.8927708604348634, "grad_norm": 0.9114117622375488, "learning_rate": 2.550728595415608e-07, "loss": 0.8719, "step": 21044 }, { "epoch": 3.8929566994982343, "grad_norm": 0.9638585448265076, "learning_rate": 2.540311249393912e-07, "loss": 0.8053, "step": 21045 }, { "epoch": 3.8931425385616056, "grad_norm": 0.9569137692451477, "learning_rate": 2.529915192437504e-07, "loss": 0.6136, "step": 21046 }, { "epoch": 3.893328377624977, "grad_norm": 0.9131402373313904, "learning_rate": 2.5195404247684294e-07, "loss": 0.7985, "step": 21047 }, { "epoch": 3.893514216688348, "grad_norm": 0.7240068316459656, "learning_rate": 2.5091869466079555e-07, "loss": 0.5631, "step": 21048 }, { "epoch": 3.8937000557517187, "grad_norm": 1.0669825077056885, "learning_rate": 2.4988547581772383e-07, "loss": 0.8824, "step": 21049 }, { "epoch": 3.89388589481509, "grad_norm": 0.9768051505088806, "learning_rate": 2.488543859696546e-07, "loss": 0.7538, "step": 21050 }, { "epoch": 3.8940717338784614, "grad_norm": 0.9522945284843445, "learning_rate": 2.478254251386147e-07, "loss": 0.777, "step": 21051 }, { "epoch": 3.8942575729418323, "grad_norm": 0.8581794500350952, "learning_rate": 2.467985933465644e-07, "loss": 0.8562, "step": 21052 }, { "epoch": 3.8944434120052036, "grad_norm": 1.0297988653182983, "learning_rate": 2.457738906153972e-07, "loss": 0.8718, "step": 21053 }, { "epoch": 3.8946292510685745, "grad_norm": 1.201256275177002, "learning_rate": 2.4475131696701794e-07, "loss": 0.8034, "step": 21054 }, { "epoch": 3.894815090131946, "grad_norm": 0.9601525664329529, "learning_rate": 2.4373087242323125e-07, "loss": 0.5696, "step": 21055 }, { "epoch": 3.8950009291953167, "grad_norm": 0.7941562533378601, "learning_rate": 2.4271255700581974e-07, "loss": 0.7143, "step": 21056 }, { "epoch": 
3.895186768258688, "grad_norm": 0.9030526876449585, "learning_rate": 2.416963707365105e-07, "loss": 0.909, "step": 21057 }, { "epoch": 3.8953726073220594, "grad_norm": 0.8962125778198242, "learning_rate": 2.4068231363700846e-07, "loss": 0.8633, "step": 21058 }, { "epoch": 3.8955584463854303, "grad_norm": 0.8150654435157776, "learning_rate": 2.3967038572895173e-07, "loss": 0.6983, "step": 21059 }, { "epoch": 3.895744285448801, "grad_norm": 1.003766417503357, "learning_rate": 2.386605870339342e-07, "loss": 0.7517, "step": 21060 }, { "epoch": 3.8959301245121725, "grad_norm": 0.8307972550392151, "learning_rate": 2.376529175735054e-07, "loss": 0.8183, "step": 21061 }, { "epoch": 3.896115963575544, "grad_norm": 0.9385664463043213, "learning_rate": 2.366473773691702e-07, "loss": 0.8329, "step": 21062 }, { "epoch": 3.8963018026389147, "grad_norm": 1.100223183631897, "learning_rate": 2.3564396644240038e-07, "loss": 0.6488, "step": 21063 }, { "epoch": 3.8964876417022856, "grad_norm": 0.985472559928894, "learning_rate": 2.3464268481461216e-07, "loss": 0.8551, "step": 21064 }, { "epoch": 3.896673480765657, "grad_norm": 1.1585676670074463, "learning_rate": 2.3364353250716619e-07, "loss": 0.8933, "step": 21065 }, { "epoch": 3.8968593198290282, "grad_norm": 0.815650999546051, "learning_rate": 2.3264650954140098e-07, "loss": 0.7755, "step": 21066 }, { "epoch": 3.897045158892399, "grad_norm": 0.8249333500862122, "learning_rate": 2.3165161593857733e-07, "loss": 0.7678, "step": 21067 }, { "epoch": 3.8972309979557704, "grad_norm": 0.9866358637809753, "learning_rate": 2.3065885171994483e-07, "loss": 0.8614, "step": 21068 }, { "epoch": 3.8974168370191413, "grad_norm": 0.8926262259483337, "learning_rate": 2.2966821690669771e-07, "loss": 0.8931, "step": 21069 }, { "epoch": 3.8976026760825127, "grad_norm": 1.156703233718872, "learning_rate": 2.2867971151995238e-07, "loss": 0.9257, "step": 21070 }, { "epoch": 3.8977885151458835, "grad_norm": 1.1967850923538208, "learning_rate": 
2.2769333558083638e-07, "loss": 0.7539, "step": 21071 }, { "epoch": 3.897974354209255, "grad_norm": 1.0719119310379028, "learning_rate": 2.2670908911038846e-07, "loss": 1.0875, "step": 21072 }, { "epoch": 3.8981601932726258, "grad_norm": 0.8525536060333252, "learning_rate": 2.25726972129614e-07, "loss": 0.8303, "step": 21073 }, { "epoch": 3.898346032335997, "grad_norm": 0.8941748738288879, "learning_rate": 2.2474698465948518e-07, "loss": 0.6014, "step": 21074 }, { "epoch": 3.898531871399368, "grad_norm": 1.1483436822891235, "learning_rate": 2.2376912672090744e-07, "loss": 0.8563, "step": 21075 }, { "epoch": 3.8987177104627393, "grad_norm": 0.9525496363639832, "learning_rate": 2.22793398334753e-07, "loss": 0.7296, "step": 21076 }, { "epoch": 3.8989035495261106, "grad_norm": 0.948024332523346, "learning_rate": 2.2181979952183852e-07, "loss": 0.7623, "step": 21077 }, { "epoch": 3.8990893885894815, "grad_norm": 0.8140870928764343, "learning_rate": 2.2084833030296958e-07, "loss": 0.8169, "step": 21078 }, { "epoch": 3.8992752276528524, "grad_norm": 1.0897465944290161, "learning_rate": 2.1987899069886298e-07, "loss": 0.8071, "step": 21079 }, { "epoch": 3.8994610667162237, "grad_norm": 0.8725020289421082, "learning_rate": 2.1891178073020213e-07, "loss": 0.8565, "step": 21080 }, { "epoch": 3.899646905779595, "grad_norm": 0.9559839963912964, "learning_rate": 2.1794670041764832e-07, "loss": 0.8861, "step": 21081 }, { "epoch": 3.899832744842966, "grad_norm": 1.0393133163452148, "learning_rate": 2.1698374978177392e-07, "loss": 0.8598, "step": 21082 }, { "epoch": 3.9000185839063373, "grad_norm": 1.010941505432129, "learning_rate": 2.1602292884316254e-07, "loss": 0.6854, "step": 21083 }, { "epoch": 3.900204422969708, "grad_norm": 1.0234802961349487, "learning_rate": 2.1506423762229778e-07, "loss": 0.7249, "step": 21084 }, { "epoch": 3.9003902620330795, "grad_norm": 0.9321912527084351, "learning_rate": 2.141076761396521e-07, "loss": 0.684, "step": 21085 }, { "epoch": 
3.9005761010964504, "grad_norm": 0.9883886575698853, "learning_rate": 2.1315324441563146e-07, "loss": 0.9127, "step": 21086 }, { "epoch": 3.9007619401598217, "grad_norm": 0.9180409908294678, "learning_rate": 2.122009424706195e-07, "loss": 0.787, "step": 21087 }, { "epoch": 3.9009477792231926, "grad_norm": 0.9692301750183105, "learning_rate": 2.1125077032494445e-07, "loss": 0.614, "step": 21088 }, { "epoch": 3.901133618286564, "grad_norm": 0.8420822620391846, "learning_rate": 2.1030272799886785e-07, "loss": 0.7879, "step": 21089 }, { "epoch": 3.901319457349935, "grad_norm": 0.8528715372085571, "learning_rate": 2.0935681551262908e-07, "loss": 1.0001, "step": 21090 }, { "epoch": 3.901505296413306, "grad_norm": 1.1028200387954712, "learning_rate": 2.0841303288642311e-07, "loss": 0.8211, "step": 21091 }, { "epoch": 3.9016911354766775, "grad_norm": 0.9070749878883362, "learning_rate": 2.0747138014040047e-07, "loss": 0.8908, "step": 21092 }, { "epoch": 3.9018769745400483, "grad_norm": 0.9431641101837158, "learning_rate": 2.065318572946451e-07, "loss": 1.0222, "step": 21093 }, { "epoch": 3.9020628136034192, "grad_norm": 1.069291591644287, "learning_rate": 2.055944643692187e-07, "loss": 0.9555, "step": 21094 }, { "epoch": 3.9022486526667906, "grad_norm": 1.1835315227508545, "learning_rate": 2.0465920138411644e-07, "loss": 0.9396, "step": 21095 }, { "epoch": 3.902434491730162, "grad_norm": 0.9170207977294922, "learning_rate": 2.0372606835931118e-07, "loss": 0.8386, "step": 21096 }, { "epoch": 3.9026203307935328, "grad_norm": 0.9324058294296265, "learning_rate": 2.0279506531472037e-07, "loss": 0.873, "step": 21097 }, { "epoch": 3.9028061698569037, "grad_norm": 0.9698527455329895, "learning_rate": 2.0186619227020586e-07, "loss": 0.5926, "step": 21098 }, { "epoch": 3.902992008920275, "grad_norm": 0.8722599148750305, "learning_rate": 2.0093944924559626e-07, "loss": 0.8603, "step": 21099 }, { "epoch": 3.9031778479836463, "grad_norm": 0.8694491982460022, "learning_rate": 
2.0001483626067573e-07, "loss": 0.7546, "step": 21100 }, { "epoch": 3.903363687047017, "grad_norm": 0.871938943862915, "learning_rate": 1.9909235333517295e-07, "loss": 0.8239, "step": 21101 }, { "epoch": 3.9035495261103885, "grad_norm": 0.8448165655136108, "learning_rate": 1.9817200048877217e-07, "loss": 0.8066, "step": 21102 }, { "epoch": 3.9037353651737594, "grad_norm": 0.9169268012046814, "learning_rate": 1.972537777411243e-07, "loss": 0.9164, "step": 21103 }, { "epoch": 3.9039212042371307, "grad_norm": 0.9491472840309143, "learning_rate": 1.9633768511183592e-07, "loss": 0.7313, "step": 21104 }, { "epoch": 3.9041070433005016, "grad_norm": 1.0520941019058228, "learning_rate": 1.9542372262044694e-07, "loss": 0.9784, "step": 21105 }, { "epoch": 3.904292882363873, "grad_norm": 0.9484320878982544, "learning_rate": 1.94511890286464e-07, "loss": 0.7181, "step": 21106 }, { "epoch": 3.9044787214272443, "grad_norm": 0.94563227891922, "learning_rate": 1.9360218812934927e-07, "loss": 0.7048, "step": 21107 }, { "epoch": 3.904664560490615, "grad_norm": 0.8737128376960754, "learning_rate": 1.9269461616852057e-07, "loss": 0.7899, "step": 21108 }, { "epoch": 3.904850399553986, "grad_norm": 1.0051727294921875, "learning_rate": 1.917891744233624e-07, "loss": 0.6657, "step": 21109 }, { "epoch": 3.9050362386173574, "grad_norm": 1.0031359195709229, "learning_rate": 1.9088586291317045e-07, "loss": 0.8422, "step": 21110 }, { "epoch": 3.9052220776807287, "grad_norm": 0.7909043431282043, "learning_rate": 1.8998468165725148e-07, "loss": 0.7197, "step": 21111 }, { "epoch": 3.9054079167440996, "grad_norm": 1.001685619354248, "learning_rate": 1.8908563067482343e-07, "loss": 0.9523, "step": 21112 }, { "epoch": 3.9055937558074705, "grad_norm": 0.9117027521133423, "learning_rate": 1.8818870998508208e-07, "loss": 0.7413, "step": 21113 }, { "epoch": 3.905779594870842, "grad_norm": 0.8238965272903442, "learning_rate": 1.872939196071677e-07, "loss": 0.7549, "step": 21114 }, { "epoch": 
3.905965433934213, "grad_norm": 0.9219626784324646, "learning_rate": 1.8640125956017607e-07, "loss": 0.8353, "step": 21115 }, { "epoch": 3.906151272997584, "grad_norm": 1.1130602359771729, "learning_rate": 1.8551072986316974e-07, "loss": 0.6794, "step": 21116 }, { "epoch": 3.9063371120609554, "grad_norm": 1.287622094154358, "learning_rate": 1.8462233053514467e-07, "loss": 0.7526, "step": 21117 }, { "epoch": 3.9065229511243262, "grad_norm": 0.8811305165290833, "learning_rate": 1.837360615950634e-07, "loss": 0.8361, "step": 21118 }, { "epoch": 3.9067087901876976, "grad_norm": 0.9759013056755066, "learning_rate": 1.8285192306184417e-07, "loss": 1.0873, "step": 21119 }, { "epoch": 3.9068946292510685, "grad_norm": 1.404094934463501, "learning_rate": 1.8196991495436077e-07, "loss": 0.8197, "step": 21120 }, { "epoch": 3.90708046831444, "grad_norm": 0.8916171193122864, "learning_rate": 1.8109003729143147e-07, "loss": 0.7677, "step": 21121 }, { "epoch": 3.9072663073778107, "grad_norm": 0.8397266268730164, "learning_rate": 1.8021229009184126e-07, "loss": 0.7017, "step": 21122 }, { "epoch": 3.907452146441182, "grad_norm": 0.9295265078544617, "learning_rate": 1.793366733743307e-07, "loss": 0.9705, "step": 21123 }, { "epoch": 3.907637985504553, "grad_norm": 1.106117606163025, "learning_rate": 1.784631871575626e-07, "loss": 0.8659, "step": 21124 }, { "epoch": 3.907823824567924, "grad_norm": 0.9938054084777832, "learning_rate": 1.7759183146021096e-07, "loss": 0.9143, "step": 21125 }, { "epoch": 3.9080096636312955, "grad_norm": 1.1148672103881836, "learning_rate": 1.767226063008498e-07, "loss": 0.8098, "step": 21126 }, { "epoch": 3.9081955026946664, "grad_norm": 0.8424129486083984, "learning_rate": 1.7585551169805315e-07, "loss": 0.8031, "step": 21127 }, { "epoch": 3.9083813417580373, "grad_norm": 0.9194619655609131, "learning_rate": 1.749905476703062e-07, "loss": 0.7698, "step": 21128 }, { "epoch": 3.9085671808214086, "grad_norm": 0.9470589756965637, "learning_rate": 
1.741277142360831e-07, "loss": 0.8279, "step": 21129 }, { "epoch": 3.90875301988478, "grad_norm": 0.9974692463874817, "learning_rate": 1.7326701141379132e-07, "loss": 0.7498, "step": 21130 }, { "epoch": 3.908938858948151, "grad_norm": 1.03632390499115, "learning_rate": 1.7240843922180505e-07, "loss": 0.77, "step": 21131 }, { "epoch": 3.909124698011522, "grad_norm": 1.0640885829925537, "learning_rate": 1.7155199767845408e-07, "loss": 0.823, "step": 21132 }, { "epoch": 3.909310537074893, "grad_norm": 1.1156837940216064, "learning_rate": 1.706976868020127e-07, "loss": 0.9895, "step": 21133 }, { "epoch": 3.9094963761382644, "grad_norm": 0.7735521793365479, "learning_rate": 1.6984550661071073e-07, "loss": 0.7006, "step": 21134 }, { "epoch": 3.9096822152016353, "grad_norm": 1.045270562171936, "learning_rate": 1.6899545712275588e-07, "loss": 0.7921, "step": 21135 }, { "epoch": 3.9098680542650066, "grad_norm": 0.9739266633987427, "learning_rate": 1.6814753835626695e-07, "loss": 0.9616, "step": 21136 }, { "epoch": 3.9100538933283775, "grad_norm": 0.9834922552108765, "learning_rate": 1.673017503293406e-07, "loss": 0.9297, "step": 21137 }, { "epoch": 3.910239732391749, "grad_norm": 0.9054717421531677, "learning_rate": 1.6645809306005124e-07, "loss": 0.7545, "step": 21138 }, { "epoch": 3.9104255714551197, "grad_norm": 1.0187366008758545, "learning_rate": 1.6561656656638447e-07, "loss": 0.7447, "step": 21139 }, { "epoch": 3.910611410518491, "grad_norm": 0.8211098313331604, "learning_rate": 1.6477717086631483e-07, "loss": 0.8485, "step": 21140 }, { "epoch": 3.9107972495818624, "grad_norm": 1.1753753423690796, "learning_rate": 1.6393990597775022e-07, "loss": 0.8201, "step": 21141 }, { "epoch": 3.9109830886452333, "grad_norm": 0.9633937478065491, "learning_rate": 1.6310477191856522e-07, "loss": 1.0452, "step": 21142 }, { "epoch": 3.911168927708604, "grad_norm": 0.9461058974266052, "learning_rate": 1.622717687065789e-07, "loss": 0.8729, "step": 21143 }, { "epoch": 
3.9113547667719755, "grad_norm": 0.911384642124176, "learning_rate": 1.6144089635957704e-07, "loss": 0.8119, "step": 21144 }, { "epoch": 3.911540605835347, "grad_norm": 0.9880189895629883, "learning_rate": 1.6061215489527882e-07, "loss": 0.7411, "step": 21145 }, { "epoch": 3.9117264448987177, "grad_norm": 0.8829910755157471, "learning_rate": 1.5978554433139226e-07, "loss": 0.9446, "step": 21146 }, { "epoch": 3.9119122839620886, "grad_norm": 0.8976996541023254, "learning_rate": 1.5896106468555883e-07, "loss": 0.7811, "step": 21147 }, { "epoch": 3.91209812302546, "grad_norm": 0.8251069784164429, "learning_rate": 1.5813871597535334e-07, "loss": 0.8636, "step": 21148 }, { "epoch": 3.9122839620888312, "grad_norm": 1.216132402420044, "learning_rate": 1.5731849821833954e-07, "loss": 0.7413, "step": 21149 }, { "epoch": 3.912469801152202, "grad_norm": 0.9787434935569763, "learning_rate": 1.5650041143203677e-07, "loss": 0.6992, "step": 21150 }, { "epoch": 3.9126556402155734, "grad_norm": 0.9688008427619934, "learning_rate": 1.5568445563388657e-07, "loss": 0.7277, "step": 21151 }, { "epoch": 3.9128414792789443, "grad_norm": 0.949082612991333, "learning_rate": 1.5487063084131946e-07, "loss": 0.8283, "step": 21152 }, { "epoch": 3.9130273183423157, "grad_norm": 1.3479398488998413, "learning_rate": 1.540589370716883e-07, "loss": 0.7075, "step": 21153 }, { "epoch": 3.9132131574056865, "grad_norm": 0.8778617978096008, "learning_rate": 1.5324937434233467e-07, "loss": 0.7449, "step": 21154 }, { "epoch": 3.913398996469058, "grad_norm": 0.971396267414093, "learning_rate": 1.524419426705226e-07, "loss": 0.9297, "step": 21155 }, { "epoch": 3.9135848355324288, "grad_norm": 0.8594685196876526, "learning_rate": 1.5163664207349381e-07, "loss": 0.9008, "step": 21156 }, { "epoch": 3.9137706745958, "grad_norm": 0.9365720748901367, "learning_rate": 1.508334725684346e-07, "loss": 1.102, "step": 21157 }, { "epoch": 3.913956513659171, "grad_norm": 1.1041709184646606, "learning_rate": 
1.500324341724979e-07, "loss": 0.891, "step": 21158 }, { "epoch": 3.9141423527225423, "grad_norm": 0.815019428730011, "learning_rate": 1.4923352690275893e-07, "loss": 0.7458, "step": 21159 }, { "epoch": 3.9143281917859136, "grad_norm": 0.9310709238052368, "learning_rate": 1.4843675077628183e-07, "loss": 0.6284, "step": 21160 }, { "epoch": 3.9145140308492845, "grad_norm": 1.229316234588623, "learning_rate": 1.476421058100641e-07, "loss": 0.9508, "step": 21161 }, { "epoch": 3.9146998699126554, "grad_norm": 0.8601710796356201, "learning_rate": 1.4684959202106997e-07, "loss": 0.7945, "step": 21162 }, { "epoch": 3.9148857089760267, "grad_norm": 1.241491675376892, "learning_rate": 1.460592094262303e-07, "loss": 1.0845, "step": 21163 }, { "epoch": 3.915071548039398, "grad_norm": 1.1255581378936768, "learning_rate": 1.452709580423872e-07, "loss": 0.9065, "step": 21164 }, { "epoch": 3.915257387102769, "grad_norm": 1.0429335832595825, "learning_rate": 1.444848378863828e-07, "loss": 0.9329, "step": 21165 }, { "epoch": 3.9154432261661403, "grad_norm": 1.027580976486206, "learning_rate": 1.4370084897499247e-07, "loss": 0.8746, "step": 21166 }, { "epoch": 3.915629065229511, "grad_norm": 1.0583301782608032, "learning_rate": 1.4291899132494736e-07, "loss": 0.7915, "step": 21167 }, { "epoch": 3.9158149042928825, "grad_norm": 0.9076946973800659, "learning_rate": 1.4213926495293407e-07, "loss": 0.7156, "step": 21168 }, { "epoch": 3.9160007433562534, "grad_norm": 1.085066318511963, "learning_rate": 1.4136166987559485e-07, "loss": 0.8511, "step": 21169 }, { "epoch": 3.9161865824196247, "grad_norm": 0.9009854197502136, "learning_rate": 1.4058620610952755e-07, "loss": 0.6704, "step": 21170 }, { "epoch": 3.9163724214829956, "grad_norm": 0.8545687794685364, "learning_rate": 1.398128736712856e-07, "loss": 0.7344, "step": 21171 }, { "epoch": 3.916558260546367, "grad_norm": 0.9864504337310791, "learning_rate": 1.390416725773669e-07, "loss": 0.8692, "step": 21172 }, { "epoch": 
3.916744099609738, "grad_norm": 0.850251317024231, "learning_rate": 1.3827260284423604e-07, "loss": 0.7236, "step": 21173 }, { "epoch": 3.916929938673109, "grad_norm": 0.8641321659088135, "learning_rate": 1.3750566448830215e-07, "loss": 0.8228, "step": 21174 }, { "epoch": 3.9171157777364805, "grad_norm": 0.9118969440460205, "learning_rate": 1.3674085752594103e-07, "loss": 0.8112, "step": 21175 }, { "epoch": 3.9173016167998513, "grad_norm": 0.951363742351532, "learning_rate": 1.35978181973484e-07, "loss": 0.7726, "step": 21176 }, { "epoch": 3.9174874558632222, "grad_norm": 1.010247826576233, "learning_rate": 1.3521763784718477e-07, "loss": 0.6253, "step": 21177 }, { "epoch": 3.9176732949265936, "grad_norm": 1.1614164113998413, "learning_rate": 1.3445922516329702e-07, "loss": 0.9844, "step": 21178 }, { "epoch": 3.917859133989965, "grad_norm": 1.0017623901367188, "learning_rate": 1.3370294393799666e-07, "loss": 0.7972, "step": 21179 }, { "epoch": 3.9180449730533358, "grad_norm": 1.0043132305145264, "learning_rate": 1.3294879418743743e-07, "loss": 1.0765, "step": 21180 }, { "epoch": 3.918230812116707, "grad_norm": 0.9212683439254761, "learning_rate": 1.3219677592770652e-07, "loss": 0.7776, "step": 21181 }, { "epoch": 3.918416651180078, "grad_norm": 0.9996517896652222, "learning_rate": 1.314468891748466e-07, "loss": 0.9532, "step": 21182 }, { "epoch": 3.9186024902434493, "grad_norm": 0.8499897718429565, "learning_rate": 1.306991339448782e-07, "loss": 0.9038, "step": 21183 }, { "epoch": 3.91878832930682, "grad_norm": 0.8690329790115356, "learning_rate": 1.2995351025375524e-07, "loss": 0.5533, "step": 21184 }, { "epoch": 3.9189741683701915, "grad_norm": 0.8721495866775513, "learning_rate": 1.292100181173872e-07, "loss": 0.9053, "step": 21185 }, { "epoch": 3.9191600074335624, "grad_norm": 0.9867119193077087, "learning_rate": 1.2846865755165028e-07, "loss": 0.8639, "step": 21186 }, { "epoch": 3.9193458464969337, "grad_norm": 0.8210247755050659, "learning_rate": 
1.2772942857235404e-07, "loss": 0.7302, "step": 21187 }, { "epoch": 3.9195316855603046, "grad_norm": 0.9453384876251221, "learning_rate": 1.2699233119529696e-07, "loss": 0.7579, "step": 21188 }, { "epoch": 3.919717524623676, "grad_norm": 0.8770848512649536, "learning_rate": 1.262573654361887e-07, "loss": 0.7229, "step": 21189 }, { "epoch": 3.9199033636870473, "grad_norm": 1.0798051357269287, "learning_rate": 1.2552453131073893e-07, "loss": 0.9094, "step": 21190 }, { "epoch": 3.920089202750418, "grad_norm": 0.9015201330184937, "learning_rate": 1.2479382883456847e-07, "loss": 0.7454, "step": 21191 }, { "epoch": 3.920275041813789, "grad_norm": 0.8095447421073914, "learning_rate": 1.2406525802328706e-07, "loss": 0.6548, "step": 21192 }, { "epoch": 3.9204608808771604, "grad_norm": 1.061845064163208, "learning_rate": 1.233388188924267e-07, "loss": 0.9094, "step": 21193 }, { "epoch": 3.9206467199405317, "grad_norm": 0.9467784762382507, "learning_rate": 1.2261451145751946e-07, "loss": 0.6258, "step": 21194 }, { "epoch": 3.9208325590039026, "grad_norm": 1.0385093688964844, "learning_rate": 1.218923357339974e-07, "loss": 0.8663, "step": 21195 }, { "epoch": 3.9210183980672735, "grad_norm": 0.909909725189209, "learning_rate": 1.2117229173729262e-07, "loss": 0.8303, "step": 21196 }, { "epoch": 3.921204237130645, "grad_norm": 1.0109440088272095, "learning_rate": 1.204543794827595e-07, "loss": 0.8107, "step": 21197 }, { "epoch": 3.921390076194016, "grad_norm": 0.7961100935935974, "learning_rate": 1.1973859898573026e-07, "loss": 0.7006, "step": 21198 }, { "epoch": 3.921575915257387, "grad_norm": 0.8510643243789673, "learning_rate": 1.1902495026148153e-07, "loss": 0.7086, "step": 21199 }, { "epoch": 3.9217617543207584, "grad_norm": 1.1658729314804077, "learning_rate": 1.1831343332524558e-07, "loss": 1.0314, "step": 21200 }, { "epoch": 3.9219475933841292, "grad_norm": 0.911927342414856, "learning_rate": 1.1760404819219916e-07, "loss": 0.7124, "step": 21201 }, { "epoch": 
3.9221334324475006, "grad_norm": 1.0180903673171997, "learning_rate": 1.1689679487749683e-07, "loss": 0.7781, "step": 21202 }, { "epoch": 3.9223192715108715, "grad_norm": 1.9628604650497437, "learning_rate": 1.1619167339621539e-07, "loss": 0.8688, "step": 21203 }, { "epoch": 3.922505110574243, "grad_norm": 0.924095094203949, "learning_rate": 1.1548868376342059e-07, "loss": 0.9179, "step": 21204 }, { "epoch": 3.9226909496376137, "grad_norm": 0.9204443693161011, "learning_rate": 1.1478782599411153e-07, "loss": 0.9451, "step": 21205 }, { "epoch": 3.922876788700985, "grad_norm": 0.9130086302757263, "learning_rate": 1.1408910010324292e-07, "loss": 0.9045, "step": 21206 }, { "epoch": 3.923062627764356, "grad_norm": 0.9768751263618469, "learning_rate": 1.1339250610573615e-07, "loss": 0.8263, "step": 21207 }, { "epoch": 3.923248466827727, "grad_norm": 0.8459489345550537, "learning_rate": 1.1269804401645712e-07, "loss": 0.658, "step": 21208 }, { "epoch": 3.9234343058910985, "grad_norm": 0.8942362666130066, "learning_rate": 1.1200571385021618e-07, "loss": 0.7145, "step": 21209 }, { "epoch": 3.9236201449544694, "grad_norm": 0.8998235464096069, "learning_rate": 1.1131551562180153e-07, "loss": 0.8589, "step": 21210 }, { "epoch": 3.9238059840178403, "grad_norm": 0.9604059457778931, "learning_rate": 1.1062744934594582e-07, "loss": 0.8735, "step": 21211 }, { "epoch": 3.9239918230812116, "grad_norm": 1.559274673461914, "learning_rate": 1.099415150373262e-07, "loss": 1.1403, "step": 21212 }, { "epoch": 3.924177662144583, "grad_norm": 0.9578410387039185, "learning_rate": 1.0925771271058649e-07, "loss": 0.9836, "step": 21213 }, { "epoch": 3.924363501207954, "grad_norm": 0.8517030477523804, "learning_rate": 1.0857604238032614e-07, "loss": 0.8048, "step": 21214 }, { "epoch": 3.924549340271325, "grad_norm": 0.939640998840332, "learning_rate": 1.0789650406108909e-07, "loss": 0.6859, "step": 21215 }, { "epoch": 3.924735179334696, "grad_norm": 1.3704183101654053, "learning_rate": 
1.0721909776737482e-07, "loss": 0.7968, "step": 21216 }, { "epoch": 3.9249210183980674, "grad_norm": 0.9239481687545776, "learning_rate": 1.0654382351363845e-07, "loss": 0.9119, "step": 21217 }, { "epoch": 3.9251068574614383, "grad_norm": 1.0346596240997314, "learning_rate": 1.0587068131431289e-07, "loss": 0.7972, "step": 21218 }, { "epoch": 3.9252926965248096, "grad_norm": 0.9344537258148193, "learning_rate": 1.0519967118375329e-07, "loss": 1.0446, "step": 21219 }, { "epoch": 3.9254785355881805, "grad_norm": 0.9868215322494507, "learning_rate": 1.0453079313627045e-07, "loss": 0.654, "step": 21220 }, { "epoch": 3.925664374651552, "grad_norm": 0.851256787776947, "learning_rate": 1.0386404718616405e-07, "loss": 0.63, "step": 21221 }, { "epoch": 3.9258502137149227, "grad_norm": 1.0101230144500732, "learning_rate": 1.0319943334763383e-07, "loss": 0.7849, "step": 21222 }, { "epoch": 3.926036052778294, "grad_norm": 0.9066257476806641, "learning_rate": 1.0253695163489063e-07, "loss": 0.6061, "step": 21223 }, { "epoch": 3.9262218918416654, "grad_norm": 0.8656601905822754, "learning_rate": 1.0187660206206761e-07, "loss": 0.7616, "step": 21224 }, { "epoch": 3.9264077309050363, "grad_norm": 0.7974125146865845, "learning_rate": 1.012183846432535e-07, "loss": 0.9409, "step": 21225 }, { "epoch": 3.926593569968407, "grad_norm": 1.3469806909561157, "learning_rate": 1.0056229939250372e-07, "loss": 0.7043, "step": 21226 }, { "epoch": 3.9267794090317785, "grad_norm": 0.9865778684616089, "learning_rate": 9.990834632381818e-08, "loss": 0.7802, "step": 21227 }, { "epoch": 3.92696524809515, "grad_norm": 1.0248531103134155, "learning_rate": 9.925652545114129e-08, "loss": 0.772, "step": 21228 }, { "epoch": 3.9271510871585207, "grad_norm": 1.012474775314331, "learning_rate": 9.860683678840632e-08, "loss": 0.7757, "step": 21229 }, { "epoch": 3.9273369262218916, "grad_norm": 0.7945632934570312, "learning_rate": 9.795928034946889e-08, "loss": 0.764, "step": 21230 }, { "epoch": 
3.927522765285263, "grad_norm": 0.811353862285614, "learning_rate": 9.731385614816236e-08, "loss": 0.7347, "step": 21231 }, { "epoch": 3.9277086043486342, "grad_norm": 1.1800938844680786, "learning_rate": 9.667056419824238e-08, "loss": 0.8599, "step": 21232 }, { "epoch": 3.927894443412005, "grad_norm": 0.8375271558761597, "learning_rate": 9.602940451344244e-08, "loss": 0.7644, "step": 21233 }, { "epoch": 3.9280802824753764, "grad_norm": 0.8486659526824951, "learning_rate": 9.539037710745158e-08, "loss": 0.7136, "step": 21234 }, { "epoch": 3.9282661215387473, "grad_norm": 1.0186020135879517, "learning_rate": 9.475348199392553e-08, "loss": 0.7333, "step": 21235 }, { "epoch": 3.9284519606021187, "grad_norm": 0.9781624674797058, "learning_rate": 9.411871918643122e-08, "loss": 1.0204, "step": 21236 }, { "epoch": 3.9286377996654895, "grad_norm": 0.9855211973190308, "learning_rate": 9.348608869852448e-08, "loss": 0.7462, "step": 21237 }, { "epoch": 3.928823638728861, "grad_norm": 1.0383330583572388, "learning_rate": 9.285559054371672e-08, "loss": 0.7325, "step": 21238 }, { "epoch": 3.929009477792232, "grad_norm": 0.8758466243743896, "learning_rate": 9.222722473546386e-08, "loss": 0.7156, "step": 21239 }, { "epoch": 3.929195316855603, "grad_norm": 0.9373558759689331, "learning_rate": 9.160099128716627e-08, "loss": 0.8481, "step": 21240 }, { "epoch": 3.929381155918974, "grad_norm": 0.9394092559814453, "learning_rate": 9.097689021219102e-08, "loss": 0.7789, "step": 21241 }, { "epoch": 3.9295669949823453, "grad_norm": 0.9431646466255188, "learning_rate": 9.035492152387193e-08, "loss": 0.8201, "step": 21242 }, { "epoch": 3.9297528340457166, "grad_norm": 0.9627437591552734, "learning_rate": 8.973508523547613e-08, "loss": 0.9604, "step": 21243 }, { "epoch": 3.9299386731090875, "grad_norm": 1.0630133152008057, "learning_rate": 8.91173813602264e-08, "loss": 0.8981, "step": 21244 }, { "epoch": 3.9301245121724584, "grad_norm": 1.1388155221939087, "learning_rate": 
8.850180991131219e-08, "loss": 0.8641, "step": 21245 }, { "epoch": 3.9303103512358297, "grad_norm": 0.8789702653884888, "learning_rate": 8.788837090186742e-08, "loss": 0.7783, "step": 21246 }, { "epoch": 3.930496190299201, "grad_norm": 1.3608787059783936, "learning_rate": 8.727706434499272e-08, "loss": 1.8397, "step": 21247 }, { "epoch": 3.930682029362572, "grad_norm": 0.7911452054977417, "learning_rate": 8.666789025374433e-08, "loss": 0.8227, "step": 21248 }, { "epoch": 3.9308678684259433, "grad_norm": 1.0240737199783325, "learning_rate": 8.606084864110076e-08, "loss": 0.9596, "step": 21249 }, { "epoch": 3.931053707489314, "grad_norm": 1.040171504020691, "learning_rate": 8.545593952002939e-08, "loss": 0.9922, "step": 21250 }, { "epoch": 3.9312395465526855, "grad_norm": 0.7827027440071106, "learning_rate": 8.485316290344214e-08, "loss": 0.6822, "step": 21251 }, { "epoch": 3.9314253856160564, "grad_norm": 1.0231354236602783, "learning_rate": 8.425251880419537e-08, "loss": 0.6931, "step": 21252 }, { "epoch": 3.9316112246794277, "grad_norm": 1.0050798654556274, "learning_rate": 8.365400723512328e-08, "loss": 0.8074, "step": 21253 }, { "epoch": 3.9317970637427986, "grad_norm": 0.7175681591033936, "learning_rate": 8.305762820899343e-08, "loss": 0.5561, "step": 21254 }, { "epoch": 3.93198290280617, "grad_norm": 1.0384067296981812, "learning_rate": 8.246338173852896e-08, "loss": 0.8317, "step": 21255 }, { "epoch": 3.932168741869541, "grad_norm": 0.9028663039207458, "learning_rate": 8.187126783643084e-08, "loss": 0.8666, "step": 21256 }, { "epoch": 3.932354580932912, "grad_norm": 0.9729875922203064, "learning_rate": 8.12812865153112e-08, "loss": 0.9616, "step": 21257 }, { "epoch": 3.9325404199962835, "grad_norm": 1.0092549324035645, "learning_rate": 8.069343778778215e-08, "loss": 0.8919, "step": 21258 }, { "epoch": 3.9327262590596543, "grad_norm": 0.7683966159820557, "learning_rate": 8.010772166637814e-08, "loss": 0.6543, "step": 21259 }, { "epoch": 3.9329120981230252, 
"grad_norm": 1.0346338748931885, "learning_rate": 7.952413816361138e-08, "loss": 0.9382, "step": 21260 }, { "epoch": 3.9330979371863966, "grad_norm": 0.9304220080375671, "learning_rate": 7.894268729192744e-08, "loss": 0.9117, "step": 21261 }, { "epoch": 3.933283776249768, "grad_norm": 0.9711853265762329, "learning_rate": 7.836336906373865e-08, "loss": 0.8058, "step": 21262 }, { "epoch": 3.9334696153131388, "grad_norm": 0.8730411529541016, "learning_rate": 7.778618349141287e-08, "loss": 0.8607, "step": 21263 }, { "epoch": 3.93365545437651, "grad_norm": 1.2616333961486816, "learning_rate": 7.721113058727359e-08, "loss": 0.9632, "step": 21264 }, { "epoch": 3.933841293439881, "grad_norm": 1.0677188634872437, "learning_rate": 7.663821036358876e-08, "loss": 0.9732, "step": 21265 }, { "epoch": 3.9340271325032523, "grad_norm": 1.28867769241333, "learning_rate": 7.606742283257085e-08, "loss": 0.9898, "step": 21266 }, { "epoch": 3.934212971566623, "grad_norm": 1.0222501754760742, "learning_rate": 7.54987680064323e-08, "loss": 1.0614, "step": 21267 }, { "epoch": 3.9343988106299945, "grad_norm": 1.1863951683044434, "learning_rate": 7.493224589728565e-08, "loss": 0.8528, "step": 21268 }, { "epoch": 3.9345846496933654, "grad_norm": 0.8715998530387878, "learning_rate": 7.436785651724342e-08, "loss": 0.8946, "step": 21269 }, { "epoch": 3.9347704887567367, "grad_norm": 1.1731165647506714, "learning_rate": 7.380559987832935e-08, "loss": 0.7065, "step": 21270 }, { "epoch": 3.9349563278201076, "grad_norm": 1.1409883499145508, "learning_rate": 7.324547599255605e-08, "loss": 0.7519, "step": 21271 }, { "epoch": 3.935142166883479, "grad_norm": 0.8426750302314758, "learning_rate": 7.26874848718806e-08, "loss": 0.5539, "step": 21272 }, { "epoch": 3.9353280059468503, "grad_norm": 0.8278335928916931, "learning_rate": 7.21316265282046e-08, "loss": 0.6854, "step": 21273 }, { "epoch": 3.935513845010221, "grad_norm": 0.8848078846931458, "learning_rate": 7.157790097340745e-08, "loss": 0.918, 
"step": 21274 }, { "epoch": 3.935699684073592, "grad_norm": 0.8748083710670471, "learning_rate": 7.102630821927969e-08, "loss": 0.7788, "step": 21275 }, { "epoch": 3.9358855231369634, "grad_norm": 1.6303963661193848, "learning_rate": 7.047684827762302e-08, "loss": 1.3235, "step": 21276 }, { "epoch": 3.9360713622003347, "grad_norm": 0.8565230965614319, "learning_rate": 6.992952116013918e-08, "loss": 0.6716, "step": 21277 }, { "epoch": 3.9362572012637056, "grad_norm": 1.0051913261413574, "learning_rate": 6.938432687854102e-08, "loss": 0.8032, "step": 21278 }, { "epoch": 3.9364430403270765, "grad_norm": 1.0851858854293823, "learning_rate": 6.884126544443037e-08, "loss": 0.8638, "step": 21279 }, { "epoch": 3.936628879390448, "grad_norm": 1.3282707929611206, "learning_rate": 6.830033686942017e-08, "loss": 0.6202, "step": 21280 }, { "epoch": 3.936814718453819, "grad_norm": 1.0067006349563599, "learning_rate": 6.776154116504562e-08, "loss": 0.9785, "step": 21281 }, { "epoch": 3.93700055751719, "grad_norm": 0.9127426147460938, "learning_rate": 6.722487834281977e-08, "loss": 0.9038, "step": 21282 }, { "epoch": 3.9371863965805614, "grad_norm": 1.1649229526519775, "learning_rate": 6.669034841417787e-08, "loss": 0.946, "step": 21283 }, { "epoch": 3.9373722356439322, "grad_norm": 0.8898347020149231, "learning_rate": 6.615795139054415e-08, "loss": 0.8945, "step": 21284 }, { "epoch": 3.9375580747073036, "grad_norm": 0.9791845083236694, "learning_rate": 6.562768728327618e-08, "loss": 0.6055, "step": 21285 }, { "epoch": 3.9377439137706745, "grad_norm": 1.031858205795288, "learning_rate": 6.509955610368711e-08, "loss": 0.6925, "step": 21286 }, { "epoch": 3.937929752834046, "grad_norm": 0.7945656776428223, "learning_rate": 6.457355786305685e-08, "loss": 0.7017, "step": 21287 }, { "epoch": 3.938115591897417, "grad_norm": 0.8898307681083679, "learning_rate": 6.404969257259863e-08, "loss": 0.757, "step": 21288 }, { "epoch": 3.938301430960788, "grad_norm": 0.9096782803535461, 
"learning_rate": 6.35279602435146e-08, "loss": 0.7248, "step": 21289 }, { "epoch": 3.938487270024159, "grad_norm": 0.9898115396499634, "learning_rate": 6.300836088691809e-08, "loss": 0.813, "step": 21290 }, { "epoch": 3.93867310908753, "grad_norm": 0.9275386929512024, "learning_rate": 6.249089451391133e-08, "loss": 0.7457, "step": 21291 }, { "epoch": 3.9388589481509015, "grad_norm": 1.1459001302719116, "learning_rate": 6.197556113554104e-08, "loss": 0.8262, "step": 21292 }, { "epoch": 3.9390447872142724, "grad_norm": 1.0498716831207275, "learning_rate": 6.146236076279843e-08, "loss": 0.8605, "step": 21293 }, { "epoch": 3.9392306262776433, "grad_norm": 1.0647568702697754, "learning_rate": 6.095129340665252e-08, "loss": 0.9404, "step": 21294 }, { "epoch": 3.9394164653410146, "grad_norm": 0.9955742359161377, "learning_rate": 6.044235907798346e-08, "loss": 0.8509, "step": 21295 }, { "epoch": 3.939602304404386, "grad_norm": 0.9847276210784912, "learning_rate": 5.993555778767146e-08, "loss": 0.8834, "step": 21296 }, { "epoch": 3.939788143467757, "grad_norm": 0.9324856996536255, "learning_rate": 5.943088954654119e-08, "loss": 0.9322, "step": 21297 }, { "epoch": 3.939973982531128, "grad_norm": 1.1173300743103027, "learning_rate": 5.8928354365339614e-08, "loss": 0.8917, "step": 21298 }, { "epoch": 3.940159821594499, "grad_norm": 1.0777596235275269, "learning_rate": 5.842795225481368e-08, "loss": 0.9903, "step": 21299 }, { "epoch": 3.9403456606578704, "grad_norm": 1.0002269744873047, "learning_rate": 5.792968322564374e-08, "loss": 0.8653, "step": 21300 }, { "epoch": 3.9405314997212413, "grad_norm": 0.8822920918464661, "learning_rate": 5.7433547288443525e-08, "loss": 0.7842, "step": 21301 }, { "epoch": 3.9407173387846126, "grad_norm": 1.0127395391464233, "learning_rate": 5.6939544453826766e-08, "loss": 0.8987, "step": 21302 }, { "epoch": 3.9409031778479835, "grad_norm": 0.8914490342140198, "learning_rate": 5.644767473231838e-08, "loss": 0.6468, "step": 21303 }, { "epoch": 
3.941089016911355, "grad_norm": 0.851958692073822, "learning_rate": 5.595793813443217e-08, "loss": 0.6582, "step": 21304 }, { "epoch": 3.9412748559747257, "grad_norm": 0.9173169136047363, "learning_rate": 5.547033467060425e-08, "loss": 0.7409, "step": 21305 }, { "epoch": 3.941460695038097, "grad_norm": 0.9577040672302246, "learning_rate": 5.49848643512596e-08, "loss": 0.8937, "step": 21306 }, { "epoch": 3.9416465341014684, "grad_norm": 1.2994239330291748, "learning_rate": 5.4501527186745505e-08, "loss": 0.6323, "step": 21307 }, { "epoch": 3.9418323731648393, "grad_norm": 0.8939706683158875, "learning_rate": 5.4020323187375934e-08, "loss": 0.7016, "step": 21308 }, { "epoch": 3.94201821222821, "grad_norm": 1.0897858142852783, "learning_rate": 5.354125236343155e-08, "loss": 0.7664, "step": 21309 }, { "epoch": 3.9422040512915815, "grad_norm": 1.5078178644180298, "learning_rate": 5.3064314725137507e-08, "loss": 1.0231, "step": 21310 }, { "epoch": 3.942389890354953, "grad_norm": 1.005741834640503, "learning_rate": 5.258951028267456e-08, "loss": 0.982, "step": 21311 }, { "epoch": 3.9425757294183237, "grad_norm": 0.9288942217826843, "learning_rate": 5.211683904616793e-08, "loss": 0.6563, "step": 21312 }, { "epoch": 3.942761568481695, "grad_norm": 0.8981716632843018, "learning_rate": 5.164630102570955e-08, "loss": 0.8861, "step": 21313 }, { "epoch": 3.942947407545066, "grad_norm": 1.05691659450531, "learning_rate": 5.117789623134694e-08, "loss": 0.8238, "step": 21314 }, { "epoch": 3.9431332466084372, "grad_norm": 0.8106885552406311, "learning_rate": 5.0711624673061006e-08, "loss": 0.776, "step": 21315 }, { "epoch": 3.943319085671808, "grad_norm": 0.9093728065490723, "learning_rate": 5.024748636082155e-08, "loss": 0.8649, "step": 21316 }, { "epoch": 3.9435049247351794, "grad_norm": 0.8479557037353516, "learning_rate": 4.9785481304531754e-08, "loss": 0.7704, "step": 21317 }, { "epoch": 3.9436907637985503, "grad_norm": 0.9083126187324524, "learning_rate": 
4.932560951405041e-08, "loss": 0.7381, "step": 21318 }, { "epoch": 3.9438766028619217, "grad_norm": 1.0219753980636597, "learning_rate": 4.886787099919188e-08, "loss": 0.8371, "step": 21319 }, { "epoch": 3.9440624419252925, "grad_norm": 1.0285660028457642, "learning_rate": 4.841226576972613e-08, "loss": 0.7398, "step": 21320 }, { "epoch": 3.944248280988664, "grad_norm": 1.084557294845581, "learning_rate": 4.795879383536761e-08, "loss": 0.7634, "step": 21321 }, { "epoch": 3.944434120052035, "grad_norm": 1.0166188478469849, "learning_rate": 4.750745520580857e-08, "loss": 0.7617, "step": 21322 }, { "epoch": 3.944619959115406, "grad_norm": 0.8051891922950745, "learning_rate": 4.7058249890685743e-08, "loss": 0.7276, "step": 21323 }, { "epoch": 3.944805798178777, "grad_norm": 0.8736680150032043, "learning_rate": 4.661117789956926e-08, "loss": 0.6811, "step": 21324 }, { "epoch": 3.9449916372421483, "grad_norm": 0.9255815148353577, "learning_rate": 4.616623924201813e-08, "loss": 0.9285, "step": 21325 }, { "epoch": 3.9451774763055196, "grad_norm": 0.9325020909309387, "learning_rate": 4.572343392751366e-08, "loss": 0.4965, "step": 21326 }, { "epoch": 3.9453633153688905, "grad_norm": 0.7916317582130432, "learning_rate": 4.528276196551495e-08, "loss": 0.6926, "step": 21327 }, { "epoch": 3.9455491544322614, "grad_norm": 1.3394813537597656, "learning_rate": 4.48442233654256e-08, "loss": 0.7654, "step": 21328 }, { "epoch": 3.9457349934956327, "grad_norm": 1.0707465410232544, "learning_rate": 4.440781813660477e-08, "loss": 0.8737, "step": 21329 }, { "epoch": 3.945920832559004, "grad_norm": 0.9279564619064331, "learning_rate": 4.397354628837835e-08, "loss": 0.6375, "step": 21330 }, { "epoch": 3.946106671622375, "grad_norm": 1.1137791872024536, "learning_rate": 4.3541407829994496e-08, "loss": 0.8231, "step": 21331 }, { "epoch": 3.9462925106857463, "grad_norm": 0.9537021517753601, "learning_rate": 4.311140277070136e-08, "loss": 0.7397, "step": 21332 }, { "epoch": 3.946478349749117, 
"grad_norm": 0.9723032712936401, "learning_rate": 4.268353111964718e-08, "loss": 0.8534, "step": 21333 }, { "epoch": 3.9466641888124885, "grad_norm": 1.1306790113449097, "learning_rate": 4.22577928859913e-08, "loss": 0.8695, "step": 21334 }, { "epoch": 3.9468500278758594, "grad_norm": 1.071981430053711, "learning_rate": 4.183418807879313e-08, "loss": 0.9349, "step": 21335 }, { "epoch": 3.9470358669392307, "grad_norm": 0.960877537727356, "learning_rate": 4.141271670712321e-08, "loss": 0.8431, "step": 21336 }, { "epoch": 3.9472217060026016, "grad_norm": 0.8617332577705383, "learning_rate": 4.099337877995213e-08, "loss": 0.7918, "step": 21337 }, { "epoch": 3.947407545065973, "grad_norm": 0.9909121990203857, "learning_rate": 4.0576174306250494e-08, "loss": 0.9285, "step": 21338 }, { "epoch": 3.947593384129344, "grad_norm": 1.2329769134521484, "learning_rate": 4.0161103294911184e-08, "loss": 1.0796, "step": 21339 }, { "epoch": 3.947779223192715, "grad_norm": 0.8551541566848755, "learning_rate": 3.974816575479379e-08, "loss": 0.7998, "step": 21340 }, { "epoch": 3.9479650622560865, "grad_norm": 0.9515479803085327, "learning_rate": 3.933736169471347e-08, "loss": 0.6935, "step": 21341 }, { "epoch": 3.9481509013194573, "grad_norm": 0.9743008017539978, "learning_rate": 3.8928691123429894e-08, "loss": 0.732, "step": 21342 }, { "epoch": 3.9483367403828282, "grad_norm": 1.077339768409729, "learning_rate": 3.852215404969162e-08, "loss": 1.0379, "step": 21343 }, { "epoch": 3.9485225794461996, "grad_norm": 0.8660666346549988, "learning_rate": 3.8117750482136174e-08, "loss": 0.7244, "step": 21344 }, { "epoch": 3.948708418509571, "grad_norm": 1.029205322265625, "learning_rate": 3.77154804294344e-08, "loss": 0.9903, "step": 21345 }, { "epoch": 3.9488942575729418, "grad_norm": 0.8247634172439575, "learning_rate": 3.731534390014613e-08, "loss": 0.7297, "step": 21346 }, { "epoch": 3.949080096636313, "grad_norm": 1.1168490648269653, "learning_rate": 3.691734090280896e-08, "loss": 0.8315, 
"step": 21347 }, { "epoch": 3.949265935699684, "grad_norm": 0.9257239103317261, "learning_rate": 3.652147144593832e-08, "loss": 0.8475, "step": 21348 }, { "epoch": 3.9494517747630553, "grad_norm": 0.8547592163085938, "learning_rate": 3.612773553797188e-08, "loss": 0.7528, "step": 21349 }, { "epoch": 3.949637613826426, "grad_norm": 0.9390490651130676, "learning_rate": 3.573613318731406e-08, "loss": 0.8821, "step": 21350 }, { "epoch": 3.9498234528897975, "grad_norm": 0.8484833240509033, "learning_rate": 3.534666440232481e-08, "loss": 0.7977, "step": 21351 }, { "epoch": 3.9500092919531684, "grad_norm": 0.8618779182434082, "learning_rate": 3.495932919130862e-08, "loss": 0.8179, "step": 21352 }, { "epoch": 3.9501951310165397, "grad_norm": 0.8798786401748657, "learning_rate": 3.457412756253664e-08, "loss": 0.6562, "step": 21353 }, { "epoch": 3.9503809700799106, "grad_norm": 0.9681945443153381, "learning_rate": 3.4191059524246724e-08, "loss": 0.9839, "step": 21354 }, { "epoch": 3.950566809143282, "grad_norm": 0.8329968452453613, "learning_rate": 3.381012508458792e-08, "loss": 0.7537, "step": 21355 }, { "epoch": 3.9507526482066533, "grad_norm": 1.0116149187088013, "learning_rate": 3.343132425170925e-08, "loss": 0.7618, "step": 21356 }, { "epoch": 3.950938487270024, "grad_norm": 0.9968962073326111, "learning_rate": 3.305465703368205e-08, "loss": 0.7926, "step": 21357 }, { "epoch": 3.951124326333395, "grad_norm": 0.8955625295639038, "learning_rate": 3.268012343855542e-08, "loss": 0.9569, "step": 21358 }, { "epoch": 3.9513101653967664, "grad_norm": 1.4347474575042725, "learning_rate": 3.230772347432298e-08, "loss": 1.0598, "step": 21359 }, { "epoch": 3.9514960044601377, "grad_norm": 1.2289568185806274, "learning_rate": 3.193745714893392e-08, "loss": 0.773, "step": 21360 }, { "epoch": 3.9516818435235086, "grad_norm": 0.9194411635398865, "learning_rate": 3.156932447028194e-08, "loss": 0.6479, "step": 21361 }, { "epoch": 3.95186768258688, "grad_norm": 0.9544026851654053, 
"learning_rate": 3.1203325446238496e-08, "loss": 0.8541, "step": 21362 }, { "epoch": 3.952053521650251, "grad_norm": 0.9884023070335388, "learning_rate": 3.083946008459737e-08, "loss": 0.8302, "step": 21363 }, { "epoch": 3.952239360713622, "grad_norm": 1.0158047676086426, "learning_rate": 3.0477728393141226e-08, "loss": 0.6797, "step": 21364 }, { "epoch": 3.952425199776993, "grad_norm": 0.8721694350242615, "learning_rate": 3.0118130379575005e-08, "loss": 0.7315, "step": 21365 }, { "epoch": 3.9526110388403644, "grad_norm": 0.929004967212677, "learning_rate": 2.9760666051592555e-08, "loss": 0.8917, "step": 21366 }, { "epoch": 3.9527968779037352, "grad_norm": 0.931168794631958, "learning_rate": 2.940533541681001e-08, "loss": 0.9602, "step": 21367 }, { "epoch": 3.9529827169671066, "grad_norm": 1.0448548793792725, "learning_rate": 2.9052138482810188e-08, "loss": 0.8694, "step": 21368 }, { "epoch": 3.9531685560304775, "grad_norm": 1.0332188606262207, "learning_rate": 2.8701075257131503e-08, "loss": 0.7027, "step": 21369 }, { "epoch": 3.953354395093849, "grad_norm": 0.9469727873802185, "learning_rate": 2.835214574727907e-08, "loss": 0.6672, "step": 21370 }, { "epoch": 3.95354023415722, "grad_norm": 0.7369052171707153, "learning_rate": 2.8005349960691375e-08, "loss": 0.5705, "step": 21371 }, { "epoch": 3.953726073220591, "grad_norm": 1.1198176145553589, "learning_rate": 2.7660687904762506e-08, "loss": 0.9384, "step": 21372 }, { "epoch": 3.953911912283962, "grad_norm": 0.9621124267578125, "learning_rate": 2.7318159586864344e-08, "loss": 0.8375, "step": 21373 }, { "epoch": 3.954097751347333, "grad_norm": 0.9608624577522278, "learning_rate": 2.6977765014302158e-08, "loss": 0.6683, "step": 21374 }, { "epoch": 3.9542835904107045, "grad_norm": 1.0401973724365234, "learning_rate": 2.6639504194325703e-08, "loss": 0.9533, "step": 21375 }, { "epoch": 3.9544694294740754, "grad_norm": 0.826633632183075, "learning_rate": 2.6303377134184738e-08, "loss": 0.6486, "step": 21376 }, { 
"epoch": 3.9546552685374463, "grad_norm": 0.971384584903717, "learning_rate": 2.5969383841029094e-08, "loss": 0.8556, "step": 21377 }, { "epoch": 3.9548411076008176, "grad_norm": 1.117881178855896, "learning_rate": 2.5637524321986404e-08, "loss": 0.8109, "step": 21378 }, { "epoch": 3.955026946664189, "grad_norm": 0.9405913352966309, "learning_rate": 2.53077985841621e-08, "loss": 0.7834, "step": 21379 }, { "epoch": 3.95521278572756, "grad_norm": 0.727852463722229, "learning_rate": 2.498020663457279e-08, "loss": 0.4824, "step": 21380 }, { "epoch": 3.955398624790931, "grad_norm": 0.955696702003479, "learning_rate": 2.4654748480212876e-08, "loss": 0.8243, "step": 21381 }, { "epoch": 3.955584463854302, "grad_norm": 0.8906865119934082, "learning_rate": 2.4331424128043457e-08, "loss": 0.9608, "step": 21382 }, { "epoch": 3.9557703029176734, "grad_norm": 1.1015154123306274, "learning_rate": 2.4010233584947916e-08, "loss": 0.9698, "step": 21383 }, { "epoch": 3.9559561419810443, "grad_norm": 0.9718268513679504, "learning_rate": 2.3691176857787433e-08, "loss": 0.9026, "step": 21384 }, { "epoch": 3.9561419810444156, "grad_norm": 0.980043888092041, "learning_rate": 2.3374253953367676e-08, "loss": 0.8598, "step": 21385 }, { "epoch": 3.9563278201077865, "grad_norm": 0.871580958366394, "learning_rate": 2.3059464878472104e-08, "loss": 0.8815, "step": 21386 }, { "epoch": 3.956513659171158, "grad_norm": 0.8850259780883789, "learning_rate": 2.2746809639795362e-08, "loss": 0.8521, "step": 21387 }, { "epoch": 3.9566994982345287, "grad_norm": 1.4275609254837036, "learning_rate": 2.2436288244020996e-08, "loss": 0.7822, "step": 21388 }, { "epoch": 3.9568853372979, "grad_norm": 0.841894805431366, "learning_rate": 2.2127900697777036e-08, "loss": 0.7784, "step": 21389 }, { "epoch": 3.9570711763612714, "grad_norm": 0.91501384973526, "learning_rate": 2.1821647007636004e-08, "loss": 0.6936, "step": 21390 }, { "epoch": 3.9572570154246423, "grad_norm": 1.1249712705612183, "learning_rate": 
2.1517527180159315e-08, "loss": 0.6923, "step": 21391 }, { "epoch": 3.957442854488013, "grad_norm": 0.9765527844429016, "learning_rate": 2.1215541221808467e-08, "loss": 0.9196, "step": 21392 }, { "epoch": 3.9576286935513845, "grad_norm": 0.8743729591369629, "learning_rate": 2.091568913904496e-08, "loss": 0.8915, "step": 21393 }, { "epoch": 3.957814532614756, "grad_norm": 1.0052157640457153, "learning_rate": 2.0617970938263676e-08, "loss": 0.9449, "step": 21394 }, { "epoch": 3.9580003716781267, "grad_norm": 0.8857167959213257, "learning_rate": 2.0322386625815092e-08, "loss": 0.7643, "step": 21395 }, { "epoch": 3.958186210741498, "grad_norm": 0.9562453627586365, "learning_rate": 2.0028936208027483e-08, "loss": 0.834, "step": 21396 }, { "epoch": 3.958372049804869, "grad_norm": 0.8608560562133789, "learning_rate": 1.9737619691140297e-08, "loss": 0.8131, "step": 21397 }, { "epoch": 3.9585578888682402, "grad_norm": 0.9653459787368774, "learning_rate": 1.9448437081381887e-08, "loss": 0.697, "step": 21398 }, { "epoch": 3.958743727931611, "grad_norm": 0.9109430909156799, "learning_rate": 1.9161388384925093e-08, "loss": 0.755, "step": 21399 }, { "epoch": 3.9589295669949824, "grad_norm": 0.858791172504425, "learning_rate": 1.8876473607887245e-08, "loss": 0.6814, "step": 21400 }, { "epoch": 3.9591154060583533, "grad_norm": null, "learning_rate": 1.8876473607887245e-08, "loss": 1.3839, "step": 21401 }, { "epoch": 3.9593012451217247, "grad_norm": 1.032059907913208, "learning_rate": 1.859369275636347e-08, "loss": 0.8256, "step": 21402 }, { "epoch": 3.9594870841850955, "grad_norm": 1.0155627727508545, "learning_rate": 1.831304583638227e-08, "loss": 0.862, "step": 21403 }, { "epoch": 3.959672923248467, "grad_norm": 0.857572078704834, "learning_rate": 1.8034532853927754e-08, "loss": 0.8946, "step": 21404 }, { "epoch": 3.959858762311838, "grad_norm": 1.0889736413955688, "learning_rate": 1.775815381495072e-08, "loss": 0.9409, "step": 21405 }, { "epoch": 3.960044601375209, 
"grad_norm": 0.8809292316436768, "learning_rate": 1.7483908725357545e-08, "loss": 0.5094, "step": 21406 }, { "epoch": 3.96023044043858, "grad_norm": 1.019468903541565, "learning_rate": 1.721179759098801e-08, "loss": 0.8253, "step": 21407 }, { "epoch": 3.9604162795019513, "grad_norm": 0.8753800988197327, "learning_rate": 1.694182041765968e-08, "loss": 0.8791, "step": 21408 }, { "epoch": 3.9606021185653226, "grad_norm": 1.059045433998108, "learning_rate": 1.6673977211123515e-08, "loss": 0.9221, "step": 21409 }, { "epoch": 3.9607879576286935, "grad_norm": 0.9621778130531311, "learning_rate": 1.640826797710826e-08, "loss": 0.8444, "step": 21410 }, { "epoch": 3.9609737966920644, "grad_norm": 0.8941175937652588, "learning_rate": 1.6144692721276054e-08, "loss": 0.6987, "step": 21411 }, { "epoch": 3.9611596357554357, "grad_norm": 0.9373825788497925, "learning_rate": 1.5883251449266832e-08, "loss": 0.8148, "step": 21412 }, { "epoch": 3.961345474818807, "grad_norm": 1.0011820793151855, "learning_rate": 1.562394416663171e-08, "loss": 0.9052, "step": 21413 }, { "epoch": 3.961531313882178, "grad_norm": 0.7819288969039917, "learning_rate": 1.53667708789329e-08, "loss": 0.6291, "step": 21414 }, { "epoch": 3.9617171529455493, "grad_norm": 0.9070609211921692, "learning_rate": 1.511173159165491e-08, "loss": 0.736, "step": 21415 }, { "epoch": 3.96190299200892, "grad_norm": 0.9019717574119568, "learning_rate": 1.4858826310226725e-08, "loss": 0.7922, "step": 21416 }, { "epoch": 3.9620888310722915, "grad_norm": 1.0442172288894653, "learning_rate": 1.4608055040066237e-08, "loss": 0.844, "step": 21417 }, { "epoch": 3.9622746701356624, "grad_norm": 1.027233362197876, "learning_rate": 1.4359417786502516e-08, "loss": 0.8094, "step": 21418 }, { "epoch": 3.9624605091990337, "grad_norm": 0.9201172590255737, "learning_rate": 1.4112914554864631e-08, "loss": 0.8485, "step": 21419 }, { "epoch": 3.962646348262405, "grad_norm": 1.0947834253311157, "learning_rate": 1.3868545350403939e-08, "loss": 
0.8768, "step": 21420 }, { "epoch": 3.962832187325776, "grad_norm": 0.980541467666626, "learning_rate": 1.3626310178327384e-08, "loss": 0.6502, "step": 21421 }, { "epoch": 3.963018026389147, "grad_norm": 0.9271747469902039, "learning_rate": 1.3386209043819708e-08, "loss": 0.5833, "step": 21422 }, { "epoch": 3.963203865452518, "grad_norm": 1.063616156578064, "learning_rate": 1.314824195199904e-08, "loss": 0.8515, "step": 21423 }, { "epoch": 3.9633897045158895, "grad_norm": 0.8098671436309814, "learning_rate": 1.2912408907950201e-08, "loss": 0.7521, "step": 21424 }, { "epoch": 3.9635755435792603, "grad_norm": 1.0300740003585815, "learning_rate": 1.26787099166914e-08, "loss": 0.7567, "step": 21425 }, { "epoch": 3.9637613826426312, "grad_norm": 1.0278445482254028, "learning_rate": 1.2447144983229742e-08, "loss": 0.9015, "step": 21426 }, { "epoch": 3.9639472217060026, "grad_norm": 0.8696473240852356, "learning_rate": 1.2217714112483514e-08, "loss": 0.7775, "step": 21427 }, { "epoch": 3.964133060769374, "grad_norm": 0.7928922772407532, "learning_rate": 1.1990417309382107e-08, "loss": 0.708, "step": 21428 }, { "epoch": 3.9643188998327448, "grad_norm": 1.0644580125808716, "learning_rate": 1.176525457874389e-08, "loss": 0.798, "step": 21429 }, { "epoch": 3.964504738896116, "grad_norm": 0.9723425507545471, "learning_rate": 1.1542225925398332e-08, "loss": 0.86, "step": 21430 }, { "epoch": 3.964690577959487, "grad_norm": 0.8694878816604614, "learning_rate": 1.1321331354097186e-08, "loss": 0.7656, "step": 21431 }, { "epoch": 3.9648764170228583, "grad_norm": 1.1160660982131958, "learning_rate": 1.1102570869547801e-08, "loss": 0.7742, "step": 21432 }, { "epoch": 3.965062256086229, "grad_norm": 0.9629189372062683, "learning_rate": 1.0885944476435316e-08, "loss": 0.9387, "step": 21433 }, { "epoch": 3.9652480951496005, "grad_norm": 0.9273543357849121, "learning_rate": 1.0671452179367158e-08, "loss": 0.7062, "step": 21434 }, { "epoch": 3.9654339342129714, "grad_norm": 
0.8834670186042786, "learning_rate": 1.0459093982928547e-08, "loss": 0.7967, "step": 21435 }, { "epoch": 3.9656197732763427, "grad_norm": 0.8330079317092896, "learning_rate": 1.0248869891660295e-08, "loss": 0.8148, "step": 21436 }, { "epoch": 3.9658056123397136, "grad_norm": 0.9004113078117371, "learning_rate": 1.0040779910036602e-08, "loss": 0.8104, "step": 21437 }, { "epoch": 3.965991451403085, "grad_norm": 0.9802833199501038, "learning_rate": 9.834824042498358e-09, "loss": 0.8014, "step": 21438 }, { "epoch": 3.9661772904664563, "grad_norm": 0.7356975078582764, "learning_rate": 9.631002293442049e-09, "loss": 0.6674, "step": 21439 }, { "epoch": 3.966363129529827, "grad_norm": 0.8870053887367249, "learning_rate": 9.429314667230849e-09, "loss": 0.7549, "step": 21440 }, { "epoch": 3.966548968593198, "grad_norm": 0.9902865886688232, "learning_rate": 9.229761168161321e-09, "loss": 0.7859, "step": 21441 }, { "epoch": 3.9667348076565694, "grad_norm": 0.9357664585113525, "learning_rate": 9.032341800485622e-09, "loss": 0.8592, "step": 21442 }, { "epoch": 3.9669206467199407, "grad_norm": 0.8407206535339355, "learning_rate": 8.837056568422597e-09, "loss": 0.7167, "step": 21443 }, { "epoch": 3.9671064857833116, "grad_norm": 0.9275605082511902, "learning_rate": 8.643905476146686e-09, "loss": 0.7188, "step": 21444 }, { "epoch": 3.967292324846683, "grad_norm": 0.9496821761131287, "learning_rate": 8.452888527765712e-09, "loss": 0.8235, "step": 21445 }, { "epoch": 3.967478163910054, "grad_norm": 0.7953393459320068, "learning_rate": 8.2640057273764e-09, "loss": 0.7986, "step": 21446 }, { "epoch": 3.967664002973425, "grad_norm": 0.9482079148292542, "learning_rate": 8.077257078986655e-09, "loss": 0.7884, "step": 21447 }, { "epoch": 3.967849842036796, "grad_norm": 0.8741645216941833, "learning_rate": 7.892642586604383e-09, "loss": 0.8575, "step": 21448 }, { "epoch": 3.9680356811001674, "grad_norm": 1.0921684503555298, "learning_rate": 7.71016225415977e-09, "loss": 0.6118, "step": 
21449 }, { "epoch": 3.9682215201635382, "grad_norm": 0.8780234456062317, "learning_rate": 7.529816085549702e-09, "loss": 0.8748, "step": 21450 }, { "epoch": 3.9684073592269096, "grad_norm": 0.8375685214996338, "learning_rate": 7.351604084615549e-09, "loss": 1.0171, "step": 21451 }, { "epoch": 3.9685931982902805, "grad_norm": 0.9099856019020081, "learning_rate": 7.175526255165377e-09, "loss": 0.8002, "step": 21452 }, { "epoch": 3.968779037353652, "grad_norm": 0.9492738246917725, "learning_rate": 7.001582600962841e-09, "loss": 0.9728, "step": 21453 }, { "epoch": 3.968964876417023, "grad_norm": 0.9797655940055847, "learning_rate": 6.829773125716088e-09, "loss": 0.7906, "step": 21454 }, { "epoch": 3.969150715480394, "grad_norm": 0.9608103036880493, "learning_rate": 6.660097833099954e-09, "loss": 0.7766, "step": 21455 }, { "epoch": 3.969336554543765, "grad_norm": 1.1397579908370972, "learning_rate": 6.492556726722665e-09, "loss": 0.9013, "step": 21456 }, { "epoch": 3.969522393607136, "grad_norm": 0.8630730509757996, "learning_rate": 6.3271498101591386e-09, "loss": 0.8758, "step": 21457 }, { "epoch": 3.9697082326705075, "grad_norm": 0.9617315530776978, "learning_rate": 6.163877086962089e-09, "loss": 0.8588, "step": 21458 }, { "epoch": 3.9698940717338784, "grad_norm": 0.9304690957069397, "learning_rate": 6.002738560584309e-09, "loss": 0.7827, "step": 21459 }, { "epoch": 3.9700799107972493, "grad_norm": 0.7468497157096863, "learning_rate": 5.843734234489695e-09, "loss": 0.5286, "step": 21460 }, { "epoch": 3.9702657498606206, "grad_norm": 0.99229496717453, "learning_rate": 5.686864112064427e-09, "loss": 0.8848, "step": 21461 }, { "epoch": 3.970451588923992, "grad_norm": 0.9466888308525085, "learning_rate": 5.5321281966502765e-09, "loss": 0.8736, "step": 21462 }, { "epoch": 3.970637427987363, "grad_norm": 1.1540240049362183, "learning_rate": 5.3795264915668106e-09, "loss": 0.8687, "step": 21463 }, { "epoch": 3.970823267050734, "grad_norm": 0.7100964188575745, 
"learning_rate": 5.22905900005588e-09, "loss": 0.5729, "step": 21464 }, { "epoch": 3.971009106114105, "grad_norm": 1.0300414562225342, "learning_rate": 5.08072572532603e-09, "loss": 0.801, "step": 21465 }, { "epoch": 3.9711949451774764, "grad_norm": 0.9186599254608154, "learning_rate": 4.934526670552497e-09, "loss": 0.6908, "step": 21466 }, { "epoch": 3.9713807842408473, "grad_norm": 1.088610291481018, "learning_rate": 4.7904618388550095e-09, "loss": 0.8143, "step": 21467 }, { "epoch": 3.9715666233042186, "grad_norm": 0.7681308388710022, "learning_rate": 4.6485312333088835e-09, "loss": 0.7136, "step": 21468 }, { "epoch": 3.97175246236759, "grad_norm": 0.9112083911895752, "learning_rate": 4.508734856933927e-09, "loss": 0.7703, "step": 21469 }, { "epoch": 3.971938301430961, "grad_norm": 0.8502170443534851, "learning_rate": 4.371072712727742e-09, "loss": 0.7863, "step": 21470 }, { "epoch": 3.9721241404943317, "grad_norm": 0.8810780644416809, "learning_rate": 4.235544803610214e-09, "loss": 0.7088, "step": 21471 }, { "epoch": 3.972309979557703, "grad_norm": 0.7818952798843384, "learning_rate": 4.102151132501231e-09, "loss": 0.6869, "step": 21472 }, { "epoch": 3.9724958186210744, "grad_norm": 0.9463311433792114, "learning_rate": 3.970891702220758e-09, "loss": 0.8012, "step": 21473 }, { "epoch": 3.9726816576844453, "grad_norm": 1.06798255443573, "learning_rate": 3.841766515588763e-09, "loss": 0.7741, "step": 21474 }, { "epoch": 3.972867496747816, "grad_norm": 0.8491981029510498, "learning_rate": 3.7147755753474954e-09, "loss": 0.5917, "step": 21475 }, { "epoch": 3.9730533358111875, "grad_norm": 0.906574010848999, "learning_rate": 3.5899188842170027e-09, "loss": 0.5356, "step": 21476 }, { "epoch": 3.973239174874559, "grad_norm": 0.9730631113052368, "learning_rate": 3.467196444850718e-09, "loss": 0.8405, "step": 21477 }, { "epoch": 3.9734250139379297, "grad_norm": 0.8137460350990295, "learning_rate": 3.346608259890971e-09, "loss": 0.7913, "step": 21478 }, { "epoch": 
3.973610853001301, "grad_norm": 1.1336978673934937, "learning_rate": 3.2281543318912756e-09, "loss": 0.8974, "step": 21479 }, { "epoch": 3.973796692064672, "grad_norm": 1.1150788068771362, "learning_rate": 3.1118346633829397e-09, "loss": 0.5933, "step": 21480 }, { "epoch": 3.9739825311280432, "grad_norm": 1.1757761240005493, "learning_rate": 2.9976492568528635e-09, "loss": 0.9587, "step": 21481 }, { "epoch": 3.974168370191414, "grad_norm": 0.9290224313735962, "learning_rate": 2.8855981147324352e-09, "loss": 0.8138, "step": 21482 }, { "epoch": 3.9743542092547854, "grad_norm": 0.8209988474845886, "learning_rate": 2.7756812394197363e-09, "loss": 0.7527, "step": 21483 }, { "epoch": 3.9745400483181563, "grad_norm": 0.9280112981796265, "learning_rate": 2.6678986332573373e-09, "loss": 0.8674, "step": 21484 }, { "epoch": 3.9747258873815277, "grad_norm": 1.1409305334091187, "learning_rate": 2.5622502985434003e-09, "loss": 0.8977, "step": 21485 }, { "epoch": 3.9749117264448985, "grad_norm": 1.0304864645004272, "learning_rate": 2.45873623754278e-09, "loss": 0.7986, "step": 21486 }, { "epoch": 3.97509756550827, "grad_norm": 0.8403981924057007, "learning_rate": 2.3573564524537183e-09, "loss": 0.8795, "step": 21487 }, { "epoch": 3.975283404571641, "grad_norm": 0.9824759364128113, "learning_rate": 2.2581109454411496e-09, "loss": 1.0885, "step": 21488 }, { "epoch": 3.975469243635012, "grad_norm": 1.0141150951385498, "learning_rate": 2.1609997186367024e-09, "loss": 1.0805, "step": 21489 }, { "epoch": 3.975655082698383, "grad_norm": 0.9527256488800049, "learning_rate": 2.0660227740942893e-09, "loss": 0.7499, "step": 21490 }, { "epoch": 3.9758409217617543, "grad_norm": 0.9705147743225098, "learning_rate": 1.9731801138456184e-09, "loss": 0.8076, "step": 21491 }, { "epoch": 3.9760267608251256, "grad_norm": 1.0493558645248413, "learning_rate": 1.882471739889091e-09, "loss": 0.6033, "step": 21492 }, { "epoch": 3.9762125998884965, "grad_norm": 0.9863125681877136, "learning_rate": 
1.7938976541342912e-09, "loss": 0.9957, "step": 21493 }, { "epoch": 3.976398438951868, "grad_norm": 0.9200931191444397, "learning_rate": 1.707457858490802e-09, "loss": 0.8426, "step": 21494 }, { "epoch": 3.9765842780152387, "grad_norm": 1.5785354375839233, "learning_rate": 1.6231523548015937e-09, "loss": 1.3327, "step": 21495 }, { "epoch": 3.97677011707861, "grad_norm": 0.9231945276260376, "learning_rate": 1.5409811448541257e-09, "loss": 0.9703, "step": 21496 }, { "epoch": 3.976955956141981, "grad_norm": 0.8975953459739685, "learning_rate": 1.4609442304136523e-09, "loss": 0.7697, "step": 21497 }, { "epoch": 3.9771417952053523, "grad_norm": 1.0590022802352905, "learning_rate": 1.383041613178815e-09, "loss": 0.9046, "step": 21498 }, { "epoch": 3.977327634268723, "grad_norm": 1.0810893774032593, "learning_rate": 1.3072732948260502e-09, "loss": 0.7008, "step": 21499 }, { "epoch": 3.9775134733320945, "grad_norm": 0.938327431678772, "learning_rate": 1.2336392769651816e-09, "loss": 0.7577, "step": 21500 }, { "epoch": 3.9776993123954654, "grad_norm": 0.9448954463005066, "learning_rate": 1.162139561172726e-09, "loss": 0.7967, "step": 21501 }, { "epoch": 3.9778851514588367, "grad_norm": 1.4106966257095337, "learning_rate": 1.0927741489585862e-09, "loss": 0.6138, "step": 21502 }, { "epoch": 3.978070990522208, "grad_norm": 0.882112979888916, "learning_rate": 1.0255430418215638e-09, "loss": 0.6142, "step": 21503 }, { "epoch": 3.978256829585579, "grad_norm": 1.1243953704833984, "learning_rate": 9.604462411827441e-10, "loss": 1.0005, "step": 21504 }, { "epoch": 3.97844266864895, "grad_norm": 0.925744891166687, "learning_rate": 8.974837484410081e-10, "loss": 0.9544, "step": 21505 }, { "epoch": 3.978628507712321, "grad_norm": 0.8985171318054199, "learning_rate": 8.366555649397256e-10, "loss": 0.9202, "step": 21506 }, { "epoch": 3.9788143467756925, "grad_norm": 0.9336652755737305, "learning_rate": 7.779616919667553e-10, "loss": 0.7498, "step": 21507 }, { "epoch": 3.9790001858390633, 
"grad_norm": 0.9400238990783691, "learning_rate": 7.214021307877517e-10, "loss": 0.8356, "step": 21508 }, { "epoch": 3.9791860249024342, "grad_norm": 0.9942156076431274, "learning_rate": 6.669768826017552e-10, "loss": 0.7825, "step": 21509 }, { "epoch": 3.9793718639658056, "grad_norm": 1.040473222732544, "learning_rate": 6.146859485745004e-10, "loss": 0.7813, "step": 21510 }, { "epoch": 3.979557703029177, "grad_norm": 1.0206513404846191, "learning_rate": 5.645293298273124e-10, "loss": 0.8759, "step": 21511 }, { "epoch": 3.9797435420925478, "grad_norm": 0.8693331480026245, "learning_rate": 5.165070274260053e-10, "loss": 0.72, "step": 21512 }, { "epoch": 3.979929381155919, "grad_norm": 0.9240720868110657, "learning_rate": 4.706190423808821e-10, "loss": 0.7878, "step": 21513 }, { "epoch": 3.98011522021929, "grad_norm": 0.8996371626853943, "learning_rate": 4.268653756911434e-10, "loss": 0.851, "step": 21514 }, { "epoch": 3.9803010592826613, "grad_norm": 1.18083655834198, "learning_rate": 3.852460282893766e-10, "loss": 0.8969, "step": 21515 }, { "epoch": 3.980486898346032, "grad_norm": 0.8848041296005249, "learning_rate": 3.4576100105265796e-10, "loss": 0.9113, "step": 21516 }, { "epoch": 3.9806727374094035, "grad_norm": 1.0805909633636475, "learning_rate": 3.084102948247569e-10, "loss": 0.8013, "step": 21517 }, { "epoch": 3.9808585764727744, "grad_norm": 1.1475094556808472, "learning_rate": 2.731939104161363e-10, "loss": 0.7648, "step": 21518 }, { "epoch": 3.9810444155361457, "grad_norm": 0.8849775195121765, "learning_rate": 2.4011184855954327e-10, "loss": 0.6718, "step": 21519 }, { "epoch": 3.9812302545995166, "grad_norm": 0.8735383749008179, "learning_rate": 2.0916410997662283e-10, "loss": 0.683, "step": 21520 }, { "epoch": 3.981416093662888, "grad_norm": 0.8057239055633545, "learning_rate": 1.803506953335088e-10, "loss": 0.7949, "step": 21521 }, { "epoch": 3.9816019327262593, "grad_norm": 0.8094781041145325, "learning_rate": 1.5367160521861935e-10, "loss": 0.6191, 
"step": 21522 }, { "epoch": 3.98178777178963, "grad_norm": 0.9692800045013428, "learning_rate": 1.2912684022037269e-10, "loss": 0.9201, "step": 21523 }, { "epoch": 3.981973610853001, "grad_norm": 1.0223878622055054, "learning_rate": 1.0671640086057366e-10, "loss": 0.9353, "step": 21524 } ], "logging_steps": 1, "max_steps": 21524, "num_input_tokens_seen": 0, "num_train_epochs": 4, "save_steps": 5381, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 8.362292502065971e+17, "train_batch_size": 2, "trial_name": null, "trial_params": null }