diff --git "a/overfit/trainer_state.json" "b/overfit/trainer_state.json" new file mode 100644--- /dev/null +++ "b/overfit/trainer_state.json" @@ -0,0 +1,35033 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.24976272541085967, + "eval_steps": 500, + "global_step": 5000, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 4.9952545082171936e-05, + "grad_norm": 0.7773185968399048, + "learning_rate": 1.6638935108153078e-07, + "loss": 1.033, + "step": 1 + }, + { + "epoch": 9.990509016434387e-05, + "grad_norm": 0.9185749888420105, + "learning_rate": 3.3277870216306157e-07, + "loss": 1.0129, + "step": 2 + }, + { + "epoch": 0.0001498576352465158, + "grad_norm": 0.825122058391571, + "learning_rate": 4.991680532445924e-07, + "loss": 1.0114, + "step": 3 + }, + { + "epoch": 0.00019981018032868775, + "grad_norm": 1.0089958906173706, + "learning_rate": 6.655574043261231e-07, + "loss": 1.0068, + "step": 4 + }, + { + "epoch": 0.0002497627254108597, + "grad_norm": 0.9923648238182068, + "learning_rate": 8.319467554076539e-07, + "loss": 1.0037, + "step": 5 + }, + { + "epoch": 0.0002997152704930316, + "grad_norm": 1.031822681427002, + "learning_rate": 9.983361064891848e-07, + "loss": 1.0155, + "step": 6 + }, + { + "epoch": 0.0003496678155752036, + "grad_norm": 1.0755441188812256, + "learning_rate": 1.1647254575707155e-06, + "loss": 1.0152, + "step": 7 + }, + { + "epoch": 0.0003996203606573755, + "grad_norm": 0.9725750088691711, + "learning_rate": 1.3311148086522463e-06, + "loss": 1.0115, + "step": 8 + }, + { + "epoch": 0.0004495729057395474, + "grad_norm": 0.9579866528511047, + "learning_rate": 1.4975041597337772e-06, + "loss": 1.008, + "step": 9 + }, + { + "epoch": 0.0004995254508217194, + "grad_norm": 0.7439063787460327, + "learning_rate": 1.6638935108153078e-06, + "loss": 1.0197, + "step": 10 + }, + { + "epoch": 0.0005494779959038913, + "grad_norm": 0.8231655359268188, + "learning_rate": 1.8302828618968388e-06, + "loss": 1.0048, + "step": 11 + }, + { + "epoch": 0.0005994305409860632, + "grad_norm": 1.045804738998413, + "learning_rate": 1.9966722129783695e-06, + "loss": 1.0094, + "step": 12 + }, + { + "epoch": 0.0006493830860682351, + "grad_norm": 0.7114976644515991, + "learning_rate": 2.1630615640599005e-06, + "loss": 1.0072, + "step": 13 + }, + { + "epoch": 0.0006993356311504072, + "grad_norm": 1.0108648538589478, + "learning_rate": 2.329450915141431e-06, + "loss": 1.0114, + "step": 14 + }, + { + "epoch": 0.0007492881762325791, + "grad_norm": 0.8935947418212891, + "learning_rate": 2.4958402662229616e-06, + "loss": 1.0171, + "step": 15 + }, + { + "epoch": 0.000799240721314751, + "grad_norm": 0.9089986681938171, + "learning_rate": 2.6622296173044925e-06, + "loss": 1.0083, + "step": 16 + }, + { + "epoch": 0.0008491932663969229, + "grad_norm": 0.9433355927467346, + "learning_rate": 2.8286189683860235e-06, + "loss": 1.0123, + "step": 17 + }, + { + "epoch": 0.0008991458114790948, + "grad_norm": 1.0307338237762451, + "learning_rate": 2.9950083194675545e-06, + "loss": 1.0065, + "step": 18 + }, + { + "epoch": 0.0009490983565612668, + "grad_norm": 0.8762496113777161, + "learning_rate": 3.161397670549085e-06, + "loss": 1.0107, + "step": 19 + }, + { + "epoch": 0.0009990509016434388, + "grad_norm": 1.0380902290344238, + "learning_rate": 3.3277870216306156e-06, + "loss": 1.0164, + "step": 20 + }, + { + "epoch": 0.0010490034467256107, + "grad_norm": 1.096903920173645, + "learning_rate": 3.494176372712146e-06, + 
"loss": 1.0038, + "step": 21 + }, + { + "epoch": 0.0010989559918077826, + "grad_norm": 0.9560140371322632, + "learning_rate": 3.6605657237936775e-06, + "loss": 1.0121, + "step": 22 + }, + { + "epoch": 0.0011489085368899545, + "grad_norm": 1.0501253604888916, + "learning_rate": 3.826955074875208e-06, + "loss": 1.0069, + "step": 23 + }, + { + "epoch": 0.0011988610819721264, + "grad_norm": 0.942711591720581, + "learning_rate": 3.993344425956739e-06, + "loss": 1.0007, + "step": 24 + }, + { + "epoch": 0.0012488136270542983, + "grad_norm": 1.0343027114868164, + "learning_rate": 4.159733777038269e-06, + "loss": 1.0116, + "step": 25 + }, + { + "epoch": 0.0012987661721364703, + "grad_norm": 0.9838687777519226, + "learning_rate": 4.326123128119801e-06, + "loss": 1.0115, + "step": 26 + }, + { + "epoch": 0.0013487187172186424, + "grad_norm": 1.0810514688491821, + "learning_rate": 4.492512479201331e-06, + "loss": 0.9983, + "step": 27 + }, + { + "epoch": 0.0013986712623008143, + "grad_norm": 0.7168344855308533, + "learning_rate": 4.658901830282862e-06, + "loss": 1.0134, + "step": 28 + }, + { + "epoch": 0.0014486238073829862, + "grad_norm": 1.1685770750045776, + "learning_rate": 4.825291181364393e-06, + "loss": 0.9973, + "step": 29 + }, + { + "epoch": 0.0014985763524651581, + "grad_norm": 1.273232102394104, + "learning_rate": 4.991680532445923e-06, + "loss": 0.9999, + "step": 30 + }, + { + "epoch": 0.00154852889754733, + "grad_norm": 1.1402456760406494, + "learning_rate": 5.158069883527455e-06, + "loss": 1.0042, + "step": 31 + }, + { + "epoch": 0.001598481442629502, + "grad_norm": 1.1441937685012817, + "learning_rate": 5.324459234608985e-06, + "loss": 0.997, + "step": 32 + }, + { + "epoch": 0.0016484339877116739, + "grad_norm": 1.186643123626709, + "learning_rate": 5.490848585690516e-06, + "loss": 0.999, + "step": 33 + }, + { + "epoch": 0.0016983865327938458, + "grad_norm": 1.1428825855255127, + "learning_rate": 5.657237936772047e-06, + "loss": 1.0033, + "step": 34 + }, + { + "epoch": 0.0017483390778760177, + "grad_norm": 1.1740399599075317, + "learning_rate": 5.823627287853577e-06, + "loss": 0.9939, + "step": 35 + }, + { + "epoch": 0.0017982916229581896, + "grad_norm": 1.2727118730545044, + "learning_rate": 5.990016638935109e-06, + "loss": 0.991, + "step": 36 + }, + { + "epoch": 0.0018482441680403618, + "grad_norm": 1.3344697952270508, + "learning_rate": 6.156405990016639e-06, + "loss": 0.9903, + "step": 37 + }, + { + "epoch": 0.0018981967131225337, + "grad_norm": 1.3692967891693115, + "learning_rate": 6.32279534109817e-06, + "loss": 0.9859, + "step": 38 + }, + { + "epoch": 0.0019481492582047056, + "grad_norm": 1.4418965578079224, + "learning_rate": 6.489184692179701e-06, + "loss": 0.9881, + "step": 39 + }, + { + "epoch": 0.0019981018032868775, + "grad_norm": 1.2858428955078125, + "learning_rate": 6.655574043261231e-06, + "loss": 0.9935, + "step": 40 + }, + { + "epoch": 0.0020480543483690494, + "grad_norm": 1.5297908782958984, + "learning_rate": 6.821963394342762e-06, + "loss": 0.9788, + "step": 41 + }, + { + "epoch": 0.0020980068934512213, + "grad_norm": 1.7121331691741943, + "learning_rate": 6.988352745424292e-06, + "loss": 0.9696, + "step": 42 + }, + { + "epoch": 0.0021479594385333933, + "grad_norm": 1.3771235942840576, + "learning_rate": 7.154742096505825e-06, + "loss": 0.9902, + "step": 43 + }, + { + "epoch": 0.002197911983615565, + "grad_norm": 1.4865529537200928, + "learning_rate": 7.321131447587355e-06, + "loss": 0.9888, + "step": 44 + }, + { + "epoch": 0.002247864528697737, + "grad_norm": 
1.8075222969055176, + "learning_rate": 7.487520798668886e-06, + "loss": 0.9695, + "step": 45 + }, + { + "epoch": 0.002297817073779909, + "grad_norm": 1.2856154441833496, + "learning_rate": 7.653910149750416e-06, + "loss": 0.9913, + "step": 46 + }, + { + "epoch": 0.002347769618862081, + "grad_norm": 1.4229388236999512, + "learning_rate": 7.820299500831948e-06, + "loss": 1.1509, + "step": 47 + }, + { + "epoch": 0.002397722163944253, + "grad_norm": 1.6663596630096436, + "learning_rate": 7.986688851913478e-06, + "loss": 0.9728, + "step": 48 + }, + { + "epoch": 0.0024476747090264248, + "grad_norm": 2.004216194152832, + "learning_rate": 8.153078202995008e-06, + "loss": 0.9564, + "step": 49 + }, + { + "epoch": 0.0024976272541085967, + "grad_norm": 2.028611898422241, + "learning_rate": 8.319467554076538e-06, + "loss": 0.9521, + "step": 50 + }, + { + "epoch": 0.0025475797991907686, + "grad_norm": 2.201580762863159, + "learning_rate": 8.48585690515807e-06, + "loss": 0.9402, + "step": 51 + }, + { + "epoch": 0.0025975323442729405, + "grad_norm": 2.3311357498168945, + "learning_rate": 8.652246256239602e-06, + "loss": 0.9346, + "step": 52 + }, + { + "epoch": 0.002647484889355113, + "grad_norm": 2.1405184268951416, + "learning_rate": 8.818635607321132e-06, + "loss": 0.9427, + "step": 53 + }, + { + "epoch": 0.0026974374344372848, + "grad_norm": 2.3333184719085693, + "learning_rate": 8.985024958402662e-06, + "loss": 0.9334, + "step": 54 + }, + { + "epoch": 0.0027473899795194567, + "grad_norm": 2.4782657623291016, + "learning_rate": 9.151414309484194e-06, + "loss": 0.9148, + "step": 55 + }, + { + "epoch": 0.0027973425246016286, + "grad_norm": 2.196162462234497, + "learning_rate": 9.317803660565724e-06, + "loss": 0.9227, + "step": 56 + }, + { + "epoch": 0.0028472950696838005, + "grad_norm": 2.2567436695098877, + "learning_rate": 9.484193011647254e-06, + "loss": 0.9203, + "step": 57 + }, + { + "epoch": 0.0028972476147659724, + "grad_norm": 2.78241229057312, + "learning_rate": 9.650582362728786e-06, + "loss": 0.8954, + "step": 58 + }, + { + "epoch": 0.0029472001598481443, + "grad_norm": 2.437448740005493, + "learning_rate": 9.816971713810316e-06, + "loss": 0.9055, + "step": 59 + }, + { + "epoch": 0.0029971527049303163, + "grad_norm": 2.1612024307250977, + "learning_rate": 9.983361064891846e-06, + "loss": 0.907, + "step": 60 + }, + { + "epoch": 0.003047105250012488, + "grad_norm": 3.0380752086639404, + "learning_rate": 1.0149750415973378e-05, + "loss": 0.8629, + "step": 61 + }, + { + "epoch": 0.00309705779509466, + "grad_norm": 2.7716970443725586, + "learning_rate": 1.031613976705491e-05, + "loss": 0.8628, + "step": 62 + }, + { + "epoch": 0.003147010340176832, + "grad_norm": 2.7553319931030273, + "learning_rate": 1.048252911813644e-05, + "loss": 0.8755, + "step": 63 + }, + { + "epoch": 0.003196962885259004, + "grad_norm": 3.2631242275238037, + "learning_rate": 1.064891846921797e-05, + "loss": 0.8277, + "step": 64 + }, + { + "epoch": 0.003246915430341176, + "grad_norm": 3.2757086753845215, + "learning_rate": 1.0815307820299502e-05, + "loss": 0.8113, + "step": 65 + }, + { + "epoch": 0.0032968679754233478, + "grad_norm": 3.2490997314453125, + "learning_rate": 1.0981697171381032e-05, + "loss": 0.8045, + "step": 66 + }, + { + "epoch": 0.0033468205205055197, + "grad_norm": 2.903132200241089, + "learning_rate": 1.1148086522462562e-05, + "loss": 0.825, + "step": 67 + }, + { + "epoch": 0.0033967730655876916, + "grad_norm": 3.432964324951172, + "learning_rate": 1.1314475873544094e-05, + "loss": 0.7689, + "step": 68 + }, 
+ { + "epoch": 0.0034467256106698635, + "grad_norm": 3.100541591644287, + "learning_rate": 1.1480865224625624e-05, + "loss": 0.7918, + "step": 69 + }, + { + "epoch": 0.0034966781557520354, + "grad_norm": 3.3243002891540527, + "learning_rate": 1.1647254575707154e-05, + "loss": 0.7582, + "step": 70 + }, + { + "epoch": 0.0035466307008342073, + "grad_norm": 3.3712973594665527, + "learning_rate": 1.1813643926788686e-05, + "loss": 0.7404, + "step": 71 + }, + { + "epoch": 0.0035965832459163793, + "grad_norm": 3.4654033184051514, + "learning_rate": 1.1980033277870218e-05, + "loss": 0.7144, + "step": 72 + }, + { + "epoch": 0.003646535790998551, + "grad_norm": 2.8961360454559326, + "learning_rate": 1.2146422628951748e-05, + "loss": 0.7529, + "step": 73 + }, + { + "epoch": 0.0036964883360807235, + "grad_norm": 3.488382577896118, + "learning_rate": 1.2312811980033278e-05, + "loss": 0.695, + "step": 74 + }, + { + "epoch": 0.0037464408811628954, + "grad_norm": 3.2645366191864014, + "learning_rate": 1.247920133111481e-05, + "loss": 0.6984, + "step": 75 + }, + { + "epoch": 0.0037963934262450674, + "grad_norm": 3.4284181594848633, + "learning_rate": 1.264559068219634e-05, + "loss": 0.6746, + "step": 76 + }, + { + "epoch": 0.0038463459713272393, + "grad_norm": 3.384287118911743, + "learning_rate": 1.281198003327787e-05, + "loss": 0.665, + "step": 77 + }, + { + "epoch": 0.003896298516409411, + "grad_norm": 2.730473279953003, + "learning_rate": 1.2978369384359402e-05, + "loss": 0.7127, + "step": 78 + }, + { + "epoch": 0.003946251061491583, + "grad_norm": 3.343994379043579, + "learning_rate": 1.3144758735440932e-05, + "loss": 0.6326, + "step": 79 + }, + { + "epoch": 0.003996203606573755, + "grad_norm": 3.3340377807617188, + "learning_rate": 1.3311148086522462e-05, + "loss": 0.6248, + "step": 80 + }, + { + "epoch": 0.004046156151655927, + "grad_norm": 3.1976561546325684, + "learning_rate": 1.3477537437603992e-05, + "loss": 0.6187, + "step": 81 + }, + { + "epoch": 0.004096108696738099, + "grad_norm": 3.5234458446502686, + "learning_rate": 1.3643926788685524e-05, + "loss": 0.5748, + "step": 82 + }, + { + "epoch": 0.004146061241820271, + "grad_norm": 3.245985746383667, + "learning_rate": 1.3810316139767054e-05, + "loss": 0.5905, + "step": 83 + }, + { + "epoch": 0.004196013786902443, + "grad_norm": 3.3966660499572754, + "learning_rate": 1.3976705490848584e-05, + "loss": 0.546, + "step": 84 + }, + { + "epoch": 0.004245966331984615, + "grad_norm": 2.75223970413208, + "learning_rate": 1.4143094841930118e-05, + "loss": 0.6201, + "step": 85 + }, + { + "epoch": 0.0042959188770667865, + "grad_norm": 2.6243913173675537, + "learning_rate": 1.430948419301165e-05, + "loss": 0.6154, + "step": 86 + }, + { + "epoch": 0.004345871422148958, + "grad_norm": 2.5606563091278076, + "learning_rate": 1.447587354409318e-05, + "loss": 0.7891, + "step": 87 + }, + { + "epoch": 0.00439582396723113, + "grad_norm": 2.9580576419830322, + "learning_rate": 1.464226289517471e-05, + "loss": 0.5363, + "step": 88 + }, + { + "epoch": 0.004445776512313302, + "grad_norm": 2.3361687660217285, + "learning_rate": 1.4808652246256242e-05, + "loss": 0.6123, + "step": 89 + }, + { + "epoch": 0.004495729057395474, + "grad_norm": 2.6858437061309814, + "learning_rate": 1.4975041597337772e-05, + "loss": 0.5452, + "step": 90 + }, + { + "epoch": 0.004545681602477646, + "grad_norm": 2.1839842796325684, + "learning_rate": 1.5141430948419302e-05, + "loss": 0.6144, + "step": 91 + }, + { + "epoch": 0.004595634147559818, + "grad_norm": 2.5700578689575195, + "learning_rate": 
1.5307820299500832e-05, + "loss": 0.53, + "step": 92 + }, + { + "epoch": 0.00464558669264199, + "grad_norm": 1.4318684339523315, + "learning_rate": 1.5474209650582364e-05, + "loss": 0.7077, + "step": 93 + }, + { + "epoch": 0.004695539237724162, + "grad_norm": 2.430396795272827, + "learning_rate": 1.5640599001663896e-05, + "loss": 0.5252, + "step": 94 + }, + { + "epoch": 0.004745491782806334, + "grad_norm": 2.640272378921509, + "learning_rate": 1.5806988352745424e-05, + "loss": 0.4662, + "step": 95 + }, + { + "epoch": 0.004795444327888506, + "grad_norm": 2.7492828369140625, + "learning_rate": 1.5973377703826956e-05, + "loss": 0.4379, + "step": 96 + }, + { + "epoch": 0.004845396872970678, + "grad_norm": 2.799135208129883, + "learning_rate": 1.6139767054908485e-05, + "loss": 0.4063, + "step": 97 + }, + { + "epoch": 0.0048953494180528495, + "grad_norm": 2.3904054164886475, + "learning_rate": 1.6306156405990016e-05, + "loss": 0.469, + "step": 98 + }, + { + "epoch": 0.004945301963135021, + "grad_norm": 2.3703794479370117, + "learning_rate": 1.6472545757071548e-05, + "loss": 0.4496, + "step": 99 + }, + { + "epoch": 0.004995254508217193, + "grad_norm": 2.175579071044922, + "learning_rate": 1.6638935108153077e-05, + "loss": 0.4714, + "step": 100 + }, + { + "epoch": 0.005045207053299365, + "grad_norm": 1.3306567668914795, + "learning_rate": 1.680532445923461e-05, + "loss": 0.6222, + "step": 101 + }, + { + "epoch": 0.005095159598381537, + "grad_norm": 1.5607502460479736, + "learning_rate": 1.697171381031614e-05, + "loss": 0.5627, + "step": 102 + }, + { + "epoch": 0.005145112143463709, + "grad_norm": 2.731821298599243, + "learning_rate": 1.713810316139767e-05, + "loss": 0.3134, + "step": 103 + }, + { + "epoch": 0.005195064688545881, + "grad_norm": 1.9303752183914185, + "learning_rate": 1.7304492512479204e-05, + "loss": 0.4499, + "step": 104 + }, + { + "epoch": 0.005245017233628053, + "grad_norm": 2.5264477729797363, + "learning_rate": 1.7470881863560736e-05, + "loss": 0.3067, + "step": 105 + }, + { + "epoch": 0.005294969778710226, + "grad_norm": 1.63570237159729, + "learning_rate": 1.7637271214642264e-05, + "loss": 0.5084, + "step": 106 + }, + { + "epoch": 0.005344922323792398, + "grad_norm": 1.7646117210388184, + "learning_rate": 1.7803660565723796e-05, + "loss": 0.4419, + "step": 107 + }, + { + "epoch": 0.0053948748688745695, + "grad_norm": 1.8301258087158203, + "learning_rate": 1.7970049916805324e-05, + "loss": 0.424, + "step": 108 + }, + { + "epoch": 0.0054448274139567415, + "grad_norm": 1.6305100917816162, + "learning_rate": 1.8136439267886856e-05, + "loss": 0.4842, + "step": 109 + }, + { + "epoch": 0.005494779959038913, + "grad_norm": 2.2797558307647705, + "learning_rate": 1.8302828618968388e-05, + "loss": 0.2604, + "step": 110 + }, + { + "epoch": 0.005544732504121085, + "grad_norm": 2.1746997833251953, + "learning_rate": 1.8469217970049916e-05, + "loss": 0.2696, + "step": 111 + }, + { + "epoch": 0.005594685049203257, + "grad_norm": 1.322988510131836, + "learning_rate": 1.8635607321131448e-05, + "loss": 0.4872, + "step": 112 + }, + { + "epoch": 0.005644637594285429, + "grad_norm": 2.097958564758301, + "learning_rate": 1.880199667221298e-05, + "loss": 0.2709, + "step": 113 + }, + { + "epoch": 0.005694590139367601, + "grad_norm": 2.066315174102783, + "learning_rate": 1.896838602329451e-05, + "loss": 0.2237, + "step": 114 + }, + { + "epoch": 0.005744542684449773, + "grad_norm": 1.6474424600601196, + "learning_rate": 1.913477537437604e-05, + "loss": 0.3059, + "step": 115 + }, + { + "epoch": 
0.005794495229531945, + "grad_norm": 1.3982146978378296, + "learning_rate": 1.9301164725457572e-05, + "loss": 0.4101, + "step": 116 + }, + { + "epoch": 0.005844447774614117, + "grad_norm": 1.5813037157058716, + "learning_rate": 1.94675540765391e-05, + "loss": 0.3201, + "step": 117 + }, + { + "epoch": 0.005894400319696289, + "grad_norm": 1.168007731437683, + "learning_rate": 1.9633943427620632e-05, + "loss": 0.4962, + "step": 118 + }, + { + "epoch": 0.005944352864778461, + "grad_norm": 1.411887288093567, + "learning_rate": 1.9800332778702164e-05, + "loss": 0.3718, + "step": 119 + }, + { + "epoch": 0.0059943054098606325, + "grad_norm": 1.703660011291504, + "learning_rate": 1.9966722129783693e-05, + "loss": 0.1875, + "step": 120 + }, + { + "epoch": 0.0060442579549428044, + "grad_norm": 1.4341709613800049, + "learning_rate": 2.0133111480865224e-05, + "loss": 0.2564, + "step": 121 + }, + { + "epoch": 0.006094210500024976, + "grad_norm": 1.3891007900238037, + "learning_rate": 2.0299500831946756e-05, + "loss": 0.2596, + "step": 122 + }, + { + "epoch": 0.006144163045107148, + "grad_norm": 1.1879467964172363, + "learning_rate": 2.0465890183028288e-05, + "loss": 0.3106, + "step": 123 + }, + { + "epoch": 0.00619411559018932, + "grad_norm": 1.5363272428512573, + "learning_rate": 2.063227953410982e-05, + "loss": 0.1255, + "step": 124 + }, + { + "epoch": 0.006244068135271492, + "grad_norm": 1.3745156526565552, + "learning_rate": 2.0798668885191348e-05, + "loss": 0.1939, + "step": 125 + }, + { + "epoch": 0.006294020680353664, + "grad_norm": 1.1485426425933838, + "learning_rate": 2.096505823627288e-05, + "loss": 0.2596, + "step": 126 + }, + { + "epoch": 0.006343973225435836, + "grad_norm": 1.1569701433181763, + "learning_rate": 2.1131447587354412e-05, + "loss": 0.2194, + "step": 127 + }, + { + "epoch": 0.006393925770518008, + "grad_norm": 1.236960768699646, + "learning_rate": 2.129783693843594e-05, + "loss": 0.0935, + "step": 128 + }, + { + "epoch": 0.00644387831560018, + "grad_norm": 0.994005024433136, + "learning_rate": 2.1464226289517472e-05, + "loss": 0.254, + "step": 129 + }, + { + "epoch": 0.006493830860682352, + "grad_norm": 1.0437902212142944, + "learning_rate": 2.1630615640599004e-05, + "loss": 0.1314, + "step": 130 + }, + { + "epoch": 0.006543783405764524, + "grad_norm": 1.0001115798950195, + "learning_rate": 2.1797004991680532e-05, + "loss": 0.22, + "step": 131 + }, + { + "epoch": 0.0065937359508466955, + "grad_norm": 0.9341840744018555, + "learning_rate": 2.1963394342762064e-05, + "loss": 0.1162, + "step": 132 + }, + { + "epoch": 0.0066436884959288674, + "grad_norm": 0.9063742756843567, + "learning_rate": 2.2129783693843596e-05, + "loss": 0.1763, + "step": 133 + }, + { + "epoch": 0.006693641041011039, + "grad_norm": 0.9084346294403076, + "learning_rate": 2.2296173044925124e-05, + "loss": 0.0758, + "step": 134 + }, + { + "epoch": 0.006743593586093211, + "grad_norm": 0.7937085032463074, + "learning_rate": 2.2462562396006656e-05, + "loss": 0.3322, + "step": 135 + }, + { + "epoch": 0.006793546131175383, + "grad_norm": 0.7174133062362671, + "learning_rate": 2.2628951747088188e-05, + "loss": 0.1358, + "step": 136 + }, + { + "epoch": 0.006843498676257555, + "grad_norm": 0.6804559826850891, + "learning_rate": 2.2795341098169716e-05, + "loss": 0.1928, + "step": 137 + }, + { + "epoch": 0.006893451221339727, + "grad_norm": 0.6757315993309021, + "learning_rate": 2.296173044925125e-05, + "loss": 0.2094, + "step": 138 + }, + { + "epoch": 0.006943403766421899, + "grad_norm": 0.598112165927887, + 
"learning_rate": 2.312811980033278e-05, + "loss": 0.3031, + "step": 139 + }, + { + "epoch": 0.006993356311504071, + "grad_norm": 0.8176196813583374, + "learning_rate": 2.329450915141431e-05, + "loss": 0.2046, + "step": 140 + }, + { + "epoch": 0.007043308856586243, + "grad_norm": 0.5473939776420593, + "learning_rate": 2.3460898502495844e-05, + "loss": 0.0689, + "step": 141 + }, + { + "epoch": 0.007093261401668415, + "grad_norm": 0.8834987282752991, + "learning_rate": 2.3627287853577372e-05, + "loss": 0.2324, + "step": 142 + }, + { + "epoch": 0.007143213946750587, + "grad_norm": 0.6660081744194031, + "learning_rate": 2.3793677204658904e-05, + "loss": 0.0717, + "step": 143 + }, + { + "epoch": 0.0071931664918327585, + "grad_norm": 0.6782917976379395, + "learning_rate": 2.3960066555740436e-05, + "loss": 0.1032, + "step": 144 + }, + { + "epoch": 0.00724311903691493, + "grad_norm": 0.8911049962043762, + "learning_rate": 2.4126455906821964e-05, + "loss": 0.2413, + "step": 145 + }, + { + "epoch": 0.007293071581997102, + "grad_norm": 0.7802180647850037, + "learning_rate": 2.4292845257903496e-05, + "loss": 0.1627, + "step": 146 + }, + { + "epoch": 0.007343024127079274, + "grad_norm": 1.1716145277023315, + "learning_rate": 2.4459234608985028e-05, + "loss": 0.4075, + "step": 147 + }, + { + "epoch": 0.007392976672161447, + "grad_norm": 1.13730788230896, + "learning_rate": 2.4625623960066556e-05, + "loss": 0.4173, + "step": 148 + }, + { + "epoch": 0.007442929217243619, + "grad_norm": 0.8156155943870544, + "learning_rate": 2.4792013311148088e-05, + "loss": 0.1472, + "step": 149 + }, + { + "epoch": 0.007492881762325791, + "grad_norm": 0.6844672560691833, + "learning_rate": 2.495840266222962e-05, + "loss": 0.086, + "step": 150 + }, + { + "epoch": 0.007542834307407963, + "grad_norm": 0.7242697477340698, + "learning_rate": 2.512479201331115e-05, + "loss": 0.1523, + "step": 151 + }, + { + "epoch": 0.007592786852490135, + "grad_norm": 0.863088071346283, + "learning_rate": 2.529118136439268e-05, + "loss": 0.1443, + "step": 152 + }, + { + "epoch": 0.007642739397572307, + "grad_norm": 1.5629624128341675, + "learning_rate": 2.5457570715474212e-05, + "loss": 0.2497, + "step": 153 + }, + { + "epoch": 0.0076926919426544785, + "grad_norm": 1.255920648574829, + "learning_rate": 2.562396006655574e-05, + "loss": 0.1955, + "step": 154 + }, + { + "epoch": 0.0077426444877366505, + "grad_norm": 1.2659616470336914, + "learning_rate": 2.5790349417637272e-05, + "loss": 0.3316, + "step": 155 + }, + { + "epoch": 0.007792597032818822, + "grad_norm": 1.592040777206421, + "learning_rate": 2.5956738768718804e-05, + "loss": 0.2407, + "step": 156 + }, + { + "epoch": 0.007842549577900993, + "grad_norm": 2.218533754348755, + "learning_rate": 2.6123128119800332e-05, + "loss": 0.374, + "step": 157 + }, + { + "epoch": 0.007892502122983166, + "grad_norm": 0.9361076354980469, + "learning_rate": 2.6289517470881864e-05, + "loss": 0.0512, + "step": 158 + }, + { + "epoch": 0.007942454668065337, + "grad_norm": 2.169257640838623, + "learning_rate": 2.6455906821963393e-05, + "loss": 0.3369, + "step": 159 + }, + { + "epoch": 0.00799240721314751, + "grad_norm": 1.8530528545379639, + "learning_rate": 2.6622296173044925e-05, + "loss": 0.1216, + "step": 160 + }, + { + "epoch": 0.008042359758229681, + "grad_norm": 1.6074941158294678, + "learning_rate": 2.6788685524126456e-05, + "loss": 0.2716, + "step": 161 + }, + { + "epoch": 0.008092312303311854, + "grad_norm": 1.6200915575027466, + "learning_rate": 2.6955074875207985e-05, + "loss": 0.3135, + "step": 162 
+ }, + { + "epoch": 0.008142264848394025, + "grad_norm": 1.2141520977020264, + "learning_rate": 2.7121464226289517e-05, + "loss": 0.0844, + "step": 163 + }, + { + "epoch": 0.008192217393476198, + "grad_norm": 1.1795310974121094, + "learning_rate": 2.728785357737105e-05, + "loss": 0.2115, + "step": 164 + }, + { + "epoch": 0.008242169938558369, + "grad_norm": 1.5797030925750732, + "learning_rate": 2.7454242928452577e-05, + "loss": 0.1528, + "step": 165 + }, + { + "epoch": 0.008292122483640542, + "grad_norm": 1.2820453643798828, + "learning_rate": 2.762063227953411e-05, + "loss": 0.2074, + "step": 166 + }, + { + "epoch": 0.008342075028722713, + "grad_norm": 1.0517704486846924, + "learning_rate": 2.778702163061564e-05, + "loss": 0.2662, + "step": 167 + }, + { + "epoch": 0.008392027573804885, + "grad_norm": 0.9293931126594543, + "learning_rate": 2.795341098169717e-05, + "loss": 0.3498, + "step": 168 + }, + { + "epoch": 0.008441980118887058, + "grad_norm": 0.7814486622810364, + "learning_rate": 2.81198003327787e-05, + "loss": 0.3543, + "step": 169 + }, + { + "epoch": 0.00849193266396923, + "grad_norm": 0.8077500462532043, + "learning_rate": 2.8286189683860236e-05, + "loss": 0.0267, + "step": 170 + }, + { + "epoch": 0.008541885209051402, + "grad_norm": 0.951673686504364, + "learning_rate": 2.8452579034941768e-05, + "loss": 0.4449, + "step": 171 + }, + { + "epoch": 0.008591837754133573, + "grad_norm": 1.4421314001083374, + "learning_rate": 2.86189683860233e-05, + "loss": 0.1906, + "step": 172 + }, + { + "epoch": 0.008641790299215746, + "grad_norm": 0.6025161743164062, + "learning_rate": 2.8785357737104828e-05, + "loss": 0.2883, + "step": 173 + }, + { + "epoch": 0.008691742844297917, + "grad_norm": 0.9734605550765991, + "learning_rate": 2.895174708818636e-05, + "loss": 0.167, + "step": 174 + }, + { + "epoch": 0.00874169538938009, + "grad_norm": 0.7788898944854736, + "learning_rate": 2.911813643926789e-05, + "loss": 0.0301, + "step": 175 + }, + { + "epoch": 0.00879164793446226, + "grad_norm": 0.8004228472709656, + "learning_rate": 2.928452579034942e-05, + "loss": 0.1621, + "step": 176 + }, + { + "epoch": 0.008841600479544433, + "grad_norm": 1.1998097896575928, + "learning_rate": 2.9450915141430952e-05, + "loss": 0.1246, + "step": 177 + }, + { + "epoch": 0.008891553024626605, + "grad_norm": 0.954428493976593, + "learning_rate": 2.9617304492512484e-05, + "loss": 0.1225, + "step": 178 + }, + { + "epoch": 0.008941505569708777, + "grad_norm": 0.590995728969574, + "learning_rate": 2.9783693843594012e-05, + "loss": 0.1369, + "step": 179 + }, + { + "epoch": 0.008991458114790948, + "grad_norm": 0.9028201103210449, + "learning_rate": 2.9950083194675544e-05, + "loss": 0.0768, + "step": 180 + }, + { + "epoch": 0.009041410659873121, + "grad_norm": 1.0041567087173462, + "learning_rate": 3.0116472545757072e-05, + "loss": 0.3149, + "step": 181 + }, + { + "epoch": 0.009091363204955292, + "grad_norm": 0.8921720385551453, + "learning_rate": 3.0282861896838604e-05, + "loss": 0.1695, + "step": 182 + }, + { + "epoch": 0.009141315750037465, + "grad_norm": 0.9573813080787659, + "learning_rate": 3.0449251247920136e-05, + "loss": 0.1202, + "step": 183 + }, + { + "epoch": 0.009191268295119636, + "grad_norm": 1.0034539699554443, + "learning_rate": 3.0615640599001664e-05, + "loss": 0.2617, + "step": 184 + }, + { + "epoch": 0.009241220840201809, + "grad_norm": 1.1443101167678833, + "learning_rate": 3.0782029950083196e-05, + "loss": 0.139, + "step": 185 + }, + { + "epoch": 0.00929117338528398, + "grad_norm": 1.1108348369598389, + 
"learning_rate": 3.094841930116473e-05, + "loss": 0.1928, + "step": 186 + }, + { + "epoch": 0.009341125930366153, + "grad_norm": 0.8795648217201233, + "learning_rate": 3.111480865224626e-05, + "loss": 0.0749, + "step": 187 + }, + { + "epoch": 0.009391078475448324, + "grad_norm": 0.7995696663856506, + "learning_rate": 3.128119800332779e-05, + "loss": 0.2085, + "step": 188 + }, + { + "epoch": 0.009441031020530496, + "grad_norm": 0.8787071108818054, + "learning_rate": 3.144758735440932e-05, + "loss": 0.201, + "step": 189 + }, + { + "epoch": 0.009490983565612668, + "grad_norm": 0.6818284392356873, + "learning_rate": 3.161397670549085e-05, + "loss": 0.1243, + "step": 190 + }, + { + "epoch": 0.00954093611069484, + "grad_norm": 0.7378799915313721, + "learning_rate": 3.178036605657238e-05, + "loss": 0.1291, + "step": 191 + }, + { + "epoch": 0.009590888655777011, + "grad_norm": 0.7147393822669983, + "learning_rate": 3.194675540765391e-05, + "loss": 0.1779, + "step": 192 + }, + { + "epoch": 0.009640841200859184, + "grad_norm": 0.7001100182533264, + "learning_rate": 3.2113144758735444e-05, + "loss": 0.0443, + "step": 193 + }, + { + "epoch": 0.009690793745941355, + "grad_norm": 0.7866895794868469, + "learning_rate": 3.227953410981697e-05, + "loss": 0.1122, + "step": 194 + }, + { + "epoch": 0.009740746291023528, + "grad_norm": 0.8156589865684509, + "learning_rate": 3.24459234608985e-05, + "loss": 0.2165, + "step": 195 + }, + { + "epoch": 0.009790698836105699, + "grad_norm": 0.7361156344413757, + "learning_rate": 3.261231281198003e-05, + "loss": 0.1385, + "step": 196 + }, + { + "epoch": 0.009840651381187872, + "grad_norm": 0.9058022499084473, + "learning_rate": 3.2778702163061564e-05, + "loss": 0.2136, + "step": 197 + }, + { + "epoch": 0.009890603926270043, + "grad_norm": 0.7774553894996643, + "learning_rate": 3.2945091514143096e-05, + "loss": 0.2015, + "step": 198 + }, + { + "epoch": 0.009940556471352216, + "grad_norm": 0.585500180721283, + "learning_rate": 3.311148086522463e-05, + "loss": 0.1628, + "step": 199 + }, + { + "epoch": 0.009990509016434387, + "grad_norm": 0.8312129378318787, + "learning_rate": 3.327787021630615e-05, + "loss": 0.1231, + "step": 200 + }, + { + "epoch": 0.01004046156151656, + "grad_norm": 0.7176649570465088, + "learning_rate": 3.3444259567387685e-05, + "loss": 0.1761, + "step": 201 + }, + { + "epoch": 0.01009041410659873, + "grad_norm": 0.9552359580993652, + "learning_rate": 3.361064891846922e-05, + "loss": 0.0912, + "step": 202 + }, + { + "epoch": 0.010140366651680903, + "grad_norm": 0.8616659641265869, + "learning_rate": 3.377703826955075e-05, + "loss": 0.1425, + "step": 203 + }, + { + "epoch": 0.010190319196763074, + "grad_norm": 0.8543926477432251, + "learning_rate": 3.394342762063228e-05, + "loss": 0.0924, + "step": 204 + }, + { + "epoch": 0.010240271741845247, + "grad_norm": 0.8953242897987366, + "learning_rate": 3.410981697171381e-05, + "loss": 0.1389, + "step": 205 + }, + { + "epoch": 0.010290224286927418, + "grad_norm": 0.6580705046653748, + "learning_rate": 3.427620632279534e-05, + "loss": 0.0871, + "step": 206 + }, + { + "epoch": 0.010340176832009591, + "grad_norm": 1.0291297435760498, + "learning_rate": 3.4442595673876876e-05, + "loss": 0.1804, + "step": 207 + }, + { + "epoch": 0.010390129377091762, + "grad_norm": 0.9023648500442505, + "learning_rate": 3.460898502495841e-05, + "loss": 0.2048, + "step": 208 + }, + { + "epoch": 0.010440081922173935, + "grad_norm": 0.9671919941902161, + "learning_rate": 3.477537437603994e-05, + "loss": 0.159, + "step": 209 + }, + { + 
"epoch": 0.010490034467256106, + "grad_norm": 0.8875133395195007, + "learning_rate": 3.494176372712147e-05, + "loss": 0.2028, + "step": 210 + }, + { + "epoch": 0.010539987012338279, + "grad_norm": 0.9025343656539917, + "learning_rate": 3.5108153078202996e-05, + "loss": 0.3496, + "step": 211 + }, + { + "epoch": 0.010589939557420451, + "grad_norm": 0.9096697568893433, + "learning_rate": 3.527454242928453e-05, + "loss": 0.1691, + "step": 212 + }, + { + "epoch": 0.010639892102502622, + "grad_norm": 1.4726406335830688, + "learning_rate": 3.544093178036606e-05, + "loss": 0.4967, + "step": 213 + }, + { + "epoch": 0.010689844647584795, + "grad_norm": 1.9270225763320923, + "learning_rate": 3.560732113144759e-05, + "loss": 0.1325, + "step": 214 + }, + { + "epoch": 0.010739797192666966, + "grad_norm": 1.3116896152496338, + "learning_rate": 3.5773710482529124e-05, + "loss": 0.2183, + "step": 215 + }, + { + "epoch": 0.010789749737749139, + "grad_norm": 1.3984644412994385, + "learning_rate": 3.594009983361065e-05, + "loss": 0.3395, + "step": 216 + }, + { + "epoch": 0.01083970228283131, + "grad_norm": 1.6455167531967163, + "learning_rate": 3.610648918469218e-05, + "loss": 0.1851, + "step": 217 + }, + { + "epoch": 0.010889654827913483, + "grad_norm": 1.4423123598098755, + "learning_rate": 3.627287853577371e-05, + "loss": 0.2101, + "step": 218 + }, + { + "epoch": 0.010939607372995654, + "grad_norm": 1.399526834487915, + "learning_rate": 3.6439267886855244e-05, + "loss": 0.1275, + "step": 219 + }, + { + "epoch": 0.010989559918077827, + "grad_norm": 1.3287642002105713, + "learning_rate": 3.6605657237936776e-05, + "loss": 0.2917, + "step": 220 + }, + { + "epoch": 0.011039512463159998, + "grad_norm": 1.5703953504562378, + "learning_rate": 3.677204658901831e-05, + "loss": 0.3049, + "step": 221 + }, + { + "epoch": 0.01108946500824217, + "grad_norm": 1.1338931322097778, + "learning_rate": 3.693843594009983e-05, + "loss": 0.0613, + "step": 222 + }, + { + "epoch": 0.011139417553324342, + "grad_norm": 1.026479959487915, + "learning_rate": 3.7104825291181365e-05, + "loss": 0.1703, + "step": 223 + }, + { + "epoch": 0.011189370098406514, + "grad_norm": 1.234251618385315, + "learning_rate": 3.7271214642262896e-05, + "loss": 0.1926, + "step": 224 + }, + { + "epoch": 0.011239322643488685, + "grad_norm": 1.2162742614746094, + "learning_rate": 3.743760399334443e-05, + "loss": 0.204, + "step": 225 + }, + { + "epoch": 0.011289275188570858, + "grad_norm": 0.9306070804595947, + "learning_rate": 3.760399334442596e-05, + "loss": 0.0468, + "step": 226 + }, + { + "epoch": 0.01133922773365303, + "grad_norm": 0.9760810732841492, + "learning_rate": 3.777038269550749e-05, + "loss": 0.2317, + "step": 227 + }, + { + "epoch": 0.011389180278735202, + "grad_norm": 1.2859827280044556, + "learning_rate": 3.793677204658902e-05, + "loss": 0.0754, + "step": 228 + }, + { + "epoch": 0.011439132823817373, + "grad_norm": 0.7277094125747681, + "learning_rate": 3.810316139767055e-05, + "loss": 0.1498, + "step": 229 + }, + { + "epoch": 0.011489085368899546, + "grad_norm": 1.0644822120666504, + "learning_rate": 3.826955074875208e-05, + "loss": 0.2485, + "step": 230 + }, + { + "epoch": 0.011539037913981717, + "grad_norm": 1.3385686874389648, + "learning_rate": 3.843594009983361e-05, + "loss": 0.019, + "step": 231 + }, + { + "epoch": 0.01158899045906389, + "grad_norm": 1.2722818851470947, + "learning_rate": 3.8602329450915144e-05, + "loss": 0.2167, + "step": 232 + }, + { + "epoch": 0.01163894300414606, + "grad_norm": 1.2983276844024658, + "learning_rate": 
3.8768718801996676e-05, + "loss": 0.1563, + "step": 233 + }, + { + "epoch": 0.011688895549228234, + "grad_norm": 1.20723557472229, + "learning_rate": 3.89351081530782e-05, + "loss": 0.23, + "step": 234 + }, + { + "epoch": 0.011738848094310405, + "grad_norm": 1.1958420276641846, + "learning_rate": 3.910149750415973e-05, + "loss": 0.143, + "step": 235 + }, + { + "epoch": 0.011788800639392577, + "grad_norm": 0.7623319625854492, + "learning_rate": 3.9267886855241265e-05, + "loss": 0.0403, + "step": 236 + }, + { + "epoch": 0.011838753184474748, + "grad_norm": 1.1975929737091064, + "learning_rate": 3.9434276206322796e-05, + "loss": 0.1138, + "step": 237 + }, + { + "epoch": 0.011888705729556921, + "grad_norm": 1.436880350112915, + "learning_rate": 3.960066555740433e-05, + "loss": 0.3196, + "step": 238 + }, + { + "epoch": 0.011938658274639092, + "grad_norm": 1.0667468309402466, + "learning_rate": 3.976705490848586e-05, + "loss": 0.046, + "step": 239 + }, + { + "epoch": 0.011988610819721265, + "grad_norm": 1.6662734746932983, + "learning_rate": 3.9933444259567385e-05, + "loss": 0.2504, + "step": 240 + }, + { + "epoch": 0.012038563364803436, + "grad_norm": 1.094754695892334, + "learning_rate": 4.009983361064892e-05, + "loss": 0.1758, + "step": 241 + }, + { + "epoch": 0.012088515909885609, + "grad_norm": 1.3207210302352905, + "learning_rate": 4.026622296173045e-05, + "loss": 0.2176, + "step": 242 + }, + { + "epoch": 0.01213846845496778, + "grad_norm": 1.3022575378417969, + "learning_rate": 4.043261231281198e-05, + "loss": 0.2428, + "step": 243 + }, + { + "epoch": 0.012188421000049953, + "grad_norm": 1.1550939083099365, + "learning_rate": 4.059900166389351e-05, + "loss": 0.2292, + "step": 244 + }, + { + "epoch": 0.012238373545132124, + "grad_norm": 1.19441819190979, + "learning_rate": 4.0765391014975044e-05, + "loss": 0.0928, + "step": 245 + }, + { + "epoch": 0.012288326090214297, + "grad_norm": 1.4193369150161743, + "learning_rate": 4.0931780366056576e-05, + "loss": 0.2491, + "step": 246 + }, + { + "epoch": 0.012338278635296468, + "grad_norm": 1.0700119733810425, + "learning_rate": 4.109816971713811e-05, + "loss": 0.2354, + "step": 247 + }, + { + "epoch": 0.01238823118037864, + "grad_norm": 1.1196368932724, + "learning_rate": 4.126455906821964e-05, + "loss": 0.0352, + "step": 248 + }, + { + "epoch": 0.012438183725460811, + "grad_norm": 1.2887195348739624, + "learning_rate": 4.143094841930117e-05, + "loss": 0.0985, + "step": 249 + }, + { + "epoch": 0.012488136270542984, + "grad_norm": 0.8794819712638855, + "learning_rate": 4.1597337770382697e-05, + "loss": 0.2921, + "step": 250 + }, + { + "epoch": 0.012538088815625155, + "grad_norm": 1.3117047548294067, + "learning_rate": 4.176372712146423e-05, + "loss": 0.1882, + "step": 251 + }, + { + "epoch": 0.012588041360707328, + "grad_norm": 1.354897141456604, + "learning_rate": 4.193011647254576e-05, + "loss": 0.1112, + "step": 252 + }, + { + "epoch": 0.0126379939057895, + "grad_norm": 1.3721188306808472, + "learning_rate": 4.209650582362729e-05, + "loss": 0.0667, + "step": 253 + }, + { + "epoch": 0.012687946450871672, + "grad_norm": 1.0415611267089844, + "learning_rate": 4.2262895174708824e-05, + "loss": 0.1165, + "step": 254 + }, + { + "epoch": 0.012737898995953845, + "grad_norm": 1.0630691051483154, + "learning_rate": 4.2429284525790356e-05, + "loss": 0.247, + "step": 255 + }, + { + "epoch": 0.012787851541036016, + "grad_norm": 1.1943310499191284, + "learning_rate": 4.259567387687188e-05, + "loss": 0.1816, + "step": 256 + }, + { + "epoch": 
0.012837804086118189, + "grad_norm": 1.0627037286758423, + "learning_rate": 4.276206322795341e-05, + "loss": 0.1295, + "step": 257 + }, + { + "epoch": 0.01288775663120036, + "grad_norm": 0.9218604564666748, + "learning_rate": 4.2928452579034944e-05, + "loss": 0.0999, + "step": 258 + }, + { + "epoch": 0.012937709176282532, + "grad_norm": 1.0682590007781982, + "learning_rate": 4.3094841930116476e-05, + "loss": 0.1385, + "step": 259 + }, + { + "epoch": 0.012987661721364703, + "grad_norm": 0.9920094013214111, + "learning_rate": 4.326123128119801e-05, + "loss": 0.122, + "step": 260 + }, + { + "epoch": 0.013037614266446876, + "grad_norm": 0.9195244312286377, + "learning_rate": 4.342762063227954e-05, + "loss": 0.0875, + "step": 261 + }, + { + "epoch": 0.013087566811529047, + "grad_norm": 0.8353629112243652, + "learning_rate": 4.3594009983361065e-05, + "loss": 0.1753, + "step": 262 + }, + { + "epoch": 0.01313751935661122, + "grad_norm": 1.047048807144165, + "learning_rate": 4.3760399334442597e-05, + "loss": 0.0369, + "step": 263 + }, + { + "epoch": 0.013187471901693391, + "grad_norm": 0.8298856616020203, + "learning_rate": 4.392678868552413e-05, + "loss": 0.1509, + "step": 264 + }, + { + "epoch": 0.013237424446775564, + "grad_norm": 0.9373747706413269, + "learning_rate": 4.409317803660566e-05, + "loss": 0.1305, + "step": 265 + }, + { + "epoch": 0.013287376991857735, + "grad_norm": 0.7969961762428284, + "learning_rate": 4.425956738768719e-05, + "loss": 0.19, + "step": 266 + }, + { + "epoch": 0.013337329536939908, + "grad_norm": 0.884782075881958, + "learning_rate": 4.442595673876872e-05, + "loss": 0.22, + "step": 267 + }, + { + "epoch": 0.013387282082022079, + "grad_norm": 0.8766407370567322, + "learning_rate": 4.459234608985025e-05, + "loss": 0.0603, + "step": 268 + }, + { + "epoch": 0.013437234627104251, + "grad_norm": 0.8031877279281616, + "learning_rate": 4.475873544093178e-05, + "loss": 0.1986, + "step": 269 + }, + { + "epoch": 0.013487187172186423, + "grad_norm": 1.2182422876358032, + "learning_rate": 4.492512479201331e-05, + "loss": 0.2532, + "step": 270 + }, + { + "epoch": 0.013537139717268595, + "grad_norm": 1.0183029174804688, + "learning_rate": 4.5091514143094844e-05, + "loss": 0.3113, + "step": 271 + }, + { + "epoch": 0.013587092262350766, + "grad_norm": 1.0454894304275513, + "learning_rate": 4.5257903494176376e-05, + "loss": 0.1126, + "step": 272 + }, + { + "epoch": 0.01363704480743294, + "grad_norm": 0.7971144318580627, + "learning_rate": 4.54242928452579e-05, + "loss": 0.1809, + "step": 273 + }, + { + "epoch": 0.01368699735251511, + "grad_norm": 1.2324053049087524, + "learning_rate": 4.559068219633943e-05, + "loss": 0.1186, + "step": 274 + }, + { + "epoch": 0.013736949897597283, + "grad_norm": 1.0972692966461182, + "learning_rate": 4.5757071547420965e-05, + "loss": 0.1522, + "step": 275 + }, + { + "epoch": 0.013786902442679454, + "grad_norm": 1.1334056854248047, + "learning_rate": 4.59234608985025e-05, + "loss": 0.0478, + "step": 276 + }, + { + "epoch": 0.013836854987761627, + "grad_norm": 0.915405809879303, + "learning_rate": 4.608985024958403e-05, + "loss": 0.0667, + "step": 277 + }, + { + "epoch": 0.013886807532843798, + "grad_norm": 0.8687565326690674, + "learning_rate": 4.625623960066556e-05, + "loss": 0.1981, + "step": 278 + }, + { + "epoch": 0.01393676007792597, + "grad_norm": 1.0031801462173462, + "learning_rate": 4.6422628951747085e-05, + "loss": 0.2253, + "step": 279 + }, + { + "epoch": 0.013986712623008142, + "grad_norm": 1.0229508876800537, + "learning_rate": 
4.658901830282862e-05, + "loss": 0.1183, + "step": 280 + }, + { + "epoch": 0.014036665168090314, + "grad_norm": 0.8891842365264893, + "learning_rate": 4.675540765391015e-05, + "loss": 0.093, + "step": 281 + }, + { + "epoch": 0.014086617713172486, + "grad_norm": 0.9675115346908569, + "learning_rate": 4.692179700499169e-05, + "loss": 0.437, + "step": 282 + }, + { + "epoch": 0.014136570258254658, + "grad_norm": 1.1238436698913574, + "learning_rate": 4.708818635607322e-05, + "loss": 0.0897, + "step": 283 + }, + { + "epoch": 0.01418652280333683, + "grad_norm": 0.8497625589370728, + "learning_rate": 4.7254575707154744e-05, + "loss": 0.2161, + "step": 284 + }, + { + "epoch": 0.014236475348419002, + "grad_norm": 1.2852574586868286, + "learning_rate": 4.7420965058236276e-05, + "loss": 0.2218, + "step": 285 + }, + { + "epoch": 0.014286427893501173, + "grad_norm": 1.3896344900131226, + "learning_rate": 4.758735440931781e-05, + "loss": 0.1546, + "step": 286 + }, + { + "epoch": 0.014336380438583346, + "grad_norm": 1.0637980699539185, + "learning_rate": 4.775374376039934e-05, + "loss": 0.1342, + "step": 287 + }, + { + "epoch": 0.014386332983665517, + "grad_norm": 1.0323102474212646, + "learning_rate": 4.792013311148087e-05, + "loss": 0.0207, + "step": 288 + }, + { + "epoch": 0.01443628552874769, + "grad_norm": 1.1150763034820557, + "learning_rate": 4.80865224625624e-05, + "loss": 0.0173, + "step": 289 + }, + { + "epoch": 0.01448623807382986, + "grad_norm": 1.025802731513977, + "learning_rate": 4.825291181364393e-05, + "loss": 0.3247, + "step": 290 + }, + { + "epoch": 0.014536190618912034, + "grad_norm": 1.9237759113311768, + "learning_rate": 4.841930116472546e-05, + "loss": 0.2604, + "step": 291 + }, + { + "epoch": 0.014586143163994205, + "grad_norm": 1.5385209321975708, + "learning_rate": 4.858569051580699e-05, + "loss": 0.1587, + "step": 292 + }, + { + "epoch": 0.014636095709076377, + "grad_norm": 1.3327640295028687, + "learning_rate": 4.8752079866888524e-05, + "loss": 0.1001, + "step": 293 + }, + { + "epoch": 0.014686048254158549, + "grad_norm": 1.2036781311035156, + "learning_rate": 4.8918469217970056e-05, + "loss": 0.1152, + "step": 294 + }, + { + "epoch": 0.014736000799240721, + "grad_norm": 1.2955141067504883, + "learning_rate": 4.908485856905158e-05, + "loss": 0.0643, + "step": 295 + }, + { + "epoch": 0.014785953344322894, + "grad_norm": 1.082534909248352, + "learning_rate": 4.925124792013311e-05, + "loss": 0.0831, + "step": 296 + }, + { + "epoch": 0.014835905889405065, + "grad_norm": 1.2713667154312134, + "learning_rate": 4.9417637271214644e-05, + "loss": 0.1336, + "step": 297 + }, + { + "epoch": 0.014885858434487238, + "grad_norm": 1.3509265184402466, + "learning_rate": 4.9584026622296176e-05, + "loss": 0.0894, + "step": 298 + }, + { + "epoch": 0.014935810979569409, + "grad_norm": 1.082978367805481, + "learning_rate": 4.975041597337771e-05, + "loss": 0.0667, + "step": 299 + }, + { + "epoch": 0.014985763524651582, + "grad_norm": 1.1650606393814087, + "learning_rate": 4.991680532445924e-05, + "loss": 0.092, + "step": 300 + }, + { + "epoch": 0.015035716069733753, + "grad_norm": 1.2913053035736084, + "learning_rate": 5.0083194675540765e-05, + "loss": 0.0405, + "step": 301 + }, + { + "epoch": 0.015085668614815926, + "grad_norm": 1.0247113704681396, + "learning_rate": 5.02495840266223e-05, + "loss": 0.0672, + "step": 302 + }, + { + "epoch": 0.015135621159898097, + "grad_norm": 0.952612042427063, + "learning_rate": 5.041597337770383e-05, + "loss": 0.0808, + "step": 303 + }, + { + "epoch": 
0.01518557370498027, + "grad_norm": 1.0478129386901855, + "learning_rate": 5.058236272878536e-05, + "loss": 0.0842, + "step": 304 + }, + { + "epoch": 0.01523552625006244, + "grad_norm": 0.914654016494751, + "learning_rate": 5.074875207986689e-05, + "loss": 0.0284, + "step": 305 + }, + { + "epoch": 0.015285478795144613, + "grad_norm": 0.9866631031036377, + "learning_rate": 5.0915141430948424e-05, + "loss": 0.1207, + "step": 306 + }, + { + "epoch": 0.015335431340226784, + "grad_norm": 0.8195444345474243, + "learning_rate": 5.108153078202995e-05, + "loss": 0.0891, + "step": 307 + }, + { + "epoch": 0.015385383885308957, + "grad_norm": 1.2613061666488647, + "learning_rate": 5.124792013311148e-05, + "loss": 0.1143, + "step": 308 + }, + { + "epoch": 0.015435336430391128, + "grad_norm": 0.9732815027236938, + "learning_rate": 5.141430948419301e-05, + "loss": 0.1905, + "step": 309 + }, + { + "epoch": 0.015485288975473301, + "grad_norm": 0.8498770594596863, + "learning_rate": 5.1580698835274544e-05, + "loss": 0.0583, + "step": 310 + }, + { + "epoch": 0.015535241520555472, + "grad_norm": 0.9181931614875793, + "learning_rate": 5.1747088186356076e-05, + "loss": 0.0473, + "step": 311 + }, + { + "epoch": 0.015585194065637645, + "grad_norm": 1.0210882425308228, + "learning_rate": 5.191347753743761e-05, + "loss": 0.0808, + "step": 312 + }, + { + "epoch": 0.015635146610719818, + "grad_norm": 1.0700209140777588, + "learning_rate": 5.207986688851913e-05, + "loss": 0.041, + "step": 313 + }, + { + "epoch": 0.015685099155801987, + "grad_norm": 0.9831816554069519, + "learning_rate": 5.2246256239600665e-05, + "loss": 0.0592, + "step": 314 + }, + { + "epoch": 0.01573505170088416, + "grad_norm": 0.7289150357246399, + "learning_rate": 5.24126455906822e-05, + "loss": 0.1491, + "step": 315 + }, + { + "epoch": 0.015785004245966332, + "grad_norm": 1.102803349494934, + "learning_rate": 5.257903494176373e-05, + "loss": 0.0341, + "step": 316 + }, + { + "epoch": 0.015834956791048505, + "grad_norm": 0.6358113884925842, + "learning_rate": 5.274542429284526e-05, + "loss": 0.0915, + "step": 317 + }, + { + "epoch": 0.015884909336130675, + "grad_norm": 0.6831187605857849, + "learning_rate": 5.2911813643926785e-05, + "loss": 0.051, + "step": 318 + }, + { + "epoch": 0.015934861881212847, + "grad_norm": 0.8011876344680786, + "learning_rate": 5.307820299500832e-05, + "loss": 0.0555, + "step": 319 + }, + { + "epoch": 0.01598481442629502, + "grad_norm": 0.715848445892334, + "learning_rate": 5.324459234608985e-05, + "loss": 0.0134, + "step": 320 + }, + { + "epoch": 0.016034766971377193, + "grad_norm": 0.6955671310424805, + "learning_rate": 5.341098169717138e-05, + "loss": 0.0359, + "step": 321 + }, + { + "epoch": 0.016084719516459362, + "grad_norm": 0.9082951545715332, + "learning_rate": 5.357737104825291e-05, + "loss": 0.1104, + "step": 322 + }, + { + "epoch": 0.016134672061541535, + "grad_norm": 1.0050984621047974, + "learning_rate": 5.3743760399334445e-05, + "loss": 0.0548, + "step": 323 + }, + { + "epoch": 0.016184624606623708, + "grad_norm": 0.9997800588607788, + "learning_rate": 5.391014975041597e-05, + "loss": 0.1398, + "step": 324 + }, + { + "epoch": 0.01623457715170588, + "grad_norm": 0.9968631267547607, + "learning_rate": 5.40765391014975e-05, + "loss": 0.0551, + "step": 325 + }, + { + "epoch": 0.01628452969678805, + "grad_norm": 1.2626473903656006, + "learning_rate": 5.424292845257903e-05, + "loss": 0.0794, + "step": 326 + }, + { + "epoch": 0.016334482241870223, + "grad_norm": 0.6565149426460266, + "learning_rate": 
5.4409317803660565e-05, + "loss": 0.0681, + "step": 327 + }, + { + "epoch": 0.016384434786952395, + "grad_norm": 0.9055567979812622, + "learning_rate": 5.45757071547421e-05, + "loss": 0.0729, + "step": 328 + }, + { + "epoch": 0.016434387332034568, + "grad_norm": 0.9563494324684143, + "learning_rate": 5.474209650582363e-05, + "loss": 0.2678, + "step": 329 + }, + { + "epoch": 0.016484339877116738, + "grad_norm": 1.3142122030258179, + "learning_rate": 5.4908485856905154e-05, + "loss": 0.0717, + "step": 330 + }, + { + "epoch": 0.01653429242219891, + "grad_norm": 0.8934493660926819, + "learning_rate": 5.5074875207986686e-05, + "loss": 0.1406, + "step": 331 + }, + { + "epoch": 0.016584244967281083, + "grad_norm": 1.336534857749939, + "learning_rate": 5.524126455906822e-05, + "loss": 0.1011, + "step": 332 + }, + { + "epoch": 0.016634197512363256, + "grad_norm": 1.052679419517517, + "learning_rate": 5.540765391014975e-05, + "loss": 0.1814, + "step": 333 + }, + { + "epoch": 0.016684150057445425, + "grad_norm": 0.7298359870910645, + "learning_rate": 5.557404326123128e-05, + "loss": 0.0614, + "step": 334 + }, + { + "epoch": 0.016734102602527598, + "grad_norm": 0.9035956859588623, + "learning_rate": 5.574043261231281e-05, + "loss": 0.0371, + "step": 335 + }, + { + "epoch": 0.01678405514760977, + "grad_norm": 0.9984443187713623, + "learning_rate": 5.590682196339434e-05, + "loss": 0.0407, + "step": 336 + }, + { + "epoch": 0.016834007692691944, + "grad_norm": 1.0916666984558105, + "learning_rate": 5.607321131447587e-05, + "loss": 0.3124, + "step": 337 + }, + { + "epoch": 0.016883960237774116, + "grad_norm": 1.2346761226654053, + "learning_rate": 5.62396006655574e-05, + "loss": 0.0398, + "step": 338 + }, + { + "epoch": 0.016933912782856286, + "grad_norm": 1.0144277811050415, + "learning_rate": 5.640599001663894e-05, + "loss": 0.051, + "step": 339 + }, + { + "epoch": 0.01698386532793846, + "grad_norm": 0.7899157404899597, + "learning_rate": 5.657237936772047e-05, + "loss": 0.2, + "step": 340 + }, + { + "epoch": 0.01703381787302063, + "grad_norm": 1.078605055809021, + "learning_rate": 5.6738768718802004e-05, + "loss": 0.204, + "step": 341 + }, + { + "epoch": 0.017083770418102804, + "grad_norm": 0.864900529384613, + "learning_rate": 5.6905158069883535e-05, + "loss": 0.1106, + "step": 342 + }, + { + "epoch": 0.017133722963184973, + "grad_norm": 1.0467946529388428, + "learning_rate": 5.707154742096507e-05, + "loss": 0.2322, + "step": 343 + }, + { + "epoch": 0.017183675508267146, + "grad_norm": 1.0434801578521729, + "learning_rate": 5.72379367720466e-05, + "loss": 0.1679, + "step": 344 + }, + { + "epoch": 0.01723362805334932, + "grad_norm": 0.7895509004592896, + "learning_rate": 5.7404326123128124e-05, + "loss": 0.1421, + "step": 345 + }, + { + "epoch": 0.01728358059843149, + "grad_norm": 1.049283742904663, + "learning_rate": 5.7570715474209656e-05, + "loss": 0.1442, + "step": 346 + }, + { + "epoch": 0.01733353314351366, + "grad_norm": 1.1877703666687012, + "learning_rate": 5.773710482529119e-05, + "loss": 0.1264, + "step": 347 + }, + { + "epoch": 0.017383485688595834, + "grad_norm": 1.3037993907928467, + "learning_rate": 5.790349417637272e-05, + "loss": 0.0328, + "step": 348 + }, + { + "epoch": 0.017433438233678007, + "grad_norm": 0.9877206683158875, + "learning_rate": 5.806988352745425e-05, + "loss": 0.0448, + "step": 349 + }, + { + "epoch": 0.01748339077876018, + "grad_norm": 1.1700186729431152, + "learning_rate": 5.823627287853578e-05, + "loss": 0.0925, + "step": 350 + }, + { + "epoch": 0.01753334332384235, 
+ "grad_norm": 1.5068670511245728, + "learning_rate": 5.840266222961731e-05, + "loss": 0.0958, + "step": 351 + }, + { + "epoch": 0.01758329586892452, + "grad_norm": 1.7880791425704956, + "learning_rate": 5.856905158069884e-05, + "loss": 0.0699, + "step": 352 + }, + { + "epoch": 0.017633248414006694, + "grad_norm": 1.1081947088241577, + "learning_rate": 5.873544093178037e-05, + "loss": 0.0703, + "step": 353 + }, + { + "epoch": 0.017683200959088867, + "grad_norm": 0.848996639251709, + "learning_rate": 5.8901830282861904e-05, + "loss": 0.0659, + "step": 354 + }, + { + "epoch": 0.017733153504171036, + "grad_norm": 1.261146068572998, + "learning_rate": 5.9068219633943436e-05, + "loss": 0.0834, + "step": 355 + }, + { + "epoch": 0.01778310604925321, + "grad_norm": 0.9266605377197266, + "learning_rate": 5.923460898502497e-05, + "loss": 0.0853, + "step": 356 + }, + { + "epoch": 0.017833058594335382, + "grad_norm": 0.8910160660743713, + "learning_rate": 5.940099833610649e-05, + "loss": 0.0824, + "step": 357 + }, + { + "epoch": 0.017883011139417555, + "grad_norm": 0.973775327205658, + "learning_rate": 5.9567387687188024e-05, + "loss": 0.0816, + "step": 358 + }, + { + "epoch": 0.017932963684499724, + "grad_norm": 1.1417291164398193, + "learning_rate": 5.9733777038269556e-05, + "loss": 0.133, + "step": 359 + }, + { + "epoch": 0.017982916229581897, + "grad_norm": 0.7870392203330994, + "learning_rate": 5.990016638935109e-05, + "loss": 0.0847, + "step": 360 + }, + { + "epoch": 0.01803286877466407, + "grad_norm": 0.8297953605651855, + "learning_rate": 6.006655574043262e-05, + "loss": 0.1619, + "step": 361 + }, + { + "epoch": 0.018082821319746242, + "grad_norm": 0.9551287889480591, + "learning_rate": 6.0232945091514145e-05, + "loss": 0.1052, + "step": 362 + }, + { + "epoch": 0.01813277386482841, + "grad_norm": 0.803027331829071, + "learning_rate": 6.0399334442595677e-05, + "loss": 0.0943, + "step": 363 + }, + { + "epoch": 0.018182726409910584, + "grad_norm": 0.6481413841247559, + "learning_rate": 6.056572379367721e-05, + "loss": 0.0249, + "step": 364 + }, + { + "epoch": 0.018232678954992757, + "grad_norm": 0.8569649457931519, + "learning_rate": 6.073211314475874e-05, + "loss": 0.1093, + "step": 365 + }, + { + "epoch": 0.01828263150007493, + "grad_norm": 0.8699489831924438, + "learning_rate": 6.089850249584027e-05, + "loss": 0.0297, + "step": 366 + }, + { + "epoch": 0.0183325840451571, + "grad_norm": 0.9851505756378174, + "learning_rate": 6.10648918469218e-05, + "loss": 0.1773, + "step": 367 + }, + { + "epoch": 0.018382536590239272, + "grad_norm": 0.8965858817100525, + "learning_rate": 6.123128119800333e-05, + "loss": 0.0252, + "step": 368 + }, + { + "epoch": 0.018432489135321445, + "grad_norm": 0.9282126426696777, + "learning_rate": 6.139767054908487e-05, + "loss": 0.0484, + "step": 369 + }, + { + "epoch": 0.018482441680403618, + "grad_norm": 1.3553026914596558, + "learning_rate": 6.156405990016639e-05, + "loss": 0.1579, + "step": 370 + }, + { + "epoch": 0.018532394225485787, + "grad_norm": 1.1928353309631348, + "learning_rate": 6.173044925124792e-05, + "loss": 0.1118, + "step": 371 + }, + { + "epoch": 0.01858234677056796, + "grad_norm": 1.4261761903762817, + "learning_rate": 6.189683860232946e-05, + "loss": 0.0642, + "step": 372 + }, + { + "epoch": 0.018632299315650133, + "grad_norm": 1.2115405797958374, + "learning_rate": 6.206322795341098e-05, + "loss": 0.0687, + "step": 373 + }, + { + "epoch": 0.018682251860732305, + "grad_norm": 0.937313973903656, + "learning_rate": 6.222961730449252e-05, + "loss": 
0.1141, + "step": 374 + }, + { + "epoch": 0.018732204405814475, + "grad_norm": 0.8499642610549927, + "learning_rate": 6.239600665557404e-05, + "loss": 0.0648, + "step": 375 + }, + { + "epoch": 0.018782156950896647, + "grad_norm": 0.8437957763671875, + "learning_rate": 6.256239600665558e-05, + "loss": 0.0817, + "step": 376 + }, + { + "epoch": 0.01883210949597882, + "grad_norm": 0.8849421739578247, + "learning_rate": 6.272878535773711e-05, + "loss": 0.0554, + "step": 377 + }, + { + "epoch": 0.018882062041060993, + "grad_norm": 0.7692974209785461, + "learning_rate": 6.289517470881863e-05, + "loss": 0.0965, + "step": 378 + }, + { + "epoch": 0.018932014586143162, + "grad_norm": 0.6161106824874878, + "learning_rate": 6.306156405990017e-05, + "loss": 0.1376, + "step": 379 + }, + { + "epoch": 0.018981967131225335, + "grad_norm": 0.7092472314834595, + "learning_rate": 6.32279534109817e-05, + "loss": 0.0394, + "step": 380 + }, + { + "epoch": 0.019031919676307508, + "grad_norm": 0.6661204099655151, + "learning_rate": 6.339434276206324e-05, + "loss": 0.0186, + "step": 381 + }, + { + "epoch": 0.01908187222138968, + "grad_norm": 0.6758548617362976, + "learning_rate": 6.356073211314476e-05, + "loss": 0.1191, + "step": 382 + }, + { + "epoch": 0.019131824766471853, + "grad_norm": 0.6042129993438721, + "learning_rate": 6.372712146422629e-05, + "loss": 0.0658, + "step": 383 + }, + { + "epoch": 0.019181777311554023, + "grad_norm": 0.71405029296875, + "learning_rate": 6.389351081530782e-05, + "loss": 0.0384, + "step": 384 + }, + { + "epoch": 0.019231729856636195, + "grad_norm": 0.6925369501113892, + "learning_rate": 6.405990016638935e-05, + "loss": 0.0227, + "step": 385 + }, + { + "epoch": 0.01928168240171837, + "grad_norm": 0.8713813424110413, + "learning_rate": 6.422628951747089e-05, + "loss": 0.189, + "step": 386 + }, + { + "epoch": 0.01933163494680054, + "grad_norm": 0.6406596899032593, + "learning_rate": 6.439267886855241e-05, + "loss": 0.0508, + "step": 387 + }, + { + "epoch": 0.01938158749188271, + "grad_norm": 0.6894404292106628, + "learning_rate": 6.455906821963394e-05, + "loss": 0.2609, + "step": 388 + }, + { + "epoch": 0.019431540036964883, + "grad_norm": 0.6989345550537109, + "learning_rate": 6.472545757071548e-05, + "loss": 0.0341, + "step": 389 + }, + { + "epoch": 0.019481492582047056, + "grad_norm": 0.669314444065094, + "learning_rate": 6.4891846921797e-05, + "loss": 0.0555, + "step": 390 + }, + { + "epoch": 0.01953144512712923, + "grad_norm": 0.791816234588623, + "learning_rate": 6.505823627287854e-05, + "loss": 0.0279, + "step": 391 + }, + { + "epoch": 0.019581397672211398, + "grad_norm": 0.7049844264984131, + "learning_rate": 6.522462562396007e-05, + "loss": 0.0644, + "step": 392 + }, + { + "epoch": 0.01963135021729357, + "grad_norm": 0.6958633661270142, + "learning_rate": 6.53910149750416e-05, + "loss": 0.0345, + "step": 393 + }, + { + "epoch": 0.019681302762375744, + "grad_norm": 0.5409795045852661, + "learning_rate": 6.555740432612313e-05, + "loss": 0.0879, + "step": 394 + }, + { + "epoch": 0.019731255307457916, + "grad_norm": 0.7164896726608276, + "learning_rate": 6.572379367720465e-05, + "loss": 0.0296, + "step": 395 + }, + { + "epoch": 0.019781207852540086, + "grad_norm": 0.5096608996391296, + "learning_rate": 6.589018302828619e-05, + "loss": 0.0331, + "step": 396 + }, + { + "epoch": 0.01983116039762226, + "grad_norm": 0.3957138657569885, + "learning_rate": 6.605657237936772e-05, + "loss": 0.0299, + "step": 397 + }, + { + "epoch": 0.01988111294270443, + "grad_norm": 0.6122379302978516, 
+ "learning_rate": 6.622296173044926e-05, + "loss": 0.024, + "step": 398 + }, + { + "epoch": 0.019931065487786604, + "grad_norm": 0.636982798576355, + "learning_rate": 6.638935108153078e-05, + "loss": 0.0243, + "step": 399 + }, + { + "epoch": 0.019981018032868773, + "grad_norm": 0.4524487257003784, + "learning_rate": 6.65557404326123e-05, + "loss": 0.0525, + "step": 400 + }, + { + "epoch": 0.020030970577950946, + "grad_norm": 0.45042604207992554, + "learning_rate": 6.672212978369384e-05, + "loss": 0.0287, + "step": 401 + }, + { + "epoch": 0.02008092312303312, + "grad_norm": 0.49954870343208313, + "learning_rate": 6.688851913477537e-05, + "loss": 0.1452, + "step": 402 + }, + { + "epoch": 0.02013087566811529, + "grad_norm": 0.4999921917915344, + "learning_rate": 6.705490848585691e-05, + "loss": 0.0751, + "step": 403 + }, + { + "epoch": 0.02018082821319746, + "grad_norm": 0.6252273321151733, + "learning_rate": 6.722129783693843e-05, + "loss": 0.2377, + "step": 404 + }, + { + "epoch": 0.020230780758279634, + "grad_norm": 0.5370644330978394, + "learning_rate": 6.738768718801997e-05, + "loss": 0.0154, + "step": 405 + }, + { + "epoch": 0.020280733303361807, + "grad_norm": 0.7622987031936646, + "learning_rate": 6.75540765391015e-05, + "loss": 0.1168, + "step": 406 + }, + { + "epoch": 0.02033068584844398, + "grad_norm": 0.6339974999427795, + "learning_rate": 6.772046589018302e-05, + "loss": 0.0513, + "step": 407 + }, + { + "epoch": 0.02038063839352615, + "grad_norm": 1.1702152490615845, + "learning_rate": 6.788685524126456e-05, + "loss": 0.0299, + "step": 408 + }, + { + "epoch": 0.02043059093860832, + "grad_norm": 0.8568829298019409, + "learning_rate": 6.805324459234609e-05, + "loss": 0.0171, + "step": 409 + }, + { + "epoch": 0.020480543483690494, + "grad_norm": 0.7926309704780579, + "learning_rate": 6.821963394342762e-05, + "loss": 0.0159, + "step": 410 + }, + { + "epoch": 0.020530496028772667, + "grad_norm": 0.7580233216285706, + "learning_rate": 6.838602329450915e-05, + "loss": 0.1421, + "step": 411 + }, + { + "epoch": 0.020580448573854836, + "grad_norm": 0.7526037693023682, + "learning_rate": 6.855241264559067e-05, + "loss": 0.1435, + "step": 412 + }, + { + "epoch": 0.02063040111893701, + "grad_norm": 0.7218987941741943, + "learning_rate": 6.871880199667221e-05, + "loss": 0.1242, + "step": 413 + }, + { + "epoch": 0.020680353664019182, + "grad_norm": 0.9916396737098694, + "learning_rate": 6.888519134775375e-05, + "loss": 0.1356, + "step": 414 + }, + { + "epoch": 0.020730306209101355, + "grad_norm": 0.5703962445259094, + "learning_rate": 6.905158069883528e-05, + "loss": 0.0275, + "step": 415 + }, + { + "epoch": 0.020780258754183524, + "grad_norm": 0.8390823006629944, + "learning_rate": 6.921797004991682e-05, + "loss": 0.0697, + "step": 416 + }, + { + "epoch": 0.020830211299265697, + "grad_norm": 0.5570675730705261, + "learning_rate": 6.938435940099834e-05, + "loss": 0.0618, + "step": 417 + }, + { + "epoch": 0.02088016384434787, + "grad_norm": 0.560774564743042, + "learning_rate": 6.955074875207988e-05, + "loss": 0.0562, + "step": 418 + }, + { + "epoch": 0.020930116389430042, + "grad_norm": 0.8487900495529175, + "learning_rate": 6.97171381031614e-05, + "loss": 0.0225, + "step": 419 + }, + { + "epoch": 0.02098006893451221, + "grad_norm": 0.652852475643158, + "learning_rate": 6.988352745424294e-05, + "loss": 0.0175, + "step": 420 + }, + { + "epoch": 0.021030021479594384, + "grad_norm": 0.911546528339386, + "learning_rate": 7.004991680532447e-05, + "loss": 0.2497, + "step": 421 + }, + { + "epoch": 
0.021079974024676557, + "grad_norm": 0.8173904418945312, + "learning_rate": 7.021630615640599e-05, + "loss": 0.0275, + "step": 422 + }, + { + "epoch": 0.02112992656975873, + "grad_norm": 0.8045750260353088, + "learning_rate": 7.038269550748753e-05, + "loss": 0.1272, + "step": 423 + }, + { + "epoch": 0.021179879114840903, + "grad_norm": 0.7161885499954224, + "learning_rate": 7.054908485856906e-05, + "loss": 0.04, + "step": 424 + }, + { + "epoch": 0.021229831659923072, + "grad_norm": 0.576621413230896, + "learning_rate": 7.07154742096506e-05, + "loss": 0.0573, + "step": 425 + }, + { + "epoch": 0.021279784205005245, + "grad_norm": 0.815387487411499, + "learning_rate": 7.088186356073212e-05, + "loss": 0.0421, + "step": 426 + }, + { + "epoch": 0.021329736750087418, + "grad_norm": 0.7231108546257019, + "learning_rate": 7.104825291181364e-05, + "loss": 0.2579, + "step": 427 + }, + { + "epoch": 0.02137968929516959, + "grad_norm": 0.7460161447525024, + "learning_rate": 7.121464226289518e-05, + "loss": 0.0311, + "step": 428 + }, + { + "epoch": 0.02142964184025176, + "grad_norm": 0.572077751159668, + "learning_rate": 7.138103161397671e-05, + "loss": 0.0232, + "step": 429 + }, + { + "epoch": 0.021479594385333933, + "grad_norm": 0.6145396828651428, + "learning_rate": 7.154742096505825e-05, + "loss": 0.0512, + "step": 430 + }, + { + "epoch": 0.021529546930416105, + "grad_norm": 0.6593700647354126, + "learning_rate": 7.171381031613977e-05, + "loss": 0.1273, + "step": 431 + }, + { + "epoch": 0.021579499475498278, + "grad_norm": 0.5056473612785339, + "learning_rate": 7.18801996672213e-05, + "loss": 0.1745, + "step": 432 + }, + { + "epoch": 0.021629452020580447, + "grad_norm": 0.6090322136878967, + "learning_rate": 7.204658901830284e-05, + "loss": 0.0412, + "step": 433 + }, + { + "epoch": 0.02167940456566262, + "grad_norm": 0.6056920289993286, + "learning_rate": 7.221297836938436e-05, + "loss": 0.0098, + "step": 434 + }, + { + "epoch": 0.021729357110744793, + "grad_norm": 0.5330151915550232, + "learning_rate": 7.23793677204659e-05, + "loss": 0.0644, + "step": 435 + }, + { + "epoch": 0.021779309655826966, + "grad_norm": 0.5986607670783997, + "learning_rate": 7.254575707154742e-05, + "loss": 0.0497, + "step": 436 + }, + { + "epoch": 0.021829262200909135, + "grad_norm": 0.5386449098587036, + "learning_rate": 7.271214642262896e-05, + "loss": 0.0308, + "step": 437 + }, + { + "epoch": 0.021879214745991308, + "grad_norm": 0.42428672313690186, + "learning_rate": 7.287853577371049e-05, + "loss": 0.03, + "step": 438 + }, + { + "epoch": 0.02192916729107348, + "grad_norm": 0.6082504391670227, + "learning_rate": 7.304492512479201e-05, + "loss": 0.0375, + "step": 439 + }, + { + "epoch": 0.021979119836155653, + "grad_norm": 0.45483770966529846, + "learning_rate": 7.321131447587355e-05, + "loss": 0.051, + "step": 440 + }, + { + "epoch": 0.022029072381237823, + "grad_norm": 0.39269495010375977, + "learning_rate": 7.337770382695508e-05, + "loss": 0.024, + "step": 441 + }, + { + "epoch": 0.022079024926319996, + "grad_norm": 0.3701241612434387, + "learning_rate": 7.354409317803662e-05, + "loss": 0.0146, + "step": 442 + }, + { + "epoch": 0.02212897747140217, + "grad_norm": 0.5039634108543396, + "learning_rate": 7.371048252911814e-05, + "loss": 0.0584, + "step": 443 + }, + { + "epoch": 0.02217893001648434, + "grad_norm": 0.47586455941200256, + "learning_rate": 7.387687188019967e-05, + "loss": 0.1542, + "step": 444 + }, + { + "epoch": 0.02222888256156651, + "grad_norm": 0.6887615919113159, + "learning_rate": 7.40432612312812e-05, 
+ "loss": 0.0637, + "step": 445 + }, + { + "epoch": 0.022278835106648683, + "grad_norm": 0.35250988602638245, + "learning_rate": 7.420965058236273e-05, + "loss": 0.0289, + "step": 446 + }, + { + "epoch": 0.022328787651730856, + "grad_norm": 0.6109102964401245, + "learning_rate": 7.437603993344427e-05, + "loss": 0.1516, + "step": 447 + }, + { + "epoch": 0.02237874019681303, + "grad_norm": 0.7194654941558838, + "learning_rate": 7.454242928452579e-05, + "loss": 0.0305, + "step": 448 + }, + { + "epoch": 0.022428692741895198, + "grad_norm": 0.6307637095451355, + "learning_rate": 7.470881863560733e-05, + "loss": 0.0743, + "step": 449 + }, + { + "epoch": 0.02247864528697737, + "grad_norm": 0.8775025010108948, + "learning_rate": 7.487520798668886e-05, + "loss": 0.039, + "step": 450 + }, + { + "epoch": 0.022528597832059544, + "grad_norm": 0.8005836009979248, + "learning_rate": 7.504159733777038e-05, + "loss": 0.0484, + "step": 451 + }, + { + "epoch": 0.022578550377141716, + "grad_norm": 0.8005648851394653, + "learning_rate": 7.520798668885192e-05, + "loss": 0.0337, + "step": 452 + }, + { + "epoch": 0.022628502922223886, + "grad_norm": 0.7831838130950928, + "learning_rate": 7.537437603993345e-05, + "loss": 0.0156, + "step": 453 + }, + { + "epoch": 0.02267845546730606, + "grad_norm": 0.7577358484268188, + "learning_rate": 7.554076539101498e-05, + "loss": 0.094, + "step": 454 + }, + { + "epoch": 0.02272840801238823, + "grad_norm": 0.8397919535636902, + "learning_rate": 7.570715474209651e-05, + "loss": 0.0979, + "step": 455 + }, + { + "epoch": 0.022778360557470404, + "grad_norm": 0.7972233295440674, + "learning_rate": 7.587354409317803e-05, + "loss": 0.0236, + "step": 456 + }, + { + "epoch": 0.022828313102552573, + "grad_norm": 0.7102578282356262, + "learning_rate": 7.603993344425957e-05, + "loss": 0.0162, + "step": 457 + }, + { + "epoch": 0.022878265647634746, + "grad_norm": 0.7206525802612305, + "learning_rate": 7.62063227953411e-05, + "loss": 0.0993, + "step": 458 + }, + { + "epoch": 0.02292821819271692, + "grad_norm": 0.741297721862793, + "learning_rate": 7.637271214642264e-05, + "loss": 0.0763, + "step": 459 + }, + { + "epoch": 0.022978170737799092, + "grad_norm": 0.8757969737052917, + "learning_rate": 7.653910149750416e-05, + "loss": 0.0307, + "step": 460 + }, + { + "epoch": 0.02302812328288126, + "grad_norm": 0.857140064239502, + "learning_rate": 7.670549084858569e-05, + "loss": 0.0442, + "step": 461 + }, + { + "epoch": 0.023078075827963434, + "grad_norm": 0.6564064025878906, + "learning_rate": 7.687188019966722e-05, + "loss": 0.0591, + "step": 462 + }, + { + "epoch": 0.023128028373045607, + "grad_norm": 0.7751290202140808, + "learning_rate": 7.703826955074875e-05, + "loss": 0.0125, + "step": 463 + }, + { + "epoch": 0.02317798091812778, + "grad_norm": 0.6433943510055542, + "learning_rate": 7.720465890183029e-05, + "loss": 0.0257, + "step": 464 + }, + { + "epoch": 0.023227933463209952, + "grad_norm": 0.5921209454536438, + "learning_rate": 7.737104825291181e-05, + "loss": 0.0429, + "step": 465 + }, + { + "epoch": 0.02327788600829212, + "grad_norm": 0.5040141344070435, + "learning_rate": 7.753743760399335e-05, + "loss": 0.0251, + "step": 466 + }, + { + "epoch": 0.023327838553374294, + "grad_norm": 0.4425252079963684, + "learning_rate": 7.770382695507488e-05, + "loss": 0.0377, + "step": 467 + }, + { + "epoch": 0.023377791098456467, + "grad_norm": 0.594623327255249, + "learning_rate": 7.78702163061564e-05, + "loss": 0.0353, + "step": 468 + }, + { + "epoch": 0.02342774364353864, + "grad_norm": 
0.529391348361969, + "learning_rate": 7.803660565723794e-05, + "loss": 0.0185, + "step": 469 + }, + { + "epoch": 0.02347769618862081, + "grad_norm": 0.4944928288459778, + "learning_rate": 7.820299500831947e-05, + "loss": 0.1546, + "step": 470 + }, + { + "epoch": 0.023527648733702982, + "grad_norm": 0.4489753246307373, + "learning_rate": 7.8369384359401e-05, + "loss": 0.0552, + "step": 471 + }, + { + "epoch": 0.023577601278785155, + "grad_norm": 0.6387485861778259, + "learning_rate": 7.853577371048253e-05, + "loss": 0.0546, + "step": 472 + }, + { + "epoch": 0.023627553823867328, + "grad_norm": 0.5656390190124512, + "learning_rate": 7.870216306156405e-05, + "loss": 0.0172, + "step": 473 + }, + { + "epoch": 0.023677506368949497, + "grad_norm": 0.5951818227767944, + "learning_rate": 7.886855241264559e-05, + "loss": 0.0427, + "step": 474 + }, + { + "epoch": 0.02372745891403167, + "grad_norm": 0.49911248683929443, + "learning_rate": 7.903494176372712e-05, + "loss": 0.0135, + "step": 475 + }, + { + "epoch": 0.023777411459113842, + "grad_norm": 0.42261171340942383, + "learning_rate": 7.920133111480866e-05, + "loss": 0.0295, + "step": 476 + }, + { + "epoch": 0.023827364004196015, + "grad_norm": 0.6505982875823975, + "learning_rate": 7.936772046589018e-05, + "loss": 0.0163, + "step": 477 + }, + { + "epoch": 0.023877316549278185, + "grad_norm": 0.5210033059120178, + "learning_rate": 7.953410981697172e-05, + "loss": 0.0083, + "step": 478 + }, + { + "epoch": 0.023927269094360357, + "grad_norm": 0.43533170223236084, + "learning_rate": 7.970049916805325e-05, + "loss": 0.0457, + "step": 479 + }, + { + "epoch": 0.02397722163944253, + "grad_norm": 0.44989335536956787, + "learning_rate": 7.986688851913477e-05, + "loss": 0.0401, + "step": 480 + }, + { + "epoch": 0.024027174184524703, + "grad_norm": 0.5011099576950073, + "learning_rate": 8.003327787021631e-05, + "loss": 0.03, + "step": 481 + }, + { + "epoch": 0.024077126729606872, + "grad_norm": 0.47838276624679565, + "learning_rate": 8.019966722129783e-05, + "loss": 0.0221, + "step": 482 + }, + { + "epoch": 0.024127079274689045, + "grad_norm": 0.45105695724487305, + "learning_rate": 8.036605657237937e-05, + "loss": 0.0247, + "step": 483 + }, + { + "epoch": 0.024177031819771218, + "grad_norm": 0.41928738355636597, + "learning_rate": 8.05324459234609e-05, + "loss": 0.0183, + "step": 484 + }, + { + "epoch": 0.02422698436485339, + "grad_norm": 0.6916990280151367, + "learning_rate": 8.069883527454242e-05, + "loss": 0.1138, + "step": 485 + }, + { + "epoch": 0.02427693690993556, + "grad_norm": 0.5992579460144043, + "learning_rate": 8.086522462562396e-05, + "loss": 0.0445, + "step": 486 + }, + { + "epoch": 0.024326889455017733, + "grad_norm": 0.5708386898040771, + "learning_rate": 8.103161397670549e-05, + "loss": 0.0376, + "step": 487 + }, + { + "epoch": 0.024376842000099905, + "grad_norm": 0.4699360728263855, + "learning_rate": 8.119800332778702e-05, + "loss": 0.0253, + "step": 488 + }, + { + "epoch": 0.024426794545182078, + "grad_norm": 0.4717262387275696, + "learning_rate": 8.136439267886856e-05, + "loss": 0.1474, + "step": 489 + }, + { + "epoch": 0.024476747090264248, + "grad_norm": 0.5126490592956543, + "learning_rate": 8.153078202995009e-05, + "loss": 0.0197, + "step": 490 + }, + { + "epoch": 0.02452669963534642, + "grad_norm": 0.6797859072685242, + "learning_rate": 8.169717138103163e-05, + "loss": 0.0419, + "step": 491 + }, + { + "epoch": 0.024576652180428593, + "grad_norm": 0.5222330093383789, + "learning_rate": 8.186356073211315e-05, + "loss": 0.0161, + 
"step": 492 + }, + { + "epoch": 0.024626604725510766, + "grad_norm": 0.4467020630836487, + "learning_rate": 8.202995008319469e-05, + "loss": 0.108, + "step": 493 + }, + { + "epoch": 0.024676557270592935, + "grad_norm": 0.5784523487091064, + "learning_rate": 8.219633943427622e-05, + "loss": 0.0298, + "step": 494 + }, + { + "epoch": 0.024726509815675108, + "grad_norm": 0.5478585958480835, + "learning_rate": 8.236272878535774e-05, + "loss": 0.0617, + "step": 495 + }, + { + "epoch": 0.02477646236075728, + "grad_norm": 0.5446329712867737, + "learning_rate": 8.252911813643928e-05, + "loss": 0.0968, + "step": 496 + }, + { + "epoch": 0.024826414905839454, + "grad_norm": 0.60307377576828, + "learning_rate": 8.26955074875208e-05, + "loss": 0.0524, + "step": 497 + }, + { + "epoch": 0.024876367450921623, + "grad_norm": 0.5957506895065308, + "learning_rate": 8.286189683860234e-05, + "loss": 0.0175, + "step": 498 + }, + { + "epoch": 0.024926319996003796, + "grad_norm": 0.41645124554634094, + "learning_rate": 8.302828618968387e-05, + "loss": 0.0168, + "step": 499 + }, + { + "epoch": 0.02497627254108597, + "grad_norm": 0.44864949584007263, + "learning_rate": 8.319467554076539e-05, + "loss": 0.0492, + "step": 500 + }, + { + "epoch": 0.02502622508616814, + "grad_norm": 0.6209083199501038, + "learning_rate": 8.336106489184693e-05, + "loss": 0.025, + "step": 501 + }, + { + "epoch": 0.02507617763125031, + "grad_norm": 0.603119969367981, + "learning_rate": 8.352745424292846e-05, + "loss": 0.0273, + "step": 502 + }, + { + "epoch": 0.025126130176332483, + "grad_norm": 0.6709516048431396, + "learning_rate": 8.369384359401e-05, + "loss": 0.1252, + "step": 503 + }, + { + "epoch": 0.025176082721414656, + "grad_norm": 0.7413842678070068, + "learning_rate": 8.386023294509152e-05, + "loss": 0.0153, + "step": 504 + }, + { + "epoch": 0.02522603526649683, + "grad_norm": 0.9032710194587708, + "learning_rate": 8.402662229617305e-05, + "loss": 0.0751, + "step": 505 + }, + { + "epoch": 0.025275987811579, + "grad_norm": 0.5562509298324585, + "learning_rate": 8.419301164725458e-05, + "loss": 0.02, + "step": 506 + }, + { + "epoch": 0.02532594035666117, + "grad_norm": 0.6195027232170105, + "learning_rate": 8.435940099833611e-05, + "loss": 0.0633, + "step": 507 + }, + { + "epoch": 0.025375892901743344, + "grad_norm": 0.8276190757751465, + "learning_rate": 8.452579034941765e-05, + "loss": 0.0535, + "step": 508 + }, + { + "epoch": 0.025425845446825517, + "grad_norm": 0.7709572315216064, + "learning_rate": 8.469217970049917e-05, + "loss": 0.1837, + "step": 509 + }, + { + "epoch": 0.02547579799190769, + "grad_norm": 0.7662091851234436, + "learning_rate": 8.485856905158071e-05, + "loss": 0.0306, + "step": 510 + }, + { + "epoch": 0.02552575053698986, + "grad_norm": 0.7323451042175293, + "learning_rate": 8.502495840266224e-05, + "loss": 0.0236, + "step": 511 + }, + { + "epoch": 0.02557570308207203, + "grad_norm": 0.6191704273223877, + "learning_rate": 8.519134775374376e-05, + "loss": 0.0285, + "step": 512 + }, + { + "epoch": 0.025625655627154204, + "grad_norm": 0.5759174823760986, + "learning_rate": 8.53577371048253e-05, + "loss": 0.0207, + "step": 513 + }, + { + "epoch": 0.025675608172236377, + "grad_norm": 1.077681064605713, + "learning_rate": 8.552412645590682e-05, + "loss": 0.1653, + "step": 514 + }, + { + "epoch": 0.025725560717318546, + "grad_norm": 1.1839879751205444, + "learning_rate": 8.569051580698836e-05, + "loss": 0.0336, + "step": 515 + }, + { + "epoch": 0.02577551326240072, + "grad_norm": 0.741925060749054, + 
"learning_rate": 8.585690515806989e-05, + "loss": 0.2129, + "step": 516 + }, + { + "epoch": 0.025825465807482892, + "grad_norm": 0.9553372859954834, + "learning_rate": 8.602329450915141e-05, + "loss": 0.016, + "step": 517 + }, + { + "epoch": 0.025875418352565065, + "grad_norm": 1.0190812349319458, + "learning_rate": 8.618968386023295e-05, + "loss": 0.0356, + "step": 518 + }, + { + "epoch": 0.025925370897647234, + "grad_norm": 0.6753031611442566, + "learning_rate": 8.635607321131448e-05, + "loss": 0.0402, + "step": 519 + }, + { + "epoch": 0.025975323442729407, + "grad_norm": 0.8820869326591492, + "learning_rate": 8.652246256239602e-05, + "loss": 0.0232, + "step": 520 + }, + { + "epoch": 0.02602527598781158, + "grad_norm": 0.7335262298583984, + "learning_rate": 8.668885191347754e-05, + "loss": 0.1037, + "step": 521 + }, + { + "epoch": 0.026075228532893752, + "grad_norm": 0.8432420492172241, + "learning_rate": 8.685524126455908e-05, + "loss": 0.0171, + "step": 522 + }, + { + "epoch": 0.02612518107797592, + "grad_norm": 0.9434980750083923, + "learning_rate": 8.70216306156406e-05, + "loss": 0.1012, + "step": 523 + }, + { + "epoch": 0.026175133623058094, + "grad_norm": 0.6978007555007935, + "learning_rate": 8.718801996672213e-05, + "loss": 0.0324, + "step": 524 + }, + { + "epoch": 0.026225086168140267, + "grad_norm": 0.9871712327003479, + "learning_rate": 8.735440931780367e-05, + "loss": 0.064, + "step": 525 + }, + { + "epoch": 0.02627503871322244, + "grad_norm": 0.9312126636505127, + "learning_rate": 8.752079866888519e-05, + "loss": 0.0169, + "step": 526 + }, + { + "epoch": 0.02632499125830461, + "grad_norm": 0.7002211809158325, + "learning_rate": 8.768718801996673e-05, + "loss": 0.098, + "step": 527 + }, + { + "epoch": 0.026374943803386782, + "grad_norm": 0.8846800327301025, + "learning_rate": 8.785357737104826e-05, + "loss": 0.0608, + "step": 528 + }, + { + "epoch": 0.026424896348468955, + "grad_norm": 0.9121425151824951, + "learning_rate": 8.801996672212978e-05, + "loss": 0.163, + "step": 529 + }, + { + "epoch": 0.026474848893551128, + "grad_norm": 0.8648207783699036, + "learning_rate": 8.818635607321132e-05, + "loss": 0.0627, + "step": 530 + }, + { + "epoch": 0.026524801438633297, + "grad_norm": 0.5863767862319946, + "learning_rate": 8.835274542429285e-05, + "loss": 0.0683, + "step": 531 + }, + { + "epoch": 0.02657475398371547, + "grad_norm": 0.7643042802810669, + "learning_rate": 8.851913477537438e-05, + "loss": 0.1078, + "step": 532 + }, + { + "epoch": 0.026624706528797643, + "grad_norm": 0.7499838471412659, + "learning_rate": 8.868552412645591e-05, + "loss": 0.0189, + "step": 533 + }, + { + "epoch": 0.026674659073879815, + "grad_norm": 0.7118635773658752, + "learning_rate": 8.885191347753743e-05, + "loss": 0.0293, + "step": 534 + }, + { + "epoch": 0.026724611618961985, + "grad_norm": 0.5469330549240112, + "learning_rate": 8.901830282861897e-05, + "loss": 0.04, + "step": 535 + }, + { + "epoch": 0.026774564164044157, + "grad_norm": 0.8468111157417297, + "learning_rate": 8.91846921797005e-05, + "loss": 0.0185, + "step": 536 + }, + { + "epoch": 0.02682451670912633, + "grad_norm": 0.8880670666694641, + "learning_rate": 8.935108153078204e-05, + "loss": 0.0317, + "step": 537 + }, + { + "epoch": 0.026874469254208503, + "grad_norm": 0.5572132468223572, + "learning_rate": 8.951747088186356e-05, + "loss": 0.0088, + "step": 538 + }, + { + "epoch": 0.026924421799290672, + "grad_norm": 0.5380578637123108, + "learning_rate": 8.96838602329451e-05, + "loss": 0.0803, + "step": 539 + }, + { + "epoch": 
0.026974374344372845, + "grad_norm": 0.5765405297279358, + "learning_rate": 8.985024958402663e-05, + "loss": 0.0237, + "step": 540 + }, + { + "epoch": 0.027024326889455018, + "grad_norm": 0.6273530125617981, + "learning_rate": 9.001663893510815e-05, + "loss": 0.0293, + "step": 541 + }, + { + "epoch": 0.02707427943453719, + "grad_norm": 0.5352333784103394, + "learning_rate": 9.018302828618969e-05, + "loss": 0.0128, + "step": 542 + }, + { + "epoch": 0.02712423197961936, + "grad_norm": 0.47899189591407776, + "learning_rate": 9.034941763727121e-05, + "loss": 0.0244, + "step": 543 + }, + { + "epoch": 0.027174184524701533, + "grad_norm": 0.5222384929656982, + "learning_rate": 9.051580698835275e-05, + "loss": 0.054, + "step": 544 + }, + { + "epoch": 0.027224137069783706, + "grad_norm": 0.5817683339118958, + "learning_rate": 9.068219633943428e-05, + "loss": 0.0205, + "step": 545 + }, + { + "epoch": 0.02727408961486588, + "grad_norm": 0.6895291805267334, + "learning_rate": 9.08485856905158e-05, + "loss": 0.1258, + "step": 546 + }, + { + "epoch": 0.027324042159948048, + "grad_norm": 0.7045086622238159, + "learning_rate": 9.101497504159734e-05, + "loss": 0.3491, + "step": 547 + }, + { + "epoch": 0.02737399470503022, + "grad_norm": 0.5296542048454285, + "learning_rate": 9.118136439267887e-05, + "loss": 0.0114, + "step": 548 + }, + { + "epoch": 0.027423947250112393, + "grad_norm": 0.606448233127594, + "learning_rate": 9.13477537437604e-05, + "loss": 0.0293, + "step": 549 + }, + { + "epoch": 0.027473899795194566, + "grad_norm": 0.7867725491523743, + "learning_rate": 9.151414309484193e-05, + "loss": 0.4021, + "step": 550 + }, + { + "epoch": 0.02752385234027674, + "grad_norm": 0.6079162359237671, + "learning_rate": 9.168053244592347e-05, + "loss": 0.1239, + "step": 551 + }, + { + "epoch": 0.027573804885358908, + "grad_norm": 0.9728902578353882, + "learning_rate": 9.1846921797005e-05, + "loss": 0.0367, + "step": 552 + }, + { + "epoch": 0.02762375743044108, + "grad_norm": 0.8469427824020386, + "learning_rate": 9.201331114808652e-05, + "loss": 0.1597, + "step": 553 + }, + { + "epoch": 0.027673709975523254, + "grad_norm": 0.7472911477088928, + "learning_rate": 9.217970049916806e-05, + "loss": 0.134, + "step": 554 + }, + { + "epoch": 0.027723662520605426, + "grad_norm": 0.8408514261245728, + "learning_rate": 9.234608985024958e-05, + "loss": 0.0317, + "step": 555 + }, + { + "epoch": 0.027773615065687596, + "grad_norm": 0.8090733289718628, + "learning_rate": 9.251247920133112e-05, + "loss": 0.0394, + "step": 556 + }, + { + "epoch": 0.02782356761076977, + "grad_norm": 0.6135545372962952, + "learning_rate": 9.267886855241265e-05, + "loss": 0.0316, + "step": 557 + }, + { + "epoch": 0.02787352015585194, + "grad_norm": 0.679618239402771, + "learning_rate": 9.284525790349417e-05, + "loss": 0.0413, + "step": 558 + }, + { + "epoch": 0.027923472700934114, + "grad_norm": 0.771882176399231, + "learning_rate": 9.301164725457571e-05, + "loss": 0.0372, + "step": 559 + }, + { + "epoch": 0.027973425246016283, + "grad_norm": 0.672834038734436, + "learning_rate": 9.317803660565723e-05, + "loss": 0.0907, + "step": 560 + }, + { + "epoch": 0.028023377791098456, + "grad_norm": 0.9464245438575745, + "learning_rate": 9.334442595673877e-05, + "loss": 0.0155, + "step": 561 + }, + { + "epoch": 0.02807333033618063, + "grad_norm": 0.716213047504425, + "learning_rate": 9.35108153078203e-05, + "loss": 0.0688, + "step": 562 + }, + { + "epoch": 0.028123282881262802, + "grad_norm": 0.6392244100570679, + "learning_rate": 9.367720465890182e-05, + 
"loss": 0.0551, + "step": 563 + }, + { + "epoch": 0.02817323542634497, + "grad_norm": 0.6184731125831604, + "learning_rate": 9.384359400998338e-05, + "loss": 0.0267, + "step": 564 + }, + { + "epoch": 0.028223187971427144, + "grad_norm": 0.4990319013595581, + "learning_rate": 9.40099833610649e-05, + "loss": 0.0241, + "step": 565 + }, + { + "epoch": 0.028273140516509317, + "grad_norm": 0.7598366737365723, + "learning_rate": 9.417637271214644e-05, + "loss": 0.0941, + "step": 566 + }, + { + "epoch": 0.02832309306159149, + "grad_norm": 0.6571099162101746, + "learning_rate": 9.434276206322796e-05, + "loss": 0.0248, + "step": 567 + }, + { + "epoch": 0.02837304560667366, + "grad_norm": 0.7228655815124512, + "learning_rate": 9.450915141430949e-05, + "loss": 0.0583, + "step": 568 + }, + { + "epoch": 0.02842299815175583, + "grad_norm": 0.5334884524345398, + "learning_rate": 9.467554076539103e-05, + "loss": 0.0278, + "step": 569 + }, + { + "epoch": 0.028472950696838004, + "grad_norm": 0.615043580532074, + "learning_rate": 9.484193011647255e-05, + "loss": 0.0144, + "step": 570 + }, + { + "epoch": 0.028522903241920177, + "grad_norm": 0.49267223477363586, + "learning_rate": 9.500831946755409e-05, + "loss": 0.0325, + "step": 571 + }, + { + "epoch": 0.028572855787002346, + "grad_norm": 0.46629852056503296, + "learning_rate": 9.517470881863562e-05, + "loss": 0.0337, + "step": 572 + }, + { + "epoch": 0.02862280833208452, + "grad_norm": 0.4710291624069214, + "learning_rate": 9.534109816971714e-05, + "loss": 0.0393, + "step": 573 + }, + { + "epoch": 0.028672760877166692, + "grad_norm": 0.5465243458747864, + "learning_rate": 9.550748752079868e-05, + "loss": 0.0164, + "step": 574 + }, + { + "epoch": 0.028722713422248865, + "grad_norm": 0.5630074739456177, + "learning_rate": 9.56738768718802e-05, + "loss": 0.1487, + "step": 575 + }, + { + "epoch": 0.028772665967331034, + "grad_norm": 0.5086005926132202, + "learning_rate": 9.584026622296174e-05, + "loss": 0.0688, + "step": 576 + }, + { + "epoch": 0.028822618512413207, + "grad_norm": 0.5639705061912537, + "learning_rate": 9.600665557404327e-05, + "loss": 0.0203, + "step": 577 + }, + { + "epoch": 0.02887257105749538, + "grad_norm": 0.5977868437767029, + "learning_rate": 9.61730449251248e-05, + "loss": 0.037, + "step": 578 + }, + { + "epoch": 0.028922523602577552, + "grad_norm": 0.4919567108154297, + "learning_rate": 9.633943427620633e-05, + "loss": 0.0364, + "step": 579 + }, + { + "epoch": 0.02897247614765972, + "grad_norm": 0.4776834547519684, + "learning_rate": 9.650582362728786e-05, + "loss": 0.1391, + "step": 580 + }, + { + "epoch": 0.029022428692741895, + "grad_norm": 0.5483447313308716, + "learning_rate": 9.66722129783694e-05, + "loss": 0.1882, + "step": 581 + }, + { + "epoch": 0.029072381237824067, + "grad_norm": 0.4843452274799347, + "learning_rate": 9.683860232945092e-05, + "loss": 0.1496, + "step": 582 + }, + { + "epoch": 0.02912233378290624, + "grad_norm": 0.4109496772289276, + "learning_rate": 9.700499168053246e-05, + "loss": 0.0299, + "step": 583 + }, + { + "epoch": 0.02917228632798841, + "grad_norm": 0.5499976277351379, + "learning_rate": 9.717138103161398e-05, + "loss": 0.0446, + "step": 584 + }, + { + "epoch": 0.029222238873070582, + "grad_norm": 0.4769642651081085, + "learning_rate": 9.733777038269551e-05, + "loss": 0.0127, + "step": 585 + }, + { + "epoch": 0.029272191418152755, + "grad_norm": 0.5834087133407593, + "learning_rate": 9.750415973377705e-05, + "loss": 0.0971, + "step": 586 + }, + { + "epoch": 0.029322143963234928, + "grad_norm": 
0.4362919330596924, + "learning_rate": 9.767054908485857e-05, + "loss": 0.016, + "step": 587 + }, + { + "epoch": 0.029372096508317097, + "grad_norm": 0.3829391598701477, + "learning_rate": 9.783693843594011e-05, + "loss": 0.0361, + "step": 588 + }, + { + "epoch": 0.02942204905339927, + "grad_norm": 0.44194260239601135, + "learning_rate": 9.800332778702164e-05, + "loss": 0.0211, + "step": 589 + }, + { + "epoch": 0.029472001598481443, + "grad_norm": 0.322797566652298, + "learning_rate": 9.816971713810316e-05, + "loss": 0.0143, + "step": 590 + }, + { + "epoch": 0.029521954143563615, + "grad_norm": 0.41340047121047974, + "learning_rate": 9.83361064891847e-05, + "loss": 0.0151, + "step": 591 + }, + { + "epoch": 0.029571906688645788, + "grad_norm": 0.36231666803359985, + "learning_rate": 9.850249584026623e-05, + "loss": 0.0203, + "step": 592 + }, + { + "epoch": 0.029621859233727958, + "grad_norm": 0.39851629734039307, + "learning_rate": 9.866888519134776e-05, + "loss": 0.0317, + "step": 593 + }, + { + "epoch": 0.02967181177881013, + "grad_norm": 0.3903023302555084, + "learning_rate": 9.883527454242929e-05, + "loss": 0.0097, + "step": 594 + }, + { + "epoch": 0.029721764323892303, + "grad_norm": 0.36431533098220825, + "learning_rate": 9.900166389351083e-05, + "loss": 0.0313, + "step": 595 + }, + { + "epoch": 0.029771716868974476, + "grad_norm": 0.33846795558929443, + "learning_rate": 9.916805324459235e-05, + "loss": 0.0269, + "step": 596 + }, + { + "epoch": 0.029821669414056645, + "grad_norm": 0.3975025415420532, + "learning_rate": 9.933444259567388e-05, + "loss": 0.0173, + "step": 597 + }, + { + "epoch": 0.029871621959138818, + "grad_norm": 0.4188225567340851, + "learning_rate": 9.950083194675542e-05, + "loss": 0.0087, + "step": 598 + }, + { + "epoch": 0.02992157450422099, + "grad_norm": 0.2506827414035797, + "learning_rate": 9.966722129783694e-05, + "loss": 0.0104, + "step": 599 + }, + { + "epoch": 0.029971527049303164, + "grad_norm": 0.308960497379303, + "learning_rate": 9.983361064891848e-05, + "loss": 0.1343, + "step": 600 + }, + { + "epoch": 0.030021479594385333, + "grad_norm": 0.3612692058086395, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 601 + }, + { + "epoch": 0.030071432139467506, + "grad_norm": 0.36283203959465027, + "learning_rate": 0.0001, + "loss": 0.0391, + "step": 602 + }, + { + "epoch": 0.03012138468454968, + "grad_norm": 0.36479452252388, + "learning_rate": 0.0001, + "loss": 0.0118, + "step": 603 + }, + { + "epoch": 0.03017133722963185, + "grad_norm": 0.4018637239933014, + "learning_rate": 0.0001, + "loss": 0.0174, + "step": 604 + }, + { + "epoch": 0.03022128977471402, + "grad_norm": 0.398385226726532, + "learning_rate": 0.0001, + "loss": 0.0198, + "step": 605 + }, + { + "epoch": 0.030271242319796193, + "grad_norm": 0.3644341826438904, + "learning_rate": 0.0001, + "loss": 0.155, + "step": 606 + }, + { + "epoch": 0.030321194864878366, + "grad_norm": 0.5398977398872375, + "learning_rate": 0.0001, + "loss": 0.0819, + "step": 607 + }, + { + "epoch": 0.03037114740996054, + "grad_norm": 0.6647158861160278, + "learning_rate": 0.0001, + "loss": 0.0264, + "step": 608 + }, + { + "epoch": 0.030421099955042708, + "grad_norm": 0.555635929107666, + "learning_rate": 0.0001, + "loss": 0.0518, + "step": 609 + }, + { + "epoch": 0.03047105250012488, + "grad_norm": 0.5294899940490723, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 610 + }, + { + "epoch": 0.030521005045207054, + "grad_norm": 0.7051606774330139, + "learning_rate": 0.0001, + "loss": 0.0563, + "step": 611 + }, + { + 
"epoch": 0.030570957590289227, + "grad_norm": 0.44410908222198486, + "learning_rate": 0.0001, + "loss": 0.0303, + "step": 612 + }, + { + "epoch": 0.030620910135371396, + "grad_norm": 0.5621587634086609, + "learning_rate": 0.0001, + "loss": 0.0408, + "step": 613 + }, + { + "epoch": 0.03067086268045357, + "grad_norm": 0.5017131567001343, + "learning_rate": 0.0001, + "loss": 0.0631, + "step": 614 + }, + { + "epoch": 0.03072081522553574, + "grad_norm": 0.43166103959083557, + "learning_rate": 0.0001, + "loss": 0.0488, + "step": 615 + }, + { + "epoch": 0.030770767770617914, + "grad_norm": 0.45159727334976196, + "learning_rate": 0.0001, + "loss": 0.0266, + "step": 616 + }, + { + "epoch": 0.030820720315700083, + "grad_norm": 0.4991622865200043, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 617 + }, + { + "epoch": 0.030870672860782256, + "grad_norm": 0.44174110889434814, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 618 + }, + { + "epoch": 0.03092062540586443, + "grad_norm": 0.3898726999759674, + "learning_rate": 0.0001, + "loss": 0.0318, + "step": 619 + }, + { + "epoch": 0.030970577950946602, + "grad_norm": 0.4026041626930237, + "learning_rate": 0.0001, + "loss": 0.0161, + "step": 620 + }, + { + "epoch": 0.03102053049602877, + "grad_norm": 0.32607826590538025, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 621 + }, + { + "epoch": 0.031070483041110944, + "grad_norm": 0.413459837436676, + "learning_rate": 0.0001, + "loss": 0.0352, + "step": 622 + }, + { + "epoch": 0.031120435586193117, + "grad_norm": 0.35116463899612427, + "learning_rate": 0.0001, + "loss": 0.0267, + "step": 623 + }, + { + "epoch": 0.03117038813127529, + "grad_norm": 0.34268665313720703, + "learning_rate": 0.0001, + "loss": 0.0226, + "step": 624 + }, + { + "epoch": 0.03122034067635746, + "grad_norm": 0.35203996300697327, + "learning_rate": 0.0001, + "loss": 0.0168, + "step": 625 + }, + { + "epoch": 0.031270293221439635, + "grad_norm": 0.3408026099205017, + "learning_rate": 0.0001, + "loss": 0.0203, + "step": 626 + }, + { + "epoch": 0.0313202457665218, + "grad_norm": 0.42950350046157837, + "learning_rate": 0.0001, + "loss": 0.0841, + "step": 627 + }, + { + "epoch": 0.031370198311603974, + "grad_norm": 0.43382203578948975, + "learning_rate": 0.0001, + "loss": 0.198, + "step": 628 + }, + { + "epoch": 0.031420150856686146, + "grad_norm": 0.3762940764427185, + "learning_rate": 0.0001, + "loss": 0.0275, + "step": 629 + }, + { + "epoch": 0.03147010340176832, + "grad_norm": 0.32274940609931946, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 630 + }, + { + "epoch": 0.03152005594685049, + "grad_norm": 0.39347127079963684, + "learning_rate": 0.0001, + "loss": 0.02, + "step": 631 + }, + { + "epoch": 0.031570008491932665, + "grad_norm": 0.3070063292980194, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 632 + }, + { + "epoch": 0.03161996103701484, + "grad_norm": 0.4458078444004059, + "learning_rate": 0.0001, + "loss": 0.0654, + "step": 633 + }, + { + "epoch": 0.03166991358209701, + "grad_norm": 0.37211498618125916, + "learning_rate": 0.0001, + "loss": 0.0174, + "step": 634 + }, + { + "epoch": 0.03171986612717918, + "grad_norm": 0.4464173913002014, + "learning_rate": 0.0001, + "loss": 0.1503, + "step": 635 + }, + { + "epoch": 0.03176981867226135, + "grad_norm": 0.408173531293869, + "learning_rate": 0.0001, + "loss": 0.0737, + "step": 636 + }, + { + "epoch": 0.03181977121734352, + "grad_norm": 0.41735634207725525, + "learning_rate": 0.0001, + "loss": 0.0264, + "step": 637 + }, + { + "epoch": 
0.031869723762425695, + "grad_norm": 0.37526819109916687, + "learning_rate": 0.0001, + "loss": 0.0352, + "step": 638 + }, + { + "epoch": 0.03191967630750787, + "grad_norm": 0.3839438259601593, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 639 + }, + { + "epoch": 0.03196962885259004, + "grad_norm": 0.3560616672039032, + "learning_rate": 0.0001, + "loss": 0.028, + "step": 640 + }, + { + "epoch": 0.03201958139767221, + "grad_norm": 0.5427830815315247, + "learning_rate": 0.0001, + "loss": 0.1212, + "step": 641 + }, + { + "epoch": 0.032069533942754386, + "grad_norm": 0.43218928575515747, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 642 + }, + { + "epoch": 0.03211948648783656, + "grad_norm": 0.3558264970779419, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 643 + }, + { + "epoch": 0.032169439032918724, + "grad_norm": 0.4927358031272888, + "learning_rate": 0.0001, + "loss": 0.0269, + "step": 644 + }, + { + "epoch": 0.0322193915780009, + "grad_norm": 0.4953572154045105, + "learning_rate": 0.0001, + "loss": 0.1541, + "step": 645 + }, + { + "epoch": 0.03226934412308307, + "grad_norm": 0.4367840886116028, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 646 + }, + { + "epoch": 0.03231929666816524, + "grad_norm": 0.5178108215332031, + "learning_rate": 0.0001, + "loss": 0.0682, + "step": 647 + }, + { + "epoch": 0.032369249213247415, + "grad_norm": 0.5523868203163147, + "learning_rate": 0.0001, + "loss": 0.072, + "step": 648 + }, + { + "epoch": 0.03241920175832959, + "grad_norm": 0.5356776118278503, + "learning_rate": 0.0001, + "loss": 0.0173, + "step": 649 + }, + { + "epoch": 0.03246915430341176, + "grad_norm": 0.6276541948318481, + "learning_rate": 0.0001, + "loss": 0.0365, + "step": 650 + }, + { + "epoch": 0.032519106848493934, + "grad_norm": 0.5935607552528381, + "learning_rate": 0.0001, + "loss": 0.1803, + "step": 651 + }, + { + "epoch": 0.0325690593935761, + "grad_norm": 0.6336206197738647, + "learning_rate": 0.0001, + "loss": 0.0494, + "step": 652 + }, + { + "epoch": 0.03261901193865827, + "grad_norm": 0.7214436531066895, + "learning_rate": 0.0001, + "loss": 0.0751, + "step": 653 + }, + { + "epoch": 0.032668964483740445, + "grad_norm": 0.5202685594558716, + "learning_rate": 0.0001, + "loss": 0.017, + "step": 654 + }, + { + "epoch": 0.03271891702882262, + "grad_norm": 0.4918231964111328, + "learning_rate": 0.0001, + "loss": 0.0269, + "step": 655 + }, + { + "epoch": 0.03276886957390479, + "grad_norm": 0.5493785738945007, + "learning_rate": 0.0001, + "loss": 0.1601, + "step": 656 + }, + { + "epoch": 0.032818822118986964, + "grad_norm": 0.6731783747673035, + "learning_rate": 0.0001, + "loss": 0.0313, + "step": 657 + }, + { + "epoch": 0.032868774664069136, + "grad_norm": 0.5955875515937805, + "learning_rate": 0.0001, + "loss": 0.0907, + "step": 658 + }, + { + "epoch": 0.03291872720915131, + "grad_norm": 0.44354403018951416, + "learning_rate": 0.0001, + "loss": 0.0518, + "step": 659 + }, + { + "epoch": 0.032968679754233475, + "grad_norm": 0.48454901576042175, + "learning_rate": 0.0001, + "loss": 0.0229, + "step": 660 + }, + { + "epoch": 0.03301863229931565, + "grad_norm": 0.5281491875648499, + "learning_rate": 0.0001, + "loss": 0.0743, + "step": 661 + }, + { + "epoch": 0.03306858484439782, + "grad_norm": 0.4481571316719055, + "learning_rate": 0.0001, + "loss": 0.0218, + "step": 662 + }, + { + "epoch": 0.03311853738947999, + "grad_norm": 0.43633806705474854, + "learning_rate": 0.0001, + "loss": 0.0195, + "step": 663 + }, + { + "epoch": 0.033168489934562166, + "grad_norm": 
0.4298940598964691, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 664 + }, + { + "epoch": 0.03321844247964434, + "grad_norm": 0.46610671281814575, + "learning_rate": 0.0001, + "loss": 0.0485, + "step": 665 + }, + { + "epoch": 0.03326839502472651, + "grad_norm": 0.48274585604667664, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 666 + }, + { + "epoch": 0.033318347569808685, + "grad_norm": 0.4019782841205597, + "learning_rate": 0.0001, + "loss": 0.1852, + "step": 667 + }, + { + "epoch": 0.03336830011489085, + "grad_norm": 0.42000526189804077, + "learning_rate": 0.0001, + "loss": 0.0318, + "step": 668 + }, + { + "epoch": 0.03341825265997302, + "grad_norm": 0.36862418055534363, + "learning_rate": 0.0001, + "loss": 0.0182, + "step": 669 + }, + { + "epoch": 0.033468205205055196, + "grad_norm": 0.36126643419265747, + "learning_rate": 0.0001, + "loss": 0.0153, + "step": 670 + }, + { + "epoch": 0.03351815775013737, + "grad_norm": 0.49328839778900146, + "learning_rate": 0.0001, + "loss": 0.0446, + "step": 671 + }, + { + "epoch": 0.03356811029521954, + "grad_norm": 0.39675840735435486, + "learning_rate": 0.0001, + "loss": 0.0329, + "step": 672 + }, + { + "epoch": 0.033618062840301714, + "grad_norm": 0.4160608649253845, + "learning_rate": 0.0001, + "loss": 0.0199, + "step": 673 + }, + { + "epoch": 0.03366801538538389, + "grad_norm": 0.325435996055603, + "learning_rate": 0.0001, + "loss": 0.0149, + "step": 674 + }, + { + "epoch": 0.03371796793046606, + "grad_norm": 0.39068683981895447, + "learning_rate": 0.0001, + "loss": 0.0303, + "step": 675 + }, + { + "epoch": 0.03376792047554823, + "grad_norm": 0.3490559160709381, + "learning_rate": 0.0001, + "loss": 0.0171, + "step": 676 + }, + { + "epoch": 0.0338178730206304, + "grad_norm": 0.3673558235168457, + "learning_rate": 0.0001, + "loss": 0.0257, + "step": 677 + }, + { + "epoch": 0.03386782556571257, + "grad_norm": 0.29753443598747253, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 678 + }, + { + "epoch": 0.033917778110794744, + "grad_norm": 0.47393813729286194, + "learning_rate": 0.0001, + "loss": 0.0519, + "step": 679 + }, + { + "epoch": 0.03396773065587692, + "grad_norm": 0.5102143287658691, + "learning_rate": 0.0001, + "loss": 0.1036, + "step": 680 + }, + { + "epoch": 0.03401768320095909, + "grad_norm": 0.3006477355957031, + "learning_rate": 0.0001, + "loss": 0.0098, + "step": 681 + }, + { + "epoch": 0.03406763574604126, + "grad_norm": 0.4543059170246124, + "learning_rate": 0.0001, + "loss": 0.0247, + "step": 682 + }, + { + "epoch": 0.034117588291123435, + "grad_norm": 0.4608672857284546, + "learning_rate": 0.0001, + "loss": 0.0618, + "step": 683 + }, + { + "epoch": 0.03416754083620561, + "grad_norm": 0.3074890971183777, + "learning_rate": 0.0001, + "loss": 0.1368, + "step": 684 + }, + { + "epoch": 0.034217493381287774, + "grad_norm": 0.7455437779426575, + "learning_rate": 0.0001, + "loss": 0.2077, + "step": 685 + }, + { + "epoch": 0.03426744592636995, + "grad_norm": 0.6153985857963562, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 686 + }, + { + "epoch": 0.03431739847145212, + "grad_norm": 0.6359850764274597, + "learning_rate": 0.0001, + "loss": 0.0195, + "step": 687 + }, + { + "epoch": 0.03436735101653429, + "grad_norm": 0.8011057376861572, + "learning_rate": 0.0001, + "loss": 0.0961, + "step": 688 + }, + { + "epoch": 0.034417303561616465, + "grad_norm": 0.592543363571167, + "learning_rate": 0.0001, + "loss": 0.0212, + "step": 689 + }, + { + "epoch": 0.03446725610669864, + "grad_norm": 0.6588611602783203, + 
"learning_rate": 0.0001, + "loss": 0.1523, + "step": 690 + }, + { + "epoch": 0.03451720865178081, + "grad_norm": 0.5971696972846985, + "learning_rate": 0.0001, + "loss": 0.0254, + "step": 691 + }, + { + "epoch": 0.03456716119686298, + "grad_norm": 0.5804377198219299, + "learning_rate": 0.0001, + "loss": 0.1306, + "step": 692 + }, + { + "epoch": 0.03461711374194515, + "grad_norm": 0.5655006766319275, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 693 + }, + { + "epoch": 0.03466706628702732, + "grad_norm": 0.6037716865539551, + "learning_rate": 0.0001, + "loss": 0.0291, + "step": 694 + }, + { + "epoch": 0.034717018832109495, + "grad_norm": 0.5487541556358337, + "learning_rate": 0.0001, + "loss": 0.0186, + "step": 695 + }, + { + "epoch": 0.03476697137719167, + "grad_norm": 0.4329695701599121, + "learning_rate": 0.0001, + "loss": 0.0311, + "step": 696 + }, + { + "epoch": 0.03481692392227384, + "grad_norm": 0.4634283483028412, + "learning_rate": 0.0001, + "loss": 0.0257, + "step": 697 + }, + { + "epoch": 0.03486687646735601, + "grad_norm": 0.437765508890152, + "learning_rate": 0.0001, + "loss": 0.0251, + "step": 698 + }, + { + "epoch": 0.034916829012438186, + "grad_norm": 0.40194177627563477, + "learning_rate": 0.0001, + "loss": 0.0094, + "step": 699 + }, + { + "epoch": 0.03496678155752036, + "grad_norm": 0.500559389591217, + "learning_rate": 0.0001, + "loss": 0.042, + "step": 700 + }, + { + "epoch": 0.035016734102602524, + "grad_norm": 0.6342719197273254, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 701 + }, + { + "epoch": 0.0350666866476847, + "grad_norm": 0.3445347547531128, + "learning_rate": 0.0001, + "loss": 0.1543, + "step": 702 + }, + { + "epoch": 0.03511663919276687, + "grad_norm": 0.41866838932037354, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 703 + }, + { + "epoch": 0.03516659173784904, + "grad_norm": 0.3688819706439972, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 704 + }, + { + "epoch": 0.035216544282931216, + "grad_norm": 0.3646050691604614, + "learning_rate": 0.0001, + "loss": 0.016, + "step": 705 + }, + { + "epoch": 0.03526649682801339, + "grad_norm": 0.36661672592163086, + "learning_rate": 0.0001, + "loss": 0.0292, + "step": 706 + }, + { + "epoch": 0.03531644937309556, + "grad_norm": 0.3257046937942505, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 707 + }, + { + "epoch": 0.035366401918177734, + "grad_norm": 0.40412184596061707, + "learning_rate": 0.0001, + "loss": 0.0724, + "step": 708 + }, + { + "epoch": 0.0354163544632599, + "grad_norm": 0.40503668785095215, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 709 + }, + { + "epoch": 0.03546630700834207, + "grad_norm": 0.5863651037216187, + "learning_rate": 0.0001, + "loss": 0.071, + "step": 710 + }, + { + "epoch": 0.035516259553424245, + "grad_norm": 0.3601503372192383, + "learning_rate": 0.0001, + "loss": 0.0513, + "step": 711 + }, + { + "epoch": 0.03556621209850642, + "grad_norm": 0.42078542709350586, + "learning_rate": 0.0001, + "loss": 0.0534, + "step": 712 + }, + { + "epoch": 0.03561616464358859, + "grad_norm": 0.4145791828632355, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 713 + }, + { + "epoch": 0.035666117188670764, + "grad_norm": 0.3527210056781769, + "learning_rate": 0.0001, + "loss": 0.0207, + "step": 714 + }, + { + "epoch": 0.035716069733752936, + "grad_norm": 0.49244964122772217, + "learning_rate": 0.0001, + "loss": 0.0537, + "step": 715 + }, + { + "epoch": 0.03576602227883511, + "grad_norm": 0.37003231048583984, + "learning_rate": 0.0001, + "loss": 
0.0301, + "step": 716 + }, + { + "epoch": 0.03581597482391728, + "grad_norm": 0.2848529815673828, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 717 + }, + { + "epoch": 0.03586592736899945, + "grad_norm": 0.4067012667655945, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 718 + }, + { + "epoch": 0.03591587991408162, + "grad_norm": 0.5778188705444336, + "learning_rate": 0.0001, + "loss": 0.058, + "step": 719 + }, + { + "epoch": 0.03596583245916379, + "grad_norm": 0.4515853226184845, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 720 + }, + { + "epoch": 0.036015785004245966, + "grad_norm": 0.39757204055786133, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 721 + }, + { + "epoch": 0.03606573754932814, + "grad_norm": 0.36848849058151245, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 722 + }, + { + "epoch": 0.03611569009441031, + "grad_norm": 0.408626526594162, + "learning_rate": 0.0001, + "loss": 0.019, + "step": 723 + }, + { + "epoch": 0.036165642639492485, + "grad_norm": 0.4275587499141693, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 724 + }, + { + "epoch": 0.03621559518457466, + "grad_norm": 0.4005682170391083, + "learning_rate": 0.0001, + "loss": 0.0245, + "step": 725 + }, + { + "epoch": 0.03626554772965682, + "grad_norm": 0.37532690167427063, + "learning_rate": 0.0001, + "loss": 0.0683, + "step": 726 + }, + { + "epoch": 0.036315500274738996, + "grad_norm": 0.336221307516098, + "learning_rate": 0.0001, + "loss": 0.0085, + "step": 727 + }, + { + "epoch": 0.03636545281982117, + "grad_norm": 0.4166675806045532, + "learning_rate": 0.0001, + "loss": 0.0171, + "step": 728 + }, + { + "epoch": 0.03641540536490334, + "grad_norm": 0.34879860281944275, + "learning_rate": 0.0001, + "loss": 0.0135, + "step": 729 + }, + { + "epoch": 0.036465357909985514, + "grad_norm": 0.3092426061630249, + "learning_rate": 0.0001, + "loss": 0.0165, + "step": 730 + }, + { + "epoch": 0.03651531045506769, + "grad_norm": 0.3836914002895355, + "learning_rate": 0.0001, + "loss": 0.0314, + "step": 731 + }, + { + "epoch": 0.03656526300014986, + "grad_norm": 0.40817156434059143, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 732 + }, + { + "epoch": 0.03661521554523203, + "grad_norm": 0.35060590505599976, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 733 + }, + { + "epoch": 0.0366651680903142, + "grad_norm": 0.2940794825553894, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 734 + }, + { + "epoch": 0.03671512063539637, + "grad_norm": 0.390851229429245, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 735 + }, + { + "epoch": 0.036765073180478544, + "grad_norm": 0.43592917919158936, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 736 + }, + { + "epoch": 0.03681502572556072, + "grad_norm": 0.36282801628112793, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 737 + }, + { + "epoch": 0.03686497827064289, + "grad_norm": 0.41860339045524597, + "learning_rate": 0.0001, + "loss": 0.1547, + "step": 738 + }, + { + "epoch": 0.03691493081572506, + "grad_norm": 0.3197900950908661, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 739 + }, + { + "epoch": 0.036964883360807235, + "grad_norm": 0.34518417716026306, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 740 + }, + { + "epoch": 0.03701483590588941, + "grad_norm": 0.3151269257068634, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 741 + }, + { + "epoch": 0.037064788450971574, + "grad_norm": 0.3765740990638733, + "learning_rate": 0.0001, + "loss": 0.0213, + "step": 742 + }, + { + 
"epoch": 0.03711474099605375, + "grad_norm": 0.4188808500766754, + "learning_rate": 0.0001, + "loss": 0.0865, + "step": 743 + }, + { + "epoch": 0.03716469354113592, + "grad_norm": 0.38910919427871704, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 744 + }, + { + "epoch": 0.03721464608621809, + "grad_norm": 0.48784592747688293, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 745 + }, + { + "epoch": 0.037264598631300265, + "grad_norm": 0.35521501302719116, + "learning_rate": 0.0001, + "loss": 0.0313, + "step": 746 + }, + { + "epoch": 0.03731455117638244, + "grad_norm": 0.37384504079818726, + "learning_rate": 0.0001, + "loss": 0.0536, + "step": 747 + }, + { + "epoch": 0.03736450372146461, + "grad_norm": 0.4572345018386841, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 748 + }, + { + "epoch": 0.03741445626654678, + "grad_norm": 0.5510309338569641, + "learning_rate": 0.0001, + "loss": 0.098, + "step": 749 + }, + { + "epoch": 0.03746440881162895, + "grad_norm": 0.45002833008766174, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 750 + }, + { + "epoch": 0.03751436135671112, + "grad_norm": 0.5581663250923157, + "learning_rate": 0.0001, + "loss": 0.034, + "step": 751 + }, + { + "epoch": 0.037564313901793295, + "grad_norm": 0.42477741837501526, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 752 + }, + { + "epoch": 0.03761426644687547, + "grad_norm": 0.3469684422016144, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 753 + }, + { + "epoch": 0.03766421899195764, + "grad_norm": 0.47266390919685364, + "learning_rate": 0.0001, + "loss": 0.0168, + "step": 754 + }, + { + "epoch": 0.03771417153703981, + "grad_norm": 0.4726772904396057, + "learning_rate": 0.0001, + "loss": 0.02, + "step": 755 + }, + { + "epoch": 0.037764124082121986, + "grad_norm": 0.31871920824050903, + "learning_rate": 0.0001, + "loss": 0.1297, + "step": 756 + }, + { + "epoch": 0.03781407662720416, + "grad_norm": 0.490089476108551, + "learning_rate": 0.0001, + "loss": 0.0263, + "step": 757 + }, + { + "epoch": 0.037864029172286325, + "grad_norm": 0.36057376861572266, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 758 + }, + { + "epoch": 0.0379139817173685, + "grad_norm": 0.35802599787712097, + "learning_rate": 0.0001, + "loss": 0.1385, + "step": 759 + }, + { + "epoch": 0.03796393426245067, + "grad_norm": 0.3675881028175354, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 760 + }, + { + "epoch": 0.03801388680753284, + "grad_norm": 0.430169939994812, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 761 + }, + { + "epoch": 0.038063839352615016, + "grad_norm": 0.25379446148872375, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 762 + }, + { + "epoch": 0.03811379189769719, + "grad_norm": 0.35191476345062256, + "learning_rate": 0.0001, + "loss": 0.0205, + "step": 763 + }, + { + "epoch": 0.03816374444277936, + "grad_norm": 0.3186319172382355, + "learning_rate": 0.0001, + "loss": 0.0203, + "step": 764 + }, + { + "epoch": 0.038213696987861534, + "grad_norm": 0.3013067841529846, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 765 + }, + { + "epoch": 0.03826364953294371, + "grad_norm": 0.38902243971824646, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 766 + }, + { + "epoch": 0.03831360207802587, + "grad_norm": 0.3012847602367401, + "learning_rate": 0.0001, + "loss": 0.1289, + "step": 767 + }, + { + "epoch": 0.038363554623108045, + "grad_norm": 0.4440463185310364, + "learning_rate": 0.0001, + "loss": 0.0433, + "step": 768 + }, + { + "epoch": 0.03841350716819022, + 
"grad_norm": 0.4042235016822815, + "learning_rate": 0.0001, + "loss": 0.0446, + "step": 769 + }, + { + "epoch": 0.03846345971327239, + "grad_norm": 0.40291109681129456, + "learning_rate": 0.0001, + "loss": 0.1423, + "step": 770 + }, + { + "epoch": 0.038513412258354564, + "grad_norm": 0.3711715042591095, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 771 + }, + { + "epoch": 0.03856336480343674, + "grad_norm": 0.30692851543426514, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 772 + }, + { + "epoch": 0.03861331734851891, + "grad_norm": 0.49537888169288635, + "learning_rate": 0.0001, + "loss": 0.0372, + "step": 773 + }, + { + "epoch": 0.03866326989360108, + "grad_norm": 0.4964855909347534, + "learning_rate": 0.0001, + "loss": 0.0653, + "step": 774 + }, + { + "epoch": 0.03871322243868325, + "grad_norm": 0.5046573877334595, + "learning_rate": 0.0001, + "loss": 0.0206, + "step": 775 + }, + { + "epoch": 0.03876317498376542, + "grad_norm": 0.5300730466842651, + "learning_rate": 0.0001, + "loss": 0.0164, + "step": 776 + }, + { + "epoch": 0.038813127528847594, + "grad_norm": 0.511640191078186, + "learning_rate": 0.0001, + "loss": 0.0229, + "step": 777 + }, + { + "epoch": 0.038863080073929766, + "grad_norm": 0.5537866353988647, + "learning_rate": 0.0001, + "loss": 0.02, + "step": 778 + }, + { + "epoch": 0.03891303261901194, + "grad_norm": 0.4664732813835144, + "learning_rate": 0.0001, + "loss": 0.0531, + "step": 779 + }, + { + "epoch": 0.03896298516409411, + "grad_norm": 0.5009952783584595, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 780 + }, + { + "epoch": 0.039012937709176285, + "grad_norm": 0.4453968107700348, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 781 + }, + { + "epoch": 0.03906289025425846, + "grad_norm": 0.44181498885154724, + "learning_rate": 0.0001, + "loss": 0.0231, + "step": 782 + }, + { + "epoch": 0.03911284279934062, + "grad_norm": 0.49071502685546875, + "learning_rate": 0.0001, + "loss": 0.0263, + "step": 783 + }, + { + "epoch": 0.039162795344422796, + "grad_norm": 0.5327684879302979, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 784 + }, + { + "epoch": 0.03921274788950497, + "grad_norm": 0.5049312710762024, + "learning_rate": 0.0001, + "loss": 0.1494, + "step": 785 + }, + { + "epoch": 0.03926270043458714, + "grad_norm": 0.3441064655780792, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 786 + }, + { + "epoch": 0.039312652979669314, + "grad_norm": 0.45944133400917053, + "learning_rate": 0.0001, + "loss": 0.2795, + "step": 787 + }, + { + "epoch": 0.03936260552475149, + "grad_norm": 0.5531800985336304, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 788 + }, + { + "epoch": 0.03941255806983366, + "grad_norm": 0.3593677282333374, + "learning_rate": 0.0001, + "loss": 0.0171, + "step": 789 + }, + { + "epoch": 0.03946251061491583, + "grad_norm": 0.5121352672576904, + "learning_rate": 0.0001, + "loss": 0.0745, + "step": 790 + }, + { + "epoch": 0.039512463159998, + "grad_norm": 0.4650610685348511, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 791 + }, + { + "epoch": 0.03956241570508017, + "grad_norm": 0.6448559761047363, + "learning_rate": 0.0001, + "loss": 0.0954, + "step": 792 + }, + { + "epoch": 0.039612368250162344, + "grad_norm": 0.4245648980140686, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 793 + }, + { + "epoch": 0.03966232079524452, + "grad_norm": 0.47080472111701965, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 794 + }, + { + "epoch": 0.03971227334032669, + "grad_norm": 0.4915279448032379, + 
"learning_rate": 0.0001, + "loss": 0.0138, + "step": 795 + }, + { + "epoch": 0.03976222588540886, + "grad_norm": 0.7056427001953125, + "learning_rate": 0.0001, + "loss": 0.0675, + "step": 796 + }, + { + "epoch": 0.039812178430491035, + "grad_norm": 0.49101611971855164, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 797 + }, + { + "epoch": 0.03986213097557321, + "grad_norm": 0.5362517833709717, + "learning_rate": 0.0001, + "loss": 0.0099, + "step": 798 + }, + { + "epoch": 0.039912083520655374, + "grad_norm": 0.49907025694847107, + "learning_rate": 0.0001, + "loss": 0.0284, + "step": 799 + }, + { + "epoch": 0.03996203606573755, + "grad_norm": 0.5964041948318481, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 800 + }, + { + "epoch": 0.04001198861081972, + "grad_norm": 0.6420426964759827, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 801 + }, + { + "epoch": 0.04006194115590189, + "grad_norm": 0.613911509513855, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 802 + }, + { + "epoch": 0.040111893700984065, + "grad_norm": 0.42736202478408813, + "learning_rate": 0.0001, + "loss": 0.0109, + "step": 803 + }, + { + "epoch": 0.04016184624606624, + "grad_norm": 0.5389325618743896, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 804 + }, + { + "epoch": 0.04021179879114841, + "grad_norm": 0.5486679673194885, + "learning_rate": 0.0001, + "loss": 0.1797, + "step": 805 + }, + { + "epoch": 0.04026175133623058, + "grad_norm": 0.45768478512763977, + "learning_rate": 0.0001, + "loss": 0.0662, + "step": 806 + }, + { + "epoch": 0.040311703881312756, + "grad_norm": 0.3509109318256378, + "learning_rate": 0.0001, + "loss": 0.016, + "step": 807 + }, + { + "epoch": 0.04036165642639492, + "grad_norm": 0.4088302254676819, + "learning_rate": 0.0001, + "loss": 0.026, + "step": 808 + }, + { + "epoch": 0.040411608971477095, + "grad_norm": 0.391631156206131, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 809 + }, + { + "epoch": 0.04046156151655927, + "grad_norm": 0.41680458188056946, + "learning_rate": 0.0001, + "loss": 0.0127, + "step": 810 + }, + { + "epoch": 0.04051151406164144, + "grad_norm": 0.37573006749153137, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 811 + }, + { + "epoch": 0.04056146660672361, + "grad_norm": 0.2866998314857483, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 812 + }, + { + "epoch": 0.040611419151805786, + "grad_norm": 0.35932594537734985, + "learning_rate": 0.0001, + "loss": 0.0201, + "step": 813 + }, + { + "epoch": 0.04066137169688796, + "grad_norm": 0.35448533296585083, + "learning_rate": 0.0001, + "loss": 0.0258, + "step": 814 + }, + { + "epoch": 0.04071132424197013, + "grad_norm": 0.36502549052238464, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 815 + }, + { + "epoch": 0.0407612767870523, + "grad_norm": 0.3661327362060547, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 816 + }, + { + "epoch": 0.04081122933213447, + "grad_norm": 0.3312963545322418, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 817 + }, + { + "epoch": 0.04086118187721664, + "grad_norm": 0.37260735034942627, + "learning_rate": 0.0001, + "loss": 0.0192, + "step": 818 + }, + { + "epoch": 0.040911134422298816, + "grad_norm": 0.3425200283527374, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 819 + }, + { + "epoch": 0.04096108696738099, + "grad_norm": 0.39418330788612366, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 820 + }, + { + "epoch": 0.04101103951246316, + "grad_norm": 0.3751195967197418, + "learning_rate": 0.0001, + 
"loss": 0.0192, + "step": 821 + }, + { + "epoch": 0.041060992057545334, + "grad_norm": 0.34772351384162903, + "learning_rate": 0.0001, + "loss": 0.0196, + "step": 822 + }, + { + "epoch": 0.04111094460262751, + "grad_norm": 0.32184335589408875, + "learning_rate": 0.0001, + "loss": 0.0251, + "step": 823 + }, + { + "epoch": 0.04116089714770967, + "grad_norm": 0.36641982197761536, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 824 + }, + { + "epoch": 0.041210849692791846, + "grad_norm": 0.33575764298439026, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 825 + }, + { + "epoch": 0.04126080223787402, + "grad_norm": 0.34376055002212524, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 826 + }, + { + "epoch": 0.04131075478295619, + "grad_norm": 0.3266841471195221, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 827 + }, + { + "epoch": 0.041360707328038364, + "grad_norm": 0.4060051143169403, + "learning_rate": 0.0001, + "loss": 0.0827, + "step": 828 + }, + { + "epoch": 0.04141065987312054, + "grad_norm": 0.3683590292930603, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 829 + }, + { + "epoch": 0.04146061241820271, + "grad_norm": 0.5374011993408203, + "learning_rate": 0.0001, + "loss": 0.0184, + "step": 830 + }, + { + "epoch": 0.04151056496328488, + "grad_norm": 0.33828333020210266, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 831 + }, + { + "epoch": 0.04156051750836705, + "grad_norm": 0.4139735698699951, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 832 + }, + { + "epoch": 0.04161047005344922, + "grad_norm": 0.35170304775238037, + "learning_rate": 0.0001, + "loss": 0.02, + "step": 833 + }, + { + "epoch": 0.041660422598531394, + "grad_norm": 0.3221001923084259, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 834 + }, + { + "epoch": 0.041710375143613566, + "grad_norm": 0.3325352370738983, + "learning_rate": 0.0001, + "loss": 0.0258, + "step": 835 + }, + { + "epoch": 0.04176032768869574, + "grad_norm": 0.40513527393341064, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 836 + }, + { + "epoch": 0.04181028023377791, + "grad_norm": 0.3408893644809723, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 837 + }, + { + "epoch": 0.041860232778860085, + "grad_norm": 0.334218829870224, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 838 + }, + { + "epoch": 0.04191018532394226, + "grad_norm": 0.3165181279182434, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 839 + }, + { + "epoch": 0.04196013786902442, + "grad_norm": 0.28257524967193604, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 840 + }, + { + "epoch": 0.042010090414106596, + "grad_norm": 0.24890895187854767, + "learning_rate": 0.0001, + "loss": 0.0109, + "step": 841 + }, + { + "epoch": 0.04206004295918877, + "grad_norm": 0.29911595582962036, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 842 + }, + { + "epoch": 0.04210999550427094, + "grad_norm": 0.2573944628238678, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 843 + }, + { + "epoch": 0.042159948049353115, + "grad_norm": 0.27792564034461975, + "learning_rate": 0.0001, + "loss": 0.041, + "step": 844 + }, + { + "epoch": 0.04220990059443529, + "grad_norm": 0.314260333776474, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 845 + }, + { + "epoch": 0.04225985313951746, + "grad_norm": 0.2994958162307739, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 846 + }, + { + "epoch": 0.04230980568459963, + "grad_norm": 0.24238307774066925, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 847 + 
}, + { + "epoch": 0.042359758229681806, + "grad_norm": 0.20887108147144318, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 848 + }, + { + "epoch": 0.04240971077476397, + "grad_norm": 0.29376059770584106, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 849 + }, + { + "epoch": 0.042459663319846144, + "grad_norm": 0.35171881318092346, + "learning_rate": 0.0001, + "loss": 0.1367, + "step": 850 + }, + { + "epoch": 0.04250961586492832, + "grad_norm": 0.18763038516044617, + "learning_rate": 0.0001, + "loss": 0.0099, + "step": 851 + }, + { + "epoch": 0.04255956841001049, + "grad_norm": 0.2369695007801056, + "learning_rate": 0.0001, + "loss": 0.0173, + "step": 852 + }, + { + "epoch": 0.04260952095509266, + "grad_norm": 0.3535175919532776, + "learning_rate": 0.0001, + "loss": 0.016, + "step": 853 + }, + { + "epoch": 0.042659473500174835, + "grad_norm": 0.31211549043655396, + "learning_rate": 0.0001, + "loss": 0.0199, + "step": 854 + }, + { + "epoch": 0.04270942604525701, + "grad_norm": 0.2684563994407654, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 855 + }, + { + "epoch": 0.04275937859033918, + "grad_norm": 0.37058594822883606, + "learning_rate": 0.0001, + "loss": 0.0371, + "step": 856 + }, + { + "epoch": 0.04280933113542135, + "grad_norm": 0.3474418520927429, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 857 + }, + { + "epoch": 0.04285928368050352, + "grad_norm": 0.3903052806854248, + "learning_rate": 0.0001, + "loss": 0.2631, + "step": 858 + }, + { + "epoch": 0.04290923622558569, + "grad_norm": 0.3230353593826294, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 859 + }, + { + "epoch": 0.042959188770667865, + "grad_norm": 0.24226965010166168, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 860 + }, + { + "epoch": 0.04300914131575004, + "grad_norm": 0.48063191771507263, + "learning_rate": 0.0001, + "loss": 0.0393, + "step": 861 + }, + { + "epoch": 0.04305909386083221, + "grad_norm": 0.2675555348396301, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 862 + }, + { + "epoch": 0.043109046405914384, + "grad_norm": 0.341242253780365, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 863 + }, + { + "epoch": 0.043158998950996556, + "grad_norm": 0.46038225293159485, + "learning_rate": 0.0001, + "loss": 0.0514, + "step": 864 + }, + { + "epoch": 0.04320895149607872, + "grad_norm": 0.2755572497844696, + "learning_rate": 0.0001, + "loss": 0.0308, + "step": 865 + }, + { + "epoch": 0.043258904041160895, + "grad_norm": 0.35874801874160767, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 866 + }, + { + "epoch": 0.04330885658624307, + "grad_norm": 0.3286133110523224, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 867 + }, + { + "epoch": 0.04335880913132524, + "grad_norm": 0.5850495100021362, + "learning_rate": 0.0001, + "loss": 0.0468, + "step": 868 + }, + { + "epoch": 0.04340876167640741, + "grad_norm": 0.3182699680328369, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 869 + }, + { + "epoch": 0.043458714221489586, + "grad_norm": 0.41328972578048706, + "learning_rate": 0.0001, + "loss": 0.0237, + "step": 870 + }, + { + "epoch": 0.04350866676657176, + "grad_norm": 0.3828691840171814, + "learning_rate": 0.0001, + "loss": 0.0173, + "step": 871 + }, + { + "epoch": 0.04355861931165393, + "grad_norm": 0.3480788469314575, + "learning_rate": 0.0001, + "loss": 0.0149, + "step": 872 + }, + { + "epoch": 0.0436085718567361, + "grad_norm": 0.4047306180000305, + "learning_rate": 0.0001, + "loss": 0.0606, + "step": 873 + }, + { + "epoch": 
0.04365852440181827, + "grad_norm": 0.45055991411209106, + "learning_rate": 0.0001, + "loss": 0.0494, + "step": 874 + }, + { + "epoch": 0.04370847694690044, + "grad_norm": 0.42277759313583374, + "learning_rate": 0.0001, + "loss": 0.0493, + "step": 875 + }, + { + "epoch": 0.043758429491982616, + "grad_norm": 0.5435769557952881, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 876 + }, + { + "epoch": 0.04380838203706479, + "grad_norm": 0.46059709787368774, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 877 + }, + { + "epoch": 0.04385833458214696, + "grad_norm": 0.3494422435760498, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 878 + }, + { + "epoch": 0.043908287127229134, + "grad_norm": 0.573963463306427, + "learning_rate": 0.0001, + "loss": 0.064, + "step": 879 + }, + { + "epoch": 0.04395823967231131, + "grad_norm": 0.4448268711566925, + "learning_rate": 0.0001, + "loss": 0.0233, + "step": 880 + }, + { + "epoch": 0.04400819221739347, + "grad_norm": 0.3889966905117035, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 881 + }, + { + "epoch": 0.044058144762475646, + "grad_norm": 0.3613930344581604, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 882 + }, + { + "epoch": 0.04410809730755782, + "grad_norm": 0.6484033465385437, + "learning_rate": 0.0001, + "loss": 0.0832, + "step": 883 + }, + { + "epoch": 0.04415804985263999, + "grad_norm": 0.4561299681663513, + "learning_rate": 0.0001, + "loss": 0.0388, + "step": 884 + }, + { + "epoch": 0.044208002397722164, + "grad_norm": 0.5400488972663879, + "learning_rate": 0.0001, + "loss": 0.0239, + "step": 885 + }, + { + "epoch": 0.04425795494280434, + "grad_norm": 0.4084000587463379, + "learning_rate": 0.0001, + "loss": 0.0239, + "step": 886 + }, + { + "epoch": 0.04430790748788651, + "grad_norm": 0.4647136330604553, + "learning_rate": 0.0001, + "loss": 0.0612, + "step": 887 + }, + { + "epoch": 0.04435786003296868, + "grad_norm": 0.3978618383407593, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 888 + }, + { + "epoch": 0.044407812578050855, + "grad_norm": 0.4453149735927582, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 889 + }, + { + "epoch": 0.04445776512313302, + "grad_norm": 0.42756351828575134, + "learning_rate": 0.0001, + "loss": 0.0348, + "step": 890 + }, + { + "epoch": 0.044507717668215194, + "grad_norm": 0.33696773648262024, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 891 + }, + { + "epoch": 0.044557670213297366, + "grad_norm": 0.41081732511520386, + "learning_rate": 0.0001, + "loss": 0.0132, + "step": 892 + }, + { + "epoch": 0.04460762275837954, + "grad_norm": 0.3461061716079712, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 893 + }, + { + "epoch": 0.04465757530346171, + "grad_norm": 0.3064468801021576, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 894 + }, + { + "epoch": 0.044707527848543885, + "grad_norm": 0.3778267800807953, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 895 + }, + { + "epoch": 0.04475748039362606, + "grad_norm": 0.29984205961227417, + "learning_rate": 0.0001, + "loss": 0.0182, + "step": 896 + }, + { + "epoch": 0.04480743293870823, + "grad_norm": 0.32381337881088257, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 897 + }, + { + "epoch": 0.044857385483790396, + "grad_norm": 0.3549831211566925, + "learning_rate": 0.0001, + "loss": 0.0669, + "step": 898 + }, + { + "epoch": 0.04490733802887257, + "grad_norm": 0.35116255283355713, + "learning_rate": 0.0001, + "loss": 0.0441, + "step": 899 + }, + { + "epoch": 0.04495729057395474, + 
"grad_norm": 0.41823095083236694, + "learning_rate": 0.0001, + "loss": 0.1419, + "step": 900 + }, + { + "epoch": 0.045007243119036915, + "grad_norm": 0.35655444860458374, + "learning_rate": 0.0001, + "loss": 0.0179, + "step": 901 + }, + { + "epoch": 0.04505719566411909, + "grad_norm": 0.31306901574134827, + "learning_rate": 0.0001, + "loss": 0.0153, + "step": 902 + }, + { + "epoch": 0.04510714820920126, + "grad_norm": 0.33130690455436707, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 903 + }, + { + "epoch": 0.04515710075428343, + "grad_norm": 0.3968793749809265, + "learning_rate": 0.0001, + "loss": 0.1567, + "step": 904 + }, + { + "epoch": 0.045207053299365606, + "grad_norm": 0.3662184774875641, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 905 + }, + { + "epoch": 0.04525700584444777, + "grad_norm": 0.312427818775177, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 906 + }, + { + "epoch": 0.045306958389529944, + "grad_norm": 0.30178388953208923, + "learning_rate": 0.0001, + "loss": 0.0183, + "step": 907 + }, + { + "epoch": 0.04535691093461212, + "grad_norm": 0.4048222005367279, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 908 + }, + { + "epoch": 0.04540686347969429, + "grad_norm": 0.3232572078704834, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 909 + }, + { + "epoch": 0.04545681602477646, + "grad_norm": 0.25216954946517944, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 910 + }, + { + "epoch": 0.045506768569858635, + "grad_norm": 0.29953986406326294, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 911 + }, + { + "epoch": 0.04555672111494081, + "grad_norm": 0.32921069860458374, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 912 + }, + { + "epoch": 0.04560667366002298, + "grad_norm": 0.3266013562679291, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 913 + }, + { + "epoch": 0.04565662620510515, + "grad_norm": 0.2844143509864807, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 914 + }, + { + "epoch": 0.04570657875018732, + "grad_norm": 0.3761591911315918, + "learning_rate": 0.0001, + "loss": 0.1951, + "step": 915 + }, + { + "epoch": 0.04575653129526949, + "grad_norm": 0.28319886326789856, + "learning_rate": 0.0001, + "loss": 0.1332, + "step": 916 + }, + { + "epoch": 0.045806483840351665, + "grad_norm": 0.3386899530887604, + "learning_rate": 0.0001, + "loss": 0.0085, + "step": 917 + }, + { + "epoch": 0.04585643638543384, + "grad_norm": 0.25884193181991577, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 918 + }, + { + "epoch": 0.04590638893051601, + "grad_norm": 0.25797784328460693, + "learning_rate": 0.0001, + "loss": 0.0217, + "step": 919 + }, + { + "epoch": 0.045956341475598184, + "grad_norm": 0.33485135436058044, + "learning_rate": 0.0001, + "loss": 0.0191, + "step": 920 + }, + { + "epoch": 0.046006294020680356, + "grad_norm": 0.2547144889831543, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 921 + }, + { + "epoch": 0.04605624656576252, + "grad_norm": 0.2663048505783081, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 922 + }, + { + "epoch": 0.046106199110844695, + "grad_norm": 0.3094973564147949, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 923 + }, + { + "epoch": 0.04615615165592687, + "grad_norm": 0.2547912299633026, + "learning_rate": 0.0001, + "loss": 0.1312, + "step": 924 + }, + { + "epoch": 0.04620610420100904, + "grad_norm": 0.25040969252586365, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 925 + }, + { + "epoch": 0.04625605674609121, + "grad_norm": 
0.28820860385894775, + "learning_rate": 0.0001, + "loss": 0.0189, + "step": 926 + }, + { + "epoch": 0.046306009291173386, + "grad_norm": 0.2364806830883026, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 927 + }, + { + "epoch": 0.04635596183625556, + "grad_norm": 0.3410743474960327, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 928 + }, + { + "epoch": 0.04640591438133773, + "grad_norm": 0.36243829131126404, + "learning_rate": 0.0001, + "loss": 0.0439, + "step": 929 + }, + { + "epoch": 0.046455866926419905, + "grad_norm": 0.2274184674024582, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 930 + }, + { + "epoch": 0.04650581947150207, + "grad_norm": 0.26222071051597595, + "learning_rate": 0.0001, + "loss": 0.0128, + "step": 931 + }, + { + "epoch": 0.04655577201658424, + "grad_norm": 0.27094554901123047, + "learning_rate": 0.0001, + "loss": 0.1344, + "step": 932 + }, + { + "epoch": 0.046605724561666416, + "grad_norm": 0.35851073265075684, + "learning_rate": 0.0001, + "loss": 0.1254, + "step": 933 + }, + { + "epoch": 0.04665567710674859, + "grad_norm": 0.2782266438007355, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 934 + }, + { + "epoch": 0.04670562965183076, + "grad_norm": 0.3525797724723816, + "learning_rate": 0.0001, + "loss": 0.0461, + "step": 935 + }, + { + "epoch": 0.046755582196912934, + "grad_norm": 0.29501715302467346, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 936 + }, + { + "epoch": 0.04680553474199511, + "grad_norm": 0.4124155342578888, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 937 + }, + { + "epoch": 0.04685548728707728, + "grad_norm": 0.610251247882843, + "learning_rate": 0.0001, + "loss": 0.1012, + "step": 938 + }, + { + "epoch": 0.046905439832159446, + "grad_norm": 0.27388542890548706, + "learning_rate": 0.0001, + "loss": 0.1319, + "step": 939 + }, + { + "epoch": 0.04695539237724162, + "grad_norm": 0.4093243181705475, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 940 + }, + { + "epoch": 0.04700534492232379, + "grad_norm": 0.579296886920929, + "learning_rate": 0.0001, + "loss": 0.0689, + "step": 941 + }, + { + "epoch": 0.047055297467405964, + "grad_norm": 0.5415114760398865, + "learning_rate": 0.0001, + "loss": 0.0866, + "step": 942 + }, + { + "epoch": 0.04710525001248814, + "grad_norm": 0.6088832020759583, + "learning_rate": 0.0001, + "loss": 0.0122, + "step": 943 + }, + { + "epoch": 0.04715520255757031, + "grad_norm": 0.42974334955215454, + "learning_rate": 0.0001, + "loss": 0.0538, + "step": 944 + }, + { + "epoch": 0.04720515510265248, + "grad_norm": 0.47884291410446167, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 945 + }, + { + "epoch": 0.047255107647734655, + "grad_norm": 0.5531324744224548, + "learning_rate": 0.0001, + "loss": 0.0436, + "step": 946 + }, + { + "epoch": 0.04730506019281682, + "grad_norm": 0.48640167713165283, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 947 + }, + { + "epoch": 0.047355012737898994, + "grad_norm": 0.3764100670814514, + "learning_rate": 0.0001, + "loss": 0.0328, + "step": 948 + }, + { + "epoch": 0.04740496528298117, + "grad_norm": 0.391716867685318, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 949 + }, + { + "epoch": 0.04745491782806334, + "grad_norm": 0.45609843730926514, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 950 + }, + { + "epoch": 0.04750487037314551, + "grad_norm": 0.4530395269393921, + "learning_rate": 0.0001, + "loss": 0.0824, + "step": 951 + }, + { + "epoch": 0.047554822918227685, + "grad_norm": 0.3733142614364624, + 
"learning_rate": 0.0001, + "loss": 0.0327, + "step": 952 + }, + { + "epoch": 0.04760477546330986, + "grad_norm": 0.4620816707611084, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 953 + }, + { + "epoch": 0.04765472800839203, + "grad_norm": 0.3944992423057556, + "learning_rate": 0.0001, + "loss": 0.1314, + "step": 954 + }, + { + "epoch": 0.047704680553474196, + "grad_norm": 0.3812400996685028, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 955 + }, + { + "epoch": 0.04775463309855637, + "grad_norm": 0.4378809630870819, + "learning_rate": 0.0001, + "loss": 0.0335, + "step": 956 + }, + { + "epoch": 0.04780458564363854, + "grad_norm": 0.34537050127983093, + "learning_rate": 0.0001, + "loss": 0.1311, + "step": 957 + }, + { + "epoch": 0.047854538188720715, + "grad_norm": 0.3831755220890045, + "learning_rate": 0.0001, + "loss": 0.1534, + "step": 958 + }, + { + "epoch": 0.04790449073380289, + "grad_norm": 0.38763120770454407, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 959 + }, + { + "epoch": 0.04795444327888506, + "grad_norm": 0.3386530876159668, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 960 + }, + { + "epoch": 0.04800439582396723, + "grad_norm": 0.4324919879436493, + "learning_rate": 0.0001, + "loss": 0.0109, + "step": 961 + }, + { + "epoch": 0.048054348369049406, + "grad_norm": 0.37977129220962524, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 962 + }, + { + "epoch": 0.04810430091413157, + "grad_norm": 0.3761971592903137, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 963 + }, + { + "epoch": 0.048154253459213744, + "grad_norm": 0.35537853837013245, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 964 + }, + { + "epoch": 0.04820420600429592, + "grad_norm": 0.3714536428451538, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 965 + }, + { + "epoch": 0.04825415854937809, + "grad_norm": 0.38017410039901733, + "learning_rate": 0.0001, + "loss": 0.1325, + "step": 966 + }, + { + "epoch": 0.04830411109446026, + "grad_norm": 0.32011470198631287, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 967 + }, + { + "epoch": 0.048354063639542436, + "grad_norm": 0.30692657828330994, + "learning_rate": 0.0001, + "loss": 0.1485, + "step": 968 + }, + { + "epoch": 0.04840401618462461, + "grad_norm": 0.39544931054115295, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 969 + }, + { + "epoch": 0.04845396872970678, + "grad_norm": 0.3729303479194641, + "learning_rate": 0.0001, + "loss": 0.1317, + "step": 970 + }, + { + "epoch": 0.048503921274788954, + "grad_norm": 0.39142343401908875, + "learning_rate": 0.0001, + "loss": 0.0339, + "step": 971 + }, + { + "epoch": 0.04855387381987112, + "grad_norm": 0.30242568254470825, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 972 + }, + { + "epoch": 0.04860382636495329, + "grad_norm": 0.45217272639274597, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 973 + }, + { + "epoch": 0.048653778910035465, + "grad_norm": 0.30543848872184753, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 974 + }, + { + "epoch": 0.04870373145511764, + "grad_norm": 0.2618945837020874, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 975 + }, + { + "epoch": 0.04875368400019981, + "grad_norm": 0.33967751264572144, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 976 + }, + { + "epoch": 0.048803636545281984, + "grad_norm": 0.41331231594085693, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 977 + }, + { + "epoch": 0.048853589090364156, + "grad_norm": 0.2781030833721161, + "learning_rate": 
0.0001, + "loss": 0.0036, + "step": 978 + }, + { + "epoch": 0.04890354163544633, + "grad_norm": 0.30564355850219727, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 979 + }, + { + "epoch": 0.048953494180528495, + "grad_norm": 0.3424684703350067, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 980 + }, + { + "epoch": 0.04900344672561067, + "grad_norm": 0.2737547755241394, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 981 + }, + { + "epoch": 0.04905339927069284, + "grad_norm": 0.2230074256658554, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 982 + }, + { + "epoch": 0.04910335181577501, + "grad_norm": 0.32913318276405334, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 983 + }, + { + "epoch": 0.049153304360857186, + "grad_norm": 0.2610573470592499, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 984 + }, + { + "epoch": 0.04920325690593936, + "grad_norm": 0.3222615420818329, + "learning_rate": 0.0001, + "loss": 0.0127, + "step": 985 + }, + { + "epoch": 0.04925320945102153, + "grad_norm": 0.2688504159450531, + "learning_rate": 0.0001, + "loss": 0.1312, + "step": 986 + }, + { + "epoch": 0.049303161996103705, + "grad_norm": 0.3115527629852295, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 987 + }, + { + "epoch": 0.04935311454118587, + "grad_norm": 0.33839547634124756, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 988 + }, + { + "epoch": 0.04940306708626804, + "grad_norm": 0.23810319602489471, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 989 + }, + { + "epoch": 0.049453019631350216, + "grad_norm": 0.2639316916465759, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 990 + }, + { + "epoch": 0.04950297217643239, + "grad_norm": 0.30230003595352173, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 991 + }, + { + "epoch": 0.04955292472151456, + "grad_norm": 0.20271383225917816, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 992 + }, + { + "epoch": 0.049602877266596734, + "grad_norm": 0.5029716491699219, + "learning_rate": 0.0001, + "loss": 0.0315, + "step": 993 + }, + { + "epoch": 0.04965282981167891, + "grad_norm": 0.3128684461116791, + "learning_rate": 0.0001, + "loss": 0.129, + "step": 994 + }, + { + "epoch": 0.04970278235676108, + "grad_norm": 0.5379160046577454, + "learning_rate": 0.0001, + "loss": 0.0207, + "step": 995 + }, + { + "epoch": 0.049752734901843246, + "grad_norm": 0.7535272240638733, + "learning_rate": 0.0001, + "loss": 0.0562, + "step": 996 + }, + { + "epoch": 0.04980268744692542, + "grad_norm": 0.6410498023033142, + "learning_rate": 0.0001, + "loss": 0.0499, + "step": 997 + }, + { + "epoch": 0.04985263999200759, + "grad_norm": 0.8953402042388916, + "learning_rate": 0.0001, + "loss": 0.0687, + "step": 998 + }, + { + "epoch": 0.049902592537089764, + "grad_norm": 0.5577667355537415, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 999 + }, + { + "epoch": 0.04995254508217194, + "grad_norm": 0.9724322557449341, + "learning_rate": 0.0001, + "loss": 0.1085, + "step": 1000 + }, + { + "epoch": 0.05000249762725411, + "grad_norm": 0.60840904712677, + "learning_rate": 0.0001, + "loss": 0.0231, + "step": 1001 + }, + { + "epoch": 0.05005245017233628, + "grad_norm": 0.7227354049682617, + "learning_rate": 0.0001, + "loss": 0.0538, + "step": 1002 + }, + { + "epoch": 0.050102402717418455, + "grad_norm": 0.6432965397834778, + "learning_rate": 0.0001, + "loss": 0.0324, + "step": 1003 + }, + { + "epoch": 0.05015235526250062, + "grad_norm": 0.5514644384384155, + "learning_rate": 0.0001, + "loss": 0.0056, + 
"step": 1004 + }, + { + "epoch": 0.050202307807582794, + "grad_norm": 0.4805179834365845, + "learning_rate": 0.0001, + "loss": 0.0242, + "step": 1005 + }, + { + "epoch": 0.05025226035266497, + "grad_norm": 0.5464047193527222, + "learning_rate": 0.0001, + "loss": 0.0526, + "step": 1006 + }, + { + "epoch": 0.05030221289774714, + "grad_norm": 0.49062442779541016, + "learning_rate": 0.0001, + "loss": 0.1357, + "step": 1007 + }, + { + "epoch": 0.05035216544282931, + "grad_norm": 0.39908483624458313, + "learning_rate": 0.0001, + "loss": 0.0223, + "step": 1008 + }, + { + "epoch": 0.050402117987911485, + "grad_norm": 0.46128636598587036, + "learning_rate": 0.0001, + "loss": 0.0701, + "step": 1009 + }, + { + "epoch": 0.05045207053299366, + "grad_norm": 0.36294570565223694, + "learning_rate": 0.0001, + "loss": 0.1411, + "step": 1010 + }, + { + "epoch": 0.05050202307807583, + "grad_norm": 0.4780181646347046, + "learning_rate": 0.0001, + "loss": 0.0098, + "step": 1011 + }, + { + "epoch": 0.050551975623158, + "grad_norm": 0.42728662490844727, + "learning_rate": 0.0001, + "loss": 0.1436, + "step": 1012 + }, + { + "epoch": 0.05060192816824017, + "grad_norm": 0.334293007850647, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 1013 + }, + { + "epoch": 0.05065188071332234, + "grad_norm": 0.498684287071228, + "learning_rate": 0.0001, + "loss": 0.0876, + "step": 1014 + }, + { + "epoch": 0.050701833258404515, + "grad_norm": 0.4742923378944397, + "learning_rate": 0.0001, + "loss": 0.1368, + "step": 1015 + }, + { + "epoch": 0.05075178580348669, + "grad_norm": 0.4373934864997864, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 1016 + }, + { + "epoch": 0.05080173834856886, + "grad_norm": 0.34460192918777466, + "learning_rate": 0.0001, + "loss": 0.027, + "step": 1017 + }, + { + "epoch": 0.05085169089365103, + "grad_norm": 0.49303239583969116, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 1018 + }, + { + "epoch": 0.050901643438733206, + "grad_norm": 0.40387728810310364, + "learning_rate": 0.0001, + "loss": 0.1345, + "step": 1019 + }, + { + "epoch": 0.05095159598381538, + "grad_norm": 0.442847341299057, + "learning_rate": 0.0001, + "loss": 0.0747, + "step": 1020 + }, + { + "epoch": 0.051001548528897545, + "grad_norm": 0.29956358671188354, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1021 + }, + { + "epoch": 0.05105150107397972, + "grad_norm": 0.43899235129356384, + "learning_rate": 0.0001, + "loss": 0.0137, + "step": 1022 + }, + { + "epoch": 0.05110145361906189, + "grad_norm": 0.39160943031311035, + "learning_rate": 0.0001, + "loss": 0.0744, + "step": 1023 + }, + { + "epoch": 0.05115140616414406, + "grad_norm": 0.4648124873638153, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 1024 + }, + { + "epoch": 0.051201358709226236, + "grad_norm": 0.27889007329940796, + "learning_rate": 0.0001, + "loss": 0.0702, + "step": 1025 + }, + { + "epoch": 0.05125131125430841, + "grad_norm": 0.3246701657772064, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1026 + }, + { + "epoch": 0.05130126379939058, + "grad_norm": 0.43406105041503906, + "learning_rate": 0.0001, + "loss": 0.0197, + "step": 1027 + }, + { + "epoch": 0.051351216344472754, + "grad_norm": 0.3338588774204254, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1028 + }, + { + "epoch": 0.05140116888955492, + "grad_norm": 0.25960472226142883, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1029 + }, + { + "epoch": 0.05145112143463709, + "grad_norm": 0.30789193511009216, + "learning_rate": 0.0001, + "loss": 0.0053, + 
"step": 1030 + }, + { + "epoch": 0.051501073979719265, + "grad_norm": 0.26323649287223816, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1031 + }, + { + "epoch": 0.05155102652480144, + "grad_norm": 0.21558015048503876, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1032 + }, + { + "epoch": 0.05160097906988361, + "grad_norm": 0.2800910770893097, + "learning_rate": 0.0001, + "loss": 0.13, + "step": 1033 + }, + { + "epoch": 0.051650931614965784, + "grad_norm": 0.2746593952178955, + "learning_rate": 0.0001, + "loss": 0.0226, + "step": 1034 + }, + { + "epoch": 0.05170088416004796, + "grad_norm": 0.24477647244930267, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 1035 + }, + { + "epoch": 0.05175083670513013, + "grad_norm": 0.23185820877552032, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 1036 + }, + { + "epoch": 0.051800789250212295, + "grad_norm": 0.23798510432243347, + "learning_rate": 0.0001, + "loss": 0.0122, + "step": 1037 + }, + { + "epoch": 0.05185074179529447, + "grad_norm": 0.2918018698692322, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 1038 + }, + { + "epoch": 0.05190069434037664, + "grad_norm": 0.5147783160209656, + "learning_rate": 0.0001, + "loss": 0.0826, + "step": 1039 + }, + { + "epoch": 0.051950646885458814, + "grad_norm": 0.38892868161201477, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 1040 + }, + { + "epoch": 0.052000599430540986, + "grad_norm": 0.5072981715202332, + "learning_rate": 0.0001, + "loss": 0.0235, + "step": 1041 + }, + { + "epoch": 0.05205055197562316, + "grad_norm": 0.266751229763031, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 1042 + }, + { + "epoch": 0.05210050452070533, + "grad_norm": 0.4530844986438751, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 1043 + }, + { + "epoch": 0.052150457065787505, + "grad_norm": 0.44961991906166077, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 1044 + }, + { + "epoch": 0.05220040961086967, + "grad_norm": 0.41019633412361145, + "learning_rate": 0.0001, + "loss": 0.286, + "step": 1045 + }, + { + "epoch": 0.05225036215595184, + "grad_norm": 0.32047519087791443, + "learning_rate": 0.0001, + "loss": 0.0146, + "step": 1046 + }, + { + "epoch": 0.052300314701034016, + "grad_norm": 0.37765684723854065, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 1047 + }, + { + "epoch": 0.05235026724611619, + "grad_norm": 0.3826135993003845, + "learning_rate": 0.0001, + "loss": 0.1343, + "step": 1048 + }, + { + "epoch": 0.05240021979119836, + "grad_norm": 0.4503767490386963, + "learning_rate": 0.0001, + "loss": 0.0408, + "step": 1049 + }, + { + "epoch": 0.052450172336280534, + "grad_norm": 0.32600802183151245, + "learning_rate": 0.0001, + "loss": 0.034, + "step": 1050 + }, + { + "epoch": 0.05250012488136271, + "grad_norm": 0.40268534421920776, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 1051 + }, + { + "epoch": 0.05255007742644488, + "grad_norm": 0.32810622453689575, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 1052 + }, + { + "epoch": 0.052600029971527046, + "grad_norm": 0.2636660635471344, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 1053 + }, + { + "epoch": 0.05264998251660922, + "grad_norm": 0.2527497708797455, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 1054 + }, + { + "epoch": 0.05269993506169139, + "grad_norm": 0.26588112115859985, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1055 + }, + { + "epoch": 0.052749887606773564, + "grad_norm": 0.3688637912273407, + "learning_rate": 0.0001, + "loss": 0.0054, + 
"step": 1056 + }, + { + "epoch": 0.05279984015185574, + "grad_norm": 0.3358452320098877, + "learning_rate": 0.0001, + "loss": 0.0146, + "step": 1057 + }, + { + "epoch": 0.05284979269693791, + "grad_norm": 0.2504631578922272, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 1058 + }, + { + "epoch": 0.05289974524202008, + "grad_norm": 0.44300979375839233, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 1059 + }, + { + "epoch": 0.052949697787102255, + "grad_norm": 0.35865411162376404, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1060 + }, + { + "epoch": 0.05299965033218443, + "grad_norm": 0.26253941655158997, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 1061 + }, + { + "epoch": 0.053049602877266594, + "grad_norm": 0.26705214381217957, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1062 + }, + { + "epoch": 0.05309955542234877, + "grad_norm": 0.26279810070991516, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 1063 + }, + { + "epoch": 0.05314950796743094, + "grad_norm": 0.3182068169116974, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1064 + }, + { + "epoch": 0.05319946051251311, + "grad_norm": 0.2963709831237793, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1065 + }, + { + "epoch": 0.053249413057595285, + "grad_norm": 0.2178197056055069, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1066 + }, + { + "epoch": 0.05329936560267746, + "grad_norm": 0.3973004221916199, + "learning_rate": 0.0001, + "loss": 0.2586, + "step": 1067 + }, + { + "epoch": 0.05334931814775963, + "grad_norm": 0.36436110734939575, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1068 + }, + { + "epoch": 0.0533992706928418, + "grad_norm": 0.3788812458515167, + "learning_rate": 0.0001, + "loss": 0.0205, + "step": 1069 + }, + { + "epoch": 0.05344922323792397, + "grad_norm": 0.2796285152435303, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1070 + }, + { + "epoch": 0.05349917578300614, + "grad_norm": 0.32218489050865173, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1071 + }, + { + "epoch": 0.053549128328088315, + "grad_norm": 0.2701510190963745, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1072 + }, + { + "epoch": 0.05359908087317049, + "grad_norm": 0.2640988528728485, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1073 + }, + { + "epoch": 0.05364903341825266, + "grad_norm": 0.3201264441013336, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1074 + }, + { + "epoch": 0.05369898596333483, + "grad_norm": 0.2602851688861847, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 1075 + }, + { + "epoch": 0.053748938508417006, + "grad_norm": 0.3209029734134674, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 1076 + }, + { + "epoch": 0.05379889105349918, + "grad_norm": 0.2207593321800232, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1077 + }, + { + "epoch": 0.053848843598581345, + "grad_norm": 0.2562515139579773, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 1078 + }, + { + "epoch": 0.05389879614366352, + "grad_norm": 0.20473358035087585, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 1079 + }, + { + "epoch": 0.05394874868874569, + "grad_norm": 0.20663048326969147, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1080 + }, + { + "epoch": 0.05399870123382786, + "grad_norm": 0.34937745332717896, + "learning_rate": 0.0001, + "loss": 0.0217, + "step": 1081 + }, + { + "epoch": 0.054048653778910036, + "grad_norm": 0.19658634066581726, + "learning_rate": 0.0001, + "loss": 0.0032, + 
"step": 1082 + }, + { + "epoch": 0.05409860632399221, + "grad_norm": 0.23051045835018158, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1083 + }, + { + "epoch": 0.05414855886907438, + "grad_norm": 0.30962616205215454, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 1084 + }, + { + "epoch": 0.054198511414156554, + "grad_norm": 0.21499262750148773, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1085 + }, + { + "epoch": 0.05424846395923872, + "grad_norm": 0.2068566530942917, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1086 + }, + { + "epoch": 0.05429841650432089, + "grad_norm": 0.23070570826530457, + "learning_rate": 0.0001, + "loss": 0.019, + "step": 1087 + }, + { + "epoch": 0.054348369049403066, + "grad_norm": 0.27063292264938354, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1088 + }, + { + "epoch": 0.05439832159448524, + "grad_norm": 0.3593659996986389, + "learning_rate": 0.0001, + "loss": 0.1412, + "step": 1089 + }, + { + "epoch": 0.05444827413956741, + "grad_norm": 0.31281211972236633, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 1090 + }, + { + "epoch": 0.054498226684649584, + "grad_norm": 0.34327590465545654, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1091 + }, + { + "epoch": 0.05454817922973176, + "grad_norm": 0.28235727548599243, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 1092 + }, + { + "epoch": 0.05459813177481393, + "grad_norm": 0.4503314197063446, + "learning_rate": 0.0001, + "loss": 0.1647, + "step": 1093 + }, + { + "epoch": 0.054648084319896095, + "grad_norm": 0.43037697672843933, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1094 + }, + { + "epoch": 0.05469803686497827, + "grad_norm": 0.34267500042915344, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1095 + }, + { + "epoch": 0.05474798941006044, + "grad_norm": 0.3057301342487335, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 1096 + }, + { + "epoch": 0.054797941955142614, + "grad_norm": 0.2984454035758972, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1097 + }, + { + "epoch": 0.054847894500224786, + "grad_norm": 0.27010786533355713, + "learning_rate": 0.0001, + "loss": 0.017, + "step": 1098 + }, + { + "epoch": 0.05489784704530696, + "grad_norm": 0.31520000100135803, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1099 + }, + { + "epoch": 0.05494779959038913, + "grad_norm": 0.23701544106006622, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1100 + }, + { + "epoch": 0.054997752135471305, + "grad_norm": 0.31901246309280396, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1101 + }, + { + "epoch": 0.05504770468055348, + "grad_norm": 0.2743745446205139, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1102 + }, + { + "epoch": 0.05509765722563564, + "grad_norm": 0.3359304666519165, + "learning_rate": 0.0001, + "loss": 0.022, + "step": 1103 + }, + { + "epoch": 0.055147609770717816, + "grad_norm": 0.32580000162124634, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1104 + }, + { + "epoch": 0.05519756231579999, + "grad_norm": 0.34044548869132996, + "learning_rate": 0.0001, + "loss": 0.0651, + "step": 1105 + }, + { + "epoch": 0.05524751486088216, + "grad_norm": 0.2140771597623825, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 1106 + }, + { + "epoch": 0.055297467405964335, + "grad_norm": 0.276991605758667, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1107 + }, + { + "epoch": 0.05534741995104651, + "grad_norm": 0.3414885103702545, + "learning_rate": 0.0001, + "loss": 0.1302, 
+ "step": 1108 + }, + { + "epoch": 0.05539737249612868, + "grad_norm": 0.3104630410671234, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 1109 + }, + { + "epoch": 0.05544732504121085, + "grad_norm": 0.23717840015888214, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 1110 + }, + { + "epoch": 0.05549727758629302, + "grad_norm": 0.3518575429916382, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 1111 + }, + { + "epoch": 0.05554723013137519, + "grad_norm": 0.35873207449913025, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1112 + }, + { + "epoch": 0.055597182676457364, + "grad_norm": 0.2985159456729889, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 1113 + }, + { + "epoch": 0.05564713522153954, + "grad_norm": 0.4126880168914795, + "learning_rate": 0.0001, + "loss": 0.1333, + "step": 1114 + }, + { + "epoch": 0.05569708776662171, + "grad_norm": 0.37120380997657776, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1115 + }, + { + "epoch": 0.05574704031170388, + "grad_norm": 0.2966909408569336, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 1116 + }, + { + "epoch": 0.055796992856786055, + "grad_norm": 0.26306286454200745, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1117 + }, + { + "epoch": 0.05584694540186823, + "grad_norm": 0.35864728689193726, + "learning_rate": 0.0001, + "loss": 0.0351, + "step": 1118 + }, + { + "epoch": 0.055896897946950394, + "grad_norm": 0.3679833710193634, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 1119 + }, + { + "epoch": 0.05594685049203257, + "grad_norm": 0.3705662786960602, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1120 + }, + { + "epoch": 0.05599680303711474, + "grad_norm": 0.2895394563674927, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1121 + }, + { + "epoch": 0.05604675558219691, + "grad_norm": 0.3403322398662567, + "learning_rate": 0.0001, + "loss": 0.1287, + "step": 1122 + }, + { + "epoch": 0.056096708127279085, + "grad_norm": 0.34878337383270264, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 1123 + }, + { + "epoch": 0.05614666067236126, + "grad_norm": 0.35751307010650635, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 1124 + }, + { + "epoch": 0.05619661321744343, + "grad_norm": 0.3596067428588867, + "learning_rate": 0.0001, + "loss": 0.0155, + "step": 1125 + }, + { + "epoch": 0.056246565762525604, + "grad_norm": 0.337012380361557, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1126 + }, + { + "epoch": 0.05629651830760777, + "grad_norm": 0.45058122277259827, + "learning_rate": 0.0001, + "loss": 0.0236, + "step": 1127 + }, + { + "epoch": 0.05634647085268994, + "grad_norm": 0.4049590229988098, + "learning_rate": 0.0001, + "loss": 0.0259, + "step": 1128 + }, + { + "epoch": 0.056396423397772115, + "grad_norm": 0.38079845905303955, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1129 + }, + { + "epoch": 0.05644637594285429, + "grad_norm": 0.4088784456253052, + "learning_rate": 0.0001, + "loss": 0.0388, + "step": 1130 + }, + { + "epoch": 0.05649632848793646, + "grad_norm": 0.35677263140678406, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 1131 + }, + { + "epoch": 0.05654628103301863, + "grad_norm": 0.4154999852180481, + "learning_rate": 0.0001, + "loss": 0.0246, + "step": 1132 + }, + { + "epoch": 0.056596233578100806, + "grad_norm": 0.31602975726127625, + "learning_rate": 0.0001, + "loss": 0.0098, + "step": 1133 + }, + { + "epoch": 0.05664618612318298, + "grad_norm": 0.2950884997844696, + "learning_rate": 0.0001, + "loss": 0.0087, + 
"step": 1134 + }, + { + "epoch": 0.056696138668265145, + "grad_norm": 0.352402001619339, + "learning_rate": 0.0001, + "loss": 0.1352, + "step": 1135 + }, + { + "epoch": 0.05674609121334732, + "grad_norm": 0.3439008295536041, + "learning_rate": 0.0001, + "loss": 0.14, + "step": 1136 + }, + { + "epoch": 0.05679604375842949, + "grad_norm": 0.371886670589447, + "learning_rate": 0.0001, + "loss": 0.0357, + "step": 1137 + }, + { + "epoch": 0.05684599630351166, + "grad_norm": 0.2659211754798889, + "learning_rate": 0.0001, + "loss": 0.0158, + "step": 1138 + }, + { + "epoch": 0.056895948848593836, + "grad_norm": 0.3362864851951599, + "learning_rate": 0.0001, + "loss": 0.0122, + "step": 1139 + }, + { + "epoch": 0.05694590139367601, + "grad_norm": 0.36074477434158325, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 1140 + }, + { + "epoch": 0.05699585393875818, + "grad_norm": 0.3527892827987671, + "learning_rate": 0.0001, + "loss": 0.1362, + "step": 1141 + }, + { + "epoch": 0.057045806483840354, + "grad_norm": 0.23351247608661652, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1142 + }, + { + "epoch": 0.05709575902892253, + "grad_norm": 0.2477540671825409, + "learning_rate": 0.0001, + "loss": 0.0188, + "step": 1143 + }, + { + "epoch": 0.05714571157400469, + "grad_norm": 0.4527773857116699, + "learning_rate": 0.0001, + "loss": 0.3071, + "step": 1144 + }, + { + "epoch": 0.057195664119086866, + "grad_norm": 0.3745346665382385, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1145 + }, + { + "epoch": 0.05724561666416904, + "grad_norm": 0.2335844784975052, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1146 + }, + { + "epoch": 0.05729556920925121, + "grad_norm": 0.28230080008506775, + "learning_rate": 0.0001, + "loss": 0.0233, + "step": 1147 + }, + { + "epoch": 0.057345521754333384, + "grad_norm": 0.299710750579834, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 1148 + }, + { + "epoch": 0.05739547429941556, + "grad_norm": 0.3806709945201874, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 1149 + }, + { + "epoch": 0.05744542684449773, + "grad_norm": 0.2648477256298065, + "learning_rate": 0.0001, + "loss": 0.0115, + "step": 1150 + }, + { + "epoch": 0.0574953793895799, + "grad_norm": 0.25067147612571716, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 1151 + }, + { + "epoch": 0.05754533193466207, + "grad_norm": 0.4361799359321594, + "learning_rate": 0.0001, + "loss": 0.2839, + "step": 1152 + }, + { + "epoch": 0.05759528447974424, + "grad_norm": 0.3185410797595978, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1153 + }, + { + "epoch": 0.057645237024826414, + "grad_norm": 0.2404535710811615, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 1154 + }, + { + "epoch": 0.057695189569908586, + "grad_norm": 0.22883357107639313, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 1155 + }, + { + "epoch": 0.05774514211499076, + "grad_norm": 0.26109379529953003, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 1156 + }, + { + "epoch": 0.05779509466007293, + "grad_norm": 0.30225661396980286, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1157 + }, + { + "epoch": 0.057845047205155105, + "grad_norm": 0.21754688024520874, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1158 + }, + { + "epoch": 0.05789499975023728, + "grad_norm": 0.22746069729328156, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1159 + }, + { + "epoch": 0.05794495229531944, + "grad_norm": 0.2897047996520996, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 
1160 + }, + { + "epoch": 0.057994904840401616, + "grad_norm": 0.2930499017238617, + "learning_rate": 0.0001, + "loss": 0.0453, + "step": 1161 + }, + { + "epoch": 0.05804485738548379, + "grad_norm": 0.289414644241333, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1162 + }, + { + "epoch": 0.05809480993056596, + "grad_norm": 0.2670431435108185, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1163 + }, + { + "epoch": 0.058144762475648135, + "grad_norm": 0.3369733989238739, + "learning_rate": 0.0001, + "loss": 0.0308, + "step": 1164 + }, + { + "epoch": 0.05819471502073031, + "grad_norm": 0.31486502289772034, + "learning_rate": 0.0001, + "loss": 0.0346, + "step": 1165 + }, + { + "epoch": 0.05824466756581248, + "grad_norm": 0.317638635635376, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 1166 + }, + { + "epoch": 0.05829462011089465, + "grad_norm": 0.27396830916404724, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 1167 + }, + { + "epoch": 0.05834457265597682, + "grad_norm": 0.27850261330604553, + "learning_rate": 0.0001, + "loss": 0.1329, + "step": 1168 + }, + { + "epoch": 0.05839452520105899, + "grad_norm": 0.2861446738243103, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 1169 + }, + { + "epoch": 0.058444477746141164, + "grad_norm": 0.34333592653274536, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 1170 + }, + { + "epoch": 0.05849443029122334, + "grad_norm": 0.21390733122825623, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 1171 + }, + { + "epoch": 0.05854438283630551, + "grad_norm": 0.20266395807266235, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 1172 + }, + { + "epoch": 0.05859433538138768, + "grad_norm": 0.5118569731712341, + "learning_rate": 0.0001, + "loss": 0.2788, + "step": 1173 + }, + { + "epoch": 0.058644287926469855, + "grad_norm": 0.28741273283958435, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 1174 + }, + { + "epoch": 0.05869424047155203, + "grad_norm": 0.29746386408805847, + "learning_rate": 0.0001, + "loss": 0.0445, + "step": 1175 + }, + { + "epoch": 0.058744193016634194, + "grad_norm": 0.40574413537979126, + "learning_rate": 0.0001, + "loss": 0.1429, + "step": 1176 + }, + { + "epoch": 0.05879414556171637, + "grad_norm": 0.4021613895893097, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 1177 + }, + { + "epoch": 0.05884409810679854, + "grad_norm": 0.3780933916568756, + "learning_rate": 0.0001, + "loss": 0.0357, + "step": 1178 + }, + { + "epoch": 0.05889405065188071, + "grad_norm": 0.3801221549510956, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 1179 + }, + { + "epoch": 0.058944003196962885, + "grad_norm": 0.33212384581565857, + "learning_rate": 0.0001, + "loss": 0.1362, + "step": 1180 + }, + { + "epoch": 0.05899395574204506, + "grad_norm": 0.43602028489112854, + "learning_rate": 0.0001, + "loss": 0.0358, + "step": 1181 + }, + { + "epoch": 0.05904390828712723, + "grad_norm": 0.31489628553390503, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 1182 + }, + { + "epoch": 0.059093860832209404, + "grad_norm": 0.3229597508907318, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 1183 + }, + { + "epoch": 0.059143813377291576, + "grad_norm": 0.32100048661231995, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1184 + }, + { + "epoch": 0.05919376592237374, + "grad_norm": 0.3513807952404022, + "learning_rate": 0.0001, + "loss": 0.0399, + "step": 1185 + }, + { + "epoch": 0.059243718467455915, + "grad_norm": 0.3252510726451874, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 
1186 + }, + { + "epoch": 0.05929367101253809, + "grad_norm": 0.2827402353286743, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1187 + }, + { + "epoch": 0.05934362355762026, + "grad_norm": 0.4565281271934509, + "learning_rate": 0.0001, + "loss": 0.0564, + "step": 1188 + }, + { + "epoch": 0.05939357610270243, + "grad_norm": 0.3241160213947296, + "learning_rate": 0.0001, + "loss": 0.0219, + "step": 1189 + }, + { + "epoch": 0.059443528647784606, + "grad_norm": 0.42378419637680054, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 1190 + }, + { + "epoch": 0.05949348119286678, + "grad_norm": 0.4668373167514801, + "learning_rate": 0.0001, + "loss": 0.0441, + "step": 1191 + }, + { + "epoch": 0.05954343373794895, + "grad_norm": 0.2746042311191559, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 1192 + }, + { + "epoch": 0.05959338628303112, + "grad_norm": 0.2744123637676239, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1193 + }, + { + "epoch": 0.05964333882811329, + "grad_norm": 0.3283843696117401, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 1194 + }, + { + "epoch": 0.05969329137319546, + "grad_norm": 0.24948492646217346, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1195 + }, + { + "epoch": 0.059743243918277636, + "grad_norm": 0.4652748703956604, + "learning_rate": 0.0001, + "loss": 0.0434, + "step": 1196 + }, + { + "epoch": 0.05979319646335981, + "grad_norm": 1.1597517728805542, + "learning_rate": 0.0001, + "loss": 0.1109, + "step": 1197 + }, + { + "epoch": 0.05984314900844198, + "grad_norm": 0.510887622833252, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 1198 + }, + { + "epoch": 0.059893101553524154, + "grad_norm": 0.5368074178695679, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 1199 + }, + { + "epoch": 0.05994305409860633, + "grad_norm": 0.4113841652870178, + "learning_rate": 0.0001, + "loss": 0.0221, + "step": 1200 + }, + { + "epoch": 0.05999300664368849, + "grad_norm": 0.4745721220970154, + "learning_rate": 0.0001, + "loss": 0.0094, + "step": 1201 + }, + { + "epoch": 0.060042959188770666, + "grad_norm": 0.5023432374000549, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 1202 + }, + { + "epoch": 0.06009291173385284, + "grad_norm": 0.28859278559684753, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1203 + }, + { + "epoch": 0.06014286427893501, + "grad_norm": 0.49868834018707275, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 1204 + }, + { + "epoch": 0.060192816824017184, + "grad_norm": 0.5331000685691833, + "learning_rate": 0.0001, + "loss": 0.0197, + "step": 1205 + }, + { + "epoch": 0.06024276936909936, + "grad_norm": 0.48053351044654846, + "learning_rate": 0.0001, + "loss": 0.0164, + "step": 1206 + }, + { + "epoch": 0.06029272191418153, + "grad_norm": 0.5281684398651123, + "learning_rate": 0.0001, + "loss": 0.0146, + "step": 1207 + }, + { + "epoch": 0.0603426744592637, + "grad_norm": 0.48398324847221375, + "learning_rate": 0.0001, + "loss": 0.042, + "step": 1208 + }, + { + "epoch": 0.06039262700434587, + "grad_norm": 0.46096405386924744, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1209 + }, + { + "epoch": 0.06044257954942804, + "grad_norm": 0.5751495361328125, + "learning_rate": 0.0001, + "loss": 0.0259, + "step": 1210 + }, + { + "epoch": 0.060492532094510214, + "grad_norm": 0.6592525839805603, + "learning_rate": 0.0001, + "loss": 0.1883, + "step": 1211 + }, + { + "epoch": 0.06054248463959239, + "grad_norm": 0.5775688290596008, + "learning_rate": 0.0001, + "loss": 0.0159, + "step": 1212 + }, 
+ { + "epoch": 0.06059243718467456, + "grad_norm": 0.500139594078064, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 1213 + }, + { + "epoch": 0.06064238972975673, + "grad_norm": 0.3127259314060211, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1214 + }, + { + "epoch": 0.060692342274838905, + "grad_norm": 0.7449846863746643, + "learning_rate": 0.0001, + "loss": 0.0625, + "step": 1215 + }, + { + "epoch": 0.06074229481992108, + "grad_norm": 0.582450270652771, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 1216 + }, + { + "epoch": 0.060792247365003244, + "grad_norm": 0.6580715179443359, + "learning_rate": 0.0001, + "loss": 0.0166, + "step": 1217 + }, + { + "epoch": 0.060842199910085416, + "grad_norm": 0.46593108773231506, + "learning_rate": 0.0001, + "loss": 0.0098, + "step": 1218 + }, + { + "epoch": 0.06089215245516759, + "grad_norm": 0.4252864718437195, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 1219 + }, + { + "epoch": 0.06094210500024976, + "grad_norm": 0.600326418876648, + "learning_rate": 0.0001, + "loss": 0.1605, + "step": 1220 + }, + { + "epoch": 0.060992057545331935, + "grad_norm": 1.1578500270843506, + "learning_rate": 0.0001, + "loss": 0.1864, + "step": 1221 + }, + { + "epoch": 0.06104201009041411, + "grad_norm": 0.6856047511100769, + "learning_rate": 0.0001, + "loss": 0.0349, + "step": 1222 + }, + { + "epoch": 0.06109196263549628, + "grad_norm": 0.6123287081718445, + "learning_rate": 0.0001, + "loss": 0.0319, + "step": 1223 + }, + { + "epoch": 0.06114191518057845, + "grad_norm": 0.5679656863212585, + "learning_rate": 0.0001, + "loss": 0.0199, + "step": 1224 + }, + { + "epoch": 0.061191867725660626, + "grad_norm": 0.6197255253791809, + "learning_rate": 0.0001, + "loss": 0.0381, + "step": 1225 + }, + { + "epoch": 0.06124182027074279, + "grad_norm": 0.5784698128700256, + "learning_rate": 0.0001, + "loss": 0.0853, + "step": 1226 + }, + { + "epoch": 0.061291772815824964, + "grad_norm": 0.4656343162059784, + "learning_rate": 0.0001, + "loss": 0.0277, + "step": 1227 + }, + { + "epoch": 0.06134172536090714, + "grad_norm": 0.5620835423469543, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 1228 + }, + { + "epoch": 0.06139167790598931, + "grad_norm": 0.524835467338562, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1229 + }, + { + "epoch": 0.06144163045107148, + "grad_norm": 0.4951538145542145, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 1230 + }, + { + "epoch": 0.061491582996153656, + "grad_norm": 0.6257418394088745, + "learning_rate": 0.0001, + "loss": 0.0302, + "step": 1231 + }, + { + "epoch": 0.06154153554123583, + "grad_norm": 0.4575392007827759, + "learning_rate": 0.0001, + "loss": 0.0294, + "step": 1232 + }, + { + "epoch": 0.061591488086318, + "grad_norm": 0.649028480052948, + "learning_rate": 0.0001, + "loss": 0.0804, + "step": 1233 + }, + { + "epoch": 0.06164144063140017, + "grad_norm": 0.6408773064613342, + "learning_rate": 0.0001, + "loss": 0.0286, + "step": 1234 + }, + { + "epoch": 0.06169139317648234, + "grad_norm": 0.4804571568965912, + "learning_rate": 0.0001, + "loss": 0.0155, + "step": 1235 + }, + { + "epoch": 0.06174134572156451, + "grad_norm": 0.47629162669181824, + "learning_rate": 0.0001, + "loss": 0.0188, + "step": 1236 + }, + { + "epoch": 0.061791298266646685, + "grad_norm": 0.5059531927108765, + "learning_rate": 0.0001, + "loss": 0.0151, + "step": 1237 + }, + { + "epoch": 0.06184125081172886, + "grad_norm": 0.5223966836929321, + "learning_rate": 0.0001, + "loss": 0.0579, + "step": 1238 + }, + { + "epoch": 
0.06189120335681103, + "grad_norm": 0.5283417701721191, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 1239 + }, + { + "epoch": 0.061941155901893204, + "grad_norm": 0.5129599571228027, + "learning_rate": 0.0001, + "loss": 0.026, + "step": 1240 + }, + { + "epoch": 0.061991108446975376, + "grad_norm": 0.416242390871048, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 1241 + }, + { + "epoch": 0.06204106099205754, + "grad_norm": 0.5391104817390442, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 1242 + }, + { + "epoch": 0.062091013537139715, + "grad_norm": 0.536982536315918, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1243 + }, + { + "epoch": 0.06214096608222189, + "grad_norm": 0.5583608746528625, + "learning_rate": 0.0001, + "loss": 0.0436, + "step": 1244 + }, + { + "epoch": 0.06219091862730406, + "grad_norm": 0.4226512908935547, + "learning_rate": 0.0001, + "loss": 0.0495, + "step": 1245 + }, + { + "epoch": 0.06224087117238623, + "grad_norm": 0.5036501884460449, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 1246 + }, + { + "epoch": 0.062290823717468406, + "grad_norm": 0.4615970551967621, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 1247 + }, + { + "epoch": 0.06234077626255058, + "grad_norm": 0.43444904685020447, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 1248 + }, + { + "epoch": 0.06239072880763275, + "grad_norm": 0.4192769527435303, + "learning_rate": 0.0001, + "loss": 0.0167, + "step": 1249 + }, + { + "epoch": 0.06244068135271492, + "grad_norm": 0.5312977433204651, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 1250 + }, + { + "epoch": 0.06249063389779709, + "grad_norm": 0.6055718660354614, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 1251 + }, + { + "epoch": 0.06254058644287927, + "grad_norm": 0.4543202519416809, + "learning_rate": 0.0001, + "loss": 0.0271, + "step": 1252 + }, + { + "epoch": 0.06259053898796144, + "grad_norm": 0.31947779655456543, + "learning_rate": 0.0001, + "loss": 0.0211, + "step": 1253 + }, + { + "epoch": 0.0626404915330436, + "grad_norm": 0.38082772493362427, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1254 + }, + { + "epoch": 0.06269044407812578, + "grad_norm": 0.49931225180625916, + "learning_rate": 0.0001, + "loss": 0.0654, + "step": 1255 + }, + { + "epoch": 0.06274039662320795, + "grad_norm": 0.33323875069618225, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1256 + }, + { + "epoch": 0.06279034916829013, + "grad_norm": 0.8797289133071899, + "learning_rate": 0.0001, + "loss": 0.0813, + "step": 1257 + }, + { + "epoch": 0.06284030171337229, + "grad_norm": 0.5271270871162415, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 1258 + }, + { + "epoch": 0.06289025425845447, + "grad_norm": 0.8905289769172668, + "learning_rate": 0.0001, + "loss": 0.0817, + "step": 1259 + }, + { + "epoch": 0.06294020680353664, + "grad_norm": 0.4658784568309784, + "learning_rate": 0.0001, + "loss": 0.0239, + "step": 1260 + }, + { + "epoch": 0.06299015934861882, + "grad_norm": 0.5972889065742493, + "learning_rate": 0.0001, + "loss": 0.0135, + "step": 1261 + }, + { + "epoch": 0.06304011189370098, + "grad_norm": 0.6471592783927917, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 1262 + }, + { + "epoch": 0.06309006443878315, + "grad_norm": 0.4146631956100464, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1263 + }, + { + "epoch": 0.06314001698386533, + "grad_norm": 0.4542471766471863, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 1264 + }, + { + "epoch": 
0.0631899695289475, + "grad_norm": 0.4858725965023041, + "learning_rate": 0.0001, + "loss": 0.0137, + "step": 1265 + }, + { + "epoch": 0.06323992207402968, + "grad_norm": 0.42851078510284424, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 1266 + }, + { + "epoch": 0.06328987461911184, + "grad_norm": 0.39815065264701843, + "learning_rate": 0.0001, + "loss": 0.0449, + "step": 1267 + }, + { + "epoch": 0.06333982716419402, + "grad_norm": 0.458392858505249, + "learning_rate": 0.0001, + "loss": 0.0816, + "step": 1268 + }, + { + "epoch": 0.06338977970927619, + "grad_norm": 0.4230707287788391, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 1269 + }, + { + "epoch": 0.06343973225435837, + "grad_norm": 0.4260375499725342, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1270 + }, + { + "epoch": 0.06348968479944053, + "grad_norm": 0.410384863615036, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 1271 + }, + { + "epoch": 0.0635396373445227, + "grad_norm": 0.41662344336509705, + "learning_rate": 0.0001, + "loss": 0.0267, + "step": 1272 + }, + { + "epoch": 0.06358958988960488, + "grad_norm": 0.5154021978378296, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 1273 + }, + { + "epoch": 0.06363954243468704, + "grad_norm": 0.5845969319343567, + "learning_rate": 0.0001, + "loss": 0.0586, + "step": 1274 + }, + { + "epoch": 0.06368949497976922, + "grad_norm": 0.4121849834918976, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1275 + }, + { + "epoch": 0.06373944752485139, + "grad_norm": 0.4130344092845917, + "learning_rate": 0.0001, + "loss": 0.0238, + "step": 1276 + }, + { + "epoch": 0.06378940006993357, + "grad_norm": 0.4540347754955292, + "learning_rate": 0.0001, + "loss": 0.0404, + "step": 1277 + }, + { + "epoch": 0.06383935261501573, + "grad_norm": 0.4990919530391693, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 1278 + }, + { + "epoch": 0.0638893051600979, + "grad_norm": 0.4776592552661896, + "learning_rate": 0.0001, + "loss": 0.0771, + "step": 1279 + }, + { + "epoch": 0.06393925770518008, + "grad_norm": 0.4220818281173706, + "learning_rate": 0.0001, + "loss": 0.1912, + "step": 1280 + }, + { + "epoch": 0.06398921025026225, + "grad_norm": 0.35637322068214417, + "learning_rate": 0.0001, + "loss": 0.0111, + "step": 1281 + }, + { + "epoch": 0.06403916279534443, + "grad_norm": 0.4198904037475586, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 1282 + }, + { + "epoch": 0.06408911534042659, + "grad_norm": 0.3599925637245178, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 1283 + }, + { + "epoch": 0.06413906788550877, + "grad_norm": 0.4795702397823334, + "learning_rate": 0.0001, + "loss": 0.1838, + "step": 1284 + }, + { + "epoch": 0.06418902043059094, + "grad_norm": 0.5392496585845947, + "learning_rate": 0.0001, + "loss": 0.029, + "step": 1285 + }, + { + "epoch": 0.06423897297567312, + "grad_norm": 0.6623640060424805, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 1286 + }, + { + "epoch": 0.06428892552075528, + "grad_norm": 0.5400853753089905, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 1287 + }, + { + "epoch": 0.06433887806583745, + "grad_norm": 0.5016689300537109, + "learning_rate": 0.0001, + "loss": 0.0219, + "step": 1288 + }, + { + "epoch": 0.06438883061091963, + "grad_norm": 0.5213687419891357, + "learning_rate": 0.0001, + "loss": 0.06, + "step": 1289 + }, + { + "epoch": 0.0644387831560018, + "grad_norm": 0.7829819321632385, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1290 + }, + { + "epoch": 0.06448873570108397, + 
"grad_norm": 0.6404260396957397, + "learning_rate": 0.0001, + "loss": 0.0171, + "step": 1291 + }, + { + "epoch": 0.06453868824616614, + "grad_norm": 0.4006820619106293, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 1292 + }, + { + "epoch": 0.06458864079124832, + "grad_norm": 0.36880046129226685, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 1293 + }, + { + "epoch": 0.06463859333633049, + "grad_norm": 0.45093271136283875, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 1294 + }, + { + "epoch": 0.06468854588141265, + "grad_norm": 0.5783540606498718, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 1295 + }, + { + "epoch": 0.06473849842649483, + "grad_norm": 0.4312465488910675, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 1296 + }, + { + "epoch": 0.064788450971577, + "grad_norm": 0.4067787826061249, + "learning_rate": 0.0001, + "loss": 0.3089, + "step": 1297 + }, + { + "epoch": 0.06483840351665918, + "grad_norm": 0.37592971324920654, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 1298 + }, + { + "epoch": 0.06488835606174134, + "grad_norm": 0.4974533021450043, + "learning_rate": 0.0001, + "loss": 0.0285, + "step": 1299 + }, + { + "epoch": 0.06493830860682352, + "grad_norm": 0.48662421107292175, + "learning_rate": 0.0001, + "loss": 0.0448, + "step": 1300 + }, + { + "epoch": 0.06498826115190569, + "grad_norm": 0.415107399225235, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1301 + }, + { + "epoch": 0.06503821369698787, + "grad_norm": 0.3412806987762451, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1302 + }, + { + "epoch": 0.06508816624207003, + "grad_norm": 0.3550439476966858, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 1303 + }, + { + "epoch": 0.0651381187871522, + "grad_norm": 0.6693902015686035, + "learning_rate": 0.0001, + "loss": 0.0231, + "step": 1304 + }, + { + "epoch": 0.06518807133223438, + "grad_norm": 0.3464142382144928, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 1305 + }, + { + "epoch": 0.06523802387731654, + "grad_norm": 0.4759325683116913, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 1306 + }, + { + "epoch": 0.06528797642239872, + "grad_norm": 0.4304586350917816, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 1307 + }, + { + "epoch": 0.06533792896748089, + "grad_norm": 0.3621269166469574, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 1308 + }, + { + "epoch": 0.06538788151256307, + "grad_norm": 0.45465949177742004, + "learning_rate": 0.0001, + "loss": 0.0271, + "step": 1309 + }, + { + "epoch": 0.06543783405764524, + "grad_norm": 0.48988527059555054, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 1310 + }, + { + "epoch": 0.06548778660272742, + "grad_norm": 0.7385422587394714, + "learning_rate": 0.0001, + "loss": 0.0661, + "step": 1311 + }, + { + "epoch": 0.06553773914780958, + "grad_norm": 0.34999048709869385, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1312 + }, + { + "epoch": 0.06558769169289175, + "grad_norm": 0.7921525835990906, + "learning_rate": 0.0001, + "loss": 0.0663, + "step": 1313 + }, + { + "epoch": 0.06563764423797393, + "grad_norm": 0.4831075668334961, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 1314 + }, + { + "epoch": 0.0656875967830561, + "grad_norm": 0.9675500988960266, + "learning_rate": 0.0001, + "loss": 0.0527, + "step": 1315 + }, + { + "epoch": 0.06573754932813827, + "grad_norm": 0.4256071150302887, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1316 + }, + { + "epoch": 0.06578750187322044, + "grad_norm": 
0.6010918021202087, + "learning_rate": 0.0001, + "loss": 0.0336, + "step": 1317 + }, + { + "epoch": 0.06583745441830262, + "grad_norm": 0.5886648893356323, + "learning_rate": 0.0001, + "loss": 0.1488, + "step": 1318 + }, + { + "epoch": 0.06588740696338478, + "grad_norm": 0.660426676273346, + "learning_rate": 0.0001, + "loss": 0.2075, + "step": 1319 + }, + { + "epoch": 0.06593735950846695, + "grad_norm": 0.5241389870643616, + "learning_rate": 0.0001, + "loss": 0.1421, + "step": 1320 + }, + { + "epoch": 0.06598731205354913, + "grad_norm": 0.46334153413772583, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 1321 + }, + { + "epoch": 0.0660372645986313, + "grad_norm": 0.49448904395103455, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 1322 + }, + { + "epoch": 0.06608721714371348, + "grad_norm": 0.5947310328483582, + "learning_rate": 0.0001, + "loss": 0.0991, + "step": 1323 + }, + { + "epoch": 0.06613716968879564, + "grad_norm": 0.5005750060081482, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 1324 + }, + { + "epoch": 0.06618712223387782, + "grad_norm": 0.4933633804321289, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1325 + }, + { + "epoch": 0.06623707477895999, + "grad_norm": 0.5053734183311462, + "learning_rate": 0.0001, + "loss": 0.0257, + "step": 1326 + }, + { + "epoch": 0.06628702732404217, + "grad_norm": 0.472201406955719, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 1327 + }, + { + "epoch": 0.06633697986912433, + "grad_norm": 0.8677076101303101, + "learning_rate": 0.0001, + "loss": 0.1247, + "step": 1328 + }, + { + "epoch": 0.0663869324142065, + "grad_norm": 0.5852549076080322, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 1329 + }, + { + "epoch": 0.06643688495928868, + "grad_norm": 0.5133956670761108, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 1330 + }, + { + "epoch": 0.06648683750437084, + "grad_norm": 0.507530689239502, + "learning_rate": 0.0001, + "loss": 0.0279, + "step": 1331 + }, + { + "epoch": 0.06653679004945302, + "grad_norm": 0.5913331508636475, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 1332 + }, + { + "epoch": 0.06658674259453519, + "grad_norm": 0.5711591243743896, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 1333 + }, + { + "epoch": 0.06663669513961737, + "grad_norm": 0.45276522636413574, + "learning_rate": 0.0001, + "loss": 0.0179, + "step": 1334 + }, + { + "epoch": 0.06668664768469953, + "grad_norm": 0.5610842108726501, + "learning_rate": 0.0001, + "loss": 0.1266, + "step": 1335 + }, + { + "epoch": 0.0667366002297817, + "grad_norm": 0.4094065725803375, + "learning_rate": 0.0001, + "loss": 0.0451, + "step": 1336 + }, + { + "epoch": 0.06678655277486388, + "grad_norm": 0.47730401158332825, + "learning_rate": 0.0001, + "loss": 0.0248, + "step": 1337 + }, + { + "epoch": 0.06683650531994605, + "grad_norm": 0.5016578435897827, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 1338 + }, + { + "epoch": 0.06688645786502823, + "grad_norm": 0.4597826600074768, + "learning_rate": 0.0001, + "loss": 0.0358, + "step": 1339 + }, + { + "epoch": 0.06693641041011039, + "grad_norm": 0.4188750386238098, + "learning_rate": 0.0001, + "loss": 0.0149, + "step": 1340 + }, + { + "epoch": 0.06698636295519257, + "grad_norm": 0.42575347423553467, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 1341 + }, + { + "epoch": 0.06703631550027474, + "grad_norm": 0.47153162956237793, + "learning_rate": 0.0001, + "loss": 0.0581, + "step": 1342 + }, + { + "epoch": 0.06708626804535692, + "grad_norm": 0.2984854280948639, + 
"learning_rate": 0.0001, + "loss": 0.0347, + "step": 1343 + }, + { + "epoch": 0.06713622059043908, + "grad_norm": 0.27835360169410706, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 1344 + }, + { + "epoch": 0.06718617313552125, + "grad_norm": 0.32021594047546387, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 1345 + }, + { + "epoch": 0.06723612568060343, + "grad_norm": 0.32559934258461, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 1346 + }, + { + "epoch": 0.0672860782256856, + "grad_norm": 0.3506375551223755, + "learning_rate": 0.0001, + "loss": 0.0144, + "step": 1347 + }, + { + "epoch": 0.06733603077076777, + "grad_norm": 0.368945449590683, + "learning_rate": 0.0001, + "loss": 0.1506, + "step": 1348 + }, + { + "epoch": 0.06738598331584994, + "grad_norm": 0.5657196044921875, + "learning_rate": 0.0001, + "loss": 0.0782, + "step": 1349 + }, + { + "epoch": 0.06743593586093212, + "grad_norm": 0.528343141078949, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 1350 + }, + { + "epoch": 0.06748588840601429, + "grad_norm": 0.4374363422393799, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 1351 + }, + { + "epoch": 0.06753584095109647, + "grad_norm": 0.31087252497673035, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 1352 + }, + { + "epoch": 0.06758579349617863, + "grad_norm": 0.5501514673233032, + "learning_rate": 0.0001, + "loss": 0.0881, + "step": 1353 + }, + { + "epoch": 0.0676357460412608, + "grad_norm": 0.4962764382362366, + "learning_rate": 0.0001, + "loss": 0.0309, + "step": 1354 + }, + { + "epoch": 0.06768569858634298, + "grad_norm": 0.5690407752990723, + "learning_rate": 0.0001, + "loss": 0.0293, + "step": 1355 + }, + { + "epoch": 0.06773565113142514, + "grad_norm": 0.5872376561164856, + "learning_rate": 0.0001, + "loss": 0.0734, + "step": 1356 + }, + { + "epoch": 0.06778560367650732, + "grad_norm": 0.4037892818450928, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 1357 + }, + { + "epoch": 0.06783555622158949, + "grad_norm": 0.5038079619407654, + "learning_rate": 0.0001, + "loss": 0.0109, + "step": 1358 + }, + { + "epoch": 0.06788550876667167, + "grad_norm": 0.4931522309780121, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 1359 + }, + { + "epoch": 0.06793546131175383, + "grad_norm": 0.5288280248641968, + "learning_rate": 0.0001, + "loss": 0.033, + "step": 1360 + }, + { + "epoch": 0.067985413856836, + "grad_norm": 0.6414732336997986, + "learning_rate": 0.0001, + "loss": 0.064, + "step": 1361 + }, + { + "epoch": 0.06803536640191818, + "grad_norm": 0.3640299439430237, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 1362 + }, + { + "epoch": 0.06808531894700035, + "grad_norm": 0.41173383593559265, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 1363 + }, + { + "epoch": 0.06813527149208252, + "grad_norm": 0.46226179599761963, + "learning_rate": 0.0001, + "loss": 0.0173, + "step": 1364 + }, + { + "epoch": 0.06818522403716469, + "grad_norm": 0.46488243341445923, + "learning_rate": 0.0001, + "loss": 0.2018, + "step": 1365 + }, + { + "epoch": 0.06823517658224687, + "grad_norm": 0.3866034150123596, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 1366 + }, + { + "epoch": 0.06828512912732904, + "grad_norm": 0.5039724707603455, + "learning_rate": 0.0001, + "loss": 0.0455, + "step": 1367 + }, + { + "epoch": 0.06833508167241122, + "grad_norm": 0.5020007491111755, + "learning_rate": 0.0001, + "loss": 0.0128, + "step": 1368 + }, + { + "epoch": 0.06838503421749338, + "grad_norm": 0.3808991611003876, + "learning_rate": 
0.0001, + "loss": 0.0065, + "step": 1369 + }, + { + "epoch": 0.06843498676257555, + "grad_norm": 0.3188211917877197, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 1370 + }, + { + "epoch": 0.06848493930765773, + "grad_norm": 0.3311809301376343, + "learning_rate": 0.0001, + "loss": 0.0094, + "step": 1371 + }, + { + "epoch": 0.0685348918527399, + "grad_norm": 0.3887006342411041, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 1372 + }, + { + "epoch": 0.06858484439782207, + "grad_norm": 0.3785034716129303, + "learning_rate": 0.0001, + "loss": 0.0144, + "step": 1373 + }, + { + "epoch": 0.06863479694290424, + "grad_norm": 0.33349546790122986, + "learning_rate": 0.0001, + "loss": 0.2694, + "step": 1374 + }, + { + "epoch": 0.06868474948798642, + "grad_norm": 0.29577186703681946, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1375 + }, + { + "epoch": 0.06873470203306858, + "grad_norm": 0.3691161274909973, + "learning_rate": 0.0001, + "loss": 0.0574, + "step": 1376 + }, + { + "epoch": 0.06878465457815075, + "grad_norm": 0.36356058716773987, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1377 + }, + { + "epoch": 0.06883460712323293, + "grad_norm": 0.41510358452796936, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 1378 + }, + { + "epoch": 0.0688845596683151, + "grad_norm": 0.35539427399635315, + "learning_rate": 0.0001, + "loss": 0.1405, + "step": 1379 + }, + { + "epoch": 0.06893451221339728, + "grad_norm": 0.34593939781188965, + "learning_rate": 0.0001, + "loss": 0.0746, + "step": 1380 + }, + { + "epoch": 0.06898446475847944, + "grad_norm": 0.3710971474647522, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1381 + }, + { + "epoch": 0.06903441730356162, + "grad_norm": 0.39679792523384094, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 1382 + }, + { + "epoch": 0.06908436984864379, + "grad_norm": 0.41014325618743896, + "learning_rate": 0.0001, + "loss": 0.0507, + "step": 1383 + }, + { + "epoch": 0.06913432239372597, + "grad_norm": 0.3208199739456177, + "learning_rate": 0.0001, + "loss": 0.061, + "step": 1384 + }, + { + "epoch": 0.06918427493880813, + "grad_norm": 0.3214609920978546, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 1385 + }, + { + "epoch": 0.0692342274838903, + "grad_norm": 0.3760223686695099, + "learning_rate": 0.0001, + "loss": 0.045, + "step": 1386 + }, + { + "epoch": 0.06928418002897248, + "grad_norm": 0.3663613796234131, + "learning_rate": 0.0001, + "loss": 0.0407, + "step": 1387 + }, + { + "epoch": 0.06933413257405464, + "grad_norm": 0.29727426171302795, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 1388 + }, + { + "epoch": 0.06938408511913682, + "grad_norm": 0.3744489252567291, + "learning_rate": 0.0001, + "loss": 0.0301, + "step": 1389 + }, + { + "epoch": 0.06943403766421899, + "grad_norm": 0.3158661425113678, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1390 + }, + { + "epoch": 0.06948399020930117, + "grad_norm": 0.2864086329936981, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1391 + }, + { + "epoch": 0.06953394275438333, + "grad_norm": 0.250605970621109, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 1392 + }, + { + "epoch": 0.06958389529946551, + "grad_norm": 0.30142509937286377, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 1393 + }, + { + "epoch": 0.06963384784454768, + "grad_norm": 0.3022492229938507, + "learning_rate": 0.0001, + "loss": 0.1371, + "step": 1394 + }, + { + "epoch": 0.06968380038962985, + "grad_norm": 0.24623553454875946, + "learning_rate": 0.0001, + 
"loss": 0.0034, + "step": 1395 + }, + { + "epoch": 0.06973375293471203, + "grad_norm": 0.3036571443080902, + "learning_rate": 0.0001, + "loss": 0.1452, + "step": 1396 + }, + { + "epoch": 0.06978370547979419, + "grad_norm": 0.3340453505516052, + "learning_rate": 0.0001, + "loss": 0.037, + "step": 1397 + }, + { + "epoch": 0.06983365802487637, + "grad_norm": 0.29508617520332336, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 1398 + }, + { + "epoch": 0.06988361056995854, + "grad_norm": 0.2808051109313965, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 1399 + }, + { + "epoch": 0.06993356311504072, + "grad_norm": 0.26601657271385193, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1400 + }, + { + "epoch": 0.06998351566012288, + "grad_norm": 0.3226988911628723, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 1401 + }, + { + "epoch": 0.07003346820520505, + "grad_norm": 0.2596552073955536, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1402 + }, + { + "epoch": 0.07008342075028723, + "grad_norm": 0.2774321138858795, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 1403 + }, + { + "epoch": 0.0701333732953694, + "grad_norm": 0.26414182782173157, + "learning_rate": 0.0001, + "loss": 0.0253, + "step": 1404 + }, + { + "epoch": 0.07018332584045157, + "grad_norm": 0.24339765310287476, + "learning_rate": 0.0001, + "loss": 0.0267, + "step": 1405 + }, + { + "epoch": 0.07023327838553374, + "grad_norm": 0.20894743502140045, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1406 + }, + { + "epoch": 0.07028323093061592, + "grad_norm": 0.27931445837020874, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1407 + }, + { + "epoch": 0.07033318347569809, + "grad_norm": 0.2852851450443268, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1408 + }, + { + "epoch": 0.07038313602078027, + "grad_norm": 0.37182554602622986, + "learning_rate": 0.0001, + "loss": 0.0616, + "step": 1409 + }, + { + "epoch": 0.07043308856586243, + "grad_norm": 0.2531064450740814, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 1410 + }, + { + "epoch": 0.0704830411109446, + "grad_norm": 0.2819684147834778, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 1411 + }, + { + "epoch": 0.07053299365602678, + "grad_norm": 0.30525627732276917, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 1412 + }, + { + "epoch": 0.07058294620110894, + "grad_norm": 0.2991314232349396, + "learning_rate": 0.0001, + "loss": 0.0377, + "step": 1413 + }, + { + "epoch": 0.07063289874619112, + "grad_norm": 0.27715879678726196, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1414 + }, + { + "epoch": 0.07068285129127329, + "grad_norm": 0.26899927854537964, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1415 + }, + { + "epoch": 0.07073280383635547, + "grad_norm": 0.2872244417667389, + "learning_rate": 0.0001, + "loss": 0.0254, + "step": 1416 + }, + { + "epoch": 0.07078275638143763, + "grad_norm": 0.26354020833969116, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1417 + }, + { + "epoch": 0.0708327089265198, + "grad_norm": 0.2824591100215912, + "learning_rate": 0.0001, + "loss": 0.0438, + "step": 1418 + }, + { + "epoch": 0.07088266147160198, + "grad_norm": 0.272034227848053, + "learning_rate": 0.0001, + "loss": 0.1428, + "step": 1419 + }, + { + "epoch": 0.07093261401668415, + "grad_norm": 0.2821238338947296, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 1420 + }, + { + "epoch": 0.07098256656176632, + "grad_norm": 0.3647078573703766, + "learning_rate": 0.0001, + "loss": 0.0098, 
+ "step": 1421 + }, + { + "epoch": 0.07103251910684849, + "grad_norm": 0.3038094937801361, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 1422 + }, + { + "epoch": 0.07108247165193067, + "grad_norm": 0.2527802586555481, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 1423 + }, + { + "epoch": 0.07113242419701284, + "grad_norm": 0.20548628270626068, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1424 + }, + { + "epoch": 0.07118237674209502, + "grad_norm": 0.26744356751441956, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1425 + }, + { + "epoch": 0.07123232928717718, + "grad_norm": 0.24888643622398376, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 1426 + }, + { + "epoch": 0.07128228183225935, + "grad_norm": 0.20220361649990082, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1427 + }, + { + "epoch": 0.07133223437734153, + "grad_norm": 0.2253728210926056, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1428 + }, + { + "epoch": 0.0713821869224237, + "grad_norm": 0.25745731592178345, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 1429 + }, + { + "epoch": 0.07143213946750587, + "grad_norm": 0.18824045360088348, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1430 + }, + { + "epoch": 0.07148209201258804, + "grad_norm": 0.27496668696403503, + "learning_rate": 0.0001, + "loss": 0.3022, + "step": 1431 + }, + { + "epoch": 0.07153204455767022, + "grad_norm": 0.195421040058136, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 1432 + }, + { + "epoch": 0.07158199710275238, + "grad_norm": 0.23810052871704102, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1433 + }, + { + "epoch": 0.07163194964783456, + "grad_norm": 0.2610308527946472, + "learning_rate": 0.0001, + "loss": 0.023, + "step": 1434 + }, + { + "epoch": 0.07168190219291673, + "grad_norm": 0.20543412864208221, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1435 + }, + { + "epoch": 0.0717318547379989, + "grad_norm": 0.2742379605770111, + "learning_rate": 0.0001, + "loss": 0.1384, + "step": 1436 + }, + { + "epoch": 0.07178180728308108, + "grad_norm": 0.2863125205039978, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 1437 + }, + { + "epoch": 0.07183175982816324, + "grad_norm": 0.2659398019313812, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1438 + }, + { + "epoch": 0.07188171237324542, + "grad_norm": 0.4398033320903778, + "learning_rate": 0.0001, + "loss": 0.0875, + "step": 1439 + }, + { + "epoch": 0.07193166491832759, + "grad_norm": 0.2933940589427948, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1440 + }, + { + "epoch": 0.07198161746340977, + "grad_norm": 0.34439191222190857, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 1441 + }, + { + "epoch": 0.07203157000849193, + "grad_norm": 0.29717278480529785, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1442 + }, + { + "epoch": 0.0720815225535741, + "grad_norm": 0.504572331905365, + "learning_rate": 0.0001, + "loss": 0.0385, + "step": 1443 + }, + { + "epoch": 0.07213147509865628, + "grad_norm": 0.3989051282405853, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 1444 + }, + { + "epoch": 0.07218142764373844, + "grad_norm": 0.4321722984313965, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 1445 + }, + { + "epoch": 0.07223138018882062, + "grad_norm": 0.38739025592803955, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 1446 + }, + { + "epoch": 0.07228133273390279, + "grad_norm": 0.29999518394470215, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 
1447 + }, + { + "epoch": 0.07233128527898497, + "grad_norm": 0.3689385950565338, + "learning_rate": 0.0001, + "loss": 0.017, + "step": 1448 + }, + { + "epoch": 0.07238123782406714, + "grad_norm": 0.3841169774532318, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 1449 + }, + { + "epoch": 0.07243119036914931, + "grad_norm": 0.43087291717529297, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1450 + }, + { + "epoch": 0.07248114291423148, + "grad_norm": 0.26089969277381897, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 1451 + }, + { + "epoch": 0.07253109545931365, + "grad_norm": 0.34360042214393616, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 1452 + }, + { + "epoch": 0.07258104800439583, + "grad_norm": 0.3613565266132355, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 1453 + }, + { + "epoch": 0.07263100054947799, + "grad_norm": 0.35350295901298523, + "learning_rate": 0.0001, + "loss": 0.017, + "step": 1454 + }, + { + "epoch": 0.07268095309456017, + "grad_norm": 0.2921310067176819, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1455 + }, + { + "epoch": 0.07273090563964234, + "grad_norm": 0.30571189522743225, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 1456 + }, + { + "epoch": 0.07278085818472452, + "grad_norm": 0.3037170171737671, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 1457 + }, + { + "epoch": 0.07283081072980668, + "grad_norm": 0.3165197968482971, + "learning_rate": 0.0001, + "loss": 0.1335, + "step": 1458 + }, + { + "epoch": 0.07288076327488885, + "grad_norm": 0.3041869103908539, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1459 + }, + { + "epoch": 0.07293071581997103, + "grad_norm": 0.2517201900482178, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1460 + }, + { + "epoch": 0.0729806683650532, + "grad_norm": 0.24968421459197998, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 1461 + }, + { + "epoch": 0.07303062091013537, + "grad_norm": 0.2984367907047272, + "learning_rate": 0.0001, + "loss": 0.0208, + "step": 1462 + }, + { + "epoch": 0.07308057345521754, + "grad_norm": 0.369328111410141, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 1463 + }, + { + "epoch": 0.07313052600029972, + "grad_norm": 0.3912499248981476, + "learning_rate": 0.0001, + "loss": 0.1576, + "step": 1464 + }, + { + "epoch": 0.07318047854538189, + "grad_norm": 0.22650037705898285, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 1465 + }, + { + "epoch": 0.07323043109046407, + "grad_norm": 0.23384107649326324, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1466 + }, + { + "epoch": 0.07328038363554623, + "grad_norm": 0.24115489423274994, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1467 + }, + { + "epoch": 0.0733303361806284, + "grad_norm": 0.2611357569694519, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 1468 + }, + { + "epoch": 0.07338028872571058, + "grad_norm": 0.4670883119106293, + "learning_rate": 0.0001, + "loss": 0.0557, + "step": 1469 + }, + { + "epoch": 0.07343024127079274, + "grad_norm": 0.29871639609336853, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1470 + }, + { + "epoch": 0.07348019381587492, + "grad_norm": 0.37453916668891907, + "learning_rate": 0.0001, + "loss": 0.0257, + "step": 1471 + }, + { + "epoch": 0.07353014636095709, + "grad_norm": 0.36227062344551086, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1472 + }, + { + "epoch": 0.07358009890603927, + "grad_norm": 0.3921825885772705, + "learning_rate": 0.0001, + "loss": 0.0245, + "step": 1473 + }, + { 
+ "epoch": 0.07363005145112143, + "grad_norm": 0.36970797181129456, + "learning_rate": 0.0001, + "loss": 0.0379, + "step": 1474 + }, + { + "epoch": 0.0736800039962036, + "grad_norm": 0.33521831035614014, + "learning_rate": 0.0001, + "loss": 0.0169, + "step": 1475 + }, + { + "epoch": 0.07372995654128578, + "grad_norm": 0.2981873154640198, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 1476 + }, + { + "epoch": 0.07377990908636795, + "grad_norm": 0.32467126846313477, + "learning_rate": 0.0001, + "loss": 0.0259, + "step": 1477 + }, + { + "epoch": 0.07382986163145012, + "grad_norm": 0.3266807496547699, + "learning_rate": 0.0001, + "loss": 0.0248, + "step": 1478 + }, + { + "epoch": 0.07387981417653229, + "grad_norm": 0.318021684885025, + "learning_rate": 0.0001, + "loss": 0.0446, + "step": 1479 + }, + { + "epoch": 0.07392976672161447, + "grad_norm": 0.24916337430477142, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1480 + }, + { + "epoch": 0.07397971926669664, + "grad_norm": 0.2910885214805603, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1481 + }, + { + "epoch": 0.07402967181177882, + "grad_norm": 0.3707987070083618, + "learning_rate": 0.0001, + "loss": 0.0252, + "step": 1482 + }, + { + "epoch": 0.07407962435686098, + "grad_norm": 0.30381202697753906, + "learning_rate": 0.0001, + "loss": 0.1377, + "step": 1483 + }, + { + "epoch": 0.07412957690194315, + "grad_norm": 0.31674787402153015, + "learning_rate": 0.0001, + "loss": 0.0451, + "step": 1484 + }, + { + "epoch": 0.07417952944702533, + "grad_norm": 0.3012963831424713, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 1485 + }, + { + "epoch": 0.0742294819921075, + "grad_norm": 0.2737312912940979, + "learning_rate": 0.0001, + "loss": 0.0359, + "step": 1486 + }, + { + "epoch": 0.07427943453718967, + "grad_norm": 0.2935931384563446, + "learning_rate": 0.0001, + "loss": 0.1308, + "step": 1487 + }, + { + "epoch": 0.07432938708227184, + "grad_norm": 0.3534037470817566, + "learning_rate": 0.0001, + "loss": 0.0276, + "step": 1488 + }, + { + "epoch": 0.07437933962735402, + "grad_norm": 0.2949204742908478, + "learning_rate": 0.0001, + "loss": 0.0146, + "step": 1489 + }, + { + "epoch": 0.07442929217243618, + "grad_norm": 0.32543638348579407, + "learning_rate": 0.0001, + "loss": 0.4201, + "step": 1490 + }, + { + "epoch": 0.07447924471751836, + "grad_norm": 0.2962041199207306, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1491 + }, + { + "epoch": 0.07452919726260053, + "grad_norm": 0.28090691566467285, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1492 + }, + { + "epoch": 0.0745791498076827, + "grad_norm": 0.2775419056415558, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1493 + }, + { + "epoch": 0.07462910235276488, + "grad_norm": 0.4072096347808838, + "learning_rate": 0.0001, + "loss": 0.0214, + "step": 1494 + }, + { + "epoch": 0.07467905489784704, + "grad_norm": 0.28265121579170227, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 1495 + }, + { + "epoch": 0.07472900744292922, + "grad_norm": 0.3559873700141907, + "learning_rate": 0.0001, + "loss": 0.0611, + "step": 1496 + }, + { + "epoch": 0.07477895998801139, + "grad_norm": 0.34819331765174866, + "learning_rate": 0.0001, + "loss": 0.0853, + "step": 1497 + }, + { + "epoch": 0.07482891253309357, + "grad_norm": 0.4149414002895355, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 1498 + }, + { + "epoch": 0.07487886507817573, + "grad_norm": 0.4285433292388916, + "learning_rate": 0.0001, + "loss": 0.154, + "step": 1499 + }, + { + "epoch": 
0.0749288176232579, + "grad_norm": 0.3374941349029541, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1500 + }, + { + "epoch": 0.07497877016834008, + "grad_norm": 0.30799975991249084, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1501 + }, + { + "epoch": 0.07502872271342224, + "grad_norm": 0.28376731276512146, + "learning_rate": 0.0001, + "loss": 0.0118, + "step": 1502 + }, + { + "epoch": 0.07507867525850442, + "grad_norm": 0.2959024906158447, + "learning_rate": 0.0001, + "loss": 0.0557, + "step": 1503 + }, + { + "epoch": 0.07512862780358659, + "grad_norm": 0.29022398591041565, + "learning_rate": 0.0001, + "loss": 0.0248, + "step": 1504 + }, + { + "epoch": 0.07517858034866877, + "grad_norm": 0.39572909474372864, + "learning_rate": 0.0001, + "loss": 0.05, + "step": 1505 + }, + { + "epoch": 0.07522853289375094, + "grad_norm": 0.29507192969322205, + "learning_rate": 0.0001, + "loss": 0.0174, + "step": 1506 + }, + { + "epoch": 0.07527848543883311, + "grad_norm": 0.3175428509712219, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 1507 + }, + { + "epoch": 0.07532843798391528, + "grad_norm": 0.3169458508491516, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 1508 + }, + { + "epoch": 0.07537839052899745, + "grad_norm": 0.336222767829895, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 1509 + }, + { + "epoch": 0.07542834307407963, + "grad_norm": 0.3534261882305145, + "learning_rate": 0.0001, + "loss": 0.2708, + "step": 1510 + }, + { + "epoch": 0.07547829561916179, + "grad_norm": 0.2918601930141449, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 1511 + }, + { + "epoch": 0.07552824816424397, + "grad_norm": 0.2442450374364853, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 1512 + }, + { + "epoch": 0.07557820070932614, + "grad_norm": 0.21943646669387817, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 1513 + }, + { + "epoch": 0.07562815325440832, + "grad_norm": 0.281231552362442, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 1514 + }, + { + "epoch": 0.07567810579949048, + "grad_norm": 0.2879014313220978, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 1515 + }, + { + "epoch": 0.07572805834457265, + "grad_norm": 0.5961506962776184, + "learning_rate": 0.0001, + "loss": 0.1956, + "step": 1516 + }, + { + "epoch": 0.07577801088965483, + "grad_norm": 0.3556729257106781, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1517 + }, + { + "epoch": 0.075827963434737, + "grad_norm": 0.3673868179321289, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1518 + }, + { + "epoch": 0.07587791597981917, + "grad_norm": 0.32213521003723145, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 1519 + }, + { + "epoch": 0.07592786852490134, + "grad_norm": 0.3268394470214844, + "learning_rate": 0.0001, + "loss": 0.0603, + "step": 1520 + }, + { + "epoch": 0.07597782106998352, + "grad_norm": 0.24609072506427765, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1521 + }, + { + "epoch": 0.07602777361506569, + "grad_norm": 0.41656067967414856, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1522 + }, + { + "epoch": 0.07607772616014787, + "grad_norm": 0.26566651463508606, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1523 + }, + { + "epoch": 0.07612767870523003, + "grad_norm": 0.25049537420272827, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 1524 + }, + { + "epoch": 0.0761776312503122, + "grad_norm": 0.35068362951278687, + "learning_rate": 0.0001, + "loss": 0.0407, + "step": 1525 + }, + { + "epoch": 
0.07622758379539438, + "grad_norm": 0.29824957251548767, + "learning_rate": 0.0001, + "loss": 0.0135, + "step": 1526 + }, + { + "epoch": 0.07627753634047654, + "grad_norm": 0.2341575175523758, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 1527 + }, + { + "epoch": 0.07632748888555872, + "grad_norm": 0.21707338094711304, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1528 + }, + { + "epoch": 0.07637744143064089, + "grad_norm": 0.27516666054725647, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 1529 + }, + { + "epoch": 0.07642739397572307, + "grad_norm": 0.24351441860198975, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1530 + }, + { + "epoch": 0.07647734652080523, + "grad_norm": 0.28094983100891113, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 1531 + }, + { + "epoch": 0.07652729906588741, + "grad_norm": 0.2412552386522293, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1532 + }, + { + "epoch": 0.07657725161096958, + "grad_norm": 0.5155234336853027, + "learning_rate": 0.0001, + "loss": 0.1786, + "step": 1533 + }, + { + "epoch": 0.07662720415605175, + "grad_norm": 0.23370124399662018, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1534 + }, + { + "epoch": 0.07667715670113393, + "grad_norm": 0.3327334523200989, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1535 + }, + { + "epoch": 0.07672710924621609, + "grad_norm": 0.3606230616569519, + "learning_rate": 0.0001, + "loss": 0.1501, + "step": 1536 + }, + { + "epoch": 0.07677706179129827, + "grad_norm": 0.29559314250946045, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 1537 + }, + { + "epoch": 0.07682701433638044, + "grad_norm": 0.25264212489128113, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1538 + }, + { + "epoch": 0.07687696688146262, + "grad_norm": 0.2869705259799957, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1539 + }, + { + "epoch": 0.07692691942654478, + "grad_norm": 0.25474312901496887, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 1540 + }, + { + "epoch": 0.07697687197162695, + "grad_norm": 0.25055766105651855, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1541 + }, + { + "epoch": 0.07702682451670913, + "grad_norm": 0.19455352425575256, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1542 + }, + { + "epoch": 0.0770767770617913, + "grad_norm": 0.3676683306694031, + "learning_rate": 0.0001, + "loss": 0.0179, + "step": 1543 + }, + { + "epoch": 0.07712672960687347, + "grad_norm": 0.32330888509750366, + "learning_rate": 0.0001, + "loss": 0.0383, + "step": 1544 + }, + { + "epoch": 0.07717668215195564, + "grad_norm": 0.23862867057323456, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 1545 + }, + { + "epoch": 0.07722663469703782, + "grad_norm": 0.22388960421085358, + "learning_rate": 0.0001, + "loss": 0.025, + "step": 1546 + }, + { + "epoch": 0.07727658724211998, + "grad_norm": 0.31289806962013245, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1547 + }, + { + "epoch": 0.07732653978720216, + "grad_norm": 0.30147498846054077, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1548 + }, + { + "epoch": 0.07737649233228433, + "grad_norm": 0.19817553460597992, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 1549 + }, + { + "epoch": 0.0774264448773665, + "grad_norm": 0.1976732313632965, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 1550 + }, + { + "epoch": 0.07747639742244868, + "grad_norm": 0.22445030510425568, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1551 + }, + { + "epoch": 
0.07752634996753084, + "grad_norm": 0.2251223474740982, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 1552 + }, + { + "epoch": 0.07757630251261302, + "grad_norm": 0.30441585183143616, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1553 + }, + { + "epoch": 0.07762625505769519, + "grad_norm": 0.20094521343708038, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1554 + }, + { + "epoch": 0.07767620760277737, + "grad_norm": 0.18777704238891602, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1555 + }, + { + "epoch": 0.07772616014785953, + "grad_norm": 0.2916937470436096, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 1556 + }, + { + "epoch": 0.0777761126929417, + "grad_norm": 0.29147928953170776, + "learning_rate": 0.0001, + "loss": 0.0085, + "step": 1557 + }, + { + "epoch": 0.07782606523802388, + "grad_norm": 0.22915031015872955, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1558 + }, + { + "epoch": 0.07787601778310604, + "grad_norm": 0.20191162824630737, + "learning_rate": 0.0001, + "loss": 0.0174, + "step": 1559 + }, + { + "epoch": 0.07792597032818822, + "grad_norm": 0.21079127490520477, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 1560 + }, + { + "epoch": 0.07797592287327039, + "grad_norm": 0.2114945352077484, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 1561 + }, + { + "epoch": 0.07802587541835257, + "grad_norm": 0.18501059710979462, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 1562 + }, + { + "epoch": 0.07807582796343474, + "grad_norm": 0.1581672579050064, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 1563 + }, + { + "epoch": 0.07812578050851691, + "grad_norm": 0.24721364676952362, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1564 + }, + { + "epoch": 0.07817573305359908, + "grad_norm": 0.17270773649215698, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1565 + }, + { + "epoch": 0.07822568559868125, + "grad_norm": 0.16349250078201294, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 1566 + }, + { + "epoch": 0.07827563814376343, + "grad_norm": 0.14492495357990265, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1567 + }, + { + "epoch": 0.07832559068884559, + "grad_norm": 0.23303525149822235, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1568 + }, + { + "epoch": 0.07837554323392777, + "grad_norm": 0.15150661766529083, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1569 + }, + { + "epoch": 0.07842549577900994, + "grad_norm": 0.17000295221805573, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 1570 + }, + { + "epoch": 0.07847544832409212, + "grad_norm": 0.18853455781936646, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 1571 + }, + { + "epoch": 0.07852540086917428, + "grad_norm": 0.17627470195293427, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 1572 + }, + { + "epoch": 0.07857535341425646, + "grad_norm": 0.15316179394721985, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1573 + }, + { + "epoch": 0.07862530595933863, + "grad_norm": 0.2327546924352646, + "learning_rate": 0.0001, + "loss": 0.2932, + "step": 1574 + }, + { + "epoch": 0.0786752585044208, + "grad_norm": 0.20712772011756897, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1575 + }, + { + "epoch": 0.07872521104950297, + "grad_norm": 0.2587328553199768, + "learning_rate": 0.0001, + "loss": 0.1338, + "step": 1576 + }, + { + "epoch": 0.07877516359458514, + "grad_norm": 0.20880088210105896, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 1577 + }, + { + "epoch": 
0.07882511613966732, + "grad_norm": 0.25698405504226685, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1578 + }, + { + "epoch": 0.07887506868474949, + "grad_norm": 0.2909889221191406, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 1579 + }, + { + "epoch": 0.07892502122983167, + "grad_norm": 0.21440254151821136, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 1580 + }, + { + "epoch": 0.07897497377491383, + "grad_norm": 0.23380975425243378, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 1581 + }, + { + "epoch": 0.079024926319996, + "grad_norm": 0.2189464569091797, + "learning_rate": 0.0001, + "loss": 0.1289, + "step": 1582 + }, + { + "epoch": 0.07907487886507818, + "grad_norm": 0.21624816954135895, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1583 + }, + { + "epoch": 0.07912483141016034, + "grad_norm": 0.21028809249401093, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 1584 + }, + { + "epoch": 0.07917478395524252, + "grad_norm": 0.25216180086135864, + "learning_rate": 0.0001, + "loss": 0.139, + "step": 1585 + }, + { + "epoch": 0.07922473650032469, + "grad_norm": 0.2712916433811188, + "learning_rate": 0.0001, + "loss": 0.1286, + "step": 1586 + }, + { + "epoch": 0.07927468904540687, + "grad_norm": 0.23109865188598633, + "learning_rate": 0.0001, + "loss": 0.1293, + "step": 1587 + }, + { + "epoch": 0.07932464159048903, + "grad_norm": 0.24436414241790771, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1588 + }, + { + "epoch": 0.07937459413557121, + "grad_norm": 0.21204103529453278, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1589 + }, + { + "epoch": 0.07942454668065338, + "grad_norm": 0.24810677766799927, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 1590 + }, + { + "epoch": 0.07947449922573555, + "grad_norm": 0.2775145173072815, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 1591 + }, + { + "epoch": 0.07952445177081773, + "grad_norm": 0.25426068902015686, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1592 + }, + { + "epoch": 0.07957440431589989, + "grad_norm": 0.23630714416503906, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 1593 + }, + { + "epoch": 0.07962435686098207, + "grad_norm": 0.3098001182079315, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 1594 + }, + { + "epoch": 0.07967430940606424, + "grad_norm": 0.2457035481929779, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 1595 + }, + { + "epoch": 0.07972426195114642, + "grad_norm": 0.2732433080673218, + "learning_rate": 0.0001, + "loss": 0.2833, + "step": 1596 + }, + { + "epoch": 0.07977421449622858, + "grad_norm": 0.23826360702514648, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1597 + }, + { + "epoch": 0.07982416704131075, + "grad_norm": 0.24247969686985016, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1598 + }, + { + "epoch": 0.07987411958639293, + "grad_norm": 0.22609001398086548, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1599 + }, + { + "epoch": 0.0799240721314751, + "grad_norm": 0.188796728849411, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1600 + }, + { + "epoch": 0.07997402467655727, + "grad_norm": 0.24667741358280182, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1601 + }, + { + "epoch": 0.08002397722163944, + "grad_norm": 0.23113395273685455, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 1602 + }, + { + "epoch": 0.08007392976672162, + "grad_norm": 0.3292839527130127, + "learning_rate": 0.0001, + "loss": 0.0315, + "step": 1603 + }, + { + "epoch": 
0.08012388231180378, + "grad_norm": 0.21909423172473907, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1604 + }, + { + "epoch": 0.08017383485688596, + "grad_norm": 0.19299758970737457, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 1605 + }, + { + "epoch": 0.08022378740196813, + "grad_norm": 0.16976752877235413, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1606 + }, + { + "epoch": 0.0802737399470503, + "grad_norm": 0.28026556968688965, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 1607 + }, + { + "epoch": 0.08032369249213248, + "grad_norm": 0.2485332190990448, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 1608 + }, + { + "epoch": 0.08037364503721464, + "grad_norm": 0.20448510348796844, + "learning_rate": 0.0001, + "loss": 0.0155, + "step": 1609 + }, + { + "epoch": 0.08042359758229682, + "grad_norm": 0.294614315032959, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1610 + }, + { + "epoch": 0.08047355012737899, + "grad_norm": 0.2704187333583832, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 1611 + }, + { + "epoch": 0.08052350267246117, + "grad_norm": 0.2848164737224579, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 1612 + }, + { + "epoch": 0.08057345521754333, + "grad_norm": 0.2626108229160309, + "learning_rate": 0.0001, + "loss": 0.1298, + "step": 1613 + }, + { + "epoch": 0.08062340776262551, + "grad_norm": 0.2489321380853653, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1614 + }, + { + "epoch": 0.08067336030770768, + "grad_norm": 0.22750414907932281, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1615 + }, + { + "epoch": 0.08072331285278984, + "grad_norm": 0.2460375279188156, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 1616 + }, + { + "epoch": 0.08077326539787202, + "grad_norm": 0.25257283449172974, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1617 + }, + { + "epoch": 0.08082321794295419, + "grad_norm": 0.23117582499980927, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1618 + }, + { + "epoch": 0.08087317048803637, + "grad_norm": 0.23166237771511078, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1619 + }, + { + "epoch": 0.08092312303311854, + "grad_norm": 0.26586779952049255, + "learning_rate": 0.0001, + "loss": 0.0144, + "step": 1620 + }, + { + "epoch": 0.08097307557820071, + "grad_norm": 0.28140050172805786, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 1621 + }, + { + "epoch": 0.08102302812328288, + "grad_norm": 0.27647665143013, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 1622 + }, + { + "epoch": 0.08107298066836505, + "grad_norm": 0.25726303458213806, + "learning_rate": 0.0001, + "loss": 0.135, + "step": 1623 + }, + { + "epoch": 0.08112293321344723, + "grad_norm": 0.25906652212142944, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1624 + }, + { + "epoch": 0.08117288575852939, + "grad_norm": 0.2365572154521942, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1625 + }, + { + "epoch": 0.08122283830361157, + "grad_norm": 0.2819938361644745, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1626 + }, + { + "epoch": 0.08127279084869374, + "grad_norm": 0.2720286548137665, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1627 + }, + { + "epoch": 0.08132274339377592, + "grad_norm": 0.25539854168891907, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1628 + }, + { + "epoch": 0.08137269593885808, + "grad_norm": 0.3001675605773926, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1629 + }, + { + "epoch": 
0.08142264848394026, + "grad_norm": 0.20253786444664001, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 1630 + }, + { + "epoch": 0.08147260102902243, + "grad_norm": 0.25209978222846985, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 1631 + }, + { + "epoch": 0.0815225535741046, + "grad_norm": 0.32156315445899963, + "learning_rate": 0.0001, + "loss": 0.0318, + "step": 1632 + }, + { + "epoch": 0.08157250611918677, + "grad_norm": 0.23186559975147247, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1633 + }, + { + "epoch": 0.08162245866426894, + "grad_norm": 0.2320137917995453, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1634 + }, + { + "epoch": 0.08167241120935112, + "grad_norm": 0.26472803950309753, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 1635 + }, + { + "epoch": 0.08172236375443329, + "grad_norm": 0.29283198714256287, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1636 + }, + { + "epoch": 0.08177231629951547, + "grad_norm": 0.25234708189964294, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1637 + }, + { + "epoch": 0.08182226884459763, + "grad_norm": 0.4403884708881378, + "learning_rate": 0.0001, + "loss": 0.0525, + "step": 1638 + }, + { + "epoch": 0.0818722213896798, + "grad_norm": 0.28831177949905396, + "learning_rate": 0.0001, + "loss": 0.0211, + "step": 1639 + }, + { + "epoch": 0.08192217393476198, + "grad_norm": 0.527397871017456, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1640 + }, + { + "epoch": 0.08197212647984414, + "grad_norm": 0.4645324647426605, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1641 + }, + { + "epoch": 0.08202207902492632, + "grad_norm": 0.40310999751091003, + "learning_rate": 0.0001, + "loss": 0.0277, + "step": 1642 + }, + { + "epoch": 0.08207203157000849, + "grad_norm": 0.4594443142414093, + "learning_rate": 0.0001, + "loss": 0.0391, + "step": 1643 + }, + { + "epoch": 0.08212198411509067, + "grad_norm": 0.4937461316585541, + "learning_rate": 0.0001, + "loss": 0.0205, + "step": 1644 + }, + { + "epoch": 0.08217193666017283, + "grad_norm": 0.4159993529319763, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 1645 + }, + { + "epoch": 0.08222188920525501, + "grad_norm": 0.37023308873176575, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 1646 + }, + { + "epoch": 0.08227184175033718, + "grad_norm": 0.47360295057296753, + "learning_rate": 0.0001, + "loss": 0.0476, + "step": 1647 + }, + { + "epoch": 0.08232179429541935, + "grad_norm": 0.4397547245025635, + "learning_rate": 0.0001, + "loss": 0.0383, + "step": 1648 + }, + { + "epoch": 0.08237174684050153, + "grad_norm": 0.3881329298019409, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 1649 + }, + { + "epoch": 0.08242169938558369, + "grad_norm": 0.27481862902641296, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1650 + }, + { + "epoch": 0.08247165193066587, + "grad_norm": 0.41939210891723633, + "learning_rate": 0.0001, + "loss": 0.0372, + "step": 1651 + }, + { + "epoch": 0.08252160447574804, + "grad_norm": 0.43072623014450073, + "learning_rate": 0.0001, + "loss": 0.0234, + "step": 1652 + }, + { + "epoch": 0.08257155702083022, + "grad_norm": 0.27891775965690613, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 1653 + }, + { + "epoch": 0.08262150956591238, + "grad_norm": 0.2596665918827057, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1654 + }, + { + "epoch": 0.08267146211099456, + "grad_norm": 0.30993732810020447, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 1655 + }, + { + "epoch": 
0.08272141465607673, + "grad_norm": 0.31616291403770447, + "learning_rate": 0.0001, + "loss": 0.1304, + "step": 1656 + }, + { + "epoch": 0.0827713672011589, + "grad_norm": 0.32196715474128723, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1657 + }, + { + "epoch": 0.08282131974624107, + "grad_norm": 0.2880435883998871, + "learning_rate": 0.0001, + "loss": 0.0255, + "step": 1658 + }, + { + "epoch": 0.08287127229132324, + "grad_norm": 0.2958340346813202, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 1659 + }, + { + "epoch": 0.08292122483640542, + "grad_norm": 0.2939961850643158, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1660 + }, + { + "epoch": 0.08297117738148758, + "grad_norm": 0.25928306579589844, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 1661 + }, + { + "epoch": 0.08302112992656976, + "grad_norm": 0.3076728582382202, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 1662 + }, + { + "epoch": 0.08307108247165193, + "grad_norm": 0.23973584175109863, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1663 + }, + { + "epoch": 0.0831210350167341, + "grad_norm": 0.28473353385925293, + "learning_rate": 0.0001, + "loss": 0.1517, + "step": 1664 + }, + { + "epoch": 0.08317098756181628, + "grad_norm": 0.27122607827186584, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 1665 + }, + { + "epoch": 0.08322094010689844, + "grad_norm": 0.27775511145591736, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1666 + }, + { + "epoch": 0.08327089265198062, + "grad_norm": 0.2588164806365967, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 1667 + }, + { + "epoch": 0.08332084519706279, + "grad_norm": 0.27267083525657654, + "learning_rate": 0.0001, + "loss": 0.2579, + "step": 1668 + }, + { + "epoch": 0.08337079774214497, + "grad_norm": 0.2237951010465622, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1669 + }, + { + "epoch": 0.08342075028722713, + "grad_norm": 0.4044942259788513, + "learning_rate": 0.0001, + "loss": 0.0564, + "step": 1670 + }, + { + "epoch": 0.08347070283230931, + "grad_norm": 0.24046431481838226, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 1671 + }, + { + "epoch": 0.08352065537739148, + "grad_norm": 0.26687848567962646, + "learning_rate": 0.0001, + "loss": 0.0169, + "step": 1672 + }, + { + "epoch": 0.08357060792247364, + "grad_norm": 0.29410600662231445, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1673 + }, + { + "epoch": 0.08362056046755582, + "grad_norm": 0.29084959626197815, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 1674 + }, + { + "epoch": 0.08367051301263799, + "grad_norm": 0.24766871333122253, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 1675 + }, + { + "epoch": 0.08372046555772017, + "grad_norm": 0.3117693364620209, + "learning_rate": 0.0001, + "loss": 0.0228, + "step": 1676 + }, + { + "epoch": 0.08377041810280234, + "grad_norm": 0.3263690769672394, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1677 + }, + { + "epoch": 0.08382037064788452, + "grad_norm": 0.2639921009540558, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1678 + }, + { + "epoch": 0.08387032319296668, + "grad_norm": 0.24179446697235107, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1679 + }, + { + "epoch": 0.08392027573804885, + "grad_norm": 0.3255082964897156, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1680 + }, + { + "epoch": 0.08397022828313103, + "grad_norm": 0.35971975326538086, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 1681 + }, + { + "epoch": 
0.08402018082821319, + "grad_norm": 0.31078973412513733, + "learning_rate": 0.0001, + "loss": 0.0187, + "step": 1682 + }, + { + "epoch": 0.08407013337329537, + "grad_norm": 0.3046671450138092, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1683 + }, + { + "epoch": 0.08412008591837754, + "grad_norm": 0.2504916489124298, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1684 + }, + { + "epoch": 0.08417003846345972, + "grad_norm": 0.23347100615501404, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 1685 + }, + { + "epoch": 0.08421999100854188, + "grad_norm": 0.25089001655578613, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1686 + }, + { + "epoch": 0.08426994355362406, + "grad_norm": 0.32527631521224976, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 1687 + }, + { + "epoch": 0.08431989609870623, + "grad_norm": 0.26181942224502563, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1688 + }, + { + "epoch": 0.0843698486437884, + "grad_norm": 0.23185230791568756, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 1689 + }, + { + "epoch": 0.08441980118887057, + "grad_norm": 0.24697139859199524, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 1690 + }, + { + "epoch": 0.08446975373395274, + "grad_norm": 0.27178239822387695, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 1691 + }, + { + "epoch": 0.08451970627903492, + "grad_norm": 0.2567026913166046, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1692 + }, + { + "epoch": 0.08456965882411709, + "grad_norm": 0.2587982416152954, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1693 + }, + { + "epoch": 0.08461961136919927, + "grad_norm": 0.20799022912979126, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 1694 + }, + { + "epoch": 0.08466956391428143, + "grad_norm": 0.3277876079082489, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1695 + }, + { + "epoch": 0.08471951645936361, + "grad_norm": 0.30003854632377625, + "learning_rate": 0.0001, + "loss": 0.0241, + "step": 1696 + }, + { + "epoch": 0.08476946900444578, + "grad_norm": 0.23981094360351562, + "learning_rate": 0.0001, + "loss": 0.0186, + "step": 1697 + }, + { + "epoch": 0.08481942154952794, + "grad_norm": 0.3046433925628662, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 1698 + }, + { + "epoch": 0.08486937409461012, + "grad_norm": 0.29646286368370056, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1699 + }, + { + "epoch": 0.08491932663969229, + "grad_norm": 0.3725103437900543, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 1700 + }, + { + "epoch": 0.08496927918477447, + "grad_norm": 0.2811043858528137, + "learning_rate": 0.0001, + "loss": 0.0122, + "step": 1701 + }, + { + "epoch": 0.08501923172985663, + "grad_norm": 0.24821923673152924, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 1702 + }, + { + "epoch": 0.08506918427493881, + "grad_norm": 0.26744285225868225, + "learning_rate": 0.0001, + "loss": 0.0254, + "step": 1703 + }, + { + "epoch": 0.08511913682002098, + "grad_norm": 0.3064006268978119, + "learning_rate": 0.0001, + "loss": 0.1299, + "step": 1704 + }, + { + "epoch": 0.08516908936510315, + "grad_norm": 0.2911625802516937, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1705 + }, + { + "epoch": 0.08521904191018533, + "grad_norm": 0.34532293677330017, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 1706 + }, + { + "epoch": 0.08526899445526749, + "grad_norm": 0.3496280908584595, + "learning_rate": 0.0001, + "loss": 0.0191, + "step": 1707 + }, + { + "epoch": 
0.08531894700034967, + "grad_norm": 0.29448115825653076, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1708 + }, + { + "epoch": 0.08536889954543184, + "grad_norm": 0.22823254764080048, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1709 + }, + { + "epoch": 0.08541885209051402, + "grad_norm": 0.31250593066215515, + "learning_rate": 0.0001, + "loss": 0.1429, + "step": 1710 + }, + { + "epoch": 0.08546880463559618, + "grad_norm": 0.32963061332702637, + "learning_rate": 0.0001, + "loss": 0.0583, + "step": 1711 + }, + { + "epoch": 0.08551875718067836, + "grad_norm": 0.28359827399253845, + "learning_rate": 0.0001, + "loss": 0.1343, + "step": 1712 + }, + { + "epoch": 0.08556870972576053, + "grad_norm": 0.32407230138778687, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1713 + }, + { + "epoch": 0.0856186622708427, + "grad_norm": 0.3532545864582062, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 1714 + }, + { + "epoch": 0.08566861481592487, + "grad_norm": 0.2765447497367859, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 1715 + }, + { + "epoch": 0.08571856736100704, + "grad_norm": 0.2469215840101242, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1716 + }, + { + "epoch": 0.08576851990608922, + "grad_norm": 0.2695595622062683, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 1717 + }, + { + "epoch": 0.08581847245117138, + "grad_norm": 0.2505320906639099, + "learning_rate": 0.0001, + "loss": 0.0178, + "step": 1718 + }, + { + "epoch": 0.08586842499625356, + "grad_norm": 0.24892820417881012, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 1719 + }, + { + "epoch": 0.08591837754133573, + "grad_norm": 0.262691855430603, + "learning_rate": 0.0001, + "loss": 0.0144, + "step": 1720 + }, + { + "epoch": 0.0859683300864179, + "grad_norm": 0.2399580478668213, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 1721 + }, + { + "epoch": 0.08601828263150008, + "grad_norm": 0.3067623972892761, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 1722 + }, + { + "epoch": 0.08606823517658224, + "grad_norm": 0.3056345582008362, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1723 + }, + { + "epoch": 0.08611818772166442, + "grad_norm": 0.2978316843509674, + "learning_rate": 0.0001, + "loss": 0.1342, + "step": 1724 + }, + { + "epoch": 0.08616814026674659, + "grad_norm": 0.28453755378723145, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 1725 + }, + { + "epoch": 0.08621809281182877, + "grad_norm": 0.26118171215057373, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1726 + }, + { + "epoch": 0.08626804535691093, + "grad_norm": 0.22179876267910004, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1727 + }, + { + "epoch": 0.08631799790199311, + "grad_norm": 0.3108757734298706, + "learning_rate": 0.0001, + "loss": 0.0275, + "step": 1728 + }, + { + "epoch": 0.08636795044707528, + "grad_norm": 0.21234388649463654, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 1729 + }, + { + "epoch": 0.08641790299215744, + "grad_norm": 0.25813958048820496, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1730 + }, + { + "epoch": 0.08646785553723962, + "grad_norm": 0.23491287231445312, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1731 + }, + { + "epoch": 0.08651780808232179, + "grad_norm": 0.2736203968524933, + "learning_rate": 0.0001, + "loss": 0.0424, + "step": 1732 + }, + { + "epoch": 0.08656776062740397, + "grad_norm": 0.2626398503780365, + "learning_rate": 0.0001, + "loss": 0.0164, + "step": 1733 + }, + { + "epoch": 
0.08661771317248614, + "grad_norm": 0.2620755732059479, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1734 + }, + { + "epoch": 0.08666766571756832, + "grad_norm": 0.2011786848306656, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 1735 + }, + { + "epoch": 0.08671761826265048, + "grad_norm": 0.32807695865631104, + "learning_rate": 0.0001, + "loss": 0.0367, + "step": 1736 + }, + { + "epoch": 0.08676757080773266, + "grad_norm": 0.23475946485996246, + "learning_rate": 0.0001, + "loss": 0.1285, + "step": 1737 + }, + { + "epoch": 0.08681752335281483, + "grad_norm": 0.26993831992149353, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1738 + }, + { + "epoch": 0.08686747589789699, + "grad_norm": 0.35605672001838684, + "learning_rate": 0.0001, + "loss": 0.0311, + "step": 1739 + }, + { + "epoch": 0.08691742844297917, + "grad_norm": 0.20793183147907257, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1740 + }, + { + "epoch": 0.08696738098806134, + "grad_norm": 0.2776152789592743, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 1741 + }, + { + "epoch": 0.08701733353314352, + "grad_norm": 0.3238535523414612, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 1742 + }, + { + "epoch": 0.08706728607822568, + "grad_norm": 0.22044962644577026, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1743 + }, + { + "epoch": 0.08711723862330786, + "grad_norm": 0.17116117477416992, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 1744 + }, + { + "epoch": 0.08716719116839003, + "grad_norm": 0.29331955313682556, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1745 + }, + { + "epoch": 0.0872171437134722, + "grad_norm": 0.23734652996063232, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1746 + }, + { + "epoch": 0.08726709625855437, + "grad_norm": 0.29867222905158997, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 1747 + }, + { + "epoch": 0.08731704880363654, + "grad_norm": 0.3685677647590637, + "learning_rate": 0.0001, + "loss": 0.067, + "step": 1748 + }, + { + "epoch": 0.08736700134871872, + "grad_norm": 0.2798598110675812, + "learning_rate": 0.0001, + "loss": 0.051, + "step": 1749 + }, + { + "epoch": 0.08741695389380089, + "grad_norm": 0.34543031454086304, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1750 + }, + { + "epoch": 0.08746690643888307, + "grad_norm": 0.6142606735229492, + "learning_rate": 0.0001, + "loss": 0.0742, + "step": 1751 + }, + { + "epoch": 0.08751685898396523, + "grad_norm": 0.29574713110923767, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1752 + }, + { + "epoch": 0.08756681152904741, + "grad_norm": 0.361080527305603, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 1753 + }, + { + "epoch": 0.08761676407412958, + "grad_norm": 0.342471182346344, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 1754 + }, + { + "epoch": 0.08766671661921174, + "grad_norm": 0.3150728642940521, + "learning_rate": 0.0001, + "loss": 0.0155, + "step": 1755 + }, + { + "epoch": 0.08771666916429392, + "grad_norm": 0.3102814555168152, + "learning_rate": 0.0001, + "loss": 0.0277, + "step": 1756 + }, + { + "epoch": 0.08776662170937609, + "grad_norm": 0.3190973699092865, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 1757 + }, + { + "epoch": 0.08781657425445827, + "grad_norm": 0.3201735019683838, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 1758 + }, + { + "epoch": 0.08786652679954043, + "grad_norm": 0.3058953285217285, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1759 + }, + { + "epoch": 
0.08791647934462261, + "grad_norm": 0.25331032276153564, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 1760 + }, + { + "epoch": 0.08796643188970478, + "grad_norm": 0.3358631432056427, + "learning_rate": 0.0001, + "loss": 0.0259, + "step": 1761 + }, + { + "epoch": 0.08801638443478695, + "grad_norm": 0.2655375003814697, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1762 + }, + { + "epoch": 0.08806633697986913, + "grad_norm": 0.26443296670913696, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1763 + }, + { + "epoch": 0.08811628952495129, + "grad_norm": 0.21756146848201752, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1764 + }, + { + "epoch": 0.08816624207003347, + "grad_norm": 0.2665965259075165, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1765 + }, + { + "epoch": 0.08821619461511564, + "grad_norm": 0.2759920358657837, + "learning_rate": 0.0001, + "loss": 0.1304, + "step": 1766 + }, + { + "epoch": 0.08826614716019782, + "grad_norm": 0.2507081627845764, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 1767 + }, + { + "epoch": 0.08831609970527998, + "grad_norm": 0.25561028718948364, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 1768 + }, + { + "epoch": 0.08836605225036216, + "grad_norm": 0.2179265171289444, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 1769 + }, + { + "epoch": 0.08841600479544433, + "grad_norm": 0.24876520037651062, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1770 + }, + { + "epoch": 0.0884659573405265, + "grad_norm": 0.21499063074588776, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1771 + }, + { + "epoch": 0.08851590988560867, + "grad_norm": 0.26149648427963257, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 1772 + }, + { + "epoch": 0.08856586243069084, + "grad_norm": 0.20108677446842194, + "learning_rate": 0.0001, + "loss": 0.0099, + "step": 1773 + }, + { + "epoch": 0.08861581497577302, + "grad_norm": 0.1800982803106308, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1774 + }, + { + "epoch": 0.08866576752085518, + "grad_norm": 0.1937202662229538, + "learning_rate": 0.0001, + "loss": 0.1339, + "step": 1775 + }, + { + "epoch": 0.08871572006593736, + "grad_norm": 0.22672231495380402, + "learning_rate": 0.0001, + "loss": 0.0331, + "step": 1776 + }, + { + "epoch": 0.08876567261101953, + "grad_norm": 0.18361598253250122, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 1777 + }, + { + "epoch": 0.08881562515610171, + "grad_norm": 0.18931697309017181, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1778 + }, + { + "epoch": 0.08886557770118388, + "grad_norm": 0.1376703530550003, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1779 + }, + { + "epoch": 0.08891553024626604, + "grad_norm": 0.23717652261257172, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1780 + }, + { + "epoch": 0.08896548279134822, + "grad_norm": 0.2606066167354584, + "learning_rate": 0.0001, + "loss": 0.0352, + "step": 1781 + }, + { + "epoch": 0.08901543533643039, + "grad_norm": 0.18590928614139557, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1782 + }, + { + "epoch": 0.08906538788151257, + "grad_norm": 0.24556121230125427, + "learning_rate": 0.0001, + "loss": 0.1307, + "step": 1783 + }, + { + "epoch": 0.08911534042659473, + "grad_norm": 0.1862546056509018, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 1784 + }, + { + "epoch": 0.08916529297167691, + "grad_norm": 0.2500929832458496, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 1785 + }, + { + "epoch": 
0.08921524551675908, + "grad_norm": 0.2874443531036377, + "learning_rate": 0.0001, + "loss": 0.0354, + "step": 1786 + }, + { + "epoch": 0.08926519806184124, + "grad_norm": 0.26767462491989136, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 1787 + }, + { + "epoch": 0.08931515060692342, + "grad_norm": 0.22279052436351776, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 1788 + }, + { + "epoch": 0.08936510315200559, + "grad_norm": 0.29560405015945435, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 1789 + }, + { + "epoch": 0.08941505569708777, + "grad_norm": 0.2778550982475281, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 1790 + }, + { + "epoch": 0.08946500824216994, + "grad_norm": 0.2589988708496094, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1791 + }, + { + "epoch": 0.08951496078725212, + "grad_norm": 0.2589195966720581, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 1792 + }, + { + "epoch": 0.08956491333233428, + "grad_norm": 0.2541419267654419, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 1793 + }, + { + "epoch": 0.08961486587741646, + "grad_norm": 0.27088916301727295, + "learning_rate": 0.0001, + "loss": 0.0332, + "step": 1794 + }, + { + "epoch": 0.08966481842249863, + "grad_norm": 0.2693803310394287, + "learning_rate": 0.0001, + "loss": 0.0209, + "step": 1795 + }, + { + "epoch": 0.08971477096758079, + "grad_norm": 0.2591119706630707, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1796 + }, + { + "epoch": 0.08976472351266297, + "grad_norm": 0.2768175005912781, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 1797 + }, + { + "epoch": 0.08981467605774514, + "grad_norm": 0.23956309258937836, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1798 + }, + { + "epoch": 0.08986462860282732, + "grad_norm": 0.2640906572341919, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 1799 + }, + { + "epoch": 0.08991458114790948, + "grad_norm": 0.22037386894226074, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 1800 + }, + { + "epoch": 0.08996453369299166, + "grad_norm": 0.2512912452220917, + "learning_rate": 0.0001, + "loss": 0.0263, + "step": 1801 + }, + { + "epoch": 0.09001448623807383, + "grad_norm": 0.3171727657318115, + "learning_rate": 0.0001, + "loss": 0.0429, + "step": 1802 + }, + { + "epoch": 0.090064438783156, + "grad_norm": 0.3290158212184906, + "learning_rate": 0.0001, + "loss": 0.0205, + "step": 1803 + }, + { + "epoch": 0.09011439132823817, + "grad_norm": 0.3548352122306824, + "learning_rate": 0.0001, + "loss": 0.0228, + "step": 1804 + }, + { + "epoch": 0.09016434387332034, + "grad_norm": 0.27413028478622437, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1805 + }, + { + "epoch": 0.09021429641840252, + "grad_norm": 0.36783644556999207, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1806 + }, + { + "epoch": 0.09026424896348469, + "grad_norm": 0.38203880190849304, + "learning_rate": 0.0001, + "loss": 0.0351, + "step": 1807 + }, + { + "epoch": 0.09031420150856687, + "grad_norm": 0.26133763790130615, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 1808 + }, + { + "epoch": 0.09036415405364903, + "grad_norm": 0.3232758939266205, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 1809 + }, + { + "epoch": 0.09041410659873121, + "grad_norm": 0.4145842492580414, + "learning_rate": 0.0001, + "loss": 0.0561, + "step": 1810 + }, + { + "epoch": 0.09046405914381338, + "grad_norm": 0.3390941023826599, + "learning_rate": 0.0001, + "loss": 0.0299, + "step": 1811 + }, + { + "epoch": 
0.09051401168889554, + "grad_norm": 0.36105602979660034, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1812 + }, + { + "epoch": 0.09056396423397772, + "grad_norm": 0.34907209873199463, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1813 + }, + { + "epoch": 0.09061391677905989, + "grad_norm": 0.3409295976161957, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 1814 + }, + { + "epoch": 0.09066386932414207, + "grad_norm": 0.32765620946884155, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 1815 + }, + { + "epoch": 0.09071382186922423, + "grad_norm": 0.35001254081726074, + "learning_rate": 0.0001, + "loss": 0.0366, + "step": 1816 + }, + { + "epoch": 0.09076377441430641, + "grad_norm": 0.28778091073036194, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1817 + }, + { + "epoch": 0.09081372695938858, + "grad_norm": 0.28130173683166504, + "learning_rate": 0.0001, + "loss": 0.0202, + "step": 1818 + }, + { + "epoch": 0.09086367950447076, + "grad_norm": 0.37358996272087097, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1819 + }, + { + "epoch": 0.09091363204955293, + "grad_norm": 0.3650859296321869, + "learning_rate": 0.0001, + "loss": 0.0308, + "step": 1820 + }, + { + "epoch": 0.09096358459463509, + "grad_norm": 0.2514209449291229, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 1821 + }, + { + "epoch": 0.09101353713971727, + "grad_norm": 0.27899670600891113, + "learning_rate": 0.0001, + "loss": 0.0655, + "step": 1822 + }, + { + "epoch": 0.09106348968479944, + "grad_norm": 0.302004337310791, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 1823 + }, + { + "epoch": 0.09111344222988162, + "grad_norm": 0.2877510190010071, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1824 + }, + { + "epoch": 0.09116339477496378, + "grad_norm": 0.21438336372375488, + "learning_rate": 0.0001, + "loss": 0.0169, + "step": 1825 + }, + { + "epoch": 0.09121334732004596, + "grad_norm": 0.18982946872711182, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 1826 + }, + { + "epoch": 0.09126329986512813, + "grad_norm": 0.244679257273674, + "learning_rate": 0.0001, + "loss": 0.0373, + "step": 1827 + }, + { + "epoch": 0.0913132524102103, + "grad_norm": 0.24670560657978058, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 1828 + }, + { + "epoch": 0.09136320495529247, + "grad_norm": 0.2212386578321457, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 1829 + }, + { + "epoch": 0.09141315750037464, + "grad_norm": 0.24271567165851593, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1830 + }, + { + "epoch": 0.09146311004545682, + "grad_norm": 0.26327019929885864, + "learning_rate": 0.0001, + "loss": 0.0177, + "step": 1831 + }, + { + "epoch": 0.09151306259053898, + "grad_norm": 0.21437044441699982, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1832 + }, + { + "epoch": 0.09156301513562116, + "grad_norm": 0.2061043232679367, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1833 + }, + { + "epoch": 0.09161296768070333, + "grad_norm": 0.17838571965694427, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1834 + }, + { + "epoch": 0.09166292022578551, + "grad_norm": 0.31202706694602966, + "learning_rate": 0.0001, + "loss": 0.041, + "step": 1835 + }, + { + "epoch": 0.09171287277086768, + "grad_norm": 0.1951836198568344, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1836 + }, + { + "epoch": 0.09176282531594984, + "grad_norm": 0.20900475978851318, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1837 + }, + { + "epoch": 
0.09181277786103202, + "grad_norm": 0.2027607262134552, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 1838 + }, + { + "epoch": 0.09186273040611419, + "grad_norm": 0.18921923637390137, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1839 + }, + { + "epoch": 0.09191268295119637, + "grad_norm": 0.2133381962776184, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1840 + }, + { + "epoch": 0.09196263549627853, + "grad_norm": 0.19951988756656647, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1841 + }, + { + "epoch": 0.09201258804136071, + "grad_norm": 0.19957824051380157, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1842 + }, + { + "epoch": 0.09206254058644288, + "grad_norm": 0.19715861976146698, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 1843 + }, + { + "epoch": 0.09211249313152504, + "grad_norm": 0.2878347635269165, + "learning_rate": 0.0001, + "loss": 0.036, + "step": 1844 + }, + { + "epoch": 0.09216244567660722, + "grad_norm": 0.21960988640785217, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1845 + }, + { + "epoch": 0.09221239822168939, + "grad_norm": 0.21715283393859863, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 1846 + }, + { + "epoch": 0.09226235076677157, + "grad_norm": 0.2780461311340332, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 1847 + }, + { + "epoch": 0.09231230331185374, + "grad_norm": 0.2487633377313614, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1848 + }, + { + "epoch": 0.09236225585693592, + "grad_norm": 0.25785312056541443, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1849 + }, + { + "epoch": 0.09241220840201808, + "grad_norm": 0.21202358603477478, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 1850 + }, + { + "epoch": 0.09246216094710026, + "grad_norm": 0.30286717414855957, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1851 + }, + { + "epoch": 0.09251211349218243, + "grad_norm": 0.25306177139282227, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1852 + }, + { + "epoch": 0.09256206603726459, + "grad_norm": 0.20879915356636047, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 1853 + }, + { + "epoch": 0.09261201858234677, + "grad_norm": 0.1919087916612625, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 1854 + }, + { + "epoch": 0.09266197112742894, + "grad_norm": 0.2773818075656891, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 1855 + }, + { + "epoch": 0.09271192367251112, + "grad_norm": 0.2310531884431839, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1856 + }, + { + "epoch": 0.09276187621759328, + "grad_norm": 0.20729263126850128, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1857 + }, + { + "epoch": 0.09281182876267546, + "grad_norm": 0.18981070816516876, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 1858 + }, + { + "epoch": 0.09286178130775763, + "grad_norm": 0.22576245665550232, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 1859 + }, + { + "epoch": 0.09291173385283981, + "grad_norm": 0.2149481624364853, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1860 + }, + { + "epoch": 0.09296168639792197, + "grad_norm": 0.21059295535087585, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 1861 + }, + { + "epoch": 0.09301163894300414, + "grad_norm": 0.15621455013751984, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1862 + }, + { + "epoch": 0.09306159148808632, + "grad_norm": 0.1817348599433899, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 1863 + }, + { + "epoch": 
0.09311154403316849, + "grad_norm": 0.1829511523246765, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 1864 + }, + { + "epoch": 0.09316149657825067, + "grad_norm": 0.17025119066238403, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 1865 + }, + { + "epoch": 0.09321144912333283, + "grad_norm": 0.17771852016448975, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1866 + }, + { + "epoch": 0.09326140166841501, + "grad_norm": 0.15750370919704437, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 1867 + }, + { + "epoch": 0.09331135421349718, + "grad_norm": 0.3115135729312897, + "learning_rate": 0.0001, + "loss": 0.0198, + "step": 1868 + }, + { + "epoch": 0.09336130675857934, + "grad_norm": 0.21457228064537048, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 1869 + }, + { + "epoch": 0.09341125930366152, + "grad_norm": 0.2442871630191803, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 1870 + }, + { + "epoch": 0.09346121184874369, + "grad_norm": 0.24172110855579376, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1871 + }, + { + "epoch": 0.09351116439382587, + "grad_norm": 0.21375998854637146, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1872 + }, + { + "epoch": 0.09356111693890803, + "grad_norm": 0.298247367143631, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 1873 + }, + { + "epoch": 0.09361106948399021, + "grad_norm": 0.2328507900238037, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1874 + }, + { + "epoch": 0.09366102202907238, + "grad_norm": 0.2068490982055664, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1875 + }, + { + "epoch": 0.09371097457415456, + "grad_norm": 0.17441634833812714, + "learning_rate": 0.0001, + "loss": 0.1288, + "step": 1876 + }, + { + "epoch": 0.09376092711923673, + "grad_norm": 0.18199387192726135, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 1877 + }, + { + "epoch": 0.09381087966431889, + "grad_norm": 0.21443085372447968, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1878 + }, + { + "epoch": 0.09386083220940107, + "grad_norm": 0.2389848232269287, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1879 + }, + { + "epoch": 0.09391078475448324, + "grad_norm": 0.15702757239341736, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1880 + }, + { + "epoch": 0.09396073729956542, + "grad_norm": 0.28365039825439453, + "learning_rate": 0.0001, + "loss": 0.0195, + "step": 1881 + }, + { + "epoch": 0.09401068984464758, + "grad_norm": 0.22387933731079102, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 1882 + }, + { + "epoch": 0.09406064238972976, + "grad_norm": 0.26131153106689453, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1883 + }, + { + "epoch": 0.09411059493481193, + "grad_norm": 0.23959384858608246, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 1884 + }, + { + "epoch": 0.0941605474798941, + "grad_norm": 0.2729566991329193, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 1885 + }, + { + "epoch": 0.09421050002497627, + "grad_norm": 0.2456749975681305, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 1886 + }, + { + "epoch": 0.09426045257005844, + "grad_norm": 0.21685467660427094, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 1887 + }, + { + "epoch": 0.09431040511514062, + "grad_norm": 0.24356335401535034, + "learning_rate": 0.0001, + "loss": 0.1304, + "step": 1888 + }, + { + "epoch": 0.09436035766022279, + "grad_norm": 0.23982985317707062, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 1889 + }, + { + "epoch": 
0.09441031020530496, + "grad_norm": 0.40539228916168213, + "learning_rate": 0.0001, + "loss": 0.3109, + "step": 1890 + }, + { + "epoch": 0.09446026275038713, + "grad_norm": 0.30621474981307983, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 1891 + }, + { + "epoch": 0.09451021529546931, + "grad_norm": 0.28134942054748535, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1892 + }, + { + "epoch": 0.09456016784055148, + "grad_norm": 0.25676411390304565, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 1893 + }, + { + "epoch": 0.09461012038563364, + "grad_norm": 0.2408944070339203, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1894 + }, + { + "epoch": 0.09466007293071582, + "grad_norm": 0.2887301743030548, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1895 + }, + { + "epoch": 0.09471002547579799, + "grad_norm": 0.27440929412841797, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1896 + }, + { + "epoch": 0.09475997802088017, + "grad_norm": 0.25459590554237366, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 1897 + }, + { + "epoch": 0.09480993056596233, + "grad_norm": 0.27917876839637756, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 1898 + }, + { + "epoch": 0.09485988311104451, + "grad_norm": 0.41501426696777344, + "learning_rate": 0.0001, + "loss": 0.0578, + "step": 1899 + }, + { + "epoch": 0.09490983565612668, + "grad_norm": 0.32395699620246887, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 1900 + }, + { + "epoch": 0.09495978820120886, + "grad_norm": 0.30922749638557434, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 1901 + }, + { + "epoch": 0.09500974074629102, + "grad_norm": 0.3760256767272949, + "learning_rate": 0.0001, + "loss": 0.0179, + "step": 1902 + }, + { + "epoch": 0.09505969329137319, + "grad_norm": 0.3599306344985962, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 1903 + }, + { + "epoch": 0.09510964583645537, + "grad_norm": 0.3629860579967499, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 1904 + }, + { + "epoch": 0.09515959838153754, + "grad_norm": 0.32596221566200256, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 1905 + }, + { + "epoch": 0.09520955092661972, + "grad_norm": 0.2584386169910431, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 1906 + }, + { + "epoch": 0.09525950347170188, + "grad_norm": 0.33561527729034424, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 1907 + }, + { + "epoch": 0.09530945601678406, + "grad_norm": 0.2644267678260803, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 1908 + }, + { + "epoch": 0.09535940856186623, + "grad_norm": 0.3110141456127167, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 1909 + }, + { + "epoch": 0.09540936110694839, + "grad_norm": 0.31636372208595276, + "learning_rate": 0.0001, + "loss": 0.0135, + "step": 1910 + }, + { + "epoch": 0.09545931365203057, + "grad_norm": 0.29046326875686646, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 1911 + }, + { + "epoch": 0.09550926619711274, + "grad_norm": 0.24403217434883118, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 1912 + }, + { + "epoch": 0.09555921874219492, + "grad_norm": 0.2646379768848419, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 1913 + }, + { + "epoch": 0.09560917128727708, + "grad_norm": 0.2927835285663605, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1914 + }, + { + "epoch": 0.09565912383235926, + "grad_norm": 0.2924993336200714, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 1915 + }, + { + "epoch": 
0.09570907637744143, + "grad_norm": 0.3493165671825409, + "learning_rate": 0.0001, + "loss": 0.0256, + "step": 1916 + }, + { + "epoch": 0.09575902892252361, + "grad_norm": 0.3541044592857361, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 1917 + }, + { + "epoch": 0.09580898146760577, + "grad_norm": 0.3979651927947998, + "learning_rate": 0.0001, + "loss": 0.2828, + "step": 1918 + }, + { + "epoch": 0.09585893401268794, + "grad_norm": 0.25322243571281433, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 1919 + }, + { + "epoch": 0.09590888655777012, + "grad_norm": 0.252604603767395, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 1920 + }, + { + "epoch": 0.09595883910285229, + "grad_norm": 0.33269184827804565, + "learning_rate": 0.0001, + "loss": 0.1328, + "step": 1921 + }, + { + "epoch": 0.09600879164793447, + "grad_norm": 0.2815364897251129, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1922 + }, + { + "epoch": 0.09605874419301663, + "grad_norm": 0.2558574676513672, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 1923 + }, + { + "epoch": 0.09610869673809881, + "grad_norm": 0.32550159096717834, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 1924 + }, + { + "epoch": 0.09615864928318098, + "grad_norm": 0.2625240683555603, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 1925 + }, + { + "epoch": 0.09620860182826314, + "grad_norm": 0.23713257908821106, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 1926 + }, + { + "epoch": 0.09625855437334532, + "grad_norm": 0.24878673255443573, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 1927 + }, + { + "epoch": 0.09630850691842749, + "grad_norm": 0.25640052556991577, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 1928 + }, + { + "epoch": 0.09635845946350967, + "grad_norm": 0.21899272501468658, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 1929 + }, + { + "epoch": 0.09640841200859183, + "grad_norm": 0.22805753350257874, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 1930 + }, + { + "epoch": 0.09645836455367401, + "grad_norm": 0.28512439131736755, + "learning_rate": 0.0001, + "loss": 0.0264, + "step": 1931 + }, + { + "epoch": 0.09650831709875618, + "grad_norm": 0.24070577323436737, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 1932 + }, + { + "epoch": 0.09655826964383836, + "grad_norm": 0.26334148645401, + "learning_rate": 0.0001, + "loss": 0.1369, + "step": 1933 + }, + { + "epoch": 0.09660822218892053, + "grad_norm": 0.2790324091911316, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 1934 + }, + { + "epoch": 0.09665817473400269, + "grad_norm": 0.29194146394729614, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1935 + }, + { + "epoch": 0.09670812727908487, + "grad_norm": 0.28189578652381897, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 1936 + }, + { + "epoch": 0.09675807982416704, + "grad_norm": 0.22217059135437012, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 1937 + }, + { + "epoch": 0.09680803236924922, + "grad_norm": 0.25927969813346863, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1938 + }, + { + "epoch": 0.09685798491433138, + "grad_norm": 0.27148333191871643, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 1939 + }, + { + "epoch": 0.09690793745941356, + "grad_norm": 0.2890212833881378, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 1940 + }, + { + "epoch": 0.09695789000449573, + "grad_norm": 0.2313322126865387, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 1941 + }, + { + "epoch": 
0.09700784254957791, + "grad_norm": 0.27009984850883484, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 1942 + }, + { + "epoch": 0.09705779509466007, + "grad_norm": 0.20207151770591736, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 1943 + }, + { + "epoch": 0.09710774763974224, + "grad_norm": 0.25202682614326477, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1944 + }, + { + "epoch": 0.09715770018482442, + "grad_norm": 0.19184675812721252, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 1945 + }, + { + "epoch": 0.09720765272990659, + "grad_norm": 0.33376437425613403, + "learning_rate": 0.0001, + "loss": 0.0128, + "step": 1946 + }, + { + "epoch": 0.09725760527498876, + "grad_norm": 0.25942111015319824, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1947 + }, + { + "epoch": 0.09730755782007093, + "grad_norm": 0.33536550402641296, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 1948 + }, + { + "epoch": 0.09735751036515311, + "grad_norm": 0.27511876821517944, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 1949 + }, + { + "epoch": 0.09740746291023528, + "grad_norm": 0.21032533049583435, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 1950 + }, + { + "epoch": 0.09745741545531744, + "grad_norm": 0.2803173363208771, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1951 + }, + { + "epoch": 0.09750736800039962, + "grad_norm": 0.26221463084220886, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 1952 + }, + { + "epoch": 0.09755732054548179, + "grad_norm": 0.2287110537290573, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1953 + }, + { + "epoch": 0.09760727309056397, + "grad_norm": 0.20695410668849945, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 1954 + }, + { + "epoch": 0.09765722563564613, + "grad_norm": 0.20510050654411316, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 1955 + }, + { + "epoch": 0.09770717818072831, + "grad_norm": 0.2702549397945404, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 1956 + }, + { + "epoch": 0.09775713072581048, + "grad_norm": 0.25894203782081604, + "learning_rate": 0.0001, + "loss": 0.0197, + "step": 1957 + }, + { + "epoch": 0.09780708327089266, + "grad_norm": 0.2520279288291931, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 1958 + }, + { + "epoch": 0.09785703581597482, + "grad_norm": 0.2414485514163971, + "learning_rate": 0.0001, + "loss": 0.1343, + "step": 1959 + }, + { + "epoch": 0.09790698836105699, + "grad_norm": 0.2315210998058319, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 1960 + }, + { + "epoch": 0.09795694090613917, + "grad_norm": 0.2815287709236145, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 1961 + }, + { + "epoch": 0.09800689345122134, + "grad_norm": 0.2098199874162674, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 1962 + }, + { + "epoch": 0.09805684599630352, + "grad_norm": 0.4844299554824829, + "learning_rate": 0.0001, + "loss": 0.0616, + "step": 1963 + }, + { + "epoch": 0.09810679854138568, + "grad_norm": 0.251445472240448, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 1964 + }, + { + "epoch": 0.09815675108646786, + "grad_norm": 0.3004269301891327, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1965 + }, + { + "epoch": 0.09820670363155003, + "grad_norm": 0.26385605335235596, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 1966 + }, + { + "epoch": 0.09825665617663219, + "grad_norm": 0.5211923122406006, + "learning_rate": 0.0001, + "loss": 0.0483, + "step": 1967 + }, + { + "epoch": 
0.09830660872171437, + "grad_norm": 0.325265109539032, + "learning_rate": 0.0001, + "loss": 0.0227, + "step": 1968 + }, + { + "epoch": 0.09835656126679654, + "grad_norm": 0.3397527039051056, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 1969 + }, + { + "epoch": 0.09840651381187872, + "grad_norm": 0.3754309415817261, + "learning_rate": 0.0001, + "loss": 0.135, + "step": 1970 + }, + { + "epoch": 0.09845646635696088, + "grad_norm": 0.30398029088974, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 1971 + }, + { + "epoch": 0.09850641890204306, + "grad_norm": 0.31477516889572144, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 1972 + }, + { + "epoch": 0.09855637144712523, + "grad_norm": 0.4071651101112366, + "learning_rate": 0.0001, + "loss": 0.1328, + "step": 1973 + }, + { + "epoch": 0.09860632399220741, + "grad_norm": 0.3340730667114258, + "learning_rate": 0.0001, + "loss": 0.1484, + "step": 1974 + }, + { + "epoch": 0.09865627653728958, + "grad_norm": 0.3065408170223236, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 1975 + }, + { + "epoch": 0.09870622908237174, + "grad_norm": 0.2571176290512085, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 1976 + }, + { + "epoch": 0.09875618162745392, + "grad_norm": 0.2564823031425476, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 1977 + }, + { + "epoch": 0.09880613417253609, + "grad_norm": 0.32375800609588623, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 1978 + }, + { + "epoch": 0.09885608671761827, + "grad_norm": 0.26253074407577515, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 1979 + }, + { + "epoch": 0.09890603926270043, + "grad_norm": 0.28321701288223267, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 1980 + }, + { + "epoch": 0.09895599180778261, + "grad_norm": 0.2810899615287781, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 1981 + }, + { + "epoch": 0.09900594435286478, + "grad_norm": 0.24571900069713593, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 1982 + }, + { + "epoch": 0.09905589689794696, + "grad_norm": 0.22215433418750763, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 1983 + }, + { + "epoch": 0.09910584944302912, + "grad_norm": 0.34236299991607666, + "learning_rate": 0.0001, + "loss": 0.1501, + "step": 1984 + }, + { + "epoch": 0.09915580198811129, + "grad_norm": 0.23163791000843048, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 1985 + }, + { + "epoch": 0.09920575453319347, + "grad_norm": 0.227296844124794, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 1986 + }, + { + "epoch": 0.09925570707827563, + "grad_norm": 0.20877499878406525, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 1987 + }, + { + "epoch": 0.09930565962335781, + "grad_norm": 0.21166831254959106, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 1988 + }, + { + "epoch": 0.09935561216843998, + "grad_norm": 0.19640587270259857, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 1989 + }, + { + "epoch": 0.09940556471352216, + "grad_norm": 0.20353911817073822, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 1990 + }, + { + "epoch": 0.09945551725860433, + "grad_norm": 0.23778453469276428, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 1991 + }, + { + "epoch": 0.09950546980368649, + "grad_norm": 0.23972150683403015, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 1992 + }, + { + "epoch": 0.09955542234876867, + "grad_norm": 0.24521875381469727, + "learning_rate": 0.0001, + "loss": 0.0396, + "step": 1993 + }, + { + "epoch": 
0.09960537489385084, + "grad_norm": 0.18336789309978485, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 1994 + }, + { + "epoch": 0.09965532743893302, + "grad_norm": 0.2607848346233368, + "learning_rate": 0.0001, + "loss": 0.1313, + "step": 1995 + }, + { + "epoch": 0.09970527998401518, + "grad_norm": 0.21933268010616302, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 1996 + }, + { + "epoch": 0.09975523252909736, + "grad_norm": 0.2849874794483185, + "learning_rate": 0.0001, + "loss": 0.1415, + "step": 1997 + }, + { + "epoch": 0.09980518507417953, + "grad_norm": 0.24089758098125458, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 1998 + }, + { + "epoch": 0.09985513761926171, + "grad_norm": 0.30397918820381165, + "learning_rate": 0.0001, + "loss": 0.1517, + "step": 1999 + }, + { + "epoch": 0.09990509016434387, + "grad_norm": 0.3341406285762787, + "learning_rate": 0.0001, + "loss": 0.2766, + "step": 2000 + }, + { + "epoch": 0.09995504270942604, + "grad_norm": 0.18409790098667145, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2001 + }, + { + "epoch": 0.10000499525450822, + "grad_norm": 0.2385450303554535, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2002 + }, + { + "epoch": 0.10005494779959039, + "grad_norm": 0.23112653195858002, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2003 + }, + { + "epoch": 0.10010490034467256, + "grad_norm": 0.3039865493774414, + "learning_rate": 0.0001, + "loss": 0.0165, + "step": 2004 + }, + { + "epoch": 0.10015485288975473, + "grad_norm": 0.2345561534166336, + "learning_rate": 0.0001, + "loss": 0.024, + "step": 2005 + }, + { + "epoch": 0.10020480543483691, + "grad_norm": 0.2337828129529953, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2006 + }, + { + "epoch": 0.10025475797991908, + "grad_norm": 0.23875154554843903, + "learning_rate": 0.0001, + "loss": 0.129, + "step": 2007 + }, + { + "epoch": 0.10030471052500124, + "grad_norm": 0.24794462323188782, + "learning_rate": 0.0001, + "loss": 0.0185, + "step": 2008 + }, + { + "epoch": 0.10035466307008342, + "grad_norm": 0.2457248717546463, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2009 + }, + { + "epoch": 0.10040461561516559, + "grad_norm": 0.27130013704299927, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2010 + }, + { + "epoch": 0.10045456816024777, + "grad_norm": 0.2645663022994995, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2011 + }, + { + "epoch": 0.10050452070532993, + "grad_norm": 0.21502409875392914, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2012 + }, + { + "epoch": 0.10055447325041211, + "grad_norm": 0.21516689658164978, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2013 + }, + { + "epoch": 0.10060442579549428, + "grad_norm": 0.3221023678779602, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 2014 + }, + { + "epoch": 0.10065437834057646, + "grad_norm": 0.22555257380008698, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2015 + }, + { + "epoch": 0.10070433088565862, + "grad_norm": 0.18941229581832886, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2016 + }, + { + "epoch": 0.10075428343074079, + "grad_norm": 0.1894795000553131, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2017 + }, + { + "epoch": 0.10080423597582297, + "grad_norm": 0.24475698173046112, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 2018 + }, + { + "epoch": 0.10085418852090514, + "grad_norm": 0.3015715777873993, + "learning_rate": 0.0001, + "loss": 0.1412, + "step": 2019 + }, + { + "epoch": 
0.10090414106598732, + "grad_norm": 0.2149016410112381, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2020 + }, + { + "epoch": 0.10095409361106948, + "grad_norm": 0.2940464913845062, + "learning_rate": 0.0001, + "loss": 0.1384, + "step": 2021 + }, + { + "epoch": 0.10100404615615166, + "grad_norm": 0.29556411504745483, + "learning_rate": 0.0001, + "loss": 0.0118, + "step": 2022 + }, + { + "epoch": 0.10105399870123383, + "grad_norm": 0.24434804916381836, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2023 + }, + { + "epoch": 0.101103951246316, + "grad_norm": 0.22446304559707642, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 2024 + }, + { + "epoch": 0.10115390379139817, + "grad_norm": 0.22421447932720184, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2025 + }, + { + "epoch": 0.10120385633648034, + "grad_norm": 0.2533787786960602, + "learning_rate": 0.0001, + "loss": 0.0426, + "step": 2026 + }, + { + "epoch": 0.10125380888156252, + "grad_norm": 0.23472051322460175, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2027 + }, + { + "epoch": 0.10130376142664468, + "grad_norm": 0.22423769533634186, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2028 + }, + { + "epoch": 0.10135371397172686, + "grad_norm": 0.3779785931110382, + "learning_rate": 0.0001, + "loss": 0.1594, + "step": 2029 + }, + { + "epoch": 0.10140366651680903, + "grad_norm": 0.21926216781139374, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2030 + }, + { + "epoch": 0.10145361906189121, + "grad_norm": 0.17416523396968842, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2031 + }, + { + "epoch": 0.10150357160697338, + "grad_norm": 0.31492090225219727, + "learning_rate": 0.0001, + "loss": 0.1381, + "step": 2032 + }, + { + "epoch": 0.10155352415205554, + "grad_norm": 0.27317342162132263, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 2033 + }, + { + "epoch": 0.10160347669713772, + "grad_norm": 0.21215447783470154, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2034 + }, + { + "epoch": 0.10165342924221989, + "grad_norm": 0.223892942070961, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 2035 + }, + { + "epoch": 0.10170338178730207, + "grad_norm": 0.1989050656557083, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2036 + }, + { + "epoch": 0.10175333433238423, + "grad_norm": 0.2892208993434906, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 2037 + }, + { + "epoch": 0.10180328687746641, + "grad_norm": 0.2148531675338745, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2038 + }, + { + "epoch": 0.10185323942254858, + "grad_norm": 0.27908676862716675, + "learning_rate": 0.0001, + "loss": 0.0308, + "step": 2039 + }, + { + "epoch": 0.10190319196763076, + "grad_norm": 0.21705889701843262, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2040 + }, + { + "epoch": 0.10195314451271292, + "grad_norm": 0.2209034264087677, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2041 + }, + { + "epoch": 0.10200309705779509, + "grad_norm": 0.20556102693080902, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 2042 + }, + { + "epoch": 0.10205304960287727, + "grad_norm": 0.1919875591993332, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2043 + }, + { + "epoch": 0.10210300214795943, + "grad_norm": 0.20895443856716156, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2044 + }, + { + "epoch": 0.10215295469304161, + "grad_norm": 0.21372191607952118, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 2045 + }, + { + "epoch": 
0.10220290723812378, + "grad_norm": 0.17878863215446472, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2046 + }, + { + "epoch": 0.10225285978320596, + "grad_norm": 0.20958876609802246, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2047 + }, + { + "epoch": 0.10230281232828813, + "grad_norm": 0.18787242472171783, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 2048 + }, + { + "epoch": 0.10235276487337029, + "grad_norm": 0.3316304385662079, + "learning_rate": 0.0001, + "loss": 0.1338, + "step": 2049 + }, + { + "epoch": 0.10240271741845247, + "grad_norm": 0.21771804988384247, + "learning_rate": 0.0001, + "loss": 0.1334, + "step": 2050 + }, + { + "epoch": 0.10245266996353464, + "grad_norm": 0.3974664509296417, + "learning_rate": 0.0001, + "loss": 0.0388, + "step": 2051 + }, + { + "epoch": 0.10250262250861682, + "grad_norm": 0.26742973923683167, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2052 + }, + { + "epoch": 0.10255257505369898, + "grad_norm": 0.2082136571407318, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2053 + }, + { + "epoch": 0.10260252759878116, + "grad_norm": 0.20226866006851196, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2054 + }, + { + "epoch": 0.10265248014386333, + "grad_norm": 0.2591339945793152, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2055 + }, + { + "epoch": 0.10270243268894551, + "grad_norm": 0.23722143471240997, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 2056 + }, + { + "epoch": 0.10275238523402767, + "grad_norm": 0.263594388961792, + "learning_rate": 0.0001, + "loss": 0.0161, + "step": 2057 + }, + { + "epoch": 0.10280233777910984, + "grad_norm": 0.19590389728546143, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2058 + }, + { + "epoch": 0.10285229032419202, + "grad_norm": 0.2847522497177124, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 2059 + }, + { + "epoch": 0.10290224286927419, + "grad_norm": 0.27224746346473694, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 2060 + }, + { + "epoch": 0.10295219541435637, + "grad_norm": 0.22569626569747925, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2061 + }, + { + "epoch": 0.10300214795943853, + "grad_norm": 0.32175514101982117, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2062 + }, + { + "epoch": 0.10305210050452071, + "grad_norm": 0.28382188081741333, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2063 + }, + { + "epoch": 0.10310205304960288, + "grad_norm": 0.26934340596199036, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 2064 + }, + { + "epoch": 0.10315200559468504, + "grad_norm": 0.30512669682502747, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 2065 + }, + { + "epoch": 0.10320195813976722, + "grad_norm": 0.30272218585014343, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2066 + }, + { + "epoch": 0.10325191068484939, + "grad_norm": 0.29907792806625366, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 2067 + }, + { + "epoch": 0.10330186322993157, + "grad_norm": 0.2637840211391449, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2068 + }, + { + "epoch": 0.10335181577501373, + "grad_norm": 0.3260914981365204, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 2069 + }, + { + "epoch": 0.10340176832009591, + "grad_norm": 0.23072031140327454, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2070 + }, + { + "epoch": 0.10345172086517808, + "grad_norm": 0.35831418633461, + "learning_rate": 0.0001, + "loss": 0.0191, + "step": 2071 + }, + { + "epoch": 
0.10350167341026026, + "grad_norm": 0.23096977174282074, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2072 + }, + { + "epoch": 0.10355162595534242, + "grad_norm": 0.28077977895736694, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 2073 + }, + { + "epoch": 0.10360157850042459, + "grad_norm": 0.28001582622528076, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2074 + }, + { + "epoch": 0.10365153104550677, + "grad_norm": 0.2509737014770508, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2075 + }, + { + "epoch": 0.10370148359058894, + "grad_norm": 0.19674645364284515, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 2076 + }, + { + "epoch": 0.10375143613567112, + "grad_norm": 0.20746563374996185, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2077 + }, + { + "epoch": 0.10380138868075328, + "grad_norm": 0.24871309101581573, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 2078 + }, + { + "epoch": 0.10385134122583546, + "grad_norm": 0.28596457839012146, + "learning_rate": 0.0001, + "loss": 0.0249, + "step": 2079 + }, + { + "epoch": 0.10390129377091763, + "grad_norm": 0.17162007093429565, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2080 + }, + { + "epoch": 0.1039512463159998, + "grad_norm": 0.22193600237369537, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 2081 + }, + { + "epoch": 0.10400119886108197, + "grad_norm": 0.24528884887695312, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 2082 + }, + { + "epoch": 0.10405115140616414, + "grad_norm": 0.16820532083511353, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2083 + }, + { + "epoch": 0.10410110395124632, + "grad_norm": 0.1477731168270111, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 2084 + }, + { + "epoch": 0.10415105649632848, + "grad_norm": 0.26434457302093506, + "learning_rate": 0.0001, + "loss": 0.0154, + "step": 2085 + }, + { + "epoch": 0.10420100904141066, + "grad_norm": 0.21462100744247437, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2086 + }, + { + "epoch": 0.10425096158649283, + "grad_norm": 0.22021786868572235, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2087 + }, + { + "epoch": 0.10430091413157501, + "grad_norm": 0.22027264535427094, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2088 + }, + { + "epoch": 0.10435086667665718, + "grad_norm": 0.21367768943309784, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2089 + }, + { + "epoch": 0.10440081922173934, + "grad_norm": 0.2383817732334137, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2090 + }, + { + "epoch": 0.10445077176682152, + "grad_norm": 0.19569706916809082, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2091 + }, + { + "epoch": 0.10450072431190369, + "grad_norm": 0.2182304859161377, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 2092 + }, + { + "epoch": 0.10455067685698587, + "grad_norm": 0.20080821216106415, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2093 + }, + { + "epoch": 0.10460062940206803, + "grad_norm": 0.28222429752349854, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2094 + }, + { + "epoch": 0.10465058194715021, + "grad_norm": 0.21512216329574585, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2095 + }, + { + "epoch": 0.10470053449223238, + "grad_norm": 0.34900370240211487, + "learning_rate": 0.0001, + "loss": 0.0321, + "step": 2096 + }, + { + "epoch": 0.10475048703731456, + "grad_norm": 0.3186693787574768, + "learning_rate": 0.0001, + "loss": 0.0303, + "step": 2097 + }, + { + "epoch": 
0.10480043958239672, + "grad_norm": 0.305541455745697, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2098 + }, + { + "epoch": 0.10485039212747889, + "grad_norm": 0.34874236583709717, + "learning_rate": 0.0001, + "loss": 0.0192, + "step": 2099 + }, + { + "epoch": 0.10490034467256107, + "grad_norm": 0.1988580822944641, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2100 + }, + { + "epoch": 0.10495029721764323, + "grad_norm": 0.3637520670890808, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 2101 + }, + { + "epoch": 0.10500024976272541, + "grad_norm": 0.2665212154388428, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2102 + }, + { + "epoch": 0.10505020230780758, + "grad_norm": 0.23124584555625916, + "learning_rate": 0.0001, + "loss": 0.0094, + "step": 2103 + }, + { + "epoch": 0.10510015485288976, + "grad_norm": 0.2857670187950134, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2104 + }, + { + "epoch": 0.10515010739797193, + "grad_norm": 0.25949999690055847, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2105 + }, + { + "epoch": 0.10520005994305409, + "grad_norm": 0.2364080846309662, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2106 + }, + { + "epoch": 0.10525001248813627, + "grad_norm": 0.28932756185531616, + "learning_rate": 0.0001, + "loss": 0.1523, + "step": 2107 + }, + { + "epoch": 0.10529996503321844, + "grad_norm": 0.2473563551902771, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 2108 + }, + { + "epoch": 0.10534991757830062, + "grad_norm": 0.2392590492963791, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2109 + }, + { + "epoch": 0.10539987012338278, + "grad_norm": 0.21343746781349182, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2110 + }, + { + "epoch": 0.10544982266846496, + "grad_norm": 0.21515460312366486, + "learning_rate": 0.0001, + "loss": 0.0172, + "step": 2111 + }, + { + "epoch": 0.10549977521354713, + "grad_norm": 0.24257878959178925, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2112 + }, + { + "epoch": 0.10554972775862931, + "grad_norm": 0.28070011734962463, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2113 + }, + { + "epoch": 0.10559968030371147, + "grad_norm": 0.21566183865070343, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2114 + }, + { + "epoch": 0.10564963284879364, + "grad_norm": 0.2064676433801651, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 2115 + }, + { + "epoch": 0.10569958539387582, + "grad_norm": 0.22648639976978302, + "learning_rate": 0.0001, + "loss": 0.0144, + "step": 2116 + }, + { + "epoch": 0.10574953793895799, + "grad_norm": 0.3192604184150696, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2117 + }, + { + "epoch": 0.10579949048404017, + "grad_norm": 0.1914358288049698, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2118 + }, + { + "epoch": 0.10584944302912233, + "grad_norm": 0.17240358889102936, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2119 + }, + { + "epoch": 0.10589939557420451, + "grad_norm": 0.22068972885608673, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 2120 + }, + { + "epoch": 0.10594934811928668, + "grad_norm": 0.37300145626068115, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2121 + }, + { + "epoch": 0.10599930066436886, + "grad_norm": 0.3031572103500366, + "learning_rate": 0.0001, + "loss": 0.0218, + "step": 2122 + }, + { + "epoch": 0.10604925320945102, + "grad_norm": 0.23878322541713715, + "learning_rate": 0.0001, + "loss": 0.1339, + "step": 2123 + }, + { + "epoch": 
0.10609920575453319, + "grad_norm": 0.25794219970703125, + "learning_rate": 0.0001, + "loss": 0.1274, + "step": 2124 + }, + { + "epoch": 0.10614915829961537, + "grad_norm": 0.29717522859573364, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2125 + }, + { + "epoch": 0.10619911084469753, + "grad_norm": 0.26755639910697937, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2126 + }, + { + "epoch": 0.10624906338977971, + "grad_norm": 0.2600543200969696, + "learning_rate": 0.0001, + "loss": 0.0145, + "step": 2127 + }, + { + "epoch": 0.10629901593486188, + "grad_norm": 0.18124428391456604, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2128 + }, + { + "epoch": 0.10634896847994406, + "grad_norm": 0.2927054464817047, + "learning_rate": 0.0001, + "loss": 0.0358, + "step": 2129 + }, + { + "epoch": 0.10639892102502622, + "grad_norm": 0.2734633982181549, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2130 + }, + { + "epoch": 0.10644887357010839, + "grad_norm": 0.2210836410522461, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2131 + }, + { + "epoch": 0.10649882611519057, + "grad_norm": 0.19746476411819458, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2132 + }, + { + "epoch": 0.10654877866027274, + "grad_norm": 0.2875996232032776, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 2133 + }, + { + "epoch": 0.10659873120535492, + "grad_norm": 0.3104568123817444, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2134 + }, + { + "epoch": 0.10664868375043708, + "grad_norm": 0.26720166206359863, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2135 + }, + { + "epoch": 0.10669863629551926, + "grad_norm": 0.21292056143283844, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2136 + }, + { + "epoch": 0.10674858884060143, + "grad_norm": 0.23957596719264984, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 2137 + }, + { + "epoch": 0.1067985413856836, + "grad_norm": 0.3021026849746704, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 2138 + }, + { + "epoch": 0.10684849393076577, + "grad_norm": 0.23580065369606018, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2139 + }, + { + "epoch": 0.10689844647584794, + "grad_norm": 0.20133650302886963, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 2140 + }, + { + "epoch": 0.10694839902093012, + "grad_norm": 0.2300414890050888, + "learning_rate": 0.0001, + "loss": 0.0559, + "step": 2141 + }, + { + "epoch": 0.10699835156601228, + "grad_norm": 0.2200678288936615, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2142 + }, + { + "epoch": 0.10704830411109446, + "grad_norm": 0.2545255720615387, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2143 + }, + { + "epoch": 0.10709825665617663, + "grad_norm": 0.1905806064605713, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2144 + }, + { + "epoch": 0.10714820920125881, + "grad_norm": 0.15534092485904694, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2145 + }, + { + "epoch": 0.10719816174634098, + "grad_norm": 0.1781816929578781, + "learning_rate": 0.0001, + "loss": 0.1292, + "step": 2146 + }, + { + "epoch": 0.10724811429142314, + "grad_norm": 0.2047697901725769, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2147 + }, + { + "epoch": 0.10729806683650532, + "grad_norm": 0.27110612392425537, + "learning_rate": 0.0001, + "loss": 0.1252, + "step": 2148 + }, + { + "epoch": 0.10734801938158749, + "grad_norm": 0.20358972251415253, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 2149 + }, + { + "epoch": 
0.10739797192666967, + "grad_norm": 0.1410055011510849, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2150 + }, + { + "epoch": 0.10744792447175183, + "grad_norm": 0.1834319829940796, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2151 + }, + { + "epoch": 0.10749787701683401, + "grad_norm": 0.22071295976638794, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 2152 + }, + { + "epoch": 0.10754782956191618, + "grad_norm": 0.2011844962835312, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 2153 + }, + { + "epoch": 0.10759778210699836, + "grad_norm": 0.17335228621959686, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2154 + }, + { + "epoch": 0.10764773465208052, + "grad_norm": 0.19473406672477722, + "learning_rate": 0.0001, + "loss": 0.127, + "step": 2155 + }, + { + "epoch": 0.10769768719716269, + "grad_norm": 0.3050709068775177, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 2156 + }, + { + "epoch": 0.10774763974224487, + "grad_norm": 0.2782425284385681, + "learning_rate": 0.0001, + "loss": 0.1415, + "step": 2157 + }, + { + "epoch": 0.10779759228732703, + "grad_norm": 0.250174880027771, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 2158 + }, + { + "epoch": 0.10784754483240921, + "grad_norm": 0.3102562129497528, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2159 + }, + { + "epoch": 0.10789749737749138, + "grad_norm": 0.23188622295856476, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2160 + }, + { + "epoch": 0.10794744992257356, + "grad_norm": 0.2806209325790405, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2161 + }, + { + "epoch": 0.10799740246765573, + "grad_norm": 0.3430045247077942, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 2162 + }, + { + "epoch": 0.1080473550127379, + "grad_norm": 0.3230925500392914, + "learning_rate": 0.0001, + "loss": 0.1461, + "step": 2163 + }, + { + "epoch": 0.10809730755782007, + "grad_norm": 0.24390915036201477, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2164 + }, + { + "epoch": 0.10814726010290224, + "grad_norm": 0.2711476981639862, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2165 + }, + { + "epoch": 0.10819721264798442, + "grad_norm": 0.3381292223930359, + "learning_rate": 0.0001, + "loss": 0.0101, + "step": 2166 + }, + { + "epoch": 0.10824716519306658, + "grad_norm": 0.32711660861968994, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 2167 + }, + { + "epoch": 0.10829711773814876, + "grad_norm": 0.20159262418746948, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2168 + }, + { + "epoch": 0.10834707028323093, + "grad_norm": 0.2688027024269104, + "learning_rate": 0.0001, + "loss": 0.0345, + "step": 2169 + }, + { + "epoch": 0.10839702282831311, + "grad_norm": 0.3103504478931427, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 2170 + }, + { + "epoch": 0.10844697537339527, + "grad_norm": 0.3429497182369232, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2171 + }, + { + "epoch": 0.10849692791847744, + "grad_norm": 0.31551840901374817, + "learning_rate": 0.0001, + "loss": 0.0127, + "step": 2172 + }, + { + "epoch": 0.10854688046355962, + "grad_norm": 0.2695477306842804, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2173 + }, + { + "epoch": 0.10859683300864179, + "grad_norm": 0.36273810267448425, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2174 + }, + { + "epoch": 0.10864678555372397, + "grad_norm": 0.3582594096660614, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2175 + }, + { + "epoch": 
0.10869673809880613, + "grad_norm": 0.3190753161907196, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 2176 + }, + { + "epoch": 0.10874669064388831, + "grad_norm": 0.2631779909133911, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 2177 + }, + { + "epoch": 0.10879664318897048, + "grad_norm": 0.2882734537124634, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 2178 + }, + { + "epoch": 0.10884659573405266, + "grad_norm": 0.3499993681907654, + "learning_rate": 0.0001, + "loss": 0.1319, + "step": 2179 + }, + { + "epoch": 0.10889654827913482, + "grad_norm": 0.29931631684303284, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2180 + }, + { + "epoch": 0.10894650082421699, + "grad_norm": 0.21697859466075897, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2181 + }, + { + "epoch": 0.10899645336929917, + "grad_norm": 0.3147348463535309, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2182 + }, + { + "epoch": 0.10904640591438133, + "grad_norm": 0.297290563583374, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2183 + }, + { + "epoch": 0.10909635845946351, + "grad_norm": 0.22592008113861084, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 2184 + }, + { + "epoch": 0.10914631100454568, + "grad_norm": 0.15813447535037994, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2185 + }, + { + "epoch": 0.10919626354962786, + "grad_norm": 0.17473800480365753, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2186 + }, + { + "epoch": 0.10924621609471002, + "grad_norm": 0.22383807599544525, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2187 + }, + { + "epoch": 0.10929616863979219, + "grad_norm": 0.1994430124759674, + "learning_rate": 0.0001, + "loss": 0.1287, + "step": 2188 + }, + { + "epoch": 0.10934612118487437, + "grad_norm": 0.15517796576023102, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2189 + }, + { + "epoch": 0.10939607372995654, + "grad_norm": 0.19022881984710693, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2190 + }, + { + "epoch": 0.10944602627503872, + "grad_norm": 0.2515469491481781, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 2191 + }, + { + "epoch": 0.10949597882012088, + "grad_norm": 0.15049870312213898, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2192 + }, + { + "epoch": 0.10954593136520306, + "grad_norm": 0.17204663157463074, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2193 + }, + { + "epoch": 0.10959588391028523, + "grad_norm": 0.1796512007713318, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2194 + }, + { + "epoch": 0.10964583645536741, + "grad_norm": 0.2593468129634857, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2195 + }, + { + "epoch": 0.10969578900044957, + "grad_norm": 0.14687132835388184, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2196 + }, + { + "epoch": 0.10974574154553174, + "grad_norm": 0.17837032675743103, + "learning_rate": 0.0001, + "loss": 0.0127, + "step": 2197 + }, + { + "epoch": 0.10979569409061392, + "grad_norm": 0.20156806707382202, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 2198 + }, + { + "epoch": 0.10984564663569608, + "grad_norm": 0.1971786469221115, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2199 + }, + { + "epoch": 0.10989559918077826, + "grad_norm": 0.20324255526065826, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2200 + }, + { + "epoch": 0.10994555172586043, + "grad_norm": 0.18014684319496155, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2201 + }, + { + "epoch": 
0.10999550427094261, + "grad_norm": 0.21959227323532104, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2202 + }, + { + "epoch": 0.11004545681602478, + "grad_norm": 0.23557381331920624, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 2203 + }, + { + "epoch": 0.11009540936110696, + "grad_norm": 0.19850023090839386, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 2204 + }, + { + "epoch": 0.11014536190618912, + "grad_norm": 0.16137439012527466, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 2205 + }, + { + "epoch": 0.11019531445127129, + "grad_norm": 0.22601252794265747, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2206 + }, + { + "epoch": 0.11024526699635347, + "grad_norm": 0.25681182742118835, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2207 + }, + { + "epoch": 0.11029521954143563, + "grad_norm": 0.28846001625061035, + "learning_rate": 0.0001, + "loss": 0.0329, + "step": 2208 + }, + { + "epoch": 0.11034517208651781, + "grad_norm": 0.23640328645706177, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 2209 + }, + { + "epoch": 0.11039512463159998, + "grad_norm": 0.2235705554485321, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2210 + }, + { + "epoch": 0.11044507717668216, + "grad_norm": 0.2525349259376526, + "learning_rate": 0.0001, + "loss": 0.0246, + "step": 2211 + }, + { + "epoch": 0.11049502972176432, + "grad_norm": 0.257894903421402, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2212 + }, + { + "epoch": 0.11054498226684649, + "grad_norm": 0.23469959199428558, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2213 + }, + { + "epoch": 0.11059493481192867, + "grad_norm": 0.23916564881801605, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2214 + }, + { + "epoch": 0.11064488735701083, + "grad_norm": 0.28566715121269226, + "learning_rate": 0.0001, + "loss": 0.0258, + "step": 2215 + }, + { + "epoch": 0.11069483990209301, + "grad_norm": 0.2851690649986267, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2216 + }, + { + "epoch": 0.11074479244717518, + "grad_norm": 0.2537008821964264, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2217 + }, + { + "epoch": 0.11079474499225736, + "grad_norm": 0.28410273790359497, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 2218 + }, + { + "epoch": 0.11084469753733953, + "grad_norm": 0.2158711701631546, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 2219 + }, + { + "epoch": 0.1108946500824217, + "grad_norm": 0.31852614879608154, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 2220 + }, + { + "epoch": 0.11094460262750387, + "grad_norm": 0.2815430760383606, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2221 + }, + { + "epoch": 0.11099455517258604, + "grad_norm": 0.20181027054786682, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2222 + }, + { + "epoch": 0.11104450771766822, + "grad_norm": 0.2423340231180191, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2223 + }, + { + "epoch": 0.11109446026275038, + "grad_norm": 0.220139741897583, + "learning_rate": 0.0001, + "loss": 0.1332, + "step": 2224 + }, + { + "epoch": 0.11114441280783256, + "grad_norm": 0.2398723065853119, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2225 + }, + { + "epoch": 0.11119436535291473, + "grad_norm": 0.20871195197105408, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2226 + }, + { + "epoch": 0.11124431789799691, + "grad_norm": 0.18858702480793, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2227 + }, + { + "epoch": 
0.11129427044307907, + "grad_norm": 0.307672381401062, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 2228 + }, + { + "epoch": 0.11134422298816124, + "grad_norm": 0.24694712460041046, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2229 + }, + { + "epoch": 0.11139417553324342, + "grad_norm": 0.21196602284908295, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2230 + }, + { + "epoch": 0.11144412807832559, + "grad_norm": 0.2713146209716797, + "learning_rate": 0.0001, + "loss": 0.0258, + "step": 2231 + }, + { + "epoch": 0.11149408062340777, + "grad_norm": 0.2405003011226654, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 2232 + }, + { + "epoch": 0.11154403316848993, + "grad_norm": 0.19537682831287384, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2233 + }, + { + "epoch": 0.11159398571357211, + "grad_norm": 0.21514160931110382, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2234 + }, + { + "epoch": 0.11164393825865428, + "grad_norm": 0.19849887490272522, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2235 + }, + { + "epoch": 0.11169389080373646, + "grad_norm": 0.23659482598304749, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2236 + }, + { + "epoch": 0.11174384334881862, + "grad_norm": 0.18330146372318268, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2237 + }, + { + "epoch": 0.11179379589390079, + "grad_norm": 0.1766861081123352, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2238 + }, + { + "epoch": 0.11184374843898297, + "grad_norm": 0.31213128566741943, + "learning_rate": 0.0001, + "loss": 0.035, + "step": 2239 + }, + { + "epoch": 0.11189370098406513, + "grad_norm": 0.25055697560310364, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2240 + }, + { + "epoch": 0.11194365352914731, + "grad_norm": 0.25342628359794617, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2241 + }, + { + "epoch": 0.11199360607422948, + "grad_norm": 0.218577578663826, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2242 + }, + { + "epoch": 0.11204355861931166, + "grad_norm": 0.2428862303495407, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2243 + }, + { + "epoch": 0.11209351116439382, + "grad_norm": 0.2745761275291443, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 2244 + }, + { + "epoch": 0.112143463709476, + "grad_norm": 0.19137512147426605, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2245 + }, + { + "epoch": 0.11219341625455817, + "grad_norm": 0.17397794127464294, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2246 + }, + { + "epoch": 0.11224336879964034, + "grad_norm": 0.21367129683494568, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2247 + }, + { + "epoch": 0.11229332134472252, + "grad_norm": 0.1902260035276413, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2248 + }, + { + "epoch": 0.11234327388980468, + "grad_norm": 0.21359972655773163, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 2249 + }, + { + "epoch": 0.11239322643488686, + "grad_norm": 0.2343234121799469, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2250 + }, + { + "epoch": 0.11244317897996903, + "grad_norm": 0.18912813067436218, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2251 + }, + { + "epoch": 0.11249313152505121, + "grad_norm": 0.1813860684633255, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2252 + }, + { + "epoch": 0.11254308407013337, + "grad_norm": 0.20411303639411926, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2253 + }, + { + "epoch": 
0.11259303661521554, + "grad_norm": 0.14728225767612457, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2254 + }, + { + "epoch": 0.11264298916029772, + "grad_norm": 0.1271784007549286, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2255 + }, + { + "epoch": 0.11269294170537988, + "grad_norm": 0.1865532100200653, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2256 + }, + { + "epoch": 0.11274289425046206, + "grad_norm": 0.22781462967395782, + "learning_rate": 0.0001, + "loss": 0.0363, + "step": 2257 + }, + { + "epoch": 0.11279284679554423, + "grad_norm": 0.1555604487657547, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2258 + }, + { + "epoch": 0.11284279934062641, + "grad_norm": 0.14204396307468414, + "learning_rate": 0.0001, + "loss": 0.1268, + "step": 2259 + }, + { + "epoch": 0.11289275188570858, + "grad_norm": 0.21880283951759338, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 2260 + }, + { + "epoch": 0.11294270443079076, + "grad_norm": 0.21247421205043793, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 2261 + }, + { + "epoch": 0.11299265697587292, + "grad_norm": 0.2002231627702713, + "learning_rate": 0.0001, + "loss": 0.0158, + "step": 2262 + }, + { + "epoch": 0.11304260952095509, + "grad_norm": 0.15519559383392334, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2263 + }, + { + "epoch": 0.11309256206603727, + "grad_norm": 0.2388724684715271, + "learning_rate": 0.0001, + "loss": 0.1295, + "step": 2264 + }, + { + "epoch": 0.11314251461111943, + "grad_norm": 0.19477128982543945, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2265 + }, + { + "epoch": 0.11319246715620161, + "grad_norm": 0.19303426146507263, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2266 + }, + { + "epoch": 0.11324241970128378, + "grad_norm": 0.21362973749637604, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2267 + }, + { + "epoch": 0.11329237224636596, + "grad_norm": 0.22114557027816772, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 2268 + }, + { + "epoch": 0.11334232479144812, + "grad_norm": 0.16505174338817596, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2269 + }, + { + "epoch": 0.11339227733653029, + "grad_norm": 0.16489459574222565, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2270 + }, + { + "epoch": 0.11344222988161247, + "grad_norm": 0.1758997142314911, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2271 + }, + { + "epoch": 0.11349218242669463, + "grad_norm": 0.1755293309688568, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2272 + }, + { + "epoch": 0.11354213497177681, + "grad_norm": 0.18737375736236572, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2273 + }, + { + "epoch": 0.11359208751685898, + "grad_norm": 0.2101432830095291, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2274 + }, + { + "epoch": 0.11364204006194116, + "grad_norm": 0.2162851095199585, + "learning_rate": 0.0001, + "loss": 0.1293, + "step": 2275 + }, + { + "epoch": 0.11369199260702333, + "grad_norm": 0.14842632412910461, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 2276 + }, + { + "epoch": 0.1137419451521055, + "grad_norm": 0.19310474395751953, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 2277 + }, + { + "epoch": 0.11379189769718767, + "grad_norm": 0.17069414258003235, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2278 + }, + { + "epoch": 0.11384185024226984, + "grad_norm": 0.16902130842208862, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2279 + }, + { + "epoch": 
0.11389180278735202, + "grad_norm": 0.2093765139579773, + "learning_rate": 0.0001, + "loss": 0.1245, + "step": 2280 + }, + { + "epoch": 0.11394175533243418, + "grad_norm": 0.16776444017887115, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2281 + }, + { + "epoch": 0.11399170787751636, + "grad_norm": 0.23320968449115753, + "learning_rate": 0.0001, + "loss": 0.0132, + "step": 2282 + }, + { + "epoch": 0.11404166042259853, + "grad_norm": 0.15788188576698303, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2283 + }, + { + "epoch": 0.11409161296768071, + "grad_norm": 0.2229970097541809, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2284 + }, + { + "epoch": 0.11414156551276287, + "grad_norm": 0.21586258709430695, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 2285 + }, + { + "epoch": 0.11419151805784505, + "grad_norm": 0.3470185697078705, + "learning_rate": 0.0001, + "loss": 0.2751, + "step": 2286 + }, + { + "epoch": 0.11424147060292722, + "grad_norm": 0.1913575828075409, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2287 + }, + { + "epoch": 0.11429142314800939, + "grad_norm": 0.22892150282859802, + "learning_rate": 0.0001, + "loss": 0.1258, + "step": 2288 + }, + { + "epoch": 0.11434137569309157, + "grad_norm": 0.1996409147977829, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2289 + }, + { + "epoch": 0.11439132823817373, + "grad_norm": 0.19257204234600067, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2290 + }, + { + "epoch": 0.11444128078325591, + "grad_norm": 0.1539018452167511, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2291 + }, + { + "epoch": 0.11449123332833808, + "grad_norm": 0.16901084780693054, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2292 + }, + { + "epoch": 0.11454118587342026, + "grad_norm": 0.29720339179039, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 2293 + }, + { + "epoch": 0.11459113841850242, + "grad_norm": 0.16825993359088898, + "learning_rate": 0.0001, + "loss": 0.1262, + "step": 2294 + }, + { + "epoch": 0.11464109096358459, + "grad_norm": 0.1848592460155487, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2295 + }, + { + "epoch": 0.11469104350866677, + "grad_norm": 0.23057973384857178, + "learning_rate": 0.0001, + "loss": 0.1292, + "step": 2296 + }, + { + "epoch": 0.11474099605374893, + "grad_norm": 0.19288164377212524, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2297 + }, + { + "epoch": 0.11479094859883111, + "grad_norm": 0.17374949157238007, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 2298 + }, + { + "epoch": 0.11484090114391328, + "grad_norm": 0.18050412833690643, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 2299 + }, + { + "epoch": 0.11489085368899546, + "grad_norm": 0.15082721412181854, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2300 + }, + { + "epoch": 0.11494080623407762, + "grad_norm": 0.1823042929172516, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2301 + }, + { + "epoch": 0.1149907587791598, + "grad_norm": 0.18714654445648193, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2302 + }, + { + "epoch": 0.11504071132424197, + "grad_norm": 0.15101534128189087, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 2303 + }, + { + "epoch": 0.11509066386932414, + "grad_norm": 0.1934947520494461, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2304 + }, + { + "epoch": 0.11514061641440632, + "grad_norm": 0.1746787130832672, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2305 + }, + { + "epoch": 
0.11519056895948848, + "grad_norm": 0.18155424296855927, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2306 + }, + { + "epoch": 0.11524052150457066, + "grad_norm": 0.13829144835472107, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 2307 + }, + { + "epoch": 0.11529047404965283, + "grad_norm": 0.15389636158943176, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2308 + }, + { + "epoch": 0.11534042659473501, + "grad_norm": 0.18505904078483582, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2309 + }, + { + "epoch": 0.11539037913981717, + "grad_norm": 0.1702936440706253, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2310 + }, + { + "epoch": 0.11544033168489934, + "grad_norm": 0.20754021406173706, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2311 + }, + { + "epoch": 0.11549028422998152, + "grad_norm": 0.15918612480163574, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2312 + }, + { + "epoch": 0.11554023677506368, + "grad_norm": 0.157228022813797, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2313 + }, + { + "epoch": 0.11559018932014586, + "grad_norm": 0.2691769003868103, + "learning_rate": 0.0001, + "loss": 0.0242, + "step": 2314 + }, + { + "epoch": 0.11564014186522803, + "grad_norm": 0.21081915497779846, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2315 + }, + { + "epoch": 0.11569009441031021, + "grad_norm": 0.2069864124059677, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2316 + }, + { + "epoch": 0.11574004695539238, + "grad_norm": 0.1860281527042389, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2317 + }, + { + "epoch": 0.11578999950047456, + "grad_norm": 0.3776683211326599, + "learning_rate": 0.0001, + "loss": 0.0226, + "step": 2318 + }, + { + "epoch": 0.11583995204555672, + "grad_norm": 0.21087658405303955, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 2319 + }, + { + "epoch": 0.11588990459063889, + "grad_norm": 0.27538251876831055, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2320 + }, + { + "epoch": 0.11593985713572107, + "grad_norm": 0.19140054285526276, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2321 + }, + { + "epoch": 0.11598980968080323, + "grad_norm": 0.20484183728694916, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2322 + }, + { + "epoch": 0.11603976222588541, + "grad_norm": 0.26027926802635193, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 2323 + }, + { + "epoch": 0.11608971477096758, + "grad_norm": 0.2885098457336426, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2324 + }, + { + "epoch": 0.11613966731604976, + "grad_norm": 0.22144347429275513, + "learning_rate": 0.0001, + "loss": 0.0111, + "step": 2325 + }, + { + "epoch": 0.11618961986113192, + "grad_norm": 0.21024689078330994, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2326 + }, + { + "epoch": 0.1162395724062141, + "grad_norm": 0.277691513299942, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2327 + }, + { + "epoch": 0.11628952495129627, + "grad_norm": 0.21819713711738586, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2328 + }, + { + "epoch": 0.11633947749637844, + "grad_norm": 0.22069410979747772, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2329 + }, + { + "epoch": 0.11638943004146061, + "grad_norm": 0.18498806655406952, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2330 + }, + { + "epoch": 0.11643938258654278, + "grad_norm": 0.1806635707616806, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2331 + }, + { + "epoch": 
0.11648933513162496, + "grad_norm": 0.24612143635749817, + "learning_rate": 0.0001, + "loss": 0.022, + "step": 2332 + }, + { + "epoch": 0.11653928767670713, + "grad_norm": 0.22724434733390808, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2333 + }, + { + "epoch": 0.1165892402217893, + "grad_norm": 0.18254037201404572, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2334 + }, + { + "epoch": 0.11663919276687147, + "grad_norm": 0.2305103838443756, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2335 + }, + { + "epoch": 0.11668914531195364, + "grad_norm": 0.23973146080970764, + "learning_rate": 0.0001, + "loss": 0.1278, + "step": 2336 + }, + { + "epoch": 0.11673909785703582, + "grad_norm": 0.22186660766601562, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2337 + }, + { + "epoch": 0.11678905040211798, + "grad_norm": 0.2646212875843048, + "learning_rate": 0.0001, + "loss": 0.0215, + "step": 2338 + }, + { + "epoch": 0.11683900294720016, + "grad_norm": 0.2041870802640915, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 2339 + }, + { + "epoch": 0.11688895549228233, + "grad_norm": 0.25067052245140076, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2340 + }, + { + "epoch": 0.11693890803736451, + "grad_norm": 0.29168757796287537, + "learning_rate": 0.0001, + "loss": 0.1365, + "step": 2341 + }, + { + "epoch": 0.11698886058244667, + "grad_norm": 0.21720518171787262, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 2342 + }, + { + "epoch": 0.11703881312752885, + "grad_norm": 0.23417620360851288, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 2343 + }, + { + "epoch": 0.11708876567261102, + "grad_norm": 0.22474999725818634, + "learning_rate": 0.0001, + "loss": 0.1293, + "step": 2344 + }, + { + "epoch": 0.11713871821769319, + "grad_norm": 0.18349476158618927, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2345 + }, + { + "epoch": 0.11718867076277537, + "grad_norm": 0.2058199793100357, + "learning_rate": 0.0001, + "loss": 0.0115, + "step": 2346 + }, + { + "epoch": 0.11723862330785753, + "grad_norm": 0.2283610999584198, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2347 + }, + { + "epoch": 0.11728857585293971, + "grad_norm": 0.20300769805908203, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2348 + }, + { + "epoch": 0.11733852839802188, + "grad_norm": 0.17962178587913513, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 2349 + }, + { + "epoch": 0.11738848094310406, + "grad_norm": 0.2563682794570923, + "learning_rate": 0.0001, + "loss": 0.1305, + "step": 2350 + }, + { + "epoch": 0.11743843348818622, + "grad_norm": 0.30088356137275696, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 2351 + }, + { + "epoch": 0.11748838603326839, + "grad_norm": 0.27482303977012634, + "learning_rate": 0.0001, + "loss": 0.0201, + "step": 2352 + }, + { + "epoch": 0.11753833857835057, + "grad_norm": 0.19769127666950226, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2353 + }, + { + "epoch": 0.11758829112343273, + "grad_norm": 0.2947935163974762, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 2354 + }, + { + "epoch": 0.11763824366851491, + "grad_norm": 0.19557777047157288, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2355 + }, + { + "epoch": 0.11768819621359708, + "grad_norm": 0.21106144785881042, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 2356 + }, + { + "epoch": 0.11773814875867926, + "grad_norm": 0.24985627830028534, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2357 + }, + { + "epoch": 
0.11778810130376142, + "grad_norm": 0.21014340221881866, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 2358 + }, + { + "epoch": 0.1178380538488436, + "grad_norm": 0.19618192315101624, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2359 + }, + { + "epoch": 0.11788800639392577, + "grad_norm": 0.19977769255638123, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2360 + }, + { + "epoch": 0.11793795893900794, + "grad_norm": 0.25395771861076355, + "learning_rate": 0.0001, + "loss": 0.0193, + "step": 2361 + }, + { + "epoch": 0.11798791148409012, + "grad_norm": 0.19541433453559875, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2362 + }, + { + "epoch": 0.11803786402917228, + "grad_norm": 0.17484316229820251, + "learning_rate": 0.0001, + "loss": 0.1259, + "step": 2363 + }, + { + "epoch": 0.11808781657425446, + "grad_norm": 0.190651997923851, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2364 + }, + { + "epoch": 0.11813776911933663, + "grad_norm": 0.359479695558548, + "learning_rate": 0.0001, + "loss": 0.2684, + "step": 2365 + }, + { + "epoch": 0.11818772166441881, + "grad_norm": 0.1749057024717331, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2366 + }, + { + "epoch": 0.11823767420950097, + "grad_norm": 0.20768412947654724, + "learning_rate": 0.0001, + "loss": 0.1268, + "step": 2367 + }, + { + "epoch": 0.11828762675458315, + "grad_norm": 0.17974786460399628, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2368 + }, + { + "epoch": 0.11833757929966532, + "grad_norm": 0.1848149299621582, + "learning_rate": 0.0001, + "loss": 0.128, + "step": 2369 + }, + { + "epoch": 0.11838753184474748, + "grad_norm": 0.19793859124183655, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 2370 + }, + { + "epoch": 0.11843748438982966, + "grad_norm": 0.19751282036304474, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2371 + }, + { + "epoch": 0.11848743693491183, + "grad_norm": 0.14828820526599884, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2372 + }, + { + "epoch": 0.11853738947999401, + "grad_norm": 0.16093085706233978, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2373 + }, + { + "epoch": 0.11858734202507618, + "grad_norm": 0.2323310524225235, + "learning_rate": 0.0001, + "loss": 0.1261, + "step": 2374 + }, + { + "epoch": 0.11863729457015836, + "grad_norm": 0.14456461369991302, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2375 + }, + { + "epoch": 0.11868724711524052, + "grad_norm": 0.18472443521022797, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 2376 + }, + { + "epoch": 0.11873719966032269, + "grad_norm": 0.17938056588172913, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2377 + }, + { + "epoch": 0.11878715220540487, + "grad_norm": 0.15803813934326172, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2378 + }, + { + "epoch": 0.11883710475048703, + "grad_norm": 0.16855934262275696, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2379 + }, + { + "epoch": 0.11888705729556921, + "grad_norm": 0.1689583957195282, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2380 + }, + { + "epoch": 0.11893700984065138, + "grad_norm": 0.25821128487586975, + "learning_rate": 0.0001, + "loss": 0.1325, + "step": 2381 + }, + { + "epoch": 0.11898696238573356, + "grad_norm": 0.20074079930782318, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 2382 + }, + { + "epoch": 0.11903691493081572, + "grad_norm": 0.14311322569847107, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2383 + }, + { + "epoch": 
0.1190868674758979, + "grad_norm": 0.27739647030830383, + "learning_rate": 0.0001, + "loss": 0.0288, + "step": 2384 + }, + { + "epoch": 0.11913682002098007, + "grad_norm": 0.23949162662029266, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 2385 + }, + { + "epoch": 0.11918677256606224, + "grad_norm": 0.2700358033180237, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 2386 + }, + { + "epoch": 0.11923672511114441, + "grad_norm": 0.24149338901042938, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2387 + }, + { + "epoch": 0.11928667765622658, + "grad_norm": 0.222516730427742, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 2388 + }, + { + "epoch": 0.11933663020130876, + "grad_norm": 0.25183171033859253, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 2389 + }, + { + "epoch": 0.11938658274639093, + "grad_norm": 0.20075848698616028, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 2390 + }, + { + "epoch": 0.1194365352914731, + "grad_norm": 0.2572225034236908, + "learning_rate": 0.0001, + "loss": 0.1296, + "step": 2391 + }, + { + "epoch": 0.11948648783655527, + "grad_norm": 0.21503807604312897, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 2392 + }, + { + "epoch": 0.11953644038163744, + "grad_norm": 0.17125184834003448, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2393 + }, + { + "epoch": 0.11958639292671962, + "grad_norm": 0.2191842794418335, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 2394 + }, + { + "epoch": 0.11963634547180178, + "grad_norm": 0.2456878274679184, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 2395 + }, + { + "epoch": 0.11968629801688396, + "grad_norm": 0.27757686376571655, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2396 + }, + { + "epoch": 0.11973625056196613, + "grad_norm": 0.20972232520580292, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2397 + }, + { + "epoch": 0.11978620310704831, + "grad_norm": 0.16445216536521912, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2398 + }, + { + "epoch": 0.11983615565213047, + "grad_norm": 0.2837493419647217, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 2399 + }, + { + "epoch": 0.11988610819721265, + "grad_norm": 0.3000233471393585, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2400 + }, + { + "epoch": 0.11993606074229482, + "grad_norm": 0.26029518246650696, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2401 + }, + { + "epoch": 0.11998601328737699, + "grad_norm": 0.194354847073555, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2402 + }, + { + "epoch": 0.12003596583245917, + "grad_norm": 0.2591858208179474, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2403 + }, + { + "epoch": 0.12008591837754133, + "grad_norm": 0.36504948139190674, + "learning_rate": 0.0001, + "loss": 0.023, + "step": 2404 + }, + { + "epoch": 0.12013587092262351, + "grad_norm": 0.26743054389953613, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2405 + }, + { + "epoch": 0.12018582346770568, + "grad_norm": 0.2885618507862091, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2406 + }, + { + "epoch": 0.12023577601278786, + "grad_norm": 0.22512035071849823, + "learning_rate": 0.0001, + "loss": 0.1294, + "step": 2407 + }, + { + "epoch": 0.12028572855787002, + "grad_norm": 0.28749439120292664, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 2408 + }, + { + "epoch": 0.1203356811029522, + "grad_norm": 0.27189889550209045, + "learning_rate": 0.0001, + "loss": 0.1298, + "step": 2409 + }, + { + "epoch": 
0.12038563364803437, + "grad_norm": 0.2625684142112732, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 2410 + }, + { + "epoch": 0.12043558619311653, + "grad_norm": 0.2005453109741211, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 2411 + }, + { + "epoch": 0.12048553873819871, + "grad_norm": 0.20354284346103668, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2412 + }, + { + "epoch": 0.12053549128328088, + "grad_norm": 0.27035146951675415, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2413 + }, + { + "epoch": 0.12058544382836306, + "grad_norm": 0.20606471598148346, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2414 + }, + { + "epoch": 0.12063539637344523, + "grad_norm": 0.24283230304718018, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 2415 + }, + { + "epoch": 0.1206853489185274, + "grad_norm": 0.24964329600334167, + "learning_rate": 0.0001, + "loss": 0.1282, + "step": 2416 + }, + { + "epoch": 0.12073530146360957, + "grad_norm": 0.26264965534210205, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 2417 + }, + { + "epoch": 0.12078525400869174, + "grad_norm": 0.20868422091007233, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 2418 + }, + { + "epoch": 0.12083520655377392, + "grad_norm": 0.23057147860527039, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2419 + }, + { + "epoch": 0.12088515909885608, + "grad_norm": 0.25135254859924316, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2420 + }, + { + "epoch": 0.12093511164393826, + "grad_norm": 0.17931818962097168, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2421 + }, + { + "epoch": 0.12098506418902043, + "grad_norm": 0.18253466486930847, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2422 + }, + { + "epoch": 0.12103501673410261, + "grad_norm": 0.251122385263443, + "learning_rate": 0.0001, + "loss": 0.0186, + "step": 2423 + }, + { + "epoch": 0.12108496927918477, + "grad_norm": 0.24292367696762085, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 2424 + }, + { + "epoch": 0.12113492182426695, + "grad_norm": 0.2520798146724701, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2425 + }, + { + "epoch": 0.12118487436934912, + "grad_norm": 0.26621535420417786, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2426 + }, + { + "epoch": 0.12123482691443128, + "grad_norm": 0.17849011719226837, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2427 + }, + { + "epoch": 0.12128477945951346, + "grad_norm": 0.22234126925468445, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2428 + }, + { + "epoch": 0.12133473200459563, + "grad_norm": 0.22005686163902283, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2429 + }, + { + "epoch": 0.12138468454967781, + "grad_norm": 0.1889897584915161, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 2430 + }, + { + "epoch": 0.12143463709475998, + "grad_norm": 0.18874377012252808, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2431 + }, + { + "epoch": 0.12148458963984216, + "grad_norm": 0.17877860367298126, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2432 + }, + { + "epoch": 0.12153454218492432, + "grad_norm": 0.21334271132946014, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2433 + }, + { + "epoch": 0.12158449473000649, + "grad_norm": 0.22452564537525177, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2434 + }, + { + "epoch": 0.12163444727508867, + "grad_norm": 0.599785566329956, + "learning_rate": 0.0001, + "loss": 0.0434, + "step": 2435 + }, + { + "epoch": 
0.12168439982017083, + "grad_norm": 0.495528906583786, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 2436 + }, + { + "epoch": 0.12173435236525301, + "grad_norm": 0.5691428780555725, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2437 + }, + { + "epoch": 0.12178430491033518, + "grad_norm": 0.2904193699359894, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2438 + }, + { + "epoch": 0.12183425745541736, + "grad_norm": 0.3053296208381653, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2439 + }, + { + "epoch": 0.12188421000049952, + "grad_norm": 0.6157121658325195, + "learning_rate": 0.0001, + "loss": 0.0172, + "step": 2440 + }, + { + "epoch": 0.1219341625455817, + "grad_norm": 0.4728807210922241, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2441 + }, + { + "epoch": 0.12198411509066387, + "grad_norm": 0.32087820768356323, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2442 + }, + { + "epoch": 0.12203406763574604, + "grad_norm": 0.3200421929359436, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2443 + }, + { + "epoch": 0.12208402018082821, + "grad_norm": 0.7114470601081848, + "learning_rate": 0.0001, + "loss": 0.0152, + "step": 2444 + }, + { + "epoch": 0.12213397272591038, + "grad_norm": 0.4174312651157379, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2445 + }, + { + "epoch": 0.12218392527099256, + "grad_norm": 0.5355443358421326, + "learning_rate": 0.0001, + "loss": 0.023, + "step": 2446 + }, + { + "epoch": 0.12223387781607473, + "grad_norm": 0.4285222291946411, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2447 + }, + { + "epoch": 0.1222838303611569, + "grad_norm": 0.35577407479286194, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2448 + }, + { + "epoch": 0.12233378290623907, + "grad_norm": 0.3765413761138916, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2449 + }, + { + "epoch": 0.12238373545132125, + "grad_norm": 0.3282161355018616, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 2450 + }, + { + "epoch": 0.12243368799640342, + "grad_norm": 0.2906756103038788, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 2451 + }, + { + "epoch": 0.12248364054148558, + "grad_norm": 0.4789772033691406, + "learning_rate": 0.0001, + "loss": 0.0462, + "step": 2452 + }, + { + "epoch": 0.12253359308656776, + "grad_norm": 0.37567174434661865, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 2453 + }, + { + "epoch": 0.12258354563164993, + "grad_norm": 0.5354001522064209, + "learning_rate": 0.0001, + "loss": 0.1566, + "step": 2454 + }, + { + "epoch": 0.12263349817673211, + "grad_norm": 0.363100528717041, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 2455 + }, + { + "epoch": 0.12268345072181427, + "grad_norm": 0.4419180452823639, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 2456 + }, + { + "epoch": 0.12273340326689645, + "grad_norm": 0.26922109723091125, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2457 + }, + { + "epoch": 0.12278335581197862, + "grad_norm": 0.4963711202144623, + "learning_rate": 0.0001, + "loss": 0.158, + "step": 2458 + }, + { + "epoch": 0.12283330835706079, + "grad_norm": 0.49477115273475647, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 2459 + }, + { + "epoch": 0.12288326090214297, + "grad_norm": 0.5125694870948792, + "learning_rate": 0.0001, + "loss": 0.0648, + "step": 2460 + }, + { + "epoch": 0.12293321344722513, + "grad_norm": 0.32132068276405334, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 2461 + }, + { + "epoch": 
0.12298316599230731, + "grad_norm": 0.3945857584476471, + "learning_rate": 0.0001, + "loss": 0.0085, + "step": 2462 + }, + { + "epoch": 0.12303311853738948, + "grad_norm": 0.3961140513420105, + "learning_rate": 0.0001, + "loss": 0.0221, + "step": 2463 + }, + { + "epoch": 0.12308307108247166, + "grad_norm": 0.3613954782485962, + "learning_rate": 0.0001, + "loss": 0.0402, + "step": 2464 + }, + { + "epoch": 0.12313302362755382, + "grad_norm": 0.47644880414009094, + "learning_rate": 0.0001, + "loss": 0.1378, + "step": 2465 + }, + { + "epoch": 0.123182976172636, + "grad_norm": 0.4092876613140106, + "learning_rate": 0.0001, + "loss": 0.0201, + "step": 2466 + }, + { + "epoch": 0.12323292871771817, + "grad_norm": 0.3968326449394226, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 2467 + }, + { + "epoch": 0.12328288126280033, + "grad_norm": 0.4016263782978058, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 2468 + }, + { + "epoch": 0.12333283380788251, + "grad_norm": 0.33869418501853943, + "learning_rate": 0.0001, + "loss": 0.0567, + "step": 2469 + }, + { + "epoch": 0.12338278635296468, + "grad_norm": 0.3450515568256378, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2470 + }, + { + "epoch": 0.12343273889804686, + "grad_norm": 0.38845303654670715, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 2471 + }, + { + "epoch": 0.12348269144312903, + "grad_norm": 0.3274632692337036, + "learning_rate": 0.0001, + "loss": 0.0255, + "step": 2472 + }, + { + "epoch": 0.1235326439882112, + "grad_norm": 0.3390672206878662, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2473 + }, + { + "epoch": 0.12358259653329337, + "grad_norm": 0.3917544186115265, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2474 + }, + { + "epoch": 0.12363254907837554, + "grad_norm": 0.32677406072616577, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 2475 + }, + { + "epoch": 0.12368250162345772, + "grad_norm": 0.39051949977874756, + "learning_rate": 0.0001, + "loss": 0.0321, + "step": 2476 + }, + { + "epoch": 0.12373245416853988, + "grad_norm": 0.4108062982559204, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 2477 + }, + { + "epoch": 0.12378240671362206, + "grad_norm": 0.41959676146507263, + "learning_rate": 0.0001, + "loss": 0.1307, + "step": 2478 + }, + { + "epoch": 0.12383235925870423, + "grad_norm": 0.377141535282135, + "learning_rate": 0.0001, + "loss": 0.0335, + "step": 2479 + }, + { + "epoch": 0.12388231180378641, + "grad_norm": 0.31002315878868103, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 2480 + }, + { + "epoch": 0.12393226434886857, + "grad_norm": 0.40910977125167847, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 2481 + }, + { + "epoch": 0.12398221689395075, + "grad_norm": 0.37798088788986206, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2482 + }, + { + "epoch": 0.12403216943903292, + "grad_norm": 0.30835703015327454, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 2483 + }, + { + "epoch": 0.12408212198411508, + "grad_norm": 0.3198387920856476, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2484 + }, + { + "epoch": 0.12413207452919726, + "grad_norm": 0.392446905374527, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2485 + }, + { + "epoch": 0.12418202707427943, + "grad_norm": 0.2609267830848694, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 2486 + }, + { + "epoch": 0.12423197961936161, + "grad_norm": 0.3761167526245117, + "learning_rate": 0.0001, + "loss": 0.0446, + "step": 2487 + }, + { + "epoch": 
0.12428193216444378, + "grad_norm": 0.30149734020233154, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2488 + }, + { + "epoch": 0.12433188470952596, + "grad_norm": 0.36426571011543274, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 2489 + }, + { + "epoch": 0.12438183725460812, + "grad_norm": 0.5130752325057983, + "learning_rate": 0.0001, + "loss": 0.0648, + "step": 2490 + }, + { + "epoch": 0.1244317897996903, + "grad_norm": 0.2496754378080368, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2491 + }, + { + "epoch": 0.12448174234477247, + "grad_norm": 0.30426275730133057, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 2492 + }, + { + "epoch": 0.12453169488985463, + "grad_norm": 0.338081032037735, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2493 + }, + { + "epoch": 0.12458164743493681, + "grad_norm": 0.4306795001029968, + "learning_rate": 0.0001, + "loss": 0.0545, + "step": 2494 + }, + { + "epoch": 0.12463159998001898, + "grad_norm": 0.3351433277130127, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 2495 + }, + { + "epoch": 0.12468155252510116, + "grad_norm": 0.3758351504802704, + "learning_rate": 0.0001, + "loss": 0.0263, + "step": 2496 + }, + { + "epoch": 0.12473150507018332, + "grad_norm": 0.40244758129119873, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2497 + }, + { + "epoch": 0.1247814576152655, + "grad_norm": 0.3707927167415619, + "learning_rate": 0.0001, + "loss": 0.0423, + "step": 2498 + }, + { + "epoch": 0.12483141016034767, + "grad_norm": 0.4004960060119629, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 2499 + }, + { + "epoch": 0.12488136270542984, + "grad_norm": 0.3703455924987793, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2500 + }, + { + "epoch": 0.12493131525051202, + "grad_norm": 0.33381643891334534, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 2501 + }, + { + "epoch": 0.12498126779559418, + "grad_norm": 0.3779888153076172, + "learning_rate": 0.0001, + "loss": 0.0271, + "step": 2502 + }, + { + "epoch": 0.12503122034067635, + "grad_norm": 0.38933634757995605, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2503 + }, + { + "epoch": 0.12508117288575854, + "grad_norm": 0.31271564960479736, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2504 + }, + { + "epoch": 0.1251311254308407, + "grad_norm": 0.35228556394577026, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 2505 + }, + { + "epoch": 0.12518107797592287, + "grad_norm": 0.3949032425880432, + "learning_rate": 0.0001, + "loss": 0.0227, + "step": 2506 + }, + { + "epoch": 0.12523103052100504, + "grad_norm": 0.3015921711921692, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 2507 + }, + { + "epoch": 0.1252809830660872, + "grad_norm": 0.42448845505714417, + "learning_rate": 0.0001, + "loss": 0.0283, + "step": 2508 + }, + { + "epoch": 0.1253309356111694, + "grad_norm": 0.2960430085659027, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 2509 + }, + { + "epoch": 0.12538088815625156, + "grad_norm": 0.27267661690711975, + "learning_rate": 0.0001, + "loss": 0.0118, + "step": 2510 + }, + { + "epoch": 0.12543084070133373, + "grad_norm": 0.3750583231449127, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 2511 + }, + { + "epoch": 0.1254807932464159, + "grad_norm": 0.33559244871139526, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2512 + }, + { + "epoch": 0.1255307457914981, + "grad_norm": 0.26698678731918335, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2513 + }, + { + "epoch": 
0.12558069833658025, + "grad_norm": 0.2100878357887268, + "learning_rate": 0.0001, + "loss": 0.0109, + "step": 2514 + }, + { + "epoch": 0.12563065088166242, + "grad_norm": 0.3113160729408264, + "learning_rate": 0.0001, + "loss": 0.0307, + "step": 2515 + }, + { + "epoch": 0.12568060342674459, + "grad_norm": 0.2994106709957123, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2516 + }, + { + "epoch": 0.12573055597182675, + "grad_norm": 0.24659505486488342, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 2517 + }, + { + "epoch": 0.12578050851690895, + "grad_norm": 0.2111920267343521, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2518 + }, + { + "epoch": 0.1258304610619911, + "grad_norm": 0.26893696188926697, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2519 + }, + { + "epoch": 0.12588041360707328, + "grad_norm": 0.2867714464664459, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 2520 + }, + { + "epoch": 0.12593036615215544, + "grad_norm": 0.2354726940393448, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 2521 + }, + { + "epoch": 0.12598031869723764, + "grad_norm": 0.2296687364578247, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2522 + }, + { + "epoch": 0.1260302712423198, + "grad_norm": 0.29246795177459717, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 2523 + }, + { + "epoch": 0.12608022378740197, + "grad_norm": 0.23190635442733765, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 2524 + }, + { + "epoch": 0.12613017633248413, + "grad_norm": 0.2010083794593811, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2525 + }, + { + "epoch": 0.1261801288775663, + "grad_norm": 0.1755288690328598, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2526 + }, + { + "epoch": 0.1262300814226485, + "grad_norm": 0.2346162348985672, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 2527 + }, + { + "epoch": 0.12628003396773066, + "grad_norm": 0.18834732472896576, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2528 + }, + { + "epoch": 0.12632998651281283, + "grad_norm": 0.2588087022304535, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2529 + }, + { + "epoch": 0.126379939057895, + "grad_norm": 0.1910940408706665, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 2530 + }, + { + "epoch": 0.12642989160297718, + "grad_norm": 0.22109976410865784, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 2531 + }, + { + "epoch": 0.12647984414805935, + "grad_norm": 0.24281543493270874, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 2532 + }, + { + "epoch": 0.12652979669314152, + "grad_norm": 0.22851957380771637, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2533 + }, + { + "epoch": 0.12657974923822368, + "grad_norm": 0.18749330937862396, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 2534 + }, + { + "epoch": 0.12662970178330585, + "grad_norm": 0.18372388184070587, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2535 + }, + { + "epoch": 0.12667965432838804, + "grad_norm": 0.20130912959575653, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2536 + }, + { + "epoch": 0.1267296068734702, + "grad_norm": 0.23157382011413574, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 2537 + }, + { + "epoch": 0.12677955941855237, + "grad_norm": 0.16330337524414062, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 2538 + }, + { + "epoch": 0.12682951196363454, + "grad_norm": 0.20970338582992554, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2539 + }, + { + "epoch": 
0.12687946450871673, + "grad_norm": 0.19941893219947815, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2540 + }, + { + "epoch": 0.1269294170537989, + "grad_norm": 0.1857190579175949, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2541 + }, + { + "epoch": 0.12697936959888106, + "grad_norm": 0.17531301081180573, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2542 + }, + { + "epoch": 0.12702932214396323, + "grad_norm": 0.19717948138713837, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2543 + }, + { + "epoch": 0.1270792746890454, + "grad_norm": 0.3487708568572998, + "learning_rate": 0.0001, + "loss": 0.1677, + "step": 2544 + }, + { + "epoch": 0.1271292272341276, + "grad_norm": 0.23138993978500366, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 2545 + }, + { + "epoch": 0.12717917977920976, + "grad_norm": 0.17059117555618286, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2546 + }, + { + "epoch": 0.12722913232429192, + "grad_norm": 0.21264222264289856, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2547 + }, + { + "epoch": 0.1272790848693741, + "grad_norm": 0.2803468406200409, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 2548 + }, + { + "epoch": 0.12732903741445625, + "grad_norm": 0.26616349816322327, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 2549 + }, + { + "epoch": 0.12737898995953845, + "grad_norm": 0.24550427496433258, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 2550 + }, + { + "epoch": 0.1274289425046206, + "grad_norm": 0.23894008994102478, + "learning_rate": 0.0001, + "loss": 0.0167, + "step": 2551 + }, + { + "epoch": 0.12747889504970278, + "grad_norm": 0.26069560647010803, + "learning_rate": 0.0001, + "loss": 0.1457, + "step": 2552 + }, + { + "epoch": 0.12752884759478494, + "grad_norm": 0.24669134616851807, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 2553 + }, + { + "epoch": 0.12757880013986714, + "grad_norm": 0.24837981164455414, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 2554 + }, + { + "epoch": 0.1276287526849493, + "grad_norm": 0.32630273699760437, + "learning_rate": 0.0001, + "loss": 0.0341, + "step": 2555 + }, + { + "epoch": 0.12767870523003147, + "grad_norm": 0.3048897087574005, + "learning_rate": 0.0001, + "loss": 0.0085, + "step": 2556 + }, + { + "epoch": 0.12772865777511364, + "grad_norm": 0.2543891668319702, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2557 + }, + { + "epoch": 0.1277786103201958, + "grad_norm": 0.26911622285842896, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2558 + }, + { + "epoch": 0.127828562865278, + "grad_norm": 0.2269771695137024, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2559 + }, + { + "epoch": 0.12787851541036016, + "grad_norm": 0.23732295632362366, + "learning_rate": 0.0001, + "loss": 0.0095, + "step": 2560 + }, + { + "epoch": 0.12792846795544233, + "grad_norm": 0.3114651143550873, + "learning_rate": 0.0001, + "loss": 0.0337, + "step": 2561 + }, + { + "epoch": 0.1279784205005245, + "grad_norm": 0.25086426734924316, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2562 + }, + { + "epoch": 0.12802837304560669, + "grad_norm": 0.3268166780471802, + "learning_rate": 0.0001, + "loss": 0.0208, + "step": 2563 + }, + { + "epoch": 0.12807832559068885, + "grad_norm": 0.21307110786437988, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2564 + }, + { + "epoch": 0.12812827813577102, + "grad_norm": 0.2242622822523117, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 2565 + }, + { + "epoch": 
0.12817823068085318, + "grad_norm": 0.3058738112449646, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2566 + }, + { + "epoch": 0.12822818322593535, + "grad_norm": 0.22291363775730133, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2567 + }, + { + "epoch": 0.12827813577101754, + "grad_norm": 0.21916216611862183, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 2568 + }, + { + "epoch": 0.1283280883160997, + "grad_norm": 0.2665741741657257, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 2569 + }, + { + "epoch": 0.12837804086118187, + "grad_norm": 0.2530044615268707, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 2570 + }, + { + "epoch": 0.12842799340626404, + "grad_norm": 0.2246680110692978, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 2571 + }, + { + "epoch": 0.12847794595134623, + "grad_norm": 0.2690187692642212, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 2572 + }, + { + "epoch": 0.1285278984964284, + "grad_norm": 0.2813708782196045, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2573 + }, + { + "epoch": 0.12857785104151057, + "grad_norm": 0.2444905936717987, + "learning_rate": 0.0001, + "loss": 0.1619, + "step": 2574 + }, + { + "epoch": 0.12862780358659273, + "grad_norm": 0.21441559493541718, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2575 + }, + { + "epoch": 0.1286777561316749, + "grad_norm": 0.21984465420246124, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 2576 + }, + { + "epoch": 0.1287277086767571, + "grad_norm": 0.2078491896390915, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 2577 + }, + { + "epoch": 0.12877766122183926, + "grad_norm": 0.20575851202011108, + "learning_rate": 0.0001, + "loss": 0.135, + "step": 2578 + }, + { + "epoch": 0.12882761376692142, + "grad_norm": 0.1645885705947876, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2579 + }, + { + "epoch": 0.1288775663120036, + "grad_norm": 0.20471054315567017, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2580 + }, + { + "epoch": 0.12892751885708578, + "grad_norm": 0.21002137660980225, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2581 + }, + { + "epoch": 0.12897747140216795, + "grad_norm": 0.1779075413942337, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2582 + }, + { + "epoch": 0.12902742394725011, + "grad_norm": 0.19296352565288544, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2583 + }, + { + "epoch": 0.12907737649233228, + "grad_norm": 0.21351803839206696, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 2584 + }, + { + "epoch": 0.12912732903741445, + "grad_norm": 0.19483977556228638, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2585 + }, + { + "epoch": 0.12917728158249664, + "grad_norm": 0.17316894233226776, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2586 + }, + { + "epoch": 0.1292272341275788, + "grad_norm": 0.14128325879573822, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2587 + }, + { + "epoch": 0.12927718667266097, + "grad_norm": 0.2074478417634964, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 2588 + }, + { + "epoch": 0.12932713921774314, + "grad_norm": 0.19239987432956696, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2589 + }, + { + "epoch": 0.1293770917628253, + "grad_norm": 0.18166692554950714, + "learning_rate": 0.0001, + "loss": 0.0212, + "step": 2590 + }, + { + "epoch": 0.1294270443079075, + "grad_norm": 0.18644922971725464, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 2591 + }, + { + "epoch": 
0.12947699685298966, + "grad_norm": 0.12962444126605988, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2592 + }, + { + "epoch": 0.12952694939807183, + "grad_norm": 0.14206230640411377, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2593 + }, + { + "epoch": 0.129576901943154, + "grad_norm": 0.2051478922367096, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2594 + }, + { + "epoch": 0.1296268544882362, + "grad_norm": 0.20398598909378052, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 2595 + }, + { + "epoch": 0.12967680703331835, + "grad_norm": 0.15181003510951996, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2596 + }, + { + "epoch": 0.12972675957840052, + "grad_norm": 0.23353277146816254, + "learning_rate": 0.0001, + "loss": 0.0329, + "step": 2597 + }, + { + "epoch": 0.12977671212348268, + "grad_norm": 0.1620395928621292, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2598 + }, + { + "epoch": 0.12982666466856485, + "grad_norm": 0.21719872951507568, + "learning_rate": 0.0001, + "loss": 0.0168, + "step": 2599 + }, + { + "epoch": 0.12987661721364704, + "grad_norm": 0.1919592171907425, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2600 + }, + { + "epoch": 0.1299265697587292, + "grad_norm": 0.19832497835159302, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2601 + }, + { + "epoch": 0.12997652230381138, + "grad_norm": 0.16922147572040558, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2602 + }, + { + "epoch": 0.13002647484889354, + "grad_norm": 0.2547495365142822, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 2603 + }, + { + "epoch": 0.13007642739397574, + "grad_norm": 0.19115310907363892, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2604 + }, + { + "epoch": 0.1301263799390579, + "grad_norm": 0.1909232884645462, + "learning_rate": 0.0001, + "loss": 0.0143, + "step": 2605 + }, + { + "epoch": 0.13017633248414007, + "grad_norm": 0.2326197773218155, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 2606 + }, + { + "epoch": 0.13022628502922223, + "grad_norm": 0.23840904235839844, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2607 + }, + { + "epoch": 0.1302762375743044, + "grad_norm": 0.1966378390789032, + "learning_rate": 0.0001, + "loss": 0.0132, + "step": 2608 + }, + { + "epoch": 0.1303261901193866, + "grad_norm": 0.21395447850227356, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2609 + }, + { + "epoch": 0.13037614266446876, + "grad_norm": 0.23164516687393188, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 2610 + }, + { + "epoch": 0.13042609520955092, + "grad_norm": 0.17862774431705475, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2611 + }, + { + "epoch": 0.1304760477546331, + "grad_norm": 0.2006167769432068, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2612 + }, + { + "epoch": 0.13052600029971528, + "grad_norm": 0.2974291443824768, + "learning_rate": 0.0001, + "loss": 0.2769, + "step": 2613 + }, + { + "epoch": 0.13057595284479745, + "grad_norm": 0.24106912314891815, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2614 + }, + { + "epoch": 0.13062590538987962, + "grad_norm": 0.32179519534111023, + "learning_rate": 0.0001, + "loss": 0.0255, + "step": 2615 + }, + { + "epoch": 0.13067585793496178, + "grad_norm": 0.2077002376317978, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2616 + }, + { + "epoch": 0.13072581048004395, + "grad_norm": 0.19933754205703735, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2617 + }, + { + "epoch": 
0.13077576302512614, + "grad_norm": 0.29703348875045776, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 2618 + }, + { + "epoch": 0.1308257155702083, + "grad_norm": 0.22607840597629547, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2619 + }, + { + "epoch": 0.13087566811529047, + "grad_norm": 0.2364782989025116, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2620 + }, + { + "epoch": 0.13092562066037264, + "grad_norm": 0.21895676851272583, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2621 + }, + { + "epoch": 0.13097557320545483, + "grad_norm": 0.2283090353012085, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2622 + }, + { + "epoch": 0.131025525750537, + "grad_norm": 0.1914522647857666, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2623 + }, + { + "epoch": 0.13107547829561916, + "grad_norm": 0.25435274839401245, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 2624 + }, + { + "epoch": 0.13112543084070133, + "grad_norm": 0.20490746200084686, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2625 + }, + { + "epoch": 0.1311753833857835, + "grad_norm": 0.23470662534236908, + "learning_rate": 0.0001, + "loss": 0.1308, + "step": 2626 + }, + { + "epoch": 0.1312253359308657, + "grad_norm": 0.2682735025882721, + "learning_rate": 0.0001, + "loss": 0.1327, + "step": 2627 + }, + { + "epoch": 0.13127528847594785, + "grad_norm": 0.23146916925907135, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2628 + }, + { + "epoch": 0.13132524102103002, + "grad_norm": 0.3269801139831543, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2629 + }, + { + "epoch": 0.1313751935661122, + "grad_norm": 0.22307650744915009, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2630 + }, + { + "epoch": 0.13142514611119435, + "grad_norm": 0.16325214505195618, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2631 + }, + { + "epoch": 0.13147509865627655, + "grad_norm": 0.19014599919319153, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2632 + }, + { + "epoch": 0.1315250512013587, + "grad_norm": 0.19878073036670685, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2633 + }, + { + "epoch": 0.13157500374644088, + "grad_norm": 0.1725480705499649, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2634 + }, + { + "epoch": 0.13162495629152304, + "grad_norm": 0.19454121589660645, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2635 + }, + { + "epoch": 0.13167490883660524, + "grad_norm": 0.22595231235027313, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2636 + }, + { + "epoch": 0.1317248613816874, + "grad_norm": 0.18338114023208618, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2637 + }, + { + "epoch": 0.13177481392676957, + "grad_norm": 0.22564592957496643, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2638 + }, + { + "epoch": 0.13182476647185173, + "grad_norm": 0.18866370618343353, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2639 + }, + { + "epoch": 0.1318747190169339, + "grad_norm": 0.18404819071292877, + "learning_rate": 0.0001, + "loss": 0.1281, + "step": 2640 + }, + { + "epoch": 0.1319246715620161, + "grad_norm": 0.19847959280014038, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 2641 + }, + { + "epoch": 0.13197462410709826, + "grad_norm": 0.20142024755477905, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2642 + }, + { + "epoch": 0.13202457665218043, + "grad_norm": 0.1667070984840393, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2643 + }, + { + "epoch": 
0.1320745291972626, + "grad_norm": 0.16392390429973602, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 2644 + }, + { + "epoch": 0.13212448174234478, + "grad_norm": 0.2198471873998642, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2645 + }, + { + "epoch": 0.13217443428742695, + "grad_norm": 0.17527002096176147, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2646 + }, + { + "epoch": 0.13222438683250912, + "grad_norm": 0.2046232670545578, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 2647 + }, + { + "epoch": 0.13227433937759128, + "grad_norm": 0.23954610526561737, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 2648 + }, + { + "epoch": 0.13232429192267345, + "grad_norm": 0.21640734374523163, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2649 + }, + { + "epoch": 0.13237424446775564, + "grad_norm": 0.20215174555778503, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2650 + }, + { + "epoch": 0.1324241970128378, + "grad_norm": 0.1933182030916214, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 2651 + }, + { + "epoch": 0.13247414955791997, + "grad_norm": 0.1568567007780075, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2652 + }, + { + "epoch": 0.13252410210300214, + "grad_norm": 0.22300228476524353, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2653 + }, + { + "epoch": 0.13257405464808433, + "grad_norm": 0.17471745610237122, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2654 + }, + { + "epoch": 0.1326240071931665, + "grad_norm": 0.19941836595535278, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2655 + }, + { + "epoch": 0.13267395973824866, + "grad_norm": 0.20313376188278198, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2656 + }, + { + "epoch": 0.13272391228333083, + "grad_norm": 0.1385420560836792, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 2657 + }, + { + "epoch": 0.132773864828413, + "grad_norm": 0.19778405129909515, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2658 + }, + { + "epoch": 0.1328238173734952, + "grad_norm": 0.16971108317375183, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2659 + }, + { + "epoch": 0.13287376991857736, + "grad_norm": 0.2296539694070816, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2660 + }, + { + "epoch": 0.13292372246365952, + "grad_norm": 0.14630544185638428, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 2661 + }, + { + "epoch": 0.1329736750087417, + "grad_norm": 0.16770850121974945, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2662 + }, + { + "epoch": 0.13302362755382388, + "grad_norm": 0.22975459694862366, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 2663 + }, + { + "epoch": 0.13307358009890605, + "grad_norm": 0.1772102564573288, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2664 + }, + { + "epoch": 0.1331235326439882, + "grad_norm": 0.1767193078994751, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2665 + }, + { + "epoch": 0.13317348518907038, + "grad_norm": 0.2125680297613144, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2666 + }, + { + "epoch": 0.13322343773415254, + "grad_norm": 0.21068178117275238, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2667 + }, + { + "epoch": 0.13327339027923474, + "grad_norm": 0.16706690192222595, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 2668 + }, + { + "epoch": 0.1333233428243169, + "grad_norm": 0.18090079724788666, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2669 + }, + { + "epoch": 
0.13337329536939907, + "grad_norm": 0.16171379387378693, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2670 + }, + { + "epoch": 0.13342324791448124, + "grad_norm": 0.21138443052768707, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2671 + }, + { + "epoch": 0.1334732004595634, + "grad_norm": 0.2200833559036255, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 2672 + }, + { + "epoch": 0.1335231530046456, + "grad_norm": 0.12466765940189362, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2673 + }, + { + "epoch": 0.13357310554972776, + "grad_norm": 0.24127812683582306, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2674 + }, + { + "epoch": 0.13362305809480993, + "grad_norm": 0.20414100587368011, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2675 + }, + { + "epoch": 0.1336730106398921, + "grad_norm": 0.1924855262041092, + "learning_rate": 0.0001, + "loss": 0.0109, + "step": 2676 + }, + { + "epoch": 0.1337229631849743, + "grad_norm": 0.21263821423053741, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2677 + }, + { + "epoch": 0.13377291573005645, + "grad_norm": 0.21018433570861816, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 2678 + }, + { + "epoch": 0.13382286827513862, + "grad_norm": 0.25257608294487, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 2679 + }, + { + "epoch": 0.13387282082022078, + "grad_norm": 0.18650692701339722, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2680 + }, + { + "epoch": 0.13392277336530295, + "grad_norm": 0.3109716773033142, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 2681 + }, + { + "epoch": 0.13397272591038514, + "grad_norm": 0.2181510180234909, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2682 + }, + { + "epoch": 0.1340226784554673, + "grad_norm": 0.22523748874664307, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2683 + }, + { + "epoch": 0.13407263100054947, + "grad_norm": 0.2018222063779831, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2684 + }, + { + "epoch": 0.13412258354563164, + "grad_norm": 0.22154155373573303, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2685 + }, + { + "epoch": 0.13417253609071383, + "grad_norm": 0.22769229114055634, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2686 + }, + { + "epoch": 0.134222488635796, + "grad_norm": 0.3132275342941284, + "learning_rate": 0.0001, + "loss": 0.0306, + "step": 2687 + }, + { + "epoch": 0.13427244118087817, + "grad_norm": 0.2570638954639435, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2688 + }, + { + "epoch": 0.13432239372596033, + "grad_norm": 0.2161727100610733, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2689 + }, + { + "epoch": 0.1343723462710425, + "grad_norm": 0.3179208040237427, + "learning_rate": 0.0001, + "loss": 0.018, + "step": 2690 + }, + { + "epoch": 0.1344222988161247, + "grad_norm": 0.26855799555778503, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2691 + }, + { + "epoch": 0.13447225136120686, + "grad_norm": 0.24580147862434387, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2692 + }, + { + "epoch": 0.13452220390628902, + "grad_norm": 0.19710834324359894, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 2693 + }, + { + "epoch": 0.1345721564513712, + "grad_norm": 0.2560456693172455, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 2694 + }, + { + "epoch": 0.13462210899645338, + "grad_norm": 0.24362534284591675, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2695 + }, + { + "epoch": 0.13467206154153555, 
+ "grad_norm": 0.33936142921447754, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 2696 + }, + { + "epoch": 0.13472201408661771, + "grad_norm": 0.18355122208595276, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2697 + }, + { + "epoch": 0.13477196663169988, + "grad_norm": 0.2395194172859192, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2698 + }, + { + "epoch": 0.13482191917678205, + "grad_norm": 0.32712844014167786, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2699 + }, + { + "epoch": 0.13487187172186424, + "grad_norm": 0.267635315656662, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2700 + }, + { + "epoch": 0.1349218242669464, + "grad_norm": 0.21257181465625763, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 2701 + }, + { + "epoch": 0.13497177681202857, + "grad_norm": 0.2604229152202606, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2702 + }, + { + "epoch": 0.13502172935711074, + "grad_norm": 0.25230860710144043, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2703 + }, + { + "epoch": 0.13507168190219293, + "grad_norm": 0.19731441140174866, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2704 + }, + { + "epoch": 0.1351216344472751, + "grad_norm": 0.23331736028194427, + "learning_rate": 0.0001, + "loss": 0.1309, + "step": 2705 + }, + { + "epoch": 0.13517158699235726, + "grad_norm": 0.23232895135879517, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2706 + }, + { + "epoch": 0.13522153953743943, + "grad_norm": 0.2195623517036438, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2707 + }, + { + "epoch": 0.1352714920825216, + "grad_norm": 0.18054750561714172, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 2708 + }, + { + "epoch": 0.1353214446276038, + "grad_norm": 0.14942486584186554, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2709 + }, + { + "epoch": 0.13537139717268595, + "grad_norm": 0.2033783495426178, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 2710 + }, + { + "epoch": 0.13542134971776812, + "grad_norm": 0.32264411449432373, + "learning_rate": 0.0001, + "loss": 0.1322, + "step": 2711 + }, + { + "epoch": 0.13547130226285028, + "grad_norm": 0.28216350078582764, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2712 + }, + { + "epoch": 0.13552125480793245, + "grad_norm": 0.22897951304912567, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2713 + }, + { + "epoch": 0.13557120735301464, + "grad_norm": 0.23099005222320557, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2714 + }, + { + "epoch": 0.1356211598980968, + "grad_norm": 0.29143622517585754, + "learning_rate": 0.0001, + "loss": 0.1314, + "step": 2715 + }, + { + "epoch": 0.13567111244317898, + "grad_norm": 0.23739281296730042, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2716 + }, + { + "epoch": 0.13572106498826114, + "grad_norm": 0.17612677812576294, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2717 + }, + { + "epoch": 0.13577101753334334, + "grad_norm": 0.2120138555765152, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2718 + }, + { + "epoch": 0.1358209700784255, + "grad_norm": 0.1712147444486618, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2719 + }, + { + "epoch": 0.13587092262350767, + "grad_norm": 0.1587189882993698, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2720 + }, + { + "epoch": 0.13592087516858983, + "grad_norm": 0.17032761871814728, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2721 + }, + { + "epoch": 0.135970827713672, + 
"grad_norm": 0.22774776816368103, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2722 + }, + { + "epoch": 0.1360207802587542, + "grad_norm": 0.23899851739406586, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 2723 + }, + { + "epoch": 0.13607073280383636, + "grad_norm": 0.18743470311164856, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2724 + }, + { + "epoch": 0.13612068534891852, + "grad_norm": 0.24848298728466034, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2725 + }, + { + "epoch": 0.1361706378940007, + "grad_norm": 0.21503271162509918, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 2726 + }, + { + "epoch": 0.13622059043908288, + "grad_norm": 0.28570112586021423, + "learning_rate": 0.0001, + "loss": 0.0168, + "step": 2727 + }, + { + "epoch": 0.13627054298416505, + "grad_norm": 0.2208719551563263, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 2728 + }, + { + "epoch": 0.13632049552924722, + "grad_norm": 0.29997509717941284, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2729 + }, + { + "epoch": 0.13637044807432938, + "grad_norm": 0.2500983476638794, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2730 + }, + { + "epoch": 0.13642040061941155, + "grad_norm": 0.23890860378742218, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 2731 + }, + { + "epoch": 0.13647035316449374, + "grad_norm": 0.22362825274467468, + "learning_rate": 0.0001, + "loss": 0.1274, + "step": 2732 + }, + { + "epoch": 0.1365203057095759, + "grad_norm": 0.2464539259672165, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 2733 + }, + { + "epoch": 0.13657025825465807, + "grad_norm": 0.1920681744813919, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2734 + }, + { + "epoch": 0.13662021079974024, + "grad_norm": 0.17617277801036835, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2735 + }, + { + "epoch": 0.13667016334482243, + "grad_norm": 0.28585270047187805, + "learning_rate": 0.0001, + "loss": 0.0154, + "step": 2736 + }, + { + "epoch": 0.1367201158899046, + "grad_norm": 0.1839839667081833, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2737 + }, + { + "epoch": 0.13677006843498676, + "grad_norm": 0.21712109446525574, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 2738 + }, + { + "epoch": 0.13682002098006893, + "grad_norm": 0.14512434601783752, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2739 + }, + { + "epoch": 0.1368699735251511, + "grad_norm": 0.1566961407661438, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2740 + }, + { + "epoch": 0.1369199260702333, + "grad_norm": 0.2149387001991272, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2741 + }, + { + "epoch": 0.13696987861531545, + "grad_norm": 0.16344206035137177, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2742 + }, + { + "epoch": 0.13701983116039762, + "grad_norm": 0.1792602390050888, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2743 + }, + { + "epoch": 0.1370697837054798, + "grad_norm": 0.17670094966888428, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2744 + }, + { + "epoch": 0.13711973625056198, + "grad_norm": 0.17713558673858643, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 2745 + }, + { + "epoch": 0.13716968879564415, + "grad_norm": 0.17975080013275146, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2746 + }, + { + "epoch": 0.1372196413407263, + "grad_norm": 0.174185112118721, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 2747 + }, + { + "epoch": 0.13726959388580848, + 
"grad_norm": 0.19534675776958466, + "learning_rate": 0.0001, + "loss": 0.0135, + "step": 2748 + }, + { + "epoch": 0.13731954643089064, + "grad_norm": 0.24845463037490845, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2749 + }, + { + "epoch": 0.13736949897597284, + "grad_norm": 0.21780268847942352, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2750 + }, + { + "epoch": 0.137419451521055, + "grad_norm": 0.26131829619407654, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2751 + }, + { + "epoch": 0.13746940406613717, + "grad_norm": 0.23907332122325897, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2752 + }, + { + "epoch": 0.13751935661121933, + "grad_norm": 0.24803967773914337, + "learning_rate": 0.0001, + "loss": 0.0286, + "step": 2753 + }, + { + "epoch": 0.1375693091563015, + "grad_norm": 0.17551802098751068, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2754 + }, + { + "epoch": 0.1376192617013837, + "grad_norm": 0.253090500831604, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2755 + }, + { + "epoch": 0.13766921424646586, + "grad_norm": 0.2619493007659912, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2756 + }, + { + "epoch": 0.13771916679154803, + "grad_norm": 0.2812314033508301, + "learning_rate": 0.0001, + "loss": 0.0237, + "step": 2757 + }, + { + "epoch": 0.1377691193366302, + "grad_norm": 0.21470145881175995, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2758 + }, + { + "epoch": 0.13781907188171238, + "grad_norm": 0.2947809398174286, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 2759 + }, + { + "epoch": 0.13786902442679455, + "grad_norm": 0.2862802743911743, + "learning_rate": 0.0001, + "loss": 0.0169, + "step": 2760 + }, + { + "epoch": 0.13791897697187672, + "grad_norm": 0.18783128261566162, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2761 + }, + { + "epoch": 0.13796892951695888, + "grad_norm": 0.2043687403202057, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2762 + }, + { + "epoch": 0.13801888206204105, + "grad_norm": 0.2892955243587494, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 2763 + }, + { + "epoch": 0.13806883460712324, + "grad_norm": 0.26957935094833374, + "learning_rate": 0.0001, + "loss": 0.0154, + "step": 2764 + }, + { + "epoch": 0.1381187871522054, + "grad_norm": 0.22774189710617065, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2765 + }, + { + "epoch": 0.13816873969728757, + "grad_norm": 0.21620585024356842, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 2766 + }, + { + "epoch": 0.13821869224236974, + "grad_norm": 0.2281864583492279, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2767 + }, + { + "epoch": 0.13826864478745193, + "grad_norm": 0.24242320656776428, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2768 + }, + { + "epoch": 0.1383185973325341, + "grad_norm": 0.23556849360466003, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2769 + }, + { + "epoch": 0.13836854987761626, + "grad_norm": 0.32115206122398376, + "learning_rate": 0.0001, + "loss": 0.0353, + "step": 2770 + }, + { + "epoch": 0.13841850242269843, + "grad_norm": 0.22878220677375793, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2771 + }, + { + "epoch": 0.1384684549677806, + "grad_norm": 0.2349807620048523, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2772 + }, + { + "epoch": 0.1385184075128628, + "grad_norm": 0.2302723526954651, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 2773 + }, + { + "epoch": 0.13856836005794496, + "grad_norm": 
0.23169153928756714, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2774 + }, + { + "epoch": 0.13861831260302712, + "grad_norm": 0.28854840993881226, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2775 + }, + { + "epoch": 0.1386682651481093, + "grad_norm": 0.3693179488182068, + "learning_rate": 0.0001, + "loss": 0.2827, + "step": 2776 + }, + { + "epoch": 0.13871821769319148, + "grad_norm": 0.1901874989271164, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2777 + }, + { + "epoch": 0.13876817023827365, + "grad_norm": 0.265239953994751, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2778 + }, + { + "epoch": 0.1388181227833558, + "grad_norm": 0.3294161260128021, + "learning_rate": 0.0001, + "loss": 0.0116, + "step": 2779 + }, + { + "epoch": 0.13886807532843798, + "grad_norm": 0.2816888391971588, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2780 + }, + { + "epoch": 0.13891802787352014, + "grad_norm": 0.24002282321453094, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 2781 + }, + { + "epoch": 0.13896798041860234, + "grad_norm": 0.22068268060684204, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 2782 + }, + { + "epoch": 0.1390179329636845, + "grad_norm": 0.23467965424060822, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2783 + }, + { + "epoch": 0.13906788550876667, + "grad_norm": 0.20561520755290985, + "learning_rate": 0.0001, + "loss": 0.1247, + "step": 2784 + }, + { + "epoch": 0.13911783805384884, + "grad_norm": 0.23892514407634735, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 2785 + }, + { + "epoch": 0.13916779059893103, + "grad_norm": 0.21658119559288025, + "learning_rate": 0.0001, + "loss": 0.0172, + "step": 2786 + }, + { + "epoch": 0.1392177431440132, + "grad_norm": 0.264303594827652, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2787 + }, + { + "epoch": 0.13926769568909536, + "grad_norm": 0.22723615169525146, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2788 + }, + { + "epoch": 0.13931764823417753, + "grad_norm": 0.19839058816432953, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2789 + }, + { + "epoch": 0.1393676007792597, + "grad_norm": 0.2876602113246918, + "learning_rate": 0.0001, + "loss": 0.0245, + "step": 2790 + }, + { + "epoch": 0.1394175533243419, + "grad_norm": 0.41634225845336914, + "learning_rate": 0.0001, + "loss": 0.1465, + "step": 2791 + }, + { + "epoch": 0.13946750586942405, + "grad_norm": 0.22219137847423553, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2792 + }, + { + "epoch": 0.13951745841450622, + "grad_norm": 0.27910879254341125, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2793 + }, + { + "epoch": 0.13956741095958838, + "grad_norm": 0.30739182233810425, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2794 + }, + { + "epoch": 0.13961736350467055, + "grad_norm": 0.21733012795448303, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2795 + }, + { + "epoch": 0.13966731604975274, + "grad_norm": 0.26065438985824585, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2796 + }, + { + "epoch": 0.1397172685948349, + "grad_norm": 0.30606645345687866, + "learning_rate": 0.0001, + "loss": 0.0229, + "step": 2797 + }, + { + "epoch": 0.13976722113991707, + "grad_norm": 0.23521149158477783, + "learning_rate": 0.0001, + "loss": 0.0085, + "step": 2798 + }, + { + "epoch": 0.13981717368499924, + "grad_norm": 0.1863851249217987, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2799 + }, + { + "epoch": 0.13986712623008143, + "grad_norm": 
0.22838595509529114, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 2800 + }, + { + "epoch": 0.1399170787751636, + "grad_norm": 0.2897157669067383, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 2801 + }, + { + "epoch": 0.13996703132024577, + "grad_norm": 0.2634405493736267, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2802 + }, + { + "epoch": 0.14001698386532793, + "grad_norm": 0.2229992151260376, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2803 + }, + { + "epoch": 0.1400669364104101, + "grad_norm": 0.16003334522247314, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2804 + }, + { + "epoch": 0.1401168889554923, + "grad_norm": 0.20543211698532104, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 2805 + }, + { + "epoch": 0.14016684150057446, + "grad_norm": 0.23888914287090302, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 2806 + }, + { + "epoch": 0.14021679404565662, + "grad_norm": 0.2502836287021637, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 2807 + }, + { + "epoch": 0.1402667465907388, + "grad_norm": 0.17518837749958038, + "learning_rate": 0.0001, + "loss": 0.1312, + "step": 2808 + }, + { + "epoch": 0.14031669913582098, + "grad_norm": 0.17773650586605072, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2809 + }, + { + "epoch": 0.14036665168090315, + "grad_norm": 0.2282513976097107, + "learning_rate": 0.0001, + "loss": 0.0153, + "step": 2810 + }, + { + "epoch": 0.14041660422598531, + "grad_norm": 0.1858305186033249, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2811 + }, + { + "epoch": 0.14046655677106748, + "grad_norm": 0.1677510142326355, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2812 + }, + { + "epoch": 0.14051650931614965, + "grad_norm": 0.18548548221588135, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 2813 + }, + { + "epoch": 0.14056646186123184, + "grad_norm": 0.15378113090991974, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2814 + }, + { + "epoch": 0.140616414406314, + "grad_norm": 0.2029976099729538, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2815 + }, + { + "epoch": 0.14066636695139617, + "grad_norm": 0.1568613499403, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2816 + }, + { + "epoch": 0.14071631949647834, + "grad_norm": 0.1506338268518448, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2817 + }, + { + "epoch": 0.14076627204156053, + "grad_norm": 0.15074418485164642, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2818 + }, + { + "epoch": 0.1408162245866427, + "grad_norm": 0.16003285348415375, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2819 + }, + { + "epoch": 0.14086617713172486, + "grad_norm": 0.17737187445163727, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2820 + }, + { + "epoch": 0.14091612967680703, + "grad_norm": 0.19636616110801697, + "learning_rate": 0.0001, + "loss": 0.021, + "step": 2821 + }, + { + "epoch": 0.1409660822218892, + "grad_norm": 0.22272799909114838, + "learning_rate": 0.0001, + "loss": 0.1327, + "step": 2822 + }, + { + "epoch": 0.1410160347669714, + "grad_norm": 0.23677091300487518, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 2823 + }, + { + "epoch": 0.14106598731205355, + "grad_norm": 0.20973791182041168, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2824 + }, + { + "epoch": 0.14111593985713572, + "grad_norm": 0.1974712610244751, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2825 + }, + { + "epoch": 0.14116589240221789, + "grad_norm": 
0.1948525607585907, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 2826 + }, + { + "epoch": 0.14121584494730008, + "grad_norm": 0.27389290928840637, + "learning_rate": 0.0001, + "loss": 0.0253, + "step": 2827 + }, + { + "epoch": 0.14126579749238224, + "grad_norm": 0.1965353786945343, + "learning_rate": 0.0001, + "loss": 0.0227, + "step": 2828 + }, + { + "epoch": 0.1413157500374644, + "grad_norm": 0.2417474240064621, + "learning_rate": 0.0001, + "loss": 0.1259, + "step": 2829 + }, + { + "epoch": 0.14136570258254658, + "grad_norm": 0.3233601450920105, + "learning_rate": 0.0001, + "loss": 0.0311, + "step": 2830 + }, + { + "epoch": 0.14141565512762874, + "grad_norm": 0.23618781566619873, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 2831 + }, + { + "epoch": 0.14146560767271094, + "grad_norm": 0.21768537163734436, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2832 + }, + { + "epoch": 0.1415155602177931, + "grad_norm": 0.2836056053638458, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2833 + }, + { + "epoch": 0.14156551276287527, + "grad_norm": 0.23140659928321838, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2834 + }, + { + "epoch": 0.14161546530795743, + "grad_norm": 0.17592835426330566, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2835 + }, + { + "epoch": 0.1416654178530396, + "grad_norm": 0.3388282060623169, + "learning_rate": 0.0001, + "loss": 0.1655, + "step": 2836 + }, + { + "epoch": 0.1417153703981218, + "grad_norm": 0.2584021985530853, + "learning_rate": 0.0001, + "loss": 0.0247, + "step": 2837 + }, + { + "epoch": 0.14176532294320396, + "grad_norm": 0.20903819799423218, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2838 + }, + { + "epoch": 0.14181527548828612, + "grad_norm": 0.3615803122520447, + "learning_rate": 0.0001, + "loss": 0.1451, + "step": 2839 + }, + { + "epoch": 0.1418652280333683, + "grad_norm": 0.23348037898540497, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2840 + }, + { + "epoch": 0.14191518057845048, + "grad_norm": 0.2286750078201294, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 2841 + }, + { + "epoch": 0.14196513312353265, + "grad_norm": 0.271721214056015, + "learning_rate": 0.0001, + "loss": 0.0625, + "step": 2842 + }, + { + "epoch": 0.14201508566861482, + "grad_norm": 0.22821259498596191, + "learning_rate": 0.0001, + "loss": 0.0215, + "step": 2843 + }, + { + "epoch": 0.14206503821369698, + "grad_norm": 0.21526768803596497, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 2844 + }, + { + "epoch": 0.14211499075877915, + "grad_norm": 0.24234037101268768, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 2845 + }, + { + "epoch": 0.14216494330386134, + "grad_norm": 0.26442161202430725, + "learning_rate": 0.0001, + "loss": 0.044, + "step": 2846 + }, + { + "epoch": 0.1422148958489435, + "grad_norm": 0.22483597695827484, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2847 + }, + { + "epoch": 0.14226484839402567, + "grad_norm": 0.25831732153892517, + "learning_rate": 0.0001, + "loss": 0.0712, + "step": 2848 + }, + { + "epoch": 0.14231480093910784, + "grad_norm": 0.228341206908226, + "learning_rate": 0.0001, + "loss": 0.0354, + "step": 2849 + }, + { + "epoch": 0.14236475348419003, + "grad_norm": 0.2879784107208252, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 2850 + }, + { + "epoch": 0.1424147060292722, + "grad_norm": 0.18170180916786194, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2851 + }, + { + "epoch": 0.14246465857435436, + "grad_norm": 
0.20321598649024963, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 2852 + }, + { + "epoch": 0.14251461111943653, + "grad_norm": 0.3559463918209076, + "learning_rate": 0.0001, + "loss": 0.0391, + "step": 2853 + }, + { + "epoch": 0.1425645636645187, + "grad_norm": 0.20949684083461761, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 2854 + }, + { + "epoch": 0.1426145162096009, + "grad_norm": 0.15748195350170135, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2855 + }, + { + "epoch": 0.14266446875468305, + "grad_norm": 0.1984679251909256, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 2856 + }, + { + "epoch": 0.14271442129976522, + "grad_norm": 0.18145981431007385, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2857 + }, + { + "epoch": 0.1427643738448474, + "grad_norm": 0.19488099217414856, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 2858 + }, + { + "epoch": 0.14281432638992958, + "grad_norm": 0.19230467081069946, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 2859 + }, + { + "epoch": 0.14286427893501175, + "grad_norm": 0.1995251476764679, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2860 + }, + { + "epoch": 0.1429142314800939, + "grad_norm": 0.1585390418767929, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2861 + }, + { + "epoch": 0.14296418402517608, + "grad_norm": 0.201199471950531, + "learning_rate": 0.0001, + "loss": 0.0111, + "step": 2862 + }, + { + "epoch": 0.14301413657025824, + "grad_norm": 0.19954189658164978, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 2863 + }, + { + "epoch": 0.14306408911534044, + "grad_norm": 0.25970813632011414, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 2864 + }, + { + "epoch": 0.1431140416604226, + "grad_norm": 0.16518375277519226, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2865 + }, + { + "epoch": 0.14316399420550477, + "grad_norm": 0.17550383508205414, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 2866 + }, + { + "epoch": 0.14321394675058693, + "grad_norm": 0.22380642592906952, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2867 + }, + { + "epoch": 0.14326389929566913, + "grad_norm": 0.18489380180835724, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2868 + }, + { + "epoch": 0.1433138518407513, + "grad_norm": 0.17957448959350586, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2869 + }, + { + "epoch": 0.14336380438583346, + "grad_norm": 0.20282085239887238, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2870 + }, + { + "epoch": 0.14341375693091563, + "grad_norm": 0.20760419964790344, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 2871 + }, + { + "epoch": 0.1434637094759978, + "grad_norm": 0.1619146168231964, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2872 + }, + { + "epoch": 0.14351366202107999, + "grad_norm": 0.2047608196735382, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 2873 + }, + { + "epoch": 0.14356361456616215, + "grad_norm": 0.20370493829250336, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2874 + }, + { + "epoch": 0.14361356711124432, + "grad_norm": 0.22157377004623413, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2875 + }, + { + "epoch": 0.14366351965632648, + "grad_norm": 0.1448531299829483, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2876 + }, + { + "epoch": 0.14371347220140865, + "grad_norm": 0.172603577375412, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2877 + }, + { + "epoch": 0.14376342474649084, + "grad_norm": 
0.2644697427749634, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 2878 + }, + { + "epoch": 0.143813377291573, + "grad_norm": 0.17567040026187897, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2879 + }, + { + "epoch": 0.14386332983665517, + "grad_norm": 0.24478034675121307, + "learning_rate": 0.0001, + "loss": 0.0223, + "step": 2880 + }, + { + "epoch": 0.14391328238173734, + "grad_norm": 0.19013747572898865, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2881 + }, + { + "epoch": 0.14396323492681953, + "grad_norm": 0.29106006026268005, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2882 + }, + { + "epoch": 0.1440131874719017, + "grad_norm": 0.2861584722995758, + "learning_rate": 0.0001, + "loss": 0.018, + "step": 2883 + }, + { + "epoch": 0.14406314001698386, + "grad_norm": 0.1773727685213089, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2884 + }, + { + "epoch": 0.14411309256206603, + "grad_norm": 0.2706994116306305, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2885 + }, + { + "epoch": 0.1441630451071482, + "grad_norm": 0.2198411375284195, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2886 + }, + { + "epoch": 0.1442129976522304, + "grad_norm": 0.20276886224746704, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2887 + }, + { + "epoch": 0.14426295019731256, + "grad_norm": 0.2909362316131592, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 2888 + }, + { + "epoch": 0.14431290274239472, + "grad_norm": 0.2770034670829773, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2889 + }, + { + "epoch": 0.1443628552874769, + "grad_norm": 0.1720983237028122, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 2890 + }, + { + "epoch": 0.14441280783255908, + "grad_norm": 0.21958717703819275, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 2891 + }, + { + "epoch": 0.14446276037764125, + "grad_norm": 0.24519012868404388, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 2892 + }, + { + "epoch": 0.1445127129227234, + "grad_norm": 0.27114707231521606, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2893 + }, + { + "epoch": 0.14456266546780558, + "grad_norm": 0.19404329359531403, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2894 + }, + { + "epoch": 0.14461261801288774, + "grad_norm": 0.24480584263801575, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 2895 + }, + { + "epoch": 0.14466257055796994, + "grad_norm": 0.2218262404203415, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2896 + }, + { + "epoch": 0.1447125231030521, + "grad_norm": 0.2476608008146286, + "learning_rate": 0.0001, + "loss": 0.0098, + "step": 2897 + }, + { + "epoch": 0.14476247564813427, + "grad_norm": 0.18156316876411438, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2898 + }, + { + "epoch": 0.14481242819321644, + "grad_norm": 0.2221597135066986, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 2899 + }, + { + "epoch": 0.14486238073829863, + "grad_norm": 0.26097771525382996, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2900 + }, + { + "epoch": 0.1449123332833808, + "grad_norm": 0.20324169099330902, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2901 + }, + { + "epoch": 0.14496228582846296, + "grad_norm": 0.1800057739019394, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2902 + }, + { + "epoch": 0.14501223837354513, + "grad_norm": 0.20065003633499146, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2903 + }, + { + "epoch": 0.1450621909186273, + "grad_norm": 
0.18637552857398987, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 2904 + }, + { + "epoch": 0.1451121434637095, + "grad_norm": 0.15301178395748138, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 2905 + }, + { + "epoch": 0.14516209600879165, + "grad_norm": 0.1966804414987564, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 2906 + }, + { + "epoch": 0.14521204855387382, + "grad_norm": 0.1713973581790924, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2907 + }, + { + "epoch": 0.14526200109895598, + "grad_norm": 0.22616542875766754, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2908 + }, + { + "epoch": 0.14531195364403818, + "grad_norm": 0.3322857916355133, + "learning_rate": 0.0001, + "loss": 0.0209, + "step": 2909 + }, + { + "epoch": 0.14536190618912034, + "grad_norm": 0.11894405633211136, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 2910 + }, + { + "epoch": 0.1454118587342025, + "grad_norm": 0.1738477647304535, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2911 + }, + { + "epoch": 0.14546181127928468, + "grad_norm": 0.20364691317081451, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2912 + }, + { + "epoch": 0.14551176382436684, + "grad_norm": 0.18437331914901733, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 2913 + }, + { + "epoch": 0.14556171636944903, + "grad_norm": 0.15255561470985413, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2914 + }, + { + "epoch": 0.1456116689145312, + "grad_norm": 0.1858309805393219, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2915 + }, + { + "epoch": 0.14566162145961337, + "grad_norm": 0.18659144639968872, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 2916 + }, + { + "epoch": 0.14571157400469553, + "grad_norm": 0.1846657544374466, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2917 + }, + { + "epoch": 0.1457615265497777, + "grad_norm": 0.18982242047786713, + "learning_rate": 0.0001, + "loss": 0.1281, + "step": 2918 + }, + { + "epoch": 0.1458114790948599, + "grad_norm": 0.3062027096748352, + "learning_rate": 0.0001, + "loss": 0.0161, + "step": 2919 + }, + { + "epoch": 0.14586143163994206, + "grad_norm": 0.17897287011146545, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2920 + }, + { + "epoch": 0.14591138418502422, + "grad_norm": 0.23458820581436157, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2921 + }, + { + "epoch": 0.1459613367301064, + "grad_norm": 0.3119482696056366, + "learning_rate": 0.0001, + "loss": 0.131, + "step": 2922 + }, + { + "epoch": 0.14601128927518858, + "grad_norm": 0.21124406158924103, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 2923 + }, + { + "epoch": 0.14606124182027075, + "grad_norm": 0.21981637179851532, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 2924 + }, + { + "epoch": 0.14611119436535291, + "grad_norm": 0.19986096024513245, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2925 + }, + { + "epoch": 0.14616114691043508, + "grad_norm": 0.21797578036785126, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 2926 + }, + { + "epoch": 0.14621109945551725, + "grad_norm": 0.20064020156860352, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 2927 + }, + { + "epoch": 0.14626105200059944, + "grad_norm": 0.19305163621902466, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2928 + }, + { + "epoch": 0.1463110045456816, + "grad_norm": 0.16769523918628693, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2929 + }, + { + "epoch": 0.14636095709076377, + "grad_norm": 
0.1673709899187088, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 2930 + }, + { + "epoch": 0.14641090963584594, + "grad_norm": 0.1501087099313736, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2931 + }, + { + "epoch": 0.14646086218092813, + "grad_norm": 0.19204340875148773, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 2932 + }, + { + "epoch": 0.1465108147260103, + "grad_norm": 0.17008176445960999, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 2933 + }, + { + "epoch": 0.14656076727109246, + "grad_norm": 0.18718937039375305, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 2934 + }, + { + "epoch": 0.14661071981617463, + "grad_norm": 0.13085633516311646, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 2935 + }, + { + "epoch": 0.1466606723612568, + "grad_norm": 0.15796661376953125, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2936 + }, + { + "epoch": 0.146710624906339, + "grad_norm": 0.20500332117080688, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 2937 + }, + { + "epoch": 0.14676057745142115, + "grad_norm": 0.15351678431034088, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2938 + }, + { + "epoch": 0.14681052999650332, + "grad_norm": 0.16801969707012177, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 2939 + }, + { + "epoch": 0.14686048254158549, + "grad_norm": 0.18031497299671173, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2940 + }, + { + "epoch": 0.14691043508666768, + "grad_norm": 0.1412782222032547, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 2941 + }, + { + "epoch": 0.14696038763174984, + "grad_norm": 0.1702103465795517, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 2942 + }, + { + "epoch": 0.147010340176832, + "grad_norm": 0.14984963834285736, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2943 + }, + { + "epoch": 0.14706029272191418, + "grad_norm": 0.15820056200027466, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2944 + }, + { + "epoch": 0.14711024526699634, + "grad_norm": 0.21180680394172668, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2945 + }, + { + "epoch": 0.14716019781207854, + "grad_norm": 0.23080092668533325, + "learning_rate": 0.0001, + "loss": 0.0132, + "step": 2946 + }, + { + "epoch": 0.1472101503571607, + "grad_norm": 0.16598819196224213, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2947 + }, + { + "epoch": 0.14726010290224287, + "grad_norm": 0.1874716579914093, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 2948 + }, + { + "epoch": 0.14731005544732503, + "grad_norm": 0.13596653938293457, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2949 + }, + { + "epoch": 0.1473600079924072, + "grad_norm": 0.1632291078567505, + "learning_rate": 0.0001, + "loss": 0.1268, + "step": 2950 + }, + { + "epoch": 0.1474099605374894, + "grad_norm": 0.18616625666618347, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2951 + }, + { + "epoch": 0.14745991308257156, + "grad_norm": 0.17979004979133606, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 2952 + }, + { + "epoch": 0.14750986562765372, + "grad_norm": 0.1295212060213089, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 2953 + }, + { + "epoch": 0.1475598181727359, + "grad_norm": 0.15725840628147125, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2954 + }, + { + "epoch": 0.14760977071781808, + "grad_norm": 0.16078349947929382, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 2955 + }, + { + "epoch": 0.14765972326290025, + "grad_norm": 
0.15248653292655945, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 2956 + }, + { + "epoch": 0.14770967580798242, + "grad_norm": 0.14670470356941223, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2957 + }, + { + "epoch": 0.14775962835306458, + "grad_norm": 0.17615458369255066, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2958 + }, + { + "epoch": 0.14780958089814675, + "grad_norm": 0.13640418648719788, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2959 + }, + { + "epoch": 0.14785953344322894, + "grad_norm": 0.15067726373672485, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 2960 + }, + { + "epoch": 0.1479094859883111, + "grad_norm": 0.1424148827791214, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 2961 + }, + { + "epoch": 0.14795943853339327, + "grad_norm": 0.15649999678134918, + "learning_rate": 0.0001, + "loss": 0.1291, + "step": 2962 + }, + { + "epoch": 0.14800939107847544, + "grad_norm": 0.20812450349330902, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 2963 + }, + { + "epoch": 0.14805934362355763, + "grad_norm": 0.1713554561138153, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2964 + }, + { + "epoch": 0.1481092961686398, + "grad_norm": 0.18404777348041534, + "learning_rate": 0.0001, + "loss": 0.124, + "step": 2965 + }, + { + "epoch": 0.14815924871372196, + "grad_norm": 0.2106265425682068, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 2966 + }, + { + "epoch": 0.14820920125880413, + "grad_norm": 0.28050926327705383, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 2967 + }, + { + "epoch": 0.1482591538038863, + "grad_norm": 0.24784347414970398, + "learning_rate": 0.0001, + "loss": 0.0229, + "step": 2968 + }, + { + "epoch": 0.1483091063489685, + "grad_norm": 0.24436171352863312, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 2969 + }, + { + "epoch": 0.14835905889405065, + "grad_norm": 0.2502252161502838, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 2970 + }, + { + "epoch": 0.14840901143913282, + "grad_norm": 0.26751357316970825, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 2971 + }, + { + "epoch": 0.148458963984215, + "grad_norm": 0.19916707277297974, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 2972 + }, + { + "epoch": 0.14850891652929718, + "grad_norm": 0.24465401470661163, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 2973 + }, + { + "epoch": 0.14855886907437935, + "grad_norm": 0.297953724861145, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 2974 + }, + { + "epoch": 0.1486088216194615, + "grad_norm": 0.20743221044540405, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 2975 + }, + { + "epoch": 0.14865877416454368, + "grad_norm": 0.3634229004383087, + "learning_rate": 0.0001, + "loss": 0.1348, + "step": 2976 + }, + { + "epoch": 0.14870872670962584, + "grad_norm": 0.33427169919013977, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 2977 + }, + { + "epoch": 0.14875867925470804, + "grad_norm": 0.2693432569503784, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2978 + }, + { + "epoch": 0.1488086317997902, + "grad_norm": 0.17161327600479126, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 2979 + }, + { + "epoch": 0.14885858434487237, + "grad_norm": 0.41576963663101196, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 2980 + }, + { + "epoch": 0.14890853688995453, + "grad_norm": 0.43523910641670227, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 2981 + }, + { + "epoch": 0.14895848943503673, + "grad_norm": 
0.379520446062088, + "learning_rate": 0.0001, + "loss": 0.0215, + "step": 2982 + }, + { + "epoch": 0.1490084419801189, + "grad_norm": 0.3140709698200226, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 2983 + }, + { + "epoch": 0.14905839452520106, + "grad_norm": 0.24494972825050354, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 2984 + }, + { + "epoch": 0.14910834707028323, + "grad_norm": 0.4263039231300354, + "learning_rate": 0.0001, + "loss": 0.0447, + "step": 2985 + }, + { + "epoch": 0.1491582996153654, + "grad_norm": 0.32068201899528503, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 2986 + }, + { + "epoch": 0.14920825216044759, + "grad_norm": 0.2625108063220978, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2987 + }, + { + "epoch": 0.14925820470552975, + "grad_norm": 0.2706150412559509, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 2988 + }, + { + "epoch": 0.14930815725061192, + "grad_norm": 0.335836261510849, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 2989 + }, + { + "epoch": 0.14935810979569408, + "grad_norm": 0.30348852276802063, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 2990 + }, + { + "epoch": 0.14940806234077625, + "grad_norm": 0.4346565008163452, + "learning_rate": 0.0001, + "loss": 0.2007, + "step": 2991 + }, + { + "epoch": 0.14945801488585844, + "grad_norm": 0.24740220606327057, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 2992 + }, + { + "epoch": 0.1495079674309406, + "grad_norm": 0.4034811556339264, + "learning_rate": 0.0001, + "loss": 0.0606, + "step": 2993 + }, + { + "epoch": 0.14955791997602277, + "grad_norm": 0.2692834138870239, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 2994 + }, + { + "epoch": 0.14960787252110494, + "grad_norm": 0.4175630807876587, + "learning_rate": 0.0001, + "loss": 0.0297, + "step": 2995 + }, + { + "epoch": 0.14965782506618713, + "grad_norm": 0.29328322410583496, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 2996 + }, + { + "epoch": 0.1497077776112693, + "grad_norm": 0.28728553652763367, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 2997 + }, + { + "epoch": 0.14975773015635147, + "grad_norm": 0.26590442657470703, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 2998 + }, + { + "epoch": 0.14980768270143363, + "grad_norm": 0.2843528687953949, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 2999 + }, + { + "epoch": 0.1498576352465158, + "grad_norm": 0.34388235211372375, + "learning_rate": 0.0001, + "loss": 0.0528, + "step": 3000 + }, + { + "epoch": 0.149907587791598, + "grad_norm": 0.29207131266593933, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 3001 + }, + { + "epoch": 0.14995754033668016, + "grad_norm": 0.32728898525238037, + "learning_rate": 0.0001, + "loss": 0.0335, + "step": 3002 + }, + { + "epoch": 0.15000749288176232, + "grad_norm": 0.3027557134628296, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 3003 + }, + { + "epoch": 0.1500574454268445, + "grad_norm": 0.2940175235271454, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 3004 + }, + { + "epoch": 0.15010739797192668, + "grad_norm": 0.24758101999759674, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3005 + }, + { + "epoch": 0.15015735051700885, + "grad_norm": 0.344765841960907, + "learning_rate": 0.0001, + "loss": 0.1866, + "step": 3006 + }, + { + "epoch": 0.150207303062091, + "grad_norm": 0.3320505619049072, + "learning_rate": 0.0001, + "loss": 0.0539, + "step": 3007 + }, + { + "epoch": 0.15025725560717318, + "grad_norm": 0.3203373849391937, + 
"learning_rate": 0.0001, + "loss": 0.0321, + "step": 3008 + }, + { + "epoch": 0.15030720815225534, + "grad_norm": 0.2534812390804291, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3009 + }, + { + "epoch": 0.15035716069733754, + "grad_norm": 0.2803078889846802, + "learning_rate": 0.0001, + "loss": 0.0099, + "step": 3010 + }, + { + "epoch": 0.1504071132424197, + "grad_norm": 0.36000171303749084, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 3011 + }, + { + "epoch": 0.15045706578750187, + "grad_norm": 0.2923630475997925, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 3012 + }, + { + "epoch": 0.15050701833258404, + "grad_norm": 0.21679005026817322, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3013 + }, + { + "epoch": 0.15055697087766623, + "grad_norm": 0.27204629778862, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3014 + }, + { + "epoch": 0.1506069234227484, + "grad_norm": 0.27629345655441284, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 3015 + }, + { + "epoch": 0.15065687596783056, + "grad_norm": 0.3103879690170288, + "learning_rate": 0.0001, + "loss": 0.0343, + "step": 3016 + }, + { + "epoch": 0.15070682851291273, + "grad_norm": 0.2829141616821289, + "learning_rate": 0.0001, + "loss": 0.0722, + "step": 3017 + }, + { + "epoch": 0.1507567810579949, + "grad_norm": 0.32985273003578186, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 3018 + }, + { + "epoch": 0.1508067336030771, + "grad_norm": 0.43445679545402527, + "learning_rate": 0.0001, + "loss": 0.0174, + "step": 3019 + }, + { + "epoch": 0.15085668614815925, + "grad_norm": 0.2517944574356079, + "learning_rate": 0.0001, + "loss": 0.1273, + "step": 3020 + }, + { + "epoch": 0.15090663869324142, + "grad_norm": 0.45146408677101135, + "learning_rate": 0.0001, + "loss": 0.0823, + "step": 3021 + }, + { + "epoch": 0.15095659123832358, + "grad_norm": 0.458232045173645, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 3022 + }, + { + "epoch": 0.15100654378340578, + "grad_norm": 0.33494529128074646, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3023 + }, + { + "epoch": 0.15105649632848794, + "grad_norm": 0.30296263098716736, + "learning_rate": 0.0001, + "loss": 0.0182, + "step": 3024 + }, + { + "epoch": 0.1511064488735701, + "grad_norm": 0.3189160227775574, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 3025 + }, + { + "epoch": 0.15115640141865228, + "grad_norm": 0.36347946524620056, + "learning_rate": 0.0001, + "loss": 0.0308, + "step": 3026 + }, + { + "epoch": 0.15120635396373444, + "grad_norm": 0.3258523643016815, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3027 + }, + { + "epoch": 0.15125630650881663, + "grad_norm": 0.23841972649097443, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3028 + }, + { + "epoch": 0.1513062590538988, + "grad_norm": 0.2806205153465271, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 3029 + }, + { + "epoch": 0.15135621159898097, + "grad_norm": 0.2559715509414673, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 3030 + }, + { + "epoch": 0.15140616414406313, + "grad_norm": 0.2780052423477173, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3031 + }, + { + "epoch": 0.1514561166891453, + "grad_norm": 0.2225748747587204, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3032 + }, + { + "epoch": 0.1515060692342275, + "grad_norm": 0.2031581997871399, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3033 + }, + { + "epoch": 0.15155602177930966, + "grad_norm": 0.24890057742595673, + "learning_rate": 
0.0001, + "loss": 0.006, + "step": 3034 + }, + { + "epoch": 0.15160597432439182, + "grad_norm": 0.2784612476825714, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3035 + }, + { + "epoch": 0.151655926869474, + "grad_norm": 0.19182349741458893, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3036 + }, + { + "epoch": 0.15170587941455618, + "grad_norm": 0.2641083896160126, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3037 + }, + { + "epoch": 0.15175583195963835, + "grad_norm": 0.2345716506242752, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3038 + }, + { + "epoch": 0.15180578450472051, + "grad_norm": 0.1831074059009552, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3039 + }, + { + "epoch": 0.15185573704980268, + "grad_norm": 0.21214503049850464, + "learning_rate": 0.0001, + "loss": 0.0191, + "step": 3040 + }, + { + "epoch": 0.15190568959488485, + "grad_norm": 0.2242259383201599, + "learning_rate": 0.0001, + "loss": 0.0282, + "step": 3041 + }, + { + "epoch": 0.15195564213996704, + "grad_norm": 0.2032639980316162, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3042 + }, + { + "epoch": 0.1520055946850492, + "grad_norm": 0.16914546489715576, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3043 + }, + { + "epoch": 0.15205554723013137, + "grad_norm": 0.1867552399635315, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 3044 + }, + { + "epoch": 0.15210549977521354, + "grad_norm": 0.18483194708824158, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3045 + }, + { + "epoch": 0.15215545232029573, + "grad_norm": 0.18856483697891235, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3046 + }, + { + "epoch": 0.1522054048653779, + "grad_norm": 0.15799163281917572, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 3047 + }, + { + "epoch": 0.15225535741046006, + "grad_norm": 0.2040591835975647, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3048 + }, + { + "epoch": 0.15230530995554223, + "grad_norm": 0.19294951856136322, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3049 + }, + { + "epoch": 0.1523552625006244, + "grad_norm": 0.16355527937412262, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3050 + }, + { + "epoch": 0.1524052150457066, + "grad_norm": 0.18276040256023407, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3051 + }, + { + "epoch": 0.15245516759078875, + "grad_norm": 0.1736534982919693, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3052 + }, + { + "epoch": 0.15250512013587092, + "grad_norm": 0.1890956461429596, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3053 + }, + { + "epoch": 0.15255507268095309, + "grad_norm": 0.16019974648952484, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3054 + }, + { + "epoch": 0.15260502522603528, + "grad_norm": 0.27682867646217346, + "learning_rate": 0.0001, + "loss": 0.1334, + "step": 3055 + }, + { + "epoch": 0.15265497777111744, + "grad_norm": 0.18267102539539337, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3056 + }, + { + "epoch": 0.1527049303161996, + "grad_norm": 0.2672710120677948, + "learning_rate": 0.0001, + "loss": 0.1461, + "step": 3057 + }, + { + "epoch": 0.15275488286128178, + "grad_norm": 0.1516232043504715, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3058 + }, + { + "epoch": 0.15280483540636394, + "grad_norm": 0.24576541781425476, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 3059 + }, + { + "epoch": 0.15285478795144614, + "grad_norm": 0.19964511692523956, + "learning_rate": 0.0001, + 
"loss": 0.0025, + "step": 3060 + }, + { + "epoch": 0.1529047404965283, + "grad_norm": 0.16994449496269226, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3061 + }, + { + "epoch": 0.15295469304161047, + "grad_norm": 0.1930420845746994, + "learning_rate": 0.0001, + "loss": 0.1292, + "step": 3062 + }, + { + "epoch": 0.15300464558669263, + "grad_norm": 0.2458578199148178, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 3063 + }, + { + "epoch": 0.15305459813177483, + "grad_norm": 0.1885824203491211, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 3064 + }, + { + "epoch": 0.153104550676857, + "grad_norm": 0.18990235030651093, + "learning_rate": 0.0001, + "loss": 0.0205, + "step": 3065 + }, + { + "epoch": 0.15315450322193916, + "grad_norm": 0.19917117059230804, + "learning_rate": 0.0001, + "loss": 0.1274, + "step": 3066 + }, + { + "epoch": 0.15320445576702132, + "grad_norm": 0.24489815533161163, + "learning_rate": 0.0001, + "loss": 0.0421, + "step": 3067 + }, + { + "epoch": 0.1532544083121035, + "grad_norm": 0.18729430437088013, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3068 + }, + { + "epoch": 0.15330436085718568, + "grad_norm": 0.34555065631866455, + "learning_rate": 0.0001, + "loss": 0.0768, + "step": 3069 + }, + { + "epoch": 0.15335431340226785, + "grad_norm": 0.24359071254730225, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 3070 + }, + { + "epoch": 0.15340426594735002, + "grad_norm": 0.28090184926986694, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3071 + }, + { + "epoch": 0.15345421849243218, + "grad_norm": 0.2448236048221588, + "learning_rate": 0.0001, + "loss": 0.1413, + "step": 3072 + }, + { + "epoch": 0.15350417103751435, + "grad_norm": 0.193623349070549, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3073 + }, + { + "epoch": 0.15355412358259654, + "grad_norm": 0.25398024916648865, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3074 + }, + { + "epoch": 0.1536040761276787, + "grad_norm": 0.42818063497543335, + "learning_rate": 0.0001, + "loss": 0.0478, + "step": 3075 + }, + { + "epoch": 0.15365402867276087, + "grad_norm": 0.2938474118709564, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3076 + }, + { + "epoch": 0.15370398121784304, + "grad_norm": 0.3095734119415283, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 3077 + }, + { + "epoch": 0.15375393376292523, + "grad_norm": 0.28716257214546204, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 3078 + }, + { + "epoch": 0.1538038863080074, + "grad_norm": 0.2194059193134308, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3079 + }, + { + "epoch": 0.15385383885308956, + "grad_norm": 0.27744370698928833, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 3080 + }, + { + "epoch": 0.15390379139817173, + "grad_norm": 0.2385987788438797, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 3081 + }, + { + "epoch": 0.1539537439432539, + "grad_norm": 0.26309847831726074, + "learning_rate": 0.0001, + "loss": 0.0231, + "step": 3082 + }, + { + "epoch": 0.1540036964883361, + "grad_norm": 0.25133469700813293, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 3083 + }, + { + "epoch": 0.15405364903341826, + "grad_norm": 0.25415945053100586, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 3084 + }, + { + "epoch": 0.15410360157850042, + "grad_norm": 0.25120872259140015, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 3085 + }, + { + "epoch": 0.1541535541235826, + "grad_norm": 0.21139861643314362, + "learning_rate": 0.0001, + "loss": 
0.0038, + "step": 3086 + }, + { + "epoch": 0.15420350666866478, + "grad_norm": 0.24848558008670807, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3087 + }, + { + "epoch": 0.15425345921374695, + "grad_norm": 0.2609512209892273, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 3088 + }, + { + "epoch": 0.1543034117588291, + "grad_norm": 0.21161451935768127, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 3089 + }, + { + "epoch": 0.15435336430391128, + "grad_norm": 0.27730366587638855, + "learning_rate": 0.0001, + "loss": 0.0382, + "step": 3090 + }, + { + "epoch": 0.15440331684899344, + "grad_norm": 0.20233629643917084, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 3091 + }, + { + "epoch": 0.15445326939407564, + "grad_norm": 0.21473902463912964, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3092 + }, + { + "epoch": 0.1545032219391578, + "grad_norm": 0.2160562127828598, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3093 + }, + { + "epoch": 0.15455317448423997, + "grad_norm": 0.2670044004917145, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3094 + }, + { + "epoch": 0.15460312702932213, + "grad_norm": 0.2457994818687439, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3095 + }, + { + "epoch": 0.15465307957440433, + "grad_norm": 0.2541225850582123, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 3096 + }, + { + "epoch": 0.1547030321194865, + "grad_norm": 0.2026132047176361, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3097 + }, + { + "epoch": 0.15475298466456866, + "grad_norm": 0.21254286170005798, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3098 + }, + { + "epoch": 0.15480293720965083, + "grad_norm": 0.2791108787059784, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3099 + }, + { + "epoch": 0.154852889754733, + "grad_norm": 0.23135048151016235, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 3100 + }, + { + "epoch": 0.15490284229981519, + "grad_norm": 0.20162205398082733, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3101 + }, + { + "epoch": 0.15495279484489735, + "grad_norm": 0.19182562828063965, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3102 + }, + { + "epoch": 0.15500274738997952, + "grad_norm": 0.2174851894378662, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3103 + }, + { + "epoch": 0.15505269993506168, + "grad_norm": 0.21510152518749237, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 3104 + }, + { + "epoch": 0.15510265248014388, + "grad_norm": 0.20562829077243805, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 3105 + }, + { + "epoch": 0.15515260502522604, + "grad_norm": 0.224543496966362, + "learning_rate": 0.0001, + "loss": 0.1286, + "step": 3106 + }, + { + "epoch": 0.1552025575703082, + "grad_norm": 0.1594768464565277, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3107 + }, + { + "epoch": 0.15525251011539037, + "grad_norm": 0.1976701021194458, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3108 + }, + { + "epoch": 0.15530246266047254, + "grad_norm": 0.25908932089805603, + "learning_rate": 0.0001, + "loss": 0.016, + "step": 3109 + }, + { + "epoch": 0.15535241520555473, + "grad_norm": 0.19610974192619324, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3110 + }, + { + "epoch": 0.1554023677506369, + "grad_norm": 0.21387995779514313, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3111 + }, + { + "epoch": 0.15545232029571907, + "grad_norm": 0.20474880933761597, + "learning_rate": 0.0001, + "loss": 0.0027, + 
"step": 3112 + }, + { + "epoch": 0.15550227284080123, + "grad_norm": 0.19518117606639862, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3113 + }, + { + "epoch": 0.1555522253858834, + "grad_norm": 0.24228592216968536, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 3114 + }, + { + "epoch": 0.1556021779309656, + "grad_norm": 0.27575773000717163, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3115 + }, + { + "epoch": 0.15565213047604776, + "grad_norm": 0.2311074286699295, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3116 + }, + { + "epoch": 0.15570208302112992, + "grad_norm": 0.1967175453901291, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3117 + }, + { + "epoch": 0.1557520355662121, + "grad_norm": 0.36476966738700867, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 3118 + }, + { + "epoch": 0.15580198811129428, + "grad_norm": 0.21252278983592987, + "learning_rate": 0.0001, + "loss": 0.1265, + "step": 3119 + }, + { + "epoch": 0.15585194065637645, + "grad_norm": 0.22140096127986908, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3120 + }, + { + "epoch": 0.1559018932014586, + "grad_norm": 0.1410878300666809, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3121 + }, + { + "epoch": 0.15595184574654078, + "grad_norm": 0.1637025624513626, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3122 + }, + { + "epoch": 0.15600179829162295, + "grad_norm": 0.2546426057815552, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 3123 + }, + { + "epoch": 0.15605175083670514, + "grad_norm": 0.2154088020324707, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3124 + }, + { + "epoch": 0.1561017033817873, + "grad_norm": 0.2820119857788086, + "learning_rate": 0.0001, + "loss": 0.1449, + "step": 3125 + }, + { + "epoch": 0.15615165592686947, + "grad_norm": 0.17683464288711548, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3126 + }, + { + "epoch": 0.15620160847195164, + "grad_norm": 0.2150915414094925, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3127 + }, + { + "epoch": 0.15625156101703383, + "grad_norm": 0.2278290092945099, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3128 + }, + { + "epoch": 0.156301513562116, + "grad_norm": 0.2018953263759613, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3129 + }, + { + "epoch": 0.15635146610719816, + "grad_norm": 0.1688683032989502, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3130 + }, + { + "epoch": 0.15640141865228033, + "grad_norm": 0.19432689249515533, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 3131 + }, + { + "epoch": 0.1564513711973625, + "grad_norm": 0.2284502387046814, + "learning_rate": 0.0001, + "loss": 0.0213, + "step": 3132 + }, + { + "epoch": 0.1565013237424447, + "grad_norm": 0.2372451275587082, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 3133 + }, + { + "epoch": 0.15655127628752685, + "grad_norm": 0.26807641983032227, + "learning_rate": 0.0001, + "loss": 0.0587, + "step": 3134 + }, + { + "epoch": 0.15660122883260902, + "grad_norm": 0.2111518234014511, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3135 + }, + { + "epoch": 0.15665118137769118, + "grad_norm": 0.2546415627002716, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3136 + }, + { + "epoch": 0.15670113392277338, + "grad_norm": 0.24445609748363495, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3137 + }, + { + "epoch": 0.15675108646785554, + "grad_norm": 0.2078956663608551, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3138 + }, 
+ { + "epoch": 0.1568010390129377, + "grad_norm": 0.17457756400108337, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3139 + }, + { + "epoch": 0.15685099155801988, + "grad_norm": 0.17872178554534912, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 3140 + }, + { + "epoch": 0.15690094410310204, + "grad_norm": 0.2969629764556885, + "learning_rate": 0.0001, + "loss": 0.2789, + "step": 3141 + }, + { + "epoch": 0.15695089664818423, + "grad_norm": 0.2170424461364746, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3142 + }, + { + "epoch": 0.1570008491932664, + "grad_norm": 0.18591058254241943, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 3143 + }, + { + "epoch": 0.15705080173834857, + "grad_norm": 0.1817602813243866, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3144 + }, + { + "epoch": 0.15710075428343073, + "grad_norm": 0.28566500544548035, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 3145 + }, + { + "epoch": 0.15715070682851293, + "grad_norm": 0.17940008640289307, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 3146 + }, + { + "epoch": 0.1572006593735951, + "grad_norm": 0.1932079941034317, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 3147 + }, + { + "epoch": 0.15725061191867726, + "grad_norm": 0.1557086855173111, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3148 + }, + { + "epoch": 0.15730056446375942, + "grad_norm": 0.14993233978748322, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3149 + }, + { + "epoch": 0.1573505170088416, + "grad_norm": 0.1736813634634018, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3150 + }, + { + "epoch": 0.15740046955392378, + "grad_norm": 0.18568769097328186, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3151 + }, + { + "epoch": 0.15745042209900595, + "grad_norm": 0.24210180342197418, + "learning_rate": 0.0001, + "loss": 0.1346, + "step": 3152 + }, + { + "epoch": 0.15750037464408811, + "grad_norm": 0.17459766566753387, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3153 + }, + { + "epoch": 0.15755032718917028, + "grad_norm": 0.2451207935810089, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3154 + }, + { + "epoch": 0.15760027973425245, + "grad_norm": 0.31459200382232666, + "learning_rate": 0.0001, + "loss": 0.2686, + "step": 3155 + }, + { + "epoch": 0.15765023227933464, + "grad_norm": 0.1617603749036789, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3156 + }, + { + "epoch": 0.1577001848244168, + "grad_norm": 0.24809512495994568, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3157 + }, + { + "epoch": 0.15775013736949897, + "grad_norm": 0.20146308839321136, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3158 + }, + { + "epoch": 0.15780008991458114, + "grad_norm": 0.341422438621521, + "learning_rate": 0.0001, + "loss": 0.2575, + "step": 3159 + }, + { + "epoch": 0.15785004245966333, + "grad_norm": 0.2820937931537628, + "learning_rate": 0.0001, + "loss": 0.0178, + "step": 3160 + }, + { + "epoch": 0.1578999950047455, + "grad_norm": 0.24632245302200317, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3161 + }, + { + "epoch": 0.15794994754982766, + "grad_norm": 0.1918366253376007, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3162 + }, + { + "epoch": 0.15799990009490983, + "grad_norm": 0.19728049635887146, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 3163 + }, + { + "epoch": 0.158049852639992, + "grad_norm": 0.19813816249370575, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3164 + }, + { + "epoch": 
0.1580998051850742, + "grad_norm": 0.20772618055343628, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 3165 + }, + { + "epoch": 0.15814975773015635, + "grad_norm": 0.21368059515953064, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3166 + }, + { + "epoch": 0.15819971027523852, + "grad_norm": 0.21926964819431305, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3167 + }, + { + "epoch": 0.15824966282032069, + "grad_norm": 0.2125472128391266, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3168 + }, + { + "epoch": 0.15829961536540288, + "grad_norm": 0.22614768147468567, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3169 + }, + { + "epoch": 0.15834956791048505, + "grad_norm": 0.18195252120494843, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3170 + }, + { + "epoch": 0.1583995204555672, + "grad_norm": 0.20029225945472717, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3171 + }, + { + "epoch": 0.15844947300064938, + "grad_norm": 0.23317500948905945, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3172 + }, + { + "epoch": 0.15849942554573154, + "grad_norm": 0.15179578959941864, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3173 + }, + { + "epoch": 0.15854937809081374, + "grad_norm": 0.1898055225610733, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3174 + }, + { + "epoch": 0.1585993306358959, + "grad_norm": 0.26081788539886475, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3175 + }, + { + "epoch": 0.15864928318097807, + "grad_norm": 0.17914626002311707, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 3176 + }, + { + "epoch": 0.15869923572606023, + "grad_norm": 0.13140001893043518, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3177 + }, + { + "epoch": 0.15874918827114243, + "grad_norm": 0.14894458651542664, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3178 + }, + { + "epoch": 0.1587991408162246, + "grad_norm": 0.32259783148765564, + "learning_rate": 0.0001, + "loss": 0.1366, + "step": 3179 + }, + { + "epoch": 0.15884909336130676, + "grad_norm": 0.16088837385177612, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 3180 + }, + { + "epoch": 0.15889904590638892, + "grad_norm": 0.20900201797485352, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 3181 + }, + { + "epoch": 0.1589489984514711, + "grad_norm": 0.2171621024608612, + "learning_rate": 0.0001, + "loss": 0.1288, + "step": 3182 + }, + { + "epoch": 0.15899895099655328, + "grad_norm": 0.15379811823368073, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3183 + }, + { + "epoch": 0.15904890354163545, + "grad_norm": 0.21694742143154144, + "learning_rate": 0.0001, + "loss": 0.0216, + "step": 3184 + }, + { + "epoch": 0.15909885608671762, + "grad_norm": 0.23717175424098969, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3185 + }, + { + "epoch": 0.15914880863179978, + "grad_norm": 0.21942085027694702, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3186 + }, + { + "epoch": 0.15919876117688198, + "grad_norm": 0.4099750518798828, + "learning_rate": 0.0001, + "loss": 0.1438, + "step": 3187 + }, + { + "epoch": 0.15924871372196414, + "grad_norm": 0.27590227127075195, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 3188 + }, + { + "epoch": 0.1592986662670463, + "grad_norm": 0.19593051075935364, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3189 + }, + { + "epoch": 0.15934861881212847, + "grad_norm": 0.21973243355751038, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3190 + }, + { + "epoch": 
0.15939857135721064, + "grad_norm": 0.36282026767730713, + "learning_rate": 0.0001, + "loss": 0.0448, + "step": 3191 + }, + { + "epoch": 0.15944852390229283, + "grad_norm": 0.2252025008201599, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 3192 + }, + { + "epoch": 0.159498476447375, + "grad_norm": 0.46407264471054077, + "learning_rate": 0.0001, + "loss": 0.151, + "step": 3193 + }, + { + "epoch": 0.15954842899245716, + "grad_norm": 0.33632877469062805, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 3194 + }, + { + "epoch": 0.15959838153753933, + "grad_norm": 0.39911970496177673, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 3195 + }, + { + "epoch": 0.1596483340826215, + "grad_norm": 0.36586710810661316, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 3196 + }, + { + "epoch": 0.1596982866277037, + "grad_norm": 0.37409478425979614, + "learning_rate": 0.0001, + "loss": 0.1302, + "step": 3197 + }, + { + "epoch": 0.15974823917278586, + "grad_norm": 0.38384824991226196, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 3198 + }, + { + "epoch": 0.15979819171786802, + "grad_norm": 0.37138211727142334, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 3199 + }, + { + "epoch": 0.1598481442629502, + "grad_norm": 0.2540183663368225, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 3200 + }, + { + "epoch": 0.15989809680803238, + "grad_norm": 0.297057569026947, + "learning_rate": 0.0001, + "loss": 0.013, + "step": 3201 + }, + { + "epoch": 0.15994804935311455, + "grad_norm": 0.30063751339912415, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 3202 + }, + { + "epoch": 0.1599980018981967, + "grad_norm": 0.3734966814517975, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 3203 + }, + { + "epoch": 0.16004795444327888, + "grad_norm": 0.29404592514038086, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3204 + }, + { + "epoch": 0.16009790698836104, + "grad_norm": 0.36941754817962646, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3205 + }, + { + "epoch": 0.16014785953344324, + "grad_norm": 0.2862963378429413, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3206 + }, + { + "epoch": 0.1601978120785254, + "grad_norm": 0.29190725088119507, + "learning_rate": 0.0001, + "loss": 0.1261, + "step": 3207 + }, + { + "epoch": 0.16024776462360757, + "grad_norm": 0.3160538673400879, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 3208 + }, + { + "epoch": 0.16029771716868974, + "grad_norm": 0.24235424399375916, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3209 + }, + { + "epoch": 0.16034766971377193, + "grad_norm": 0.24466851353645325, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3210 + }, + { + "epoch": 0.1603976222588541, + "grad_norm": 0.2424413412809372, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 3211 + }, + { + "epoch": 0.16044757480393626, + "grad_norm": 0.30447861552238464, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3212 + }, + { + "epoch": 0.16049752734901843, + "grad_norm": 0.20150041580200195, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 3213 + }, + { + "epoch": 0.1605474798941006, + "grad_norm": 0.2664974629878998, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3214 + }, + { + "epoch": 0.16059743243918279, + "grad_norm": 1.1028547286987305, + "learning_rate": 0.0001, + "loss": 0.0468, + "step": 3215 + }, + { + "epoch": 0.16064738498426495, + "grad_norm": 0.6030205488204956, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3216 + }, + { + "epoch": 
0.16069733752934712, + "grad_norm": 0.8289213180541992, + "learning_rate": 0.0001, + "loss": 0.0343, + "step": 3217 + }, + { + "epoch": 0.16074729007442928, + "grad_norm": 0.440584659576416, + "learning_rate": 0.0001, + "loss": 0.0399, + "step": 3218 + }, + { + "epoch": 0.16079724261951148, + "grad_norm": 0.698188304901123, + "learning_rate": 0.0001, + "loss": 0.0239, + "step": 3219 + }, + { + "epoch": 0.16084719516459364, + "grad_norm": 0.8107909560203552, + "learning_rate": 0.0001, + "loss": 0.2809, + "step": 3220 + }, + { + "epoch": 0.1608971477096758, + "grad_norm": 0.7814136147499084, + "learning_rate": 0.0001, + "loss": 0.0336, + "step": 3221 + }, + { + "epoch": 0.16094710025475797, + "grad_norm": 0.5529165863990784, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 3222 + }, + { + "epoch": 0.16099705279984014, + "grad_norm": 0.45942044258117676, + "learning_rate": 0.0001, + "loss": 0.0238, + "step": 3223 + }, + { + "epoch": 0.16104700534492233, + "grad_norm": 0.6543004512786865, + "learning_rate": 0.0001, + "loss": 0.0422, + "step": 3224 + }, + { + "epoch": 0.1610969578900045, + "grad_norm": 0.5346598029136658, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 3225 + }, + { + "epoch": 0.16114691043508667, + "grad_norm": 0.6618442535400391, + "learning_rate": 0.0001, + "loss": 0.3036, + "step": 3226 + }, + { + "epoch": 0.16119686298016883, + "grad_norm": 0.45812633633613586, + "learning_rate": 0.0001, + "loss": 0.063, + "step": 3227 + }, + { + "epoch": 0.16124681552525102, + "grad_norm": 0.4703197777271271, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 3228 + }, + { + "epoch": 0.1612967680703332, + "grad_norm": 0.5953631401062012, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 3229 + }, + { + "epoch": 0.16134672061541536, + "grad_norm": 0.3355160355567932, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 3230 + }, + { + "epoch": 0.16139667316049752, + "grad_norm": 0.4655294120311737, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 3231 + }, + { + "epoch": 0.1614466257055797, + "grad_norm": 0.4486992061138153, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3232 + }, + { + "epoch": 0.16149657825066188, + "grad_norm": 0.5065616965293884, + "learning_rate": 0.0001, + "loss": 0.0288, + "step": 3233 + }, + { + "epoch": 0.16154653079574405, + "grad_norm": 0.42370912432670593, + "learning_rate": 0.0001, + "loss": 0.024, + "step": 3234 + }, + { + "epoch": 0.1615964833408262, + "grad_norm": 0.8823440670967102, + "learning_rate": 0.0001, + "loss": 0.0928, + "step": 3235 + }, + { + "epoch": 0.16164643588590838, + "grad_norm": 0.7148234248161316, + "learning_rate": 0.0001, + "loss": 0.0637, + "step": 3236 + }, + { + "epoch": 0.16169638843099055, + "grad_norm": 0.5669621229171753, + "learning_rate": 0.0001, + "loss": 0.0329, + "step": 3237 + }, + { + "epoch": 0.16174634097607274, + "grad_norm": 0.533944308757782, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 3238 + }, + { + "epoch": 0.1617962935211549, + "grad_norm": 0.6855841875076294, + "learning_rate": 0.0001, + "loss": 0.0939, + "step": 3239 + }, + { + "epoch": 0.16184624606623707, + "grad_norm": 0.5064708590507507, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3240 + }, + { + "epoch": 0.16189619861131924, + "grad_norm": 0.598260760307312, + "learning_rate": 0.0001, + "loss": 0.0105, + "step": 3241 + }, + { + "epoch": 0.16194615115640143, + "grad_norm": 0.5685912370681763, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3242 + }, + { + "epoch": 0.1619961037014836, + 
"grad_norm": 0.5236881971359253, + "learning_rate": 0.0001, + "loss": 0.0146, + "step": 3243 + }, + { + "epoch": 0.16204605624656576, + "grad_norm": 0.466288298368454, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 3244 + }, + { + "epoch": 0.16209600879164793, + "grad_norm": 0.5234620571136475, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 3245 + }, + { + "epoch": 0.1621459613367301, + "grad_norm": 0.4695202708244324, + "learning_rate": 0.0001, + "loss": 0.0221, + "step": 3246 + }, + { + "epoch": 0.1621959138818123, + "grad_norm": 0.5218177437782288, + "learning_rate": 0.0001, + "loss": 0.0115, + "step": 3247 + }, + { + "epoch": 0.16224586642689445, + "grad_norm": 0.6630246639251709, + "learning_rate": 0.0001, + "loss": 0.0362, + "step": 3248 + }, + { + "epoch": 0.16229581897197662, + "grad_norm": 0.3581981360912323, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 3249 + }, + { + "epoch": 0.16234577151705878, + "grad_norm": 0.3400290012359619, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 3250 + }, + { + "epoch": 0.16239572406214098, + "grad_norm": 0.377023845911026, + "learning_rate": 0.0001, + "loss": 0.0099, + "step": 3251 + }, + { + "epoch": 0.16244567660722314, + "grad_norm": 0.3578341603279114, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3252 + }, + { + "epoch": 0.1624956291523053, + "grad_norm": 0.44655054807662964, + "learning_rate": 0.0001, + "loss": 0.0182, + "step": 3253 + }, + { + "epoch": 0.16254558169738748, + "grad_norm": 0.3204021453857422, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3254 + }, + { + "epoch": 0.16259553424246964, + "grad_norm": 0.31395646929740906, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3255 + }, + { + "epoch": 0.16264548678755184, + "grad_norm": 0.41930079460144043, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 3256 + }, + { + "epoch": 0.162695439332634, + "grad_norm": 0.3874184489250183, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 3257 + }, + { + "epoch": 0.16274539187771617, + "grad_norm": 0.43263325095176697, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 3258 + }, + { + "epoch": 0.16279534442279833, + "grad_norm": 0.5045973062515259, + "learning_rate": 0.0001, + "loss": 0.0229, + "step": 3259 + }, + { + "epoch": 0.16284529696788053, + "grad_norm": 0.31116288900375366, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3260 + }, + { + "epoch": 0.1628952495129627, + "grad_norm": 0.5128533244132996, + "learning_rate": 0.0001, + "loss": 0.0719, + "step": 3261 + }, + { + "epoch": 0.16294520205804486, + "grad_norm": 0.4034539759159088, + "learning_rate": 0.0001, + "loss": 0.0582, + "step": 3262 + }, + { + "epoch": 0.16299515460312702, + "grad_norm": 0.4527755677700043, + "learning_rate": 0.0001, + "loss": 0.0153, + "step": 3263 + }, + { + "epoch": 0.1630451071482092, + "grad_norm": 0.3395095467567444, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 3264 + }, + { + "epoch": 0.16309505969329138, + "grad_norm": 0.4136589765548706, + "learning_rate": 0.0001, + "loss": 0.0173, + "step": 3265 + }, + { + "epoch": 0.16314501223837355, + "grad_norm": 0.33379194140434265, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3266 + }, + { + "epoch": 0.16319496478345571, + "grad_norm": 0.26519089937210083, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3267 + }, + { + "epoch": 0.16324491732853788, + "grad_norm": 0.4736621081829071, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 3268 + }, + { + "epoch": 0.16329486987362007, + "grad_norm": 
0.46284323930740356, + "learning_rate": 0.0001, + "loss": 0.043, + "step": 3269 + }, + { + "epoch": 0.16334482241870224, + "grad_norm": 0.34674134850502014, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3270 + }, + { + "epoch": 0.1633947749637844, + "grad_norm": 0.3502301871776581, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 3271 + }, + { + "epoch": 0.16344472750886657, + "grad_norm": 0.4227505326271057, + "learning_rate": 0.0001, + "loss": 0.0283, + "step": 3272 + }, + { + "epoch": 0.16349468005394874, + "grad_norm": 0.365847647190094, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 3273 + }, + { + "epoch": 0.16354463259903093, + "grad_norm": 0.46246838569641113, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 3274 + }, + { + "epoch": 0.1635945851441131, + "grad_norm": 0.2626626789569855, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 3275 + }, + { + "epoch": 0.16364453768919526, + "grad_norm": 0.3413584232330322, + "learning_rate": 0.0001, + "loss": 0.0205, + "step": 3276 + }, + { + "epoch": 0.16369449023427743, + "grad_norm": 0.353702574968338, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 3277 + }, + { + "epoch": 0.1637444427793596, + "grad_norm": 0.37439218163490295, + "learning_rate": 0.0001, + "loss": 0.0516, + "step": 3278 + }, + { + "epoch": 0.1637943953244418, + "grad_norm": 0.305207222700119, + "learning_rate": 0.0001, + "loss": 0.0077, + "step": 3279 + }, + { + "epoch": 0.16384434786952395, + "grad_norm": 0.267319917678833, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 3280 + }, + { + "epoch": 0.16389430041460612, + "grad_norm": 0.29330942034721375, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3281 + }, + { + "epoch": 0.16394425295968829, + "grad_norm": 0.2906748056411743, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 3282 + }, + { + "epoch": 0.16399420550477048, + "grad_norm": 0.3063846528530121, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 3283 + }, + { + "epoch": 0.16404415804985265, + "grad_norm": 0.2767302095890045, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3284 + }, + { + "epoch": 0.1640941105949348, + "grad_norm": 0.24698218703269958, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3285 + }, + { + "epoch": 0.16414406314001698, + "grad_norm": 0.23641879856586456, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3286 + }, + { + "epoch": 0.16419401568509914, + "grad_norm": 0.26992788910865784, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3287 + }, + { + "epoch": 0.16424396823018134, + "grad_norm": 0.2905365228652954, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3288 + }, + { + "epoch": 0.1642939207752635, + "grad_norm": 0.2771669328212738, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 3289 + }, + { + "epoch": 0.16434387332034567, + "grad_norm": 0.27634215354919434, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3290 + }, + { + "epoch": 0.16439382586542783, + "grad_norm": 0.2564670145511627, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3291 + }, + { + "epoch": 0.16444377841051003, + "grad_norm": 0.2566397786140442, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3292 + }, + { + "epoch": 0.1644937309555922, + "grad_norm": 0.2906823456287384, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 3293 + }, + { + "epoch": 0.16454368350067436, + "grad_norm": 0.30995574593544006, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 3294 + }, + { + "epoch": 0.16459363604575653, + "grad_norm": 0.23202380537986755, + 
"learning_rate": 0.0001, + "loss": 0.0072, + "step": 3295 + }, + { + "epoch": 0.1646435885908387, + "grad_norm": 0.21185944974422455, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3296 + }, + { + "epoch": 0.16469354113592088, + "grad_norm": 0.3064243793487549, + "learning_rate": 0.0001, + "loss": 0.0094, + "step": 3297 + }, + { + "epoch": 0.16474349368100305, + "grad_norm": 0.24285267293453217, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 3298 + }, + { + "epoch": 0.16479344622608522, + "grad_norm": 0.17217768728733063, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3299 + }, + { + "epoch": 0.16484339877116738, + "grad_norm": 0.3235059678554535, + "learning_rate": 0.0001, + "loss": 0.041, + "step": 3300 + }, + { + "epoch": 0.16489335131624958, + "grad_norm": 0.2817627191543579, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3301 + }, + { + "epoch": 0.16494330386133174, + "grad_norm": 0.22593313455581665, + "learning_rate": 0.0001, + "loss": 0.0118, + "step": 3302 + }, + { + "epoch": 0.1649932564064139, + "grad_norm": 0.13437072932720184, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3303 + }, + { + "epoch": 0.16504320895149607, + "grad_norm": 0.12819741666316986, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3304 + }, + { + "epoch": 0.16509316149657824, + "grad_norm": 0.22856928408145905, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 3305 + }, + { + "epoch": 0.16514311404166043, + "grad_norm": 0.14929385483264923, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 3306 + }, + { + "epoch": 0.1651930665867426, + "grad_norm": 0.24728761613368988, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3307 + }, + { + "epoch": 0.16524301913182476, + "grad_norm": 0.21525129675865173, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 3308 + }, + { + "epoch": 0.16529297167690693, + "grad_norm": 0.1622259020805359, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 3309 + }, + { + "epoch": 0.16534292422198912, + "grad_norm": 0.22466206550598145, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 3310 + }, + { + "epoch": 0.1653928767670713, + "grad_norm": 0.2073773592710495, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3311 + }, + { + "epoch": 0.16544282931215346, + "grad_norm": 0.2858324646949768, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3312 + }, + { + "epoch": 0.16549278185723562, + "grad_norm": 0.16029046475887299, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3313 + }, + { + "epoch": 0.1655427344023178, + "grad_norm": 0.19568277895450592, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3314 + }, + { + "epoch": 0.16559268694739998, + "grad_norm": 0.2584255337715149, + "learning_rate": 0.0001, + "loss": 0.0219, + "step": 3315 + }, + { + "epoch": 0.16564263949248215, + "grad_norm": 0.1866542547941208, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3316 + }, + { + "epoch": 0.1656925920375643, + "grad_norm": 0.21952930092811584, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3317 + }, + { + "epoch": 0.16574254458264648, + "grad_norm": 0.31808924674987793, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 3318 + }, + { + "epoch": 0.16579249712772864, + "grad_norm": 0.34358152747154236, + "learning_rate": 0.0001, + "loss": 0.0401, + "step": 3319 + }, + { + "epoch": 0.16584244967281084, + "grad_norm": 0.33849412202835083, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 3320 + }, + { + "epoch": 0.165892402217893, + "grad_norm": 0.33871057629585266, + 
"learning_rate": 0.0001, + "loss": 0.0075, + "step": 3321 + }, + { + "epoch": 0.16594235476297517, + "grad_norm": 0.37741905450820923, + "learning_rate": 0.0001, + "loss": 0.1301, + "step": 3322 + }, + { + "epoch": 0.16599230730805734, + "grad_norm": 0.35238561034202576, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 3323 + }, + { + "epoch": 0.16604225985313953, + "grad_norm": 0.27130305767059326, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3324 + }, + { + "epoch": 0.1660922123982217, + "grad_norm": 0.3213149905204773, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3325 + }, + { + "epoch": 0.16614216494330386, + "grad_norm": 0.3046504557132721, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3326 + }, + { + "epoch": 0.16619211748838603, + "grad_norm": 0.2854936122894287, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 3327 + }, + { + "epoch": 0.1662420700334682, + "grad_norm": 0.21274444460868835, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 3328 + }, + { + "epoch": 0.16629202257855039, + "grad_norm": 0.3072682321071625, + "learning_rate": 0.0001, + "loss": 0.13, + "step": 3329 + }, + { + "epoch": 0.16634197512363255, + "grad_norm": 0.36379194259643555, + "learning_rate": 0.0001, + "loss": 0.0339, + "step": 3330 + }, + { + "epoch": 0.16639192766871472, + "grad_norm": 0.28412511944770813, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3331 + }, + { + "epoch": 0.16644188021379688, + "grad_norm": 0.15837852656841278, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3332 + }, + { + "epoch": 0.16649183275887908, + "grad_norm": 0.3072130084037781, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3333 + }, + { + "epoch": 0.16654178530396124, + "grad_norm": 0.32794588804244995, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3334 + }, + { + "epoch": 0.1665917378490434, + "grad_norm": 0.2153579592704773, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3335 + }, + { + "epoch": 0.16664169039412557, + "grad_norm": 0.23524385690689087, + "learning_rate": 0.0001, + "loss": 0.1289, + "step": 3336 + }, + { + "epoch": 0.16669164293920774, + "grad_norm": 0.21946720778942108, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 3337 + }, + { + "epoch": 0.16674159548428993, + "grad_norm": 0.28836748003959656, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3338 + }, + { + "epoch": 0.1667915480293721, + "grad_norm": 0.1984085887670517, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3339 + }, + { + "epoch": 0.16684150057445427, + "grad_norm": 0.22884781658649445, + "learning_rate": 0.0001, + "loss": 0.0238, + "step": 3340 + }, + { + "epoch": 0.16689145311953643, + "grad_norm": 0.1926250159740448, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3341 + }, + { + "epoch": 0.16694140566461863, + "grad_norm": 0.23444244265556335, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3342 + }, + { + "epoch": 0.1669913582097008, + "grad_norm": 0.18300996720790863, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 3343 + }, + { + "epoch": 0.16704131075478296, + "grad_norm": 0.19951112568378448, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3344 + }, + { + "epoch": 0.16709126329986512, + "grad_norm": 0.19232134521007538, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3345 + }, + { + "epoch": 0.1671412158449473, + "grad_norm": 0.1512376219034195, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3346 + }, + { + "epoch": 0.16719116839002948, + "grad_norm": 0.2856202721595764, + 
"learning_rate": 0.0001, + "loss": 0.0034, + "step": 3347 + }, + { + "epoch": 0.16724112093511165, + "grad_norm": 0.17122675478458405, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3348 + }, + { + "epoch": 0.1672910734801938, + "grad_norm": 0.18765713274478912, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3349 + }, + { + "epoch": 0.16734102602527598, + "grad_norm": 0.23998485505580902, + "learning_rate": 0.0001, + "loss": 0.1386, + "step": 3350 + }, + { + "epoch": 0.16739097857035817, + "grad_norm": 0.23949739336967468, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 3351 + }, + { + "epoch": 0.16744093111544034, + "grad_norm": 0.16357989609241486, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3352 + }, + { + "epoch": 0.1674908836605225, + "grad_norm": 0.1631276160478592, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3353 + }, + { + "epoch": 0.16754083620560467, + "grad_norm": 0.18736469745635986, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3354 + }, + { + "epoch": 0.16759078875068684, + "grad_norm": 0.164104163646698, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3355 + }, + { + "epoch": 0.16764074129576903, + "grad_norm": 0.11631035804748535, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3356 + }, + { + "epoch": 0.1676906938408512, + "grad_norm": 0.16442078351974487, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 3357 + }, + { + "epoch": 0.16774064638593336, + "grad_norm": 0.21143779158592224, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3358 + }, + { + "epoch": 0.16779059893101553, + "grad_norm": 0.14573587477207184, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3359 + }, + { + "epoch": 0.1678405514760977, + "grad_norm": 0.14101748168468475, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3360 + }, + { + "epoch": 0.1678905040211799, + "grad_norm": 0.14676517248153687, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3361 + }, + { + "epoch": 0.16794045656626205, + "grad_norm": 0.16369804739952087, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3362 + }, + { + "epoch": 0.16799040911134422, + "grad_norm": 0.18878090381622314, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 3363 + }, + { + "epoch": 0.16804036165642638, + "grad_norm": 0.14907564222812653, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3364 + }, + { + "epoch": 0.16809031420150858, + "grad_norm": 0.1700853854417801, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 3365 + }, + { + "epoch": 0.16814026674659074, + "grad_norm": 0.14021673798561096, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3366 + }, + { + "epoch": 0.1681902192916729, + "grad_norm": 0.1396811157464981, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3367 + }, + { + "epoch": 0.16824017183675508, + "grad_norm": 0.13804811239242554, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3368 + }, + { + "epoch": 0.16829012438183724, + "grad_norm": 0.23071415722370148, + "learning_rate": 0.0001, + "loss": 0.1317, + "step": 3369 + }, + { + "epoch": 0.16834007692691944, + "grad_norm": 0.1408240646123886, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3370 + }, + { + "epoch": 0.1683900294720016, + "grad_norm": 0.18097607791423798, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3371 + }, + { + "epoch": 0.16843998201708377, + "grad_norm": 0.1457076370716095, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3372 + }, + { + "epoch": 0.16848993456216593, + "grad_norm": 0.14743421971797943, + 
"learning_rate": 0.0001, + "loss": 0.0035, + "step": 3373 + }, + { + "epoch": 0.16853988710724813, + "grad_norm": 0.17439061403274536, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3374 + }, + { + "epoch": 0.1685898396523303, + "grad_norm": 0.1861920803785324, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 3375 + }, + { + "epoch": 0.16863979219741246, + "grad_norm": 0.1371627300977707, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3376 + }, + { + "epoch": 0.16868974474249462, + "grad_norm": 0.16129425168037415, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3377 + }, + { + "epoch": 0.1687396972875768, + "grad_norm": 0.16102607548236847, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3378 + }, + { + "epoch": 0.16878964983265898, + "grad_norm": 0.15163223445415497, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3379 + }, + { + "epoch": 0.16883960237774115, + "grad_norm": 0.1776525229215622, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 3380 + }, + { + "epoch": 0.16888955492282332, + "grad_norm": 0.18803928792476654, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3381 + }, + { + "epoch": 0.16893950746790548, + "grad_norm": 0.6444376707077026, + "learning_rate": 0.0001, + "loss": 0.0411, + "step": 3382 + }, + { + "epoch": 0.16898946001298767, + "grad_norm": 0.2977713644504547, + "learning_rate": 0.0001, + "loss": 0.0145, + "step": 3383 + }, + { + "epoch": 0.16903941255806984, + "grad_norm": 0.2528780698776245, + "learning_rate": 0.0001, + "loss": 0.0197, + "step": 3384 + }, + { + "epoch": 0.169089365103152, + "grad_norm": 0.20145653188228607, + "learning_rate": 0.0001, + "loss": 0.1264, + "step": 3385 + }, + { + "epoch": 0.16913931764823417, + "grad_norm": 0.2964414954185486, + "learning_rate": 0.0001, + "loss": 0.0227, + "step": 3386 + }, + { + "epoch": 0.16918927019331634, + "grad_norm": 0.3388718068599701, + "learning_rate": 0.0001, + "loss": 0.1467, + "step": 3387 + }, + { + "epoch": 0.16923922273839853, + "grad_norm": 0.2524680495262146, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3388 + }, + { + "epoch": 0.1692891752834807, + "grad_norm": 0.2601051330566406, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3389 + }, + { + "epoch": 0.16933912782856286, + "grad_norm": 0.3746872544288635, + "learning_rate": 0.0001, + "loss": 0.0273, + "step": 3390 + }, + { + "epoch": 0.16938908037364503, + "grad_norm": 0.18837162852287292, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3391 + }, + { + "epoch": 0.16943903291872722, + "grad_norm": 0.28200340270996094, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3392 + }, + { + "epoch": 0.1694889854638094, + "grad_norm": 0.28280404210090637, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3393 + }, + { + "epoch": 0.16953893800889155, + "grad_norm": 0.2398843616247177, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3394 + }, + { + "epoch": 0.16958889055397372, + "grad_norm": 0.5116312503814697, + "learning_rate": 0.0001, + "loss": 0.0897, + "step": 3395 + }, + { + "epoch": 0.16963884309905589, + "grad_norm": 0.26569509506225586, + "learning_rate": 0.0001, + "loss": 0.0724, + "step": 3396 + }, + { + "epoch": 0.16968879564413808, + "grad_norm": 0.41268569231033325, + "learning_rate": 0.0001, + "loss": 0.0547, + "step": 3397 + }, + { + "epoch": 0.16973874818922025, + "grad_norm": 0.6311531662940979, + "learning_rate": 0.0001, + "loss": 0.0594, + "step": 3398 + }, + { + "epoch": 0.1697887007343024, + "grad_norm": 0.37717190384864807, + "learning_rate": 
0.0001, + "loss": 0.0075, + "step": 3399 + }, + { + "epoch": 0.16983865327938458, + "grad_norm": 0.5476840734481812, + "learning_rate": 0.0001, + "loss": 0.0369, + "step": 3400 + }, + { + "epoch": 0.16988860582446674, + "grad_norm": 0.38724347949028015, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 3401 + }, + { + "epoch": 0.16993855836954894, + "grad_norm": 0.2856350839138031, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3402 + }, + { + "epoch": 0.1699885109146311, + "grad_norm": 0.3815651535987854, + "learning_rate": 0.0001, + "loss": 0.049, + "step": 3403 + }, + { + "epoch": 0.17003846345971327, + "grad_norm": 0.34038835763931274, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 3404 + }, + { + "epoch": 0.17008841600479543, + "grad_norm": 0.4006675183773041, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3405 + }, + { + "epoch": 0.17013836854987763, + "grad_norm": 0.34150633215904236, + "learning_rate": 0.0001, + "loss": 0.0121, + "step": 3406 + }, + { + "epoch": 0.1701883210949598, + "grad_norm": 0.29232320189476013, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3407 + }, + { + "epoch": 0.17023827364004196, + "grad_norm": 0.3426295220851898, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3408 + }, + { + "epoch": 0.17028822618512413, + "grad_norm": 0.3667389750480652, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3409 + }, + { + "epoch": 0.1703381787302063, + "grad_norm": 0.30402013659477234, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3410 + }, + { + "epoch": 0.17038813127528848, + "grad_norm": 0.2712043523788452, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3411 + }, + { + "epoch": 0.17043808382037065, + "grad_norm": 0.44535648822784424, + "learning_rate": 0.0001, + "loss": 0.1542, + "step": 3412 + }, + { + "epoch": 0.17048803636545282, + "grad_norm": 0.2801048457622528, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3413 + }, + { + "epoch": 0.17053798891053498, + "grad_norm": 0.3522675335407257, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 3414 + }, + { + "epoch": 0.17058794145561718, + "grad_norm": 0.3282342553138733, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 3415 + }, + { + "epoch": 0.17063789400069934, + "grad_norm": 0.2001681923866272, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 3416 + }, + { + "epoch": 0.1706878465457815, + "grad_norm": 0.2849177122116089, + "learning_rate": 0.0001, + "loss": 0.0515, + "step": 3417 + }, + { + "epoch": 0.17073779909086367, + "grad_norm": 0.3264252841472626, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3418 + }, + { + "epoch": 0.17078775163594584, + "grad_norm": 0.30766409635543823, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3419 + }, + { + "epoch": 0.17083770418102803, + "grad_norm": 0.24784764647483826, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3420 + }, + { + "epoch": 0.1708876567261102, + "grad_norm": 0.31304484605789185, + "learning_rate": 0.0001, + "loss": 0.0161, + "step": 3421 + }, + { + "epoch": 0.17093760927119236, + "grad_norm": 0.18766939640045166, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3422 + }, + { + "epoch": 0.17098756181627453, + "grad_norm": 0.2696339190006256, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3423 + }, + { + "epoch": 0.17103751436135672, + "grad_norm": 0.29368484020233154, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3424 + }, + { + "epoch": 0.1710874669064389, + "grad_norm": 0.2823731601238251, + "learning_rate": 0.0001, + 
"loss": 0.0086, + "step": 3425 + }, + { + "epoch": 0.17113741945152106, + "grad_norm": 0.1625608652830124, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3426 + }, + { + "epoch": 0.17118737199660322, + "grad_norm": 0.18203112483024597, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 3427 + }, + { + "epoch": 0.1712373245416854, + "grad_norm": 0.23642699420452118, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3428 + }, + { + "epoch": 0.17128727708676758, + "grad_norm": 0.28144845366477966, + "learning_rate": 0.0001, + "loss": 0.0111, + "step": 3429 + }, + { + "epoch": 0.17133722963184975, + "grad_norm": 0.2811879515647888, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 3430 + }, + { + "epoch": 0.1713871821769319, + "grad_norm": 0.23985719680786133, + "learning_rate": 0.0001, + "loss": 0.0246, + "step": 3431 + }, + { + "epoch": 0.17143713472201408, + "grad_norm": 0.18653644621372223, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3432 + }, + { + "epoch": 0.17148708726709627, + "grad_norm": 0.23325692117214203, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3433 + }, + { + "epoch": 0.17153703981217844, + "grad_norm": 0.191384419798851, + "learning_rate": 0.0001, + "loss": 0.1291, + "step": 3434 + }, + { + "epoch": 0.1715869923572606, + "grad_norm": 0.20963366329669952, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3435 + }, + { + "epoch": 0.17163694490234277, + "grad_norm": 0.18177318572998047, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3436 + }, + { + "epoch": 0.17168689744742494, + "grad_norm": 0.18711332976818085, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3437 + }, + { + "epoch": 0.17173684999250713, + "grad_norm": 0.17247508466243744, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 3438 + }, + { + "epoch": 0.1717868025375893, + "grad_norm": 0.20025399327278137, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3439 + }, + { + "epoch": 0.17183675508267146, + "grad_norm": 0.22278620302677155, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3440 + }, + { + "epoch": 0.17188670762775363, + "grad_norm": 0.1406714767217636, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3441 + }, + { + "epoch": 0.1719366601728358, + "grad_norm": 0.13086768984794617, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3442 + }, + { + "epoch": 0.17198661271791799, + "grad_norm": 0.1506304293870926, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 3443 + }, + { + "epoch": 0.17203656526300015, + "grad_norm": 0.23731380701065063, + "learning_rate": 0.0001, + "loss": 0.0153, + "step": 3444 + }, + { + "epoch": 0.17208651780808232, + "grad_norm": 0.2670730650424957, + "learning_rate": 0.0001, + "loss": 0.1369, + "step": 3445 + }, + { + "epoch": 0.17213647035316448, + "grad_norm": 0.20609374344348907, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3446 + }, + { + "epoch": 0.17218642289824668, + "grad_norm": 0.29293930530548096, + "learning_rate": 0.0001, + "loss": 0.2552, + "step": 3447 + }, + { + "epoch": 0.17223637544332884, + "grad_norm": 0.18259724974632263, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3448 + }, + { + "epoch": 0.172286327988411, + "grad_norm": 0.26452749967575073, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 3449 + }, + { + "epoch": 0.17233628053349317, + "grad_norm": 0.2058209478855133, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3450 + }, + { + "epoch": 0.17238623307857534, + "grad_norm": 0.27980637550354004, + "learning_rate": 0.0001, + "loss": 
0.1276, + "step": 3451 + }, + { + "epoch": 0.17243618562365753, + "grad_norm": 0.2005167156457901, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3452 + }, + { + "epoch": 0.1724861381687397, + "grad_norm": 0.23437398672103882, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3453 + }, + { + "epoch": 0.17253609071382187, + "grad_norm": 0.20657142996788025, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3454 + }, + { + "epoch": 0.17258604325890403, + "grad_norm": 0.20437082648277283, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3455 + }, + { + "epoch": 0.17263599580398623, + "grad_norm": 0.19486235082149506, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 3456 + }, + { + "epoch": 0.1726859483490684, + "grad_norm": 0.20547138154506683, + "learning_rate": 0.0001, + "loss": 0.0136, + "step": 3457 + }, + { + "epoch": 0.17273590089415056, + "grad_norm": 0.14957089722156525, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3458 + }, + { + "epoch": 0.17278585343923272, + "grad_norm": 0.21892599761486053, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3459 + }, + { + "epoch": 0.1728358059843149, + "grad_norm": 0.4166703224182129, + "learning_rate": 0.0001, + "loss": 0.1517, + "step": 3460 + }, + { + "epoch": 0.17288575852939708, + "grad_norm": 0.1820586770772934, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3461 + }, + { + "epoch": 0.17293571107447925, + "grad_norm": 0.2176537662744522, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 3462 + }, + { + "epoch": 0.17298566361956141, + "grad_norm": 0.24481993913650513, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 3463 + }, + { + "epoch": 0.17303561616464358, + "grad_norm": 0.22524775564670563, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3464 + }, + { + "epoch": 0.17308556870972577, + "grad_norm": 0.28111281991004944, + "learning_rate": 0.0001, + "loss": 0.03, + "step": 3465 + }, + { + "epoch": 0.17313552125480794, + "grad_norm": 0.22796450555324554, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 3466 + }, + { + "epoch": 0.1731854737998901, + "grad_norm": 0.21417441964149475, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3467 + }, + { + "epoch": 0.17323542634497227, + "grad_norm": 0.21911542117595673, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3468 + }, + { + "epoch": 0.17328537889005444, + "grad_norm": 0.2334877997636795, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3469 + }, + { + "epoch": 0.17333533143513663, + "grad_norm": 0.2396426647901535, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3470 + }, + { + "epoch": 0.1733852839802188, + "grad_norm": 0.23887541890144348, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3471 + }, + { + "epoch": 0.17343523652530096, + "grad_norm": 0.24445796012878418, + "learning_rate": 0.0001, + "loss": 0.144, + "step": 3472 + }, + { + "epoch": 0.17348518907038313, + "grad_norm": 0.2198154628276825, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3473 + }, + { + "epoch": 0.17353514161546532, + "grad_norm": 0.22938768565654755, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 3474 + }, + { + "epoch": 0.1735850941605475, + "grad_norm": 0.22483526170253754, + "learning_rate": 0.0001, + "loss": 0.0198, + "step": 3475 + }, + { + "epoch": 0.17363504670562965, + "grad_norm": 0.20299075543880463, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3476 + }, + { + "epoch": 0.17368499925071182, + "grad_norm": 0.1789657473564148, + "learning_rate": 0.0001, + "loss": 0.0018, + 
"step": 3477 + }, + { + "epoch": 0.17373495179579398, + "grad_norm": 0.21798966825008392, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3478 + }, + { + "epoch": 0.17378490434087618, + "grad_norm": 0.3167245090007782, + "learning_rate": 0.0001, + "loss": 0.0358, + "step": 3479 + }, + { + "epoch": 0.17383485688595834, + "grad_norm": 0.24046136438846588, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3480 + }, + { + "epoch": 0.1738848094310405, + "grad_norm": 0.3087064325809479, + "learning_rate": 0.0001, + "loss": 0.0462, + "step": 3481 + }, + { + "epoch": 0.17393476197612268, + "grad_norm": 0.323018342256546, + "learning_rate": 0.0001, + "loss": 0.0394, + "step": 3482 + }, + { + "epoch": 0.17398471452120484, + "grad_norm": 0.24195221066474915, + "learning_rate": 0.0001, + "loss": 0.1327, + "step": 3483 + }, + { + "epoch": 0.17403466706628704, + "grad_norm": 0.2871445417404175, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3484 + }, + { + "epoch": 0.1740846196113692, + "grad_norm": 0.31334152817726135, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 3485 + }, + { + "epoch": 0.17413457215645137, + "grad_norm": 0.20497266948223114, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 3486 + }, + { + "epoch": 0.17418452470153353, + "grad_norm": 0.32294923067092896, + "learning_rate": 0.0001, + "loss": 0.0409, + "step": 3487 + }, + { + "epoch": 0.17423447724661573, + "grad_norm": 0.28154826164245605, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3488 + }, + { + "epoch": 0.1742844297916979, + "grad_norm": 0.29980218410491943, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 3489 + }, + { + "epoch": 0.17433438233678006, + "grad_norm": 0.40889328718185425, + "learning_rate": 0.0001, + "loss": 0.0338, + "step": 3490 + }, + { + "epoch": 0.17438433488186222, + "grad_norm": 0.33040520548820496, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3491 + }, + { + "epoch": 0.1744342874269444, + "grad_norm": 0.25313034653663635, + "learning_rate": 0.0001, + "loss": 0.1307, + "step": 3492 + }, + { + "epoch": 0.17448423997202658, + "grad_norm": 0.19987808167934418, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3493 + }, + { + "epoch": 0.17453419251710875, + "grad_norm": 0.2583928406238556, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 3494 + }, + { + "epoch": 0.17458414506219092, + "grad_norm": 0.1943647712469101, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3495 + }, + { + "epoch": 0.17463409760727308, + "grad_norm": 0.26209211349487305, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 3496 + }, + { + "epoch": 0.17468405015235527, + "grad_norm": 0.25356152653694153, + "learning_rate": 0.0001, + "loss": 0.0283, + "step": 3497 + }, + { + "epoch": 0.17473400269743744, + "grad_norm": 0.2293737530708313, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 3498 + }, + { + "epoch": 0.1747839552425196, + "grad_norm": 0.22160445153713226, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3499 + }, + { + "epoch": 0.17483390778760177, + "grad_norm": 0.260373055934906, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3500 + }, + { + "epoch": 0.17488386033268394, + "grad_norm": 0.22547078132629395, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3501 + }, + { + "epoch": 0.17493381287776613, + "grad_norm": 0.2604927122592926, + "learning_rate": 0.0001, + "loss": 0.0468, + "step": 3502 + }, + { + "epoch": 0.1749837654228483, + "grad_norm": 0.27023759484291077, + "learning_rate": 0.0001, + "loss": 0.0407, + "step": 
3503 + }, + { + "epoch": 0.17503371796793046, + "grad_norm": 0.22057798504829407, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 3504 + }, + { + "epoch": 0.17508367051301263, + "grad_norm": 0.27883005142211914, + "learning_rate": 0.0001, + "loss": 0.0257, + "step": 3505 + }, + { + "epoch": 0.17513362305809482, + "grad_norm": 0.24735364317893982, + "learning_rate": 0.0001, + "loss": 0.1272, + "step": 3506 + }, + { + "epoch": 0.175183575603177, + "grad_norm": 0.2750735878944397, + "learning_rate": 0.0001, + "loss": 0.0182, + "step": 3507 + }, + { + "epoch": 0.17523352814825915, + "grad_norm": 0.2481147050857544, + "learning_rate": 0.0001, + "loss": 0.0251, + "step": 3508 + }, + { + "epoch": 0.17528348069334132, + "grad_norm": 0.23057736456394196, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3509 + }, + { + "epoch": 0.1753334332384235, + "grad_norm": 0.23486638069152832, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 3510 + }, + { + "epoch": 0.17538338578350568, + "grad_norm": 0.20921793580055237, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3511 + }, + { + "epoch": 0.17543333832858785, + "grad_norm": 0.1671368032693863, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 3512 + }, + { + "epoch": 0.17548329087367, + "grad_norm": 0.18202275037765503, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3513 + }, + { + "epoch": 0.17553324341875218, + "grad_norm": 0.23903736472129822, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3514 + }, + { + "epoch": 0.17558319596383437, + "grad_norm": 0.1616910696029663, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3515 + }, + { + "epoch": 0.17563314850891654, + "grad_norm": 0.1644093096256256, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3516 + }, + { + "epoch": 0.1756831010539987, + "grad_norm": 0.15883536636829376, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3517 + }, + { + "epoch": 0.17573305359908087, + "grad_norm": 0.1672169268131256, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3518 + }, + { + "epoch": 0.17578300614416303, + "grad_norm": 0.23710131645202637, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 3519 + }, + { + "epoch": 0.17583295868924523, + "grad_norm": 0.1457831859588623, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3520 + }, + { + "epoch": 0.1758829112343274, + "grad_norm": 0.1038551852107048, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3521 + }, + { + "epoch": 0.17593286377940956, + "grad_norm": 0.13519427180290222, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3522 + }, + { + "epoch": 0.17598281632449173, + "grad_norm": 0.15270589292049408, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3523 + }, + { + "epoch": 0.1760327688695739, + "grad_norm": 0.2372056543827057, + "learning_rate": 0.0001, + "loss": 0.0276, + "step": 3524 + }, + { + "epoch": 0.17608272141465608, + "grad_norm": 0.1681891679763794, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3525 + }, + { + "epoch": 0.17613267395973825, + "grad_norm": 0.1675669103860855, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3526 + }, + { + "epoch": 0.17618262650482042, + "grad_norm": 0.15275909006595612, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3527 + }, + { + "epoch": 0.17623257904990258, + "grad_norm": 0.19038352370262146, + "learning_rate": 0.0001, + "loss": 0.0144, + "step": 3528 + }, + { + "epoch": 0.17628253159498478, + "grad_norm": 0.17834463715553284, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 3529 + }, + { + 
"epoch": 0.17633248414006694, + "grad_norm": 0.17108376324176788, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3530 + }, + { + "epoch": 0.1763824366851491, + "grad_norm": 0.18309420347213745, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3531 + }, + { + "epoch": 0.17643238923023127, + "grad_norm": 0.19593331217765808, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 3532 + }, + { + "epoch": 0.17648234177531344, + "grad_norm": 0.13562941551208496, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3533 + }, + { + "epoch": 0.17653229432039563, + "grad_norm": 0.16093872487545013, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3534 + }, + { + "epoch": 0.1765822468654778, + "grad_norm": 0.1723911017179489, + "learning_rate": 0.0001, + "loss": 0.0186, + "step": 3535 + }, + { + "epoch": 0.17663219941055996, + "grad_norm": 0.19904695451259613, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3536 + }, + { + "epoch": 0.17668215195564213, + "grad_norm": 0.1398600935935974, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3537 + }, + { + "epoch": 0.17673210450072432, + "grad_norm": 0.17188455164432526, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 3538 + }, + { + "epoch": 0.1767820570458065, + "grad_norm": 0.14417201280593872, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3539 + }, + { + "epoch": 0.17683200959088866, + "grad_norm": 0.16473297774791718, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 3540 + }, + { + "epoch": 0.17688196213597082, + "grad_norm": 0.27592483162879944, + "learning_rate": 0.0001, + "loss": 0.0235, + "step": 3541 + }, + { + "epoch": 0.176931914681053, + "grad_norm": 0.18961335718631744, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3542 + }, + { + "epoch": 0.17698186722613518, + "grad_norm": 0.31503280997276306, + "learning_rate": 0.0001, + "loss": 0.0218, + "step": 3543 + }, + { + "epoch": 0.17703181977121735, + "grad_norm": 0.14748506247997284, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3544 + }, + { + "epoch": 0.1770817723162995, + "grad_norm": 0.23816610872745514, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3545 + }, + { + "epoch": 0.17713172486138168, + "grad_norm": 0.16652333736419678, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3546 + }, + { + "epoch": 0.17718167740646387, + "grad_norm": 0.35552704334259033, + "learning_rate": 0.0001, + "loss": 0.0405, + "step": 3547 + }, + { + "epoch": 0.17723162995154604, + "grad_norm": 0.20267245173454285, + "learning_rate": 0.0001, + "loss": 0.1271, + "step": 3548 + }, + { + "epoch": 0.1772815824966282, + "grad_norm": 0.19989900290966034, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3549 + }, + { + "epoch": 0.17733153504171037, + "grad_norm": 0.2684052288532257, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 3550 + }, + { + "epoch": 0.17738148758679254, + "grad_norm": 0.38966116309165955, + "learning_rate": 0.0001, + "loss": 0.2763, + "step": 3551 + }, + { + "epoch": 0.17743144013187473, + "grad_norm": 0.36110344529151917, + "learning_rate": 0.0001, + "loss": 0.0295, + "step": 3552 + }, + { + "epoch": 0.1774813926769569, + "grad_norm": 0.27312028408050537, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 3553 + }, + { + "epoch": 0.17753134522203906, + "grad_norm": 0.3306892514228821, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 3554 + }, + { + "epoch": 0.17758129776712123, + "grad_norm": 0.2873741090297699, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3555 + }, + { + 
"epoch": 0.17763125031220342, + "grad_norm": 0.22486017644405365, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3556 + }, + { + "epoch": 0.1776812028572856, + "grad_norm": 0.42323988676071167, + "learning_rate": 0.0001, + "loss": 0.0412, + "step": 3557 + }, + { + "epoch": 0.17773115540236775, + "grad_norm": 0.2949797213077545, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3558 + }, + { + "epoch": 0.17778110794744992, + "grad_norm": 0.18950320780277252, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3559 + }, + { + "epoch": 0.17783106049253208, + "grad_norm": 0.2096361368894577, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3560 + }, + { + "epoch": 0.17788101303761428, + "grad_norm": 0.29609358310699463, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3561 + }, + { + "epoch": 0.17793096558269644, + "grad_norm": 0.3129887878894806, + "learning_rate": 0.0001, + "loss": 0.0281, + "step": 3562 + }, + { + "epoch": 0.1779809181277786, + "grad_norm": 0.2392149716615677, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 3563 + }, + { + "epoch": 0.17803087067286077, + "grad_norm": 0.21777455508708954, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3564 + }, + { + "epoch": 0.17808082321794294, + "grad_norm": 0.23278719186782837, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3565 + }, + { + "epoch": 0.17813077576302513, + "grad_norm": 0.25938180088996887, + "learning_rate": 0.0001, + "loss": 0.1379, + "step": 3566 + }, + { + "epoch": 0.1781807283081073, + "grad_norm": 0.17957355082035065, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3567 + }, + { + "epoch": 0.17823068085318947, + "grad_norm": 0.22666147351264954, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3568 + }, + { + "epoch": 0.17828063339827163, + "grad_norm": 0.23823265731334686, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3569 + }, + { + "epoch": 0.17833058594335383, + "grad_norm": 0.25952160358428955, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 3570 + }, + { + "epoch": 0.178380538488436, + "grad_norm": 0.18889613449573517, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3571 + }, + { + "epoch": 0.17843049103351816, + "grad_norm": 0.16651363670825958, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3572 + }, + { + "epoch": 0.17848044357860032, + "grad_norm": 0.25479039549827576, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3573 + }, + { + "epoch": 0.1785303961236825, + "grad_norm": 0.23353707790374756, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3574 + }, + { + "epoch": 0.17858034866876468, + "grad_norm": 0.19620685279369354, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 3575 + }, + { + "epoch": 0.17863030121384685, + "grad_norm": 0.2063521146774292, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3576 + }, + { + "epoch": 0.17868025375892901, + "grad_norm": 0.18701757490634918, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3577 + }, + { + "epoch": 0.17873020630401118, + "grad_norm": 0.2280881404876709, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3578 + }, + { + "epoch": 0.17878015884909337, + "grad_norm": 0.18388332426548004, + "learning_rate": 0.0001, + "loss": 0.1266, + "step": 3579 + }, + { + "epoch": 0.17883011139417554, + "grad_norm": 0.16026172041893005, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3580 + }, + { + "epoch": 0.1788800639392577, + "grad_norm": 0.2178903967142105, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3581 + }, + { + "epoch": 
0.17893001648433987, + "grad_norm": 0.17952030897140503, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3582 + }, + { + "epoch": 0.17897996902942204, + "grad_norm": 0.1269085854291916, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3583 + }, + { + "epoch": 0.17902992157450423, + "grad_norm": 0.14094343781471252, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3584 + }, + { + "epoch": 0.1790798741195864, + "grad_norm": 0.166362002491951, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3585 + }, + { + "epoch": 0.17912982666466856, + "grad_norm": 0.13575276732444763, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3586 + }, + { + "epoch": 0.17917977920975073, + "grad_norm": 0.14372877776622772, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3587 + }, + { + "epoch": 0.17922973175483292, + "grad_norm": 0.12935814261436462, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3588 + }, + { + "epoch": 0.1792796842999151, + "grad_norm": 0.11958488821983337, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3589 + }, + { + "epoch": 0.17932963684499725, + "grad_norm": 0.14189454913139343, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3590 + }, + { + "epoch": 0.17937958939007942, + "grad_norm": 0.11142280697822571, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3591 + }, + { + "epoch": 0.17942954193516159, + "grad_norm": 0.12626054883003235, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3592 + }, + { + "epoch": 0.17947949448024378, + "grad_norm": 0.14987686276435852, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3593 + }, + { + "epoch": 0.17952944702532594, + "grad_norm": 0.17070981860160828, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3594 + }, + { + "epoch": 0.1795793995704081, + "grad_norm": 0.15849104523658752, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3595 + }, + { + "epoch": 0.17962935211549028, + "grad_norm": 0.10632708668708801, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3596 + }, + { + "epoch": 0.17967930466057247, + "grad_norm": 0.11923624575138092, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3597 + }, + { + "epoch": 0.17972925720565464, + "grad_norm": 0.1406676173210144, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3598 + }, + { + "epoch": 0.1797792097507368, + "grad_norm": 0.17797602713108063, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3599 + }, + { + "epoch": 0.17982916229581897, + "grad_norm": 0.17417965829372406, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 3600 + }, + { + "epoch": 0.17987911484090113, + "grad_norm": 0.09860288351774216, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3601 + }, + { + "epoch": 0.17992906738598333, + "grad_norm": 0.10582650452852249, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3602 + }, + { + "epoch": 0.1799790199310655, + "grad_norm": 0.17542964220046997, + "learning_rate": 0.0001, + "loss": 0.1251, + "step": 3603 + }, + { + "epoch": 0.18002897247614766, + "grad_norm": 0.16320079565048218, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3604 + }, + { + "epoch": 0.18007892502122982, + "grad_norm": 0.13716839253902435, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3605 + }, + { + "epoch": 0.180128877566312, + "grad_norm": 0.12413199990987778, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3606 + }, + { + "epoch": 0.18017883011139418, + "grad_norm": 0.13636289536952972, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3607 + }, + { + "epoch": 
0.18022878265647635, + "grad_norm": 0.1616387665271759, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 3608 + }, + { + "epoch": 0.18027873520155852, + "grad_norm": 0.1358456015586853, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3609 + }, + { + "epoch": 0.18032868774664068, + "grad_norm": 0.2639073431491852, + "learning_rate": 0.0001, + "loss": 0.1332, + "step": 3610 + }, + { + "epoch": 0.18037864029172287, + "grad_norm": 0.10878317058086395, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3611 + }, + { + "epoch": 0.18042859283680504, + "grad_norm": 0.13693203032016754, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3612 + }, + { + "epoch": 0.1804785453818872, + "grad_norm": 0.12648892402648926, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3613 + }, + { + "epoch": 0.18052849792696937, + "grad_norm": 0.10058585554361343, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3614 + }, + { + "epoch": 0.18057845047205154, + "grad_norm": 0.15671244263648987, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3615 + }, + { + "epoch": 0.18062840301713373, + "grad_norm": 0.10784899443387985, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3616 + }, + { + "epoch": 0.1806783555622159, + "grad_norm": 0.23805604875087738, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 3617 + }, + { + "epoch": 0.18072830810729806, + "grad_norm": 0.12689153850078583, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3618 + }, + { + "epoch": 0.18077826065238023, + "grad_norm": 0.1286962330341339, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3619 + }, + { + "epoch": 0.18082821319746242, + "grad_norm": 0.13965149223804474, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3620 + }, + { + "epoch": 0.1808781657425446, + "grad_norm": 0.13652417063713074, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3621 + }, + { + "epoch": 0.18092811828762675, + "grad_norm": 0.11935410648584366, + "learning_rate": 0.0001, + "loss": 0.1277, + "step": 3622 + }, + { + "epoch": 0.18097807083270892, + "grad_norm": 0.09818411618471146, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3623 + }, + { + "epoch": 0.1810280233777911, + "grad_norm": 0.12296271324157715, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 3624 + }, + { + "epoch": 0.18107797592287328, + "grad_norm": 0.13563844561576843, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 3625 + }, + { + "epoch": 0.18112792846795545, + "grad_norm": 0.12360364943742752, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3626 + }, + { + "epoch": 0.1811778810130376, + "grad_norm": 0.11080722510814667, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3627 + }, + { + "epoch": 0.18122783355811978, + "grad_norm": 0.14368568360805511, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3628 + }, + { + "epoch": 0.18127778610320197, + "grad_norm": 0.1349625289440155, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3629 + }, + { + "epoch": 0.18132773864828414, + "grad_norm": 0.13326984643936157, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3630 + }, + { + "epoch": 0.1813776911933663, + "grad_norm": 0.13764528930187225, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3631 + }, + { + "epoch": 0.18142764373844847, + "grad_norm": 0.13936319947242737, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3632 + }, + { + "epoch": 0.18147759628353063, + "grad_norm": 0.12382876873016357, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3633 + }, + { + "epoch": 
0.18152754882861283, + "grad_norm": 0.1554289162158966, + "learning_rate": 0.0001, + "loss": 0.1272, + "step": 3634 + }, + { + "epoch": 0.181577501373695, + "grad_norm": 0.1342657208442688, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3635 + }, + { + "epoch": 0.18162745391877716, + "grad_norm": 0.33378151059150696, + "learning_rate": 0.0001, + "loss": 0.2674, + "step": 3636 + }, + { + "epoch": 0.18167740646385933, + "grad_norm": 0.19207648932933807, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 3637 + }, + { + "epoch": 0.18172735900894152, + "grad_norm": 0.11259417235851288, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 3638 + }, + { + "epoch": 0.18177731155402369, + "grad_norm": 0.13403666019439697, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3639 + }, + { + "epoch": 0.18182726409910585, + "grad_norm": 0.1345849633216858, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3640 + }, + { + "epoch": 0.18187721664418802, + "grad_norm": 0.17933325469493866, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3641 + }, + { + "epoch": 0.18192716918927018, + "grad_norm": 0.17937932908535004, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 3642 + }, + { + "epoch": 0.18197712173435238, + "grad_norm": 0.1609066128730774, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3643 + }, + { + "epoch": 0.18202707427943454, + "grad_norm": 0.1788482666015625, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3644 + }, + { + "epoch": 0.1820770268245167, + "grad_norm": 0.27582642436027527, + "learning_rate": 0.0001, + "loss": 0.1331, + "step": 3645 + }, + { + "epoch": 0.18212697936959887, + "grad_norm": 0.1784098595380783, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 3646 + }, + { + "epoch": 0.18217693191468104, + "grad_norm": 0.15303277969360352, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3647 + }, + { + "epoch": 0.18222688445976323, + "grad_norm": 0.1220184862613678, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3648 + }, + { + "epoch": 0.1822768370048454, + "grad_norm": 0.1479857861995697, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3649 + }, + { + "epoch": 0.18232678954992756, + "grad_norm": 0.21260476112365723, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 3650 + }, + { + "epoch": 0.18237674209500973, + "grad_norm": 0.14573736488819122, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3651 + }, + { + "epoch": 0.18242669464009192, + "grad_norm": 0.1384643167257309, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3652 + }, + { + "epoch": 0.1824766471851741, + "grad_norm": 0.15213365852832794, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3653 + }, + { + "epoch": 0.18252659973025626, + "grad_norm": 0.14146332442760468, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3654 + }, + { + "epoch": 0.18257655227533842, + "grad_norm": 0.14361025393009186, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3655 + }, + { + "epoch": 0.1826265048204206, + "grad_norm": 0.08434706926345825, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3656 + }, + { + "epoch": 0.18267645736550278, + "grad_norm": 0.11691757291555405, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3657 + }, + { + "epoch": 0.18272640991058495, + "grad_norm": 0.14941158890724182, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3658 + }, + { + "epoch": 0.1827763624556671, + "grad_norm": 0.15865962207317352, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3659 + }, + { + "epoch": 
0.18282631500074928, + "grad_norm": 0.15862959623336792, + "learning_rate": 0.0001, + "loss": 0.1275, + "step": 3660 + }, + { + "epoch": 0.18287626754583147, + "grad_norm": 0.1431736946105957, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3661 + }, + { + "epoch": 0.18292622009091364, + "grad_norm": 0.15958750247955322, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3662 + }, + { + "epoch": 0.1829761726359958, + "grad_norm": 0.1360558718442917, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3663 + }, + { + "epoch": 0.18302612518107797, + "grad_norm": 0.19648562371730804, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 3664 + }, + { + "epoch": 0.18307607772616014, + "grad_norm": 0.14747262001037598, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3665 + }, + { + "epoch": 0.18312603027124233, + "grad_norm": 0.12968342006206512, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3666 + }, + { + "epoch": 0.1831759828163245, + "grad_norm": 0.18168972432613373, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 3667 + }, + { + "epoch": 0.18322593536140666, + "grad_norm": 0.19790595769882202, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 3668 + }, + { + "epoch": 0.18327588790648883, + "grad_norm": 0.1337841898202896, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 3669 + }, + { + "epoch": 0.18332584045157102, + "grad_norm": 0.18071849644184113, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3670 + }, + { + "epoch": 0.1833757929966532, + "grad_norm": 0.1481981724500656, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3671 + }, + { + "epoch": 0.18342574554173535, + "grad_norm": 0.1407371312379837, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3672 + }, + { + "epoch": 0.18347569808681752, + "grad_norm": 0.18151229619979858, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 3673 + }, + { + "epoch": 0.18352565063189968, + "grad_norm": 0.2792227566242218, + "learning_rate": 0.0001, + "loss": 0.0252, + "step": 3674 + }, + { + "epoch": 0.18357560317698188, + "grad_norm": 0.15192514657974243, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3675 + }, + { + "epoch": 0.18362555572206404, + "grad_norm": 0.22153109312057495, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3676 + }, + { + "epoch": 0.1836755082671462, + "grad_norm": 0.2187923640012741, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3677 + }, + { + "epoch": 0.18372546081222837, + "grad_norm": 0.23814600706100464, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3678 + }, + { + "epoch": 0.18377541335731057, + "grad_norm": 0.2143978774547577, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3679 + }, + { + "epoch": 0.18382536590239273, + "grad_norm": 0.2612958550453186, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 3680 + }, + { + "epoch": 0.1838753184474749, + "grad_norm": 0.1952550709247589, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 3681 + }, + { + "epoch": 0.18392527099255707, + "grad_norm": 0.18490059673786163, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3682 + }, + { + "epoch": 0.18397522353763923, + "grad_norm": 0.24636194109916687, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3683 + }, + { + "epoch": 0.18402517608272143, + "grad_norm": 0.2485150247812271, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3684 + }, + { + "epoch": 0.1840751286278036, + "grad_norm": 0.2602778673171997, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3685 + }, + { + "epoch": 
0.18412508117288576, + "grad_norm": 0.24037306010723114, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 3686 + }, + { + "epoch": 0.18417503371796792, + "grad_norm": 0.1727411448955536, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3687 + }, + { + "epoch": 0.1842249862630501, + "grad_norm": 0.15396955609321594, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3688 + }, + { + "epoch": 0.18427493880813228, + "grad_norm": 0.1903076320886612, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3689 + }, + { + "epoch": 0.18432489135321445, + "grad_norm": 0.1701955944299698, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3690 + }, + { + "epoch": 0.18437484389829661, + "grad_norm": 0.1910553276538849, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3691 + }, + { + "epoch": 0.18442479644337878, + "grad_norm": 0.18538612127304077, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3692 + }, + { + "epoch": 0.18447474898846097, + "grad_norm": 0.23613858222961426, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 3693 + }, + { + "epoch": 0.18452470153354314, + "grad_norm": 0.15962736308574677, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3694 + }, + { + "epoch": 0.1845746540786253, + "grad_norm": 0.15524640679359436, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3695 + }, + { + "epoch": 0.18462460662370747, + "grad_norm": 0.15029673278331757, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3696 + }, + { + "epoch": 0.18467455916878964, + "grad_norm": 0.16734367609024048, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3697 + }, + { + "epoch": 0.18472451171387183, + "grad_norm": 0.17369939386844635, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3698 + }, + { + "epoch": 0.184774464258954, + "grad_norm": 0.17012107372283936, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3699 + }, + { + "epoch": 0.18482441680403616, + "grad_norm": 0.14454180002212524, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3700 + }, + { + "epoch": 0.18487436934911833, + "grad_norm": 0.1981503814458847, + "learning_rate": 0.0001, + "loss": 0.0258, + "step": 3701 + }, + { + "epoch": 0.18492432189420052, + "grad_norm": 0.14555181562900543, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3702 + }, + { + "epoch": 0.1849742744392827, + "grad_norm": 0.17100878059864044, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3703 + }, + { + "epoch": 0.18502422698436485, + "grad_norm": 0.1284024715423584, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3704 + }, + { + "epoch": 0.18507417952944702, + "grad_norm": 0.15262454748153687, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3705 + }, + { + "epoch": 0.18512413207452919, + "grad_norm": 0.1701250821352005, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 3706 + }, + { + "epoch": 0.18517408461961138, + "grad_norm": 0.10631030797958374, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3707 + }, + { + "epoch": 0.18522403716469354, + "grad_norm": 0.13394029438495636, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3708 + }, + { + "epoch": 0.1852739897097757, + "grad_norm": 0.17593130469322205, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 3709 + }, + { + "epoch": 0.18532394225485788, + "grad_norm": 0.144914910197258, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3710 + }, + { + "epoch": 0.18537389479994007, + "grad_norm": 0.10417655110359192, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3711 + }, + { + "epoch": 
0.18542384734502224, + "grad_norm": 0.13771528005599976, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3712 + }, + { + "epoch": 0.1854737998901044, + "grad_norm": 0.17479296028614044, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3713 + }, + { + "epoch": 0.18552375243518657, + "grad_norm": 0.13869722187519073, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 3714 + }, + { + "epoch": 0.18557370498026873, + "grad_norm": 0.11437613517045975, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3715 + }, + { + "epoch": 0.18562365752535093, + "grad_norm": 0.18321917951107025, + "learning_rate": 0.0001, + "loss": 0.2495, + "step": 3716 + }, + { + "epoch": 0.1856736100704331, + "grad_norm": 0.12647199630737305, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3717 + }, + { + "epoch": 0.18572356261551526, + "grad_norm": 0.1169242262840271, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3718 + }, + { + "epoch": 0.18577351516059742, + "grad_norm": 0.1194276437163353, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3719 + }, + { + "epoch": 0.18582346770567962, + "grad_norm": 0.11024703830480576, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3720 + }, + { + "epoch": 0.18587342025076178, + "grad_norm": 0.17889854311943054, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 3721 + }, + { + "epoch": 0.18592337279584395, + "grad_norm": 0.11932917684316635, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3722 + }, + { + "epoch": 0.18597332534092612, + "grad_norm": 0.11479837447404861, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3723 + }, + { + "epoch": 0.18602327788600828, + "grad_norm": 0.11777772754430771, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3724 + }, + { + "epoch": 0.18607323043109048, + "grad_norm": 0.13282150030136108, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3725 + }, + { + "epoch": 0.18612318297617264, + "grad_norm": 0.10250140726566315, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3726 + }, + { + "epoch": 0.1861731355212548, + "grad_norm": 0.18941861391067505, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 3727 + }, + { + "epoch": 0.18622308806633697, + "grad_norm": 0.11383891105651855, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3728 + }, + { + "epoch": 0.18627304061141914, + "grad_norm": 0.16653990745544434, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3729 + }, + { + "epoch": 0.18632299315650133, + "grad_norm": 0.1263188272714615, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3730 + }, + { + "epoch": 0.1863729457015835, + "grad_norm": 0.13339102268218994, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3731 + }, + { + "epoch": 0.18642289824666566, + "grad_norm": 0.13773813843727112, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3732 + }, + { + "epoch": 0.18647285079174783, + "grad_norm": 0.13150301575660706, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 3733 + }, + { + "epoch": 0.18652280333683002, + "grad_norm": 0.14030282199382782, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3734 + }, + { + "epoch": 0.1865727558819122, + "grad_norm": 0.13017883896827698, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3735 + }, + { + "epoch": 0.18662270842699435, + "grad_norm": 0.10921935737133026, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3736 + }, + { + "epoch": 0.18667266097207652, + "grad_norm": 0.11115012317895889, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3737 + }, + { + "epoch": 
0.1867226135171587, + "grad_norm": 0.14680476486682892, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3738 + }, + { + "epoch": 0.18677256606224088, + "grad_norm": 0.12946467101573944, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3739 + }, + { + "epoch": 0.18682251860732305, + "grad_norm": 0.11628226935863495, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 3740 + }, + { + "epoch": 0.1868724711524052, + "grad_norm": 0.08828949183225632, + "learning_rate": 0.0001, + "loss": 0.0005, + "step": 3741 + }, + { + "epoch": 0.18692242369748738, + "grad_norm": 0.16053679585456848, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3742 + }, + { + "epoch": 0.18697237624256957, + "grad_norm": 0.09696639329195023, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3743 + }, + { + "epoch": 0.18702232878765174, + "grad_norm": 0.1104586124420166, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3744 + }, + { + "epoch": 0.1870722813327339, + "grad_norm": 0.14822903275489807, + "learning_rate": 0.0001, + "loss": 0.1267, + "step": 3745 + }, + { + "epoch": 0.18712223387781607, + "grad_norm": 0.11497656255960464, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3746 + }, + { + "epoch": 0.18717218642289823, + "grad_norm": 0.17362868785858154, + "learning_rate": 0.0001, + "loss": 0.1295, + "step": 3747 + }, + { + "epoch": 0.18722213896798043, + "grad_norm": 0.1195683628320694, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3748 + }, + { + "epoch": 0.1872720915130626, + "grad_norm": 0.13080871105194092, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3749 + }, + { + "epoch": 0.18732204405814476, + "grad_norm": 0.13588669896125793, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3750 + }, + { + "epoch": 0.18737199660322693, + "grad_norm": 0.14203622937202454, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3751 + }, + { + "epoch": 0.18742194914830912, + "grad_norm": 0.1065102145075798, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3752 + }, + { + "epoch": 0.18747190169339129, + "grad_norm": 0.08300852030515671, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3753 + }, + { + "epoch": 0.18752185423847345, + "grad_norm": 0.10215450823307037, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3754 + }, + { + "epoch": 0.18757180678355562, + "grad_norm": 0.1374543458223343, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3755 + }, + { + "epoch": 0.18762175932863778, + "grad_norm": 0.31270650029182434, + "learning_rate": 0.0001, + "loss": 0.0667, + "step": 3756 + }, + { + "epoch": 0.18767171187371998, + "grad_norm": 0.16437695920467377, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3757 + }, + { + "epoch": 0.18772166441880214, + "grad_norm": 0.22120656073093414, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3758 + }, + { + "epoch": 0.1877716169638843, + "grad_norm": 0.24018940329551697, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3759 + }, + { + "epoch": 0.18782156950896647, + "grad_norm": 0.1839991807937622, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3760 + }, + { + "epoch": 0.18787152205404867, + "grad_norm": 0.19687753915786743, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 3761 + }, + { + "epoch": 0.18792147459913083, + "grad_norm": 0.1837812066078186, + "learning_rate": 0.0001, + "loss": 0.0127, + "step": 3762 + }, + { + "epoch": 0.187971427144213, + "grad_norm": 0.19479134678840637, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 3763 + }, + { + "epoch": 
0.18802137968929516, + "grad_norm": 0.14916788041591644, + "learning_rate": 0.0001, + "loss": 0.1292, + "step": 3764 + }, + { + "epoch": 0.18807133223437733, + "grad_norm": 0.21032997965812683, + "learning_rate": 0.0001, + "loss": 0.0246, + "step": 3765 + }, + { + "epoch": 0.18812128477945952, + "grad_norm": 0.215704545378685, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 3766 + }, + { + "epoch": 0.1881712373245417, + "grad_norm": 0.22363197803497314, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3767 + }, + { + "epoch": 0.18822118986962386, + "grad_norm": 0.19579653441905975, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 3768 + }, + { + "epoch": 0.18827114241470602, + "grad_norm": 0.16839101910591125, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3769 + }, + { + "epoch": 0.1883210949597882, + "grad_norm": 0.18908901512622833, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3770 + }, + { + "epoch": 0.18837104750487038, + "grad_norm": 0.15181152522563934, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 3771 + }, + { + "epoch": 0.18842100004995255, + "grad_norm": 0.1542092263698578, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3772 + }, + { + "epoch": 0.1884709525950347, + "grad_norm": 0.16993412375450134, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3773 + }, + { + "epoch": 0.18852090514011688, + "grad_norm": 0.14628159999847412, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3774 + }, + { + "epoch": 0.18857085768519907, + "grad_norm": 0.21021029353141785, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 3775 + }, + { + "epoch": 0.18862081023028124, + "grad_norm": 0.1506299078464508, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3776 + }, + { + "epoch": 0.1886707627753634, + "grad_norm": 0.13474881649017334, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3777 + }, + { + "epoch": 0.18872071532044557, + "grad_norm": 0.1359461098909378, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3778 + }, + { + "epoch": 0.18877066786552774, + "grad_norm": 0.18294131755828857, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 3779 + }, + { + "epoch": 0.18882062041060993, + "grad_norm": 0.14794710278511047, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3780 + }, + { + "epoch": 0.1888705729556921, + "grad_norm": 0.1399676501750946, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3781 + }, + { + "epoch": 0.18892052550077426, + "grad_norm": 0.14843401312828064, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3782 + }, + { + "epoch": 0.18897047804585643, + "grad_norm": 0.2977631986141205, + "learning_rate": 0.0001, + "loss": 0.033, + "step": 3783 + }, + { + "epoch": 0.18902043059093862, + "grad_norm": 0.1547791063785553, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 3784 + }, + { + "epoch": 0.1890703831360208, + "grad_norm": 0.1893726885318756, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 3785 + }, + { + "epoch": 0.18912033568110295, + "grad_norm": 0.15230782330036163, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3786 + }, + { + "epoch": 0.18917028822618512, + "grad_norm": 0.16175830364227295, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3787 + }, + { + "epoch": 0.18922024077126728, + "grad_norm": 0.36804261803627014, + "learning_rate": 0.0001, + "loss": 0.0735, + "step": 3788 + }, + { + "epoch": 0.18927019331634948, + "grad_norm": 0.16762860119342804, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3789 + }, + { + "epoch": 
0.18932014586143164, + "grad_norm": 0.20575208961963654, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 3790 + }, + { + "epoch": 0.1893700984065138, + "grad_norm": 0.19244641065597534, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 3791 + }, + { + "epoch": 0.18942005095159598, + "grad_norm": 0.19718368351459503, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3792 + }, + { + "epoch": 0.18947000349667817, + "grad_norm": 0.20318645238876343, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3793 + }, + { + "epoch": 0.18951995604176033, + "grad_norm": 0.21434521675109863, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 3794 + }, + { + "epoch": 0.1895699085868425, + "grad_norm": 0.25921741127967834, + "learning_rate": 0.0001, + "loss": 0.0504, + "step": 3795 + }, + { + "epoch": 0.18961986113192467, + "grad_norm": 0.19469000399112701, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3796 + }, + { + "epoch": 0.18966981367700683, + "grad_norm": 0.18948712944984436, + "learning_rate": 0.0001, + "loss": 0.13, + "step": 3797 + }, + { + "epoch": 0.18971976622208903, + "grad_norm": 0.19086985290050507, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3798 + }, + { + "epoch": 0.1897697187671712, + "grad_norm": 0.330478310585022, + "learning_rate": 0.0001, + "loss": 0.071, + "step": 3799 + }, + { + "epoch": 0.18981967131225336, + "grad_norm": 0.20487584173679352, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 3800 + }, + { + "epoch": 0.18986962385733552, + "grad_norm": 0.23866480588912964, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3801 + }, + { + "epoch": 0.18991957640241772, + "grad_norm": 0.2469099909067154, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3802 + }, + { + "epoch": 0.18996952894749988, + "grad_norm": 0.19054941833019257, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 3803 + }, + { + "epoch": 0.19001948149258205, + "grad_norm": 0.23740001022815704, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3804 + }, + { + "epoch": 0.19006943403766421, + "grad_norm": 0.24730828404426575, + "learning_rate": 0.0001, + "loss": 0.0115, + "step": 3805 + }, + { + "epoch": 0.19011938658274638, + "grad_norm": 0.23875312507152557, + "learning_rate": 0.0001, + "loss": 0.0247, + "step": 3806 + }, + { + "epoch": 0.19016933912782857, + "grad_norm": 0.21030570566654205, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 3807 + }, + { + "epoch": 0.19021929167291074, + "grad_norm": 0.19272884726524353, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3808 + }, + { + "epoch": 0.1902692442179929, + "grad_norm": 0.21618963778018951, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 3809 + }, + { + "epoch": 0.19031919676307507, + "grad_norm": 0.17551927268505096, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3810 + }, + { + "epoch": 0.19036914930815724, + "grad_norm": 0.2253021001815796, + "learning_rate": 0.0001, + "loss": 0.1626, + "step": 3811 + }, + { + "epoch": 0.19041910185323943, + "grad_norm": 0.21034963428974152, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3812 + }, + { + "epoch": 0.1904690543983216, + "grad_norm": 0.1547802984714508, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3813 + }, + { + "epoch": 0.19051900694340376, + "grad_norm": 0.18007050454616547, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 3814 + }, + { + "epoch": 0.19056895948848593, + "grad_norm": 0.33530372381210327, + "learning_rate": 0.0001, + "loss": 0.0554, + "step": 3815 + }, + { + "epoch": 
0.19061891203356812, + "grad_norm": 0.18067939579486847, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3816 + }, + { + "epoch": 0.1906688645786503, + "grad_norm": 0.1839686930179596, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3817 + }, + { + "epoch": 0.19071881712373245, + "grad_norm": 0.1696476936340332, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3818 + }, + { + "epoch": 0.19076876966881462, + "grad_norm": 0.1536937803030014, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3819 + }, + { + "epoch": 0.19081872221389679, + "grad_norm": 0.23596401512622833, + "learning_rate": 0.0001, + "loss": 0.0135, + "step": 3820 + }, + { + "epoch": 0.19086867475897898, + "grad_norm": 0.1964299976825714, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 3821 + }, + { + "epoch": 0.19091862730406114, + "grad_norm": 0.16999611258506775, + "learning_rate": 0.0001, + "loss": 0.0149, + "step": 3822 + }, + { + "epoch": 0.1909685798491433, + "grad_norm": 0.1814420372247696, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3823 + }, + { + "epoch": 0.19101853239422548, + "grad_norm": 0.16043201088905334, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3824 + }, + { + "epoch": 0.19106848493930767, + "grad_norm": 0.17925207316875458, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3825 + }, + { + "epoch": 0.19111843748438984, + "grad_norm": 0.17343257367610931, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3826 + }, + { + "epoch": 0.191168390029472, + "grad_norm": 0.2588725984096527, + "learning_rate": 0.0001, + "loss": 0.1485, + "step": 3827 + }, + { + "epoch": 0.19121834257455417, + "grad_norm": 0.14068740606307983, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3828 + }, + { + "epoch": 0.19126829511963633, + "grad_norm": 0.14332951605319977, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3829 + }, + { + "epoch": 0.19131824766471853, + "grad_norm": 0.1373288780450821, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3830 + }, + { + "epoch": 0.1913682002098007, + "grad_norm": 0.1815575808286667, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3831 + }, + { + "epoch": 0.19141815275488286, + "grad_norm": 0.2539111077785492, + "learning_rate": 0.0001, + "loss": 0.1444, + "step": 3832 + }, + { + "epoch": 0.19146810529996502, + "grad_norm": 0.1245090514421463, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3833 + }, + { + "epoch": 0.19151805784504722, + "grad_norm": 0.1502147614955902, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3834 + }, + { + "epoch": 0.19156801039012938, + "grad_norm": 0.20225535333156586, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 3835 + }, + { + "epoch": 0.19161796293521155, + "grad_norm": 0.17414319515228271, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3836 + }, + { + "epoch": 0.19166791548029372, + "grad_norm": 0.15557533502578735, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 3837 + }, + { + "epoch": 0.19171786802537588, + "grad_norm": 0.1335321068763733, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3838 + }, + { + "epoch": 0.19176782057045808, + "grad_norm": 0.16363361477851868, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3839 + }, + { + "epoch": 0.19181777311554024, + "grad_norm": 0.3035365343093872, + "learning_rate": 0.0001, + "loss": 0.0552, + "step": 3840 + }, + { + "epoch": 0.1918677256606224, + "grad_norm": 0.17358170449733734, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 3841 + }, + { + "epoch": 
0.19191767820570457, + "grad_norm": 0.19453221559524536, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3842 + }, + { + "epoch": 0.19196763075078677, + "grad_norm": 0.1781156063079834, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3843 + }, + { + "epoch": 0.19201758329586893, + "grad_norm": 0.2038896679878235, + "learning_rate": 0.0001, + "loss": 0.0177, + "step": 3844 + }, + { + "epoch": 0.1920675358409511, + "grad_norm": 0.22020772099494934, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 3845 + }, + { + "epoch": 0.19211748838603326, + "grad_norm": 0.19680552184581757, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3846 + }, + { + "epoch": 0.19216744093111543, + "grad_norm": 0.20436017215251923, + "learning_rate": 0.0001, + "loss": 0.1363, + "step": 3847 + }, + { + "epoch": 0.19221739347619762, + "grad_norm": 0.1576937735080719, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 3848 + }, + { + "epoch": 0.1922673460212798, + "grad_norm": 0.1869536191225052, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 3849 + }, + { + "epoch": 0.19231729856636195, + "grad_norm": 0.20112019777297974, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3850 + }, + { + "epoch": 0.19236725111144412, + "grad_norm": 0.20080547034740448, + "learning_rate": 0.0001, + "loss": 0.0137, + "step": 3851 + }, + { + "epoch": 0.1924172036565263, + "grad_norm": 0.15286125242710114, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3852 + }, + { + "epoch": 0.19246715620160848, + "grad_norm": 0.20117822289466858, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3853 + }, + { + "epoch": 0.19251710874669065, + "grad_norm": 0.15920782089233398, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3854 + }, + { + "epoch": 0.1925670612917728, + "grad_norm": 0.18567223846912384, + "learning_rate": 0.0001, + "loss": 0.0104, + "step": 3855 + }, + { + "epoch": 0.19261701383685498, + "grad_norm": 0.15262140333652496, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3856 + }, + { + "epoch": 0.19266696638193717, + "grad_norm": 0.14881442487239838, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3857 + }, + { + "epoch": 0.19271691892701934, + "grad_norm": 0.13410469889640808, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3858 + }, + { + "epoch": 0.1927668714721015, + "grad_norm": 0.1515001654624939, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3859 + }, + { + "epoch": 0.19281682401718367, + "grad_norm": 0.14305131137371063, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3860 + }, + { + "epoch": 0.19286677656226583, + "grad_norm": 0.13183905184268951, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3861 + }, + { + "epoch": 0.19291672910734803, + "grad_norm": 0.1851363480091095, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3862 + }, + { + "epoch": 0.1929666816524302, + "grad_norm": 0.15826502442359924, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3863 + }, + { + "epoch": 0.19301663419751236, + "grad_norm": 0.18123358488082886, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 3864 + }, + { + "epoch": 0.19306658674259453, + "grad_norm": 0.11063233762979507, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3865 + }, + { + "epoch": 0.19311653928767672, + "grad_norm": 0.16351856291294098, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3866 + }, + { + "epoch": 0.19316649183275889, + "grad_norm": 0.230702742934227, + "learning_rate": 0.0001, + "loss": 0.1378, + "step": 3867 + }, + { + "epoch": 
0.19321644437784105, + "grad_norm": 0.16124987602233887, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3868 + }, + { + "epoch": 0.19326639692292322, + "grad_norm": 0.12163180112838745, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3869 + }, + { + "epoch": 0.19331634946800538, + "grad_norm": 0.1710912585258484, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3870 + }, + { + "epoch": 0.19336630201308758, + "grad_norm": 0.1527850478887558, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3871 + }, + { + "epoch": 0.19341625455816974, + "grad_norm": 0.12230628728866577, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3872 + }, + { + "epoch": 0.1934662071032519, + "grad_norm": 0.1222217008471489, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3873 + }, + { + "epoch": 0.19351615964833407, + "grad_norm": 0.15000803768634796, + "learning_rate": 0.0001, + "loss": 0.1259, + "step": 3874 + }, + { + "epoch": 0.19356611219341627, + "grad_norm": 0.15871912240982056, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 3875 + }, + { + "epoch": 0.19361606473849843, + "grad_norm": 0.12074849754571915, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3876 + }, + { + "epoch": 0.1936660172835806, + "grad_norm": 0.12351047992706299, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3877 + }, + { + "epoch": 0.19371596982866277, + "grad_norm": 0.14091257750988007, + "learning_rate": 0.0001, + "loss": 0.1281, + "step": 3878 + }, + { + "epoch": 0.19376592237374493, + "grad_norm": 0.1614142805337906, + "learning_rate": 0.0001, + "loss": 0.1356, + "step": 3879 + }, + { + "epoch": 0.19381587491882712, + "grad_norm": 0.1407800316810608, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 3880 + }, + { + "epoch": 0.1938658274639093, + "grad_norm": 0.12000904232263565, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3881 + }, + { + "epoch": 0.19391578000899146, + "grad_norm": 0.11340799182653427, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3882 + }, + { + "epoch": 0.19396573255407362, + "grad_norm": 0.12369527667760849, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3883 + }, + { + "epoch": 0.19401568509915582, + "grad_norm": 0.11634442955255508, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3884 + }, + { + "epoch": 0.19406563764423798, + "grad_norm": 0.12119119614362717, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3885 + }, + { + "epoch": 0.19411559018932015, + "grad_norm": 0.15942694246768951, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 3886 + }, + { + "epoch": 0.1941655427344023, + "grad_norm": 0.17414748668670654, + "learning_rate": 0.0001, + "loss": 0.0215, + "step": 3887 + }, + { + "epoch": 0.19421549527948448, + "grad_norm": 0.12764717638492584, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3888 + }, + { + "epoch": 0.19426544782456667, + "grad_norm": 0.14366117119789124, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3889 + }, + { + "epoch": 0.19431540036964884, + "grad_norm": 0.21550583839416504, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 3890 + }, + { + "epoch": 0.194365352914731, + "grad_norm": 0.15872226655483246, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 3891 + }, + { + "epoch": 0.19441530545981317, + "grad_norm": 0.12642355263233185, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 3892 + }, + { + "epoch": 0.19446525800489534, + "grad_norm": 0.13945014774799347, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3893 + }, + { + "epoch": 
0.19451521054997753, + "grad_norm": 0.11566105484962463, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3894 + }, + { + "epoch": 0.1945651630950597, + "grad_norm": 0.2220306694507599, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3895 + }, + { + "epoch": 0.19461511564014186, + "grad_norm": 0.18134264647960663, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3896 + }, + { + "epoch": 0.19466506818522403, + "grad_norm": 0.18803471326828003, + "learning_rate": 0.0001, + "loss": 0.1321, + "step": 3897 + }, + { + "epoch": 0.19471502073030622, + "grad_norm": 0.17865100502967834, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3898 + }, + { + "epoch": 0.1947649732753884, + "grad_norm": 0.17777927219867706, + "learning_rate": 0.0001, + "loss": 0.0142, + "step": 3899 + }, + { + "epoch": 0.19481492582047055, + "grad_norm": 0.17934732139110565, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3900 + }, + { + "epoch": 0.19486487836555272, + "grad_norm": 0.23933221399784088, + "learning_rate": 0.0001, + "loss": 0.0171, + "step": 3901 + }, + { + "epoch": 0.19491483091063488, + "grad_norm": 0.19002693891525269, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 3902 + }, + { + "epoch": 0.19496478345571708, + "grad_norm": 0.16622109711170197, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3903 + }, + { + "epoch": 0.19501473600079924, + "grad_norm": 0.12920181453227997, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3904 + }, + { + "epoch": 0.1950646885458814, + "grad_norm": 0.22849172353744507, + "learning_rate": 0.0001, + "loss": 0.0251, + "step": 3905 + }, + { + "epoch": 0.19511464109096358, + "grad_norm": 0.20186810195446014, + "learning_rate": 0.0001, + "loss": 0.1277, + "step": 3906 + }, + { + "epoch": 0.19516459363604577, + "grad_norm": 0.19721712172031403, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 3907 + }, + { + "epoch": 0.19521454618112793, + "grad_norm": 0.3438423275947571, + "learning_rate": 0.0001, + "loss": 0.2752, + "step": 3908 + }, + { + "epoch": 0.1952644987262101, + "grad_norm": 0.22713181376457214, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 3909 + }, + { + "epoch": 0.19531445127129227, + "grad_norm": 0.1965995728969574, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3910 + }, + { + "epoch": 0.19536440381637443, + "grad_norm": 0.21943721175193787, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 3911 + }, + { + "epoch": 0.19541435636145663, + "grad_norm": 0.1685936152935028, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3912 + }, + { + "epoch": 0.1954643089065388, + "grad_norm": 0.17028170824050903, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 3913 + }, + { + "epoch": 0.19551426145162096, + "grad_norm": 0.25691723823547363, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3914 + }, + { + "epoch": 0.19556421399670312, + "grad_norm": 0.19160053133964539, + "learning_rate": 0.0001, + "loss": 0.1258, + "step": 3915 + }, + { + "epoch": 0.19561416654178532, + "grad_norm": 0.17296229302883148, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 3916 + }, + { + "epoch": 0.19566411908686748, + "grad_norm": 0.209933340549469, + "learning_rate": 0.0001, + "loss": 0.0113, + "step": 3917 + }, + { + "epoch": 0.19571407163194965, + "grad_norm": 0.2345939576625824, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3918 + }, + { + "epoch": 0.19576402417703181, + "grad_norm": 0.12841612100601196, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3919 + }, + { + "epoch": 
0.19581397672211398, + "grad_norm": 0.1365075260400772, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3920 + }, + { + "epoch": 0.19586392926719617, + "grad_norm": 0.18014898896217346, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 3921 + }, + { + "epoch": 0.19591388181227834, + "grad_norm": 0.16230183839797974, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3922 + }, + { + "epoch": 0.1959638343573605, + "grad_norm": 0.14503523707389832, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 3923 + }, + { + "epoch": 0.19601378690244267, + "grad_norm": 0.13584107160568237, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3924 + }, + { + "epoch": 0.19606373944752487, + "grad_norm": 0.19170616567134857, + "learning_rate": 0.0001, + "loss": 0.0164, + "step": 3925 + }, + { + "epoch": 0.19611369199260703, + "grad_norm": 0.17916354537010193, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 3926 + }, + { + "epoch": 0.1961636445376892, + "grad_norm": 0.1700867861509323, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 3927 + }, + { + "epoch": 0.19621359708277136, + "grad_norm": 0.23912787437438965, + "learning_rate": 0.0001, + "loss": 0.1314, + "step": 3928 + }, + { + "epoch": 0.19626354962785353, + "grad_norm": 0.16749101877212524, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3929 + }, + { + "epoch": 0.19631350217293572, + "grad_norm": 0.12813925743103027, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 3930 + }, + { + "epoch": 0.1963634547180179, + "grad_norm": 0.1338849812746048, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3931 + }, + { + "epoch": 0.19641340726310005, + "grad_norm": 0.12584170699119568, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3932 + }, + { + "epoch": 0.19646335980818222, + "grad_norm": 0.14111360907554626, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3933 + }, + { + "epoch": 0.19651331235326439, + "grad_norm": 0.13455545902252197, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3934 + }, + { + "epoch": 0.19656326489834658, + "grad_norm": 0.16723136603832245, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 3935 + }, + { + "epoch": 0.19661321744342874, + "grad_norm": 0.13046075403690338, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 3936 + }, + { + "epoch": 0.1966631699885109, + "grad_norm": 0.12216313183307648, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 3937 + }, + { + "epoch": 0.19671312253359308, + "grad_norm": 0.13171592354774475, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3938 + }, + { + "epoch": 0.19676307507867527, + "grad_norm": 0.17814379930496216, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 3939 + }, + { + "epoch": 0.19681302762375744, + "grad_norm": 0.17683836817741394, + "learning_rate": 0.0001, + "loss": 0.1281, + "step": 3940 + }, + { + "epoch": 0.1968629801688396, + "grad_norm": 0.15978781878948212, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3941 + }, + { + "epoch": 0.19691293271392177, + "grad_norm": 0.13942213356494904, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 3942 + }, + { + "epoch": 0.19696288525900393, + "grad_norm": 0.11709847301244736, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3943 + }, + { + "epoch": 0.19701283780408613, + "grad_norm": 0.15627214312553406, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 3944 + }, + { + "epoch": 0.1970627903491683, + "grad_norm": 0.14301301538944244, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 3945 + }, + { + "epoch": 
0.19711274289425046, + "grad_norm": 0.14832758903503418, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3946 + }, + { + "epoch": 0.19716269543933262, + "grad_norm": 0.13542620837688446, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 3947 + }, + { + "epoch": 0.19721264798441482, + "grad_norm": 0.1770097017288208, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 3948 + }, + { + "epoch": 0.19726260052949698, + "grad_norm": 0.2585621178150177, + "learning_rate": 0.0001, + "loss": 0.0189, + "step": 3949 + }, + { + "epoch": 0.19731255307457915, + "grad_norm": 0.15502959489822388, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3950 + }, + { + "epoch": 0.19736250561966132, + "grad_norm": 0.20384089648723602, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3951 + }, + { + "epoch": 0.19741245816474348, + "grad_norm": 0.18106135725975037, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3952 + }, + { + "epoch": 0.19746241070982568, + "grad_norm": 0.26653075218200684, + "learning_rate": 0.0001, + "loss": 0.0201, + "step": 3953 + }, + { + "epoch": 0.19751236325490784, + "grad_norm": 0.23245255649089813, + "learning_rate": 0.0001, + "loss": 0.0115, + "step": 3954 + }, + { + "epoch": 0.19756231579999, + "grad_norm": 0.258915513753891, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3955 + }, + { + "epoch": 0.19761226834507217, + "grad_norm": 0.21508266031742096, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 3956 + }, + { + "epoch": 0.19766222089015437, + "grad_norm": 0.2640083134174347, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 3957 + }, + { + "epoch": 0.19771217343523653, + "grad_norm": 0.3559456169605255, + "learning_rate": 0.0001, + "loss": 0.0384, + "step": 3958 + }, + { + "epoch": 0.1977621259803187, + "grad_norm": 0.28200793266296387, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 3959 + }, + { + "epoch": 0.19781207852540086, + "grad_norm": 0.20215678215026855, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 3960 + }, + { + "epoch": 0.19786203107048303, + "grad_norm": 0.2042144387960434, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 3961 + }, + { + "epoch": 0.19791198361556522, + "grad_norm": 0.2854761481285095, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 3962 + }, + { + "epoch": 0.1979619361606474, + "grad_norm": 0.3050578534603119, + "learning_rate": 0.0001, + "loss": 0.1329, + "step": 3963 + }, + { + "epoch": 0.19801188870572956, + "grad_norm": 0.20922857522964478, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 3964 + }, + { + "epoch": 0.19806184125081172, + "grad_norm": 0.24461522698402405, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 3965 + }, + { + "epoch": 0.19811179379589391, + "grad_norm": 0.22691656649112701, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3966 + }, + { + "epoch": 0.19816174634097608, + "grad_norm": 0.20011356472969055, + "learning_rate": 0.0001, + "loss": 0.0231, + "step": 3967 + }, + { + "epoch": 0.19821169888605825, + "grad_norm": 0.20148110389709473, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 3968 + }, + { + "epoch": 0.1982616514311404, + "grad_norm": 0.2467770129442215, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3969 + }, + { + "epoch": 0.19831160397622258, + "grad_norm": 0.28165918588638306, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 3970 + }, + { + "epoch": 0.19836155652130477, + "grad_norm": 0.21285739541053772, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3971 + }, + { + "epoch": 
0.19841150906638694, + "grad_norm": 0.20156130194664001, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 3972 + }, + { + "epoch": 0.1984614616114691, + "grad_norm": 0.20402169227600098, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3973 + }, + { + "epoch": 0.19851141415655127, + "grad_norm": 0.20200882852077484, + "learning_rate": 0.0001, + "loss": 0.0149, + "step": 3974 + }, + { + "epoch": 0.19856136670163343, + "grad_norm": 0.19325532019138336, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3975 + }, + { + "epoch": 0.19861131924671563, + "grad_norm": 0.22853368520736694, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3976 + }, + { + "epoch": 0.1986612717917978, + "grad_norm": 0.18312625586986542, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 3977 + }, + { + "epoch": 0.19871122433687996, + "grad_norm": 0.1858000010251999, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 3978 + }, + { + "epoch": 0.19876117688196213, + "grad_norm": 0.36356493830680847, + "learning_rate": 0.0001, + "loss": 0.2643, + "step": 3979 + }, + { + "epoch": 0.19881112942704432, + "grad_norm": 0.2649712860584259, + "learning_rate": 0.0001, + "loss": 0.0117, + "step": 3980 + }, + { + "epoch": 0.19886108197212649, + "grad_norm": 0.18839123845100403, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 3981 + }, + { + "epoch": 0.19891103451720865, + "grad_norm": 0.16899047791957855, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 3982 + }, + { + "epoch": 0.19896098706229082, + "grad_norm": 0.15967178344726562, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 3983 + }, + { + "epoch": 0.19901093960737298, + "grad_norm": 0.2016158252954483, + "learning_rate": 0.0001, + "loss": 0.0078, + "step": 3984 + }, + { + "epoch": 0.19906089215245518, + "grad_norm": 0.16848383843898773, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 3985 + }, + { + "epoch": 0.19911084469753734, + "grad_norm": 0.2194928079843521, + "learning_rate": 0.0001, + "loss": 0.1262, + "step": 3986 + }, + { + "epoch": 0.1991607972426195, + "grad_norm": 0.17979417741298676, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 3987 + }, + { + "epoch": 0.19921074978770167, + "grad_norm": 0.14156322181224823, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3988 + }, + { + "epoch": 0.19926070233278387, + "grad_norm": 0.1887117475271225, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 3989 + }, + { + "epoch": 0.19931065487786603, + "grad_norm": 0.14110387861728668, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3990 + }, + { + "epoch": 0.1993606074229482, + "grad_norm": 0.1346404105424881, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 3991 + }, + { + "epoch": 0.19941055996803037, + "grad_norm": 0.144647017121315, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 3992 + }, + { + "epoch": 0.19946051251311253, + "grad_norm": 0.1527031660079956, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 3993 + }, + { + "epoch": 0.19951046505819472, + "grad_norm": 0.28210657835006714, + "learning_rate": 0.0001, + "loss": 0.1337, + "step": 3994 + }, + { + "epoch": 0.1995604176032769, + "grad_norm": 0.14539086818695068, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 3995 + }, + { + "epoch": 0.19961037014835906, + "grad_norm": 0.14557835459709167, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 3996 + }, + { + "epoch": 0.19966032269344122, + "grad_norm": 0.15621669590473175, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 3997 + }, + { + "epoch": 
0.19971027523852342, + "grad_norm": 0.1549607217311859, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 3998 + }, + { + "epoch": 0.19976022778360558, + "grad_norm": 0.146994948387146, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 3999 + }, + { + "epoch": 0.19981018032868775, + "grad_norm": 0.16436868906021118, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4000 + }, + { + "epoch": 0.1998601328737699, + "grad_norm": 0.1582600623369217, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4001 + }, + { + "epoch": 0.19991008541885208, + "grad_norm": 0.14024904370307922, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4002 + }, + { + "epoch": 0.19996003796393427, + "grad_norm": 0.12320385873317719, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4003 + }, + { + "epoch": 0.20000999050901644, + "grad_norm": 0.11240807920694351, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4004 + }, + { + "epoch": 0.2000599430540986, + "grad_norm": 0.11097367107868195, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4005 + }, + { + "epoch": 0.20010989559918077, + "grad_norm": 0.13143932819366455, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4006 + }, + { + "epoch": 0.20015984814426296, + "grad_norm": 0.2792966067790985, + "learning_rate": 0.0001, + "loss": 0.0232, + "step": 4007 + }, + { + "epoch": 0.20020980068934513, + "grad_norm": 0.11232639849185944, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4008 + }, + { + "epoch": 0.2002597532344273, + "grad_norm": 0.2251305729150772, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4009 + }, + { + "epoch": 0.20030970577950946, + "grad_norm": 0.171182319521904, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4010 + }, + { + "epoch": 0.20035965832459163, + "grad_norm": 0.19709667563438416, + "learning_rate": 0.0001, + "loss": 0.014, + "step": 4011 + }, + { + "epoch": 0.20040961086967382, + "grad_norm": 0.20571798086166382, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4012 + }, + { + "epoch": 0.200459563414756, + "grad_norm": 0.24969187378883362, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 4013 + }, + { + "epoch": 0.20050951595983815, + "grad_norm": 0.1525377482175827, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4014 + }, + { + "epoch": 0.20055946850492032, + "grad_norm": 0.19076809287071228, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4015 + }, + { + "epoch": 0.20060942105000248, + "grad_norm": 0.23716402053833008, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 4016 + }, + { + "epoch": 0.20065937359508468, + "grad_norm": 0.17862530052661896, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 4017 + }, + { + "epoch": 0.20070932614016684, + "grad_norm": 0.20793825387954712, + "learning_rate": 0.0001, + "loss": 0.0102, + "step": 4018 + }, + { + "epoch": 0.200759278685249, + "grad_norm": 0.1893077790737152, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4019 + }, + { + "epoch": 0.20080923123033118, + "grad_norm": 0.1723158359527588, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4020 + }, + { + "epoch": 0.20085918377541337, + "grad_norm": 0.19872744381427765, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4021 + }, + { + "epoch": 0.20090913632049553, + "grad_norm": 0.21671825647354126, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4022 + }, + { + "epoch": 0.2009590888655777, + "grad_norm": 0.1842777132987976, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 4023 + }, + { + "epoch": 
0.20100904141065987, + "grad_norm": 0.17971022427082062, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4024 + }, + { + "epoch": 0.20105899395574203, + "grad_norm": 0.16013430058956146, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4025 + }, + { + "epoch": 0.20110894650082423, + "grad_norm": 0.19081918895244598, + "learning_rate": 0.0001, + "loss": 0.1383, + "step": 4026 + }, + { + "epoch": 0.2011588990459064, + "grad_norm": 0.2074059247970581, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4027 + }, + { + "epoch": 0.20120885159098856, + "grad_norm": 0.1858195662498474, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4028 + }, + { + "epoch": 0.20125880413607072, + "grad_norm": 0.22589264810085297, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 4029 + }, + { + "epoch": 0.20130875668115292, + "grad_norm": 0.2020837366580963, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4030 + }, + { + "epoch": 0.20135870922623508, + "grad_norm": 0.19742989540100098, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4031 + }, + { + "epoch": 0.20140866177131725, + "grad_norm": 0.15245606005191803, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4032 + }, + { + "epoch": 0.20145861431639941, + "grad_norm": 0.20909181237220764, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 4033 + }, + { + "epoch": 0.20150856686148158, + "grad_norm": 0.19570817053318024, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4034 + }, + { + "epoch": 0.20155851940656377, + "grad_norm": 0.2032807469367981, + "learning_rate": 0.0001, + "loss": 0.031, + "step": 4035 + }, + { + "epoch": 0.20160847195164594, + "grad_norm": 0.15072187781333923, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4036 + }, + { + "epoch": 0.2016584244967281, + "grad_norm": 0.16371554136276245, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4037 + }, + { + "epoch": 0.20170837704181027, + "grad_norm": 0.17297376692295074, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4038 + }, + { + "epoch": 0.20175832958689247, + "grad_norm": 0.14674173295497894, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4039 + }, + { + "epoch": 0.20180828213197463, + "grad_norm": 0.16236329078674316, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4040 + }, + { + "epoch": 0.2018582346770568, + "grad_norm": 0.15921154618263245, + "learning_rate": 0.0001, + "loss": 0.1283, + "step": 4041 + }, + { + "epoch": 0.20190818722213896, + "grad_norm": 0.16778938472270966, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 4042 + }, + { + "epoch": 0.20195813976722113, + "grad_norm": 0.1186925619840622, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4043 + }, + { + "epoch": 0.20200809231230332, + "grad_norm": 0.15050582587718964, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4044 + }, + { + "epoch": 0.2020580448573855, + "grad_norm": 0.21061015129089355, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 4045 + }, + { + "epoch": 0.20210799740246765, + "grad_norm": 0.2175966054201126, + "learning_rate": 0.0001, + "loss": 0.0254, + "step": 4046 + }, + { + "epoch": 0.20215794994754982, + "grad_norm": 0.1563609540462494, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4047 + }, + { + "epoch": 0.202207902492632, + "grad_norm": 0.1842314749956131, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4048 + }, + { + "epoch": 0.20225785503771418, + "grad_norm": 0.1511191725730896, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4049 + }, + { + "epoch": 
0.20230780758279635, + "grad_norm": 0.1518794447183609, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4050 + }, + { + "epoch": 0.2023577601278785, + "grad_norm": 0.1273856908082962, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4051 + }, + { + "epoch": 0.20240771267296068, + "grad_norm": 0.11982972919940948, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 4052 + }, + { + "epoch": 0.20245766521804287, + "grad_norm": 0.13429178297519684, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4053 + }, + { + "epoch": 0.20250761776312504, + "grad_norm": 0.14765675365924835, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4054 + }, + { + "epoch": 0.2025575703082072, + "grad_norm": 0.14471450448036194, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4055 + }, + { + "epoch": 0.20260752285328937, + "grad_norm": 0.15405678749084473, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4056 + }, + { + "epoch": 0.20265747539837153, + "grad_norm": 0.16594237089157104, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 4057 + }, + { + "epoch": 0.20270742794345373, + "grad_norm": 0.12371552735567093, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4058 + }, + { + "epoch": 0.2027573804885359, + "grad_norm": 0.14880268275737762, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 4059 + }, + { + "epoch": 0.20280733303361806, + "grad_norm": 0.12858274579048157, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4060 + }, + { + "epoch": 0.20285728557870022, + "grad_norm": 0.12396805733442307, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4061 + }, + { + "epoch": 0.20290723812378242, + "grad_norm": 0.1039007306098938, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4062 + }, + { + "epoch": 0.20295719066886458, + "grad_norm": 0.14054355025291443, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4063 + }, + { + "epoch": 0.20300714321394675, + "grad_norm": 0.10413596034049988, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4064 + }, + { + "epoch": 0.20305709575902892, + "grad_norm": 0.11336204409599304, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4065 + }, + { + "epoch": 0.20310704830411108, + "grad_norm": 0.1591884344816208, + "learning_rate": 0.0001, + "loss": 0.0301, + "step": 4066 + }, + { + "epoch": 0.20315700084919328, + "grad_norm": 0.12446451187133789, + "learning_rate": 0.0001, + "loss": 0.1277, + "step": 4067 + }, + { + "epoch": 0.20320695339427544, + "grad_norm": 0.1435154676437378, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4068 + }, + { + "epoch": 0.2032569059393576, + "grad_norm": 0.11584332585334778, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4069 + }, + { + "epoch": 0.20330685848443977, + "grad_norm": 0.31158480048179626, + "learning_rate": 0.0001, + "loss": 0.0271, + "step": 4070 + }, + { + "epoch": 0.20335681102952197, + "grad_norm": 0.15213243663311005, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4071 + }, + { + "epoch": 0.20340676357460413, + "grad_norm": 0.39774978160858154, + "learning_rate": 0.0001, + "loss": 0.1559, + "step": 4072 + }, + { + "epoch": 0.2034567161196863, + "grad_norm": 0.14904160797595978, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4073 + }, + { + "epoch": 0.20350666866476846, + "grad_norm": 0.22679606080055237, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 4074 + }, + { + "epoch": 0.20355662120985063, + "grad_norm": 0.2830743193626404, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 4075 + }, + { + "epoch": 
0.20360657375493282, + "grad_norm": 0.16063381731510162, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4076 + }, + { + "epoch": 0.203656526300015, + "grad_norm": 0.2041870802640915, + "learning_rate": 0.0001, + "loss": 0.0148, + "step": 4077 + }, + { + "epoch": 0.20370647884509716, + "grad_norm": 0.17184577882289886, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 4078 + }, + { + "epoch": 0.20375643139017932, + "grad_norm": 0.23237314820289612, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 4079 + }, + { + "epoch": 0.20380638393526151, + "grad_norm": 0.1779298484325409, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4080 + }, + { + "epoch": 0.20385633648034368, + "grad_norm": 0.189801424741745, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4081 + }, + { + "epoch": 0.20390628902542585, + "grad_norm": 0.2213854193687439, + "learning_rate": 0.0001, + "loss": 0.0047, + "step": 4082 + }, + { + "epoch": 0.203956241570508, + "grad_norm": 0.25322476029396057, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 4083 + }, + { + "epoch": 0.20400619411559018, + "grad_norm": 0.17127738893032074, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4084 + }, + { + "epoch": 0.20405614666067237, + "grad_norm": 0.154336079955101, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4085 + }, + { + "epoch": 0.20410609920575454, + "grad_norm": 0.14674215018749237, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4086 + }, + { + "epoch": 0.2041560517508367, + "grad_norm": 0.18798784911632538, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4087 + }, + { + "epoch": 0.20420600429591887, + "grad_norm": 0.1619892418384552, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4088 + }, + { + "epoch": 0.20425595684100106, + "grad_norm": 0.16873441636562347, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4089 + }, + { + "epoch": 0.20430590938608323, + "grad_norm": 0.18354910612106323, + "learning_rate": 0.0001, + "loss": 0.1307, + "step": 4090 + }, + { + "epoch": 0.2043558619311654, + "grad_norm": 0.15260513126850128, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4091 + }, + { + "epoch": 0.20440581447624756, + "grad_norm": 0.20681698620319366, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 4092 + }, + { + "epoch": 0.20445576702132973, + "grad_norm": 0.13918039202690125, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4093 + }, + { + "epoch": 0.20450571956641192, + "grad_norm": 0.2672349214553833, + "learning_rate": 0.0001, + "loss": 0.0269, + "step": 4094 + }, + { + "epoch": 0.20455567211149409, + "grad_norm": 0.18588200211524963, + "learning_rate": 0.0001, + "loss": 0.0185, + "step": 4095 + }, + { + "epoch": 0.20460562465657625, + "grad_norm": 0.1967248171567917, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 4096 + }, + { + "epoch": 0.20465557720165842, + "grad_norm": 0.1600494384765625, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 4097 + }, + { + "epoch": 0.20470552974674058, + "grad_norm": 0.15517227351665497, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4098 + }, + { + "epoch": 0.20475548229182278, + "grad_norm": 0.21486669778823853, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4099 + }, + { + "epoch": 0.20480543483690494, + "grad_norm": 0.15750651061534882, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4100 + }, + { + "epoch": 0.2048553873819871, + "grad_norm": 0.15682701766490936, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4101 + }, + { + "epoch": 
0.20490533992706927, + "grad_norm": 0.1740035116672516, + "learning_rate": 0.0001, + "loss": 0.1299, + "step": 4102 + }, + { + "epoch": 0.20495529247215147, + "grad_norm": 0.14548498392105103, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4103 + }, + { + "epoch": 0.20500524501723363, + "grad_norm": 0.15233971178531647, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 4104 + }, + { + "epoch": 0.2050551975623158, + "grad_norm": 0.12528395652770996, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4105 + }, + { + "epoch": 0.20510515010739797, + "grad_norm": 0.13517293334007263, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4106 + }, + { + "epoch": 0.20515510265248013, + "grad_norm": 0.19742028415203094, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4107 + }, + { + "epoch": 0.20520505519756232, + "grad_norm": 0.11046238988637924, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4108 + }, + { + "epoch": 0.2052550077426445, + "grad_norm": 0.12550348043441772, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4109 + }, + { + "epoch": 0.20530496028772666, + "grad_norm": 0.21469105780124664, + "learning_rate": 0.0001, + "loss": 0.0327, + "step": 4110 + }, + { + "epoch": 0.20535491283280882, + "grad_norm": 0.21580550074577332, + "learning_rate": 0.0001, + "loss": 0.1301, + "step": 4111 + }, + { + "epoch": 0.20540486537789102, + "grad_norm": 0.1051882728934288, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4112 + }, + { + "epoch": 0.20545481792297318, + "grad_norm": 0.24835458397865295, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 4113 + }, + { + "epoch": 0.20550477046805535, + "grad_norm": 0.19385303556919098, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 4114 + }, + { + "epoch": 0.2055547230131375, + "grad_norm": 0.1987978219985962, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4115 + }, + { + "epoch": 0.20560467555821968, + "grad_norm": 0.1901455670595169, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 4116 + }, + { + "epoch": 0.20565462810330187, + "grad_norm": 0.17151403427124023, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4117 + }, + { + "epoch": 0.20570458064838404, + "grad_norm": 0.2500184178352356, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 4118 + }, + { + "epoch": 0.2057545331934662, + "grad_norm": 0.2590499520301819, + "learning_rate": 0.0001, + "loss": 0.025, + "step": 4119 + }, + { + "epoch": 0.20580448573854837, + "grad_norm": 0.19745826721191406, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4120 + }, + { + "epoch": 0.20585443828363056, + "grad_norm": 0.1940644383430481, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4121 + }, + { + "epoch": 0.20590439082871273, + "grad_norm": 0.18624763190746307, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4122 + }, + { + "epoch": 0.2059543433737949, + "grad_norm": 0.19165384769439697, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 4123 + }, + { + "epoch": 0.20600429591887706, + "grad_norm": 0.27877146005630493, + "learning_rate": 0.0001, + "loss": 0.0152, + "step": 4124 + }, + { + "epoch": 0.20605424846395923, + "grad_norm": 0.21535111963748932, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4125 + }, + { + "epoch": 0.20610420100904142, + "grad_norm": 0.20562057197093964, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 4126 + }, + { + "epoch": 0.2061541535541236, + "grad_norm": 0.29123520851135254, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 4127 + }, + { + "epoch": 
0.20620410609920575, + "grad_norm": 0.21089506149291992, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4128 + }, + { + "epoch": 0.20625405864428792, + "grad_norm": 0.22295668721199036, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4129 + }, + { + "epoch": 0.20630401118937008, + "grad_norm": 0.18527579307556152, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4130 + }, + { + "epoch": 0.20635396373445228, + "grad_norm": 0.17141102254390717, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4131 + }, + { + "epoch": 0.20640391627953444, + "grad_norm": 0.20520484447479248, + "learning_rate": 0.0001, + "loss": 0.1316, + "step": 4132 + }, + { + "epoch": 0.2064538688246166, + "grad_norm": 0.1743798851966858, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 4133 + }, + { + "epoch": 0.20650382136969878, + "grad_norm": 0.2067987024784088, + "learning_rate": 0.0001, + "loss": 0.0134, + "step": 4134 + }, + { + "epoch": 0.20655377391478097, + "grad_norm": 0.20886540412902832, + "learning_rate": 0.0001, + "loss": 0.0112, + "step": 4135 + }, + { + "epoch": 0.20660372645986314, + "grad_norm": 0.14492329955101013, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4136 + }, + { + "epoch": 0.2066536790049453, + "grad_norm": 0.45006105303764343, + "learning_rate": 0.0001, + "loss": 0.0273, + "step": 4137 + }, + { + "epoch": 0.20670363155002747, + "grad_norm": 0.20632661879062653, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4138 + }, + { + "epoch": 0.20675358409510963, + "grad_norm": 0.2714262306690216, + "learning_rate": 0.0001, + "loss": 0.1294, + "step": 4139 + }, + { + "epoch": 0.20680353664019183, + "grad_norm": 0.37370526790618896, + "learning_rate": 0.0001, + "loss": 0.0306, + "step": 4140 + }, + { + "epoch": 0.206853489185274, + "grad_norm": 0.17211411893367767, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4141 + }, + { + "epoch": 0.20690344173035616, + "grad_norm": 0.22603324055671692, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4142 + }, + { + "epoch": 0.20695339427543832, + "grad_norm": 0.23350578546524048, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4143 + }, + { + "epoch": 0.20700334682052052, + "grad_norm": 0.16403616964817047, + "learning_rate": 0.0001, + "loss": 0.1262, + "step": 4144 + }, + { + "epoch": 0.20705329936560268, + "grad_norm": 0.1730339378118515, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4145 + }, + { + "epoch": 0.20710325191068485, + "grad_norm": 0.1850011795759201, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 4146 + }, + { + "epoch": 0.20715320445576701, + "grad_norm": 0.36954227089881897, + "learning_rate": 0.0001, + "loss": 0.0435, + "step": 4147 + }, + { + "epoch": 0.20720315700084918, + "grad_norm": 0.21035762131214142, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 4148 + }, + { + "epoch": 0.20725310954593137, + "grad_norm": 0.18451696634292603, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4149 + }, + { + "epoch": 0.20730306209101354, + "grad_norm": 0.2752225399017334, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 4150 + }, + { + "epoch": 0.2073530146360957, + "grad_norm": 0.27759212255477905, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 4151 + }, + { + "epoch": 0.20740296718117787, + "grad_norm": 0.23680081963539124, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4152 + }, + { + "epoch": 0.20745291972626007, + "grad_norm": 0.21584990620613098, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4153 + }, + { + "epoch": 
0.20750287227134223, + "grad_norm": 0.18379420042037964, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4154 + }, + { + "epoch": 0.2075528248164244, + "grad_norm": 0.17876960337162018, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4155 + }, + { + "epoch": 0.20760277736150656, + "grad_norm": 0.19070909917354584, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4156 + }, + { + "epoch": 0.20765272990658873, + "grad_norm": 0.2341756671667099, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4157 + }, + { + "epoch": 0.20770268245167092, + "grad_norm": 0.1900654435157776, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 4158 + }, + { + "epoch": 0.2077526349967531, + "grad_norm": 0.1504492312669754, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4159 + }, + { + "epoch": 0.20780258754183525, + "grad_norm": 0.19023917615413666, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 4160 + }, + { + "epoch": 0.20785254008691742, + "grad_norm": 0.18740501999855042, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4161 + }, + { + "epoch": 0.2079024926319996, + "grad_norm": 0.26915594935417175, + "learning_rate": 0.0001, + "loss": 0.0221, + "step": 4162 + }, + { + "epoch": 0.20795244517708178, + "grad_norm": 0.14262281358242035, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4163 + }, + { + "epoch": 0.20800239772216395, + "grad_norm": 0.20539602637290955, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4164 + }, + { + "epoch": 0.2080523502672461, + "grad_norm": 0.21320651471614838, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4165 + }, + { + "epoch": 0.20810230281232828, + "grad_norm": 0.18572968244552612, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4166 + }, + { + "epoch": 0.20815225535741047, + "grad_norm": 0.1574053317308426, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4167 + }, + { + "epoch": 0.20820220790249264, + "grad_norm": 0.11030549556016922, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4168 + }, + { + "epoch": 0.2082521604475748, + "grad_norm": 0.2217383235692978, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 4169 + }, + { + "epoch": 0.20830211299265697, + "grad_norm": 0.17374861240386963, + "learning_rate": 0.0001, + "loss": 0.1291, + "step": 4170 + }, + { + "epoch": 0.20835206553773913, + "grad_norm": 0.149580717086792, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4171 + }, + { + "epoch": 0.20840201808282133, + "grad_norm": 0.14647488296031952, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4172 + }, + { + "epoch": 0.2084519706279035, + "grad_norm": 0.1491505354642868, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4173 + }, + { + "epoch": 0.20850192317298566, + "grad_norm": 0.17916974425315857, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 4174 + }, + { + "epoch": 0.20855187571806783, + "grad_norm": 0.1518479585647583, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4175 + }, + { + "epoch": 0.20860182826315002, + "grad_norm": 0.20708978176116943, + "learning_rate": 0.0001, + "loss": 0.1306, + "step": 4176 + }, + { + "epoch": 0.20865178080823218, + "grad_norm": 0.19785290956497192, + "learning_rate": 0.0001, + "loss": 0.1403, + "step": 4177 + }, + { + "epoch": 0.20870173335331435, + "grad_norm": 0.12092983722686768, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4178 + }, + { + "epoch": 0.20875168589839652, + "grad_norm": 0.19085244834423065, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 4179 + }, + { + "epoch": 
0.20880163844347868, + "grad_norm": 0.15202496945858002, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4180 + }, + { + "epoch": 0.20885159098856088, + "grad_norm": 0.15533755719661713, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4181 + }, + { + "epoch": 0.20890154353364304, + "grad_norm": 0.18400892615318298, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4182 + }, + { + "epoch": 0.2089514960787252, + "grad_norm": 0.26398470997810364, + "learning_rate": 0.0001, + "loss": 0.0261, + "step": 4183 + }, + { + "epoch": 0.20900144862380737, + "grad_norm": 0.16909559071063995, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4184 + }, + { + "epoch": 0.20905140116888957, + "grad_norm": 0.18636806309223175, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4185 + }, + { + "epoch": 0.20910135371397173, + "grad_norm": 0.17691999673843384, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4186 + }, + { + "epoch": 0.2091513062590539, + "grad_norm": 0.16841083765029907, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4187 + }, + { + "epoch": 0.20920125880413606, + "grad_norm": 0.17616333067417145, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4188 + }, + { + "epoch": 0.20925121134921823, + "grad_norm": 0.13383004069328308, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4189 + }, + { + "epoch": 0.20930116389430042, + "grad_norm": 0.12826287746429443, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4190 + }, + { + "epoch": 0.2093511164393826, + "grad_norm": 0.12185932695865631, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 4191 + }, + { + "epoch": 0.20940106898446476, + "grad_norm": 0.2297944724559784, + "learning_rate": 0.0001, + "loss": 0.0185, + "step": 4192 + }, + { + "epoch": 0.20945102152954692, + "grad_norm": 0.19292236864566803, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4193 + }, + { + "epoch": 0.20950097407462911, + "grad_norm": 0.16779041290283203, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4194 + }, + { + "epoch": 0.20955092661971128, + "grad_norm": 0.16881945729255676, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4195 + }, + { + "epoch": 0.20960087916479345, + "grad_norm": 0.17638398706912994, + "learning_rate": 0.0001, + "loss": 0.1261, + "step": 4196 + }, + { + "epoch": 0.2096508317098756, + "grad_norm": 0.16875658929347992, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4197 + }, + { + "epoch": 0.20970078425495778, + "grad_norm": 0.15241476893424988, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4198 + }, + { + "epoch": 0.20975073680003997, + "grad_norm": 0.14316777884960175, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4199 + }, + { + "epoch": 0.20980068934512214, + "grad_norm": 0.22337685525417328, + "learning_rate": 0.0001, + "loss": 0.0316, + "step": 4200 + }, + { + "epoch": 0.2098506418902043, + "grad_norm": 0.19440540671348572, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4201 + }, + { + "epoch": 0.20990059443528647, + "grad_norm": 0.1627621203660965, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4202 + }, + { + "epoch": 0.20995054698036866, + "grad_norm": 0.26291847229003906, + "learning_rate": 0.0001, + "loss": 0.0128, + "step": 4203 + }, + { + "epoch": 0.21000049952545083, + "grad_norm": 0.22646832466125488, + "learning_rate": 0.0001, + "loss": 0.0287, + "step": 4204 + }, + { + "epoch": 0.210050452070533, + "grad_norm": 0.19826363027095795, + "learning_rate": 0.0001, + "loss": 0.1301, + "step": 4205 + }, + { + "epoch": 
0.21010040461561516, + "grad_norm": 0.2534175515174866, + "learning_rate": 0.0001, + "loss": 0.0218, + "step": 4206 + }, + { + "epoch": 0.21015035716069733, + "grad_norm": 0.18850478529930115, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4207 + }, + { + "epoch": 0.21020030970577952, + "grad_norm": 0.21454490721225739, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 4208 + }, + { + "epoch": 0.21025026225086169, + "grad_norm": 0.21800166368484497, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4209 + }, + { + "epoch": 0.21030021479594385, + "grad_norm": 0.17966030538082123, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4210 + }, + { + "epoch": 0.21035016734102602, + "grad_norm": 0.16780051589012146, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4211 + }, + { + "epoch": 0.21040011988610818, + "grad_norm": 0.18592669069766998, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 4212 + }, + { + "epoch": 0.21045007243119038, + "grad_norm": 0.18536525964736938, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 4213 + }, + { + "epoch": 0.21050002497627254, + "grad_norm": 0.15427260100841522, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4214 + }, + { + "epoch": 0.2105499775213547, + "grad_norm": 0.18055404722690582, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4215 + }, + { + "epoch": 0.21059993006643687, + "grad_norm": 0.1736365556716919, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4216 + }, + { + "epoch": 0.21064988261151907, + "grad_norm": 0.1716710925102234, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4217 + }, + { + "epoch": 0.21069983515660123, + "grad_norm": 0.22882360219955444, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4218 + }, + { + "epoch": 0.2107497877016834, + "grad_norm": 0.21301129460334778, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4219 + }, + { + "epoch": 0.21079974024676557, + "grad_norm": 0.1581575721502304, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 4220 + }, + { + "epoch": 0.21084969279184773, + "grad_norm": 0.16256435215473175, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4221 + }, + { + "epoch": 0.21089964533692993, + "grad_norm": 0.1600835770368576, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4222 + }, + { + "epoch": 0.2109495978820121, + "grad_norm": 0.2166055291891098, + "learning_rate": 0.0001, + "loss": 0.007, + "step": 4223 + }, + { + "epoch": 0.21099955042709426, + "grad_norm": 0.20574504137039185, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4224 + }, + { + "epoch": 0.21104950297217642, + "grad_norm": 0.1450815051794052, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4225 + }, + { + "epoch": 0.21109945551725862, + "grad_norm": 0.15527039766311646, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4226 + }, + { + "epoch": 0.21114940806234078, + "grad_norm": 0.21529436111450195, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 4227 + }, + { + "epoch": 0.21119936060742295, + "grad_norm": 0.1452944278717041, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4228 + }, + { + "epoch": 0.2112493131525051, + "grad_norm": 0.13558822870254517, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4229 + }, + { + "epoch": 0.21129926569758728, + "grad_norm": 0.18689337372779846, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 4230 + }, + { + "epoch": 0.21134921824266947, + "grad_norm": 0.15414097905158997, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4231 + }, + { + "epoch": 
0.21139917078775164, + "grad_norm": 0.14526303112506866, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 4232 + }, + { + "epoch": 0.2114491233328338, + "grad_norm": 0.1287827044725418, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4233 + }, + { + "epoch": 0.21149907587791597, + "grad_norm": 0.17256779968738556, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 4234 + }, + { + "epoch": 0.21154902842299816, + "grad_norm": 0.18814218044281006, + "learning_rate": 0.0001, + "loss": 0.1297, + "step": 4235 + }, + { + "epoch": 0.21159898096808033, + "grad_norm": 0.13376159965991974, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4236 + }, + { + "epoch": 0.2116489335131625, + "grad_norm": 0.10943929851055145, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4237 + }, + { + "epoch": 0.21169888605824466, + "grad_norm": 0.17092101275920868, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 4238 + }, + { + "epoch": 0.21174883860332683, + "grad_norm": 0.17001031339168549, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4239 + }, + { + "epoch": 0.21179879114840902, + "grad_norm": 0.1314164251089096, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 4240 + }, + { + "epoch": 0.2118487436934912, + "grad_norm": 0.1181357353925705, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4241 + }, + { + "epoch": 0.21189869623857335, + "grad_norm": 0.17633064091205597, + "learning_rate": 0.0001, + "loss": 0.0209, + "step": 4242 + }, + { + "epoch": 0.21194864878365552, + "grad_norm": 0.1303318440914154, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4243 + }, + { + "epoch": 0.2119986013287377, + "grad_norm": 0.1325073540210724, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4244 + }, + { + "epoch": 0.21204855387381988, + "grad_norm": 0.13621947169303894, + "learning_rate": 0.0001, + "loss": 0.1243, + "step": 4245 + }, + { + "epoch": 0.21209850641890204, + "grad_norm": 0.11974945664405823, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4246 + }, + { + "epoch": 0.2121484589639842, + "grad_norm": 0.13084857165813446, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4247 + }, + { + "epoch": 0.21219841150906638, + "grad_norm": 0.10031858831644058, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4248 + }, + { + "epoch": 0.21224836405414857, + "grad_norm": 0.09924881905317307, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4249 + }, + { + "epoch": 0.21229831659923074, + "grad_norm": 0.10711529850959778, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4250 + }, + { + "epoch": 0.2123482691443129, + "grad_norm": 0.15978530049324036, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 4251 + }, + { + "epoch": 0.21239822168939507, + "grad_norm": 0.11530689895153046, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4252 + }, + { + "epoch": 0.21244817423447723, + "grad_norm": 0.16005538403987885, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 4253 + }, + { + "epoch": 0.21249812677955943, + "grad_norm": 0.2016419917345047, + "learning_rate": 0.0001, + "loss": 0.01, + "step": 4254 + }, + { + "epoch": 0.2125480793246416, + "grad_norm": 0.19257207214832306, + "learning_rate": 0.0001, + "loss": 0.0323, + "step": 4255 + }, + { + "epoch": 0.21259803186972376, + "grad_norm": 0.22692252695560455, + "learning_rate": 0.0001, + "loss": 0.1342, + "step": 4256 + }, + { + "epoch": 0.21264798441480592, + "grad_norm": 0.1919603794813156, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4257 + }, + { + "epoch": 
0.21269793695988812, + "grad_norm": 0.19250603020191193, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4258 + }, + { + "epoch": 0.21274788950497028, + "grad_norm": 0.1488431692123413, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 4259 + }, + { + "epoch": 0.21279784205005245, + "grad_norm": 0.12799213826656342, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4260 + }, + { + "epoch": 0.21284779459513462, + "grad_norm": 0.147190660238266, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4261 + }, + { + "epoch": 0.21289774714021678, + "grad_norm": 0.13499557971954346, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4262 + }, + { + "epoch": 0.21294769968529897, + "grad_norm": 0.19965465366840363, + "learning_rate": 0.0001, + "loss": 0.035, + "step": 4263 + }, + { + "epoch": 0.21299765223038114, + "grad_norm": 0.23849722743034363, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 4264 + }, + { + "epoch": 0.2130476047754633, + "grad_norm": 0.21533402800559998, + "learning_rate": 0.0001, + "loss": 0.0373, + "step": 4265 + }, + { + "epoch": 0.21309755732054547, + "grad_norm": 0.1531350463628769, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4266 + }, + { + "epoch": 0.21314750986562767, + "grad_norm": 0.2190825492143631, + "learning_rate": 0.0001, + "loss": 0.0494, + "step": 4267 + }, + { + "epoch": 0.21319746241070983, + "grad_norm": 0.1599700152873993, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4268 + }, + { + "epoch": 0.213247414955792, + "grad_norm": 0.18715474009513855, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 4269 + }, + { + "epoch": 0.21329736750087416, + "grad_norm": 0.14991837739944458, + "learning_rate": 0.0001, + "loss": 0.0107, + "step": 4270 + }, + { + "epoch": 0.21334732004595633, + "grad_norm": 0.14337685704231262, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4271 + }, + { + "epoch": 0.21339727259103852, + "grad_norm": 0.13584622740745544, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 4272 + }, + { + "epoch": 0.2134472251361207, + "grad_norm": 0.13832040131092072, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4273 + }, + { + "epoch": 0.21349717768120285, + "grad_norm": 0.122470922768116, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4274 + }, + { + "epoch": 0.21354713022628502, + "grad_norm": 0.16802720725536346, + "learning_rate": 0.0001, + "loss": 0.129, + "step": 4275 + }, + { + "epoch": 0.2135970827713672, + "grad_norm": 0.1664104014635086, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 4276 + }, + { + "epoch": 0.21364703531644938, + "grad_norm": 0.11558624356985092, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4277 + }, + { + "epoch": 0.21369698786153155, + "grad_norm": 0.16769741475582123, + "learning_rate": 0.0001, + "loss": 0.1297, + "step": 4278 + }, + { + "epoch": 0.2137469404066137, + "grad_norm": 0.12920205295085907, + "learning_rate": 0.0001, + "loss": 0.0155, + "step": 4279 + }, + { + "epoch": 0.21379689295169588, + "grad_norm": 0.1344211995601654, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4280 + }, + { + "epoch": 0.21384684549677807, + "grad_norm": 0.12982967495918274, + "learning_rate": 0.0001, + "loss": 0.0068, + "step": 4281 + }, + { + "epoch": 0.21389679804186024, + "grad_norm": 0.13000327348709106, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4282 + }, + { + "epoch": 0.2139467505869424, + "grad_norm": 0.12725940346717834, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 4283 + }, + { + "epoch": 
0.21399670313202457, + "grad_norm": 0.13321107625961304, + "learning_rate": 0.0001, + "loss": 0.0114, + "step": 4284 + }, + { + "epoch": 0.21404665567710676, + "grad_norm": 0.12578776478767395, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4285 + }, + { + "epoch": 0.21409660822218893, + "grad_norm": 0.11479800939559937, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4286 + }, + { + "epoch": 0.2141465607672711, + "grad_norm": 0.10291586816310883, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4287 + }, + { + "epoch": 0.21419651331235326, + "grad_norm": 0.08830348402261734, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4288 + }, + { + "epoch": 0.21424646585743543, + "grad_norm": 0.18698649108409882, + "learning_rate": 0.0001, + "loss": 0.0146, + "step": 4289 + }, + { + "epoch": 0.21429641840251762, + "grad_norm": 0.09514685720205307, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4290 + }, + { + "epoch": 0.21434637094759978, + "grad_norm": 0.1653391271829605, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 4291 + }, + { + "epoch": 0.21439632349268195, + "grad_norm": 0.12695670127868652, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4292 + }, + { + "epoch": 0.21444627603776412, + "grad_norm": 0.15202488005161285, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4293 + }, + { + "epoch": 0.21449622858284628, + "grad_norm": 0.12349159270524979, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4294 + }, + { + "epoch": 0.21454618112792848, + "grad_norm": 0.14737366139888763, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 4295 + }, + { + "epoch": 0.21459613367301064, + "grad_norm": 0.14929543435573578, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4296 + }, + { + "epoch": 0.2146460862180928, + "grad_norm": 0.11772896349430084, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4297 + }, + { + "epoch": 0.21469603876317497, + "grad_norm": 0.15578466653823853, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4298 + }, + { + "epoch": 0.21474599130825717, + "grad_norm": 0.17868292331695557, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 4299 + }, + { + "epoch": 0.21479594385333933, + "grad_norm": 0.1586223989725113, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4300 + }, + { + "epoch": 0.2148458963984215, + "grad_norm": 0.1411726027727127, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4301 + }, + { + "epoch": 0.21489584894350366, + "grad_norm": 0.11090930551290512, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4302 + }, + { + "epoch": 0.21494580148858583, + "grad_norm": 0.16497014462947845, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 4303 + }, + { + "epoch": 0.21499575403366802, + "grad_norm": 0.16599206626415253, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4304 + }, + { + "epoch": 0.2150457065787502, + "grad_norm": 0.14063698053359985, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4305 + }, + { + "epoch": 0.21509565912383236, + "grad_norm": 0.15015730261802673, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4306 + }, + { + "epoch": 0.21514561166891452, + "grad_norm": 0.14101958274841309, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4307 + }, + { + "epoch": 0.21519556421399672, + "grad_norm": 0.16568027436733246, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4308 + }, + { + "epoch": 0.21524551675907888, + "grad_norm": 0.18557530641555786, + "learning_rate": 0.0001, + "loss": 0.0203, + "step": 4309 + }, + { + "epoch": 
0.21529546930416105, + "grad_norm": 0.15642164647579193, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4310 + }, + { + "epoch": 0.2153454218492432, + "grad_norm": 0.13234943151474, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4311 + }, + { + "epoch": 0.21539537439432538, + "grad_norm": 0.16067659854888916, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4312 + }, + { + "epoch": 0.21544532693940757, + "grad_norm": 0.15500220656394958, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4313 + }, + { + "epoch": 0.21549527948448974, + "grad_norm": 0.16149409115314484, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4314 + }, + { + "epoch": 0.2155452320295719, + "grad_norm": 0.12132501602172852, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 4315 + }, + { + "epoch": 0.21559518457465407, + "grad_norm": 0.11409708112478256, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4316 + }, + { + "epoch": 0.21564513711973626, + "grad_norm": 0.14359430968761444, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4317 + }, + { + "epoch": 0.21569508966481843, + "grad_norm": 0.16272346675395966, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4318 + }, + { + "epoch": 0.2157450422099006, + "grad_norm": 0.1134307011961937, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4319 + }, + { + "epoch": 0.21579499475498276, + "grad_norm": 0.12981785833835602, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4320 + }, + { + "epoch": 0.21584494730006493, + "grad_norm": 0.16362015902996063, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 4321 + }, + { + "epoch": 0.21589489984514712, + "grad_norm": 0.10933832079172134, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4322 + }, + { + "epoch": 0.21594485239022929, + "grad_norm": 0.12219497561454773, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4323 + }, + { + "epoch": 0.21599480493531145, + "grad_norm": 0.14065565168857574, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 4324 + }, + { + "epoch": 0.21604475748039362, + "grad_norm": 0.08794839680194855, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4325 + }, + { + "epoch": 0.2160947100254758, + "grad_norm": 0.10252576321363449, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4326 + }, + { + "epoch": 0.21614466257055798, + "grad_norm": 0.15452761948108673, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 4327 + }, + { + "epoch": 0.21619461511564014, + "grad_norm": 0.09269049018621445, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4328 + }, + { + "epoch": 0.2162445676607223, + "grad_norm": 0.12631778419017792, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4329 + }, + { + "epoch": 0.21629452020580447, + "grad_norm": 0.10867051780223846, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4330 + }, + { + "epoch": 0.21634447275088667, + "grad_norm": 0.13131721317768097, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4331 + }, + { + "epoch": 0.21639442529596883, + "grad_norm": 0.1794922947883606, + "learning_rate": 0.0001, + "loss": 0.1281, + "step": 4332 + }, + { + "epoch": 0.216444377841051, + "grad_norm": 0.14135338366031647, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 4333 + }, + { + "epoch": 0.21649433038613317, + "grad_norm": 0.13941152393817902, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4334 + }, + { + "epoch": 0.21654428293121533, + "grad_norm": 0.11375284194946289, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4335 + }, + { + "epoch": 
0.21659423547629753, + "grad_norm": 0.17664550244808197, + "learning_rate": 0.0001, + "loss": 0.0066, + "step": 4336 + }, + { + "epoch": 0.2166441880213797, + "grad_norm": 0.12764304876327515, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4337 + }, + { + "epoch": 0.21669414056646186, + "grad_norm": 0.10781477391719818, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4338 + }, + { + "epoch": 0.21674409311154402, + "grad_norm": 0.15763962268829346, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 4339 + }, + { + "epoch": 0.21679404565662622, + "grad_norm": 0.16717088222503662, + "learning_rate": 0.0001, + "loss": 0.0083, + "step": 4340 + }, + { + "epoch": 0.21684399820170838, + "grad_norm": 0.14209845662117004, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4341 + }, + { + "epoch": 0.21689395074679055, + "grad_norm": 0.18583010137081146, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 4342 + }, + { + "epoch": 0.21694390329187271, + "grad_norm": 0.14677314460277557, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4343 + }, + { + "epoch": 0.21699385583695488, + "grad_norm": 0.15172691643238068, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4344 + }, + { + "epoch": 0.21704380838203707, + "grad_norm": 0.1289059966802597, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4345 + }, + { + "epoch": 0.21709376092711924, + "grad_norm": 0.15514607727527618, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 4346 + }, + { + "epoch": 0.2171437134722014, + "grad_norm": 0.12546038627624512, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4347 + }, + { + "epoch": 0.21719366601728357, + "grad_norm": 0.1917644888162613, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4348 + }, + { + "epoch": 0.21724361856236576, + "grad_norm": 0.13786713778972626, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4349 + }, + { + "epoch": 0.21729357110744793, + "grad_norm": 0.15932053327560425, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4350 + }, + { + "epoch": 0.2173435236525301, + "grad_norm": 0.14663904905319214, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4351 + }, + { + "epoch": 0.21739347619761226, + "grad_norm": 0.12990739941596985, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4352 + }, + { + "epoch": 0.21744342874269443, + "grad_norm": 0.13212546706199646, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4353 + }, + { + "epoch": 0.21749338128777662, + "grad_norm": 0.11268505454063416, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4354 + }, + { + "epoch": 0.2175433338328588, + "grad_norm": 0.13951392471790314, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4355 + }, + { + "epoch": 0.21759328637794095, + "grad_norm": 0.1531682312488556, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4356 + }, + { + "epoch": 0.21764323892302312, + "grad_norm": 0.1239812821149826, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4357 + }, + { + "epoch": 0.2176931914681053, + "grad_norm": 0.1139494925737381, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4358 + }, + { + "epoch": 0.21774314401318748, + "grad_norm": 0.14543239772319794, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4359 + }, + { + "epoch": 0.21779309655826964, + "grad_norm": 0.146283820271492, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4360 + }, + { + "epoch": 0.2178430491033518, + "grad_norm": 0.08576687425374985, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4361 + }, + { + "epoch": 
0.21789300164843398, + "grad_norm": 0.13765421509742737, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4362 + }, + { + "epoch": 0.21794295419351617, + "grad_norm": 0.13816723227500916, + "learning_rate": 0.0001, + "loss": 0.0062, + "step": 4363 + }, + { + "epoch": 0.21799290673859834, + "grad_norm": 0.10513364523649216, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4364 + }, + { + "epoch": 0.2180428592836805, + "grad_norm": 0.12412042170763016, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4365 + }, + { + "epoch": 0.21809281182876267, + "grad_norm": 0.1260640025138855, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4366 + }, + { + "epoch": 0.21814276437384486, + "grad_norm": 0.09693204611539841, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4367 + }, + { + "epoch": 0.21819271691892703, + "grad_norm": 0.2052062749862671, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 4368 + }, + { + "epoch": 0.2182426694640092, + "grad_norm": 0.11390062421560287, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4369 + }, + { + "epoch": 0.21829262200909136, + "grad_norm": 0.13275618851184845, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4370 + }, + { + "epoch": 0.21834257455417352, + "grad_norm": 0.10427042096853256, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4371 + }, + { + "epoch": 0.21839252709925572, + "grad_norm": 0.1713639497756958, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4372 + }, + { + "epoch": 0.21844247964433788, + "grad_norm": 0.11239568144083023, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4373 + }, + { + "epoch": 0.21849243218942005, + "grad_norm": 0.12489471584558487, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4374 + }, + { + "epoch": 0.21854238473450222, + "grad_norm": 0.11376647651195526, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4375 + }, + { + "epoch": 0.21859233727958438, + "grad_norm": 0.11471616476774216, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4376 + }, + { + "epoch": 0.21864228982466657, + "grad_norm": 0.1451781988143921, + "learning_rate": 0.0001, + "loss": 0.0156, + "step": 4377 + }, + { + "epoch": 0.21869224236974874, + "grad_norm": 0.1503412276506424, + "learning_rate": 0.0001, + "loss": 0.1284, + "step": 4378 + }, + { + "epoch": 0.2187421949148309, + "grad_norm": 0.12692397832870483, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4379 + }, + { + "epoch": 0.21879214745991307, + "grad_norm": 0.1768447905778885, + "learning_rate": 0.0001, + "loss": 0.1301, + "step": 4380 + }, + { + "epoch": 0.21884210000499527, + "grad_norm": 0.13067074120044708, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4381 + }, + { + "epoch": 0.21889205255007743, + "grad_norm": 0.16691891849040985, + "learning_rate": 0.0001, + "loss": 0.1331, + "step": 4382 + }, + { + "epoch": 0.2189420050951596, + "grad_norm": 0.13650310039520264, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 4383 + }, + { + "epoch": 0.21899195764024176, + "grad_norm": 0.14607511460781097, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4384 + }, + { + "epoch": 0.21904191018532393, + "grad_norm": 0.21645723283290863, + "learning_rate": 0.0001, + "loss": 0.009, + "step": 4385 + }, + { + "epoch": 0.21909186273040612, + "grad_norm": 0.1274375021457672, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4386 + }, + { + "epoch": 0.2191418152754883, + "grad_norm": 0.14975613355636597, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4387 + }, + { + "epoch": 
0.21919176782057045, + "grad_norm": 0.14077943563461304, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 4388 + }, + { + "epoch": 0.21924172036565262, + "grad_norm": 0.10249077528715134, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4389 + }, + { + "epoch": 0.21929167291073481, + "grad_norm": 0.12964315712451935, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4390 + }, + { + "epoch": 0.21934162545581698, + "grad_norm": 0.14250214397907257, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4391 + }, + { + "epoch": 0.21939157800089915, + "grad_norm": 0.16975872218608856, + "learning_rate": 0.0001, + "loss": 0.0153, + "step": 4392 + }, + { + "epoch": 0.2194415305459813, + "grad_norm": 0.16080544888973236, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4393 + }, + { + "epoch": 0.21949148309106348, + "grad_norm": 0.12333311140537262, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4394 + }, + { + "epoch": 0.21954143563614567, + "grad_norm": 0.14829593896865845, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4395 + }, + { + "epoch": 0.21959138818122784, + "grad_norm": 0.20305153727531433, + "learning_rate": 0.0001, + "loss": 0.0098, + "step": 4396 + }, + { + "epoch": 0.21964134072631, + "grad_norm": 0.16328726708889008, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4397 + }, + { + "epoch": 0.21969129327139217, + "grad_norm": 0.14184680581092834, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4398 + }, + { + "epoch": 0.21974124581647436, + "grad_norm": 0.13325262069702148, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4399 + }, + { + "epoch": 0.21979119836155653, + "grad_norm": 0.1945600062608719, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 4400 + }, + { + "epoch": 0.2198411509066387, + "grad_norm": 0.13565166294574738, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4401 + }, + { + "epoch": 0.21989110345172086, + "grad_norm": 0.16485242545604706, + "learning_rate": 0.0001, + "loss": 0.0099, + "step": 4402 + }, + { + "epoch": 0.21994105599680303, + "grad_norm": 0.15928266942501068, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4403 + }, + { + "epoch": 0.21999100854188522, + "grad_norm": 0.11788854748010635, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4404 + }, + { + "epoch": 0.22004096108696738, + "grad_norm": 0.15162664651870728, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4405 + }, + { + "epoch": 0.22009091363204955, + "grad_norm": 0.15778227150440216, + "learning_rate": 0.0001, + "loss": 0.1258, + "step": 4406 + }, + { + "epoch": 0.22014086617713172, + "grad_norm": 0.13462789356708527, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4407 + }, + { + "epoch": 0.2201908187222139, + "grad_norm": 0.11901023983955383, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4408 + }, + { + "epoch": 0.22024077126729608, + "grad_norm": 0.13538099825382233, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4409 + }, + { + "epoch": 0.22029072381237824, + "grad_norm": 0.09221425652503967, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4410 + }, + { + "epoch": 0.2203406763574604, + "grad_norm": 0.1501673012971878, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4411 + }, + { + "epoch": 0.22039062890254257, + "grad_norm": 0.13822585344314575, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4412 + }, + { + "epoch": 0.22044058144762477, + "grad_norm": 0.17410029470920563, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 4413 + }, + { + "epoch": 
0.22049053399270693, + "grad_norm": 0.14502236247062683, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4414 + }, + { + "epoch": 0.2205404865377891, + "grad_norm": 0.11231149733066559, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4415 + }, + { + "epoch": 0.22059043908287126, + "grad_norm": 0.13136330246925354, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4416 + }, + { + "epoch": 0.22064039162795343, + "grad_norm": 0.1950877606868744, + "learning_rate": 0.0001, + "loss": 0.0087, + "step": 4417 + }, + { + "epoch": 0.22069034417303562, + "grad_norm": 0.15584665536880493, + "learning_rate": 0.0001, + "loss": 0.1251, + "step": 4418 + }, + { + "epoch": 0.2207402967181178, + "grad_norm": 0.15884505212306976, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4419 + }, + { + "epoch": 0.22079024926319996, + "grad_norm": 0.17914779484272003, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4420 + }, + { + "epoch": 0.22084020180828212, + "grad_norm": 0.13178230822086334, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4421 + }, + { + "epoch": 0.22089015435336432, + "grad_norm": 0.15709564089775085, + "learning_rate": 0.0001, + "loss": 0.0063, + "step": 4422 + }, + { + "epoch": 0.22094010689844648, + "grad_norm": 0.1288858950138092, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4423 + }, + { + "epoch": 0.22099005944352865, + "grad_norm": 0.1316152811050415, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4424 + }, + { + "epoch": 0.2210400119886108, + "grad_norm": 0.11136738210916519, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4425 + }, + { + "epoch": 0.22108996453369298, + "grad_norm": 0.12966232001781464, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4426 + }, + { + "epoch": 0.22113991707877517, + "grad_norm": 0.11042344570159912, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4427 + }, + { + "epoch": 0.22118986962385734, + "grad_norm": 0.13218040764331818, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4428 + }, + { + "epoch": 0.2212398221689395, + "grad_norm": 0.13374508917331696, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4429 + }, + { + "epoch": 0.22128977471402167, + "grad_norm": 0.1453903168439865, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4430 + }, + { + "epoch": 0.22133972725910386, + "grad_norm": 0.14558210968971252, + "learning_rate": 0.0001, + "loss": 0.0092, + "step": 4431 + }, + { + "epoch": 0.22138967980418603, + "grad_norm": 0.11048490554094315, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4432 + }, + { + "epoch": 0.2214396323492682, + "grad_norm": 0.10832061618566513, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4433 + }, + { + "epoch": 0.22148958489435036, + "grad_norm": 0.09646496921777725, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4434 + }, + { + "epoch": 0.22153953743943253, + "grad_norm": 0.1282537430524826, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4435 + }, + { + "epoch": 0.22158948998451472, + "grad_norm": 0.1264013797044754, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4436 + }, + { + "epoch": 0.2216394425295969, + "grad_norm": 0.08974157273769379, + "learning_rate": 0.0001, + "loss": 0.0005, + "step": 4437 + }, + { + "epoch": 0.22168939507467905, + "grad_norm": 0.1014566645026207, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4438 + }, + { + "epoch": 0.22173934761976122, + "grad_norm": 0.15370327234268188, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4439 + }, + { + "epoch": 
0.2217893001648434, + "grad_norm": 0.146701380610466, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4440 + }, + { + "epoch": 0.22183925270992558, + "grad_norm": 0.12912802398204803, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4441 + }, + { + "epoch": 0.22188920525500774, + "grad_norm": 0.26630645990371704, + "learning_rate": 0.0001, + "loss": 0.1455, + "step": 4442 + }, + { + "epoch": 0.2219391578000899, + "grad_norm": 0.20075836777687073, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4443 + }, + { + "epoch": 0.22198911034517207, + "grad_norm": 0.17859524488449097, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4444 + }, + { + "epoch": 0.22203906289025427, + "grad_norm": 0.16327299177646637, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4445 + }, + { + "epoch": 0.22208901543533643, + "grad_norm": 0.2238943725824356, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 4446 + }, + { + "epoch": 0.2221389679804186, + "grad_norm": 0.1894819736480713, + "learning_rate": 0.0001, + "loss": 0.0234, + "step": 4447 + }, + { + "epoch": 0.22218892052550077, + "grad_norm": 0.14911970496177673, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4448 + }, + { + "epoch": 0.22223887307058296, + "grad_norm": 0.217206209897995, + "learning_rate": 0.0001, + "loss": 0.0295, + "step": 4449 + }, + { + "epoch": 0.22228882561566513, + "grad_norm": 0.17523138225078583, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4450 + }, + { + "epoch": 0.2223387781607473, + "grad_norm": 0.1806574910879135, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 4451 + }, + { + "epoch": 0.22238873070582946, + "grad_norm": 0.1915900707244873, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4452 + }, + { + "epoch": 0.22243868325091162, + "grad_norm": 0.14449916779994965, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4453 + }, + { + "epoch": 0.22248863579599382, + "grad_norm": 0.19418743252754211, + "learning_rate": 0.0001, + "loss": 0.1381, + "step": 4454 + }, + { + "epoch": 0.22253858834107598, + "grad_norm": 0.16119720041751862, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4455 + }, + { + "epoch": 0.22258854088615815, + "grad_norm": 0.18035845458507538, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4456 + }, + { + "epoch": 0.22263849343124031, + "grad_norm": 0.19140702486038208, + "learning_rate": 0.0001, + "loss": 0.0138, + "step": 4457 + }, + { + "epoch": 0.22268844597632248, + "grad_norm": 0.16256973147392273, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4458 + }, + { + "epoch": 0.22273839852140467, + "grad_norm": 0.1860375851392746, + "learning_rate": 0.0001, + "loss": 0.0082, + "step": 4459 + }, + { + "epoch": 0.22278835106648684, + "grad_norm": 0.13677756488323212, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4460 + }, + { + "epoch": 0.222838303611569, + "grad_norm": 0.11041472852230072, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4461 + }, + { + "epoch": 0.22288825615665117, + "grad_norm": 0.14720283448696136, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4462 + }, + { + "epoch": 0.22293820870173336, + "grad_norm": 0.16562065482139587, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 4463 + }, + { + "epoch": 0.22298816124681553, + "grad_norm": 0.14853616058826447, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4464 + }, + { + "epoch": 0.2230381137918977, + "grad_norm": 0.1486397683620453, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4465 + }, + { + "epoch": 
0.22308806633697986, + "grad_norm": 0.12761202454566956, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4466 + }, + { + "epoch": 0.22313801888206203, + "grad_norm": 0.1791621744632721, + "learning_rate": 0.0001, + "loss": 0.019, + "step": 4467 + }, + { + "epoch": 0.22318797142714422, + "grad_norm": 0.1505601406097412, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 4468 + }, + { + "epoch": 0.2232379239722264, + "grad_norm": 0.1726195514202118, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4469 + }, + { + "epoch": 0.22328787651730855, + "grad_norm": 0.1421716809272766, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4470 + }, + { + "epoch": 0.22333782906239072, + "grad_norm": 0.18978549540042877, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 4471 + }, + { + "epoch": 0.2233877816074729, + "grad_norm": 0.1522749811410904, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4472 + }, + { + "epoch": 0.22343773415255508, + "grad_norm": 0.16240574419498444, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4473 + }, + { + "epoch": 0.22348768669763724, + "grad_norm": 0.1436353325843811, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 4474 + }, + { + "epoch": 0.2235376392427194, + "grad_norm": 0.1728198081254959, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4475 + }, + { + "epoch": 0.22358759178780158, + "grad_norm": 0.154798224568367, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4476 + }, + { + "epoch": 0.22363754433288377, + "grad_norm": 0.19729819893836975, + "learning_rate": 0.0001, + "loss": 0.0194, + "step": 4477 + }, + { + "epoch": 0.22368749687796594, + "grad_norm": 0.16508978605270386, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4478 + }, + { + "epoch": 0.2237374494230481, + "grad_norm": 0.1573578119277954, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4479 + }, + { + "epoch": 0.22378740196813027, + "grad_norm": 0.15980330109596252, + "learning_rate": 0.0001, + "loss": 0.008, + "step": 4480 + }, + { + "epoch": 0.22383735451321246, + "grad_norm": 0.18848863244056702, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 4481 + }, + { + "epoch": 0.22388730705829463, + "grad_norm": 0.1481337696313858, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4482 + }, + { + "epoch": 0.2239372596033768, + "grad_norm": 0.13952820003032684, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4483 + }, + { + "epoch": 0.22398721214845896, + "grad_norm": 0.2021830826997757, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4484 + }, + { + "epoch": 0.22403716469354112, + "grad_norm": 0.16818086802959442, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4485 + }, + { + "epoch": 0.22408711723862332, + "grad_norm": 0.14066027104854584, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4486 + }, + { + "epoch": 0.22413706978370548, + "grad_norm": 0.16546331346035004, + "learning_rate": 0.0001, + "loss": 0.0137, + "step": 4487 + }, + { + "epoch": 0.22418702232878765, + "grad_norm": 0.19093790650367737, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4488 + }, + { + "epoch": 0.22423697487386982, + "grad_norm": 0.1949923038482666, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4489 + }, + { + "epoch": 0.224286927418952, + "grad_norm": 0.14151065051555634, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4490 + }, + { + "epoch": 0.22433687996403417, + "grad_norm": 0.14423765242099762, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4491 + }, + { + "epoch": 
0.22438683250911634, + "grad_norm": 0.17419838905334473, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4492 + }, + { + "epoch": 0.2244367850541985, + "grad_norm": 0.16264081001281738, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4493 + }, + { + "epoch": 0.22448673759928067, + "grad_norm": 0.11263524740934372, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 4494 + }, + { + "epoch": 0.22453669014436287, + "grad_norm": 0.14587555825710297, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4495 + }, + { + "epoch": 0.22458664268944503, + "grad_norm": 0.20780354738235474, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4496 + }, + { + "epoch": 0.2246365952345272, + "grad_norm": 0.16749094426631927, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4497 + }, + { + "epoch": 0.22468654777960936, + "grad_norm": 0.18972527980804443, + "learning_rate": 0.0001, + "loss": 0.1266, + "step": 4498 + }, + { + "epoch": 0.22473650032469153, + "grad_norm": 0.14989328384399414, + "learning_rate": 0.0001, + "loss": 0.1294, + "step": 4499 + }, + { + "epoch": 0.22478645286977372, + "grad_norm": 0.24338169395923615, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 4500 + }, + { + "epoch": 0.2248364054148559, + "grad_norm": 0.14295876026153564, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 4501 + }, + { + "epoch": 0.22488635795993805, + "grad_norm": 0.1524827480316162, + "learning_rate": 0.0001, + "loss": 0.005, + "step": 4502 + }, + { + "epoch": 0.22493631050502022, + "grad_norm": 0.15632474422454834, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 4503 + }, + { + "epoch": 0.22498626305010241, + "grad_norm": 0.24459053575992584, + "learning_rate": 0.0001, + "loss": 0.0189, + "step": 4504 + }, + { + "epoch": 0.22503621559518458, + "grad_norm": 0.1525755375623703, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 4505 + }, + { + "epoch": 0.22508616814026675, + "grad_norm": 0.18890053033828735, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4506 + }, + { + "epoch": 0.2251361206853489, + "grad_norm": 0.20076952874660492, + "learning_rate": 0.0001, + "loss": 0.0123, + "step": 4507 + }, + { + "epoch": 0.22518607323043108, + "grad_norm": 0.15680941939353943, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4508 + }, + { + "epoch": 0.22523602577551327, + "grad_norm": 0.15520115196704865, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4509 + }, + { + "epoch": 0.22528597832059544, + "grad_norm": 0.15858295559883118, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4510 + }, + { + "epoch": 0.2253359308656776, + "grad_norm": 0.15706707537174225, + "learning_rate": 0.0001, + "loss": 0.0036, + "step": 4511 + }, + { + "epoch": 0.22538588341075977, + "grad_norm": 0.24226431548595428, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 4512 + }, + { + "epoch": 0.22543583595584196, + "grad_norm": 0.1434573531150818, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4513 + }, + { + "epoch": 0.22548578850092413, + "grad_norm": 0.15608158707618713, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4514 + }, + { + "epoch": 0.2255357410460063, + "grad_norm": 0.17004357278347015, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4515 + }, + { + "epoch": 0.22558569359108846, + "grad_norm": 0.15387602150440216, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4516 + }, + { + "epoch": 0.22563564613617063, + "grad_norm": 0.24014617502689362, + "learning_rate": 0.0001, + "loss": 0.0204, + "step": 4517 + }, + { + "epoch": 
0.22568559868125282, + "grad_norm": 0.1563449203968048, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 4518 + }, + { + "epoch": 0.22573555122633499, + "grad_norm": 0.19403444230556488, + "learning_rate": 0.0001, + "loss": 0.0073, + "step": 4519 + }, + { + "epoch": 0.22578550377141715, + "grad_norm": 0.21866947412490845, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4520 + }, + { + "epoch": 0.22583545631649932, + "grad_norm": 0.16108526289463043, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4521 + }, + { + "epoch": 0.2258854088615815, + "grad_norm": 0.19449077546596527, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4522 + }, + { + "epoch": 0.22593536140666368, + "grad_norm": 0.16879399120807648, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4523 + }, + { + "epoch": 0.22598531395174584, + "grad_norm": 0.15814033150672913, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4524 + }, + { + "epoch": 0.226035266496828, + "grad_norm": 0.12969504296779633, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4525 + }, + { + "epoch": 0.22608521904191017, + "grad_norm": 0.28829824924468994, + "learning_rate": 0.0001, + "loss": 0.0378, + "step": 4526 + }, + { + "epoch": 0.22613517158699237, + "grad_norm": 0.18609116971492767, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4527 + }, + { + "epoch": 0.22618512413207453, + "grad_norm": 0.22282083332538605, + "learning_rate": 0.0001, + "loss": 0.0181, + "step": 4528 + }, + { + "epoch": 0.2262350766771567, + "grad_norm": 0.2312765121459961, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 4529 + }, + { + "epoch": 0.22628502922223886, + "grad_norm": 0.2788696885108948, + "learning_rate": 0.0001, + "loss": 0.0277, + "step": 4530 + }, + { + "epoch": 0.22633498176732106, + "grad_norm": 0.24940241873264313, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 4531 + }, + { + "epoch": 0.22638493431240322, + "grad_norm": 0.15805459022521973, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4532 + }, + { + "epoch": 0.2264348868574854, + "grad_norm": 0.17091397941112518, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4533 + }, + { + "epoch": 0.22648483940256756, + "grad_norm": 0.19967132806777954, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4534 + }, + { + "epoch": 0.22653479194764972, + "grad_norm": 0.1885724514722824, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4535 + }, + { + "epoch": 0.22658474449273192, + "grad_norm": 0.19311454892158508, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4536 + }, + { + "epoch": 0.22663469703781408, + "grad_norm": 0.1686359941959381, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4537 + }, + { + "epoch": 0.22668464958289625, + "grad_norm": 0.1670079529285431, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4538 + }, + { + "epoch": 0.2267346021279784, + "grad_norm": 0.19877101480960846, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 4539 + }, + { + "epoch": 0.22678455467306058, + "grad_norm": 0.1787259727716446, + "learning_rate": 0.0001, + "loss": 0.024, + "step": 4540 + }, + { + "epoch": 0.22683450721814277, + "grad_norm": 0.21194882690906525, + "learning_rate": 0.0001, + "loss": 0.027, + "step": 4541 + }, + { + "epoch": 0.22688445976322494, + "grad_norm": 0.21314354240894318, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4542 + }, + { + "epoch": 0.2269344123083071, + "grad_norm": 0.20278654992580414, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4543 + }, + { + "epoch": 
0.22698436485338927, + "grad_norm": 0.20400509238243103, + "learning_rate": 0.0001, + "loss": 0.0075, + "step": 4544 + }, + { + "epoch": 0.22703431739847146, + "grad_norm": 0.21238276362419128, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4545 + }, + { + "epoch": 0.22708426994355363, + "grad_norm": 0.16645924746990204, + "learning_rate": 0.0001, + "loss": 0.0133, + "step": 4546 + }, + { + "epoch": 0.2271342224886358, + "grad_norm": 0.16835932433605194, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4547 + }, + { + "epoch": 0.22718417503371796, + "grad_norm": 0.1959880292415619, + "learning_rate": 0.0001, + "loss": 0.0072, + "step": 4548 + }, + { + "epoch": 0.22723412757880013, + "grad_norm": 0.14479601383209229, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4549 + }, + { + "epoch": 0.22728408012388232, + "grad_norm": 0.17115344107151031, + "learning_rate": 0.0001, + "loss": 0.1326, + "step": 4550 + }, + { + "epoch": 0.2273340326689645, + "grad_norm": 0.18621008098125458, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4551 + }, + { + "epoch": 0.22738398521404665, + "grad_norm": 0.1633569449186325, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4552 + }, + { + "epoch": 0.22743393775912882, + "grad_norm": 0.1721550077199936, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4553 + }, + { + "epoch": 0.227483890304211, + "grad_norm": 0.1596277952194214, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4554 + }, + { + "epoch": 0.22753384284929318, + "grad_norm": 0.1794111728668213, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4555 + }, + { + "epoch": 0.22758379539437534, + "grad_norm": 0.11732343584299088, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4556 + }, + { + "epoch": 0.2276337479394575, + "grad_norm": 0.1759188324213028, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 4557 + }, + { + "epoch": 0.22768370048453968, + "grad_norm": 0.17292223870754242, + "learning_rate": 0.0001, + "loss": 0.1383, + "step": 4558 + }, + { + "epoch": 0.22773365302962187, + "grad_norm": 0.1223817765712738, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4559 + }, + { + "epoch": 0.22778360557470403, + "grad_norm": 0.1761341542005539, + "learning_rate": 0.0001, + "loss": 0.0118, + "step": 4560 + }, + { + "epoch": 0.2278335581197862, + "grad_norm": 0.149825319647789, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4561 + }, + { + "epoch": 0.22788351066486837, + "grad_norm": 0.21689951419830322, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 4562 + }, + { + "epoch": 0.22793346320995056, + "grad_norm": 0.208944171667099, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 4563 + }, + { + "epoch": 0.22798341575503273, + "grad_norm": 0.16500094532966614, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4564 + }, + { + "epoch": 0.2280333683001149, + "grad_norm": 0.15810826420783997, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4565 + }, + { + "epoch": 0.22808332084519706, + "grad_norm": 0.15343688428401947, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4566 + }, + { + "epoch": 0.22813327339027922, + "grad_norm": 0.1380101591348648, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4567 + }, + { + "epoch": 0.22818322593536142, + "grad_norm": 0.1527237594127655, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4568 + }, + { + "epoch": 0.22823317848044358, + "grad_norm": 0.19867700338363647, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 4569 + }, + { + "epoch": 
0.22828313102552575, + "grad_norm": 0.13763368129730225, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4570 + }, + { + "epoch": 0.22833308357060791, + "grad_norm": 0.14440086483955383, + "learning_rate": 0.0001, + "loss": 0.0064, + "step": 4571 + }, + { + "epoch": 0.2283830361156901, + "grad_norm": 0.16028337180614471, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4572 + }, + { + "epoch": 0.22843298866077227, + "grad_norm": 0.14554640650749207, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4573 + }, + { + "epoch": 0.22848294120585444, + "grad_norm": 0.1620796173810959, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4574 + }, + { + "epoch": 0.2285328937509366, + "grad_norm": 0.1451551765203476, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4575 + }, + { + "epoch": 0.22858284629601877, + "grad_norm": 0.1951354444026947, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 4576 + }, + { + "epoch": 0.22863279884110096, + "grad_norm": 0.13551858067512512, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 4577 + }, + { + "epoch": 0.22868275138618313, + "grad_norm": 0.19054898619651794, + "learning_rate": 0.0001, + "loss": 0.1258, + "step": 4578 + }, + { + "epoch": 0.2287327039312653, + "grad_norm": 0.16566289961338043, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 4579 + }, + { + "epoch": 0.22878265647634746, + "grad_norm": 0.1681358814239502, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 4580 + }, + { + "epoch": 0.22883260902142963, + "grad_norm": 0.16742542386054993, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4581 + }, + { + "epoch": 0.22888256156651182, + "grad_norm": 0.15897101163864136, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4582 + }, + { + "epoch": 0.228932514111594, + "grad_norm": 0.11888754367828369, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4583 + }, + { + "epoch": 0.22898246665667615, + "grad_norm": 0.15157833695411682, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4584 + }, + { + "epoch": 0.22903241920175832, + "grad_norm": 0.17365378141403198, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4585 + }, + { + "epoch": 0.2290823717468405, + "grad_norm": 0.125684916973114, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4586 + }, + { + "epoch": 0.22913232429192268, + "grad_norm": 0.21544265747070312, + "learning_rate": 0.0001, + "loss": 0.0267, + "step": 4587 + }, + { + "epoch": 0.22918227683700484, + "grad_norm": 0.18313392996788025, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4588 + }, + { + "epoch": 0.229232229382087, + "grad_norm": 0.19760024547576904, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4589 + }, + { + "epoch": 0.22928218192716918, + "grad_norm": 0.18522733449935913, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4590 + }, + { + "epoch": 0.22933213447225137, + "grad_norm": 0.1509527713060379, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4591 + }, + { + "epoch": 0.22938208701733354, + "grad_norm": 0.14842702448368073, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4592 + }, + { + "epoch": 0.2294320395624157, + "grad_norm": 0.18662361800670624, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 4593 + }, + { + "epoch": 0.22948199210749787, + "grad_norm": 0.21202261745929718, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4594 + }, + { + "epoch": 0.22953194465258006, + "grad_norm": 0.15809613466262817, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4595 + }, + { + "epoch": 
0.22958189719766223, + "grad_norm": 0.14328746497631073, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4596 + }, + { + "epoch": 0.2296318497427444, + "grad_norm": 0.18711528182029724, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4597 + }, + { + "epoch": 0.22968180228782656, + "grad_norm": 0.26266050338745117, + "learning_rate": 0.0001, + "loss": 0.0264, + "step": 4598 + }, + { + "epoch": 0.22973175483290872, + "grad_norm": 0.13397175073623657, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4599 + }, + { + "epoch": 0.22978170737799092, + "grad_norm": 0.19081763923168182, + "learning_rate": 0.0001, + "loss": 0.0127, + "step": 4600 + }, + { + "epoch": 0.22983165992307308, + "grad_norm": 0.2211369425058365, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4601 + }, + { + "epoch": 0.22988161246815525, + "grad_norm": 0.2626698315143585, + "learning_rate": 0.0001, + "loss": 0.0246, + "step": 4602 + }, + { + "epoch": 0.22993156501323742, + "grad_norm": 0.17903119325637817, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4603 + }, + { + "epoch": 0.2299815175583196, + "grad_norm": 0.20174230635166168, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4604 + }, + { + "epoch": 0.23003147010340178, + "grad_norm": 0.17506709694862366, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4605 + }, + { + "epoch": 0.23008142264848394, + "grad_norm": 0.18026068806648254, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4606 + }, + { + "epoch": 0.2301313751935661, + "grad_norm": 0.22232601046562195, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 4607 + }, + { + "epoch": 0.23018132773864827, + "grad_norm": 0.16278661787509918, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4608 + }, + { + "epoch": 0.23023128028373047, + "grad_norm": 0.15654826164245605, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 4609 + }, + { + "epoch": 0.23028123282881263, + "grad_norm": 0.21597489714622498, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 4610 + }, + { + "epoch": 0.2303311853738948, + "grad_norm": 0.16424532234668732, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4611 + }, + { + "epoch": 0.23038113791897696, + "grad_norm": 0.17856408655643463, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4612 + }, + { + "epoch": 0.23043109046405916, + "grad_norm": 0.19303633272647858, + "learning_rate": 0.0001, + "loss": 0.0157, + "step": 4613 + }, + { + "epoch": 0.23048104300914132, + "grad_norm": 0.17319756746292114, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4614 + }, + { + "epoch": 0.2305309955542235, + "grad_norm": 0.2423527091741562, + "learning_rate": 0.0001, + "loss": 0.0147, + "step": 4615 + }, + { + "epoch": 0.23058094809930565, + "grad_norm": 0.18511700630187988, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4616 + }, + { + "epoch": 0.23063090064438782, + "grad_norm": 0.18106338381767273, + "learning_rate": 0.0001, + "loss": 0.0065, + "step": 4617 + }, + { + "epoch": 0.23068085318947001, + "grad_norm": 0.18935202062129974, + "learning_rate": 0.0001, + "loss": 0.0057, + "step": 4618 + }, + { + "epoch": 0.23073080573455218, + "grad_norm": 0.20496496558189392, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 4619 + }, + { + "epoch": 0.23078075827963435, + "grad_norm": 0.17287981510162354, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4620 + }, + { + "epoch": 0.2308307108247165, + "grad_norm": 0.1822500377893448, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4621 + }, + { + "epoch": 
0.23088066336979868, + "grad_norm": 0.1774657666683197, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4622 + }, + { + "epoch": 0.23093061591488087, + "grad_norm": 0.18383657932281494, + "learning_rate": 0.0001, + "loss": 0.0039, + "step": 4623 + }, + { + "epoch": 0.23098056845996304, + "grad_norm": 0.15873225033283234, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4624 + }, + { + "epoch": 0.2310305210050452, + "grad_norm": 0.1833661049604416, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4625 + }, + { + "epoch": 0.23108047355012737, + "grad_norm": 0.2650780975818634, + "learning_rate": 0.0001, + "loss": 0.0235, + "step": 4626 + }, + { + "epoch": 0.23113042609520956, + "grad_norm": 0.18040095269680023, + "learning_rate": 0.0001, + "loss": 0.1233, + "step": 4627 + }, + { + "epoch": 0.23118037864029173, + "grad_norm": 0.12092218548059464, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4628 + }, + { + "epoch": 0.2312303311853739, + "grad_norm": 0.13542377948760986, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4629 + }, + { + "epoch": 0.23128028373045606, + "grad_norm": 0.16517415642738342, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4630 + }, + { + "epoch": 0.23133023627553823, + "grad_norm": 0.13673725724220276, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4631 + }, + { + "epoch": 0.23138018882062042, + "grad_norm": 0.17742253839969635, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 4632 + }, + { + "epoch": 0.23143014136570259, + "grad_norm": 0.11809150874614716, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4633 + }, + { + "epoch": 0.23148009391078475, + "grad_norm": 0.13940231502056122, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4634 + }, + { + "epoch": 0.23153004645586692, + "grad_norm": 0.1251850575208664, + "learning_rate": 0.0001, + "loss": 0.0096, + "step": 4635 + }, + { + "epoch": 0.2315799990009491, + "grad_norm": 0.14543288946151733, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4636 + }, + { + "epoch": 0.23162995154603128, + "grad_norm": 0.13916870951652527, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4637 + }, + { + "epoch": 0.23167990409111344, + "grad_norm": 0.15518821775913239, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4638 + }, + { + "epoch": 0.2317298566361956, + "grad_norm": 0.16682782769203186, + "learning_rate": 0.0001, + "loss": 0.0111, + "step": 4639 + }, + { + "epoch": 0.23177980918127777, + "grad_norm": 0.10688278824090958, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4640 + }, + { + "epoch": 0.23182976172635997, + "grad_norm": 0.19084042310714722, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4641 + }, + { + "epoch": 0.23187971427144213, + "grad_norm": 0.14959561824798584, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4642 + }, + { + "epoch": 0.2319296668165243, + "grad_norm": 0.13634942471981049, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4643 + }, + { + "epoch": 0.23197961936160647, + "grad_norm": 0.21182788908481598, + "learning_rate": 0.0001, + "loss": 0.0178, + "step": 4644 + }, + { + "epoch": 0.23202957190668866, + "grad_norm": 0.13618330657482147, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4645 + }, + { + "epoch": 0.23207952445177082, + "grad_norm": 0.1653374284505844, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4646 + }, + { + "epoch": 0.232129476996853, + "grad_norm": 0.17425723373889923, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4647 + }, + { + "epoch": 
0.23217942954193516, + "grad_norm": 0.13339298963546753, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4648 + }, + { + "epoch": 0.23222938208701732, + "grad_norm": 0.13863565027713776, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4649 + }, + { + "epoch": 0.23227933463209952, + "grad_norm": 0.15100158751010895, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 4650 + }, + { + "epoch": 0.23232928717718168, + "grad_norm": 0.1701284945011139, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 4651 + }, + { + "epoch": 0.23237923972226385, + "grad_norm": 0.20623533427715302, + "learning_rate": 0.0001, + "loss": 0.012, + "step": 4652 + }, + { + "epoch": 0.232429192267346, + "grad_norm": 0.13579228520393372, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4653 + }, + { + "epoch": 0.2324791448124282, + "grad_norm": 0.16983655095100403, + "learning_rate": 0.0001, + "loss": 0.0128, + "step": 4654 + }, + { + "epoch": 0.23252909735751037, + "grad_norm": 0.14610524475574493, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4655 + }, + { + "epoch": 0.23257904990259254, + "grad_norm": 0.14509043097496033, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4656 + }, + { + "epoch": 0.2326290024476747, + "grad_norm": 0.12849968671798706, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4657 + }, + { + "epoch": 0.23267895499275687, + "grad_norm": 0.1819840669631958, + "learning_rate": 0.0001, + "loss": 0.0093, + "step": 4658 + }, + { + "epoch": 0.23272890753783906, + "grad_norm": 0.12255731970071793, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4659 + }, + { + "epoch": 0.23277886008292123, + "grad_norm": 0.1469271183013916, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4660 + }, + { + "epoch": 0.2328288126280034, + "grad_norm": 0.15582425892353058, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4661 + }, + { + "epoch": 0.23287876517308556, + "grad_norm": 0.15972593426704407, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4662 + }, + { + "epoch": 0.23292871771816773, + "grad_norm": 0.17988871037960052, + "learning_rate": 0.0001, + "loss": 0.0079, + "step": 4663 + }, + { + "epoch": 0.23297867026324992, + "grad_norm": 0.15467095375061035, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4664 + }, + { + "epoch": 0.2330286228083321, + "grad_norm": 0.19708003103733063, + "learning_rate": 0.0001, + "loss": 0.0126, + "step": 4665 + }, + { + "epoch": 0.23307857535341425, + "grad_norm": 0.24028049409389496, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4666 + }, + { + "epoch": 0.23312852789849642, + "grad_norm": 0.15685932338237762, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4667 + }, + { + "epoch": 0.2331784804435786, + "grad_norm": 0.22158311307430267, + "learning_rate": 0.0001, + "loss": 0.1261, + "step": 4668 + }, + { + "epoch": 0.23322843298866078, + "grad_norm": 0.1905198097229004, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4669 + }, + { + "epoch": 0.23327838553374294, + "grad_norm": 0.132534459233284, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4670 + }, + { + "epoch": 0.2333283380788251, + "grad_norm": 0.19821573793888092, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 4671 + }, + { + "epoch": 0.23337829062390728, + "grad_norm": 0.2234257310628891, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4672 + }, + { + "epoch": 0.23342824316898947, + "grad_norm": 0.13207067549228668, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4673 + }, + { + "epoch": 
0.23347819571407163, + "grad_norm": 0.19883626699447632, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4674 + }, + { + "epoch": 0.2335281482591538, + "grad_norm": 0.21777626872062683, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 4675 + }, + { + "epoch": 0.23357810080423597, + "grad_norm": 0.14742055535316467, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4676 + }, + { + "epoch": 0.23362805334931816, + "grad_norm": 0.18589577078819275, + "learning_rate": 0.0001, + "loss": 0.0106, + "step": 4677 + }, + { + "epoch": 0.23367800589440033, + "grad_norm": 0.20645155012607574, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4678 + }, + { + "epoch": 0.2337279584394825, + "grad_norm": 0.19005605578422546, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4679 + }, + { + "epoch": 0.23377791098456466, + "grad_norm": 0.24131710827350616, + "learning_rate": 0.0001, + "loss": 0.0163, + "step": 4680 + }, + { + "epoch": 0.23382786352964682, + "grad_norm": 0.1495773047208786, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4681 + }, + { + "epoch": 0.23387781607472902, + "grad_norm": 0.2132454514503479, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4682 + }, + { + "epoch": 0.23392776861981118, + "grad_norm": 0.14416271448135376, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4683 + }, + { + "epoch": 0.23397772116489335, + "grad_norm": 0.1507241129875183, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4684 + }, + { + "epoch": 0.23402767370997551, + "grad_norm": 0.21209605038166046, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 4685 + }, + { + "epoch": 0.2340776262550577, + "grad_norm": 0.18812666833400726, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4686 + }, + { + "epoch": 0.23412757880013987, + "grad_norm": 0.14433957636356354, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4687 + }, + { + "epoch": 0.23417753134522204, + "grad_norm": 0.1366855502128601, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 4688 + }, + { + "epoch": 0.2342274838903042, + "grad_norm": 0.17665280401706696, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4689 + }, + { + "epoch": 0.23427743643538637, + "grad_norm": 0.1431378424167633, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4690 + }, + { + "epoch": 0.23432738898046857, + "grad_norm": 0.13964219391345978, + "learning_rate": 0.0001, + "loss": 0.1289, + "step": 4691 + }, + { + "epoch": 0.23437734152555073, + "grad_norm": 0.15812678635120392, + "learning_rate": 0.0001, + "loss": 0.006, + "step": 4692 + }, + { + "epoch": 0.2344272940706329, + "grad_norm": 0.14917327463626862, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4693 + }, + { + "epoch": 0.23447724661571506, + "grad_norm": 0.154142364859581, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4694 + }, + { + "epoch": 0.23452719916079726, + "grad_norm": 0.1305272877216339, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4695 + }, + { + "epoch": 0.23457715170587942, + "grad_norm": 0.13517135381698608, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 4696 + }, + { + "epoch": 0.2346271042509616, + "grad_norm": 0.1340615153312683, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4697 + }, + { + "epoch": 0.23467705679604375, + "grad_norm": 0.16920486092567444, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4698 + }, + { + "epoch": 0.23472700934112592, + "grad_norm": 0.14423584938049316, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4699 + }, + { + "epoch": 
0.2347769618862081, + "grad_norm": 0.13937827944755554, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4700 + }, + { + "epoch": 0.23482691443129028, + "grad_norm": 0.14475427567958832, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4701 + }, + { + "epoch": 0.23487686697637244, + "grad_norm": 0.2139853686094284, + "learning_rate": 0.0001, + "loss": 0.0097, + "step": 4702 + }, + { + "epoch": 0.2349268195214546, + "grad_norm": 0.13665148615837097, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4703 + }, + { + "epoch": 0.23497677206653678, + "grad_norm": 0.15878461301326752, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4704 + }, + { + "epoch": 0.23502672461161897, + "grad_norm": 0.12377498298883438, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 4705 + }, + { + "epoch": 0.23507667715670114, + "grad_norm": 0.13070978224277496, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4706 + }, + { + "epoch": 0.2351266297017833, + "grad_norm": 0.1305178552865982, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4707 + }, + { + "epoch": 0.23517658224686547, + "grad_norm": 0.11486855149269104, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4708 + }, + { + "epoch": 0.23522653479194766, + "grad_norm": 0.29576435685157776, + "learning_rate": 0.0001, + "loss": 0.015, + "step": 4709 + }, + { + "epoch": 0.23527648733702983, + "grad_norm": 0.19797728955745697, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4710 + }, + { + "epoch": 0.235326439882112, + "grad_norm": 0.2256006896495819, + "learning_rate": 0.0001, + "loss": 0.0084, + "step": 4711 + }, + { + "epoch": 0.23537639242719416, + "grad_norm": 0.2411932796239853, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4712 + }, + { + "epoch": 0.23542634497227632, + "grad_norm": 0.21745210886001587, + "learning_rate": 0.0001, + "loss": 0.0081, + "step": 4713 + }, + { + "epoch": 0.23547629751735852, + "grad_norm": 0.18216641247272491, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4714 + }, + { + "epoch": 0.23552625006244068, + "grad_norm": 0.16875037550926208, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4715 + }, + { + "epoch": 0.23557620260752285, + "grad_norm": 0.163890078663826, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4716 + }, + { + "epoch": 0.23562615515260502, + "grad_norm": 0.1671469360589981, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4717 + }, + { + "epoch": 0.2356761076976872, + "grad_norm": 0.21001598238945007, + "learning_rate": 0.0001, + "loss": 0.0132, + "step": 4718 + }, + { + "epoch": 0.23572606024276938, + "grad_norm": 0.16815181076526642, + "learning_rate": 0.0001, + "loss": 0.0086, + "step": 4719 + }, + { + "epoch": 0.23577601278785154, + "grad_norm": 0.1750739961862564, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4720 + }, + { + "epoch": 0.2358259653329337, + "grad_norm": 0.17087994515895844, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4721 + }, + { + "epoch": 0.23587591787801587, + "grad_norm": 0.20218272507190704, + "learning_rate": 0.0001, + "loss": 0.0071, + "step": 4722 + }, + { + "epoch": 0.23592587042309807, + "grad_norm": 0.17505745589733124, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4723 + }, + { + "epoch": 0.23597582296818023, + "grad_norm": 0.16807472705841064, + "learning_rate": 0.0001, + "loss": 0.0058, + "step": 4724 + }, + { + "epoch": 0.2360257755132624, + "grad_norm": 0.15496203303337097, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 4725 + }, + { + "epoch": 
0.23607572805834456, + "grad_norm": 0.13920453190803528, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4726 + }, + { + "epoch": 0.23612568060342676, + "grad_norm": 0.1694546490907669, + "learning_rate": 0.0001, + "loss": 0.011, + "step": 4727 + }, + { + "epoch": 0.23617563314850892, + "grad_norm": 0.1708100587129593, + "learning_rate": 0.0001, + "loss": 0.1258, + "step": 4728 + }, + { + "epoch": 0.2362255856935911, + "grad_norm": 0.14853255450725555, + "learning_rate": 0.0001, + "loss": 0.0024, + "step": 4729 + }, + { + "epoch": 0.23627553823867325, + "grad_norm": 0.14118385314941406, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4730 + }, + { + "epoch": 0.23632549078375542, + "grad_norm": 0.11876876652240753, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4731 + }, + { + "epoch": 0.23637544332883761, + "grad_norm": 0.14564667642116547, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4732 + }, + { + "epoch": 0.23642539587391978, + "grad_norm": 0.11417131870985031, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4733 + }, + { + "epoch": 0.23647534841900195, + "grad_norm": 0.1860118955373764, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4734 + }, + { + "epoch": 0.2365253009640841, + "grad_norm": 0.1611231565475464, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4735 + }, + { + "epoch": 0.2365752535091663, + "grad_norm": 0.10053136199712753, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4736 + }, + { + "epoch": 0.23662520605424847, + "grad_norm": 0.14076682925224304, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4737 + }, + { + "epoch": 0.23667515859933064, + "grad_norm": 0.31401774287223816, + "learning_rate": 0.0001, + "loss": 0.2707, + "step": 4738 + }, + { + "epoch": 0.2367251111444128, + "grad_norm": 0.17698919773101807, + "learning_rate": 0.0001, + "loss": 0.0069, + "step": 4739 + }, + { + "epoch": 0.23677506368949497, + "grad_norm": 0.16551457345485687, + "learning_rate": 0.0001, + "loss": 0.0212, + "step": 4740 + }, + { + "epoch": 0.23682501623457716, + "grad_norm": 0.1602415293455124, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4741 + }, + { + "epoch": 0.23687496877965933, + "grad_norm": 0.1949390023946762, + "learning_rate": 0.0001, + "loss": 0.1251, + "step": 4742 + }, + { + "epoch": 0.2369249213247415, + "grad_norm": 0.240828737616539, + "learning_rate": 0.0001, + "loss": 0.0175, + "step": 4743 + }, + { + "epoch": 0.23697487386982366, + "grad_norm": 0.17143350839614868, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4744 + }, + { + "epoch": 0.23702482641490583, + "grad_norm": 0.1447724550962448, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4745 + }, + { + "epoch": 0.23707477895998802, + "grad_norm": 0.15837585926055908, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4746 + }, + { + "epoch": 0.23712473150507019, + "grad_norm": 0.1372082233428955, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4747 + }, + { + "epoch": 0.23717468405015235, + "grad_norm": 0.19099166989326477, + "learning_rate": 0.0001, + "loss": 0.016, + "step": 4748 + }, + { + "epoch": 0.23722463659523452, + "grad_norm": 0.14058491587638855, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4749 + }, + { + "epoch": 0.2372745891403167, + "grad_norm": 0.16603919863700867, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4750 + }, + { + "epoch": 0.23732454168539888, + "grad_norm": 0.1321035921573639, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4751 + }, + { + "epoch": 
0.23737449423048104, + "grad_norm": 0.17788191139698029, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4752 + }, + { + "epoch": 0.2374244467755632, + "grad_norm": 0.2293657511472702, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4753 + }, + { + "epoch": 0.23747439932064537, + "grad_norm": 0.17875969409942627, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4754 + }, + { + "epoch": 0.23752435186572757, + "grad_norm": 0.15902917087078094, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4755 + }, + { + "epoch": 0.23757430441080973, + "grad_norm": 0.15832120180130005, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4756 + }, + { + "epoch": 0.2376242569558919, + "grad_norm": 0.22977592051029205, + "learning_rate": 0.0001, + "loss": 0.0067, + "step": 4757 + }, + { + "epoch": 0.23767420950097407, + "grad_norm": 0.15705567598342896, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4758 + }, + { + "epoch": 0.23772416204605626, + "grad_norm": 0.18113791942596436, + "learning_rate": 0.0001, + "loss": 0.0056, + "step": 4759 + }, + { + "epoch": 0.23777411459113842, + "grad_norm": 0.18111178278923035, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4760 + }, + { + "epoch": 0.2378240671362206, + "grad_norm": 0.1462104469537735, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4761 + }, + { + "epoch": 0.23787401968130276, + "grad_norm": 0.14665943384170532, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4762 + }, + { + "epoch": 0.23792397222638492, + "grad_norm": 0.21042731404304504, + "learning_rate": 0.0001, + "loss": 0.0055, + "step": 4763 + }, + { + "epoch": 0.23797392477146712, + "grad_norm": 0.1744718849658966, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4764 + }, + { + "epoch": 0.23802387731654928, + "grad_norm": 0.17191340029239655, + "learning_rate": 0.0001, + "loss": 0.0103, + "step": 4765 + }, + { + "epoch": 0.23807382986163145, + "grad_norm": 0.1377277672290802, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4766 + }, + { + "epoch": 0.2381237824067136, + "grad_norm": 0.1797107309103012, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4767 + }, + { + "epoch": 0.2381737349517958, + "grad_norm": 0.2009592205286026, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4768 + }, + { + "epoch": 0.23822368749687797, + "grad_norm": 0.14701995253562927, + "learning_rate": 0.0001, + "loss": 0.1262, + "step": 4769 + }, + { + "epoch": 0.23827364004196014, + "grad_norm": 0.22651974856853485, + "learning_rate": 0.0001, + "loss": 0.0139, + "step": 4770 + }, + { + "epoch": 0.2383235925870423, + "grad_norm": 0.17062291502952576, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4771 + }, + { + "epoch": 0.23837354513212447, + "grad_norm": 0.14761841297149658, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4772 + }, + { + "epoch": 0.23842349767720666, + "grad_norm": 0.14526380598545074, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4773 + }, + { + "epoch": 0.23847345022228883, + "grad_norm": 0.19379101693630219, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4774 + }, + { + "epoch": 0.238523402767371, + "grad_norm": 0.17698009312152863, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4775 + }, + { + "epoch": 0.23857335531245316, + "grad_norm": 0.13876043260097504, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4776 + }, + { + "epoch": 0.23862330785753536, + "grad_norm": 0.14886537194252014, + "learning_rate": 0.0001, + "loss": 0.0061, + "step": 4777 + }, + { + "epoch": 
0.23867326040261752, + "grad_norm": 0.12540307641029358, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4778 + }, + { + "epoch": 0.2387232129476997, + "grad_norm": 0.16591580212116241, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4779 + }, + { + "epoch": 0.23877316549278185, + "grad_norm": 0.15832045674324036, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4780 + }, + { + "epoch": 0.23882311803786402, + "grad_norm": 0.17196665704250336, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4781 + }, + { + "epoch": 0.2388730705829462, + "grad_norm": 0.19362807273864746, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4782 + }, + { + "epoch": 0.23892302312802838, + "grad_norm": 0.16599909961223602, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4783 + }, + { + "epoch": 0.23897297567311054, + "grad_norm": 0.18901123106479645, + "learning_rate": 0.0001, + "loss": 0.0026, + "step": 4784 + }, + { + "epoch": 0.2390229282181927, + "grad_norm": 0.13565000891685486, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4785 + }, + { + "epoch": 0.23907288076327488, + "grad_norm": 0.3623614013195038, + "learning_rate": 0.0001, + "loss": 0.2567, + "step": 4786 + }, + { + "epoch": 0.23912283330835707, + "grad_norm": 0.15336525440216064, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4787 + }, + { + "epoch": 0.23917278585343923, + "grad_norm": 0.1605212539434433, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4788 + }, + { + "epoch": 0.2392227383985214, + "grad_norm": 0.15421898663043976, + "learning_rate": 0.0001, + "loss": 0.0041, + "step": 4789 + }, + { + "epoch": 0.23927269094360357, + "grad_norm": 0.127956360578537, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4790 + }, + { + "epoch": 0.23932264348868576, + "grad_norm": 0.11825651675462723, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4791 + }, + { + "epoch": 0.23937259603376793, + "grad_norm": 0.14121297001838684, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4792 + }, + { + "epoch": 0.2394225485788501, + "grad_norm": 0.1304389089345932, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4793 + }, + { + "epoch": 0.23947250112393226, + "grad_norm": 0.1663120985031128, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4794 + }, + { + "epoch": 0.23952245366901442, + "grad_norm": 0.10842222720384598, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4795 + }, + { + "epoch": 0.23957240621409662, + "grad_norm": 0.12497247010469437, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4796 + }, + { + "epoch": 0.23962235875917878, + "grad_norm": 0.12499269843101501, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4797 + }, + { + "epoch": 0.23967231130426095, + "grad_norm": 0.1318102478981018, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4798 + }, + { + "epoch": 0.23972226384934311, + "grad_norm": 0.12960706651210785, + "learning_rate": 0.0001, + "loss": 0.0015, + "step": 4799 + }, + { + "epoch": 0.2397722163944253, + "grad_norm": 0.1444765329360962, + "learning_rate": 0.0001, + "loss": 0.1282, + "step": 4800 + }, + { + "epoch": 0.23982216893950747, + "grad_norm": 0.1387467086315155, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4801 + }, + { + "epoch": 0.23987212148458964, + "grad_norm": 0.15122249722480774, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4802 + }, + { + "epoch": 0.2399220740296718, + "grad_norm": 0.11008159816265106, + "learning_rate": 0.0001, + "loss": 0.0005, + "step": 4803 + }, + { + "epoch": 
0.23997202657475397, + "grad_norm": 0.12578731775283813, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4804 + }, + { + "epoch": 0.24002197911983617, + "grad_norm": 0.12044280022382736, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4805 + }, + { + "epoch": 0.24007193166491833, + "grad_norm": 0.10074251145124435, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4806 + }, + { + "epoch": 0.2401218842100005, + "grad_norm": 0.16696476936340332, + "learning_rate": 0.0001, + "loss": 0.1267, + "step": 4807 + }, + { + "epoch": 0.24017183675508266, + "grad_norm": 0.14142322540283203, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4808 + }, + { + "epoch": 0.24022178930016486, + "grad_norm": 0.12355049699544907, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4809 + }, + { + "epoch": 0.24027174184524702, + "grad_norm": 0.1070142462849617, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4810 + }, + { + "epoch": 0.2403216943903292, + "grad_norm": 0.12057966738939285, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4811 + }, + { + "epoch": 0.24037164693541135, + "grad_norm": 0.13836677372455597, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4812 + }, + { + "epoch": 0.24042159948049352, + "grad_norm": 0.11194325238466263, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4813 + }, + { + "epoch": 0.2404715520255757, + "grad_norm": 0.1371983289718628, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4814 + }, + { + "epoch": 0.24052150457065788, + "grad_norm": 0.13507398962974548, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4815 + }, + { + "epoch": 0.24057145711574004, + "grad_norm": 0.11322902143001556, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4816 + }, + { + "epoch": 0.2406214096608222, + "grad_norm": 0.1485094577074051, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4817 + }, + { + "epoch": 0.2406713622059044, + "grad_norm": 0.1513773500919342, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4818 + }, + { + "epoch": 0.24072131475098657, + "grad_norm": 0.11596491932868958, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4819 + }, + { + "epoch": 0.24077126729606874, + "grad_norm": 0.14607347548007965, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4820 + }, + { + "epoch": 0.2408212198411509, + "grad_norm": 0.15974655747413635, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4821 + }, + { + "epoch": 0.24087117238623307, + "grad_norm": 0.14758050441741943, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4822 + }, + { + "epoch": 0.24092112493131526, + "grad_norm": 0.13783399760723114, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4823 + }, + { + "epoch": 0.24097107747639743, + "grad_norm": 0.1266639083623886, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4824 + }, + { + "epoch": 0.2410210300214796, + "grad_norm": 0.11567917466163635, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4825 + }, + { + "epoch": 0.24107098256656176, + "grad_norm": 0.11081850528717041, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4826 + }, + { + "epoch": 0.24112093511164392, + "grad_norm": 0.1331404447555542, + "learning_rate": 0.0001, + "loss": 0.0008, + "step": 4827 + }, + { + "epoch": 0.24117088765672612, + "grad_norm": 0.1858152598142624, + "learning_rate": 0.0001, + "loss": 0.0145, + "step": 4828 + }, + { + "epoch": 0.24122084020180828, + "grad_norm": 0.17615307867527008, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4829 + }, + { + "epoch": 
0.24127079274689045, + "grad_norm": 0.18932941555976868, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4830 + }, + { + "epoch": 0.24132074529197262, + "grad_norm": 0.18523314595222473, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 4831 + }, + { + "epoch": 0.2413706978370548, + "grad_norm": 0.7068895101547241, + "learning_rate": 0.0001, + "loss": 0.1638, + "step": 4832 + }, + { + "epoch": 0.24142065038213698, + "grad_norm": 0.17239978909492493, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4833 + }, + { + "epoch": 0.24147060292721914, + "grad_norm": 0.22304023802280426, + "learning_rate": 0.0001, + "loss": 0.0131, + "step": 4834 + }, + { + "epoch": 0.2415205554723013, + "grad_norm": 0.17641356587409973, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4835 + }, + { + "epoch": 0.24157050801738347, + "grad_norm": 0.20885232090950012, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4836 + }, + { + "epoch": 0.24162046056246567, + "grad_norm": 0.2302258461713791, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4837 + }, + { + "epoch": 0.24167041310754783, + "grad_norm": 0.23071278631687164, + "learning_rate": 0.0001, + "loss": 0.0074, + "step": 4838 + }, + { + "epoch": 0.24172036565263, + "grad_norm": 0.1508045196533203, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4839 + }, + { + "epoch": 0.24177031819771216, + "grad_norm": 0.15558385848999023, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4840 + }, + { + "epoch": 0.24182027074279436, + "grad_norm": 0.17126208543777466, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4841 + }, + { + "epoch": 0.24187022328787652, + "grad_norm": 0.15049947798252106, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4842 + }, + { + "epoch": 0.2419201758329587, + "grad_norm": 0.17745761573314667, + "learning_rate": 0.0001, + "loss": 0.1289, + "step": 4843 + }, + { + "epoch": 0.24197012837804086, + "grad_norm": 0.18813897669315338, + "learning_rate": 0.0001, + "loss": 0.1367, + "step": 4844 + }, + { + "epoch": 0.24202008092312302, + "grad_norm": 0.1862485408782959, + "learning_rate": 0.0001, + "loss": 0.0129, + "step": 4845 + }, + { + "epoch": 0.24207003346820521, + "grad_norm": 0.16039207577705383, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4846 + }, + { + "epoch": 0.24211998601328738, + "grad_norm": 0.19413600862026215, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4847 + }, + { + "epoch": 0.24216993855836955, + "grad_norm": 0.17502178251743317, + "learning_rate": 0.0001, + "loss": 0.1304, + "step": 4848 + }, + { + "epoch": 0.2422198911034517, + "grad_norm": 0.15586034953594208, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4849 + }, + { + "epoch": 0.2422698436485339, + "grad_norm": 0.18501605093479156, + "learning_rate": 0.0001, + "loss": 0.0043, + "step": 4850 + }, + { + "epoch": 0.24231979619361607, + "grad_norm": 0.17558053135871887, + "learning_rate": 0.0001, + "loss": 0.0141, + "step": 4851 + }, + { + "epoch": 0.24236974873869824, + "grad_norm": 0.15825189650058746, + "learning_rate": 0.0001, + "loss": 0.0035, + "step": 4852 + }, + { + "epoch": 0.2424197012837804, + "grad_norm": 0.15423771739006042, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4853 + }, + { + "epoch": 0.24246965382886257, + "grad_norm": 0.1515721082687378, + "learning_rate": 0.0001, + "loss": 0.0059, + "step": 4854 + }, + { + "epoch": 0.24251960637394476, + "grad_norm": 0.2496429681777954, + "learning_rate": 0.0001, + "loss": 0.0108, + "step": 4855 + }, + { + "epoch": 
0.24256955891902693, + "grad_norm": 0.15412433445453644, + "learning_rate": 0.0001, + "loss": 0.0088, + "step": 4856 + }, + { + "epoch": 0.2426195114641091, + "grad_norm": 0.14532199501991272, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4857 + }, + { + "epoch": 0.24266946400919126, + "grad_norm": 0.15028764307498932, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4858 + }, + { + "epoch": 0.24271941655427345, + "grad_norm": 0.18035317957401276, + "learning_rate": 0.0001, + "loss": 0.1277, + "step": 4859 + }, + { + "epoch": 0.24276936909935562, + "grad_norm": 0.15885601937770844, + "learning_rate": 0.0001, + "loss": 0.126, + "step": 4860 + }, + { + "epoch": 0.24281932164443779, + "grad_norm": 0.16548651456832886, + "learning_rate": 0.0001, + "loss": 0.0124, + "step": 4861 + }, + { + "epoch": 0.24286927418951995, + "grad_norm": 0.13341116905212402, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4862 + }, + { + "epoch": 0.24291922673460212, + "grad_norm": 0.14658108353614807, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4863 + }, + { + "epoch": 0.2429691792796843, + "grad_norm": 0.13044194877147675, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4864 + }, + { + "epoch": 0.24301913182476648, + "grad_norm": 0.15097928047180176, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4865 + }, + { + "epoch": 0.24306908436984864, + "grad_norm": 0.1437360942363739, + "learning_rate": 0.0001, + "loss": 0.128, + "step": 4866 + }, + { + "epoch": 0.2431190369149308, + "grad_norm": 0.14551350474357605, + "learning_rate": 0.0001, + "loss": 0.0076, + "step": 4867 + }, + { + "epoch": 0.24316898946001297, + "grad_norm": 0.15500140190124512, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 4868 + }, + { + "epoch": 0.24321894200509517, + "grad_norm": 0.12672723829746246, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4869 + }, + { + "epoch": 0.24326889455017733, + "grad_norm": 0.1677049845457077, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4870 + }, + { + "epoch": 0.2433188470952595, + "grad_norm": 0.14677512645721436, + "learning_rate": 0.0001, + "loss": 0.0028, + "step": 4871 + }, + { + "epoch": 0.24336879964034167, + "grad_norm": 0.30169427394866943, + "learning_rate": 0.0001, + "loss": 0.275, + "step": 4872 + }, + { + "epoch": 0.24341875218542386, + "grad_norm": 0.1362256556749344, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4873 + }, + { + "epoch": 0.24346870473050602, + "grad_norm": 0.1746174544095993, + "learning_rate": 0.0001, + "loss": 0.0091, + "step": 4874 + }, + { + "epoch": 0.2435186572755882, + "grad_norm": 0.1500610113143921, + "learning_rate": 0.0001, + "loss": 0.0029, + "step": 4875 + }, + { + "epoch": 0.24356860982067036, + "grad_norm": 0.14803291857242584, + "learning_rate": 0.0001, + "loss": 0.1263, + "step": 4876 + }, + { + "epoch": 0.24361856236575252, + "grad_norm": 0.1779724657535553, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4877 + }, + { + "epoch": 0.24366851491083472, + "grad_norm": 0.1470341980457306, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4878 + }, + { + "epoch": 0.24371846745591688, + "grad_norm": 0.18568100035190582, + "learning_rate": 0.0001, + "loss": 0.1263, + "step": 4879 + }, + { + "epoch": 0.24376842000099905, + "grad_norm": 0.13006669282913208, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4880 + }, + { + "epoch": 0.2438183725460812, + "grad_norm": 0.14696866273880005, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4881 + }, + { + "epoch": 
0.2438683250911634, + "grad_norm": 0.2639386057853699, + "learning_rate": 0.0001, + "loss": 0.0207, + "step": 4882 + }, + { + "epoch": 0.24391827763624557, + "grad_norm": 0.1429034024477005, + "learning_rate": 0.0001, + "loss": 0.0042, + "step": 4883 + }, + { + "epoch": 0.24396823018132774, + "grad_norm": 0.21752966940402985, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4884 + }, + { + "epoch": 0.2440181827264099, + "grad_norm": 0.2784268856048584, + "learning_rate": 0.0001, + "loss": 0.0215, + "step": 4885 + }, + { + "epoch": 0.24406813527149207, + "grad_norm": 0.17079854011535645, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4886 + }, + { + "epoch": 0.24411808781657426, + "grad_norm": 0.13152199983596802, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4887 + }, + { + "epoch": 0.24416804036165643, + "grad_norm": 0.15444135665893555, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4888 + }, + { + "epoch": 0.2442179929067386, + "grad_norm": 0.1415828913450241, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4889 + }, + { + "epoch": 0.24426794545182076, + "grad_norm": 0.16324718296527863, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4890 + }, + { + "epoch": 0.24431789799690296, + "grad_norm": 0.16878971457481384, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4891 + }, + { + "epoch": 0.24436785054198512, + "grad_norm": 0.13826948404312134, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4892 + }, + { + "epoch": 0.2444178030870673, + "grad_norm": 0.14636904001235962, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4893 + }, + { + "epoch": 0.24446775563214945, + "grad_norm": 0.14765328168869019, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4894 + }, + { + "epoch": 0.24451770817723162, + "grad_norm": 0.21934404969215393, + "learning_rate": 0.0001, + "loss": 0.019, + "step": 4895 + }, + { + "epoch": 0.2445676607223138, + "grad_norm": 0.21100930869579315, + "learning_rate": 0.0001, + "loss": 0.0217, + "step": 4896 + }, + { + "epoch": 0.24461761326739598, + "grad_norm": 0.23247358202934265, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4897 + }, + { + "epoch": 0.24466756581247814, + "grad_norm": 0.2009182721376419, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4898 + }, + { + "epoch": 0.2447175183575603, + "grad_norm": 0.21002820134162903, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4899 + }, + { + "epoch": 0.2447674709026425, + "grad_norm": 0.19376933574676514, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4900 + }, + { + "epoch": 0.24481742344772467, + "grad_norm": 0.19302423298358917, + "learning_rate": 0.0001, + "loss": 0.0125, + "step": 4901 + }, + { + "epoch": 0.24486737599280683, + "grad_norm": 0.1673697829246521, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4902 + }, + { + "epoch": 0.244917328537889, + "grad_norm": 0.2000037580728531, + "learning_rate": 0.0001, + "loss": 0.0089, + "step": 4903 + }, + { + "epoch": 0.24496728108297117, + "grad_norm": 0.20640195906162262, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 4904 + }, + { + "epoch": 0.24501723362805336, + "grad_norm": 0.2429591417312622, + "learning_rate": 0.0001, + "loss": 0.0164, + "step": 4905 + }, + { + "epoch": 0.24506718617313553, + "grad_norm": 0.20273306965827942, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4906 + }, + { + "epoch": 0.2451171387182177, + "grad_norm": 0.20665240287780762, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4907 + }, + { + "epoch": 
0.24516709126329986, + "grad_norm": 0.19800938665866852, + "learning_rate": 0.0001, + "loss": 0.1298, + "step": 4908 + }, + { + "epoch": 0.24521704380838202, + "grad_norm": 0.18154789507389069, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4909 + }, + { + "epoch": 0.24526699635346422, + "grad_norm": 0.19100897014141083, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4910 + }, + { + "epoch": 0.24531694889854638, + "grad_norm": 0.13013964891433716, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4911 + }, + { + "epoch": 0.24536690144362855, + "grad_norm": 0.14408151805400848, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4912 + }, + { + "epoch": 0.24541685398871071, + "grad_norm": 0.1723465621471405, + "learning_rate": 0.0001, + "loss": 0.0045, + "step": 4913 + }, + { + "epoch": 0.2454668065337929, + "grad_norm": 0.136610746383667, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4914 + }, + { + "epoch": 0.24551675907887507, + "grad_norm": 0.1350744217634201, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4915 + }, + { + "epoch": 0.24556671162395724, + "grad_norm": 0.17278185486793518, + "learning_rate": 0.0001, + "loss": 0.0049, + "step": 4916 + }, + { + "epoch": 0.2456166641690394, + "grad_norm": 0.14200183749198914, + "learning_rate": 0.0001, + "loss": 0.001, + "step": 4917 + }, + { + "epoch": 0.24566661671412157, + "grad_norm": 0.10237434506416321, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4918 + }, + { + "epoch": 0.24571656925920377, + "grad_norm": 0.12676194310188293, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4919 + }, + { + "epoch": 0.24576652180428593, + "grad_norm": 0.1214652806520462, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4920 + }, + { + "epoch": 0.2458164743493681, + "grad_norm": 0.15914130210876465, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4921 + }, + { + "epoch": 0.24586642689445026, + "grad_norm": 0.12189163267612457, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4922 + }, + { + "epoch": 0.24591637943953246, + "grad_norm": 0.17525821924209595, + "learning_rate": 0.0001, + "loss": 0.0034, + "step": 4923 + }, + { + "epoch": 0.24596633198461462, + "grad_norm": 0.13818638026714325, + "learning_rate": 0.0001, + "loss": 0.0033, + "step": 4924 + }, + { + "epoch": 0.2460162845296968, + "grad_norm": 0.10205940157175064, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4925 + }, + { + "epoch": 0.24606623707477895, + "grad_norm": 0.15608733892440796, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4926 + }, + { + "epoch": 0.24611618961986112, + "grad_norm": 0.12141215801239014, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4927 + }, + { + "epoch": 0.2461661421649433, + "grad_norm": 0.11980684101581573, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4928 + }, + { + "epoch": 0.24621609471002548, + "grad_norm": 0.14952850341796875, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4929 + }, + { + "epoch": 0.24626604725510765, + "grad_norm": 0.07747093588113785, + "learning_rate": 0.0001, + "loss": 0.0005, + "step": 4930 + }, + { + "epoch": 0.2463159998001898, + "grad_norm": 0.1714310646057129, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4931 + }, + { + "epoch": 0.246365952345272, + "grad_norm": 0.14097009599208832, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4932 + }, + { + "epoch": 0.24641590489035417, + "grad_norm": 0.14266961812973022, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4933 + }, + { + "epoch": 
0.24646585743543634, + "grad_norm": 0.11605453491210938, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4934 + }, + { + "epoch": 0.2465158099805185, + "grad_norm": 0.15945322811603546, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4935 + }, + { + "epoch": 0.24656576252560067, + "grad_norm": 0.18099211156368256, + "learning_rate": 0.0001, + "loss": 0.1278, + "step": 4936 + }, + { + "epoch": 0.24661571507068286, + "grad_norm": 0.12159311771392822, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4937 + }, + { + "epoch": 0.24666566761576503, + "grad_norm": 0.14418865740299225, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4938 + }, + { + "epoch": 0.2467156201608472, + "grad_norm": 0.1892230063676834, + "learning_rate": 0.0001, + "loss": 0.1359, + "step": 4939 + }, + { + "epoch": 0.24676557270592936, + "grad_norm": 0.12162505090236664, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4940 + }, + { + "epoch": 0.24681552525101155, + "grad_norm": 0.1569829136133194, + "learning_rate": 0.0001, + "loss": 0.0037, + "step": 4941 + }, + { + "epoch": 0.24686547779609372, + "grad_norm": 0.15859781205654144, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4942 + }, + { + "epoch": 0.24691543034117588, + "grad_norm": 0.15541842579841614, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4943 + }, + { + "epoch": 0.24696538288625805, + "grad_norm": 0.12902970612049103, + "learning_rate": 0.0001, + "loss": 0.0007, + "step": 4944 + }, + { + "epoch": 0.24701533543134022, + "grad_norm": 0.12732748687267303, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4945 + }, + { + "epoch": 0.2470652879764224, + "grad_norm": 0.1269531399011612, + "learning_rate": 0.0001, + "loss": 0.1256, + "step": 4946 + }, + { + "epoch": 0.24711524052150458, + "grad_norm": 0.14504283666610718, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4947 + }, + { + "epoch": 0.24716519306658674, + "grad_norm": 0.10945535451173782, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4948 + }, + { + "epoch": 0.2472151456116689, + "grad_norm": 0.13239529728889465, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4949 + }, + { + "epoch": 0.24726509815675107, + "grad_norm": 0.13874661922454834, + "learning_rate": 0.0001, + "loss": 0.1264, + "step": 4950 + }, + { + "epoch": 0.24731505070183327, + "grad_norm": 0.13219527900218964, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4951 + }, + { + "epoch": 0.24736500324691543, + "grad_norm": 0.12116933614015579, + "learning_rate": 0.0001, + "loss": 0.0053, + "step": 4952 + }, + { + "epoch": 0.2474149557919976, + "grad_norm": 0.13744588196277618, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4953 + }, + { + "epoch": 0.24746490833707976, + "grad_norm": 0.15363582968711853, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4954 + }, + { + "epoch": 0.24751486088216196, + "grad_norm": 0.11357418447732925, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4955 + }, + { + "epoch": 0.24756481342724412, + "grad_norm": 0.1743597388267517, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4956 + }, + { + "epoch": 0.2476147659723263, + "grad_norm": 0.12678879499435425, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4957 + }, + { + "epoch": 0.24766471851740846, + "grad_norm": 0.17920546233654022, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4958 + }, + { + "epoch": 0.24771467106249062, + "grad_norm": 0.14548711478710175, + "learning_rate": 0.0001, + "loss": 0.0011, + "step": 4959 + }, + { + "epoch": 
0.24776462360757281, + "grad_norm": 0.1214849203824997, + "learning_rate": 0.0001, + "loss": 0.0048, + "step": 4960 + }, + { + "epoch": 0.24781457615265498, + "grad_norm": 0.2166869342327118, + "learning_rate": 0.0001, + "loss": 0.0052, + "step": 4961 + }, + { + "epoch": 0.24786452869773715, + "grad_norm": 0.24699737131595612, + "learning_rate": 0.0001, + "loss": 0.003, + "step": 4962 + }, + { + "epoch": 0.2479144812428193, + "grad_norm": 0.15745015442371368, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4963 + }, + { + "epoch": 0.2479644337879015, + "grad_norm": 0.12905853986740112, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 4964 + }, + { + "epoch": 0.24801438633298367, + "grad_norm": 0.3159245550632477, + "learning_rate": 0.0001, + "loss": 0.0162, + "step": 4965 + }, + { + "epoch": 0.24806433887806584, + "grad_norm": 0.18296942114830017, + "learning_rate": 0.0001, + "loss": 0.0046, + "step": 4966 + }, + { + "epoch": 0.248114291423148, + "grad_norm": 0.1805519461631775, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4967 + }, + { + "epoch": 0.24816424396823017, + "grad_norm": 0.20674805343151093, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4968 + }, + { + "epoch": 0.24821419651331236, + "grad_norm": 0.3475694954395294, + "learning_rate": 0.0001, + "loss": 0.019, + "step": 4969 + }, + { + "epoch": 0.24826414905839453, + "grad_norm": 0.25089436769485474, + "learning_rate": 0.0001, + "loss": 0.0054, + "step": 4970 + }, + { + "epoch": 0.2483141016034767, + "grad_norm": 0.21561241149902344, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4971 + }, + { + "epoch": 0.24836405414855886, + "grad_norm": 0.2602939009666443, + "learning_rate": 0.0001, + "loss": 0.0119, + "step": 4972 + }, + { + "epoch": 0.24841400669364105, + "grad_norm": 0.21150289475917816, + "learning_rate": 0.0001, + "loss": 0.0016, + "step": 4973 + }, + { + "epoch": 0.24846395923872322, + "grad_norm": 0.25298210978507996, + "learning_rate": 0.0001, + "loss": 0.0044, + "step": 4974 + }, + { + "epoch": 0.24851391178380539, + "grad_norm": 0.2208634912967682, + "learning_rate": 0.0001, + "loss": 0.0025, + "step": 4975 + }, + { + "epoch": 0.24856386432888755, + "grad_norm": 0.18376871943473816, + "learning_rate": 0.0001, + "loss": 0.0019, + "step": 4976 + }, + { + "epoch": 0.24861381687396972, + "grad_norm": 0.23034998774528503, + "learning_rate": 0.0001, + "loss": 0.1291, + "step": 4977 + }, + { + "epoch": 0.2486637694190519, + "grad_norm": 0.25161004066467285, + "learning_rate": 0.0001, + "loss": 0.0051, + "step": 4978 + }, + { + "epoch": 0.24871372196413408, + "grad_norm": 0.17541353404521942, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4979 + }, + { + "epoch": 0.24876367450921624, + "grad_norm": 0.1643267273902893, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4980 + }, + { + "epoch": 0.2488136270542984, + "grad_norm": 0.2210501730442047, + "learning_rate": 0.0001, + "loss": 0.0038, + "step": 4981 + }, + { + "epoch": 0.2488635795993806, + "grad_norm": 0.19205568730831146, + "learning_rate": 0.0001, + "loss": 0.0022, + "step": 4982 + }, + { + "epoch": 0.24891353214446277, + "grad_norm": 0.1926109939813614, + "learning_rate": 0.0001, + "loss": 0.0021, + "step": 4983 + }, + { + "epoch": 0.24896348468954493, + "grad_norm": 0.22943753004074097, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4984 + }, + { + "epoch": 0.2490134372346271, + "grad_norm": 0.15264363586902618, + "learning_rate": 0.0001, + "loss": 0.002, + "step": 4985 + }, + { + "epoch": 
0.24906338977970927, + "grad_norm": 0.21032923460006714, + "learning_rate": 0.0001, + "loss": 0.0031, + "step": 4986 + }, + { + "epoch": 0.24911334232479146, + "grad_norm": 0.3864649534225464, + "learning_rate": 0.0001, + "loss": 0.2676, + "step": 4987 + }, + { + "epoch": 0.24916329486987362, + "grad_norm": 0.21831798553466797, + "learning_rate": 0.0001, + "loss": 0.0023, + "step": 4988 + }, + { + "epoch": 0.2492132474149558, + "grad_norm": 0.14193226397037506, + "learning_rate": 0.0001, + "loss": 0.0018, + "step": 4989 + }, + { + "epoch": 0.24926319996003796, + "grad_norm": 0.18116974830627441, + "learning_rate": 0.0001, + "loss": 0.0013, + "step": 4990 + }, + { + "epoch": 0.24931315250512012, + "grad_norm": 0.2854144871234894, + "learning_rate": 0.0001, + "loss": 0.13, + "step": 4991 + }, + { + "epoch": 0.24936310505020232, + "grad_norm": 0.24052797257900238, + "learning_rate": 0.0001, + "loss": 0.004, + "step": 4992 + }, + { + "epoch": 0.24941305759528448, + "grad_norm": 0.15315239131450653, + "learning_rate": 0.0001, + "loss": 0.1266, + "step": 4993 + }, + { + "epoch": 0.24946301014036665, + "grad_norm": 0.21306875348091125, + "learning_rate": 0.0001, + "loss": 0.0027, + "step": 4994 + }, + { + "epoch": 0.2495129626854488, + "grad_norm": 0.24236305058002472, + "learning_rate": 0.0001, + "loss": 0.0017, + "step": 4995 + }, + { + "epoch": 0.249562915230531, + "grad_norm": 0.13626734912395477, + "learning_rate": 0.0001, + "loss": 0.0009, + "step": 4996 + }, + { + "epoch": 0.24961286777561317, + "grad_norm": 0.17733381688594818, + "learning_rate": 0.0001, + "loss": 0.0032, + "step": 4997 + }, + { + "epoch": 0.24966282032069534, + "grad_norm": 0.23094044625759125, + "learning_rate": 0.0001, + "loss": 0.0014, + "step": 4998 + }, + { + "epoch": 0.2497127728657775, + "grad_norm": 0.16916953027248383, + "learning_rate": 0.0001, + "loss": 0.0012, + "step": 4999 + }, + { + "epoch": 0.24976272541085967, + "grad_norm": 0.10609002411365509, + "learning_rate": 0.0001, + "loss": 0.0006, + "step": 5000 + } + ], + "logging_steps": 1.0, + "max_steps": 20019, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 1000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 3.51321980928e+16, + "train_batch_size": 8, + "trial_name": null, + "trial_params": null +}