{ "best_metric": null, "best_model_checkpoint": null, "epoch": 0.44518642181413465, "eval_steps": 500, "global_step": 1000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.00044518642181413465, "grad_norm": null, "learning_rate": 0.0002, "loss": 4.4716, "step": 1 }, { "epoch": 0.0008903728436282693, "grad_norm": null, "learning_rate": 0.0002, "loss": 4.9872, "step": 2 }, { "epoch": 0.001335559265442404, "grad_norm": null, "learning_rate": 0.0002, "loss": 5.0735, "step": 3 }, { "epoch": 0.0017807456872565386, "grad_norm": null, "learning_rate": 0.0002, "loss": 7.0202, "step": 4 }, { "epoch": 0.0022259321090706734, "grad_norm": 2.553895950317383, "learning_rate": 0.00019999950652018584, "loss": 4.1176, "step": 5 }, { "epoch": 0.002671118530884808, "grad_norm": 3.750920534133911, "learning_rate": 0.0001999980260856137, "loss": 4.3848, "step": 6 }, { "epoch": 0.0031163049526989426, "grad_norm": 4.422790050506592, "learning_rate": 0.000199995558710895, "loss": 5.3942, "step": 7 }, { "epoch": 0.003561491374513077, "grad_norm": null, "learning_rate": 0.000199995558710895, "loss": 3.3069, "step": 8 }, { "epoch": 0.004006677796327212, "grad_norm": 6.551366329193115, "learning_rate": 0.00019999210442038162, "loss": 3.3369, "step": 9 }, { "epoch": 0.004451864218141347, "grad_norm": 5.16669225692749, "learning_rate": 0.00019998766324816607, "loss": 4.0822, "step": 10 }, { "epoch": 0.004897050639955481, "grad_norm": 4.758444309234619, "learning_rate": 0.0001999822352380809, "loss": 3.1787, "step": 11 }, { "epoch": 0.005342237061769616, "grad_norm": 7.418261528015137, "learning_rate": 0.00019997582044369843, "loss": 3.594, "step": 12 }, { "epoch": 0.005787423483583751, "grad_norm": 6.103316307067871, "learning_rate": 0.00019996841892833, "loss": 2.7363, "step": 13 }, { "epoch": 0.006232609905397885, "grad_norm": 5.146238803863525, "learning_rate": 0.00019996003076502565, "loss": 2.4596, "step": 14 }, { 
"epoch": 0.00667779632721202, "grad_norm": 6.732138633728027, "learning_rate": 0.00019995065603657316, "loss": 3.1536, "step": 15 }, { "epoch": 0.007122982749026154, "grad_norm": 6.138510227203369, "learning_rate": 0.0001999402948354973, "loss": 2.5301, "step": 16 }, { "epoch": 0.007568169170840289, "grad_norm": 7.237410068511963, "learning_rate": 0.00019992894726405893, "loss": 2.5562, "step": 17 }, { "epoch": 0.008013355592654424, "grad_norm": 5.183891773223877, "learning_rate": 0.000199916613434254, "loss": 3.6864, "step": 18 }, { "epoch": 0.00845854201446856, "grad_norm": 4.217153072357178, "learning_rate": 0.0001999032934678125, "loss": 2.2188, "step": 19 }, { "epoch": 0.008903728436282694, "grad_norm": 4.953031063079834, "learning_rate": 0.00019988898749619702, "loss": 2.159, "step": 20 }, { "epoch": 0.009348914858096828, "grad_norm": 4.839296340942383, "learning_rate": 0.00019987369566060176, "loss": 2.3152, "step": 21 }, { "epoch": 0.009794101279910962, "grad_norm": 4.408138275146484, "learning_rate": 0.00019985741811195097, "loss": 2.2196, "step": 22 }, { "epoch": 0.010239287701725098, "grad_norm": 3.8916175365448, "learning_rate": 0.00019984015501089752, "loss": 2.4175, "step": 23 }, { "epoch": 0.010684474123539232, "grad_norm": 4.368136882781982, "learning_rate": 0.0001998219065278212, "loss": 2.5028, "step": 24 }, { "epoch": 0.011129660545353366, "grad_norm": 6.353954792022705, "learning_rate": 0.00019980267284282717, "loss": 2.6495, "step": 25 }, { "epoch": 0.011574846967167502, "grad_norm": 4.783512115478516, "learning_rate": 0.00019978245414574417, "loss": 1.9392, "step": 26 }, { "epoch": 0.012020033388981636, "grad_norm": 3.656338930130005, "learning_rate": 0.00019976125063612252, "loss": 1.8599, "step": 27 }, { "epoch": 0.01246521981079577, "grad_norm": 5.02943229675293, "learning_rate": 0.00019973906252323238, "loss": 2.272, "step": 28 }, { "epoch": 0.012910406232609905, "grad_norm": 3.1446378231048584, "learning_rate": 0.0001997158900260614, 
"loss": 2.2221, "step": 29 }, { "epoch": 0.01335559265442404, "grad_norm": 2.948500871658325, "learning_rate": 0.0001996917333733128, "loss": 1.7518, "step": 30 }, { "epoch": 0.013800779076238175, "grad_norm": 3.700584650039673, "learning_rate": 0.00019966659280340297, "loss": 2.2607, "step": 31 }, { "epoch": 0.014245965498052309, "grad_norm": 4.679660797119141, "learning_rate": 0.00019964046856445924, "loss": 1.4464, "step": 32 }, { "epoch": 0.014691151919866445, "grad_norm": 13.211495399475098, "learning_rate": 0.00019961336091431727, "loss": 2.8803, "step": 33 }, { "epoch": 0.015136338341680579, "grad_norm": 2.2771530151367188, "learning_rate": 0.00019958527012051857, "loss": 1.5439, "step": 34 }, { "epoch": 0.015581524763494713, "grad_norm": 4.475869178771973, "learning_rate": 0.00019955619646030802, "loss": 2.488, "step": 35 }, { "epoch": 0.016026711185308847, "grad_norm": 3.8205974102020264, "learning_rate": 0.00019952614022063084, "loss": 2.0277, "step": 36 }, { "epoch": 0.016471897607122983, "grad_norm": 3.721785545349121, "learning_rate": 0.00019949510169813003, "loss": 2.293, "step": 37 }, { "epoch": 0.01691708402893712, "grad_norm": 5.029823303222656, "learning_rate": 0.00019946308119914323, "loss": 2.5691, "step": 38 }, { "epoch": 0.01736227045075125, "grad_norm": 3.547927141189575, "learning_rate": 0.0001994300790396999, "loss": 1.4268, "step": 39 }, { "epoch": 0.017807456872565387, "grad_norm": 4.380621910095215, "learning_rate": 0.000199396095545518, "loss": 2.2319, "step": 40 }, { "epoch": 0.01825264329437952, "grad_norm": 3.113034248352051, "learning_rate": 0.00019936113105200085, "loss": 2.0469, "step": 41 }, { "epoch": 0.018697829716193656, "grad_norm": 3.8902220726013184, "learning_rate": 0.00019932518590423394, "loss": 2.7515, "step": 42 }, { "epoch": 0.01914301613800779, "grad_norm": 6.047741413116455, "learning_rate": 0.00019928826045698136, "loss": 2.1295, "step": 43 }, { "epoch": 0.019588202559821924, "grad_norm": 1.9819254875183105, 
"learning_rate": 0.0001992503550746824, "loss": 1.1775, "step": 44 }, { "epoch": 0.02003338898163606, "grad_norm": 3.1958131790161133, "learning_rate": 0.0001992114701314478, "loss": 1.9084, "step": 45 }, { "epoch": 0.020478575403450196, "grad_norm": 5.241977691650391, "learning_rate": 0.0001991716060110563, "loss": 3.0362, "step": 46 }, { "epoch": 0.020923761825264328, "grad_norm": 2.977036237716675, "learning_rate": 0.00019913076310695068, "loss": 1.704, "step": 47 }, { "epoch": 0.021368948247078464, "grad_norm": 4.862756729125977, "learning_rate": 0.00019908894182223388, "loss": 1.9846, "step": 48 }, { "epoch": 0.0218141346688926, "grad_norm": 2.590459108352661, "learning_rate": 0.00019904614256966512, "loss": 2.2383, "step": 49 }, { "epoch": 0.022259321090706732, "grad_norm": 4.649803161621094, "learning_rate": 0.00019900236577165576, "loss": 2.0824, "step": 50 }, { "epoch": 0.02270450751252087, "grad_norm": 4.012078285217285, "learning_rate": 0.0001989576118602651, "loss": 1.8357, "step": 51 }, { "epoch": 0.023149693934335004, "grad_norm": 2.382760763168335, "learning_rate": 0.00019891188127719618, "loss": 1.4587, "step": 52 }, { "epoch": 0.023594880356149137, "grad_norm": 3.601736545562744, "learning_rate": 0.0001988651744737914, "loss": 2.118, "step": 53 }, { "epoch": 0.024040066777963272, "grad_norm": 3.790908098220825, "learning_rate": 0.00019881749191102808, "loss": 2.4478, "step": 54 }, { "epoch": 0.02448525319977741, "grad_norm": 5.634518623352051, "learning_rate": 0.00019876883405951377, "loss": 2.0788, "step": 55 }, { "epoch": 0.02493043962159154, "grad_norm": 2.9153330326080322, "learning_rate": 0.00019871920139948192, "loss": 1.753, "step": 56 }, { "epoch": 0.025375626043405677, "grad_norm": 3.083784341812134, "learning_rate": 0.0001986685944207868, "loss": 2.0111, "step": 57 }, { "epoch": 0.02582081246521981, "grad_norm": 3.6852848529815674, "learning_rate": 0.0001986170136228989, "loss": 2.4499, "step": 58 }, { "epoch": 0.026265998887033945, 
"grad_norm": 3.1907293796539307, "learning_rate": 0.00019856445951489982, "loss": 1.6657, "step": 59 }, { "epoch": 0.02671118530884808, "grad_norm": 4.264678955078125, "learning_rate": 0.0001985109326154774, "loss": 2.1187, "step": 60 }, { "epoch": 0.027156371730662213, "grad_norm": 2.061347484588623, "learning_rate": 0.00019845643345292054, "loss": 1.2979, "step": 61 }, { "epoch": 0.02760155815247635, "grad_norm": 2.9782309532165527, "learning_rate": 0.00019840096256511398, "loss": 1.96, "step": 62 }, { "epoch": 0.028046744574290485, "grad_norm": 3.8613250255584717, "learning_rate": 0.00019834452049953297, "loss": 3.4576, "step": 63 }, { "epoch": 0.028491930996104618, "grad_norm": 1.9967275857925415, "learning_rate": 0.00019828710781323792, "loss": 0.9779, "step": 64 }, { "epoch": 0.028937117417918753, "grad_norm": 2.790372371673584, "learning_rate": 0.0001982287250728689, "loss": 1.5419, "step": 65 }, { "epoch": 0.02938230383973289, "grad_norm": 2.1522772312164307, "learning_rate": 0.0001981693728546399, "loss": 1.3176, "step": 66 }, { "epoch": 0.029827490261547022, "grad_norm": 2.084890842437744, "learning_rate": 0.0001981090517443334, "loss": 0.8775, "step": 67 }, { "epoch": 0.030272676683361158, "grad_norm": 2.6281416416168213, "learning_rate": 0.00019804776233729444, "loss": 1.7329, "step": 68 }, { "epoch": 0.030717863105175294, "grad_norm": 2.443356990814209, "learning_rate": 0.0001979855052384247, "loss": 1.6412, "step": 69 }, { "epoch": 0.031163049526989426, "grad_norm": 3.233210325241089, "learning_rate": 0.00019792228106217658, "loss": 1.9627, "step": 70 }, { "epoch": 0.03160823594880356, "grad_norm": 4.562886714935303, "learning_rate": 0.00019785809043254722, "loss": 1.3901, "step": 71 }, { "epoch": 0.032053422370617694, "grad_norm": 4.928150653839111, "learning_rate": 0.0001977929339830722, "loss": 1.8413, "step": 72 }, { "epoch": 0.032498608792431834, "grad_norm": 2.872011184692383, "learning_rate": 0.00019772681235681936, "loss": 1.8261, "step": 73 
}, { "epoch": 0.032943795214245966, "grad_norm": 4.901350975036621, "learning_rate": 0.00019765972620638248, "loss": 2.5204, "step": 74 }, { "epoch": 0.0333889816360601, "grad_norm": 4.601914882659912, "learning_rate": 0.00019759167619387476, "loss": 2.3304, "step": 75 }, { "epoch": 0.03383416805787424, "grad_norm": 4.285044193267822, "learning_rate": 0.00019752266299092236, "loss": 2.148, "step": 76 }, { "epoch": 0.03427935447968837, "grad_norm": 3.728804111480713, "learning_rate": 0.00019745268727865774, "loss": 2.2936, "step": 77 }, { "epoch": 0.0347245409015025, "grad_norm": 3.0641114711761475, "learning_rate": 0.0001973817497477129, "loss": 1.7878, "step": 78 }, { "epoch": 0.03516972732331664, "grad_norm": 4.134016990661621, "learning_rate": 0.00019730985109821266, "loss": 2.9227, "step": 79 }, { "epoch": 0.035614913745130775, "grad_norm": 12.307600975036621, "learning_rate": 0.00019723699203976766, "loss": 2.7279, "step": 80 }, { "epoch": 0.03606010016694491, "grad_norm": 3.992847204208374, "learning_rate": 0.0001971631732914674, "loss": 2.5772, "step": 81 }, { "epoch": 0.03650528658875904, "grad_norm": 3.676469564437866, "learning_rate": 0.0001970883955818731, "loss": 1.5831, "step": 82 }, { "epoch": 0.03695047301057318, "grad_norm": 3.0918328762054443, "learning_rate": 0.0001970126596490106, "loss": 1.4057, "step": 83 }, { "epoch": 0.03739565943238731, "grad_norm": 3.945146322250366, "learning_rate": 0.00019693596624036292, "loss": 1.5807, "step": 84 }, { "epoch": 0.037840845854201444, "grad_norm": 2.956975221633911, "learning_rate": 0.0001968583161128631, "loss": 1.9335, "step": 85 }, { "epoch": 0.03828603227601558, "grad_norm": 2.6506834030151367, "learning_rate": 0.00019677971003288655, "loss": 1.8569, "step": 86 }, { "epoch": 0.038731218697829715, "grad_norm": 3.6588492393493652, "learning_rate": 0.00019670014877624353, "loss": 1.6848, "step": 87 }, { "epoch": 0.03917640511964385, "grad_norm": 3.677953004837036, "learning_rate": 0.00019661963312817148, 
"loss": 2.5729, "step": 88 }, { "epoch": 0.03962159154145799, "grad_norm": 3.1620278358459473, "learning_rate": 0.0001965381638833274, "loss": 1.8112, "step": 89 }, { "epoch": 0.04006677796327212, "grad_norm": 4.549400806427002, "learning_rate": 0.00019645574184577982, "loss": 3.0601, "step": 90 }, { "epoch": 0.04051196438508625, "grad_norm": 4.790261745452881, "learning_rate": 0.000196372367829001, "loss": 2.0025, "step": 91 }, { "epoch": 0.04095715080690039, "grad_norm": 2.0430498123168945, "learning_rate": 0.00019628804265585877, "loss": 1.757, "step": 92 }, { "epoch": 0.041402337228714524, "grad_norm": 3.846684217453003, "learning_rate": 0.0001962027671586086, "loss": 2.8248, "step": 93 }, { "epoch": 0.041847523650528656, "grad_norm": 2.1094014644622803, "learning_rate": 0.0001961165421788852, "loss": 1.388, "step": 94 }, { "epoch": 0.042292710072342796, "grad_norm": 2.475429058074951, "learning_rate": 0.0001960293685676943, "loss": 2.109, "step": 95 }, { "epoch": 0.04273789649415693, "grad_norm": 2.539384603500366, "learning_rate": 0.0001959412471854043, "loss": 1.7985, "step": 96 }, { "epoch": 0.04318308291597106, "grad_norm": 3.7397396564483643, "learning_rate": 0.0001958521789017376, "loss": 2.3767, "step": 97 }, { "epoch": 0.0436282693377852, "grad_norm": 4.037478446960449, "learning_rate": 0.00019576216459576222, "loss": 2.5889, "step": 98 }, { "epoch": 0.04407345575959933, "grad_norm": 5.122471809387207, "learning_rate": 0.00019567120515588308, "loss": 2.2834, "step": 99 }, { "epoch": 0.044518642181413465, "grad_norm": 3.0687732696533203, "learning_rate": 0.00019557930147983302, "loss": 2.6478, "step": 100 }, { "epoch": 0.044963828603227604, "grad_norm": 2.641993284225464, "learning_rate": 0.00019548645447466431, "loss": 1.2885, "step": 101 }, { "epoch": 0.04540901502504174, "grad_norm": 2.480499505996704, "learning_rate": 0.00019539266505673938, "loss": 1.4892, "step": 102 }, { "epoch": 0.04585420144685587, "grad_norm": 2.8571128845214844, 
"learning_rate": 0.00019529793415172192, "loss": 1.5001, "step": 103 }, { "epoch": 0.04629938786867001, "grad_norm": 2.330220937728882, "learning_rate": 0.00019520226269456768, "loss": 1.3509, "step": 104 }, { "epoch": 0.04674457429048414, "grad_norm": 6.893773555755615, "learning_rate": 0.00019510565162951537, "loss": 1.6334, "step": 105 }, { "epoch": 0.04718976071229827, "grad_norm": null, "learning_rate": 0.00019510565162951537, "loss": 1.9998, "step": 106 }, { "epoch": 0.04763494713411241, "grad_norm": 3.612894058227539, "learning_rate": 0.00019500810191007718, "loss": 2.5597, "step": 107 }, { "epoch": 0.048080133555926545, "grad_norm": 2.3237907886505127, "learning_rate": 0.00019490961449902946, "loss": 1.6031, "step": 108 }, { "epoch": 0.04852531997774068, "grad_norm": 2.755427122116089, "learning_rate": 0.0001948101903684032, "loss": 1.8851, "step": 109 }, { "epoch": 0.04897050639955482, "grad_norm": 2.8905320167541504, "learning_rate": 0.00019470983049947444, "loss": 1.5392, "step": 110 }, { "epoch": 0.04941569282136895, "grad_norm": 10.140717506408691, "learning_rate": 0.00019460853588275454, "loss": 2.005, "step": 111 }, { "epoch": 0.04986087924318308, "grad_norm": 3.27565336227417, "learning_rate": 0.00019450630751798048, "loss": 2.2091, "step": 112 }, { "epoch": 0.05030606566499722, "grad_norm": 4.00056266784668, "learning_rate": 0.000194403146414105, "loss": 3.019, "step": 113 }, { "epoch": 0.05075125208681135, "grad_norm": 3.5745298862457275, "learning_rate": 0.00019429905358928646, "loss": 2.154, "step": 114 }, { "epoch": 0.051196438508625486, "grad_norm": 3.2333099842071533, "learning_rate": 0.00019419403007087907, "loss": 2.1104, "step": 115 }, { "epoch": 0.05164162493043962, "grad_norm": 2.650325298309326, "learning_rate": 0.00019408807689542257, "loss": 1.1883, "step": 116 }, { "epoch": 0.05208681135225376, "grad_norm": 2.167717218399048, "learning_rate": 0.00019398119510863197, "loss": 2.0315, "step": 117 }, { "epoch": 0.05253199777406789, 
"grad_norm": 3.0747454166412354, "learning_rate": 0.00019387338576538744, "loss": 1.7789, "step": 118 }, { "epoch": 0.05297718419588202, "grad_norm": 1.7313055992126465, "learning_rate": 0.00019376464992972356, "loss": 1.4003, "step": 119 }, { "epoch": 0.05342237061769616, "grad_norm": 2.92069149017334, "learning_rate": 0.00019365498867481923, "loss": 1.3967, "step": 120 }, { "epoch": 0.053867557039510294, "grad_norm": 7.5436553955078125, "learning_rate": 0.00019354440308298675, "loss": 1.9777, "step": 121 }, { "epoch": 0.05431274346132443, "grad_norm": 8.587759971618652, "learning_rate": 0.00019343289424566122, "loss": 1.8453, "step": 122 }, { "epoch": 0.054757929883138566, "grad_norm": 2.8797378540039062, "learning_rate": 0.00019332046326338986, "loss": 1.8348, "step": 123 }, { "epoch": 0.0552031163049527, "grad_norm": 3.496337652206421, "learning_rate": 0.0001932071112458211, "loss": 2.2295, "step": 124 }, { "epoch": 0.05564830272676683, "grad_norm": 4.732133865356445, "learning_rate": 0.00019309283931169356, "loss": 3.2504, "step": 125 }, { "epoch": 0.05609348914858097, "grad_norm": 3.367631196975708, "learning_rate": 0.00019297764858882514, "loss": 1.3877, "step": 126 }, { "epoch": 0.0565386755703951, "grad_norm": 3.1331379413604736, "learning_rate": 0.00019286154021410173, "loss": 1.775, "step": 127 }, { "epoch": 0.056983861992209235, "grad_norm": 2.303420305252075, "learning_rate": 0.00019274451533346615, "loss": 2.3335, "step": 128 }, { "epoch": 0.057429048414023375, "grad_norm": 3.3306965827941895, "learning_rate": 0.00019262657510190666, "loss": 2.1583, "step": 129 }, { "epoch": 0.05787423483583751, "grad_norm": 2.900506019592285, "learning_rate": 0.0001925077206834458, "loss": 1.7225, "step": 130 }, { "epoch": 0.05831942125765164, "grad_norm": 2.4355456829071045, "learning_rate": 0.0001923879532511287, "loss": 1.4379, "step": 131 }, { "epoch": 0.05876460767946578, "grad_norm": 6.534120559692383, "learning_rate": 0.0001922672739870115, "loss": 3.3114, 
"step": 132 }, { "epoch": 0.05920979410127991, "grad_norm": 4.634522914886475, "learning_rate": 0.00019214568408214985, "loss": 3.683, "step": 133 }, { "epoch": 0.059654980523094044, "grad_norm": 3.742173671722412, "learning_rate": 0.00019202318473658705, "loss": 2.1639, "step": 134 }, { "epoch": 0.06010016694490818, "grad_norm": 3.062386989593506, "learning_rate": 0.00019189977715934213, "loss": 2.2926, "step": 135 }, { "epoch": 0.060545353366722315, "grad_norm": 3.8586795330047607, "learning_rate": 0.00019177546256839812, "loss": 1.788, "step": 136 }, { "epoch": 0.06099053978853645, "grad_norm": 3.739783525466919, "learning_rate": 0.0001916502421906898, "loss": 2.7422, "step": 137 }, { "epoch": 0.06143572621035059, "grad_norm": 3.0615429878234863, "learning_rate": 0.00019152411726209176, "loss": 2.0941, "step": 138 }, { "epoch": 0.06188091263216472, "grad_norm": 2.7861218452453613, "learning_rate": 0.00019139708902740613, "loss": 1.3494, "step": 139 }, { "epoch": 0.06232609905397885, "grad_norm": 4.011098384857178, "learning_rate": 0.0001912691587403503, "loss": 2.6336, "step": 140 }, { "epoch": 0.06277128547579298, "grad_norm": 4.350919723510742, "learning_rate": 0.00019114032766354453, "loss": 1.4581, "step": 141 }, { "epoch": 0.06321647189760712, "grad_norm": 3.9159739017486572, "learning_rate": 0.00019101059706849957, "loss": 2.5859, "step": 142 }, { "epoch": 0.06366165831942126, "grad_norm": 4.638038158416748, "learning_rate": 0.00019087996823560402, "loss": 2.6958, "step": 143 }, { "epoch": 0.06410684474123539, "grad_norm": 4.493806838989258, "learning_rate": 0.0001907484424541117, "loss": 1.7282, "step": 144 }, { "epoch": 0.06455203116304953, "grad_norm": 3.276454448699951, "learning_rate": 0.00019061602102212898, "loss": 1.4519, "step": 145 }, { "epoch": 0.06499721758486367, "grad_norm": 3.375685691833496, "learning_rate": 0.00019048270524660196, "loss": 2.7348, "step": 146 }, { "epoch": 0.06544240400667779, "grad_norm": 3.7552807331085205, 
"learning_rate": 0.0001903484964433035, "loss": 2.4439, "step": 147 }, { "epoch": 0.06588759042849193, "grad_norm": 3.9915504455566406, "learning_rate": 0.00019021339593682028, "loss": 2.7966, "step": 148 }, { "epoch": 0.06633277685030607, "grad_norm": 2.8780786991119385, "learning_rate": 0.00019007740506053983, "loss": 1.7375, "step": 149 }, { "epoch": 0.0667779632721202, "grad_norm": 2.792412042617798, "learning_rate": 0.0001899405251566371, "loss": 2.4148, "step": 150 }, { "epoch": 0.06722314969393434, "grad_norm": 3.3408055305480957, "learning_rate": 0.00018980275757606157, "loss": 2.6953, "step": 151 }, { "epoch": 0.06766833611574848, "grad_norm": 2.7032229900360107, "learning_rate": 0.00018966410367852362, "loss": 2.0699, "step": 152 }, { "epoch": 0.0681135225375626, "grad_norm": 3.501795530319214, "learning_rate": 0.00018952456483248119, "loss": 2.6974, "step": 153 }, { "epoch": 0.06855870895937674, "grad_norm": 2.4695076942443848, "learning_rate": 0.0001893841424151264, "loss": 1.7339, "step": 154 }, { "epoch": 0.06900389538119088, "grad_norm": 4.014305591583252, "learning_rate": 0.0001892428378123718, "loss": 2.4761, "step": 155 }, { "epoch": 0.069449081803005, "grad_norm": 2.3375473022460938, "learning_rate": 0.0001891006524188368, "loss": 2.0071, "step": 156 }, { "epoch": 0.06989426822481914, "grad_norm": 3.323019504547119, "learning_rate": 0.00018895758763783383, "loss": 2.226, "step": 157 }, { "epoch": 0.07033945464663328, "grad_norm": 7.093875885009766, "learning_rate": 0.00018881364488135448, "loss": 1.7551, "step": 158 }, { "epoch": 0.07078464106844741, "grad_norm": 2.1790101528167725, "learning_rate": 0.00018866882557005567, "loss": 1.7356, "step": 159 }, { "epoch": 0.07122982749026155, "grad_norm": 4.966728687286377, "learning_rate": 0.00018852313113324552, "loss": 2.3969, "step": 160 }, { "epoch": 0.07167501391207569, "grad_norm": 2.207064390182495, "learning_rate": 0.00018837656300886937, "loss": 1.9052, "step": 161 }, { "epoch": 
0.07212020033388981, "grad_norm": 2.988813877105713, "learning_rate": 0.00018822912264349534, "loss": 2.2362, "step": 162 }, { "epoch": 0.07256538675570395, "grad_norm": 15.190692901611328, "learning_rate": 0.00018808081149230036, "loss": 1.7636, "step": 163 }, { "epoch": 0.07301057317751808, "grad_norm": 2.8087406158447266, "learning_rate": 0.00018793163101905563, "loss": 2.0533, "step": 164 }, { "epoch": 0.07345575959933222, "grad_norm": 2.135378122329712, "learning_rate": 0.00018778158269611218, "loss": 2.2528, "step": 165 }, { "epoch": 0.07390094602114636, "grad_norm": 3.5980896949768066, "learning_rate": 0.00018763066800438636, "loss": 2.5453, "step": 166 }, { "epoch": 0.07434613244296048, "grad_norm": 2.6481971740722656, "learning_rate": 0.0001874788884333453, "loss": 1.8122, "step": 167 }, { "epoch": 0.07479131886477462, "grad_norm": 2.283841371536255, "learning_rate": 0.00018732624548099204, "loss": 2.0774, "step": 168 }, { "epoch": 0.07523650528658876, "grad_norm": 2.59877610206604, "learning_rate": 0.0001871727406538509, "loss": 2.0153, "step": 169 }, { "epoch": 0.07568169170840289, "grad_norm": 4.261568546295166, "learning_rate": 0.0001870183754669526, "loss": 1.56, "step": 170 }, { "epoch": 0.07612687813021703, "grad_norm": 2.588036060333252, "learning_rate": 0.00018686315144381913, "loss": 2.4187, "step": 171 }, { "epoch": 0.07657206455203117, "grad_norm": 3.837876796722412, "learning_rate": 0.000186707070116449, "loss": 2.1299, "step": 172 }, { "epoch": 0.07701725097384529, "grad_norm": 2.0875399112701416, "learning_rate": 0.0001865501330253019, "loss": 2.1619, "step": 173 }, { "epoch": 0.07746243739565943, "grad_norm": 2.641989231109619, "learning_rate": 0.00018639234171928353, "loss": 1.7574, "step": 174 }, { "epoch": 0.07790762381747357, "grad_norm": 3.0749399662017822, "learning_rate": 0.0001862336977557304, "loss": 1.6736, "step": 175 }, { "epoch": 0.0783528102392877, "grad_norm": 2.718775749206543, "learning_rate": 0.0001860742027003944, "loss": 
2.1462, "step": 176 }, { "epoch": 0.07879799666110184, "grad_norm": 2.1774768829345703, "learning_rate": 0.00018591385812742725, "loss": 1.4246, "step": 177 }, { "epoch": 0.07924318308291597, "grad_norm": 3.1828486919403076, "learning_rate": 0.00018575266561936523, "loss": 2.3951, "step": 178 }, { "epoch": 0.0796883695047301, "grad_norm": 2.520548105239868, "learning_rate": 0.00018559062676711332, "loss": 1.6368, "step": 179 }, { "epoch": 0.08013355592654424, "grad_norm": 1.9750266075134277, "learning_rate": 0.0001854277431699295, "loss": 1.4846, "step": 180 }, { "epoch": 0.08057874234835838, "grad_norm": 5.070473670959473, "learning_rate": 0.00018526401643540922, "loss": 1.8334, "step": 181 }, { "epoch": 0.0810239287701725, "grad_norm": 2.920872688293457, "learning_rate": 0.00018509944817946922, "loss": 1.6212, "step": 182 }, { "epoch": 0.08146911519198664, "grad_norm": 2.4279935359954834, "learning_rate": 0.00018493404002633166, "loss": 1.7948, "step": 183 }, { "epoch": 0.08191430161380078, "grad_norm": 3.453925609588623, "learning_rate": 0.00018476779360850832, "loss": 2.1735, "step": 184 }, { "epoch": 0.08235948803561491, "grad_norm": 1.8669699430465698, "learning_rate": 0.00018460071056678422, "loss": 1.2327, "step": 185 }, { "epoch": 0.08280467445742905, "grad_norm": 2.559237003326416, "learning_rate": 0.00018443279255020152, "loss": 1.6827, "step": 186 }, { "epoch": 0.08324986087924319, "grad_norm": 3.643465518951416, "learning_rate": 0.00018426404121604323, "loss": 2.3146, "step": 187 }, { "epoch": 0.08369504730105731, "grad_norm": 3.2583975791931152, "learning_rate": 0.00018409445822981693, "loss": 1.9232, "step": 188 }, { "epoch": 0.08414023372287145, "grad_norm": 3.837696075439453, "learning_rate": 0.00018392404526523817, "loss": 2.215, "step": 189 }, { "epoch": 0.08458542014468559, "grad_norm": 2.9309654235839844, "learning_rate": 0.0001837528040042142, "loss": 1.9777, "step": 190 }, { "epoch": 0.08503060656649972, "grad_norm": 3.0458357334136963, 
"learning_rate": 0.00018358073613682706, "loss": 2.0955, "step": 191 }, { "epoch": 0.08547579298831386, "grad_norm": 3.166865825653076, "learning_rate": 0.00018340784336131713, "loss": 1.3475, "step": 192 }, { "epoch": 0.085920979410128, "grad_norm": 3.3954527378082275, "learning_rate": 0.00018323412738406635, "loss": 2.514, "step": 193 }, { "epoch": 0.08636616583194212, "grad_norm": 3.6221113204956055, "learning_rate": 0.00018305958991958127, "loss": 2.1971, "step": 194 }, { "epoch": 0.08681135225375626, "grad_norm": 2.5096681118011475, "learning_rate": 0.0001828842326904762, "loss": 1.988, "step": 195 }, { "epoch": 0.0872565386755704, "grad_norm": 2.7260031700134277, "learning_rate": 0.00018270805742745617, "loss": 1.7442, "step": 196 }, { "epoch": 0.08770172509738453, "grad_norm": 3.155184507369995, "learning_rate": 0.00018253106586929997, "loss": 2.5275, "step": 197 }, { "epoch": 0.08814691151919866, "grad_norm": 2.810918092727661, "learning_rate": 0.00018235325976284275, "loss": 2.2599, "step": 198 }, { "epoch": 0.0885920979410128, "grad_norm": 3.6741535663604736, "learning_rate": 0.00018217464086295904, "loss": 2.4509, "step": 199 }, { "epoch": 0.08903728436282693, "grad_norm": 3.171786069869995, "learning_rate": 0.00018199521093254523, "loss": 1.5725, "step": 200 }, { "epoch": 0.08948247078464107, "grad_norm": 5.2465500831604, "learning_rate": 0.00018181497174250236, "loss": 2.1309, "step": 201 }, { "epoch": 0.08992765720645521, "grad_norm": 3.3447489738464355, "learning_rate": 0.00018163392507171842, "loss": 2.7054, "step": 202 }, { "epoch": 0.09037284362826933, "grad_norm": 2.2638697624206543, "learning_rate": 0.00018145207270705096, "loss": 1.6347, "step": 203 }, { "epoch": 0.09081803005008347, "grad_norm": 2.3262786865234375, "learning_rate": 0.0001812694164433094, "loss": 1.9896, "step": 204 }, { "epoch": 0.09126321647189761, "grad_norm": 2.7014877796173096, "learning_rate": 0.00018108595808323736, "loss": 2.3665, "step": 205 }, { "epoch": 
0.09170840289371174, "grad_norm": 2.7195963859558105, "learning_rate": 0.00018090169943749476, "loss": 2.0004, "step": 206 }, { "epoch": 0.09215358931552588, "grad_norm": 3.5895092487335205, "learning_rate": 0.00018071664232464002, "loss": 2.7216, "step": 207 }, { "epoch": 0.09259877573734002, "grad_norm": 6.0607008934021, "learning_rate": 0.0001805307885711122, "loss": 1.8817, "step": 208 }, { "epoch": 0.09304396215915414, "grad_norm": 3.0178444385528564, "learning_rate": 0.00018034414001121278, "loss": 1.9194, "step": 209 }, { "epoch": 0.09348914858096828, "grad_norm": 3.298114776611328, "learning_rate": 0.00018015669848708767, "loss": 3.2093, "step": 210 }, { "epoch": 0.09393433500278242, "grad_norm": 2.1203761100769043, "learning_rate": 0.00017996846584870908, "loss": 1.5833, "step": 211 }, { "epoch": 0.09437952142459655, "grad_norm": 2.907470464706421, "learning_rate": 0.0001797794439538571, "loss": 1.7891, "step": 212 }, { "epoch": 0.09482470784641069, "grad_norm": 3.151555061340332, "learning_rate": 0.0001795896346681016, "loss": 2.0487, "step": 213 }, { "epoch": 0.09526989426822483, "grad_norm": 3.080402374267578, "learning_rate": 0.00017939903986478355, "loss": 1.7114, "step": 214 }, { "epoch": 0.09571508069003895, "grad_norm": 4.0168561935424805, "learning_rate": 0.00017920766142499672, "loss": 2.5786, "step": 215 }, { "epoch": 0.09616026711185309, "grad_norm": 1.990530252456665, "learning_rate": 0.00017901550123756906, "loss": 2.1145, "step": 216 }, { "epoch": 0.09660545353366723, "grad_norm": 2.382887840270996, "learning_rate": 0.00017882256119904403, "loss": 1.8551, "step": 217 }, { "epoch": 0.09705063995548135, "grad_norm": 4.134660243988037, "learning_rate": 0.00017862884321366188, "loss": 2.4086, "step": 218 }, { "epoch": 0.0974958263772955, "grad_norm": 2.927734375, "learning_rate": 0.000178434349193341, "loss": 1.9386, "step": 219 }, { "epoch": 0.09794101279910963, "grad_norm": 2.8078081607818604, "learning_rate": 0.0001782390810576588, "loss": 
2.0586, "step": 220 }, { "epoch": 0.09838619922092376, "grad_norm": 4.8399248123168945, "learning_rate": 0.000178043040733833, "loss": 1.3215, "step": 221 }, { "epoch": 0.0988313856427379, "grad_norm": 2.1833369731903076, "learning_rate": 0.00017784623015670238, "loss": 1.8898, "step": 222 }, { "epoch": 0.09927657206455204, "grad_norm": 3.099783420562744, "learning_rate": 0.00017764865126870786, "loss": 2.9137, "step": 223 }, { "epoch": 0.09972175848636616, "grad_norm": 2.1298272609710693, "learning_rate": 0.00017745030601987337, "loss": 1.4856, "step": 224 }, { "epoch": 0.1001669449081803, "grad_norm": 2.0361313819885254, "learning_rate": 0.00017725119636778644, "loss": 1.8, "step": 225 }, { "epoch": 0.10061213132999444, "grad_norm": 2.4032180309295654, "learning_rate": 0.00017705132427757895, "loss": 1.8565, "step": 226 }, { "epoch": 0.10105731775180857, "grad_norm": 3.0993106365203857, "learning_rate": 0.00017685069172190766, "loss": 2.5554, "step": 227 }, { "epoch": 0.1015025041736227, "grad_norm": 4.992886066436768, "learning_rate": 0.00017664930068093498, "loss": 3.3477, "step": 228 }, { "epoch": 0.10194769059543683, "grad_norm": 3.065136194229126, "learning_rate": 0.00017644715314230918, "loss": 1.785, "step": 229 }, { "epoch": 0.10239287701725097, "grad_norm": 2.915815830230713, "learning_rate": 0.0001762442511011448, "loss": 1.7885, "step": 230 }, { "epoch": 0.10283806343906511, "grad_norm": 1.5576674938201904, "learning_rate": 0.0001760405965600031, "loss": 1.2228, "step": 231 }, { "epoch": 0.10328324986087924, "grad_norm": 4.22326135635376, "learning_rate": 0.0001758361915288722, "loss": 1.8784, "step": 232 }, { "epoch": 0.10372843628269338, "grad_norm": 2.074902296066284, "learning_rate": 0.0001756310380251472, "loss": 2.1881, "step": 233 }, { "epoch": 0.10417362270450752, "grad_norm": 3.5856900215148926, "learning_rate": 0.00017542513807361037, "loss": 2.452, "step": 234 }, { "epoch": 0.10461880912632164, "grad_norm": 3.115443229675293, 
"learning_rate": 0.00017521849370641114, "loss": 1.9735, "step": 235 }, { "epoch": 0.10506399554813578, "grad_norm": 4.1047868728637695, "learning_rate": 0.00017501110696304596, "loss": 4.1891, "step": 236 }, { "epoch": 0.10550918196994992, "grad_norm": 2.4047348499298096, "learning_rate": 0.00017480297989033825, "loss": 1.5507, "step": 237 }, { "epoch": 0.10595436839176404, "grad_norm": 2.6233081817626953, "learning_rate": 0.00017459411454241822, "loss": 1.6275, "step": 238 }, { "epoch": 0.10639955481357818, "grad_norm": 2.559443235397339, "learning_rate": 0.00017438451298070252, "loss": 2.1019, "step": 239 }, { "epoch": 0.10684474123539232, "grad_norm": 2.711853265762329, "learning_rate": 0.00017417417727387394, "loss": 1.8006, "step": 240 }, { "epoch": 0.10728992765720645, "grad_norm": 2.1308255195617676, "learning_rate": 0.000173963109497861, "loss": 1.0008, "step": 241 }, { "epoch": 0.10773511407902059, "grad_norm": 2.0422074794769287, "learning_rate": 0.0001737513117358174, "loss": 2.1842, "step": 242 }, { "epoch": 0.10818030050083473, "grad_norm": 1.971974492073059, "learning_rate": 0.0001735387860781016, "loss": 1.1267, "step": 243 }, { "epoch": 0.10862548692264885, "grad_norm": 1.90377676486969, "learning_rate": 0.00017332553462225602, "loss": 1.4106, "step": 244 }, { "epoch": 0.10907067334446299, "grad_norm": 6.567409992218018, "learning_rate": 0.00017311155947298643, "loss": 3.1737, "step": 245 }, { "epoch": 0.10951585976627713, "grad_norm": 2.6171557903289795, "learning_rate": 0.00017289686274214118, "loss": 2.0543, "step": 246 }, { "epoch": 0.10996104618809126, "grad_norm": 3.360865354537964, "learning_rate": 0.0001726814465486903, "loss": 3.0316, "step": 247 }, { "epoch": 0.1104062326099054, "grad_norm": 5.862345218658447, "learning_rate": 0.0001724653130187047, "loss": 2.1349, "step": 248 }, { "epoch": 0.11085141903171954, "grad_norm": 5.230323314666748, "learning_rate": 0.00017224846428533499, "loss": 1.959, "step": 249 }, { "epoch": 
0.11129660545353366, "grad_norm": 1.8790351152420044, "learning_rate": 0.0001720309024887907, "loss": 1.559, "step": 250 }, { "epoch": 0.1117417918753478, "grad_norm": 2.6701748371124268, "learning_rate": 0.00017181262977631888, "loss": 2.3727, "step": 251 }, { "epoch": 0.11218697829716194, "grad_norm": 2.2404518127441406, "learning_rate": 0.00017159364830218312, "loss": 1.933, "step": 252 }, { "epoch": 0.11263216471897607, "grad_norm": 3.8700356483459473, "learning_rate": 0.00017137396022764214, "loss": 3.0309, "step": 253 }, { "epoch": 0.1130773511407902, "grad_norm": 2.1600594520568848, "learning_rate": 0.00017115356772092857, "loss": 1.9069, "step": 254 }, { "epoch": 0.11352253756260434, "grad_norm": 3.375443935394287, "learning_rate": 0.0001709324729572274, "loss": 2.4507, "step": 255 }, { "epoch": 0.11396772398441847, "grad_norm": 2.818181037902832, "learning_rate": 0.00017071067811865476, "loss": 1.9793, "step": 256 }, { "epoch": 0.11441291040623261, "grad_norm": 3.2607429027557373, "learning_rate": 0.00017048818539423615, "loss": 1.9051, "step": 257 }, { "epoch": 0.11485809682804675, "grad_norm": 2.977018117904663, "learning_rate": 0.00017026499697988493, "loss": 1.0261, "step": 258 }, { "epoch": 0.11530328324986087, "grad_norm": 2.8753457069396973, "learning_rate": 0.00017004111507838064, "loss": 1.4664, "step": 259 }, { "epoch": 0.11574846967167501, "grad_norm": 3.7540669441223145, "learning_rate": 0.00016981654189934727, "loss": 2.4403, "step": 260 }, { "epoch": 0.11619365609348915, "grad_norm": 2.8768951892852783, "learning_rate": 0.00016959127965923142, "loss": 1.925, "step": 261 }, { "epoch": 0.11663884251530328, "grad_norm": 3.1603147983551025, "learning_rate": 0.0001693653305812805, "loss": 2.3041, "step": 262 }, { "epoch": 0.11708402893711742, "grad_norm": 3.66261625289917, "learning_rate": 0.00016913869689552064, "loss": 2.9761, "step": 263 }, { "epoch": 0.11752921535893156, "grad_norm": 5.0103020668029785, "learning_rate": 0.00016891138083873487, 
"loss": 2.4806, "step": 264 }, { "epoch": 0.11797440178074568, "grad_norm": 3.2384135723114014, "learning_rate": 0.00016868338465444085, "loss": 2.8883, "step": 265 }, { "epoch": 0.11841958820255982, "grad_norm": 3.064087390899658, "learning_rate": 0.00016845471059286887, "loss": 2.3382, "step": 266 }, { "epoch": 0.11886477462437396, "grad_norm": 3.015291452407837, "learning_rate": 0.00016822536091093965, "loss": 2.1407, "step": 267 }, { "epoch": 0.11930996104618809, "grad_norm": 3.0050747394561768, "learning_rate": 0.00016799533787224192, "loss": 3.0019, "step": 268 }, { "epoch": 0.11975514746800223, "grad_norm": 3.889848470687866, "learning_rate": 0.00016776464374701025, "loss": 2.9921, "step": 269 }, { "epoch": 0.12020033388981637, "grad_norm": 3.5669052600860596, "learning_rate": 0.00016753328081210245, "loss": 1.9408, "step": 270 }, { "epoch": 0.12064552031163049, "grad_norm": 2.8057496547698975, "learning_rate": 0.00016730125135097735, "loss": 2.0, "step": 271 }, { "epoch": 0.12109070673344463, "grad_norm": 3.1653785705566406, "learning_rate": 0.000167068557653672, "loss": 2.188, "step": 272 }, { "epoch": 0.12153589315525877, "grad_norm": 2.8963727951049805, "learning_rate": 0.0001668352020167793, "loss": 1.8975, "step": 273 }, { "epoch": 0.1219810795770729, "grad_norm": 2.387599468231201, "learning_rate": 0.00016660118674342517, "loss": 1.5727, "step": 274 }, { "epoch": 0.12242626599888703, "grad_norm": 2.26202654838562, "learning_rate": 0.00016636651414324587, "loss": 1.0566, "step": 275 }, { "epoch": 0.12287145242070117, "grad_norm": 2.7368710041046143, "learning_rate": 0.00016613118653236518, "loss": 2.203, "step": 276 }, { "epoch": 0.1233166388425153, "grad_norm": 3.0989296436309814, "learning_rate": 0.0001658952062333717, "loss": 2.1975, "step": 277 }, { "epoch": 0.12376182526432944, "grad_norm": 2.62036395072937, "learning_rate": 0.00016565857557529566, "loss": 1.5915, "step": 278 }, { "epoch": 0.12420701168614358, "grad_norm": 2.7251317501068115, 
"learning_rate": 0.00016542129689358612, "loss": 1.9822, "step": 279 }, { "epoch": 0.1246521981079577, "grad_norm": 2.146713972091675, "learning_rate": 0.0001651833725300879, "loss": 1.5296, "step": 280 }, { "epoch": 0.12509738452977184, "grad_norm": 2.940171718597412, "learning_rate": 0.00016494480483301836, "loss": 1.4362, "step": 281 }, { "epoch": 0.12554257095158597, "grad_norm": 5.681768417358398, "learning_rate": 0.00016470559615694446, "loss": 2.3687, "step": 282 }, { "epoch": 0.12598775737340012, "grad_norm": 3.23738169670105, "learning_rate": 0.00016446574886275913, "loss": 2.8589, "step": 283 }, { "epoch": 0.12643294379521425, "grad_norm": 4.700006008148193, "learning_rate": 0.00016422526531765846, "loss": 2.2194, "step": 284 }, { "epoch": 0.12687813021702837, "grad_norm": 4.28733491897583, "learning_rate": 0.00016398414789511786, "loss": 3.1353, "step": 285 }, { "epoch": 0.12732331663884253, "grad_norm": 2.7851359844207764, "learning_rate": 0.000163742398974869, "loss": 1.5339, "step": 286 }, { "epoch": 0.12776850306065665, "grad_norm": 4.9445624351501465, "learning_rate": 0.00016350002094287609, "loss": 1.8456, "step": 287 }, { "epoch": 0.12821368948247078, "grad_norm": 3.781156539916992, "learning_rate": 0.00016325701619131246, "loss": 1.9967, "step": 288 }, { "epoch": 0.12865887590428493, "grad_norm": 4.377411842346191, "learning_rate": 0.00016301338711853693, "loss": 3.0335, "step": 289 }, { "epoch": 0.12910406232609906, "grad_norm": 3.9102425575256348, "learning_rate": 0.00016276913612907007, "loss": 2.9909, "step": 290 }, { "epoch": 0.12954924874791318, "grad_norm": 4.036067485809326, "learning_rate": 0.00016252426563357055, "loss": 2.1329, "step": 291 }, { "epoch": 0.12999443516972733, "grad_norm": 2.960005283355713, "learning_rate": 0.00016227877804881127, "loss": 1.7441, "step": 292 }, { "epoch": 0.13043962159154146, "grad_norm": 1.907597303390503, "learning_rate": 0.00016203267579765563, "loss": 1.3116, "step": 293 }, { "epoch": 
0.13088480801335559, "grad_norm": 3.022691249847412, "learning_rate": 0.00016178596130903344, "loss": 2.5592, "step": 294 }, { "epoch": 0.13132999443516974, "grad_norm": 4.502274990081787, "learning_rate": 0.00016153863701791717, "loss": 2.3546, "step": 295 }, { "epoch": 0.13177518085698386, "grad_norm": 3.090162754058838, "learning_rate": 0.00016129070536529766, "loss": 2.0902, "step": 296 }, { "epoch": 0.132220367278798, "grad_norm": 2.6545863151550293, "learning_rate": 0.00016104216879816026, "loss": 1.932, "step": 297 }, { "epoch": 0.13266555370061214, "grad_norm": 2.626149892807007, "learning_rate": 0.00016079302976946055, "loss": 1.9929, "step": 298 }, { "epoch": 0.13311074012242627, "grad_norm": 2.5994675159454346, "learning_rate": 0.00016054329073810015, "loss": 2.0321, "step": 299 }, { "epoch": 0.1335559265442404, "grad_norm": 2.5016086101531982, "learning_rate": 0.00016029295416890248, "loss": 1.6849, "step": 300 }, { "epoch": 0.13400111296605455, "grad_norm": 2.2394087314605713, "learning_rate": 0.00016004202253258842, "loss": 1.6157, "step": 301 }, { "epoch": 0.13444629938786867, "grad_norm": 2.9624788761138916, "learning_rate": 0.0001597904983057519, "loss": 1.0671, "step": 302 }, { "epoch": 0.1348914858096828, "grad_norm": 3.160810708999634, "learning_rate": 0.00015953838397083552, "loss": 2.1351, "step": 303 }, { "epoch": 0.13533667223149695, "grad_norm": 2.8223936557769775, "learning_rate": 0.00015928568201610595, "loss": 1.9108, "step": 304 }, { "epoch": 0.13578185865331108, "grad_norm": 2.559141159057617, "learning_rate": 0.00015903239493562948, "loss": 2.1271, "step": 305 }, { "epoch": 0.1362270450751252, "grad_norm": 3.396461009979248, "learning_rate": 0.00015877852522924732, "loss": 1.8675, "step": 306 }, { "epoch": 0.13667223149693936, "grad_norm": 3.4334423542022705, "learning_rate": 0.00015852407540255104, "loss": 1.7112, "step": 307 }, { "epoch": 0.13711741791875348, "grad_norm": 5.243045330047607, "learning_rate": 0.00015826904796685762, 
"loss": 1.5088, "step": 308 }, { "epoch": 0.1375626043405676, "grad_norm": 2.0283901691436768, "learning_rate": 0.00015801344543918495, "loss": 1.5021, "step": 309 }, { "epoch": 0.13800779076238176, "grad_norm": 2.131378173828125, "learning_rate": 0.00015775727034222675, "loss": 2.0285, "step": 310 }, { "epoch": 0.13845297718419589, "grad_norm": 2.8632185459136963, "learning_rate": 0.00015750052520432787, "loss": 2.1884, "step": 311 }, { "epoch": 0.13889816360601, "grad_norm": 3.2297849655151367, "learning_rate": 0.0001572432125594591, "loss": 2.0599, "step": 312 }, { "epoch": 0.13934335002782416, "grad_norm": 4.282790660858154, "learning_rate": 0.00015698533494719238, "loss": 2.6433, "step": 313 }, { "epoch": 0.1397885364496383, "grad_norm": 3.296445369720459, "learning_rate": 0.00015672689491267567, "loss": 2.5076, "step": 314 }, { "epoch": 0.14023372287145242, "grad_norm": 2.099283456802368, "learning_rate": 0.00015646789500660773, "loss": 1.3551, "step": 315 }, { "epoch": 0.14067890929326657, "grad_norm": 2.6336355209350586, "learning_rate": 0.00015620833778521307, "loss": 1.5616, "step": 316 }, { "epoch": 0.1411240957150807, "grad_norm": 1.6293888092041016, "learning_rate": 0.0001559482258102167, "loss": 0.9969, "step": 317 }, { "epoch": 0.14156928213689482, "grad_norm": 2.045325517654419, "learning_rate": 0.00015568756164881882, "loss": 1.6796, "step": 318 }, { "epoch": 0.14201446855870897, "grad_norm": 3.8815982341766357, "learning_rate": 0.00015542634787366942, "loss": 1.6373, "step": 319 }, { "epoch": 0.1424596549805231, "grad_norm": 2.5085458755493164, "learning_rate": 0.00015516458706284303, "loss": 2.3436, "step": 320 }, { "epoch": 0.14290484140233722, "grad_norm": 2.1215457916259766, "learning_rate": 0.0001549022817998132, "loss": 1.2887, "step": 321 }, { "epoch": 0.14335002782415138, "grad_norm": 1.9667388200759888, "learning_rate": 0.00015463943467342693, "loss": 1.7435, "step": 322 }, { "epoch": 0.1437952142459655, "grad_norm": 4.962423324584961, 
"learning_rate": 0.00015437604827787927, "loss": 3.1438, "step": 323 }, { "epoch": 0.14424040066777963, "grad_norm": 3.1294186115264893, "learning_rate": 0.00015411212521268758, "loss": 1.922, "step": 324 }, { "epoch": 0.14468558708959378, "grad_norm": 2.8280906677246094, "learning_rate": 0.00015384766808266602, "loss": 1.8233, "step": 325 }, { "epoch": 0.1451307735114079, "grad_norm": 3.0242607593536377, "learning_rate": 0.00015358267949789966, "loss": 2.0155, "step": 326 }, { "epoch": 0.14557595993322203, "grad_norm": 1.9289848804473877, "learning_rate": 0.00015331716207371888, "loss": 1.6602, "step": 327 }, { "epoch": 0.14602114635503616, "grad_norm": 3.2559800148010254, "learning_rate": 0.0001530511184306734, "loss": 2.0712, "step": 328 }, { "epoch": 0.1464663327768503, "grad_norm": 3.1982977390289307, "learning_rate": 0.00015278455119450664, "loss": 1.9866, "step": 329 }, { "epoch": 0.14691151919866444, "grad_norm": 4.816375255584717, "learning_rate": 0.0001525174629961296, "loss": 3.3914, "step": 330 }, { "epoch": 0.14735670562047856, "grad_norm": 2.63909912109375, "learning_rate": 0.0001522498564715949, "loss": 2.4218, "step": 331 }, { "epoch": 0.14780189204229272, "grad_norm": 2.412900686264038, "learning_rate": 0.00015198173426207094, "loss": 1.9517, "step": 332 }, { "epoch": 0.14824707846410684, "grad_norm": 2.017374038696289, "learning_rate": 0.00015171309901381572, "loss": 1.4106, "step": 333 }, { "epoch": 0.14869226488592097, "grad_norm": 3.504751205444336, "learning_rate": 0.00015144395337815064, "loss": 2.372, "step": 334 }, { "epoch": 0.14913745130773512, "grad_norm": 3.916456460952759, "learning_rate": 0.00015117430001143452, "loss": 1.9819, "step": 335 }, { "epoch": 0.14958263772954924, "grad_norm": 1.9360079765319824, "learning_rate": 0.00015090414157503714, "loss": 1.5803, "step": 336 }, { "epoch": 0.15002782415136337, "grad_norm": 4.142049312591553, "learning_rate": 0.00015063348073531324, "loss": 3.1576, "step": 337 }, { "epoch": 
0.15047301057317752, "grad_norm": 5.079156875610352, "learning_rate": 0.0001503623201635761, "loss": 2.724, "step": 338 }, { "epoch": 0.15091819699499165, "grad_norm": 2.2369110584259033, "learning_rate": 0.000150090662536071, "loss": 1.2975, "step": 339 }, { "epoch": 0.15136338341680577, "grad_norm": 3.156975746154785, "learning_rate": 0.0001498185105339491, "loss": 1.9116, "step": 340 }, { "epoch": 0.15180856983861993, "grad_norm": 2.597970724105835, "learning_rate": 0.00014954586684324078, "loss": 1.4781, "step": 341 }, { "epoch": 0.15225375626043405, "grad_norm": 2.6372573375701904, "learning_rate": 0.00014927273415482915, "loss": 2.4297, "step": 342 }, { "epoch": 0.15269894268224818, "grad_norm": 5.823768615722656, "learning_rate": 0.00014899911516442365, "loss": 3.3218, "step": 343 }, { "epoch": 0.15314412910406233, "grad_norm": 1.7671197652816772, "learning_rate": 0.00014872501257253323, "loss": 1.5805, "step": 344 }, { "epoch": 0.15358931552587646, "grad_norm": 4.895401954650879, "learning_rate": 0.0001484504290844398, "loss": 2.6022, "step": 345 }, { "epoch": 0.15403450194769058, "grad_norm": 2.013131618499756, "learning_rate": 0.00014817536741017152, "loss": 1.9509, "step": 346 }, { "epoch": 0.15447968836950474, "grad_norm": 4.518467426300049, "learning_rate": 0.00014789983026447612, "loss": 3.4165, "step": 347 }, { "epoch": 0.15492487479131886, "grad_norm": 3.0389232635498047, "learning_rate": 0.0001476238203667939, "loss": 2.2441, "step": 348 }, { "epoch": 0.155370061213133, "grad_norm": 2.6705610752105713, "learning_rate": 0.0001473473404412312, "loss": 1.6084, "step": 349 }, { "epoch": 0.15581524763494714, "grad_norm": 2.1380245685577393, "learning_rate": 0.0001470703932165333, "loss": 2.0694, "step": 350 }, { "epoch": 0.15626043405676127, "grad_norm": 2.587960720062256, "learning_rate": 0.00014679298142605734, "loss": 1.7345, "step": 351 }, { "epoch": 0.1567056204785754, "grad_norm": 2.9578754901885986, "learning_rate": 0.00014651510780774583, 
"loss": 2.6031, "step": 352 }, { "epoch": 0.15715080690038954, "grad_norm": 2.348881483078003, "learning_rate": 0.00014623677510409918, "loss": 1.448, "step": 353 }, { "epoch": 0.15759599332220367, "grad_norm": 1.7157777547836304, "learning_rate": 0.00014595798606214882, "loss": 1.8623, "step": 354 }, { "epoch": 0.1580411797440178, "grad_norm": 2.54980206489563, "learning_rate": 0.00014567874343342997, "loss": 2.263, "step": 355 }, { "epoch": 0.15848636616583195, "grad_norm": 3.329082727432251, "learning_rate": 0.00014539904997395468, "loss": 2.8837, "step": 356 }, { "epoch": 0.15893155258764607, "grad_norm": 2.646977186203003, "learning_rate": 0.00014511890844418453, "loss": 2.1387, "step": 357 }, { "epoch": 0.1593767390094602, "grad_norm": 2.9344465732574463, "learning_rate": 0.00014483832160900326, "loss": 1.9743, "step": 358 }, { "epoch": 0.15982192543127435, "grad_norm": 2.056246757507324, "learning_rate": 0.00014455729223768966, "loss": 1.8003, "step": 359 }, { "epoch": 0.16026711185308848, "grad_norm": 2.6241796016693115, "learning_rate": 0.0001442758231038902, "loss": 1.888, "step": 360 }, { "epoch": 0.1607122982749026, "grad_norm": 2.479048490524292, "learning_rate": 0.00014399391698559152, "loss": 2.3365, "step": 361 }, { "epoch": 0.16115748469671676, "grad_norm": 3.3290910720825195, "learning_rate": 0.0001437115766650933, "loss": 2.5672, "step": 362 }, { "epoch": 0.16160267111853088, "grad_norm": 2.197995901107788, "learning_rate": 0.00014342880492898048, "loss": 1.4891, "step": 363 }, { "epoch": 0.162047857540345, "grad_norm": 2.3591930866241455, "learning_rate": 0.0001431456045680959, "loss": 1.7955, "step": 364 }, { "epoch": 0.16249304396215916, "grad_norm": 1.573541283607483, "learning_rate": 0.00014286197837751286, "loss": 1.1049, "step": 365 }, { "epoch": 0.1629382303839733, "grad_norm": 2.382366895675659, "learning_rate": 0.00014257792915650728, "loss": 1.717, "step": 366 }, { "epoch": 0.1633834168057874, "grad_norm": 3.2191505432128906, 
"learning_rate": 0.00014229345970853032, "loss": 1.6779, "step": 367 }, { "epoch": 0.16382860322760157, "grad_norm": 2.418705940246582, "learning_rate": 0.00014200857284118066, "loss": 2.6337, "step": 368 }, { "epoch": 0.1642737896494157, "grad_norm": 5.708262920379639, "learning_rate": 0.00014172327136617656, "loss": 1.7388, "step": 369 }, { "epoch": 0.16471897607122982, "grad_norm": 2.3304173946380615, "learning_rate": 0.00014143755809932845, "loss": 1.4906, "step": 370 }, { "epoch": 0.16516416249304397, "grad_norm": 2.6991124153137207, "learning_rate": 0.00014115143586051088, "loss": 2.3806, "step": 371 }, { "epoch": 0.1656093489148581, "grad_norm": 2.831113815307617, "learning_rate": 0.00014086490747363493, "loss": 2.0115, "step": 372 }, { "epoch": 0.16605453533667222, "grad_norm": 1.9368008375167847, "learning_rate": 0.00014057797576662, "loss": 1.6859, "step": 373 }, { "epoch": 0.16649972175848637, "grad_norm": 2.908867120742798, "learning_rate": 0.00014029064357136628, "loss": 2.2395, "step": 374 }, { "epoch": 0.1669449081803005, "grad_norm": 2.90267014503479, "learning_rate": 0.00014000291372372647, "loss": 1.6788, "step": 375 }, { "epoch": 0.16739009460211463, "grad_norm": 4.940428256988525, "learning_rate": 0.00013971478906347806, "loss": 3.0354, "step": 376 }, { "epoch": 0.16783528102392878, "grad_norm": 3.7296106815338135, "learning_rate": 0.00013942627243429512, "loss": 3.1005, "step": 377 }, { "epoch": 0.1682804674457429, "grad_norm": 1.515480399131775, "learning_rate": 0.00013913736668372026, "loss": 0.9675, "step": 378 }, { "epoch": 0.16872565386755703, "grad_norm": 2.466693878173828, "learning_rate": 0.00013884807466313663, "loss": 2.4831, "step": 379 }, { "epoch": 0.16917084028937118, "grad_norm": 2.1269826889038086, "learning_rate": 0.00013855839922773968, "loss": 1.7787, "step": 380 }, { "epoch": 0.1696160267111853, "grad_norm": 2.2861855030059814, "learning_rate": 0.000138268343236509, "loss": 1.8964, "step": 381 }, { "epoch": 
0.17006121313299943, "grad_norm": 7.59018611907959, "learning_rate": 0.00013797790955218014, "loss": 3.1163, "step": 382 }, { "epoch": 0.1705063995548136, "grad_norm": 3.9868404865264893, "learning_rate": 0.00013768710104121627, "loss": 2.4488, "step": 383 }, { "epoch": 0.1709515859766277, "grad_norm": 1.6617087125778198, "learning_rate": 0.00013739592057378003, "loss": 1.21, "step": 384 }, { "epoch": 0.17139677239844184, "grad_norm": 1.8703160285949707, "learning_rate": 0.0001371043710237051, "loss": 1.5203, "step": 385 }, { "epoch": 0.171841958820256, "grad_norm": 3.014777898788452, "learning_rate": 0.00013681245526846783, "loss": 2.0583, "step": 386 }, { "epoch": 0.17228714524207012, "grad_norm": 1.9843742847442627, "learning_rate": 0.0001365201761891588, "loss": 1.2145, "step": 387 }, { "epoch": 0.17273233166388424, "grad_norm": 2.274531364440918, "learning_rate": 0.00013622753667045457, "loss": 1.6243, "step": 388 }, { "epoch": 0.1731775180856984, "grad_norm": 2.8857228755950928, "learning_rate": 0.00013593453960058908, "loss": 2.1109, "step": 389 }, { "epoch": 0.17362270450751252, "grad_norm": 2.410280227661133, "learning_rate": 0.00013564118787132506, "loss": 1.6705, "step": 390 }, { "epoch": 0.17406789092932665, "grad_norm": 2.83205509185791, "learning_rate": 0.00013534748437792573, "loss": 1.6343, "step": 391 }, { "epoch": 0.1745130773511408, "grad_norm": 3.435878276824951, "learning_rate": 0.0001350534320191259, "loss": 2.7, "step": 392 }, { "epoch": 0.17495826377295493, "grad_norm": 2.2407634258270264, "learning_rate": 0.0001347590336971037, "loss": 1.5352, "step": 393 }, { "epoch": 0.17540345019476905, "grad_norm": 2.265737295150757, "learning_rate": 0.0001344642923174517, "loss": 1.8782, "step": 394 }, { "epoch": 0.1758486366165832, "grad_norm": 3.784402847290039, "learning_rate": 0.00013416921078914835, "loss": 2.0235, "step": 395 }, { "epoch": 0.17629382303839733, "grad_norm": 3.2451465129852295, "learning_rate": 0.00013387379202452917, "loss": 
1.9769, "step": 396 }, { "epoch": 0.17673900946021145, "grad_norm": 2.4640157222747803, "learning_rate": 0.00013357803893925807, "loss": 1.5395, "step": 397 }, { "epoch": 0.1771841958820256, "grad_norm": 2.5422592163085938, "learning_rate": 0.00013328195445229868, "loss": 1.4376, "step": 398 }, { "epoch": 0.17762938230383973, "grad_norm": 2.2282865047454834, "learning_rate": 0.00013298554148588528, "loss": 1.3988, "step": 399 }, { "epoch": 0.17807456872565386, "grad_norm": 3.0729358196258545, "learning_rate": 0.00013268880296549425, "loss": 1.2906, "step": 400 }, { "epoch": 0.178519755147468, "grad_norm": 2.901768684387207, "learning_rate": 0.00013239174181981495, "loss": 1.7331, "step": 401 }, { "epoch": 0.17896494156928214, "grad_norm": 3.0331332683563232, "learning_rate": 0.00013209436098072095, "loss": 1.2559, "step": 402 }, { "epoch": 0.17941012799109626, "grad_norm": 2.4391226768493652, "learning_rate": 0.00013179666338324108, "loss": 2.0366, "step": 403 }, { "epoch": 0.17985531441291042, "grad_norm": 3.8267745971679688, "learning_rate": 0.0001314986519655305, "loss": 2.2878, "step": 404 }, { "epoch": 0.18030050083472454, "grad_norm": 2.9066944122314453, "learning_rate": 0.0001312003296688415, "loss": 2.3687, "step": 405 }, { "epoch": 0.18074568725653867, "grad_norm": 2.571004867553711, "learning_rate": 0.00013090169943749476, "loss": 1.4725, "step": 406 }, { "epoch": 0.18119087367835282, "grad_norm": 2.621811866760254, "learning_rate": 0.0001306027642188501, "loss": 1.4061, "step": 407 }, { "epoch": 0.18163606010016695, "grad_norm": 3.159045457839966, "learning_rate": 0.00013030352696327742, "loss": 2.3518, "step": 408 }, { "epoch": 0.18208124652198107, "grad_norm": 2.415534496307373, "learning_rate": 0.00013000399062412763, "loss": 1.9811, "step": 409 }, { "epoch": 0.18252643294379522, "grad_norm": 3.9818074703216553, "learning_rate": 0.0001297041581577035, "loss": 2.3213, "step": 410 }, { "epoch": 0.18297161936560935, "grad_norm": 2.050266981124878, 
"learning_rate": 0.0001294040325232304, "loss": 1.3752, "step": 411 }, { "epoch": 0.18341680578742348, "grad_norm": 2.8136508464813232, "learning_rate": 0.00012910361668282719, "loss": 1.8344, "step": 412 }, { "epoch": 0.18386199220923763, "grad_norm": 1.4741458892822266, "learning_rate": 0.00012880291360147693, "loss": 1.1769, "step": 413 }, { "epoch": 0.18430717863105175, "grad_norm": 2.022780418395996, "learning_rate": 0.0001285019262469976, "loss": 1.2992, "step": 414 }, { "epoch": 0.18475236505286588, "grad_norm": 3.656526565551758, "learning_rate": 0.00012820065759001293, "loss": 1.7559, "step": 415 }, { "epoch": 0.18519755147468003, "grad_norm": 1.9175677299499512, "learning_rate": 0.00012789911060392294, "loss": 1.2447, "step": 416 }, { "epoch": 0.18564273789649416, "grad_norm": 3.0030620098114014, "learning_rate": 0.0001275972882648746, "loss": 1.7294, "step": 417 }, { "epoch": 0.18608792431830828, "grad_norm": 2.6003453731536865, "learning_rate": 0.00012729519355173254, "loss": 1.9416, "step": 418 }, { "epoch": 0.18653311074012244, "grad_norm": 3.4832310676574707, "learning_rate": 0.00012699282944604967, "loss": 2.2738, "step": 419 }, { "epoch": 0.18697829716193656, "grad_norm": 3.8813576698303223, "learning_rate": 0.00012669019893203759, "loss": 2.3793, "step": 420 }, { "epoch": 0.1874234835837507, "grad_norm": 2.7621383666992188, "learning_rate": 0.0001263873049965373, "loss": 1.966, "step": 421 }, { "epoch": 0.18786867000556484, "grad_norm": 2.440943717956543, "learning_rate": 0.00012608415062898972, "loss": 1.8048, "step": 422 }, { "epoch": 0.18831385642737897, "grad_norm": 1.448773980140686, "learning_rate": 0.000125780738821406, "loss": 1.2906, "step": 423 }, { "epoch": 0.1887590428491931, "grad_norm": 3.421792507171631, "learning_rate": 0.00012547707256833823, "loss": 2.0552, "step": 424 }, { "epoch": 0.18920422927100725, "grad_norm": 3.0485334396362305, "learning_rate": 0.00012517315486684972, "loss": 1.9516, "step": 425 }, { "epoch": 
0.18964941569282137, "grad_norm": 2.456167221069336, "learning_rate": 0.0001248689887164855, "loss": 1.821, "step": 426 }, { "epoch": 0.1900946021146355, "grad_norm": 2.6346614360809326, "learning_rate": 0.00012456457711924266, "loss": 2.2551, "step": 427 }, { "epoch": 0.19053978853644965, "grad_norm": 3.651096820831299, "learning_rate": 0.00012425992307954075, "loss": 2.8791, "step": 428 }, { "epoch": 0.19098497495826378, "grad_norm": 1.784805417060852, "learning_rate": 0.0001239550296041922, "loss": 1.3371, "step": 429 }, { "epoch": 0.1914301613800779, "grad_norm": 3.8563966751098633, "learning_rate": 0.00012364989970237248, "loss": 1.7833, "step": 430 }, { "epoch": 0.19187534780189205, "grad_norm": 4.481318950653076, "learning_rate": 0.00012334453638559057, "loss": 2.8327, "step": 431 }, { "epoch": 0.19232053422370618, "grad_norm": 2.5223546028137207, "learning_rate": 0.00012303894266765908, "loss": 2.3534, "step": 432 }, { "epoch": 0.1927657206455203, "grad_norm": 4.295266151428223, "learning_rate": 0.00012273312156466464, "loss": 2.6395, "step": 433 }, { "epoch": 0.19321090706733446, "grad_norm": 1.6645418405532837, "learning_rate": 0.00012242707609493814, "loss": 1.0704, "step": 434 }, { "epoch": 0.19365609348914858, "grad_norm": 3.143169641494751, "learning_rate": 0.00012212080927902474, "loss": 1.7698, "step": 435 }, { "epoch": 0.1941012799109627, "grad_norm": 2.1398355960845947, "learning_rate": 0.00012181432413965428, "loss": 1.534, "step": 436 }, { "epoch": 0.19454646633277686, "grad_norm": 3.0436127185821533, "learning_rate": 0.00012150762370171136, "loss": 2.5599, "step": 437 }, { "epoch": 0.194991652754591, "grad_norm": 2.8342740535736084, "learning_rate": 0.00012120071099220549, "loss": 2.1801, "step": 438 }, { "epoch": 0.1954368391764051, "grad_norm": 2.6189768314361572, "learning_rate": 0.00012089358904024117, "loss": 1.9147, "step": 439 }, { "epoch": 0.19588202559821927, "grad_norm": 3.389474868774414, "learning_rate": 0.00012058626087698814, 
"loss": 1.3261, "step": 440 }, { "epoch": 0.1963272120200334, "grad_norm": 3.9404706954956055, "learning_rate": 0.00012027872953565125, "loss": 2.1684, "step": 441 }, { "epoch": 0.19677239844184752, "grad_norm": 2.1697638034820557, "learning_rate": 0.00011997099805144069, "loss": 1.4573, "step": 442 }, { "epoch": 0.19721758486366167, "grad_norm": 3.369276762008667, "learning_rate": 0.000119663069461542, "loss": 2.4629, "step": 443 }, { "epoch": 0.1976627712854758, "grad_norm": 3.570361375808716, "learning_rate": 0.00011935494680508606, "loss": 2.0022, "step": 444 }, { "epoch": 0.19810795770728992, "grad_norm": 2.2363481521606445, "learning_rate": 0.00011904663312311901, "loss": 1.5469, "step": 445 }, { "epoch": 0.19855314412910408, "grad_norm": 3.8988430500030518, "learning_rate": 0.00011873813145857249, "loss": 1.5989, "step": 446 }, { "epoch": 0.1989983305509182, "grad_norm": 4.510793685913086, "learning_rate": 0.00011842944485623335, "loss": 1.8663, "step": 447 }, { "epoch": 0.19944351697273233, "grad_norm": 2.888258457183838, "learning_rate": 0.00011812057636271374, "loss": 1.8436, "step": 448 }, { "epoch": 0.19988870339454648, "grad_norm": 3.260255813598633, "learning_rate": 0.000117811529026421, "loss": 1.8474, "step": 449 }, { "epoch": 0.2003338898163606, "grad_norm": 3.7456436157226562, "learning_rate": 0.00011750230589752762, "loss": 2.7048, "step": 450 }, { "epoch": 0.20077907623817473, "grad_norm": 3.437774181365967, "learning_rate": 0.00011719291002794096, "loss": 1.8771, "step": 451 }, { "epoch": 0.20122426265998888, "grad_norm": 2.5170540809631348, "learning_rate": 0.00011688334447127338, "loss": 1.2413, "step": 452 }, { "epoch": 0.201669449081803, "grad_norm": 2.3565282821655273, "learning_rate": 0.00011657361228281199, "loss": 1.5339, "step": 453 }, { "epoch": 0.20211463550361713, "grad_norm": 1.9782150983810425, "learning_rate": 0.00011626371651948838, "loss": 1.6498, "step": 454 }, { "epoch": 0.2025598219254313, "grad_norm": 3.0511531829833984, 
"learning_rate": 0.00011595366023984864, "loss": 2.4361, "step": 455 }, { "epoch": 0.2030050083472454, "grad_norm": 2.8406717777252197, "learning_rate": 0.0001156434465040231, "loss": 2.3422, "step": 456 }, { "epoch": 0.20345019476905954, "grad_norm": 1.845874309539795, "learning_rate": 0.00011533307837369607, "loss": 1.1649, "step": 457 }, { "epoch": 0.20389538119087366, "grad_norm": 2.0563290119171143, "learning_rate": 0.00011502255891207572, "loss": 2.0534, "step": 458 }, { "epoch": 0.20434056761268782, "grad_norm": 2.988293170928955, "learning_rate": 0.00011471189118386375, "loss": 2.1835, "step": 459 }, { "epoch": 0.20478575403450194, "grad_norm": 2.55254864692688, "learning_rate": 0.00011440107825522521, "loss": 1.6571, "step": 460 }, { "epoch": 0.20523094045631607, "grad_norm": 3.426617383956909, "learning_rate": 0.00011409012319375827, "loss": 2.8862, "step": 461 }, { "epoch": 0.20567612687813022, "grad_norm": 2.7885515689849854, "learning_rate": 0.0001137790290684638, "loss": 1.8314, "step": 462 }, { "epoch": 0.20612131329994435, "grad_norm": 3.6172313690185547, "learning_rate": 0.00011346779894971527, "loss": 2.8842, "step": 463 }, { "epoch": 0.20656649972175847, "grad_norm": 2.387355089187622, "learning_rate": 0.00011315643590922827, "loss": 1.9039, "step": 464 }, { "epoch": 0.20701168614357263, "grad_norm": 3.7346434593200684, "learning_rate": 0.0001128449430200303, "loss": 2.7045, "step": 465 }, { "epoch": 0.20745687256538675, "grad_norm": 2.339855909347534, "learning_rate": 0.00011253332335643043, "loss": 2.2928, "step": 466 }, { "epoch": 0.20790205898720088, "grad_norm": 2.6608927249908447, "learning_rate": 0.00011222157999398895, "loss": 1.6835, "step": 467 }, { "epoch": 0.20834724540901503, "grad_norm": 1.9378608465194702, "learning_rate": 0.00011190971600948699, "loss": 1.2669, "step": 468 }, { "epoch": 0.20879243183082916, "grad_norm": 1.9632965326309204, "learning_rate": 0.00011159773448089614, "loss": 1.8431, "step": 469 }, { "epoch": 
0.20923761825264328, "grad_norm": 2.015932083129883, "learning_rate": 0.00011128563848734816, "loss": 2.0745, "step": 470 }, { "epoch": 0.20968280467445743, "grad_norm": 3.485055446624756, "learning_rate": 0.00011097343110910452, "loss": 2.3325, "step": 471 }, { "epoch": 0.21012799109627156, "grad_norm": 1.4672657251358032, "learning_rate": 0.000110661115427526, "loss": 1.0659, "step": 472 }, { "epoch": 0.21057317751808569, "grad_norm": 1.980690360069275, "learning_rate": 0.00011034869452504226, "loss": 1.4239, "step": 473 }, { "epoch": 0.21101836393989984, "grad_norm": 3.352896213531494, "learning_rate": 0.00011003617148512149, "loss": 2.4245, "step": 474 }, { "epoch": 0.21146355036171396, "grad_norm": 2.4032557010650635, "learning_rate": 0.00010972354939223996, "loss": 2.1085, "step": 475 }, { "epoch": 0.2119087367835281, "grad_norm": 2.4151711463928223, "learning_rate": 0.00010941083133185146, "loss": 1.4129, "step": 476 }, { "epoch": 0.21235392320534224, "grad_norm": 2.9115583896636963, "learning_rate": 0.00010909802039035701, "loss": 1.8839, "step": 477 }, { "epoch": 0.21279910962715637, "grad_norm": 2.2955610752105713, "learning_rate": 0.00010878511965507434, "loss": 1.5365, "step": 478 }, { "epoch": 0.2132442960489705, "grad_norm": 2.006725311279297, "learning_rate": 0.00010847213221420736, "loss": 1.1548, "step": 479 }, { "epoch": 0.21368948247078465, "grad_norm": 2.6657001972198486, "learning_rate": 0.00010815906115681578, "loss": 2.425, "step": 480 }, { "epoch": 0.21413466889259877, "grad_norm": 2.453512668609619, "learning_rate": 0.0001078459095727845, "loss": 1.5641, "step": 481 }, { "epoch": 0.2145798553144129, "grad_norm": 2.2438836097717285, "learning_rate": 0.00010753268055279329, "loss": 1.736, "step": 482 }, { "epoch": 0.21502504173622705, "grad_norm": 2.286283254623413, "learning_rate": 0.0001072193771882861, "loss": 1.2366, "step": 483 }, { "epoch": 0.21547022815804118, "grad_norm": 2.357325553894043, "learning_rate": 0.00010690600257144061, 
"loss": 1.9888, "step": 484 }, { "epoch": 0.2159154145798553, "grad_norm": 4.799047470092773, "learning_rate": 0.0001065925597951378, "loss": 2.1147, "step": 485 }, { "epoch": 0.21636060100166946, "grad_norm": 3.4145114421844482, "learning_rate": 0.00010627905195293135, "loss": 2.2844, "step": 486 }, { "epoch": 0.21680578742348358, "grad_norm": 2.5542376041412354, "learning_rate": 0.00010596548213901708, "loss": 1.5962, "step": 487 }, { "epoch": 0.2172509738452977, "grad_norm": 3.4638633728027344, "learning_rate": 0.00010565185344820247, "loss": 2.0081, "step": 488 }, { "epoch": 0.21769616026711186, "grad_norm": 3.015550374984741, "learning_rate": 0.00010533816897587606, "loss": 1.978, "step": 489 }, { "epoch": 0.21814134668892599, "grad_norm": 3.2160592079162598, "learning_rate": 0.00010502443181797697, "loss": 1.6013, "step": 490 }, { "epoch": 0.2185865331107401, "grad_norm": 3.16475510597229, "learning_rate": 0.00010471064507096426, "loss": 2.1781, "step": 491 }, { "epoch": 0.21903171953255426, "grad_norm": 3.783090591430664, "learning_rate": 0.0001043968118317865, "loss": 2.6753, "step": 492 }, { "epoch": 0.2194769059543684, "grad_norm": 3.5325706005096436, "learning_rate": 0.00010408293519785101, "loss": 2.4122, "step": 493 }, { "epoch": 0.21992209237618252, "grad_norm": 2.7166507244110107, "learning_rate": 0.00010376901826699348, "loss": 1.2042, "step": 494 }, { "epoch": 0.22036727879799667, "grad_norm": 3.5661749839782715, "learning_rate": 0.00010345506413744726, "loss": 2.304, "step": 495 }, { "epoch": 0.2208124652198108, "grad_norm": 2.4347705841064453, "learning_rate": 0.00010314107590781284, "loss": 1.6218, "step": 496 }, { "epoch": 0.22125765164162492, "grad_norm": 2.070305109024048, "learning_rate": 0.00010282705667702734, "loss": 2.0498, "step": 497 }, { "epoch": 0.22170283806343907, "grad_norm": 2.101881980895996, "learning_rate": 0.00010251300954433376, "loss": 1.6202, "step": 498 }, { "epoch": 0.2221480244852532, "grad_norm": 1.2417051792144775, 
"learning_rate": 0.00010219893760925052, "loss": 0.9997, "step": 499 }, { "epoch": 0.22259321090706732, "grad_norm": 2.754783868789673, "learning_rate": 0.00010188484397154084, "loss": 2.2696, "step": 500 }, { "epoch": 0.22303839732888148, "grad_norm": 2.7350590229034424, "learning_rate": 0.00010157073173118208, "loss": 1.888, "step": 501 }, { "epoch": 0.2234835837506956, "grad_norm": 2.365584373474121, "learning_rate": 0.00010125660398833528, "loss": 1.5181, "step": 502 }, { "epoch": 0.22392877017250973, "grad_norm": 1.7909997701644897, "learning_rate": 0.00010094246384331442, "loss": 1.8491, "step": 503 }, { "epoch": 0.22437395659432388, "grad_norm": 2.5978925228118896, "learning_rate": 0.00010062831439655591, "loss": 2.0812, "step": 504 }, { "epoch": 0.224819143016138, "grad_norm": 1.8115602731704712, "learning_rate": 0.00010031415874858797, "loss": 1.5947, "step": 505 }, { "epoch": 0.22526432943795213, "grad_norm": 3.372662305831909, "learning_rate": 0.0001, "loss": 1.846, "step": 506 }, { "epoch": 0.22570951585976629, "grad_norm": 2.081552505493164, "learning_rate": 9.968584125141204e-05, "loss": 1.7191, "step": 507 }, { "epoch": 0.2261547022815804, "grad_norm": 2.355083465576172, "learning_rate": 9.937168560344412e-05, "loss": 1.8958, "step": 508 }, { "epoch": 0.22659988870339454, "grad_norm": 2.8670079708099365, "learning_rate": 9.90575361566856e-05, "loss": 2.0792, "step": 509 }, { "epoch": 0.2270450751252087, "grad_norm": 3.7465760707855225, "learning_rate": 9.874339601166473e-05, "loss": 2.3301, "step": 510 }, { "epoch": 0.22749026154702282, "grad_norm": 3.3605611324310303, "learning_rate": 9.842926826881796e-05, "loss": 3.4267, "step": 511 }, { "epoch": 0.22793544796883694, "grad_norm": 2.507578134536743, "learning_rate": 9.81151560284592e-05, "loss": 1.9814, "step": 512 }, { "epoch": 0.2283806343906511, "grad_norm": 4.343908786773682, "learning_rate": 9.78010623907495e-05, "loss": 2.2251, "step": 513 }, { "epoch": 0.22882582081246522, "grad_norm": 
2.300027370452881, "learning_rate": 9.748699045566626e-05, "loss": 1.7964, "step": 514 }, { "epoch": 0.22927100723427934, "grad_norm": 2.0999529361724854, "learning_rate": 9.717294332297268e-05, "loss": 2.1174, "step": 515 }, { "epoch": 0.2297161936560935, "grad_norm": 2.6414802074432373, "learning_rate": 9.685892409218717e-05, "loss": 2.0422, "step": 516 }, { "epoch": 0.23016138007790762, "grad_norm": 2.8301937580108643, "learning_rate": 9.654493586255278e-05, "loss": 2.0245, "step": 517 }, { "epoch": 0.23060656649972175, "grad_norm": 1.4676358699798584, "learning_rate": 9.623098173300654e-05, "loss": 1.3448, "step": 518 }, { "epoch": 0.2310517529215359, "grad_norm": 3.920909881591797, "learning_rate": 9.591706480214901e-05, "loss": 2.7722, "step": 519 }, { "epoch": 0.23149693934335003, "grad_norm": 4.668717861175537, "learning_rate": 9.560318816821353e-05, "loss": 1.8226, "step": 520 }, { "epoch": 0.23194212576516415, "grad_norm": 1.8420554399490356, "learning_rate": 9.528935492903575e-05, "loss": 1.0351, "step": 521 }, { "epoch": 0.2323873121869783, "grad_norm": 3.2772724628448486, "learning_rate": 9.497556818202306e-05, "loss": 2.7337, "step": 522 }, { "epoch": 0.23283249860879243, "grad_norm": 2.2478621006011963, "learning_rate": 9.466183102412395e-05, "loss": 1.9922, "step": 523 }, { "epoch": 0.23327768503060656, "grad_norm": 1.188016653060913, "learning_rate": 9.434814655179755e-05, "loss": 1.0313, "step": 524 }, { "epoch": 0.2337228714524207, "grad_norm": 3.9148828983306885, "learning_rate": 9.403451786098294e-05, "loss": 2.2554, "step": 525 }, { "epoch": 0.23416805787423484, "grad_norm": 2.4724814891815186, "learning_rate": 9.372094804706867e-05, "loss": 1.8411, "step": 526 }, { "epoch": 0.23461324429604896, "grad_norm": 4.861220359802246, "learning_rate": 9.340744020486222e-05, "loss": 1.7102, "step": 527 }, { "epoch": 0.23505843071786311, "grad_norm": 1.9564683437347412, "learning_rate": 9.309399742855942e-05, "loss": 1.119, "step": 528 }, { "epoch": 
0.23550361713967724, "grad_norm": 2.6515750885009766, "learning_rate": 9.278062281171393e-05, "loss": 2.0485, "step": 529 }, { "epoch": 0.23594880356149137, "grad_norm": 3.663888454437256, "learning_rate": 9.246731944720675e-05, "loss": 2.0759, "step": 530 }, { "epoch": 0.23639398998330552, "grad_norm": 1.989341378211975, "learning_rate": 9.215409042721552e-05, "loss": 1.3344, "step": 531 }, { "epoch": 0.23683917640511964, "grad_norm": 2.359320878982544, "learning_rate": 9.184093884318425e-05, "loss": 1.3044, "step": 532 }, { "epoch": 0.23728436282693377, "grad_norm": 4.149430274963379, "learning_rate": 9.152786778579267e-05, "loss": 2.1484, "step": 533 }, { "epoch": 0.23772954924874792, "grad_norm": 3.5896191596984863, "learning_rate": 9.121488034492569e-05, "loss": 2.8477, "step": 534 }, { "epoch": 0.23817473567056205, "grad_norm": 2.467137336730957, "learning_rate": 9.090197960964301e-05, "loss": 1.5795, "step": 535 }, { "epoch": 0.23861992209237617, "grad_norm": 3.346538543701172, "learning_rate": 9.058916866814858e-05, "loss": 2.2605, "step": 536 }, { "epoch": 0.23906510851419033, "grad_norm": 3.115483522415161, "learning_rate": 9.027645060776006e-05, "loss": 1.9898, "step": 537 }, { "epoch": 0.23951029493600445, "grad_norm": 2.0012478828430176, "learning_rate": 8.99638285148785e-05, "loss": 1.3743, "step": 538 }, { "epoch": 0.23995548135781858, "grad_norm": 3.6035563945770264, "learning_rate": 8.965130547495776e-05, "loss": 1.6191, "step": 539 }, { "epoch": 0.24040066777963273, "grad_norm": 1.9701684713363647, "learning_rate": 8.933888457247402e-05, "loss": 1.7474, "step": 540 }, { "epoch": 0.24084585420144686, "grad_norm": 3.0901401042938232, "learning_rate": 8.902656889089548e-05, "loss": 2.3074, "step": 541 }, { "epoch": 0.24129104062326098, "grad_norm": 3.2117514610290527, "learning_rate": 8.871436151265184e-05, "loss": 2.0538, "step": 542 }, { "epoch": 0.24173622704507514, "grad_norm": 2.3342740535736084, "learning_rate": 8.840226551910387e-05, "loss": 
1.1496, "step": 543 }, { "epoch": 0.24218141346688926, "grad_norm": 2.8328073024749756, "learning_rate": 8.809028399051302e-05, "loss": 1.2791, "step": 544 }, { "epoch": 0.2426265998887034, "grad_norm": 2.9056038856506348, "learning_rate": 8.777842000601105e-05, "loss": 2.2656, "step": 545 }, { "epoch": 0.24307178631051754, "grad_norm": 2.6859729290008545, "learning_rate": 8.746667664356956e-05, "loss": 2.004, "step": 546 }, { "epoch": 0.24351697273233167, "grad_norm": 2.280202627182007, "learning_rate": 8.715505697996971e-05, "loss": 2.0117, "step": 547 }, { "epoch": 0.2439621591541458, "grad_norm": 3.593543291091919, "learning_rate": 8.684356409077176e-05, "loss": 2.4651, "step": 548 }, { "epoch": 0.24440734557595994, "grad_norm": 2.155273199081421, "learning_rate": 8.653220105028474e-05, "loss": 0.9082, "step": 549 }, { "epoch": 0.24485253199777407, "grad_norm": 3.196293354034424, "learning_rate": 8.62209709315362e-05, "loss": 2.4122, "step": 550 }, { "epoch": 0.2452977184195882, "grad_norm": 2.0001351833343506, "learning_rate": 8.590987680624174e-05, "loss": 1.6249, "step": 551 }, { "epoch": 0.24574290484140235, "grad_norm": 2.7972264289855957, "learning_rate": 8.559892174477479e-05, "loss": 1.9658, "step": 552 }, { "epoch": 0.24618809126321647, "grad_norm": 2.3301680088043213, "learning_rate": 8.528810881613626e-05, "loss": 2.2419, "step": 553 }, { "epoch": 0.2466332776850306, "grad_norm": 3.339153289794922, "learning_rate": 8.497744108792429e-05, "loss": 2.1285, "step": 554 }, { "epoch": 0.24707846410684475, "grad_norm": 4.093669891357422, "learning_rate": 8.466692162630392e-05, "loss": 1.7672, "step": 555 }, { "epoch": 0.24752365052865888, "grad_norm": 3.632969379425049, "learning_rate": 8.435655349597689e-05, "loss": 1.7379, "step": 556 }, { "epoch": 0.247968836950473, "grad_norm": 2.793414831161499, "learning_rate": 8.404633976015134e-05, "loss": 1.859, "step": 557 }, { "epoch": 0.24841402337228716, "grad_norm": 2.6596624851226807, "learning_rate": 
8.373628348051165e-05, "loss": 1.9903, "step": 558 }, { "epoch": 0.24885920979410128, "grad_norm": 2.221126079559326, "learning_rate": 8.342638771718802e-05, "loss": 2.6768, "step": 559 }, { "epoch": 0.2493043962159154, "grad_norm": 2.701017379760742, "learning_rate": 8.311665552872662e-05, "loss": 1.6827, "step": 560 }, { "epoch": 0.24974958263772956, "grad_norm": 1.7237666845321655, "learning_rate": 8.280708997205904e-05, "loss": 1.5455, "step": 561 }, { "epoch": 0.2501947690595437, "grad_norm": 2.3445723056793213, "learning_rate": 8.249769410247239e-05, "loss": 1.7404, "step": 562 }, { "epoch": 0.2506399554813578, "grad_norm": 1.6703085899353027, "learning_rate": 8.218847097357898e-05, "loss": 1.5689, "step": 563 }, { "epoch": 0.25108514190317194, "grad_norm": 2.455984115600586, "learning_rate": 8.187942363728625e-05, "loss": 1.366, "step": 564 }, { "epoch": 0.25153032832498606, "grad_norm": 2.8952829837799072, "learning_rate": 8.157055514376666e-05, "loss": 2.6693, "step": 565 }, { "epoch": 0.25197551474680024, "grad_norm": 2.0182900428771973, "learning_rate": 8.126186854142752e-05, "loss": 1.7768, "step": 566 }, { "epoch": 0.25242070116861437, "grad_norm": 3.7107861042022705, "learning_rate": 8.095336687688102e-05, "loss": 2.4294, "step": 567 }, { "epoch": 0.2528658875904285, "grad_norm": 2.1526546478271484, "learning_rate": 8.064505319491398e-05, "loss": 2.0468, "step": 568 }, { "epoch": 0.2533110740122426, "grad_norm": 2.4397218227386475, "learning_rate": 8.033693053845801e-05, "loss": 2.1954, "step": 569 }, { "epoch": 0.25375626043405675, "grad_norm": 2.494946241378784, "learning_rate": 8.002900194855932e-05, "loss": 1.9958, "step": 570 }, { "epoch": 0.25420144685587087, "grad_norm": 4.778120994567871, "learning_rate": 7.972127046434878e-05, "loss": 2.9802, "step": 571 }, { "epoch": 0.25464663327768505, "grad_norm": 7.366669178009033, "learning_rate": 7.941373912301189e-05, "loss": 1.6644, "step": 572 }, { "epoch": 0.2550918196994992, "grad_norm": 
2.677617311477661, "learning_rate": 7.910641095975886e-05, "loss": 2.0434, "step": 573 }, { "epoch": 0.2555370061213133, "grad_norm": 1.901222825050354, "learning_rate": 7.879928900779456e-05, "loss": 1.0331, "step": 574 }, { "epoch": 0.25598219254312743, "grad_norm": 4.581434726715088, "learning_rate": 7.849237629828869e-05, "loss": 3.2133, "step": 575 }, { "epoch": 0.25642737896494155, "grad_norm": 3.0468733310699463, "learning_rate": 7.818567586034577e-05, "loss": 1.6714, "step": 576 }, { "epoch": 0.2568725653867557, "grad_norm": 2.401651620864868, "learning_rate": 7.787919072097531e-05, "loss": 1.4202, "step": 577 }, { "epoch": 0.25731775180856986, "grad_norm": 3.3343799114227295, "learning_rate": 7.75729239050619e-05, "loss": 1.9961, "step": 578 }, { "epoch": 0.257762938230384, "grad_norm": 2.1830644607543945, "learning_rate": 7.726687843533538e-05, "loss": 1.8748, "step": 579 }, { "epoch": 0.2582081246521981, "grad_norm": 3.9290175437927246, "learning_rate": 7.696105733234098e-05, "loss": 2.0981, "step": 580 }, { "epoch": 0.25865331107401224, "grad_norm": 2.6848762035369873, "learning_rate": 7.66554636144095e-05, "loss": 2.07, "step": 581 }, { "epoch": 0.25909849749582636, "grad_norm": 3.8313651084899902, "learning_rate": 7.635010029762756e-05, "loss": 1.8205, "step": 582 }, { "epoch": 0.2595436839176405, "grad_norm": 2.1311826705932617, "learning_rate": 7.604497039580785e-05, "loss": 1.5328, "step": 583 }, { "epoch": 0.25998887033945467, "grad_norm": 2.774587631225586, "learning_rate": 7.574007692045928e-05, "loss": 1.3775, "step": 584 }, { "epoch": 0.2604340567612688, "grad_norm": 2.86027193069458, "learning_rate": 7.543542288075739e-05, "loss": 2.5958, "step": 585 }, { "epoch": 0.2608792431830829, "grad_norm": 3.274968147277832, "learning_rate": 7.513101128351454e-05, "loss": 2.5216, "step": 586 }, { "epoch": 0.26132442960489705, "grad_norm": 2.597700595855713, "learning_rate": 7.48268451331503e-05, "loss": 2.5979, "step": 587 }, { "epoch": 
0.26176961602671117, "grad_norm": 1.9474818706512451, "learning_rate": 7.45229274316618e-05, "loss": 1.4696, "step": 588 }, { "epoch": 0.2622148024485253, "grad_norm": 3.209158182144165, "learning_rate": 7.421926117859403e-05, "loss": 2.4483, "step": 589 }, { "epoch": 0.2626599888703395, "grad_norm": 2.9960036277770996, "learning_rate": 7.391584937101033e-05, "loss": 2.4985, "step": 590 }, { "epoch": 0.2631051752921536, "grad_norm": 1.7019833326339722, "learning_rate": 7.361269500346274e-05, "loss": 1.3692, "step": 591 }, { "epoch": 0.26355036171396773, "grad_norm": 3.178032398223877, "learning_rate": 7.330980106796246e-05, "loss": 1.9495, "step": 592 }, { "epoch": 0.26399554813578185, "grad_norm": 3.838534355163574, "learning_rate": 7.300717055395039e-05, "loss": 2.3653, "step": 593 }, { "epoch": 0.264440734557596, "grad_norm": 2.432239294052124, "learning_rate": 7.270480644826749e-05, "loss": 1.0926, "step": 594 }, { "epoch": 0.2648859209794101, "grad_norm": 3.833432197570801, "learning_rate": 7.240271173512546e-05, "loss": 2.6508, "step": 595 }, { "epoch": 0.2653311074012243, "grad_norm": 3.8783698081970215, "learning_rate": 7.210088939607708e-05, "loss": 2.1285, "step": 596 }, { "epoch": 0.2657762938230384, "grad_norm": 3.5125503540039062, "learning_rate": 7.179934240998706e-05, "loss": 2.6731, "step": 597 }, { "epoch": 0.26622148024485254, "grad_norm": 2.598796844482422, "learning_rate": 7.149807375300239e-05, "loss": 2.4519, "step": 598 }, { "epoch": 0.26666666666666666, "grad_norm": 2.9682698249816895, "learning_rate": 7.119708639852312e-05, "loss": 1.7751, "step": 599 }, { "epoch": 0.2671118530884808, "grad_norm": 2.220308780670166, "learning_rate": 7.089638331717284e-05, "loss": 1.7819, "step": 600 }, { "epoch": 0.2675570395102949, "grad_norm": 2.421492338180542, "learning_rate": 7.059596747676962e-05, "loss": 2.3129, "step": 601 }, { "epoch": 0.2680022259321091, "grad_norm": 2.011636257171631, "learning_rate": 7.029584184229653e-05, "loss": 1.264, "step": 
602 }, { "epoch": 0.2684474123539232, "grad_norm": 1.722353458404541, "learning_rate": 6.999600937587239e-05, "loss": 1.4916, "step": 603 }, { "epoch": 0.26889259877573735, "grad_norm": 2.3344128131866455, "learning_rate": 6.969647303672262e-05, "loss": 2.0492, "step": 604 }, { "epoch": 0.26933778519755147, "grad_norm": 10.07349681854248, "learning_rate": 6.939723578114993e-05, "loss": 2.213, "step": 605 }, { "epoch": 0.2697829716193656, "grad_norm": 1.6907522678375244, "learning_rate": 6.909830056250527e-05, "loss": 1.3918, "step": 606 }, { "epoch": 0.2702281580411797, "grad_norm": 3.627065896987915, "learning_rate": 6.879967033115853e-05, "loss": 2.4359, "step": 607 }, { "epoch": 0.2706733444629939, "grad_norm": 1.5359236001968384, "learning_rate": 6.850134803446954e-05, "loss": 1.1468, "step": 608 }, { "epoch": 0.27111853088480803, "grad_norm": 2.957019567489624, "learning_rate": 6.820333661675893e-05, "loss": 1.8714, "step": 609 }, { "epoch": 0.27156371730662215, "grad_norm": 2.876551389694214, "learning_rate": 6.790563901927907e-05, "loss": 1.946, "step": 610 }, { "epoch": 0.2720089037284363, "grad_norm": 2.3107662200927734, "learning_rate": 6.760825818018508e-05, "loss": 2.3033, "step": 611 }, { "epoch": 0.2724540901502504, "grad_norm": 1.7891398668289185, "learning_rate": 6.731119703450577e-05, "loss": 1.8722, "step": 612 }, { "epoch": 0.27289927657206453, "grad_norm": 1.788131594657898, "learning_rate": 6.701445851411472e-05, "loss": 1.3048, "step": 613 }, { "epoch": 0.2733444629938787, "grad_norm": 2.7209267616271973, "learning_rate": 6.671804554770135e-05, "loss": 2.1986, "step": 614 }, { "epoch": 0.27378964941569284, "grad_norm": 3.6044182777404785, "learning_rate": 6.642196106074194e-05, "loss": 2.3438, "step": 615 }, { "epoch": 0.27423483583750696, "grad_norm": 3.2944424152374268, "learning_rate": 6.612620797547087e-05, "loss": 2.0854, "step": 616 }, { "epoch": 0.2746800222593211, "grad_norm": 3.1867449283599854, "learning_rate": 6.583078921085167e-05, 
"loss": 1.9628, "step": 617 }, { "epoch": 0.2751252086811352, "grad_norm": 1.6597883701324463, "learning_rate": 6.55357076825483e-05, "loss": 1.0473, "step": 618 }, { "epoch": 0.27557039510294934, "grad_norm": 4.027482986450195, "learning_rate": 6.52409663028963e-05, "loss": 2.1282, "step": 619 }, { "epoch": 0.2760155815247635, "grad_norm": 2.803708791732788, "learning_rate": 6.494656798087412e-05, "loss": 1.8455, "step": 620 }, { "epoch": 0.27646076794657765, "grad_norm": 3.2381043434143066, "learning_rate": 6.465251562207431e-05, "loss": 2.3748, "step": 621 }, { "epoch": 0.27690595436839177, "grad_norm": 3.580366373062134, "learning_rate": 6.435881212867493e-05, "loss": 2.5845, "step": 622 }, { "epoch": 0.2773511407902059, "grad_norm": 4.986792087554932, "learning_rate": 6.406546039941094e-05, "loss": 2.0802, "step": 623 }, { "epoch": 0.27779632721202, "grad_norm": 1.9550381898880005, "learning_rate": 6.377246332954544e-05, "loss": 1.6063, "step": 624 }, { "epoch": 0.27824151363383415, "grad_norm": 2.505039930343628, "learning_rate": 6.347982381084123e-05, "loss": 1.6332, "step": 625 }, { "epoch": 0.27868670005564833, "grad_norm": 1.7756378650665283, "learning_rate": 6.318754473153221e-05, "loss": 1.3104, "step": 626 }, { "epoch": 0.27913188647746245, "grad_norm": 2.1896908283233643, "learning_rate": 6.289562897629492e-05, "loss": 1.7569, "step": 627 }, { "epoch": 0.2795770728992766, "grad_norm": 1.8758255243301392, "learning_rate": 6.260407942621998e-05, "loss": 1.756, "step": 628 }, { "epoch": 0.2800222593210907, "grad_norm": 3.3655426502227783, "learning_rate": 6.231289895878375e-05, "loss": 2.0823, "step": 629 }, { "epoch": 0.28046744574290483, "grad_norm": 3.293968915939331, "learning_rate": 6.20220904478199e-05, "loss": 1.9035, "step": 630 }, { "epoch": 0.28091263216471896, "grad_norm": 3.167731285095215, "learning_rate": 6.173165676349103e-05, "loss": 1.5836, "step": 631 }, { "epoch": 0.28135781858653314, "grad_norm": 3.473268985748291, "learning_rate": 
6.144160077226036e-05, "loss": 1.7259, "step": 632 }, { "epoch": 0.28180300500834726, "grad_norm": 2.428987979888916, "learning_rate": 6.11519253368634e-05, "loss": 1.5845, "step": 633 }, { "epoch": 0.2822481914301614, "grad_norm": 3.5516490936279297, "learning_rate": 6.086263331627976e-05, "loss": 1.5608, "step": 634 }, { "epoch": 0.2826933778519755, "grad_norm": 4.088588714599609, "learning_rate": 6.05737275657049e-05, "loss": 2.4316, "step": 635 }, { "epoch": 0.28313856427378964, "grad_norm": 3.0667595863342285, "learning_rate": 6.0285210936521955e-05, "loss": 1.3786, "step": 636 }, { "epoch": 0.28358375069560376, "grad_norm": 2.605963945388794, "learning_rate": 5.999708627627354e-05, "loss": 1.7549, "step": 637 }, { "epoch": 0.28402893711741795, "grad_norm": 3.6883420944213867, "learning_rate": 5.9709356428633746e-05, "loss": 2.2541, "step": 638 }, { "epoch": 0.28447412353923207, "grad_norm": 2.800830364227295, "learning_rate": 5.9422024233380013e-05, "loss": 1.8305, "step": 639 }, { "epoch": 0.2849193099610462, "grad_norm": 2.8093373775482178, "learning_rate": 5.913509252636511e-05, "loss": 2.8368, "step": 640 }, { "epoch": 0.2853644963828603, "grad_norm": 2.3125829696655273, "learning_rate": 5.884856413948913e-05, "loss": 1.0694, "step": 641 }, { "epoch": 0.28580968280467445, "grad_norm": 2.6709086894989014, "learning_rate": 5.856244190067159e-05, "loss": 1.6645, "step": 642 }, { "epoch": 0.2862548692264886, "grad_norm": 3.269890546798706, "learning_rate": 5.82767286338235e-05, "loss": 1.9574, "step": 643 }, { "epoch": 0.28670005564830275, "grad_norm": 6.454814910888672, "learning_rate": 5.799142715881938e-05, "loss": 1.9828, "step": 644 }, { "epoch": 0.2871452420701169, "grad_norm": 2.483273983001709, "learning_rate": 5.770654029146969e-05, "loss": 1.1946, "step": 645 }, { "epoch": 0.287590428491931, "grad_norm": 1.7462166547775269, "learning_rate": 5.7422070843492734e-05, "loss": 1.4047, "step": 646 }, { "epoch": 0.28803561491374513, "grad_norm": 
4.918879508972168, "learning_rate": 5.713802162248718e-05, "loss": 2.362, "step": 647 }, { "epoch": 0.28848080133555926, "grad_norm": 2.5706145763397217, "learning_rate": 5.6854395431904094e-05, "loss": 2.0059, "step": 648 }, { "epoch": 0.2889259877573734, "grad_norm": 3.0318686962127686, "learning_rate": 5.657119507101954e-05, "loss": 2.7449, "step": 649 }, { "epoch": 0.28937117417918756, "grad_norm": 1.3213516473770142, "learning_rate": 5.6288423334906735e-05, "loss": 1.1325, "step": 650 }, { "epoch": 0.2898163606010017, "grad_norm": 3.3162622451782227, "learning_rate": 5.6006083014408484e-05, "loss": 2.1244, "step": 651 }, { "epoch": 0.2902615470228158, "grad_norm": 2.8271548748016357, "learning_rate": 5.572417689610987e-05, "loss": 2.6029, "step": 652 }, { "epoch": 0.29070673344462994, "grad_norm": 3.4697909355163574, "learning_rate": 5.544270776231038e-05, "loss": 1.8302, "step": 653 }, { "epoch": 0.29115191986644406, "grad_norm": 2.9080650806427, "learning_rate": 5.5161678390996796e-05, "loss": 2.4099, "step": 654 }, { "epoch": 0.2915971062882582, "grad_norm": 2.8306384086608887, "learning_rate": 5.488109155581549e-05, "loss": 2.1103, "step": 655 }, { "epoch": 0.2920422927100723, "grad_norm": 3.779367208480835, "learning_rate": 5.4600950026045326e-05, "loss": 2.3853, "step": 656 }, { "epoch": 0.2924874791318865, "grad_norm": 3.2461814880371094, "learning_rate": 5.4321256566570036e-05, "loss": 1.6209, "step": 657 }, { "epoch": 0.2929326655537006, "grad_norm": 3.401667356491089, "learning_rate": 5.404201393785122e-05, "loss": 1.8184, "step": 658 }, { "epoch": 0.29337785197551475, "grad_norm": 3.3337910175323486, "learning_rate": 5.3763224895900846e-05, "loss": 2.288, "step": 659 }, { "epoch": 0.2938230383973289, "grad_norm": 4.496949672698975, "learning_rate": 5.348489219225416e-05, "loss": 2.0749, "step": 660 }, { "epoch": 0.294268224819143, "grad_norm": 4.434914588928223, "learning_rate": 5.320701857394268e-05, "loss": 2.3961, "step": 661 }, { "epoch": 
0.2947134112409571, "grad_norm": 1.705828070640564, "learning_rate": 5.292960678346675e-05, "loss": 2.0444, "step": 662 }, { "epoch": 0.2951585976627713, "grad_norm": 3.431540012359619, "learning_rate": 5.265265955876879e-05, "loss": 1.4793, "step": 663 }, { "epoch": 0.29560378408458543, "grad_norm": 2.5585508346557617, "learning_rate": 5.237617963320608e-05, "loss": 1.8598, "step": 664 }, { "epoch": 0.29604897050639956, "grad_norm": 2.0425503253936768, "learning_rate": 5.210016973552391e-05, "loss": 1.6394, "step": 665 }, { "epoch": 0.2964941569282137, "grad_norm": 1.9641846418380737, "learning_rate": 5.182463258982846e-05, "loss": 1.1607, "step": 666 }, { "epoch": 0.2969393433500278, "grad_norm": 2.71909499168396, "learning_rate": 5.1549570915560206e-05, "loss": 1.9043, "step": 667 }, { "epoch": 0.29738452977184193, "grad_norm": 1.7965915203094482, "learning_rate": 5.127498742746675e-05, "loss": 1.4473, "step": 668 }, { "epoch": 0.2978297161936561, "grad_norm": 2.792185068130493, "learning_rate": 5.100088483557634e-05, "loss": 2.1568, "step": 669 }, { "epoch": 0.29827490261547024, "grad_norm": 2.522604465484619, "learning_rate": 5.072726584517086e-05, "loss": 1.4188, "step": 670 }, { "epoch": 0.29872008903728436, "grad_norm": 2.1286098957061768, "learning_rate": 5.045413315675924e-05, "loss": 1.2097, "step": 671 }, { "epoch": 0.2991652754590985, "grad_norm": 2.812614917755127, "learning_rate": 5.018148946605092e-05, "loss": 2.0791, "step": 672 }, { "epoch": 0.2996104618809126, "grad_norm": 1.8847200870513916, "learning_rate": 4.990933746392899e-05, "loss": 1.6048, "step": 673 }, { "epoch": 0.30005564830272674, "grad_norm": 2.700967788696289, "learning_rate": 4.9637679836423924e-05, "loss": 1.9822, "step": 674 }, { "epoch": 0.3005008347245409, "grad_norm": 5.683379650115967, "learning_rate": 4.9366519264686725e-05, "loss": 2.3686, "step": 675 }, { "epoch": 0.30094602114635505, "grad_norm": 2.7878029346466064, "learning_rate": 4.909585842496287e-05, "loss": 2.0972, 
"step": 676 }, { "epoch": 0.3013912075681692, "grad_norm": 1.970714807510376, "learning_rate": 4.8825699988565485e-05, "loss": 1.8908, "step": 677 }, { "epoch": 0.3018363939899833, "grad_norm": 3.5123045444488525, "learning_rate": 4.8556046621849346e-05, "loss": 2.4987, "step": 678 }, { "epoch": 0.3022815804117974, "grad_norm": 3.1310598850250244, "learning_rate": 4.828690098618429e-05, "loss": 2.2711, "step": 679 }, { "epoch": 0.30272676683361155, "grad_norm": 2.584655284881592, "learning_rate": 4.8018265737929044e-05, "loss": 1.9538, "step": 680 }, { "epoch": 0.30317195325542573, "grad_norm": 2.380634307861328, "learning_rate": 4.7750143528405126e-05, "loss": 1.322, "step": 681 }, { "epoch": 0.30361713967723986, "grad_norm": 3.004396438598633, "learning_rate": 4.748253700387042e-05, "loss": 2.1901, "step": 682 }, { "epoch": 0.304062326099054, "grad_norm": 2.5817222595214844, "learning_rate": 4.721544880549337e-05, "loss": 1.9884, "step": 683 }, { "epoch": 0.3045075125208681, "grad_norm": 1.8147649765014648, "learning_rate": 4.694888156932658e-05, "loss": 1.6065, "step": 684 }, { "epoch": 0.30495269894268223, "grad_norm": 2.1298556327819824, "learning_rate": 4.668283792628114e-05, "loss": 2.1248, "step": 685 }, { "epoch": 0.30539788536449636, "grad_norm": 2.9568581581115723, "learning_rate": 4.6417320502100316e-05, "loss": 2.8798, "step": 686 }, { "epoch": 0.30584307178631054, "grad_norm": 3.2827939987182617, "learning_rate": 4.615233191733398e-05, "loss": 2.6892, "step": 687 }, { "epoch": 0.30628825820812466, "grad_norm": 2.5867035388946533, "learning_rate": 4.588787478731242e-05, "loss": 2.105, "step": 688 }, { "epoch": 0.3067334446299388, "grad_norm": 3.4361913204193115, "learning_rate": 4.5623951722120736e-05, "loss": 2.7065, "step": 689 }, { "epoch": 0.3071786310517529, "grad_norm": 3.75927996635437, "learning_rate": 4.5360565326573104e-05, "loss": 1.6896, "step": 690 }, { "epoch": 0.30762381747356704, "grad_norm": 3.1970183849334717, "learning_rate": 
4.5097718200186814e-05, "loss": 2.5938, "step": 691 }, { "epoch": 0.30806900389538117, "grad_norm": 2.316333770751953, "learning_rate": 4.483541293715698e-05, "loss": 2.1561, "step": 692 }, { "epoch": 0.30851419031719535, "grad_norm": 5.406709671020508, "learning_rate": 4.457365212633058e-05, "loss": 1.39, "step": 693 }, { "epoch": 0.3089593767390095, "grad_norm": 1.8153971433639526, "learning_rate": 4.431243835118124e-05, "loss": 1.6361, "step": 694 }, { "epoch": 0.3094045631608236, "grad_norm": 3.764073133468628, "learning_rate": 4.4051774189783315e-05, "loss": 2.0475, "step": 695 }, { "epoch": 0.3098497495826377, "grad_norm": 2.5098719596862793, "learning_rate": 4.379166221478697e-05, "loss": 1.5775, "step": 696 }, { "epoch": 0.31029493600445185, "grad_norm": 2.3882479667663574, "learning_rate": 4.3532104993392306e-05, "loss": 2.1238, "step": 697 }, { "epoch": 0.310740122426266, "grad_norm": 2.4165163040161133, "learning_rate": 4.327310508732437e-05, "loss": 2.2308, "step": 698 }, { "epoch": 0.31118530884808016, "grad_norm": 2.2906312942504883, "learning_rate": 4.301466505280762e-05, "loss": 2.2186, "step": 699 }, { "epoch": 0.3116304952698943, "grad_norm": 3.0668275356292725, "learning_rate": 4.2756787440540936e-05, "loss": 1.7851, "step": 700 }, { "epoch": 0.3120756816917084, "grad_norm": 2.195206642150879, "learning_rate": 4.249947479567218e-05, "loss": 1.6858, "step": 701 }, { "epoch": 0.31252086811352253, "grad_norm": 2.6582798957824707, "learning_rate": 4.224272965777326e-05, "loss": 1.5366, "step": 702 }, { "epoch": 0.31296605453533666, "grad_norm": 1.982883095741272, "learning_rate": 4.1986554560815096e-05, "loss": 1.6462, "step": 703 }, { "epoch": 0.3134112409571508, "grad_norm": 2.2738630771636963, "learning_rate": 4.173095203314241e-05, "loss": 1.7683, "step": 704 }, { "epoch": 0.31385642737896496, "grad_norm": 2.549669027328491, "learning_rate": 4.1475924597449024e-05, "loss": 2.3111, "step": 705 }, { "epoch": 0.3143016138007791, "grad_norm": 
2.8641653060913086, "learning_rate": 4.12214747707527e-05, "loss": 1.7643, "step": 706 }, { "epoch": 0.3147468002225932, "grad_norm": 1.6562206745147705, "learning_rate": 4.096760506437057e-05, "loss": 1.6724, "step": 707 }, { "epoch": 0.31519198664440734, "grad_norm": 2.6927621364593506, "learning_rate": 4.071431798389408e-05, "loss": 1.4428, "step": 708 }, { "epoch": 0.31563717306622147, "grad_norm": 2.4343008995056152, "learning_rate": 4.0461616029164526e-05, "loss": 1.2359, "step": 709 }, { "epoch": 0.3160823594880356, "grad_norm": 2.6044414043426514, "learning_rate": 4.020950169424815e-05, "loss": 1.8076, "step": 710 }, { "epoch": 0.3165275459098498, "grad_norm": 4.747753143310547, "learning_rate": 3.9957977467411615e-05, "loss": 1.983, "step": 711 }, { "epoch": 0.3169727323316639, "grad_norm": 4.754560947418213, "learning_rate": 3.9707045831097555e-05, "loss": 2.7843, "step": 712 }, { "epoch": 0.317417918753478, "grad_norm": 2.5929174423217773, "learning_rate": 3.945670926189987e-05, "loss": 2.3036, "step": 713 }, { "epoch": 0.31786310517529215, "grad_norm": 3.1126303672790527, "learning_rate": 3.920697023053949e-05, "loss": 2.2455, "step": 714 }, { "epoch": 0.3183082915971063, "grad_norm": 3.110023260116577, "learning_rate": 3.895783120183976e-05, "loss": 1.8107, "step": 715 }, { "epoch": 0.3187534780189204, "grad_norm": 2.862748622894287, "learning_rate": 3.8709294634702376e-05, "loss": 1.9125, "step": 716 }, { "epoch": 0.3191986644407346, "grad_norm": 2.372352361679077, "learning_rate": 3.846136298208285e-05, "loss": 1.9124, "step": 717 }, { "epoch": 0.3196438508625487, "grad_norm": 3.2221293449401855, "learning_rate": 3.821403869096658e-05, "loss": 1.8325, "step": 718 }, { "epoch": 0.32008903728436283, "grad_norm": 2.2187700271606445, "learning_rate": 3.796732420234443e-05, "loss": 1.388, "step": 719 }, { "epoch": 0.32053422370617696, "grad_norm": 2.560734510421753, "learning_rate": 3.7721221951188765e-05, "loss": 1.374, "step": 720 }, { "epoch": 
0.3209794101279911, "grad_norm": 2.4914464950561523, "learning_rate": 3.747573436642951e-05, "loss": 2.0269, "step": 721 }, { "epoch": 0.3214245965498052, "grad_norm": 2.3180055618286133, "learning_rate": 3.7230863870929964e-05, "loss": 2.0662, "step": 722 }, { "epoch": 0.3218697829716194, "grad_norm": 2.14855694770813, "learning_rate": 3.698661288146311e-05, "loss": 1.3372, "step": 723 }, { "epoch": 0.3223149693934335, "grad_norm": 3.403369426727295, "learning_rate": 3.674298380868756e-05, "loss": 2.3522, "step": 724 }, { "epoch": 0.32276015581524764, "grad_norm": 6.225140571594238, "learning_rate": 3.649997905712396e-05, "loss": 1.1184, "step": 725 }, { "epoch": 0.32320534223706177, "grad_norm": 1.9929250478744507, "learning_rate": 3.6257601025131026e-05, "loss": 1.4805, "step": 726 }, { "epoch": 0.3236505286588759, "grad_norm": 1.9394887685775757, "learning_rate": 3.601585210488218e-05, "loss": 1.9297, "step": 727 }, { "epoch": 0.32409571508069, "grad_norm": 2.5257530212402344, "learning_rate": 3.577473468234156e-05, "loss": 1.6671, "step": 728 }, { "epoch": 0.3245409015025042, "grad_norm": 3.4196643829345703, "learning_rate": 3.553425113724088e-05, "loss": 2.7964, "step": 729 }, { "epoch": 0.3249860879243183, "grad_norm": 2.4060604572296143, "learning_rate": 3.52944038430556e-05, "loss": 1.7696, "step": 730 }, { "epoch": 0.32543127434613245, "grad_norm": 2.9572129249572754, "learning_rate": 3.5055195166981645e-05, "loss": 1.6015, "step": 731 }, { "epoch": 0.3258764607679466, "grad_norm": 2.2669548988342285, "learning_rate": 3.481662746991214e-05, "loss": 2.3187, "step": 732 }, { "epoch": 0.3263216471897607, "grad_norm": 4.523863792419434, "learning_rate": 3.4578703106413904e-05, "loss": 3.4186, "step": 733 }, { "epoch": 0.3267668336115748, "grad_norm": 2.4980828762054443, "learning_rate": 3.4341424424704375e-05, "loss": 1.3333, "step": 734 }, { "epoch": 0.327212020033389, "grad_norm": 1.4893592596054077, "learning_rate": 3.4104793766628304e-05, "loss": 1.2549, 
"step": 735 }, { "epoch": 0.32765720645520313, "grad_norm": 2.372570753097534, "learning_rate": 3.386881346763483e-05, "loss": 1.3731, "step": 736 }, { "epoch": 0.32810239287701726, "grad_norm": 1.388790249824524, "learning_rate": 3.363348585675414e-05, "loss": 1.0765, "step": 737 }, { "epoch": 0.3285475792988314, "grad_norm": 3.7632062435150146, "learning_rate": 3.339881325657484e-05, "loss": 2.1809, "step": 738 }, { "epoch": 0.3289927657206455, "grad_norm": 3.2846572399139404, "learning_rate": 3.316479798322072e-05, "loss": 1.7677, "step": 739 }, { "epoch": 0.32943795214245963, "grad_norm": 9.13051986694336, "learning_rate": 3.2931442346328004e-05, "loss": 1.6814, "step": 740 }, { "epoch": 0.3298831385642738, "grad_norm": 1.820805311203003, "learning_rate": 3.269874864902269e-05, "loss": 1.8115, "step": 741 }, { "epoch": 0.33032832498608794, "grad_norm": 4.556207180023193, "learning_rate": 3.246671918789755e-05, "loss": 2.7417, "step": 742 }, { "epoch": 0.33077351140790207, "grad_norm": 2.135354995727539, "learning_rate": 3.223535625298979e-05, "loss": 1.5414, "step": 743 }, { "epoch": 0.3312186978297162, "grad_norm": 12.734091758728027, "learning_rate": 3.200466212775808e-05, "loss": 1.7974, "step": 744 }, { "epoch": 0.3316638842515303, "grad_norm": 2.428943634033203, "learning_rate": 3.1774639089060363e-05, "loss": 1.732, "step": 745 }, { "epoch": 0.33210907067334444, "grad_norm": 2.2564094066619873, "learning_rate": 3.154528940713113e-05, "loss": 1.8338, "step": 746 }, { "epoch": 0.3325542570951586, "grad_norm": 2.3983113765716553, "learning_rate": 3.1316615345559185e-05, "loss": 1.7076, "step": 747 }, { "epoch": 0.33299944351697275, "grad_norm": 1.7274196147918701, "learning_rate": 3.108861916126518e-05, "loss": 1.2812, "step": 748 }, { "epoch": 0.3334446299387869, "grad_norm": 2.3063457012176514, "learning_rate": 3.086130310447937e-05, "loss": 2.0274, "step": 749 }, { "epoch": 0.333889816360601, "grad_norm": 2.640960931777954, "learning_rate": 
3.063466941871952e-05, "loss": 1.4717, "step": 750 }, { "epoch": 0.3343350027824151, "grad_norm": 2.7002780437469482, "learning_rate": 3.0408720340768572e-05, "loss": 2.0358, "step": 751 }, { "epoch": 0.33478018920422925, "grad_norm": 2.129476547241211, "learning_rate": 3.018345810065275e-05, "loss": 1.7658, "step": 752 }, { "epoch": 0.33522537562604343, "grad_norm": 3.172895669937134, "learning_rate": 2.9958884921619367e-05, "loss": 2.3525, "step": 753 }, { "epoch": 0.33567056204785756, "grad_norm": 3.458550453186035, "learning_rate": 2.9735003020115092e-05, "loss": 2.2845, "step": 754 }, { "epoch": 0.3361157484696717, "grad_norm": 2.624788284301758, "learning_rate": 2.9511814605763855e-05, "loss": 2.0448, "step": 755 }, { "epoch": 0.3365609348914858, "grad_norm": 4.643282413482666, "learning_rate": 2.9289321881345254e-05, "loss": 2.8465, "step": 756 }, { "epoch": 0.33700612131329993, "grad_norm": 2.141824960708618, "learning_rate": 2.9067527042772636e-05, "loss": 1.795, "step": 757 }, { "epoch": 0.33745130773511406, "grad_norm": 1.945853352546692, "learning_rate": 2.8846432279071467e-05, "loss": 1.4118, "step": 758 }, { "epoch": 0.33789649415692824, "grad_norm": 2.440788507461548, "learning_rate": 2.8626039772357882e-05, "loss": 1.8631, "step": 759 }, { "epoch": 0.33834168057874237, "grad_norm": 2.524973154067993, "learning_rate": 2.840635169781688e-05, "loss": 1.9311, "step": 760 }, { "epoch": 0.3387868670005565, "grad_norm": 2.88173508644104, "learning_rate": 2.8187370223681132e-05, "loss": 1.8423, "step": 761 }, { "epoch": 0.3392320534223706, "grad_norm": 9.78935718536377, "learning_rate": 2.7969097511209308e-05, "loss": 1.7553, "step": 762 }, { "epoch": 0.33967723984418474, "grad_norm": 4.4023356437683105, "learning_rate": 2.775153571466502e-05, "loss": 2.0394, "step": 763 }, { "epoch": 0.34012242626599887, "grad_norm": 2.8718042373657227, "learning_rate": 2.753468698129533e-05, "loss": 1.5894, "step": 764 }, { "epoch": 0.34056761268781305, "grad_norm": 
1.903527855873108, "learning_rate": 2.7318553451309726e-05, "loss": 1.3534, "step": 765 }, { "epoch": 0.3410127991096272, "grad_norm": 2.23759388923645, "learning_rate": 2.7103137257858868e-05, "loss": 1.871, "step": 766 }, { "epoch": 0.3414579855314413, "grad_norm": 3.0346007347106934, "learning_rate": 2.688844052701359e-05, "loss": 2.4884, "step": 767 }, { "epoch": 0.3419031719532554, "grad_norm": 2.180615186691284, "learning_rate": 2.6674465377744017e-05, "loss": 1.3251, "step": 768 }, { "epoch": 0.34234835837506955, "grad_norm": 5.008700847625732, "learning_rate": 2.646121392189841e-05, "loss": 2.3742, "step": 769 }, { "epoch": 0.3427935447968837, "grad_norm": 2.506002426147461, "learning_rate": 2.624868826418262e-05, "loss": 1.756, "step": 770 }, { "epoch": 0.34323873121869786, "grad_norm": 3.1411874294281006, "learning_rate": 2.603689050213902e-05, "loss": 2.8913, "step": 771 }, { "epoch": 0.343683917640512, "grad_norm": 2.647324800491333, "learning_rate": 2.582582272612609e-05, "loss": 2.6637, "step": 772 }, { "epoch": 0.3441291040623261, "grad_norm": 2.1316630840301514, "learning_rate": 2.561548701929749e-05, "loss": 1.3719, "step": 773 }, { "epoch": 0.34457429048414023, "grad_norm": 3.7572953701019287, "learning_rate": 2.540588545758179e-05, "loss": 1.7199, "step": 774 }, { "epoch": 0.34501947690595436, "grad_norm": 7.4356369972229, "learning_rate": 2.5197020109661772e-05, "loss": 1.4686, "step": 775 }, { "epoch": 0.3454646633277685, "grad_norm": 2.2626800537109375, "learning_rate": 2.4988893036954043e-05, "loss": 1.8888, "step": 776 }, { "epoch": 0.34590984974958267, "grad_norm": 3.9464282989501953, "learning_rate": 2.4781506293588873e-05, "loss": 2.2344, "step": 777 }, { "epoch": 0.3463550361713968, "grad_norm": 3.29831862449646, "learning_rate": 2.4574861926389615e-05, "loss": 2.4997, "step": 778 }, { "epoch": 0.3468002225932109, "grad_norm": 2.7223682403564453, "learning_rate": 2.436896197485282e-05, "loss": 2.3799, "step": 779 }, { "epoch": 
0.34724540901502504, "grad_norm": 2.570220947265625, "learning_rate": 2.4163808471127812e-05, "loss": 1.6605, "step": 780 }, { "epoch": 0.34769059543683917, "grad_norm": 2.4514687061309814, "learning_rate": 2.3959403439996907e-05, "loss": 1.3153, "step": 781 }, { "epoch": 0.3481357818586533, "grad_norm": 4.2457733154296875, "learning_rate": 2.37557488988552e-05, "loss": 2.2081, "step": 782 }, { "epoch": 0.3485809682804674, "grad_norm": 2.6355888843536377, "learning_rate": 2.3552846857690846e-05, "loss": 2.1335, "step": 783 }, { "epoch": 0.3490261547022816, "grad_norm": 3.205240488052368, "learning_rate": 2.3350699319065026e-05, "loss": 2.0054, "step": 784 }, { "epoch": 0.3494713411240957, "grad_norm": 2.160902976989746, "learning_rate": 2.3149308278092342e-05, "loss": 1.7905, "step": 785 }, { "epoch": 0.34991652754590985, "grad_norm": 2.5220730304718018, "learning_rate": 2.2948675722421086e-05, "loss": 1.7417, "step": 786 }, { "epoch": 0.350361713967724, "grad_norm": 4.050189018249512, "learning_rate": 2.2748803632213557e-05, "loss": 2.9296, "step": 787 }, { "epoch": 0.3508069003895381, "grad_norm": 1.7380865812301636, "learning_rate": 2.254969398012663e-05, "loss": 1.3506, "step": 788 }, { "epoch": 0.3512520868113522, "grad_norm": 3.115523099899292, "learning_rate": 2.235134873129213e-05, "loss": 2.3948, "step": 789 }, { "epoch": 0.3516972732331664, "grad_norm": 2.615837812423706, "learning_rate": 2.2153769843297667e-05, "loss": 1.865, "step": 790 }, { "epoch": 0.35214245965498053, "grad_norm": 2.9471611976623535, "learning_rate": 2.195695926616702e-05, "loss": 1.7431, "step": 791 }, { "epoch": 0.35258764607679466, "grad_norm": 2.246422529220581, "learning_rate": 2.1760918942341192e-05, "loss": 1.0834, "step": 792 }, { "epoch": 0.3530328324986088, "grad_norm": 3.3071248531341553, "learning_rate": 2.1565650806658975e-05, "loss": 2.3679, "step": 793 }, { "epoch": 0.3534780189204229, "grad_norm": 1.6390622854232788, "learning_rate": 2.137115678633811e-05, "loss": 
1.3217, "step": 794 }, { "epoch": 0.35392320534223703, "grad_norm": 4.03280782699585, "learning_rate": 2.1177438800956007e-05, "loss": 1.9452, "step": 795 }, { "epoch": 0.3543683917640512, "grad_norm": 15.388529777526855, "learning_rate": 2.098449876243096e-05, "loss": 2.3951, "step": 796 }, { "epoch": 0.35481357818586534, "grad_norm": 3.017653703689575, "learning_rate": 2.07923385750033e-05, "loss": 1.4409, "step": 797 }, { "epoch": 0.35525876460767947, "grad_norm": 2.619394540786743, "learning_rate": 2.0600960135216462e-05, "loss": 1.7445, "step": 798 }, { "epoch": 0.3557039510294936, "grad_norm": 3.4079883098602295, "learning_rate": 2.0410365331898416e-05, "loss": 2.4086, "step": 799 }, { "epoch": 0.3561491374513077, "grad_norm": 1.3419363498687744, "learning_rate": 2.0220556046142893e-05, "loss": 0.9088, "step": 800 }, { "epoch": 0.35659432387312184, "grad_norm": 3.1316444873809814, "learning_rate": 2.0031534151290943e-05, "loss": 1.4489, "step": 801 }, { "epoch": 0.357039510294936, "grad_norm": 2.7865233421325684, "learning_rate": 1.9843301512912327e-05, "loss": 1.5738, "step": 802 }, { "epoch": 0.35748469671675015, "grad_norm": 2.195303201675415, "learning_rate": 1.965585998878724e-05, "loss": 1.1416, "step": 803 }, { "epoch": 0.3579298831385643, "grad_norm": 1.7528187036514282, "learning_rate": 1.946921142888781e-05, "loss": 1.3798, "step": 804 }, { "epoch": 0.3583750695603784, "grad_norm": 1.894110083580017, "learning_rate": 1.928335767535997e-05, "loss": 1.6355, "step": 805 }, { "epoch": 0.3588202559821925, "grad_norm": 3.0293827056884766, "learning_rate": 1.9098300562505266e-05, "loss": 2.377, "step": 806 }, { "epoch": 0.35926544240400665, "grad_norm": 3.3845605850219727, "learning_rate": 1.891404191676265e-05, "loss": 1.7129, "step": 807 }, { "epoch": 0.35971062882582083, "grad_norm": 2.3603949546813965, "learning_rate": 1.8730583556690605e-05, "loss": 1.7646, "step": 808 }, { "epoch": 0.36015581524763496, "grad_norm": 2.9297170639038086, 
"learning_rate": 1.854792729294905e-05, "loss": 2.088, "step": 809 }, { "epoch": 0.3606010016694491, "grad_norm": 2.4962754249572754, "learning_rate": 1.8366074928281607e-05, "loss": 1.7159, "step": 810 }, { "epoch": 0.3610461880912632, "grad_norm": 2.170757532119751, "learning_rate": 1.818502825749764e-05, "loss": 1.5805, "step": 811 }, { "epoch": 0.36149137451307733, "grad_norm": 3.127051830291748, "learning_rate": 1.8004789067454764e-05, "loss": 2.4222, "step": 812 }, { "epoch": 0.36193656093489146, "grad_norm": 2.367252826690674, "learning_rate": 1.7825359137040988e-05, "loss": 2.109, "step": 813 }, { "epoch": 0.36238174735670564, "grad_norm": 1.9050239324569702, "learning_rate": 1.7646740237157256e-05, "loss": 1.2665, "step": 814 }, { "epoch": 0.36282693377851977, "grad_norm": 2.2031283378601074, "learning_rate": 1.7468934130700044e-05, "loss": 1.2599, "step": 815 }, { "epoch": 0.3632721202003339, "grad_norm": 3.291349172592163, "learning_rate": 1.7291942572543807e-05, "loss": 2.1776, "step": 816 }, { "epoch": 0.363717306622148, "grad_norm": 4.794470310211182, "learning_rate": 1.7115767309523812e-05, "loss": 2.1294, "step": 817 }, { "epoch": 0.36416249304396214, "grad_norm": 1.8986549377441406, "learning_rate": 1.6940410080418723e-05, "loss": 1.5655, "step": 818 }, { "epoch": 0.36460767946577627, "grad_norm": 3.300102472305298, "learning_rate": 1.6765872615933677e-05, "loss": 2.4501, "step": 819 }, { "epoch": 0.36505286588759045, "grad_norm": 2.3774209022521973, "learning_rate": 1.6592156638682886e-05, "loss": 2.0107, "step": 820 }, { "epoch": 0.3654980523094046, "grad_norm": 1.869045615196228, "learning_rate": 1.6419263863172997e-05, "loss": 1.6384, "step": 821 }, { "epoch": 0.3659432387312187, "grad_norm": 2.376424551010132, "learning_rate": 1.6247195995785837e-05, "loss": 1.6731, "step": 822 }, { "epoch": 0.3663884251530328, "grad_norm": 2.211214303970337, "learning_rate": 1.6075954734761845e-05, "loss": 1.6778, "step": 823 }, { "epoch": 
0.36683361157484695, "grad_norm": 2.649627685546875, "learning_rate": 1.5905541770183096e-05, "loss": 2.41, "step": 824 }, { "epoch": 0.3672787979966611, "grad_norm": 1.5385890007019043, "learning_rate": 1.5735958783956794e-05, "loss": 1.1177, "step": 825 }, { "epoch": 0.36772398441847526, "grad_norm": 2.307979106903076, "learning_rate": 1.5567207449798515e-05, "loss": 1.6581, "step": 826 }, { "epoch": 0.3681691708402894, "grad_norm": 2.140033006668091, "learning_rate": 1.539928943321579e-05, "loss": 1.9136, "step": 827 }, { "epoch": 0.3686143572621035, "grad_norm": 3.384453773498535, "learning_rate": 1.5232206391491699e-05, "loss": 2.8381, "step": 828 }, { "epoch": 0.36905954368391763, "grad_norm": 2.655250072479248, "learning_rate": 1.5065959973668353e-05, "loss": 1.5791, "step": 829 }, { "epoch": 0.36950473010573176, "grad_norm": 1.3919587135314941, "learning_rate": 1.4900551820530828e-05, "loss": 1.1324, "step": 830 }, { "epoch": 0.3699499165275459, "grad_norm": 4.468016147613525, "learning_rate": 1.4735983564590783e-05, "loss": 2.4508, "step": 831 }, { "epoch": 0.37039510294936007, "grad_norm": 2.089130163192749, "learning_rate": 1.4572256830070497e-05, "loss": 1.5146, "step": 832 }, { "epoch": 0.3708402893711742, "grad_norm": 1.788549780845642, "learning_rate": 1.4409373232886702e-05, "loss": 1.2323, "step": 833 }, { "epoch": 0.3712854757929883, "grad_norm": 3.411046266555786, "learning_rate": 1.4247334380634792e-05, "loss": 2.3234, "step": 834 }, { "epoch": 0.37173066221480244, "grad_norm": 3.8933844566345215, "learning_rate": 1.4086141872572789e-05, "loss": 1.9897, "step": 835 }, { "epoch": 0.37217584863661657, "grad_norm": 2.343925952911377, "learning_rate": 1.3925797299605647e-05, "loss": 1.4688, "step": 836 }, { "epoch": 0.3726210350584307, "grad_norm": 3.1801838874816895, "learning_rate": 1.3766302244269624e-05, "loss": 2.4004, "step": 837 }, { "epoch": 0.3730662214802449, "grad_norm": 1.550551414489746, "learning_rate": 1.3607658280716473e-05, "loss": 
1.0341, "step": 838 }, { "epoch": 0.373511407902059, "grad_norm": 2.117818593978882, "learning_rate": 1.3449866974698122e-05, "loss": 1.7987, "step": 839 }, { "epoch": 0.3739565943238731, "grad_norm": 4.381692409515381, "learning_rate": 1.3292929883550998e-05, "loss": 2.4703, "step": 840 }, { "epoch": 0.37440178074568725, "grad_norm": 2.0128989219665527, "learning_rate": 1.3136848556180892e-05, "loss": 1.8252, "step": 841 }, { "epoch": 0.3748469671675014, "grad_norm": 1.8894273042678833, "learning_rate": 1.2981624533047432e-05, "loss": 1.7092, "step": 842 }, { "epoch": 0.3752921535893155, "grad_norm": 3.7159740924835205, "learning_rate": 1.2827259346149122e-05, "loss": 2.0822, "step": 843 }, { "epoch": 0.3757373400111297, "grad_norm": 3.0898549556732178, "learning_rate": 1.2673754519008008e-05, "loss": 2.6043, "step": 844 }, { "epoch": 0.3761825264329438, "grad_norm": 2.4676640033721924, "learning_rate": 1.2521111566654731e-05, "loss": 2.4497, "step": 845 }, { "epoch": 0.37662771285475793, "grad_norm": 3.5587337017059326, "learning_rate": 1.2369331995613665e-05, "loss": 1.6656, "step": 846 }, { "epoch": 0.37707289927657206, "grad_norm": 7.141547679901123, "learning_rate": 1.2218417303887842e-05, "loss": 1.7547, "step": 847 }, { "epoch": 0.3775180856983862, "grad_norm": 1.7326031923294067, "learning_rate": 1.206836898094439e-05, "loss": 1.4759, "step": 848 }, { "epoch": 0.3779632721202003, "grad_norm": 3.1554577350616455, "learning_rate": 1.191918850769964e-05, "loss": 2.4164, "step": 849 }, { "epoch": 0.3784084585420145, "grad_norm": 1.9731732606887817, "learning_rate": 1.1770877356504683e-05, "loss": 1.0614, "step": 850 }, { "epoch": 0.3788536449638286, "grad_norm": 2.997244119644165, "learning_rate": 1.1623436991130654e-05, "loss": 2.0257, "step": 851 }, { "epoch": 0.37929883138564274, "grad_norm": 5.286291599273682, "learning_rate": 1.1476868866754486e-05, "loss": 2.0731, "step": 852 }, { "epoch": 0.37974401780745687, "grad_norm": 2.4582889080047607, 
"learning_rate": 1.1331174429944347e-05, "loss": 1.406, "step": 853 }, { "epoch": 0.380189204229271, "grad_norm": 2.530067205429077, "learning_rate": 1.1186355118645554e-05, "loss": 1.6258, "step": 854 }, { "epoch": 0.3806343906510851, "grad_norm": 3.028555393218994, "learning_rate": 1.1042412362166222e-05, "loss": 2.8568, "step": 855 }, { "epoch": 0.3810795770728993, "grad_norm": 2.7910170555114746, "learning_rate": 1.0899347581163221e-05, "loss": 1.8254, "step": 856 }, { "epoch": 0.3815247634947134, "grad_norm": 39.85447692871094, "learning_rate": 1.0757162187628222e-05, "loss": 2.8449, "step": 857 }, { "epoch": 0.38196994991652755, "grad_norm": 3.291005849838257, "learning_rate": 1.0615857584873623e-05, "loss": 2.0754, "step": 858 }, { "epoch": 0.3824151363383417, "grad_norm": 4.250852584838867, "learning_rate": 1.0475435167518843e-05, "loss": 2.1429, "step": 859 }, { "epoch": 0.3828603227601558, "grad_norm": 2.714912176132202, "learning_rate": 1.0335896321476413e-05, "loss": 1.7616, "step": 860 }, { "epoch": 0.38330550918196993, "grad_norm": 2.5076630115509033, "learning_rate": 1.0197242423938446e-05, "loss": 1.864, "step": 861 }, { "epoch": 0.3837506956037841, "grad_norm": 3.874208450317383, "learning_rate": 1.0059474843362892e-05, "loss": 3.0877, "step": 862 }, { "epoch": 0.38419588202559823, "grad_norm": 3.2415525913238525, "learning_rate": 9.922594939460194e-06, "loss": 2.3881, "step": 863 }, { "epoch": 0.38464106844741236, "grad_norm": 2.6872646808624268, "learning_rate": 9.786604063179728e-06, "loss": 1.953, "step": 864 }, { "epoch": 0.3850862548692265, "grad_norm": 2.7855000495910645, "learning_rate": 9.651503556696516e-06, "loss": 2.1969, "step": 865 }, { "epoch": 0.3855314412910406, "grad_norm": 3.2661259174346924, "learning_rate": 9.517294753398064e-06, "loss": 2.2943, "step": 866 }, { "epoch": 0.38597662771285474, "grad_norm": 2.2386388778686523, "learning_rate": 9.383978977871021e-06, "loss": 1.3356, "step": 867 }, { "epoch": 0.3864218141346689, 
"grad_norm": 2.699187755584717, "learning_rate": 9.251557545888312e-06, "loss": 2.4968, "step": 868 }, { "epoch": 0.38686700055648304, "grad_norm": 2.743082046508789, "learning_rate": 9.120031764395987e-06, "loss": 4.0225, "step": 869 }, { "epoch": 0.38731218697829717, "grad_norm": 2.0932228565216064, "learning_rate": 8.989402931500434e-06, "loss": 1.4646, "step": 870 }, { "epoch": 0.3877573734001113, "grad_norm": 2.3609516620635986, "learning_rate": 8.85967233645547e-06, "loss": 2.0499, "step": 871 }, { "epoch": 0.3882025598219254, "grad_norm": 8.27224063873291, "learning_rate": 8.730841259649725e-06, "loss": 2.7773, "step": 872 }, { "epoch": 0.38864774624373954, "grad_norm": 2.892641544342041, "learning_rate": 8.602910972593892e-06, "loss": 2.2208, "step": 873 }, { "epoch": 0.3890929326655537, "grad_norm": 2.17576265335083, "learning_rate": 8.475882737908248e-06, "loss": 1.9523, "step": 874 }, { "epoch": 0.38953811908736785, "grad_norm": 3.0030341148376465, "learning_rate": 8.34975780931021e-06, "loss": 2.3068, "step": 875 }, { "epoch": 0.389983305509182, "grad_norm": 3.5457253456115723, "learning_rate": 8.224537431601886e-06, "loss": 1.9086, "step": 876 }, { "epoch": 0.3904284919309961, "grad_norm": 2.699582815170288, "learning_rate": 8.100222840657878e-06, "loss": 2.1184, "step": 877 }, { "epoch": 0.3908736783528102, "grad_norm": 2.5596392154693604, "learning_rate": 7.976815263412963e-06, "loss": 1.7771, "step": 878 }, { "epoch": 0.39131886477462435, "grad_norm": 2.6684186458587646, "learning_rate": 7.854315917850163e-06, "loss": 1.7569, "step": 879 }, { "epoch": 0.39176405119643853, "grad_norm": 2.5746593475341797, "learning_rate": 7.73272601298851e-06, "loss": 2.0132, "step": 880 }, { "epoch": 0.39220923761825266, "grad_norm": 2.5082550048828125, "learning_rate": 7.612046748871327e-06, "loss": 1.7144, "step": 881 }, { "epoch": 0.3926544240400668, "grad_norm": 1.8464329242706299, "learning_rate": 7.492279316554207e-06, "loss": 1.1433, "step": 882 }, { "epoch": 
0.3930996104618809, "grad_norm": 3.3372461795806885, "learning_rate": 7.3734248980933395e-06, "loss": 2.8916, "step": 883 }, { "epoch": 0.39354479688369504, "grad_norm": 3.623055934906006, "learning_rate": 7.255484666533874e-06, "loss": 1.7107, "step": 884 }, { "epoch": 0.39398998330550916, "grad_norm": 2.4824955463409424, "learning_rate": 7.138459785898266e-06, "loss": 1.79, "step": 885 }, { "epoch": 0.39443516972732334, "grad_norm": 2.6194679737091064, "learning_rate": 7.022351411174866e-06, "loss": 1.6792, "step": 886 }, { "epoch": 0.39488035614913747, "grad_norm": 2.1351640224456787, "learning_rate": 6.907160688306425e-06, "loss": 1.992, "step": 887 }, { "epoch": 0.3953255425709516, "grad_norm": 3.6620707511901855, "learning_rate": 6.7928887541789055e-06, "loss": 1.9295, "step": 888 }, { "epoch": 0.3957707289927657, "grad_norm": 2.735880136489868, "learning_rate": 6.679536736610137e-06, "loss": 2.5191, "step": 889 }, { "epoch": 0.39621591541457984, "grad_norm": 2.25370192527771, "learning_rate": 6.5671057543387985e-06, "loss": 1.7509, "step": 890 }, { "epoch": 0.39666110183639397, "grad_norm": 3.4820597171783447, "learning_rate": 6.455596917013273e-06, "loss": 2.0117, "step": 891 }, { "epoch": 0.39710628825820815, "grad_norm": 2.8001556396484375, "learning_rate": 6.345011325180772e-06, "loss": 1.6458, "step": 892 }, { "epoch": 0.3975514746800223, "grad_norm": 1.6623258590698242, "learning_rate": 6.235350070276447e-06, "loss": 1.6371, "step": 893 }, { "epoch": 0.3979966611018364, "grad_norm": 2.0735392570495605, "learning_rate": 6.126614234612593e-06, "loss": 1.8299, "step": 894 }, { "epoch": 0.3984418475236505, "grad_norm": 2.833833694458008, "learning_rate": 6.018804891368035e-06, "loss": 1.6262, "step": 895 }, { "epoch": 0.39888703394546465, "grad_norm": 1.4123960733413696, "learning_rate": 5.911923104577455e-06, "loss": 1.2736, "step": 896 }, { "epoch": 0.3993322203672788, "grad_norm": 4.113855838775635, "learning_rate": 5.805969929120947e-06, "loss": 2.449, 
"step": 897 }, { "epoch": 0.39977740678909296, "grad_norm": 4.152754306793213, "learning_rate": 5.700946410713548e-06, "loss": 1.9736, "step": 898 }, { "epoch": 0.4002225932109071, "grad_norm": 2.434448480606079, "learning_rate": 5.5968535858950345e-06, "loss": 2.2586, "step": 899 }, { "epoch": 0.4006677796327212, "grad_norm": 2.730743169784546, "learning_rate": 5.49369248201953e-06, "loss": 2.1945, "step": 900 }, { "epoch": 0.40111296605453534, "grad_norm": 2.3305442333221436, "learning_rate": 5.39146411724547e-06, "loss": 1.844, "step": 901 }, { "epoch": 0.40155815247634946, "grad_norm": 2.125002861022949, "learning_rate": 5.290169500525577e-06, "loss": 1.8199, "step": 902 }, { "epoch": 0.4020033388981636, "grad_norm": 2.352309226989746, "learning_rate": 5.189809631596798e-06, "loss": 2.143, "step": 903 }, { "epoch": 0.40244852531997777, "grad_norm": 2.7642769813537598, "learning_rate": 5.0903855009705514e-06, "loss": 1.75, "step": 904 }, { "epoch": 0.4028937117417919, "grad_norm": 4.074885845184326, "learning_rate": 4.991898089922819e-06, "loss": 2.1392, "step": 905 }, { "epoch": 0.403338898163606, "grad_norm": 2.652728319168091, "learning_rate": 4.8943483704846475e-06, "loss": 1.9152, "step": 906 }, { "epoch": 0.40378408458542014, "grad_norm": 2.720862627029419, "learning_rate": 4.797737305432337e-06, "loss": 2.3358, "step": 907 }, { "epoch": 0.40422927100723427, "grad_norm": 2.838879346847534, "learning_rate": 4.702065848278126e-06, "loss": 2.1309, "step": 908 }, { "epoch": 0.4046744574290484, "grad_norm": 2.1942715644836426, "learning_rate": 4.607334943260655e-06, "loss": 1.4136, "step": 909 }, { "epoch": 0.4051196438508626, "grad_norm": 3.1751365661621094, "learning_rate": 4.513545525335705e-06, "loss": 2.0721, "step": 910 }, { "epoch": 0.4055648302726767, "grad_norm": 2.386066436767578, "learning_rate": 4.420698520166988e-06, "loss": 1.7174, "step": 911 }, { "epoch": 0.4060100166944908, "grad_norm": 2.3867030143737793, "learning_rate": 
4.328794844116946e-06, "loss": 1.9815, "step": 912 }, { "epoch": 0.40645520311630495, "grad_norm": 2.3891232013702393, "learning_rate": 4.237835404237778e-06, "loss": 1.9543, "step": 913 }, { "epoch": 0.4069003895381191, "grad_norm": 2.0092246532440186, "learning_rate": 4.147821098262405e-06, "loss": 1.7221, "step": 914 }, { "epoch": 0.4073455759599332, "grad_norm": 13.39091968536377, "learning_rate": 4.0587528145957235e-06, "loss": 1.7269, "step": 915 }, { "epoch": 0.40779076238174733, "grad_norm": 2.8730368614196777, "learning_rate": 3.970631432305694e-06, "loss": 1.867, "step": 916 }, { "epoch": 0.4082359488035615, "grad_norm": 4.520809173583984, "learning_rate": 3.883457821114811e-06, "loss": 2.3426, "step": 917 }, { "epoch": 0.40868113522537564, "grad_norm": 2.5547308921813965, "learning_rate": 3.797232841391407e-06, "loss": 1.6473, "step": 918 }, { "epoch": 0.40912632164718976, "grad_norm": 2.6396074295043945, "learning_rate": 3.711957344141237e-06, "loss": 1.5152, "step": 919 }, { "epoch": 0.4095715080690039, "grad_norm": 2.5423967838287354, "learning_rate": 3.627632170999029e-06, "loss": 1.8634, "step": 920 }, { "epoch": 0.410016694490818, "grad_norm": 2.359440565109253, "learning_rate": 3.5442581542201923e-06, "loss": 1.6601, "step": 921 }, { "epoch": 0.41046188091263214, "grad_norm": 3.3366010189056396, "learning_rate": 3.461836116672612e-06, "loss": 2.3835, "step": 922 }, { "epoch": 0.4109070673344463, "grad_norm": 1.9757188558578491, "learning_rate": 3.380366871828522e-06, "loss": 1.3504, "step": 923 }, { "epoch": 0.41135225375626044, "grad_norm": 1.7242034673690796, "learning_rate": 3.2998512237565005e-06, "loss": 0.9853, "step": 924 }, { "epoch": 0.41179744017807457, "grad_norm": 4.0235161781311035, "learning_rate": 3.2202899671134546e-06, "loss": 2.5185, "step": 925 }, { "epoch": 0.4122426265998887, "grad_norm": 3.6245474815368652, "learning_rate": 3.1416838871368924e-06, "loss": 2.2566, "step": 926 }, { "epoch": 0.4126878130217028, "grad_norm": 
2.94282865524292, "learning_rate": 3.064033759637064e-06, "loss": 1.7241, "step": 927 }, { "epoch": 0.41313299944351695, "grad_norm": 1.7278319597244263, "learning_rate": 2.9873403509894203e-06, "loss": 1.6847, "step": 928 }, { "epoch": 0.4135781858653311, "grad_norm": 2.720810651779175, "learning_rate": 2.9116044181269007e-06, "loss": 1.7949, "step": 929 }, { "epoch": 0.41402337228714525, "grad_norm": 2.830357789993286, "learning_rate": 2.836826708532603e-06, "loss": 1.8299, "step": 930 }, { "epoch": 0.4144685587089594, "grad_norm": 2.6716318130493164, "learning_rate": 2.7630079602323442e-06, "loss": 1.4774, "step": 931 }, { "epoch": 0.4149137451307735, "grad_norm": 2.116966724395752, "learning_rate": 2.690148901787337e-06, "loss": 0.5776, "step": 932 }, { "epoch": 0.41535893155258763, "grad_norm": 1.9328233003616333, "learning_rate": 2.618250252287113e-06, "loss": 1.4808, "step": 933 }, { "epoch": 0.41580411797440175, "grad_norm": 2.859921932220459, "learning_rate": 2.5473127213422763e-06, "loss": 2.1348, "step": 934 }, { "epoch": 0.41624930439621594, "grad_norm": 2.039973020553589, "learning_rate": 2.4773370090776626e-06, "loss": 1.8692, "step": 935 }, { "epoch": 0.41669449081803006, "grad_norm": 3.830960273742676, "learning_rate": 2.4083238061252567e-06, "loss": 1.9331, "step": 936 }, { "epoch": 0.4171396772398442, "grad_norm": 2.213256597518921, "learning_rate": 2.3402737936175425e-06, "loss": 1.8484, "step": 937 }, { "epoch": 0.4175848636616583, "grad_norm": 2.631376266479492, "learning_rate": 2.273187643180652e-06, "loss": 2.4657, "step": 938 }, { "epoch": 0.41803005008347244, "grad_norm": 2.208151340484619, "learning_rate": 2.2070660169278166e-06, "loss": 1.9784, "step": 939 }, { "epoch": 0.41847523650528656, "grad_norm": 2.4797284603118896, "learning_rate": 2.141909567452793e-06, "loss": 2.1487, "step": 940 }, { "epoch": 0.41892042292710074, "grad_norm": 2.340677261352539, "learning_rate": 2.0777189378234143e-06, "loss": 1.695, "step": 941 }, { "epoch": 
0.41936560934891487, "grad_norm": 2.2142953872680664, "learning_rate": 2.014494761575314e-06, "loss": 1.9991, "step": 942 }, { "epoch": 0.419810795770729, "grad_norm": 2.083629846572876, "learning_rate": 1.9522376627055583e-06, "loss": 1.3691, "step": 943 }, { "epoch": 0.4202559821925431, "grad_norm": 2.2179574966430664, "learning_rate": 1.8909482556666024e-06, "loss": 1.5776, "step": 944 }, { "epoch": 0.42070116861435725, "grad_norm": 2.716609239578247, "learning_rate": 1.8306271453601199e-06, "loss": 1.6952, "step": 945 }, { "epoch": 0.42114635503617137, "grad_norm": 3.5457558631896973, "learning_rate": 1.771274927131139e-06, "loss": 2.2282, "step": 946 }, { "epoch": 0.42159154145798555, "grad_norm": 2.859788656234741, "learning_rate": 1.712892186762083e-06, "loss": 2.4899, "step": 947 }, { "epoch": 0.4220367278797997, "grad_norm": 2.126269817352295, "learning_rate": 1.6554795004670388e-06, "loss": 1.7597, "step": 948 }, { "epoch": 0.4224819143016138, "grad_norm": 11.28099250793457, "learning_rate": 1.5990374348860305e-06, "loss": 2.0871, "step": 949 }, { "epoch": 0.42292710072342793, "grad_norm": 2.5182433128356934, "learning_rate": 1.543566547079467e-06, "loss": 1.6924, "step": 950 }, { "epoch": 0.42337228714524205, "grad_norm": 2.587977886199951, "learning_rate": 1.4890673845226133e-06, "loss": 2.0768, "step": 951 }, { "epoch": 0.4238174735670562, "grad_norm": 2.2390692234039307, "learning_rate": 1.4355404851001952e-06, "loss": 1.8168, "step": 952 }, { "epoch": 0.42426265998887036, "grad_norm": 2.229036331176758, "learning_rate": 1.3829863771011253e-06, "loss": 1.4062, "step": 953 }, { "epoch": 0.4247078464106845, "grad_norm": 2.124692440032959, "learning_rate": 1.3314055792131964e-06, "loss": 1.9792, "step": 954 }, { "epoch": 0.4251530328324986, "grad_norm": 9.60531997680664, "learning_rate": 1.280798600518085e-06, "loss": 2.1784, "step": 955 }, { "epoch": 0.42559821925431274, "grad_norm": 3.135800838470459, "learning_rate": 1.231165940486234e-06, "loss": 
2.4013, "step": 956 }, { "epoch": 0.42604340567612686, "grad_norm": 1.8015056848526, "learning_rate": 1.1825080889719563e-06, "loss": 2.0513, "step": 957 }, { "epoch": 0.426488592097941, "grad_norm": 2.0358049869537354, "learning_rate": 1.134825526208605e-06, "loss": 1.6947, "step": 958 }, { "epoch": 0.42693377851975517, "grad_norm": 2.1654505729675293, "learning_rate": 1.0881187228038215e-06, "loss": 1.5756, "step": 959 }, { "epoch": 0.4273789649415693, "grad_norm": 1.4749082326889038, "learning_rate": 1.0423881397349068e-06, "loss": 1.0004, "step": 960 }, { "epoch": 0.4278241513633834, "grad_norm": 2.168402671813965, "learning_rate": 9.976342283442463e-07, "loss": 1.8201, "step": 961 }, { "epoch": 0.42826933778519755, "grad_norm": 2.350822925567627, "learning_rate": 9.538574303348813e-07, "loss": 1.9771, "step": 962 }, { "epoch": 0.42871452420701167, "grad_norm": 3.4289748668670654, "learning_rate": 9.110581777661331e-07, "loss": 2.1341, "step": 963 }, { "epoch": 0.4291597106288258, "grad_norm": 2.5226845741271973, "learning_rate": 8.692368930493521e-07, "loss": 1.4104, "step": 964 }, { "epoch": 0.42960489705064, "grad_norm": 3.7586185932159424, "learning_rate": 8.283939889437209e-07, "loss": 2.83, "step": 965 }, { "epoch": 0.4300500834724541, "grad_norm": 2.972186803817749, "learning_rate": 7.885298685522235e-07, "loss": 2.0237, "step": 966 }, { "epoch": 0.43049526989426823, "grad_norm": 2.904418468475342, "learning_rate": 7.496449253176274e-07, "loss": 1.6953, "step": 967 }, { "epoch": 0.43094045631608235, "grad_norm": 2.9558122158050537, "learning_rate": 7.117395430186414e-07, "loss": 3.0708, "step": 968 }, { "epoch": 0.4313856427378965, "grad_norm": 2.732140064239502, "learning_rate": 6.748140957660631e-07, "loss": 1.7392, "step": 969 }, { "epoch": 0.4318308291597106, "grad_norm": 2.7709476947784424, "learning_rate": 6.388689479991605e-07, "loss": 1.7855, "step": 970 }, { "epoch": 0.4322760155815248, "grad_norm": 1.9800370931625366, "learning_rate": 
6.039044544820404e-07, "loss": 0.8279, "step": 971 }, { "epoch": 0.4327212020033389, "grad_norm": 3.503222703933716, "learning_rate": 5.699209603001076e-07, "loss": 1.2507, "step": 972 }, { "epoch": 0.43316638842515304, "grad_norm": 3.6716744899749756, "learning_rate": 5.369188008567672e-07, "loss": 1.5849, "step": 973 }, { "epoch": 0.43361157484696716, "grad_norm": 3.8361189365386963, "learning_rate": 5.048983018699827e-07, "loss": 2.7786, "step": 974 }, { "epoch": 0.4340567612687813, "grad_norm": 2.1607441902160645, "learning_rate": 4.738597793691679e-07, "loss": 1.6388, "step": 975 }, { "epoch": 0.4345019476905954, "grad_norm": 2.7278549671173096, "learning_rate": 4.438035396920004e-07, "loss": 2.5643, "step": 976 }, { "epoch": 0.4349471341124096, "grad_norm": 2.0202202796936035, "learning_rate": 4.1472987948143473e-07, "loss": 1.6711, "step": 977 }, { "epoch": 0.4353923205342237, "grad_norm": 2.3101208209991455, "learning_rate": 3.866390856827495e-07, "loss": 1.8005, "step": 978 }, { "epoch": 0.43583750695603785, "grad_norm": 1.7602624893188477, "learning_rate": 3.595314355407609e-07, "loss": 1.4123, "step": 979 }, { "epoch": 0.43628269337785197, "grad_norm": 2.8393287658691406, "learning_rate": 3.3340719659701313e-07, "loss": 1.9375, "step": 980 }, { "epoch": 0.4367278797996661, "grad_norm": 2.671792507171631, "learning_rate": 3.0826662668720364e-07, "loss": 2.1422, "step": 981 }, { "epoch": 0.4371730662214802, "grad_norm": 2.798468828201294, "learning_rate": 2.841099739386066e-07, "loss": 1.1786, "step": 982 }, { "epoch": 0.4376182526432944, "grad_norm": 2.4670653343200684, "learning_rate": 2.609374767676309e-07, "loss": 2.0231, "step": 983 }, { "epoch": 0.43806343906510853, "grad_norm": 2.650512218475342, "learning_rate": 2.387493638774774e-07, "loss": 2.1189, "step": 984 }, { "epoch": 0.43850862548692265, "grad_norm": 2.984624147415161, "learning_rate": 2.175458542558517e-07, "loss": 2.3214, "step": 985 }, { "epoch": 0.4389538119087368, "grad_norm": 
2.806220769882202, "learning_rate": 1.973271571728441e-07, "loss": 2.8719, "step": 986 }, { "epoch": 0.4393989983305509, "grad_norm": 2.488363742828369, "learning_rate": 1.7809347217881966e-07, "loss": 2.2061, "step": 987 }, { "epoch": 0.43984418475236503, "grad_norm": 4.78094482421875, "learning_rate": 1.598449891024978e-07, "loss": 2.4017, "step": 988 }, { "epoch": 0.4402893711741792, "grad_norm": 1.6316055059432983, "learning_rate": 1.425818880490315e-07, "loss": 1.8267, "step": 989 }, { "epoch": 0.44073455759599334, "grad_norm": 2.0801150798797607, "learning_rate": 1.2630433939825327e-07, "loss": 1.6163, "step": 990 }, { "epoch": 0.44117974401780746, "grad_norm": 1.5451546907424927, "learning_rate": 1.1101250380300965e-07, "loss": 1.3939, "step": 991 }, { "epoch": 0.4416249304396216, "grad_norm": 1.971220850944519, "learning_rate": 9.670653218752934e-08, "loss": 1.7467, "step": 992 }, { "epoch": 0.4420701168614357, "grad_norm": 3.593325138092041, "learning_rate": 8.33865657459909e-08, "loss": 2.2187, "step": 993 }, { "epoch": 0.44251530328324984, "grad_norm": 1.9523143768310547, "learning_rate": 7.105273594107953e-08, "loss": 1.2635, "step": 994 }, { "epoch": 0.442960489705064, "grad_norm": 2.172438144683838, "learning_rate": 5.970516450271025e-08, "loss": 1.3526, "step": 995 }, { "epoch": 0.44340567612687815, "grad_norm": 1.9198912382125854, "learning_rate": 4.934396342684e-08, "loss": 1.1006, "step": 996 }, { "epoch": 0.44385086254869227, "grad_norm": 2.753998041152954, "learning_rate": 3.996923497434635e-08, "loss": 2.1173, "step": 997 }, { "epoch": 0.4442960489705064, "grad_norm": 2.4736664295196533, "learning_rate": 3.1581071670006015e-08, "loss": 2.3068, "step": 998 }, { "epoch": 0.4447412353923205, "grad_norm": 2.1527669429779053, "learning_rate": 2.417955630159563e-08, "loss": 1.4739, "step": 999 }, { "epoch": 0.44518642181413465, "grad_norm": 2.12872052192688, "learning_rate": 1.7764761919103477e-08, "loss": 2.0056, "step": 1000 }, { "epoch": 
0.44518642181413465, "eval_loss": 1.9295015335083008, "eval_runtime": 133.475, "eval_samples_per_second": 3.544, "eval_steps_per_second": 3.544, "step": 1000 } ], "logging_steps": 1, "max_steps": 1000, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 500, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 3.173290868736e+17, "train_batch_size": 1, "trial_name": null, "trial_params": null }