{ "best_metric": null, "best_model_checkpoint": null, "epoch": 1.9998219373219372, "eval_steps": 1404, "global_step": 11232, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.00017806267806267807, "grad_norm": 0.2854898273944855, "learning_rate": 1e-05, "loss": 1.1997, "step": 1 }, { "epoch": 0.00017806267806267807, "eval_loss": 1.3698358535766602, "eval_runtime": 24.1591, "eval_samples_per_second": 43.089, "eval_steps_per_second": 21.565, "step": 1 }, { "epoch": 0.00035612535612535614, "grad_norm": 0.3508087396621704, "learning_rate": 2e-05, "loss": 1.4134, "step": 2 }, { "epoch": 0.0005341880341880342, "grad_norm": 0.27050870656967163, "learning_rate": 3e-05, "loss": 1.3447, "step": 3 }, { "epoch": 0.0007122507122507123, "grad_norm": 0.27706292271614075, "learning_rate": 4e-05, "loss": 1.0354, "step": 4 }, { "epoch": 0.0008903133903133903, "grad_norm": 0.30398961901664734, "learning_rate": 5e-05, "loss": 1.1441, "step": 5 }, { "epoch": 0.0010683760683760685, "grad_norm": 0.3103881776332855, "learning_rate": 6e-05, "loss": 1.341, "step": 6 }, { "epoch": 0.0012464387464387464, "grad_norm": 0.5191189646720886, "learning_rate": 7e-05, "loss": 1.3457, "step": 7 }, { "epoch": 0.0014245014245014246, "grad_norm": 0.4449467360973358, "learning_rate": 8e-05, "loss": 1.5051, "step": 8 }, { "epoch": 0.0016025641025641025, "grad_norm": 0.3914581537246704, "learning_rate": 9e-05, "loss": 1.5525, "step": 9 }, { "epoch": 0.0017806267806267807, "grad_norm": 0.37746086716651917, "learning_rate": 0.0001, "loss": 1.3266, "step": 10 }, { "epoch": 0.001958689458689459, "grad_norm": 0.35226109623908997, "learning_rate": 0.00011000000000000002, "loss": 1.5416, "step": 11 }, { "epoch": 0.002136752136752137, "grad_norm": 0.3343672454357147, "learning_rate": 0.00012, "loss": 1.3221, "step": 12 }, { "epoch": 0.0023148148148148147, "grad_norm": 0.47298333048820496, "learning_rate": 0.00013000000000000002, "loss": 
1.2999, "step": 13 }, { "epoch": 0.002492877492877493, "grad_norm": 0.377814918756485, "learning_rate": 0.00014, "loss": 1.1688, "step": 14 }, { "epoch": 0.002670940170940171, "grad_norm": 0.46344801783561707, "learning_rate": 0.00015000000000000001, "loss": 1.3565, "step": 15 }, { "epoch": 0.002849002849002849, "grad_norm": 0.49615249037742615, "learning_rate": 0.00016, "loss": 1.5692, "step": 16 }, { "epoch": 0.003027065527065527, "grad_norm": 0.5109946131706238, "learning_rate": 0.00017, "loss": 1.2991, "step": 17 }, { "epoch": 0.003205128205128205, "grad_norm": 0.5125070214271545, "learning_rate": 0.00018, "loss": 1.3309, "step": 18 }, { "epoch": 0.003383190883190883, "grad_norm": 0.4517767131328583, "learning_rate": 0.00019, "loss": 1.357, "step": 19 }, { "epoch": 0.0035612535612535613, "grad_norm": 0.47267794609069824, "learning_rate": 0.0002, "loss": 1.1301, "step": 20 }, { "epoch": 0.0037393162393162395, "grad_norm": 0.46823424100875854, "learning_rate": 0.00019999999902035388, "loss": 1.1195, "step": 21 }, { "epoch": 0.003917378917378918, "grad_norm": 0.440036803483963, "learning_rate": 0.00019999999608141548, "loss": 1.2822, "step": 22 }, { "epoch": 0.004095441595441595, "grad_norm": 0.371101975440979, "learning_rate": 0.00019999999118318492, "loss": 1.132, "step": 23 }, { "epoch": 0.004273504273504274, "grad_norm": 0.44691094756126404, "learning_rate": 0.00019999998432566226, "loss": 1.2968, "step": 24 }, { "epoch": 0.004451566951566952, "grad_norm": 0.5462725162506104, "learning_rate": 0.0001999999755088476, "loss": 1.1714, "step": 25 }, { "epoch": 0.004629629629629629, "grad_norm": 0.39860013127326965, "learning_rate": 0.0001999999647327412, "loss": 1.0407, "step": 26 }, { "epoch": 0.004807692307692308, "grad_norm": 0.5031934380531311, "learning_rate": 0.0001999999519973432, "loss": 1.2773, "step": 27 }, { "epoch": 0.004985754985754986, "grad_norm": 0.42162764072418213, "learning_rate": 0.0001999999373026539, "loss": 1.2824, "step": 28 }, { "epoch": 
0.005163817663817663, "grad_norm": 0.40964868664741516, "learning_rate": 0.00019999992064867353, "loss": 1.226, "step": 29 }, { "epoch": 0.005341880341880342, "grad_norm": 0.41650915145874023, "learning_rate": 0.00019999990203540245, "loss": 1.2677, "step": 30 }, { "epoch": 0.00551994301994302, "grad_norm": 0.40052226185798645, "learning_rate": 0.00019999988146284103, "loss": 0.9443, "step": 31 }, { "epoch": 0.005698005698005698, "grad_norm": 0.5198387503623962, "learning_rate": 0.00019999985893098964, "loss": 1.3043, "step": 32 }, { "epoch": 0.005876068376068376, "grad_norm": 0.50941002368927, "learning_rate": 0.00019999983443984878, "loss": 1.2002, "step": 33 }, { "epoch": 0.006054131054131054, "grad_norm": 0.30082932114601135, "learning_rate": 0.00019999980798941888, "loss": 0.9904, "step": 34 }, { "epoch": 0.006232193732193732, "grad_norm": 0.4228935241699219, "learning_rate": 0.00019999977957970048, "loss": 1.1137, "step": 35 }, { "epoch": 0.00641025641025641, "grad_norm": 0.41294750571250916, "learning_rate": 0.0001999997492106941, "loss": 1.3385, "step": 36 }, { "epoch": 0.006588319088319089, "grad_norm": 0.4415493905544281, "learning_rate": 0.00019999971688240041, "loss": 1.1695, "step": 37 }, { "epoch": 0.006766381766381766, "grad_norm": 0.3726460933685303, "learning_rate": 0.00019999968259482, "loss": 1.1734, "step": 38 }, { "epoch": 0.006944444444444444, "grad_norm": 0.3969627320766449, "learning_rate": 0.0001999996463479535, "loss": 1.1209, "step": 39 }, { "epoch": 0.007122507122507123, "grad_norm": 0.3779667913913727, "learning_rate": 0.0001999996081418017, "loss": 1.1635, "step": 40 }, { "epoch": 0.0073005698005698, "grad_norm": 0.3933636546134949, "learning_rate": 0.0001999995679763653, "loss": 1.1514, "step": 41 }, { "epoch": 0.007478632478632479, "grad_norm": 0.3567957282066345, "learning_rate": 0.00019999952585164507, "loss": 1.2488, "step": 42 }, { "epoch": 0.007656695156695157, "grad_norm": 0.32506081461906433, "learning_rate": 
0.00019999948176764186, "loss": 1.149, "step": 43 }, { "epoch": 0.007834757834757835, "grad_norm": 0.46588361263275146, "learning_rate": 0.0001999994357243566, "loss": 1.4263, "step": 44 }, { "epoch": 0.008012820512820512, "grad_norm": 0.5070307850837708, "learning_rate": 0.00019999938772179005, "loss": 1.0698, "step": 45 }, { "epoch": 0.00819088319088319, "grad_norm": 0.38199326395988464, "learning_rate": 0.00019999933775994327, "loss": 0.9907, "step": 46 }, { "epoch": 0.00836894586894587, "grad_norm": 0.43684661388397217, "learning_rate": 0.0001999992858388172, "loss": 1.2905, "step": 47 }, { "epoch": 0.008547008547008548, "grad_norm": 0.44482162594795227, "learning_rate": 0.00019999923195841284, "loss": 1.2153, "step": 48 }, { "epoch": 0.008725071225071225, "grad_norm": 0.4259667694568634, "learning_rate": 0.0001999991761187313, "loss": 1.1582, "step": 49 }, { "epoch": 0.008903133903133903, "grad_norm": 0.41649091243743896, "learning_rate": 0.00019999911831977357, "loss": 1.0185, "step": 50 }, { "epoch": 0.009081196581196582, "grad_norm": 0.4179716110229492, "learning_rate": 0.0001999990585615409, "loss": 1.3579, "step": 51 }, { "epoch": 0.009259259259259259, "grad_norm": 0.3372558355331421, "learning_rate": 0.00019999899684403438, "loss": 1.0638, "step": 52 }, { "epoch": 0.009437321937321937, "grad_norm": 0.41294020414352417, "learning_rate": 0.00019999893316725525, "loss": 1.1932, "step": 53 }, { "epoch": 0.009615384615384616, "grad_norm": 0.4407919645309448, "learning_rate": 0.00019999886753120473, "loss": 1.4129, "step": 54 }, { "epoch": 0.009793447293447293, "grad_norm": 0.47948843240737915, "learning_rate": 0.00019999879993588414, "loss": 1.2424, "step": 55 }, { "epoch": 0.009971509971509971, "grad_norm": 0.3535355031490326, "learning_rate": 0.00019999873038129484, "loss": 1.0145, "step": 56 }, { "epoch": 0.01014957264957265, "grad_norm": 0.5067078471183777, "learning_rate": 0.00019999865886743813, "loss": 1.4708, "step": 57 }, { "epoch": 
0.010327635327635327, "grad_norm": 0.42862898111343384, "learning_rate": 0.0001999985853943154, "loss": 1.0399, "step": 58 }, { "epoch": 0.010505698005698005, "grad_norm": 0.4769059419631958, "learning_rate": 0.00019999850996192816, "loss": 1.1258, "step": 59 }, { "epoch": 0.010683760683760684, "grad_norm": 0.4065442383289337, "learning_rate": 0.0001999984325702778, "loss": 1.2077, "step": 60 }, { "epoch": 0.010861823361823363, "grad_norm": 0.5318329930305481, "learning_rate": 0.0001999983532193659, "loss": 1.2298, "step": 61 }, { "epoch": 0.01103988603988604, "grad_norm": 0.4777173101902008, "learning_rate": 0.000199998271909194, "loss": 1.3195, "step": 62 }, { "epoch": 0.011217948717948718, "grad_norm": 0.37553808093070984, "learning_rate": 0.0001999981886397637, "loss": 1.1188, "step": 63 }, { "epoch": 0.011396011396011397, "grad_norm": 0.3920556902885437, "learning_rate": 0.0001999981034110766, "loss": 1.1448, "step": 64 }, { "epoch": 0.011574074074074073, "grad_norm": 0.454272598028183, "learning_rate": 0.0001999980162231344, "loss": 1.0812, "step": 65 }, { "epoch": 0.011752136752136752, "grad_norm": 0.4354456663131714, "learning_rate": 0.00019999792707593882, "loss": 1.1174, "step": 66 }, { "epoch": 0.01193019943019943, "grad_norm": 0.5030252933502197, "learning_rate": 0.00019999783596949156, "loss": 1.2925, "step": 67 }, { "epoch": 0.012108262108262107, "grad_norm": 0.5141571164131165, "learning_rate": 0.00019999774290379446, "loss": 1.6193, "step": 68 }, { "epoch": 0.012286324786324786, "grad_norm": 0.417298287153244, "learning_rate": 0.0001999976478788493, "loss": 1.1875, "step": 69 }, { "epoch": 0.012464387464387465, "grad_norm": 0.4642415940761566, "learning_rate": 0.00019999755089465795, "loss": 1.4138, "step": 70 }, { "epoch": 0.012642450142450143, "grad_norm": 0.43184754252433777, "learning_rate": 0.0001999974519512223, "loss": 1.0697, "step": 71 }, { "epoch": 0.01282051282051282, "grad_norm": 0.46698349714279175, "learning_rate": 
0.00019999735104854436, "loss": 0.709, "step": 72 }, { "epoch": 0.012998575498575499, "grad_norm": 0.37253814935684204, "learning_rate": 0.000199997248186626, "loss": 1.2084, "step": 73 }, { "epoch": 0.013176638176638177, "grad_norm": 0.3851388692855835, "learning_rate": 0.0001999971433654693, "loss": 1.0548, "step": 74 }, { "epoch": 0.013354700854700854, "grad_norm": 0.4434688985347748, "learning_rate": 0.00019999703658507635, "loss": 1.4084, "step": 75 }, { "epoch": 0.013532763532763533, "grad_norm": 0.43164482712745667, "learning_rate": 0.00019999692784544913, "loss": 1.4872, "step": 76 }, { "epoch": 0.013710826210826211, "grad_norm": 0.4224303364753723, "learning_rate": 0.00019999681714658984, "loss": 1.2221, "step": 77 }, { "epoch": 0.013888888888888888, "grad_norm": 0.35588955879211426, "learning_rate": 0.00019999670448850069, "loss": 0.84, "step": 78 }, { "epoch": 0.014066951566951567, "grad_norm": 0.3970590829849243, "learning_rate": 0.0001999965898711838, "loss": 1.1886, "step": 79 }, { "epoch": 0.014245014245014245, "grad_norm": 0.4331924319267273, "learning_rate": 0.00019999647329464146, "loss": 1.179, "step": 80 }, { "epoch": 0.014423076923076924, "grad_norm": 0.4226946234703064, "learning_rate": 0.00019999635475887598, "loss": 1.1496, "step": 81 }, { "epoch": 0.0146011396011396, "grad_norm": 0.381592720746994, "learning_rate": 0.00019999623426388962, "loss": 1.1774, "step": 82 }, { "epoch": 0.01477920227920228, "grad_norm": 0.4190855622291565, "learning_rate": 0.00019999611180968478, "loss": 1.1491, "step": 83 }, { "epoch": 0.014957264957264958, "grad_norm": 0.3904292583465576, "learning_rate": 0.00019999598739626389, "loss": 1.1275, "step": 84 }, { "epoch": 0.015135327635327635, "grad_norm": 0.4515478014945984, "learning_rate": 0.0001999958610236293, "loss": 1.2404, "step": 85 }, { "epoch": 0.015313390313390313, "grad_norm": 0.48341724276542664, "learning_rate": 0.00019999573269178359, "loss": 1.3572, "step": 86 }, { "epoch": 0.015491452991452992, 
"grad_norm": 0.42150333523750305, "learning_rate": 0.00019999560240072914, "loss": 1.0203, "step": 87 }, { "epoch": 0.01566951566951567, "grad_norm": 0.45445525646209717, "learning_rate": 0.00019999547015046867, "loss": 1.0677, "step": 88 }, { "epoch": 0.01584757834757835, "grad_norm": 0.3581015467643738, "learning_rate": 0.00019999533594100463, "loss": 1.0693, "step": 89 }, { "epoch": 0.016025641025641024, "grad_norm": 0.4430878758430481, "learning_rate": 0.00019999519977233971, "loss": 1.1591, "step": 90 }, { "epoch": 0.016203703703703703, "grad_norm": 0.3940352201461792, "learning_rate": 0.0001999950616444766, "loss": 1.1325, "step": 91 }, { "epoch": 0.01638176638176638, "grad_norm": 0.4521673321723938, "learning_rate": 0.00019999492155741794, "loss": 1.3288, "step": 92 }, { "epoch": 0.01655982905982906, "grad_norm": 0.3988296687602997, "learning_rate": 0.00019999477951116658, "loss": 1.0023, "step": 93 }, { "epoch": 0.01673789173789174, "grad_norm": 0.38709723949432373, "learning_rate": 0.00019999463550572516, "loss": 1.2623, "step": 94 }, { "epoch": 0.016915954415954417, "grad_norm": 0.35376182198524475, "learning_rate": 0.00019999448954109662, "loss": 1.0643, "step": 95 }, { "epoch": 0.017094017094017096, "grad_norm": 0.49547120928764343, "learning_rate": 0.00019999434161728377, "loss": 1.2121, "step": 96 }, { "epoch": 0.01727207977207977, "grad_norm": 0.49593672156333923, "learning_rate": 0.00019999419173428952, "loss": 1.1635, "step": 97 }, { "epoch": 0.01745014245014245, "grad_norm": 0.4146541953086853, "learning_rate": 0.0001999940398921168, "loss": 1.1452, "step": 98 }, { "epoch": 0.017628205128205128, "grad_norm": 0.5177254676818848, "learning_rate": 0.00019999388609076858, "loss": 1.2178, "step": 99 }, { "epoch": 0.017806267806267807, "grad_norm": 0.4012768864631653, "learning_rate": 0.0001999937303302479, "loss": 0.9222, "step": 100 }, { "epoch": 0.017984330484330485, "grad_norm": 0.4597131907939911, "learning_rate": 0.00019999357261055777, "loss": 
0.979, "step": 101 }, { "epoch": 0.018162393162393164, "grad_norm": 0.6190966963768005, "learning_rate": 0.00019999341293170132, "loss": 1.3909, "step": 102 }, { "epoch": 0.01834045584045584, "grad_norm": 0.4576462209224701, "learning_rate": 0.00019999325129368164, "loss": 1.073, "step": 103 }, { "epoch": 0.018518518518518517, "grad_norm": 0.4036749005317688, "learning_rate": 0.00019999308769650192, "loss": 1.1354, "step": 104 }, { "epoch": 0.018696581196581196, "grad_norm": 0.4722452759742737, "learning_rate": 0.00019999292214016538, "loss": 1.2039, "step": 105 }, { "epoch": 0.018874643874643875, "grad_norm": 0.5338274240493774, "learning_rate": 0.00019999275462467527, "loss": 1.225, "step": 106 }, { "epoch": 0.019052706552706553, "grad_norm": 0.4301491677761078, "learning_rate": 0.00019999258515003484, "loss": 1.0601, "step": 107 }, { "epoch": 0.019230769230769232, "grad_norm": 0.33271175622940063, "learning_rate": 0.0001999924137162474, "loss": 0.8441, "step": 108 }, { "epoch": 0.01940883190883191, "grad_norm": 0.4648784399032593, "learning_rate": 0.0001999922403233163, "loss": 1.2038, "step": 109 }, { "epoch": 0.019586894586894586, "grad_norm": 0.37915176153182983, "learning_rate": 0.00019999206497124504, "loss": 1.0923, "step": 110 }, { "epoch": 0.019764957264957264, "grad_norm": 0.3865506052970886, "learning_rate": 0.00019999188766003695, "loss": 0.9535, "step": 111 }, { "epoch": 0.019943019943019943, "grad_norm": 0.35739636421203613, "learning_rate": 0.0001999917083896955, "loss": 1.2688, "step": 112 }, { "epoch": 0.02012108262108262, "grad_norm": 0.3943796157836914, "learning_rate": 0.0001999915271602243, "loss": 1.1097, "step": 113 }, { "epoch": 0.0202991452991453, "grad_norm": 0.44758161902427673, "learning_rate": 0.0001999913439716268, "loss": 1.2698, "step": 114 }, { "epoch": 0.02047720797720798, "grad_norm": 0.3749747574329376, "learning_rate": 0.00019999115882390664, "loss": 1.1091, "step": 115 }, { "epoch": 0.020655270655270654, "grad_norm": 
0.3479487895965576, "learning_rate": 0.00019999097171706745, "loss": 1.0049, "step": 116 }, { "epoch": 0.020833333333333332, "grad_norm": 0.4491243064403534, "learning_rate": 0.00019999078265111285, "loss": 1.1857, "step": 117 }, { "epoch": 0.02101139601139601, "grad_norm": 0.345289021730423, "learning_rate": 0.00019999059162604662, "loss": 1.1397, "step": 118 }, { "epoch": 0.02118945868945869, "grad_norm": 0.5467649698257446, "learning_rate": 0.00019999039864187243, "loss": 1.2196, "step": 119 }, { "epoch": 0.021367521367521368, "grad_norm": 0.36446481943130493, "learning_rate": 0.00019999020369859409, "loss": 0.796, "step": 120 }, { "epoch": 0.021545584045584047, "grad_norm": 0.4225841760635376, "learning_rate": 0.00019999000679621543, "loss": 0.9684, "step": 121 }, { "epoch": 0.021723646723646725, "grad_norm": 0.4205594062805176, "learning_rate": 0.0001999898079347403, "loss": 1.2762, "step": 122 }, { "epoch": 0.0219017094017094, "grad_norm": 0.43773892521858215, "learning_rate": 0.00019998960711417257, "loss": 1.117, "step": 123 }, { "epoch": 0.02207977207977208, "grad_norm": 0.41279685497283936, "learning_rate": 0.00019998940433451623, "loss": 1.1502, "step": 124 }, { "epoch": 0.022257834757834757, "grad_norm": 0.4090803563594818, "learning_rate": 0.0001999891995957752, "loss": 1.2591, "step": 125 }, { "epoch": 0.022435897435897436, "grad_norm": 0.6000410914421082, "learning_rate": 0.0001999889928979535, "loss": 1.4321, "step": 126 }, { "epoch": 0.022613960113960115, "grad_norm": 0.524264395236969, "learning_rate": 0.00019998878424105524, "loss": 1.1849, "step": 127 }, { "epoch": 0.022792022792022793, "grad_norm": 0.4581047296524048, "learning_rate": 0.00019998857362508443, "loss": 1.0598, "step": 128 }, { "epoch": 0.022970085470085472, "grad_norm": 0.42663446068763733, "learning_rate": 0.00019998836105004526, "loss": 1.1909, "step": 129 }, { "epoch": 0.023148148148148147, "grad_norm": 0.45709118247032166, "learning_rate": 0.00019998814651594183, "loss": 
1.2104, "step": 130 }, { "epoch": 0.023326210826210825, "grad_norm": 0.39528369903564453, "learning_rate": 0.0001999879300227784, "loss": 1.3073, "step": 131 }, { "epoch": 0.023504273504273504, "grad_norm": 0.46896448731422424, "learning_rate": 0.00019998771157055914, "loss": 1.3202, "step": 132 }, { "epoch": 0.023682336182336183, "grad_norm": 0.4386129677295685, "learning_rate": 0.00019998749115928842, "loss": 1.2196, "step": 133 }, { "epoch": 0.02386039886039886, "grad_norm": 0.45920488238334656, "learning_rate": 0.00019998726878897051, "loss": 1.3668, "step": 134 }, { "epoch": 0.02403846153846154, "grad_norm": 0.4115797281265259, "learning_rate": 0.0001999870444596098, "loss": 1.1052, "step": 135 }, { "epoch": 0.024216524216524215, "grad_norm": 0.3860839903354645, "learning_rate": 0.0001999868181712106, "loss": 1.0344, "step": 136 }, { "epoch": 0.024394586894586893, "grad_norm": 0.42514732480049133, "learning_rate": 0.00019998658992377742, "loss": 1.1979, "step": 137 }, { "epoch": 0.024572649572649572, "grad_norm": 0.36001840233802795, "learning_rate": 0.00019998635971731475, "loss": 1.4536, "step": 138 }, { "epoch": 0.02475071225071225, "grad_norm": 0.3739112317562103, "learning_rate": 0.00019998612755182707, "loss": 1.0097, "step": 139 }, { "epoch": 0.02492877492877493, "grad_norm": 0.37545472383499146, "learning_rate": 0.00019998589342731888, "loss": 0.829, "step": 140 }, { "epoch": 0.025106837606837608, "grad_norm": 0.38660728931427, "learning_rate": 0.0001999856573437948, "loss": 1.1324, "step": 141 }, { "epoch": 0.025284900284900286, "grad_norm": 0.3741356432437897, "learning_rate": 0.00019998541930125953, "loss": 1.0934, "step": 142 }, { "epoch": 0.02546296296296296, "grad_norm": 0.41900336742401123, "learning_rate": 0.00019998517929971764, "loss": 1.0336, "step": 143 }, { "epoch": 0.02564102564102564, "grad_norm": 0.4167572259902954, "learning_rate": 0.00019998493733917384, "loss": 1.2571, "step": 144 }, { "epoch": 0.02581908831908832, "grad_norm": 
0.39437636733055115, "learning_rate": 0.0001999846934196329, "loss": 1.2283, "step": 145 }, { "epoch": 0.025997150997150997, "grad_norm": 0.39129480719566345, "learning_rate": 0.00019998444754109964, "loss": 0.9893, "step": 146 }, { "epoch": 0.026175213675213676, "grad_norm": 0.45533549785614014, "learning_rate": 0.0001999841997035788, "loss": 1.0793, "step": 147 }, { "epoch": 0.026353276353276354, "grad_norm": 0.3741768002510071, "learning_rate": 0.00019998394990707524, "loss": 1.2179, "step": 148 }, { "epoch": 0.026531339031339033, "grad_norm": 0.4066533148288727, "learning_rate": 0.0001999836981515939, "loss": 1.1443, "step": 149 }, { "epoch": 0.026709401709401708, "grad_norm": 0.4851688742637634, "learning_rate": 0.0001999834444371397, "loss": 1.1668, "step": 150 }, { "epoch": 0.026887464387464387, "grad_norm": 0.428091436624527, "learning_rate": 0.0001999831887637176, "loss": 1.2676, "step": 151 }, { "epoch": 0.027065527065527065, "grad_norm": 0.4024655222892761, "learning_rate": 0.0001999829311313326, "loss": 1.3115, "step": 152 }, { "epoch": 0.027243589743589744, "grad_norm": 0.43983033299446106, "learning_rate": 0.00019998267153998976, "loss": 1.1019, "step": 153 }, { "epoch": 0.027421652421652423, "grad_norm": 0.4317505359649658, "learning_rate": 0.0001999824099896942, "loss": 1.3129, "step": 154 }, { "epoch": 0.0275997150997151, "grad_norm": 0.43107882142066956, "learning_rate": 0.000199982146480451, "loss": 1.2134, "step": 155 }, { "epoch": 0.027777777777777776, "grad_norm": 0.3939448297023773, "learning_rate": 0.00019998188101226532, "loss": 1.0321, "step": 156 }, { "epoch": 0.027955840455840455, "grad_norm": 0.4641847610473633, "learning_rate": 0.00019998161358514237, "loss": 1.2369, "step": 157 }, { "epoch": 0.028133903133903133, "grad_norm": 0.3538529872894287, "learning_rate": 0.0001999813441990874, "loss": 1.2061, "step": 158 }, { "epoch": 0.028311965811965812, "grad_norm": 0.3277950584888458, "learning_rate": 0.0001999810728541057, "loss": 0.9419, 
"step": 159 }, { "epoch": 0.02849002849002849, "grad_norm": 0.424710750579834, "learning_rate": 0.00019998079955020254, "loss": 1.3302, "step": 160 }, { "epoch": 0.02866809116809117, "grad_norm": 0.4120834469795227, "learning_rate": 0.00019998052428738333, "loss": 1.079, "step": 161 }, { "epoch": 0.028846153846153848, "grad_norm": 0.45811930298805237, "learning_rate": 0.00019998024706565346, "loss": 1.1259, "step": 162 }, { "epoch": 0.029024216524216523, "grad_norm": 0.3873266875743866, "learning_rate": 0.0001999799678850183, "loss": 1.2124, "step": 163 }, { "epoch": 0.0292022792022792, "grad_norm": 0.5806412696838379, "learning_rate": 0.00019997968674548337, "loss": 1.3467, "step": 164 }, { "epoch": 0.02938034188034188, "grad_norm": 0.3906802833080292, "learning_rate": 0.00019997940364705418, "loss": 1.1438, "step": 165 }, { "epoch": 0.02955840455840456, "grad_norm": 0.45201995968818665, "learning_rate": 0.00019997911858973626, "loss": 1.1469, "step": 166 }, { "epoch": 0.029736467236467237, "grad_norm": 0.4965892732143402, "learning_rate": 0.0001999788315735352, "loss": 1.0829, "step": 167 }, { "epoch": 0.029914529914529916, "grad_norm": 0.32578057050704956, "learning_rate": 0.0001999785425984566, "loss": 1.0432, "step": 168 }, { "epoch": 0.03009259259259259, "grad_norm": 0.4146028161048889, "learning_rate": 0.00019997825166450617, "loss": 1.1657, "step": 169 }, { "epoch": 0.03027065527065527, "grad_norm": 0.4342964291572571, "learning_rate": 0.0001999779587716896, "loss": 1.2038, "step": 170 }, { "epoch": 0.030448717948717948, "grad_norm": 0.40128546953201294, "learning_rate": 0.00019997766392001258, "loss": 1.3044, "step": 171 }, { "epoch": 0.030626780626780627, "grad_norm": 0.4357539117336273, "learning_rate": 0.00019997736710948094, "loss": 1.2143, "step": 172 }, { "epoch": 0.030804843304843305, "grad_norm": 0.4821035861968994, "learning_rate": 0.00019997706834010045, "loss": 1.0469, "step": 173 }, { "epoch": 0.030982905982905984, "grad_norm": 
0.3966675102710724, "learning_rate": 0.000199976767611877, "loss": 1.2122, "step": 174 }, { "epoch": 0.031160968660968662, "grad_norm": 0.4265064299106598, "learning_rate": 0.00019997646492481648, "loss": 1.0871, "step": 175 }, { "epoch": 0.03133903133903134, "grad_norm": 0.3445652723312378, "learning_rate": 0.00019997616027892485, "loss": 1.0412, "step": 176 }, { "epoch": 0.031517094017094016, "grad_norm": 0.47187718749046326, "learning_rate": 0.000199975853674208, "loss": 1.0822, "step": 177 }, { "epoch": 0.0316951566951567, "grad_norm": 0.37751707434654236, "learning_rate": 0.000199975545110672, "loss": 1.1439, "step": 178 }, { "epoch": 0.03187321937321937, "grad_norm": 0.38792455196380615, "learning_rate": 0.00019997523458832286, "loss": 0.8604, "step": 179 }, { "epoch": 0.03205128205128205, "grad_norm": 0.35199594497680664, "learning_rate": 0.00019997492210716667, "loss": 1.0819, "step": 180 }, { "epoch": 0.03222934472934473, "grad_norm": 0.4828922748565674, "learning_rate": 0.00019997460766720958, "loss": 1.1879, "step": 181 }, { "epoch": 0.032407407407407406, "grad_norm": 0.46153363585472107, "learning_rate": 0.00019997429126845774, "loss": 1.1592, "step": 182 }, { "epoch": 0.03258547008547009, "grad_norm": 0.4844890832901001, "learning_rate": 0.0001999739729109173, "loss": 1.1334, "step": 183 }, { "epoch": 0.03276353276353276, "grad_norm": 0.414617121219635, "learning_rate": 0.00019997365259459457, "loss": 1.0547, "step": 184 }, { "epoch": 0.032941595441595445, "grad_norm": 0.46544626355171204, "learning_rate": 0.00019997333031949581, "loss": 1.4067, "step": 185 }, { "epoch": 0.03311965811965812, "grad_norm": 0.48489415645599365, "learning_rate": 0.0001999730060856273, "loss": 1.4027, "step": 186 }, { "epoch": 0.033297720797720795, "grad_norm": 0.3963346481323242, "learning_rate": 0.0001999726798929954, "loss": 1.1327, "step": 187 }, { "epoch": 0.03347578347578348, "grad_norm": 0.3809385895729065, "learning_rate": 0.00019997235174160652, "loss": 1.3475, 
"step": 188 }, { "epoch": 0.03365384615384615, "grad_norm": 0.3866960406303406, "learning_rate": 0.0001999720216314671, "loss": 1.1576, "step": 189 }, { "epoch": 0.033831908831908834, "grad_norm": 0.34976935386657715, "learning_rate": 0.00019997168956258356, "loss": 0.9361, "step": 190 }, { "epoch": 0.03400997150997151, "grad_norm": 0.38681939244270325, "learning_rate": 0.00019997135553496243, "loss": 1.1796, "step": 191 }, { "epoch": 0.03418803418803419, "grad_norm": 0.41905197501182556, "learning_rate": 0.0001999710195486103, "loss": 1.1714, "step": 192 }, { "epoch": 0.03436609686609687, "grad_norm": 0.42356589436531067, "learning_rate": 0.0001999706816035337, "loss": 1.0022, "step": 193 }, { "epoch": 0.03454415954415954, "grad_norm": 0.3929740786552429, "learning_rate": 0.00019997034169973925, "loss": 1.3769, "step": 194 }, { "epoch": 0.034722222222222224, "grad_norm": 0.4325186312198639, "learning_rate": 0.00019996999983723366, "loss": 1.3057, "step": 195 }, { "epoch": 0.0349002849002849, "grad_norm": 0.3954029381275177, "learning_rate": 0.00019996965601602355, "loss": 1.1958, "step": 196 }, { "epoch": 0.03507834757834758, "grad_norm": 0.34454262256622314, "learning_rate": 0.00019996931023611572, "loss": 1.0972, "step": 197 }, { "epoch": 0.035256410256410256, "grad_norm": 0.48900291323661804, "learning_rate": 0.0001999689624975169, "loss": 1.213, "step": 198 }, { "epoch": 0.03543447293447293, "grad_norm": 0.35214388370513916, "learning_rate": 0.00019996861280023397, "loss": 1.0285, "step": 199 }, { "epoch": 0.03561253561253561, "grad_norm": 0.49393126368522644, "learning_rate": 0.00019996826114427373, "loss": 1.2313, "step": 200 }, { "epoch": 0.03579059829059829, "grad_norm": 0.3994458019733429, "learning_rate": 0.00019996790752964305, "loss": 1.0474, "step": 201 }, { "epoch": 0.03596866096866097, "grad_norm": 0.5387318730354309, "learning_rate": 0.0001999675519563489, "loss": 1.3067, "step": 202 }, { "epoch": 0.036146723646723646, "grad_norm": 
0.4976751208305359, "learning_rate": 0.00019996719442439824, "loss": 1.2593, "step": 203 }, { "epoch": 0.03632478632478633, "grad_norm": 0.47052907943725586, "learning_rate": 0.0001999668349337981, "loss": 1.1036, "step": 204 }, { "epoch": 0.036502849002849, "grad_norm": 0.39616644382476807, "learning_rate": 0.00019996647348455543, "loss": 1.0481, "step": 205 }, { "epoch": 0.03668091168091168, "grad_norm": 0.42987677454948425, "learning_rate": 0.00019996611007667742, "loss": 1.0923, "step": 206 }, { "epoch": 0.03685897435897436, "grad_norm": 0.47065848112106323, "learning_rate": 0.00019996574471017113, "loss": 1.1403, "step": 207 }, { "epoch": 0.037037037037037035, "grad_norm": 0.4363015592098236, "learning_rate": 0.00019996537738504373, "loss": 1.253, "step": 208 }, { "epoch": 0.03721509971509972, "grad_norm": 0.4038296937942505, "learning_rate": 0.00019996500810130243, "loss": 1.1679, "step": 209 }, { "epoch": 0.03739316239316239, "grad_norm": 0.5038532018661499, "learning_rate": 0.00019996463685895445, "loss": 1.1182, "step": 210 }, { "epoch": 0.037571225071225074, "grad_norm": 0.37740692496299744, "learning_rate": 0.00019996426365800706, "loss": 1.0465, "step": 211 }, { "epoch": 0.03774928774928775, "grad_norm": 0.47794604301452637, "learning_rate": 0.00019996388849846759, "loss": 1.2836, "step": 212 }, { "epoch": 0.037927350427350424, "grad_norm": 0.38460609316825867, "learning_rate": 0.0001999635113803434, "loss": 1.2099, "step": 213 }, { "epoch": 0.038105413105413107, "grad_norm": 0.42016157507896423, "learning_rate": 0.0001999631323036418, "loss": 1.152, "step": 214 }, { "epoch": 0.03828347578347578, "grad_norm": 0.4024946391582489, "learning_rate": 0.00019996275126837033, "loss": 1.1534, "step": 215 }, { "epoch": 0.038461538461538464, "grad_norm": 0.4573793411254883, "learning_rate": 0.00019996236827453642, "loss": 1.2019, "step": 216 }, { "epoch": 0.03863960113960114, "grad_norm": 0.3642503321170807, "learning_rate": 0.0001999619833221475, "loss": 1.0541, 
"step": 217 }, { "epoch": 0.03881766381766382, "grad_norm": 0.38492897152900696, "learning_rate": 0.0001999615964112112, "loss": 1.1269, "step": 218 }, { "epoch": 0.038995726495726496, "grad_norm": 0.427219420671463, "learning_rate": 0.0001999612075417351, "loss": 1.1126, "step": 219 }, { "epoch": 0.03917378917378917, "grad_norm": 0.40781742334365845, "learning_rate": 0.00019996081671372676, "loss": 1.2207, "step": 220 }, { "epoch": 0.03935185185185185, "grad_norm": 0.39229512214660645, "learning_rate": 0.00019996042392719386, "loss": 1.0403, "step": 221 }, { "epoch": 0.03952991452991453, "grad_norm": 0.42038577795028687, "learning_rate": 0.0001999600291821441, "loss": 1.2157, "step": 222 }, { "epoch": 0.03970797720797721, "grad_norm": 0.3963491916656494, "learning_rate": 0.00019995963247858525, "loss": 1.0532, "step": 223 }, { "epoch": 0.039886039886039885, "grad_norm": 0.4389874041080475, "learning_rate": 0.00019995923381652502, "loss": 1.4279, "step": 224 }, { "epoch": 0.04006410256410257, "grad_norm": 0.357312947511673, "learning_rate": 0.00019995883319597123, "loss": 0.9871, "step": 225 }, { "epoch": 0.04024216524216524, "grad_norm": 0.3644427955150604, "learning_rate": 0.00019995843061693181, "loss": 1.0879, "step": 226 }, { "epoch": 0.04042022792022792, "grad_norm": 0.4074651002883911, "learning_rate": 0.00019995802607941453, "loss": 1.2138, "step": 227 }, { "epoch": 0.0405982905982906, "grad_norm": 0.40709465742111206, "learning_rate": 0.0001999576195834274, "loss": 1.1905, "step": 228 }, { "epoch": 0.040776353276353275, "grad_norm": 0.4280182719230652, "learning_rate": 0.00019995721112897838, "loss": 1.2331, "step": 229 }, { "epoch": 0.04095441595441596, "grad_norm": 0.37846076488494873, "learning_rate": 0.00019995680071607544, "loss": 1.078, "step": 230 }, { "epoch": 0.04113247863247863, "grad_norm": 0.3877260088920593, "learning_rate": 0.0001999563883447266, "loss": 1.0309, "step": 231 }, { "epoch": 0.04131054131054131, "grad_norm": 0.42886826395988464, 
"learning_rate": 0.00019995597401494, "loss": 1.0403, "step": 232 }, { "epoch": 0.04148860398860399, "grad_norm": 0.4316534101963043, "learning_rate": 0.00019995555772672372, "loss": 1.2418, "step": 233 }, { "epoch": 0.041666666666666664, "grad_norm": 0.45768865942955017, "learning_rate": 0.00019995513948008593, "loss": 1.233, "step": 234 }, { "epoch": 0.041844729344729346, "grad_norm": 0.5647913813591003, "learning_rate": 0.00019995471927503481, "loss": 1.1346, "step": 235 }, { "epoch": 0.04202279202279202, "grad_norm": 0.3797492980957031, "learning_rate": 0.00019995429711157863, "loss": 1.1574, "step": 236 }, { "epoch": 0.042200854700854704, "grad_norm": 0.4392767548561096, "learning_rate": 0.00019995387298972562, "loss": 0.8988, "step": 237 }, { "epoch": 0.04237891737891738, "grad_norm": 0.37331557273864746, "learning_rate": 0.0001999534469094841, "loss": 1.0439, "step": 238 }, { "epoch": 0.042556980056980054, "grad_norm": 0.3785935938358307, "learning_rate": 0.00019995301887086245, "loss": 0.9839, "step": 239 }, { "epoch": 0.042735042735042736, "grad_norm": 0.4351862668991089, "learning_rate": 0.00019995258887386898, "loss": 1.2653, "step": 240 }, { "epoch": 0.04291310541310541, "grad_norm": 0.399475634098053, "learning_rate": 0.0001999521569185122, "loss": 0.9877, "step": 241 }, { "epoch": 0.04309116809116809, "grad_norm": 0.42332810163497925, "learning_rate": 0.00019995172300480053, "loss": 1.2403, "step": 242 }, { "epoch": 0.04326923076923077, "grad_norm": 0.4397708475589752, "learning_rate": 0.00019995128713274247, "loss": 0.9316, "step": 243 }, { "epoch": 0.04344729344729345, "grad_norm": 0.3614110052585602, "learning_rate": 0.00019995084930234658, "loss": 1.1088, "step": 244 }, { "epoch": 0.043625356125356125, "grad_norm": 0.39433717727661133, "learning_rate": 0.0001999504095136214, "loss": 1.2002, "step": 245 }, { "epoch": 0.0438034188034188, "grad_norm": 0.33088216185569763, "learning_rate": 0.0001999499677665756, "loss": 0.8796, "step": 246 }, { 
"epoch": 0.04398148148148148, "grad_norm": 0.5239143967628479, "learning_rate": 0.00019994952406121784, "loss": 1.2808, "step": 247 }, { "epoch": 0.04415954415954416, "grad_norm": 0.42156723141670227, "learning_rate": 0.00019994907839755675, "loss": 1.1775, "step": 248 }, { "epoch": 0.04433760683760684, "grad_norm": 0.42569902539253235, "learning_rate": 0.0001999486307756011, "loss": 1.001, "step": 249 }, { "epoch": 0.044515669515669515, "grad_norm": 0.38241544365882874, "learning_rate": 0.00019994818119535964, "loss": 1.1064, "step": 250 }, { "epoch": 0.0446937321937322, "grad_norm": 0.4185071885585785, "learning_rate": 0.0001999477296568412, "loss": 1.2109, "step": 251 }, { "epoch": 0.04487179487179487, "grad_norm": 0.4189644157886505, "learning_rate": 0.00019994727616005464, "loss": 1.2902, "step": 252 }, { "epoch": 0.04504985754985755, "grad_norm": 0.34671884775161743, "learning_rate": 0.0001999468207050088, "loss": 0.9429, "step": 253 }, { "epoch": 0.04522792022792023, "grad_norm": 0.42391687631607056, "learning_rate": 0.00019994636329171266, "loss": 0.7179, "step": 254 }, { "epoch": 0.045405982905982904, "grad_norm": 0.3803195655345917, "learning_rate": 0.00019994590392017513, "loss": 1.0318, "step": 255 }, { "epoch": 0.045584045584045586, "grad_norm": 0.3389956057071686, "learning_rate": 0.00019994544259040525, "loss": 1.0485, "step": 256 }, { "epoch": 0.04576210826210826, "grad_norm": 0.4927038550376892, "learning_rate": 0.000199944979302412, "loss": 1.3426, "step": 257 }, { "epoch": 0.045940170940170943, "grad_norm": 0.33200421929359436, "learning_rate": 0.00019994451405620453, "loss": 1.0071, "step": 258 }, { "epoch": 0.04611823361823362, "grad_norm": 0.38028615713119507, "learning_rate": 0.00019994404685179195, "loss": 1.0985, "step": 259 }, { "epoch": 0.046296296296296294, "grad_norm": 0.3752151429653168, "learning_rate": 0.00019994357768918333, "loss": 0.9209, "step": 260 }, { "epoch": 0.046474358974358976, "grad_norm": 0.43030866980552673, 
"learning_rate": 0.00019994310656838796, "loss": 0.9921, "step": 261 }, { "epoch": 0.04665242165242165, "grad_norm": 0.4402460753917694, "learning_rate": 0.00019994263348941502, "loss": 1.1051, "step": 262 }, { "epoch": 0.04683048433048433, "grad_norm": 0.43012720346450806, "learning_rate": 0.0001999421584522738, "loss": 1.1839, "step": 263 }, { "epoch": 0.04700854700854701, "grad_norm": 0.4195305407047272, "learning_rate": 0.0001999416814569736, "loss": 1.1749, "step": 264 }, { "epoch": 0.04718660968660968, "grad_norm": 0.45623287558555603, "learning_rate": 0.00019994120250352372, "loss": 1.2433, "step": 265 }, { "epoch": 0.047364672364672365, "grad_norm": 0.4736156761646271, "learning_rate": 0.00019994072159193363, "loss": 1.2882, "step": 266 }, { "epoch": 0.04754273504273504, "grad_norm": 0.36698561906814575, "learning_rate": 0.0001999402387222127, "loss": 1.1486, "step": 267 }, { "epoch": 0.04772079772079772, "grad_norm": 0.3854144215583801, "learning_rate": 0.00019993975389437038, "loss": 0.8115, "step": 268 }, { "epoch": 0.0478988603988604, "grad_norm": 0.41512808203697205, "learning_rate": 0.0001999392671084162, "loss": 1.0959, "step": 269 }, { "epoch": 0.04807692307692308, "grad_norm": 0.3869563341140747, "learning_rate": 0.0001999387783643597, "loss": 1.087, "step": 270 }, { "epoch": 0.048254985754985755, "grad_norm": 0.4649744927883148, "learning_rate": 0.00019993828766221044, "loss": 1.0011, "step": 271 }, { "epoch": 0.04843304843304843, "grad_norm": 0.40331923961639404, "learning_rate": 0.00019993779500197803, "loss": 1.1463, "step": 272 }, { "epoch": 0.04861111111111111, "grad_norm": 0.3826279938220978, "learning_rate": 0.0001999373003836721, "loss": 1.1491, "step": 273 }, { "epoch": 0.04878917378917379, "grad_norm": 0.3967166543006897, "learning_rate": 0.00019993680380730243, "loss": 1.1462, "step": 274 }, { "epoch": 0.04896723646723647, "grad_norm": 0.4298507869243622, "learning_rate": 0.00019993630527287865, "loss": 1.2471, "step": 275 }, { "epoch": 
0.049145299145299144, "grad_norm": 0.41486215591430664, "learning_rate": 0.0001999358047804106, "loss": 1.287, "step": 276 }, { "epoch": 0.049323361823361826, "grad_norm": 0.3914124369621277, "learning_rate": 0.00019993530232990803, "loss": 1.0935, "step": 277 }, { "epoch": 0.0495014245014245, "grad_norm": 0.39888378977775574, "learning_rate": 0.00019993479792138082, "loss": 1.2347, "step": 278 }, { "epoch": 0.049679487179487176, "grad_norm": 0.3911665678024292, "learning_rate": 0.00019993429155483884, "loss": 1.0917, "step": 279 }, { "epoch": 0.04985754985754986, "grad_norm": 0.42871445417404175, "learning_rate": 0.00019993378323029197, "loss": 1.0277, "step": 280 }, { "epoch": 0.050035612535612534, "grad_norm": 0.35397860407829285, "learning_rate": 0.00019993327294775027, "loss": 0.9549, "step": 281 }, { "epoch": 0.050213675213675216, "grad_norm": 0.4528059959411621, "learning_rate": 0.00019993276070722364, "loss": 1.2338, "step": 282 }, { "epoch": 0.05039173789173789, "grad_norm": 0.354735791683197, "learning_rate": 0.00019993224650872218, "loss": 1.1892, "step": 283 }, { "epoch": 0.05056980056980057, "grad_norm": 0.44407567381858826, "learning_rate": 0.00019993173035225592, "loss": 1.1621, "step": 284 }, { "epoch": 0.05074786324786325, "grad_norm": 0.4177244305610657, "learning_rate": 0.000199931212237835, "loss": 1.1184, "step": 285 }, { "epoch": 0.05092592592592592, "grad_norm": 0.5627759695053101, "learning_rate": 0.0001999306921654696, "loss": 1.0755, "step": 286 }, { "epoch": 0.051103988603988605, "grad_norm": 0.46767523884773254, "learning_rate": 0.00019993017013516986, "loss": 1.2654, "step": 287 }, { "epoch": 0.05128205128205128, "grad_norm": 0.4163128733634949, "learning_rate": 0.000199929646146946, "loss": 1.1307, "step": 288 }, { "epoch": 0.05146011396011396, "grad_norm": 0.36954161524772644, "learning_rate": 0.00019992912020080832, "loss": 0.8274, "step": 289 }, { "epoch": 0.05163817663817664, "grad_norm": 0.4770594835281372, "learning_rate": 
0.00019992859229676712, "loss": 1.2235, "step": 290 }, { "epoch": 0.05181623931623932, "grad_norm": 0.4174608290195465, "learning_rate": 0.00019992806243483274, "loss": 1.2893, "step": 291 }, { "epoch": 0.051994301994301995, "grad_norm": 0.3794898986816406, "learning_rate": 0.00019992753061501555, "loss": 1.104, "step": 292 }, { "epoch": 0.05217236467236467, "grad_norm": 0.3912592828273773, "learning_rate": 0.000199926996837326, "loss": 1.0043, "step": 293 }, { "epoch": 0.05235042735042735, "grad_norm": 0.39641159772872925, "learning_rate": 0.00019992646110177448, "loss": 1.083, "step": 294 }, { "epoch": 0.05252849002849003, "grad_norm": 0.3518857955932617, "learning_rate": 0.00019992592340837157, "loss": 0.9275, "step": 295 }, { "epoch": 0.05270655270655271, "grad_norm": 0.3955721855163574, "learning_rate": 0.00019992538375712777, "loss": 1.0153, "step": 296 }, { "epoch": 0.052884615384615384, "grad_norm": 0.3837333023548126, "learning_rate": 0.00019992484214805364, "loss": 1.1664, "step": 297 }, { "epoch": 0.053062678062678066, "grad_norm": 0.39400920271873474, "learning_rate": 0.0001999242985811598, "loss": 1.0532, "step": 298 }, { "epoch": 0.05324074074074074, "grad_norm": 0.39258649945259094, "learning_rate": 0.00019992375305645692, "loss": 1.0081, "step": 299 }, { "epoch": 0.053418803418803416, "grad_norm": 0.49768248200416565, "learning_rate": 0.00019992320557395566, "loss": 1.2553, "step": 300 }, { "epoch": 0.0535968660968661, "grad_norm": 0.364776074886322, "learning_rate": 0.00019992265613366677, "loss": 1.0582, "step": 301 }, { "epoch": 0.053774928774928774, "grad_norm": 0.47317907214164734, "learning_rate": 0.00019992210473560097, "loss": 1.3114, "step": 302 }, { "epoch": 0.053952991452991456, "grad_norm": 0.3706119656562805, "learning_rate": 0.00019992155137976917, "loss": 0.9554, "step": 303 }, { "epoch": 0.05413105413105413, "grad_norm": 0.42809563875198364, "learning_rate": 0.0001999209960661821, "loss": 1.306, "step": 304 }, { "epoch": 
0.054309116809116806, "grad_norm": 0.4514487385749817, "learning_rate": 0.00019992043879485066, "loss": 1.0147, "step": 305 }, { "epoch": 0.05448717948717949, "grad_norm": 0.36672836542129517, "learning_rate": 0.0001999198795657858, "loss": 1.1392, "step": 306 }, { "epoch": 0.05466524216524216, "grad_norm": 0.4206554889678955, "learning_rate": 0.00019991931837899847, "loss": 1.2405, "step": 307 }, { "epoch": 0.054843304843304845, "grad_norm": 0.46168261766433716, "learning_rate": 0.00019991875523449966, "loss": 1.2707, "step": 308 }, { "epoch": 0.05502136752136752, "grad_norm": 0.39503365755081177, "learning_rate": 0.00019991819013230039, "loss": 1.0776, "step": 309 }, { "epoch": 0.0551994301994302, "grad_norm": 0.35244834423065186, "learning_rate": 0.00019991762307241178, "loss": 1.0864, "step": 310 }, { "epoch": 0.05537749287749288, "grad_norm": 0.3865319490432739, "learning_rate": 0.0001999170540548449, "loss": 1.3659, "step": 311 }, { "epoch": 0.05555555555555555, "grad_norm": 0.3666876554489136, "learning_rate": 0.0001999164830796109, "loss": 0.9884, "step": 312 }, { "epoch": 0.055733618233618235, "grad_norm": 0.4278281629085541, "learning_rate": 0.00019991591014672096, "loss": 1.1522, "step": 313 }, { "epoch": 0.05591168091168091, "grad_norm": 0.4172627031803131, "learning_rate": 0.0001999153352561863, "loss": 1.2527, "step": 314 }, { "epoch": 0.05608974358974359, "grad_norm": 0.38872212171554565, "learning_rate": 0.00019991475840801823, "loss": 1.2985, "step": 315 }, { "epoch": 0.05626780626780627, "grad_norm": 0.4160458445549011, "learning_rate": 0.00019991417960222804, "loss": 1.1347, "step": 316 }, { "epoch": 0.05644586894586895, "grad_norm": 0.5169723033905029, "learning_rate": 0.00019991359883882705, "loss": 1.0819, "step": 317 }, { "epoch": 0.056623931623931624, "grad_norm": 0.42306259274482727, "learning_rate": 0.0001999130161178266, "loss": 1.3139, "step": 318 }, { "epoch": 0.0568019943019943, "grad_norm": 0.41975873708724976, "learning_rate": 
0.00019991243143923816, "loss": 1.2277, "step": 319 }, { "epoch": 0.05698005698005698, "grad_norm": 0.3873472511768341, "learning_rate": 0.00019991184480307324, "loss": 1.156, "step": 320 }, { "epoch": 0.057158119658119656, "grad_norm": 0.43656104803085327, "learning_rate": 0.0001999112562093432, "loss": 1.2344, "step": 321 }, { "epoch": 0.05733618233618234, "grad_norm": 0.3738791048526764, "learning_rate": 0.00019991066565805968, "loss": 0.9573, "step": 322 }, { "epoch": 0.05751424501424501, "grad_norm": 0.3838156461715698, "learning_rate": 0.00019991007314923418, "loss": 0.9274, "step": 323 }, { "epoch": 0.057692307692307696, "grad_norm": 0.4564770758152008, "learning_rate": 0.00019990947868287837, "loss": 1.0756, "step": 324 }, { "epoch": 0.05787037037037037, "grad_norm": 0.4560079872608185, "learning_rate": 0.00019990888225900386, "loss": 1.1508, "step": 325 }, { "epoch": 0.058048433048433046, "grad_norm": 0.44356057047843933, "learning_rate": 0.00019990828387762236, "loss": 1.2323, "step": 326 }, { "epoch": 0.05822649572649573, "grad_norm": 0.46390119194984436, "learning_rate": 0.00019990768353874553, "loss": 1.0031, "step": 327 }, { "epoch": 0.0584045584045584, "grad_norm": 0.4502357244491577, "learning_rate": 0.00019990708124238525, "loss": 1.3454, "step": 328 }, { "epoch": 0.058582621082621085, "grad_norm": 0.3979945182800293, "learning_rate": 0.0001999064769885532, "loss": 1.2833, "step": 329 }, { "epoch": 0.05876068376068376, "grad_norm": 0.3899286687374115, "learning_rate": 0.00019990587077726128, "loss": 1.0175, "step": 330 }, { "epoch": 0.05893874643874644, "grad_norm": 0.41422948241233826, "learning_rate": 0.00019990526260852139, "loss": 1.1151, "step": 331 }, { "epoch": 0.05911680911680912, "grad_norm": 0.4266608953475952, "learning_rate": 0.0001999046524823454, "loss": 1.1119, "step": 332 }, { "epoch": 0.05929487179487179, "grad_norm": 0.46563324332237244, "learning_rate": 0.00019990404039874524, "loss": 1.2358, "step": 333 }, { "epoch": 
0.059472934472934474, "grad_norm": 0.4404347240924835, "learning_rate": 0.00019990342635773297, "loss": 1.1748, "step": 334 }, { "epoch": 0.05965099715099715, "grad_norm": 0.5133237838745117, "learning_rate": 0.00019990281035932062, "loss": 1.1649, "step": 335 }, { "epoch": 0.05982905982905983, "grad_norm": 0.3593895435333252, "learning_rate": 0.00019990219240352018, "loss": 1.0318, "step": 336 }, { "epoch": 0.06000712250712251, "grad_norm": 0.40554583072662354, "learning_rate": 0.00019990157249034384, "loss": 1.1202, "step": 337 }, { "epoch": 0.06018518518518518, "grad_norm": 0.3770706057548523, "learning_rate": 0.00019990095061980372, "loss": 0.9908, "step": 338 }, { "epoch": 0.060363247863247864, "grad_norm": 0.39676955342292786, "learning_rate": 0.000199900326791912, "loss": 0.8176, "step": 339 }, { "epoch": 0.06054131054131054, "grad_norm": 0.41448578238487244, "learning_rate": 0.00019989970100668086, "loss": 1.2877, "step": 340 }, { "epoch": 0.06071937321937322, "grad_norm": 0.4200015068054199, "learning_rate": 0.00019989907326412265, "loss": 1.2293, "step": 341 }, { "epoch": 0.060897435897435896, "grad_norm": 0.47350621223449707, "learning_rate": 0.0001998984435642496, "loss": 1.2331, "step": 342 }, { "epoch": 0.06107549857549858, "grad_norm": 0.47050634026527405, "learning_rate": 0.00019989781190707406, "loss": 0.8888, "step": 343 }, { "epoch": 0.06125356125356125, "grad_norm": 0.4994896948337555, "learning_rate": 0.00019989717829260842, "loss": 1.0921, "step": 344 }, { "epoch": 0.06143162393162393, "grad_norm": 0.36340200901031494, "learning_rate": 0.0001998965427208651, "loss": 0.9777, "step": 345 }, { "epoch": 0.06160968660968661, "grad_norm": 0.3538152873516083, "learning_rate": 0.00019989590519185654, "loss": 1.0055, "step": 346 }, { "epoch": 0.061787749287749286, "grad_norm": 0.5388944149017334, "learning_rate": 0.00019989526570559526, "loss": 1.1001, "step": 347 }, { "epoch": 0.06196581196581197, "grad_norm": 0.4411574602127075, "learning_rate": 
0.00019989462426209373, "loss": 1.0038, "step": 348 }, { "epoch": 0.06214387464387464, "grad_norm": 0.3930876851081848, "learning_rate": 0.00019989398086136455, "loss": 1.1534, "step": 349 }, { "epoch": 0.062321937321937325, "grad_norm": 0.47357070446014404, "learning_rate": 0.00019989333550342033, "loss": 1.2687, "step": 350 }, { "epoch": 0.0625, "grad_norm": 0.40302303433418274, "learning_rate": 0.00019989268818827372, "loss": 1.1894, "step": 351 }, { "epoch": 0.06267806267806268, "grad_norm": 0.4470510184764862, "learning_rate": 0.00019989203891593738, "loss": 1.2207, "step": 352 }, { "epoch": 0.06285612535612535, "grad_norm": 0.42235100269317627, "learning_rate": 0.00019989138768642406, "loss": 1.2086, "step": 353 }, { "epoch": 0.06303418803418803, "grad_norm": 0.38305309414863586, "learning_rate": 0.0001998907344997465, "loss": 1.0473, "step": 354 }, { "epoch": 0.06321225071225071, "grad_norm": 0.3893027901649475, "learning_rate": 0.0001998900793559175, "loss": 1.1746, "step": 355 }, { "epoch": 0.0633903133903134, "grad_norm": 0.41206735372543335, "learning_rate": 0.0001998894222549499, "loss": 1.188, "step": 356 }, { "epoch": 0.06356837606837606, "grad_norm": 0.3700513243675232, "learning_rate": 0.00019988876319685658, "loss": 0.9862, "step": 357 }, { "epoch": 0.06374643874643875, "grad_norm": 0.3708794116973877, "learning_rate": 0.0001998881021816504, "loss": 1.2003, "step": 358 }, { "epoch": 0.06392450142450143, "grad_norm": 0.4058014154434204, "learning_rate": 0.00019988743920934442, "loss": 1.2311, "step": 359 }, { "epoch": 0.0641025641025641, "grad_norm": 0.39134132862091064, "learning_rate": 0.00019988677427995155, "loss": 1.001, "step": 360 }, { "epoch": 0.06428062678062678, "grad_norm": 0.3853437602519989, "learning_rate": 0.00019988610739348484, "loss": 1.0725, "step": 361 }, { "epoch": 0.06445868945868946, "grad_norm": 0.47114330530166626, "learning_rate": 0.00019988543854995735, "loss": 1.2196, "step": 362 }, { "epoch": 0.06463675213675214, 
"grad_norm": 0.40465688705444336, "learning_rate": 0.00019988476774938216, "loss": 1.1869, "step": 363 }, { "epoch": 0.06481481481481481, "grad_norm": 0.40301886200904846, "learning_rate": 0.00019988409499177245, "loss": 1.1765, "step": 364 }, { "epoch": 0.0649928774928775, "grad_norm": 0.43443185091018677, "learning_rate": 0.0001998834202771414, "loss": 1.2022, "step": 365 }, { "epoch": 0.06517094017094018, "grad_norm": 0.4712986350059509, "learning_rate": 0.00019988274360550217, "loss": 1.156, "step": 366 }, { "epoch": 0.06534900284900284, "grad_norm": 0.4524450898170471, "learning_rate": 0.00019988206497686815, "loss": 1.2917, "step": 367 }, { "epoch": 0.06552706552706553, "grad_norm": 0.40302205085754395, "learning_rate": 0.0001998813843912525, "loss": 0.9993, "step": 368 }, { "epoch": 0.06570512820512821, "grad_norm": 0.39435216784477234, "learning_rate": 0.00019988070184866864, "loss": 1.0914, "step": 369 }, { "epoch": 0.06588319088319089, "grad_norm": 0.39267390966415405, "learning_rate": 0.00019988001734912988, "loss": 1.3138, "step": 370 }, { "epoch": 0.06606125356125356, "grad_norm": 0.38351675868034363, "learning_rate": 0.00019987933089264968, "loss": 1.0997, "step": 371 }, { "epoch": 0.06623931623931624, "grad_norm": 0.3294839859008789, "learning_rate": 0.00019987864247924145, "loss": 0.9656, "step": 372 }, { "epoch": 0.06641737891737892, "grad_norm": 0.45333364605903625, "learning_rate": 0.00019987795210891872, "loss": 1.095, "step": 373 }, { "epoch": 0.06659544159544159, "grad_norm": 0.4362282454967499, "learning_rate": 0.00019987725978169501, "loss": 1.2103, "step": 374 }, { "epoch": 0.06677350427350427, "grad_norm": 0.41314780712127686, "learning_rate": 0.00019987656549758385, "loss": 1.2115, "step": 375 }, { "epoch": 0.06695156695156695, "grad_norm": 0.4230864644050598, "learning_rate": 0.00019987586925659888, "loss": 1.17, "step": 376 }, { "epoch": 0.06712962962962964, "grad_norm": 0.4703855812549591, "learning_rate": 0.00019987517105875372, 
"loss": 1.367, "step": 377 }, { "epoch": 0.0673076923076923, "grad_norm": 0.4671297073364258, "learning_rate": 0.00019987447090406206, "loss": 1.2543, "step": 378 }, { "epoch": 0.06748575498575499, "grad_norm": 0.43746981024742126, "learning_rate": 0.0001998737687925376, "loss": 1.214, "step": 379 }, { "epoch": 0.06766381766381767, "grad_norm": 0.40889596939086914, "learning_rate": 0.00019987306472419412, "loss": 1.0496, "step": 380 }, { "epoch": 0.06784188034188034, "grad_norm": 0.3677358627319336, "learning_rate": 0.0001998723586990454, "loss": 1.1242, "step": 381 }, { "epoch": 0.06801994301994302, "grad_norm": 0.3892628848552704, "learning_rate": 0.00019987165071710527, "loss": 1.0246, "step": 382 }, { "epoch": 0.0681980056980057, "grad_norm": 0.4281293749809265, "learning_rate": 0.00019987094077838764, "loss": 1.2817, "step": 383 }, { "epoch": 0.06837606837606838, "grad_norm": 0.45030340552330017, "learning_rate": 0.00019987022888290636, "loss": 1.159, "step": 384 }, { "epoch": 0.06855413105413105, "grad_norm": 0.6327905058860779, "learning_rate": 0.00019986951503067545, "loss": 0.9577, "step": 385 }, { "epoch": 0.06873219373219373, "grad_norm": 0.40339627861976624, "learning_rate": 0.0001998687992217088, "loss": 1.138, "step": 386 }, { "epoch": 0.06891025641025642, "grad_norm": 0.4018291234970093, "learning_rate": 0.00019986808145602052, "loss": 0.9109, "step": 387 }, { "epoch": 0.06908831908831908, "grad_norm": 0.41566264629364014, "learning_rate": 0.00019986736173362464, "loss": 1.1516, "step": 388 }, { "epoch": 0.06926638176638177, "grad_norm": 0.3569067418575287, "learning_rate": 0.00019986664005453527, "loss": 1.2329, "step": 389 }, { "epoch": 0.06944444444444445, "grad_norm": 0.3959648907184601, "learning_rate": 0.0001998659164187665, "loss": 1.1041, "step": 390 }, { "epoch": 0.06962250712250712, "grad_norm": 0.42853206396102905, "learning_rate": 0.00019986519082633257, "loss": 1.0859, "step": 391 }, { "epoch": 0.0698005698005698, "grad_norm": 
0.42005518078804016, "learning_rate": 0.0001998644632772477, "loss": 1.2017, "step": 392 }, { "epoch": 0.06997863247863248, "grad_norm": 0.4296947419643402, "learning_rate": 0.00019986373377152612, "loss": 1.1464, "step": 393 }, { "epoch": 0.07015669515669516, "grad_norm": 0.394747793674469, "learning_rate": 0.0001998630023091821, "loss": 1.0316, "step": 394 }, { "epoch": 0.07033475783475783, "grad_norm": 0.3779357969760895, "learning_rate": 0.00019986226889023002, "loss": 1.1081, "step": 395 }, { "epoch": 0.07051282051282051, "grad_norm": 0.4271804690361023, "learning_rate": 0.00019986153351468424, "loss": 0.985, "step": 396 }, { "epoch": 0.0706908831908832, "grad_norm": 0.49412235617637634, "learning_rate": 0.00019986079618255912, "loss": 1.2606, "step": 397 }, { "epoch": 0.07086894586894586, "grad_norm": 0.43657439947128296, "learning_rate": 0.00019986005689386915, "loss": 1.2266, "step": 398 }, { "epoch": 0.07104700854700854, "grad_norm": 0.4060729444026947, "learning_rate": 0.0001998593156486288, "loss": 1.1787, "step": 399 }, { "epoch": 0.07122507122507123, "grad_norm": 0.387046217918396, "learning_rate": 0.00019985857244685264, "loss": 0.9411, "step": 400 }, { "epoch": 0.07140313390313391, "grad_norm": 0.4243999123573303, "learning_rate": 0.00019985782728855516, "loss": 1.2024, "step": 401 }, { "epoch": 0.07158119658119658, "grad_norm": 0.43113812804222107, "learning_rate": 0.000199857080173751, "loss": 1.1246, "step": 402 }, { "epoch": 0.07175925925925926, "grad_norm": 0.4653271436691284, "learning_rate": 0.0001998563311024548, "loss": 1.2343, "step": 403 }, { "epoch": 0.07193732193732194, "grad_norm": 0.43260812759399414, "learning_rate": 0.0001998555800746812, "loss": 0.9543, "step": 404 }, { "epoch": 0.07211538461538461, "grad_norm": 0.4635484516620636, "learning_rate": 0.00019985482709044495, "loss": 1.1091, "step": 405 }, { "epoch": 0.07229344729344729, "grad_norm": 0.38362643122673035, "learning_rate": 0.00019985407214976076, "loss": 1.2584, "step": 
406 }, { "epoch": 0.07247150997150997, "grad_norm": 0.4068310558795929, "learning_rate": 0.00019985331525264351, "loss": 1.1944, "step": 407 }, { "epoch": 0.07264957264957266, "grad_norm": 0.43909943103790283, "learning_rate": 0.00019985255639910795, "loss": 1.3748, "step": 408 }, { "epoch": 0.07282763532763532, "grad_norm": 0.48674601316452026, "learning_rate": 0.000199851795589169, "loss": 1.2684, "step": 409 }, { "epoch": 0.073005698005698, "grad_norm": 0.4218580722808838, "learning_rate": 0.0001998510328228415, "loss": 1.168, "step": 410 }, { "epoch": 0.07318376068376069, "grad_norm": 0.4688236117362976, "learning_rate": 0.00019985026810014046, "loss": 1.3088, "step": 411 }, { "epoch": 0.07336182336182336, "grad_norm": 0.3863612711429596, "learning_rate": 0.00019984950142108083, "loss": 1.0261, "step": 412 }, { "epoch": 0.07353988603988604, "grad_norm": 0.4177640378475189, "learning_rate": 0.00019984873278567765, "loss": 1.1985, "step": 413 }, { "epoch": 0.07371794871794872, "grad_norm": 0.4645586311817169, "learning_rate": 0.00019984796219394592, "loss": 1.2463, "step": 414 }, { "epoch": 0.0738960113960114, "grad_norm": 0.5051766633987427, "learning_rate": 0.00019984718964590083, "loss": 1.3031, "step": 415 }, { "epoch": 0.07407407407407407, "grad_norm": 0.4200040400028229, "learning_rate": 0.0001998464151415575, "loss": 1.0842, "step": 416 }, { "epoch": 0.07425213675213675, "grad_norm": 0.34211036562919617, "learning_rate": 0.000199845638680931, "loss": 0.9659, "step": 417 }, { "epoch": 0.07443019943019943, "grad_norm": 0.3553323447704315, "learning_rate": 0.00019984486026403668, "loss": 1.0102, "step": 418 }, { "epoch": 0.0746082621082621, "grad_norm": 0.4967300295829773, "learning_rate": 0.00019984407989088974, "loss": 1.3125, "step": 419 }, { "epoch": 0.07478632478632478, "grad_norm": 0.41649797558784485, "learning_rate": 0.00019984329756150544, "loss": 1.3092, "step": 420 }, { "epoch": 0.07496438746438747, "grad_norm": 0.43825802206993103, 
"learning_rate": 0.00019984251327589912, "loss": 1.3678, "step": 421 }, { "epoch": 0.07514245014245015, "grad_norm": 0.363394170999527, "learning_rate": 0.00019984172703408617, "loss": 1.305, "step": 422 }, { "epoch": 0.07532051282051282, "grad_norm": 0.411563903093338, "learning_rate": 0.000199840938836082, "loss": 1.4248, "step": 423 }, { "epoch": 0.0754985754985755, "grad_norm": 0.40548190474510193, "learning_rate": 0.000199840148681902, "loss": 1.1081, "step": 424 }, { "epoch": 0.07567663817663818, "grad_norm": 0.3781099021434784, "learning_rate": 0.00019983935657156171, "loss": 1.185, "step": 425 }, { "epoch": 0.07585470085470085, "grad_norm": 0.46597573161125183, "learning_rate": 0.00019983856250507662, "loss": 1.119, "step": 426 }, { "epoch": 0.07603276353276353, "grad_norm": 0.3988197147846222, "learning_rate": 0.00019983776648246232, "loss": 1.206, "step": 427 }, { "epoch": 0.07621082621082621, "grad_norm": 0.41210901737213135, "learning_rate": 0.00019983696850373433, "loss": 1.1843, "step": 428 }, { "epoch": 0.0763888888888889, "grad_norm": 0.41870948672294617, "learning_rate": 0.00019983616856890837, "loss": 1.2248, "step": 429 }, { "epoch": 0.07656695156695156, "grad_norm": 0.4320056140422821, "learning_rate": 0.00019983536667800007, "loss": 0.9743, "step": 430 }, { "epoch": 0.07674501424501425, "grad_norm": 0.48455503582954407, "learning_rate": 0.00019983456283102517, "loss": 1.0438, "step": 431 }, { "epoch": 0.07692307692307693, "grad_norm": 0.38712427020072937, "learning_rate": 0.00019983375702799935, "loss": 1.2041, "step": 432 }, { "epoch": 0.0771011396011396, "grad_norm": 0.3578857481479645, "learning_rate": 0.0001998329492689385, "loss": 1.1623, "step": 433 }, { "epoch": 0.07727920227920228, "grad_norm": 0.43065932393074036, "learning_rate": 0.00019983213955385834, "loss": 1.3033, "step": 434 }, { "epoch": 0.07745726495726496, "grad_norm": 0.4882095754146576, "learning_rate": 0.00019983132788277484, "loss": 1.1635, "step": 435 }, { "epoch": 
0.07763532763532764, "grad_norm": 0.3429015874862671, "learning_rate": 0.00019983051425570382, "loss": 0.7289, "step": 436 }, { "epoch": 0.07781339031339031, "grad_norm": 0.4320310056209564, "learning_rate": 0.00019982969867266128, "loss": 1.3685, "step": 437 }, { "epoch": 0.07799145299145299, "grad_norm": 0.39891982078552246, "learning_rate": 0.00019982888113366314, "loss": 1.0444, "step": 438 }, { "epoch": 0.07816951566951567, "grad_norm": 0.3675695061683655, "learning_rate": 0.00019982806163872547, "loss": 1.0527, "step": 439 }, { "epoch": 0.07834757834757834, "grad_norm": 0.42824694514274597, "learning_rate": 0.0001998272401878643, "loss": 1.166, "step": 440 }, { "epoch": 0.07852564102564102, "grad_norm": 0.3721694350242615, "learning_rate": 0.00019982641678109575, "loss": 1.1328, "step": 441 }, { "epoch": 0.0787037037037037, "grad_norm": 0.33899208903312683, "learning_rate": 0.00019982559141843592, "loss": 1.016, "step": 442 }, { "epoch": 0.07888176638176639, "grad_norm": 0.4029340147972107, "learning_rate": 0.000199824764099901, "loss": 1.0076, "step": 443 }, { "epoch": 0.07905982905982906, "grad_norm": 0.4169132113456726, "learning_rate": 0.0001998239348255072, "loss": 1.208, "step": 444 }, { "epoch": 0.07923789173789174, "grad_norm": 0.3865824043750763, "learning_rate": 0.00019982310359527075, "loss": 1.067, "step": 445 }, { "epoch": 0.07941595441595442, "grad_norm": 0.4218919277191162, "learning_rate": 0.00019982227040920796, "loss": 1.195, "step": 446 }, { "epoch": 0.07959401709401709, "grad_norm": 0.40504586696624756, "learning_rate": 0.00019982143526733512, "loss": 1.0188, "step": 447 }, { "epoch": 0.07977207977207977, "grad_norm": 0.38330578804016113, "learning_rate": 0.00019982059816966863, "loss": 1.0484, "step": 448 }, { "epoch": 0.07995014245014245, "grad_norm": 0.43731689453125, "learning_rate": 0.00019981975911622488, "loss": 1.074, "step": 449 }, { "epoch": 0.08012820512820513, "grad_norm": 0.40858447551727295, "learning_rate": 
0.00019981891810702033, "loss": 1.0008, "step": 450 }, { "epoch": 0.0803062678062678, "grad_norm": 0.4031754732131958, "learning_rate": 0.00019981807514207143, "loss": 1.2179, "step": 451 }, { "epoch": 0.08048433048433049, "grad_norm": 0.41920867562294006, "learning_rate": 0.00019981723022139466, "loss": 1.1406, "step": 452 }, { "epoch": 0.08066239316239317, "grad_norm": 0.40305474400520325, "learning_rate": 0.00019981638334500668, "loss": 1.098, "step": 453 }, { "epoch": 0.08084045584045584, "grad_norm": 0.4564182460308075, "learning_rate": 0.00019981553451292396, "loss": 1.419, "step": 454 }, { "epoch": 0.08101851851851852, "grad_norm": 0.3832945227622986, "learning_rate": 0.00019981468372516322, "loss": 1.0919, "step": 455 }, { "epoch": 0.0811965811965812, "grad_norm": 0.43062624335289, "learning_rate": 0.0001998138309817411, "loss": 1.0458, "step": 456 }, { "epoch": 0.08137464387464387, "grad_norm": 0.3871173560619354, "learning_rate": 0.0001998129762826743, "loss": 1.1391, "step": 457 }, { "epoch": 0.08155270655270655, "grad_norm": 0.43423157930374146, "learning_rate": 0.0001998121196279796, "loss": 1.1132, "step": 458 }, { "epoch": 0.08173076923076923, "grad_norm": 0.4341012239456177, "learning_rate": 0.00019981126101767372, "loss": 1.113, "step": 459 }, { "epoch": 0.08190883190883191, "grad_norm": 0.36748576164245605, "learning_rate": 0.00019981040045177352, "loss": 0.8108, "step": 460 }, { "epoch": 0.08208689458689458, "grad_norm": 0.43133220076560974, "learning_rate": 0.00019980953793029586, "loss": 1.1861, "step": 461 }, { "epoch": 0.08226495726495726, "grad_norm": 0.37204909324645996, "learning_rate": 0.00019980867345325767, "loss": 0.9222, "step": 462 }, { "epoch": 0.08244301994301995, "grad_norm": 0.43370047211647034, "learning_rate": 0.00019980780702067582, "loss": 1.2984, "step": 463 }, { "epoch": 0.08262108262108261, "grad_norm": 0.4991510808467865, "learning_rate": 0.00019980693863256736, "loss": 1.2222, "step": 464 }, { "epoch": 
0.0827991452991453, "grad_norm": 0.44318175315856934, "learning_rate": 0.00019980606828894927, "loss": 1.2262, "step": 465 }, { "epoch": 0.08297720797720798, "grad_norm": 0.380231648683548, "learning_rate": 0.0001998051959898386, "loss": 1.0274, "step": 466 }, { "epoch": 0.08315527065527066, "grad_norm": 0.39519667625427246, "learning_rate": 0.0001998043217352524, "loss": 1.2499, "step": 467 }, { "epoch": 0.08333333333333333, "grad_norm": 0.457499235868454, "learning_rate": 0.0001998034455252079, "loss": 1.0751, "step": 468 }, { "epoch": 0.08351139601139601, "grad_norm": 0.368522584438324, "learning_rate": 0.00019980256735972215, "loss": 1.0776, "step": 469 }, { "epoch": 0.08368945868945869, "grad_norm": 0.3768427073955536, "learning_rate": 0.00019980168723881243, "loss": 1.2198, "step": 470 }, { "epoch": 0.08386752136752136, "grad_norm": 0.37045565247535706, "learning_rate": 0.000199800805162496, "loss": 1.1816, "step": 471 }, { "epoch": 0.08404558404558404, "grad_norm": 0.4219281077384949, "learning_rate": 0.0001997999211307901, "loss": 1.0515, "step": 472 }, { "epoch": 0.08422364672364673, "grad_norm": 0.3815271258354187, "learning_rate": 0.00019979903514371207, "loss": 1.1709, "step": 473 }, { "epoch": 0.08440170940170941, "grad_norm": 0.4566493630409241, "learning_rate": 0.00019979814720127924, "loss": 1.3063, "step": 474 }, { "epoch": 0.08457977207977208, "grad_norm": 0.4043879806995392, "learning_rate": 0.000199797257303509, "loss": 1.0549, "step": 475 }, { "epoch": 0.08475783475783476, "grad_norm": 0.3897830545902252, "learning_rate": 0.00019979636545041886, "loss": 1.1483, "step": 476 }, { "epoch": 0.08493589743589744, "grad_norm": 0.36097025871276855, "learning_rate": 0.00019979547164202622, "loss": 1.1196, "step": 477 }, { "epoch": 0.08511396011396011, "grad_norm": 0.3766986131668091, "learning_rate": 0.00019979457587834863, "loss": 1.0131, "step": 478 }, { "epoch": 0.08529202279202279, "grad_norm": 0.39460286498069763, "learning_rate": 
0.00019979367815940364, "loss": 1.1729, "step": 479 }, { "epoch": 0.08547008547008547, "grad_norm": 0.4137469232082367, "learning_rate": 0.00019979277848520885, "loss": 1.2569, "step": 480 }, { "epoch": 0.08564814814814815, "grad_norm": 0.464688777923584, "learning_rate": 0.00019979187685578183, "loss": 1.2064, "step": 481 }, { "epoch": 0.08582621082621082, "grad_norm": 0.4245518147945404, "learning_rate": 0.0001997909732711403, "loss": 0.9812, "step": 482 }, { "epoch": 0.0860042735042735, "grad_norm": 0.43368837237358093, "learning_rate": 0.00019979006773130197, "loss": 1.2822, "step": 483 }, { "epoch": 0.08618233618233619, "grad_norm": 0.4232824444770813, "learning_rate": 0.00019978916023628452, "loss": 1.1446, "step": 484 }, { "epoch": 0.08636039886039885, "grad_norm": 0.4183506369590759, "learning_rate": 0.00019978825078610578, "loss": 1.2605, "step": 485 }, { "epoch": 0.08653846153846154, "grad_norm": 0.4391268491744995, "learning_rate": 0.00019978733938078356, "loss": 1.2165, "step": 486 }, { "epoch": 0.08671652421652422, "grad_norm": 0.4139612317085266, "learning_rate": 0.0001997864260203357, "loss": 0.9389, "step": 487 }, { "epoch": 0.0868945868945869, "grad_norm": 0.4058656096458435, "learning_rate": 0.00019978551070478013, "loss": 1.0652, "step": 488 }, { "epoch": 0.08707264957264957, "grad_norm": 0.42333099246025085, "learning_rate": 0.00019978459343413473, "loss": 1.119, "step": 489 }, { "epoch": 0.08725071225071225, "grad_norm": 0.4573031961917877, "learning_rate": 0.00019978367420841754, "loss": 1.1546, "step": 490 }, { "epoch": 0.08742877492877493, "grad_norm": 0.4161617159843445, "learning_rate": 0.00019978275302764655, "loss": 1.0836, "step": 491 }, { "epoch": 0.0876068376068376, "grad_norm": 0.422145277261734, "learning_rate": 0.00019978182989183977, "loss": 1.1908, "step": 492 }, { "epoch": 0.08778490028490028, "grad_norm": 0.4588126838207245, "learning_rate": 0.00019978090480101532, "loss": 1.1758, "step": 493 }, { "epoch": 0.08796296296296297, 
"grad_norm": 0.4425722062587738, "learning_rate": 0.00019977997775519132, "loss": 1.088, "step": 494 }, { "epoch": 0.08814102564102565, "grad_norm": 0.37860307097435, "learning_rate": 0.00019977904875438594, "loss": 1.1532, "step": 495 }, { "epoch": 0.08831908831908832, "grad_norm": 0.40435823798179626, "learning_rate": 0.00019977811779861733, "loss": 1.1271, "step": 496 }, { "epoch": 0.088497150997151, "grad_norm": 0.42578884959220886, "learning_rate": 0.0001997771848879038, "loss": 0.9889, "step": 497 }, { "epoch": 0.08867521367521368, "grad_norm": 0.3439478874206543, "learning_rate": 0.00019977625002226361, "loss": 1.1273, "step": 498 }, { "epoch": 0.08885327635327635, "grad_norm": 0.362341970205307, "learning_rate": 0.00019977531320171504, "loss": 1.0214, "step": 499 }, { "epoch": 0.08903133903133903, "grad_norm": 0.4305768609046936, "learning_rate": 0.0001997743744262765, "loss": 1.2648, "step": 500 }, { "epoch": 0.08920940170940171, "grad_norm": 0.35900023579597473, "learning_rate": 0.00019977343369596636, "loss": 1.0274, "step": 501 }, { "epoch": 0.0893874643874644, "grad_norm": 0.4950818717479706, "learning_rate": 0.00019977249101080306, "loss": 1.1483, "step": 502 }, { "epoch": 0.08956552706552706, "grad_norm": 0.3800346553325653, "learning_rate": 0.00019977154637080503, "loss": 1.0636, "step": 503 }, { "epoch": 0.08974358974358974, "grad_norm": 0.46202352643013, "learning_rate": 0.0001997705997759908, "loss": 1.1544, "step": 504 }, { "epoch": 0.08992165242165243, "grad_norm": 0.36818403005599976, "learning_rate": 0.00019976965122637895, "loss": 0.9824, "step": 505 }, { "epoch": 0.0900997150997151, "grad_norm": 0.40248095989227295, "learning_rate": 0.00019976870072198805, "loss": 1.1002, "step": 506 }, { "epoch": 0.09027777777777778, "grad_norm": 0.3841850459575653, "learning_rate": 0.00019976774826283667, "loss": 1.2433, "step": 507 }, { "epoch": 0.09045584045584046, "grad_norm": 0.46892330050468445, "learning_rate": 0.0001997667938489435, "loss": 1.3194, 
"step": 508 }, { "epoch": 0.09063390313390314, "grad_norm": 0.39059561491012573, "learning_rate": 0.0001997658374803273, "loss": 1.1778, "step": 509 }, { "epoch": 0.09081196581196581, "grad_norm": 0.3793235421180725, "learning_rate": 0.00019976487915700672, "loss": 1.0659, "step": 510 }, { "epoch": 0.09099002849002849, "grad_norm": 0.39067742228507996, "learning_rate": 0.00019976391887900058, "loss": 1.107, "step": 511 }, { "epoch": 0.09116809116809117, "grad_norm": 0.40121713280677795, "learning_rate": 0.00019976295664632772, "loss": 1.102, "step": 512 }, { "epoch": 0.09134615384615384, "grad_norm": 0.49830010533332825, "learning_rate": 0.00019976199245900697, "loss": 1.1701, "step": 513 }, { "epoch": 0.09152421652421652, "grad_norm": 0.4536968171596527, "learning_rate": 0.0001997610263170572, "loss": 1.1067, "step": 514 }, { "epoch": 0.0917022792022792, "grad_norm": 0.3832971453666687, "learning_rate": 0.00019976005822049735, "loss": 1.0991, "step": 515 }, { "epoch": 0.09188034188034189, "grad_norm": 0.4093509614467621, "learning_rate": 0.0001997590881693464, "loss": 1.0565, "step": 516 }, { "epoch": 0.09205840455840456, "grad_norm": 0.46073687076568604, "learning_rate": 0.0001997581161636233, "loss": 1.0057, "step": 517 }, { "epoch": 0.09223646723646724, "grad_norm": 0.5001922845840454, "learning_rate": 0.0001997571422033472, "loss": 1.2639, "step": 518 }, { "epoch": 0.09241452991452992, "grad_norm": 0.4620618224143982, "learning_rate": 0.00019975616628853713, "loss": 1.0966, "step": 519 }, { "epoch": 0.09259259259259259, "grad_norm": 0.3788183927536011, "learning_rate": 0.0001997551884192122, "loss": 0.9783, "step": 520 }, { "epoch": 0.09277065527065527, "grad_norm": 0.45589539408683777, "learning_rate": 0.00019975420859539154, "loss": 1.2194, "step": 521 }, { "epoch": 0.09294871794871795, "grad_norm": 0.40747523307800293, "learning_rate": 0.00019975322681709443, "loss": 1.0349, "step": 522 }, { "epoch": 0.09312678062678063, "grad_norm": 0.5045142769813538, 
"learning_rate": 0.00019975224308434002, "loss": 1.1373, "step": 523 }, { "epoch": 0.0933048433048433, "grad_norm": 0.40352702140808105, "learning_rate": 0.00019975125739714767, "loss": 1.1236, "step": 524 }, { "epoch": 0.09348290598290598, "grad_norm": 0.4301735758781433, "learning_rate": 0.0001997502697555366, "loss": 1.2932, "step": 525 }, { "epoch": 0.09366096866096867, "grad_norm": 0.36800238490104675, "learning_rate": 0.00019974928015952624, "loss": 1.0734, "step": 526 }, { "epoch": 0.09383903133903133, "grad_norm": 0.4027230143547058, "learning_rate": 0.00019974828860913594, "loss": 1.2776, "step": 527 }, { "epoch": 0.09401709401709402, "grad_norm": 0.42497140169143677, "learning_rate": 0.0001997472951043851, "loss": 1.248, "step": 528 }, { "epoch": 0.0941951566951567, "grad_norm": 0.3888593018054962, "learning_rate": 0.00019974629964529325, "loss": 1.0231, "step": 529 }, { "epoch": 0.09437321937321937, "grad_norm": 0.3761361241340637, "learning_rate": 0.00019974530223187986, "loss": 1.0216, "step": 530 }, { "epoch": 0.09455128205128205, "grad_norm": 0.42192980647087097, "learning_rate": 0.00019974430286416448, "loss": 1.0731, "step": 531 }, { "epoch": 0.09472934472934473, "grad_norm": 0.44244512915611267, "learning_rate": 0.00019974330154216667, "loss": 1.2793, "step": 532 }, { "epoch": 0.09490740740740741, "grad_norm": 0.378252774477005, "learning_rate": 0.0001997422982659061, "loss": 1.0462, "step": 533 }, { "epoch": 0.09508547008547008, "grad_norm": 0.45589110255241394, "learning_rate": 0.00019974129303540236, "loss": 1.1884, "step": 534 }, { "epoch": 0.09526353276353276, "grad_norm": 0.33930808305740356, "learning_rate": 0.0001997402858506752, "loss": 0.8381, "step": 535 }, { "epoch": 0.09544159544159544, "grad_norm": 0.45408427715301514, "learning_rate": 0.0001997392767117443, "loss": 1.2379, "step": 536 }, { "epoch": 0.09561965811965811, "grad_norm": 0.44125741720199585, "learning_rate": 0.0001997382656186295, "loss": 1.1941, "step": 537 }, { "epoch": 
0.0957977207977208, "grad_norm": 0.4075697660446167, "learning_rate": 0.00019973725257135054, "loss": 1.0142, "step": 538 }, { "epoch": 0.09597578347578348, "grad_norm": 0.4258415102958679, "learning_rate": 0.00019973623756992733, "loss": 1.0447, "step": 539 }, { "epoch": 0.09615384615384616, "grad_norm": 0.2738485038280487, "learning_rate": 0.0001997352206143797, "loss": 0.5521, "step": 540 }, { "epoch": 0.09633190883190883, "grad_norm": 0.38815587759017944, "learning_rate": 0.00019973420170472762, "loss": 1.1052, "step": 541 }, { "epoch": 0.09650997150997151, "grad_norm": 0.3909834027290344, "learning_rate": 0.00019973318084099106, "loss": 1.0494, "step": 542 }, { "epoch": 0.09668803418803419, "grad_norm": 0.4517597258090973, "learning_rate": 0.00019973215802318996, "loss": 1.0611, "step": 543 }, { "epoch": 0.09686609686609686, "grad_norm": 0.48659002780914307, "learning_rate": 0.00019973113325134442, "loss": 0.9967, "step": 544 }, { "epoch": 0.09704415954415954, "grad_norm": 0.4039791524410248, "learning_rate": 0.0001997301065254745, "loss": 1.251, "step": 545 }, { "epoch": 0.09722222222222222, "grad_norm": 0.3985383212566376, "learning_rate": 0.0001997290778456003, "loss": 1.2263, "step": 546 }, { "epoch": 0.0974002849002849, "grad_norm": 0.4540637731552124, "learning_rate": 0.00019972804721174199, "loss": 1.2084, "step": 547 }, { "epoch": 0.09757834757834757, "grad_norm": 0.36867982149124146, "learning_rate": 0.00019972701462391977, "loss": 0.9704, "step": 548 }, { "epoch": 0.09775641025641026, "grad_norm": 0.40199780464172363, "learning_rate": 0.00019972598008215385, "loss": 1.1121, "step": 549 }, { "epoch": 0.09793447293447294, "grad_norm": 0.42728984355926514, "learning_rate": 0.00019972494358646455, "loss": 1.1606, "step": 550 }, { "epoch": 0.0981125356125356, "grad_norm": 0.4212374687194824, "learning_rate": 0.0001997239051368721, "loss": 1.3093, "step": 551 }, { "epoch": 0.09829059829059829, "grad_norm": 0.3972226083278656, "learning_rate": 
0.0001997228647333969, "loss": 1.1218, "step": 552 }, { "epoch": 0.09846866096866097, "grad_norm": 0.43649932742118835, "learning_rate": 0.00019972182237605935, "loss": 1.2532, "step": 553 }, { "epoch": 0.09864672364672365, "grad_norm": 0.3812280595302582, "learning_rate": 0.0001997207780648798, "loss": 1.0409, "step": 554 }, { "epoch": 0.09882478632478632, "grad_norm": 0.41684821248054504, "learning_rate": 0.00019971973179987878, "loss": 0.9569, "step": 555 }, { "epoch": 0.099002849002849, "grad_norm": 0.38081470131874084, "learning_rate": 0.00019971868358107674, "loss": 1.1615, "step": 556 }, { "epoch": 0.09918091168091168, "grad_norm": 0.3702073097229004, "learning_rate": 0.0001997176334084943, "loss": 1.3907, "step": 557 }, { "epoch": 0.09935897435897435, "grad_norm": 0.3625728189945221, "learning_rate": 0.00019971658128215193, "loss": 1.1897, "step": 558 }, { "epoch": 0.09953703703703703, "grad_norm": 0.3815405070781708, "learning_rate": 0.0001997155272020703, "loss": 1.1473, "step": 559 }, { "epoch": 0.09971509971509972, "grad_norm": 0.48664286732673645, "learning_rate": 0.00019971447116827004, "loss": 1.2462, "step": 560 }, { "epoch": 0.0998931623931624, "grad_norm": 0.3708696663379669, "learning_rate": 0.0001997134131807719, "loss": 1.0979, "step": 561 }, { "epoch": 0.10007122507122507, "grad_norm": 0.44511324167251587, "learning_rate": 0.00019971235323959654, "loss": 1.2313, "step": 562 }, { "epoch": 0.10024928774928775, "grad_norm": 0.3687448799610138, "learning_rate": 0.00019971129134476473, "loss": 1.1526, "step": 563 }, { "epoch": 0.10042735042735043, "grad_norm": 0.4506866931915283, "learning_rate": 0.00019971022749629735, "loss": 1.0003, "step": 564 }, { "epoch": 0.1006054131054131, "grad_norm": 0.41910406947135925, "learning_rate": 0.00019970916169421515, "loss": 1.013, "step": 565 }, { "epoch": 0.10078347578347578, "grad_norm": 0.39728936553001404, "learning_rate": 0.0001997080939385391, "loss": 1.0501, "step": 566 }, { "epoch": 
0.10096153846153846, "grad_norm": 0.41415902972221375, "learning_rate": 0.00019970702422929005, "loss": 1.0791, "step": 567 }, { "epoch": 0.10113960113960115, "grad_norm": 0.45630788803100586, "learning_rate": 0.00019970595256648896, "loss": 1.2884, "step": 568 }, { "epoch": 0.10131766381766381, "grad_norm": 0.4371698796749115, "learning_rate": 0.00019970487895015686, "loss": 1.0684, "step": 569 }, { "epoch": 0.1014957264957265, "grad_norm": 0.4350591003894806, "learning_rate": 0.00019970380338031477, "loss": 1.2415, "step": 570 }, { "epoch": 0.10167378917378918, "grad_norm": 0.4232708215713501, "learning_rate": 0.00019970272585698382, "loss": 1.2656, "step": 571 }, { "epoch": 0.10185185185185185, "grad_norm": 0.3917689919471741, "learning_rate": 0.00019970164638018502, "loss": 1.0178, "step": 572 }, { "epoch": 0.10202991452991453, "grad_norm": 0.4262804388999939, "learning_rate": 0.0001997005649499396, "loss": 1.1805, "step": 573 }, { "epoch": 0.10220797720797721, "grad_norm": 0.5217884182929993, "learning_rate": 0.0001996994815662687, "loss": 1.2392, "step": 574 }, { "epoch": 0.10238603988603989, "grad_norm": 0.4273875057697296, "learning_rate": 0.00019969839622919358, "loss": 1.0844, "step": 575 }, { "epoch": 0.10256410256410256, "grad_norm": 0.41588085889816284, "learning_rate": 0.00019969730893873547, "loss": 1.2437, "step": 576 }, { "epoch": 0.10274216524216524, "grad_norm": 0.41617709398269653, "learning_rate": 0.0001996962196949157, "loss": 0.9519, "step": 577 }, { "epoch": 0.10292022792022792, "grad_norm": 0.4832979142665863, "learning_rate": 0.00019969512849775565, "loss": 1.1889, "step": 578 }, { "epoch": 0.10309829059829059, "grad_norm": 0.3936060965061188, "learning_rate": 0.0001996940353472766, "loss": 0.9888, "step": 579 }, { "epoch": 0.10327635327635327, "grad_norm": 0.4147680997848511, "learning_rate": 0.00019969294024350004, "loss": 1.0733, "step": 580 }, { "epoch": 0.10345441595441596, "grad_norm": 0.37791356444358826, "learning_rate": 
0.00019969184318644742, "loss": 1.212, "step": 581 }, { "epoch": 0.10363247863247864, "grad_norm": 0.44297221302986145, "learning_rate": 0.00019969074417614023, "loss": 1.0535, "step": 582 }, { "epoch": 0.10381054131054131, "grad_norm": 0.4032835066318512, "learning_rate": 0.0001996896432126, "loss": 1.1869, "step": 583 }, { "epoch": 0.10398860398860399, "grad_norm": 0.49271953105926514, "learning_rate": 0.00019968854029584827, "loss": 1.1661, "step": 584 }, { "epoch": 0.10416666666666667, "grad_norm": 0.362699031829834, "learning_rate": 0.0001996874354259067, "loss": 0.868, "step": 585 }, { "epoch": 0.10434472934472934, "grad_norm": 0.401795357465744, "learning_rate": 0.0001996863286027969, "loss": 1.1045, "step": 586 }, { "epoch": 0.10452279202279202, "grad_norm": 0.45380479097366333, "learning_rate": 0.00019968521982654058, "loss": 0.8503, "step": 587 }, { "epoch": 0.1047008547008547, "grad_norm": 0.49759066104888916, "learning_rate": 0.00019968410909715947, "loss": 1.4073, "step": 588 }, { "epoch": 0.10487891737891739, "grad_norm": 0.4421198070049286, "learning_rate": 0.0001996829964146753, "loss": 1.1512, "step": 589 }, { "epoch": 0.10505698005698005, "grad_norm": 0.46675658226013184, "learning_rate": 0.00019968188177910988, "loss": 1.0132, "step": 590 }, { "epoch": 0.10523504273504274, "grad_norm": 0.5710657238960266, "learning_rate": 0.00019968076519048507, "loss": 1.267, "step": 591 }, { "epoch": 0.10541310541310542, "grad_norm": 0.4655563235282898, "learning_rate": 0.00019967964664882276, "loss": 1.1204, "step": 592 }, { "epoch": 0.10559116809116809, "grad_norm": 0.3895256519317627, "learning_rate": 0.00019967852615414478, "loss": 1.0814, "step": 593 }, { "epoch": 0.10576923076923077, "grad_norm": 0.424216091632843, "learning_rate": 0.00019967740370647322, "loss": 1.1663, "step": 594 }, { "epoch": 0.10594729344729345, "grad_norm": 0.3978985846042633, "learning_rate": 0.00019967627930582996, "loss": 0.909, "step": 595 }, { "epoch": 0.10612535612535613, 
"grad_norm": 0.47064995765686035, "learning_rate": 0.00019967515295223705, "loss": 1.2351, "step": 596 }, { "epoch": 0.1063034188034188, "grad_norm": 0.42449644207954407, "learning_rate": 0.0001996740246457166, "loss": 0.9739, "step": 597 }, { "epoch": 0.10648148148148148, "grad_norm": 0.39033401012420654, "learning_rate": 0.00019967289438629066, "loss": 1.0933, "step": 598 }, { "epoch": 0.10665954415954416, "grad_norm": 0.4398612678050995, "learning_rate": 0.00019967176217398143, "loss": 1.2479, "step": 599 }, { "epoch": 0.10683760683760683, "grad_norm": 0.3946632742881775, "learning_rate": 0.00019967062800881107, "loss": 1.0417, "step": 600 }, { "epoch": 0.10701566951566951, "grad_norm": 0.5083445906639099, "learning_rate": 0.0001996694918908018, "loss": 1.1109, "step": 601 }, { "epoch": 0.1071937321937322, "grad_norm": 0.477724552154541, "learning_rate": 0.00019966835381997585, "loss": 1.2891, "step": 602 }, { "epoch": 0.10737179487179487, "grad_norm": 0.4110167920589447, "learning_rate": 0.0001996672137963556, "loss": 1.0555, "step": 603 }, { "epoch": 0.10754985754985755, "grad_norm": 0.44078320264816284, "learning_rate": 0.00019966607181996334, "loss": 0.9188, "step": 604 }, { "epoch": 0.10772792022792023, "grad_norm": 0.41251105070114136, "learning_rate": 0.00019966492789082142, "loss": 1.2592, "step": 605 }, { "epoch": 0.10790598290598291, "grad_norm": 0.37701505422592163, "learning_rate": 0.00019966378200895227, "loss": 1.0233, "step": 606 }, { "epoch": 0.10808404558404558, "grad_norm": 0.44624966382980347, "learning_rate": 0.00019966263417437835, "loss": 1.2273, "step": 607 }, { "epoch": 0.10826210826210826, "grad_norm": 0.3618549108505249, "learning_rate": 0.00019966148438712214, "loss": 0.9101, "step": 608 }, { "epoch": 0.10844017094017094, "grad_norm": 0.384574294090271, "learning_rate": 0.00019966033264720616, "loss": 1.1769, "step": 609 }, { "epoch": 0.10861823361823361, "grad_norm": 0.50872403383255, "learning_rate": 0.000199659178954653, "loss": 
1.1213, "step": 610 }, { "epoch": 0.1087962962962963, "grad_norm": 0.39736685156822205, "learning_rate": 0.00019965802330948527, "loss": 1.275, "step": 611 }, { "epoch": 0.10897435897435898, "grad_norm": 0.484660267829895, "learning_rate": 0.00019965686571172557, "loss": 1.1671, "step": 612 }, { "epoch": 0.10915242165242166, "grad_norm": 0.41420218348503113, "learning_rate": 0.0001996557061613966, "loss": 0.9541, "step": 613 }, { "epoch": 0.10933048433048433, "grad_norm": 0.4057196080684662, "learning_rate": 0.00019965454465852112, "loss": 1.0145, "step": 614 }, { "epoch": 0.10950854700854701, "grad_norm": 0.4559510052204132, "learning_rate": 0.00019965338120312182, "loss": 1.0889, "step": 615 }, { "epoch": 0.10968660968660969, "grad_norm": 0.40960055589675903, "learning_rate": 0.00019965221579522154, "loss": 1.1447, "step": 616 }, { "epoch": 0.10986467236467236, "grad_norm": 0.4701732099056244, "learning_rate": 0.0001996510484348431, "loss": 1.2871, "step": 617 }, { "epoch": 0.11004273504273504, "grad_norm": 0.38420796394348145, "learning_rate": 0.0001996498791220094, "loss": 1.058, "step": 618 }, { "epoch": 0.11022079772079772, "grad_norm": 0.4014730453491211, "learning_rate": 0.00019964870785674327, "loss": 1.023, "step": 619 }, { "epoch": 0.1103988603988604, "grad_norm": 0.38846179842948914, "learning_rate": 0.00019964753463906773, "loss": 0.9834, "step": 620 }, { "epoch": 0.11057692307692307, "grad_norm": 0.5120236277580261, "learning_rate": 0.00019964635946900577, "loss": 1.2347, "step": 621 }, { "epoch": 0.11075498575498575, "grad_norm": 0.40483301877975464, "learning_rate": 0.00019964518234658038, "loss": 1.131, "step": 622 }, { "epoch": 0.11093304843304844, "grad_norm": 0.445782870054245, "learning_rate": 0.00019964400327181464, "loss": 0.9349, "step": 623 }, { "epoch": 0.1111111111111111, "grad_norm": 0.490460604429245, "learning_rate": 0.00019964282224473165, "loss": 1.0257, "step": 624 }, { "epoch": 0.11128917378917379, "grad_norm": 0.37585243582725525, 
"learning_rate": 0.00019964163926535454, "loss": 0.9724, "step": 625 }, { "epoch": 0.11146723646723647, "grad_norm": 0.4160473346710205, "learning_rate": 0.00019964045433370651, "loss": 0.874, "step": 626 }, { "epoch": 0.11164529914529915, "grad_norm": 0.442425012588501, "learning_rate": 0.00019963926744981074, "loss": 1.064, "step": 627 }, { "epoch": 0.11182336182336182, "grad_norm": 0.4451471269130707, "learning_rate": 0.00019963807861369054, "loss": 1.2343, "step": 628 }, { "epoch": 0.1120014245014245, "grad_norm": 0.5018183588981628, "learning_rate": 0.00019963688782536913, "loss": 1.1226, "step": 629 }, { "epoch": 0.11217948717948718, "grad_norm": 0.43723925948143005, "learning_rate": 0.0001996356950848699, "loss": 1.0178, "step": 630 }, { "epoch": 0.11235754985754985, "grad_norm": 0.4794611930847168, "learning_rate": 0.0001996345003922162, "loss": 0.9695, "step": 631 }, { "epoch": 0.11253561253561253, "grad_norm": 0.5021790266036987, "learning_rate": 0.00019963330374743143, "loss": 1.1748, "step": 632 }, { "epoch": 0.11271367521367522, "grad_norm": 0.47228625416755676, "learning_rate": 0.00019963210515053906, "loss": 1.2138, "step": 633 }, { "epoch": 0.1128917378917379, "grad_norm": 0.4261155128479004, "learning_rate": 0.00019963090460156256, "loss": 0.9428, "step": 634 }, { "epoch": 0.11306980056980057, "grad_norm": 0.3279525339603424, "learning_rate": 0.00019962970210052542, "loss": 0.7803, "step": 635 }, { "epoch": 0.11324786324786325, "grad_norm": 0.5106086730957031, "learning_rate": 0.00019962849764745125, "loss": 1.113, "step": 636 }, { "epoch": 0.11342592592592593, "grad_norm": 0.38272222876548767, "learning_rate": 0.00019962729124236363, "loss": 0.896, "step": 637 }, { "epoch": 0.1136039886039886, "grad_norm": 0.39532098174095154, "learning_rate": 0.0001996260828852862, "loss": 0.9308, "step": 638 }, { "epoch": 0.11378205128205128, "grad_norm": 0.44947221875190735, "learning_rate": 0.00019962487257624262, "loss": 1.207, "step": 639 }, { "epoch": 
0.11396011396011396, "grad_norm": 0.40684598684310913, "learning_rate": 0.00019962366031525664, "loss": 1.11, "step": 640 }, { "epoch": 0.11413817663817664, "grad_norm": 0.4296625852584839, "learning_rate": 0.00019962244610235194, "loss": 1.2784, "step": 641 }, { "epoch": 0.11431623931623931, "grad_norm": 0.4560794532299042, "learning_rate": 0.0001996212299375524, "loss": 1.1191, "step": 642 }, { "epoch": 0.114494301994302, "grad_norm": 0.40246087312698364, "learning_rate": 0.00019962001182088177, "loss": 1.1401, "step": 643 }, { "epoch": 0.11467236467236468, "grad_norm": 0.3938910663127899, "learning_rate": 0.000199618791752364, "loss": 1.0959, "step": 644 }, { "epoch": 0.11485042735042734, "grad_norm": 0.4123380184173584, "learning_rate": 0.00019961756973202287, "loss": 1.2824, "step": 645 }, { "epoch": 0.11502849002849003, "grad_norm": 0.41085442900657654, "learning_rate": 0.00019961634575988243, "loss": 1.1137, "step": 646 }, { "epoch": 0.11520655270655271, "grad_norm": 0.38276201486587524, "learning_rate": 0.0001996151198359667, "loss": 1.0747, "step": 647 }, { "epoch": 0.11538461538461539, "grad_norm": 0.49269407987594604, "learning_rate": 0.00019961389196029953, "loss": 1.1731, "step": 648 }, { "epoch": 0.11556267806267806, "grad_norm": 0.5152469277381897, "learning_rate": 0.00019961266213290512, "loss": 1.3574, "step": 649 }, { "epoch": 0.11574074074074074, "grad_norm": 0.4835714101791382, "learning_rate": 0.0001996114303538075, "loss": 1.2859, "step": 650 }, { "epoch": 0.11591880341880342, "grad_norm": 0.4284524917602539, "learning_rate": 0.00019961019662303087, "loss": 1.1103, "step": 651 }, { "epoch": 0.11609686609686609, "grad_norm": 0.3933276832103729, "learning_rate": 0.00019960896094059933, "loss": 1.2647, "step": 652 }, { "epoch": 0.11627492877492877, "grad_norm": 0.33749741315841675, "learning_rate": 0.00019960772330653712, "loss": 0.819, "step": 653 }, { "epoch": 0.11645299145299146, "grad_norm": 0.48122069239616394, "learning_rate": 
0.00019960648372086852, "loss": 1.2781, "step": 654 }, { "epoch": 0.11663105413105414, "grad_norm": 0.4681607186794281, "learning_rate": 0.00019960524218361775, "loss": 0.9723, "step": 655 }, { "epoch": 0.1168091168091168, "grad_norm": 0.3974960148334503, "learning_rate": 0.0001996039986948092, "loss": 1.0302, "step": 656 }, { "epoch": 0.11698717948717949, "grad_norm": 0.43180662393569946, "learning_rate": 0.0001996027532544672, "loss": 1.3265, "step": 657 }, { "epoch": 0.11716524216524217, "grad_norm": 0.4481917917728424, "learning_rate": 0.00019960150586261613, "loss": 1.136, "step": 658 }, { "epoch": 0.11734330484330484, "grad_norm": 0.43428945541381836, "learning_rate": 0.00019960025651928045, "loss": 1.2412, "step": 659 }, { "epoch": 0.11752136752136752, "grad_norm": 0.36211395263671875, "learning_rate": 0.00019959900522448467, "loss": 0.9563, "step": 660 }, { "epoch": 0.1176994301994302, "grad_norm": 0.43585848808288574, "learning_rate": 0.0001995977519782533, "loss": 1.1677, "step": 661 }, { "epoch": 0.11787749287749288, "grad_norm": 0.4232597351074219, "learning_rate": 0.00019959649678061086, "loss": 1.1187, "step": 662 }, { "epoch": 0.11805555555555555, "grad_norm": 0.3304753303527832, "learning_rate": 0.00019959523963158194, "loss": 0.8473, "step": 663 }, { "epoch": 0.11823361823361823, "grad_norm": 0.37600061297416687, "learning_rate": 0.0001995939805311912, "loss": 1.1227, "step": 664 }, { "epoch": 0.11841168091168092, "grad_norm": 0.33417847752571106, "learning_rate": 0.0001995927194794633, "loss": 1.0315, "step": 665 }, { "epoch": 0.11858974358974358, "grad_norm": 0.46799129247665405, "learning_rate": 0.00019959145647642298, "loss": 1.135, "step": 666 }, { "epoch": 0.11876780626780627, "grad_norm": 0.4141576886177063, "learning_rate": 0.0001995901915220949, "loss": 1.0956, "step": 667 }, { "epoch": 0.11894586894586895, "grad_norm": 0.3824596405029297, "learning_rate": 0.0001995889246165039, "loss": 1.1782, "step": 668 }, { "epoch": 
0.11912393162393162, "grad_norm": 0.4087786376476288, "learning_rate": 0.00019958765575967484, "loss": 0.9704, "step": 669 }, { "epoch": 0.1193019943019943, "grad_norm": 0.5161317586898804, "learning_rate": 0.00019958638495163252, "loss": 1.2207, "step": 670 }, { "epoch": 0.11948005698005698, "grad_norm": 0.4782274067401886, "learning_rate": 0.0001995851121924019, "loss": 1.1257, "step": 671 }, { "epoch": 0.11965811965811966, "grad_norm": 0.40617331862449646, "learning_rate": 0.00019958383748200782, "loss": 1.1153, "step": 672 }, { "epoch": 0.11983618233618233, "grad_norm": 0.40149980783462524, "learning_rate": 0.00019958256082047533, "loss": 0.9785, "step": 673 }, { "epoch": 0.12001424501424501, "grad_norm": 0.4378886818885803, "learning_rate": 0.00019958128220782942, "loss": 1.1355, "step": 674 }, { "epoch": 0.1201923076923077, "grad_norm": 0.4449596703052521, "learning_rate": 0.0001995800016440952, "loss": 1.0325, "step": 675 }, { "epoch": 0.12037037037037036, "grad_norm": 0.4268079698085785, "learning_rate": 0.00019957871912929765, "loss": 1.1901, "step": 676 }, { "epoch": 0.12054843304843305, "grad_norm": 0.4250091016292572, "learning_rate": 0.00019957743466346198, "loss": 1.0084, "step": 677 }, { "epoch": 0.12072649572649573, "grad_norm": 0.40724286437034607, "learning_rate": 0.0001995761482466133, "loss": 1.0866, "step": 678 }, { "epoch": 0.12090455840455841, "grad_norm": 0.42478349804878235, "learning_rate": 0.00019957485987877688, "loss": 1.1909, "step": 679 }, { "epoch": 0.12108262108262108, "grad_norm": 0.371362566947937, "learning_rate": 0.0001995735695599779, "loss": 1.083, "step": 680 }, { "epoch": 0.12126068376068376, "grad_norm": 0.4715283513069153, "learning_rate": 0.0001995722772902417, "loss": 1.2942, "step": 681 }, { "epoch": 0.12143874643874644, "grad_norm": 0.3611983060836792, "learning_rate": 0.00019957098306959355, "loss": 0.9878, "step": 682 }, { "epoch": 0.12161680911680911, "grad_norm": 0.4764883816242218, "learning_rate": 
0.00019956968689805883, "loss": 1.0082, "step": 683 }, { "epoch": 0.12179487179487179, "grad_norm": 0.33170604705810547, "learning_rate": 0.00019956838877566293, "loss": 0.8529, "step": 684 }, { "epoch": 0.12197293447293447, "grad_norm": 0.46896886825561523, "learning_rate": 0.00019956708870243133, "loss": 1.0745, "step": 685 }, { "epoch": 0.12215099715099716, "grad_norm": 0.4120674431324005, "learning_rate": 0.00019956578667838941, "loss": 1.1828, "step": 686 }, { "epoch": 0.12232905982905982, "grad_norm": 0.45671191811561584, "learning_rate": 0.00019956448270356275, "loss": 1.3484, "step": 687 }, { "epoch": 0.1225071225071225, "grad_norm": 0.4023838937282562, "learning_rate": 0.00019956317677797687, "loss": 0.9623, "step": 688 }, { "epoch": 0.12268518518518519, "grad_norm": 0.5205856561660767, "learning_rate": 0.00019956186890165737, "loss": 1.2221, "step": 689 }, { "epoch": 0.12286324786324786, "grad_norm": 0.43956050276756287, "learning_rate": 0.00019956055907462987, "loss": 1.1051, "step": 690 }, { "epoch": 0.12304131054131054, "grad_norm": 0.4341758191585541, "learning_rate": 0.00019955924729692003, "loss": 0.8972, "step": 691 }, { "epoch": 0.12321937321937322, "grad_norm": 0.42025020718574524, "learning_rate": 0.00019955793356855357, "loss": 1.1137, "step": 692 }, { "epoch": 0.1233974358974359, "grad_norm": 0.44375079870224, "learning_rate": 0.0001995566178895562, "loss": 1.2783, "step": 693 }, { "epoch": 0.12357549857549857, "grad_norm": 0.4703320264816284, "learning_rate": 0.00019955530025995372, "loss": 1.1991, "step": 694 }, { "epoch": 0.12375356125356125, "grad_norm": 0.43781620264053345, "learning_rate": 0.00019955398067977195, "loss": 1.2316, "step": 695 }, { "epoch": 0.12393162393162394, "grad_norm": 0.4362877607345581, "learning_rate": 0.0001995526591490367, "loss": 1.1374, "step": 696 }, { "epoch": 0.1241096866096866, "grad_norm": 0.4434499442577362, "learning_rate": 0.00019955133566777392, "loss": 1.1034, "step": 697 }, { "epoch": 
0.12428774928774929, "grad_norm": 0.46613508462905884, "learning_rate": 0.00019955001023600955, "loss": 1.2252, "step": 698 }, { "epoch": 0.12446581196581197, "grad_norm": 0.46226736903190613, "learning_rate": 0.00019954868285376945, "loss": 1.0296, "step": 699 }, { "epoch": 0.12464387464387465, "grad_norm": 0.4460904002189636, "learning_rate": 0.00019954735352107977, "loss": 1.0553, "step": 700 }, { "epoch": 0.12482193732193732, "grad_norm": 0.36708924174308777, "learning_rate": 0.00019954602223796648, "loss": 0.9384, "step": 701 }, { "epoch": 0.125, "grad_norm": 0.3780093491077423, "learning_rate": 0.00019954468900445566, "loss": 0.9062, "step": 702 }, { "epoch": 0.12517806267806267, "grad_norm": 0.41797417402267456, "learning_rate": 0.00019954335382057345, "loss": 1.0344, "step": 703 }, { "epoch": 0.12535612535612536, "grad_norm": 0.43710798025131226, "learning_rate": 0.00019954201668634597, "loss": 1.1324, "step": 704 }, { "epoch": 0.12553418803418803, "grad_norm": 0.4732789695262909, "learning_rate": 0.00019954067760179952, "loss": 1.1419, "step": 705 }, { "epoch": 0.1257122507122507, "grad_norm": 0.43248575925827026, "learning_rate": 0.00019953933656696022, "loss": 1.5112, "step": 706 }, { "epoch": 0.1258903133903134, "grad_norm": 0.4074753522872925, "learning_rate": 0.00019953799358185442, "loss": 0.9751, "step": 707 }, { "epoch": 0.12606837606837606, "grad_norm": 0.4586823880672455, "learning_rate": 0.0001995366486465084, "loss": 1.267, "step": 708 }, { "epoch": 0.12624643874643873, "grad_norm": 0.4716857075691223, "learning_rate": 0.0001995353017609485, "loss": 1.1636, "step": 709 }, { "epoch": 0.12642450142450143, "grad_norm": 0.5214398503303528, "learning_rate": 0.00019953395292520115, "loss": 1.2317, "step": 710 }, { "epoch": 0.1266025641025641, "grad_norm": 0.42961129546165466, "learning_rate": 0.00019953260213929276, "loss": 1.0271, "step": 711 }, { "epoch": 0.1267806267806268, "grad_norm": 0.4764653444290161, "learning_rate": 0.00019953124940324979, 
"loss": 1.1747, "step": 712 }, { "epoch": 0.12695868945868946, "grad_norm": 0.4420304000377655, "learning_rate": 0.00019952989471709874, "loss": 0.9783, "step": 713 }, { "epoch": 0.12713675213675213, "grad_norm": 0.44114625453948975, "learning_rate": 0.00019952853808086616, "loss": 1.1953, "step": 714 }, { "epoch": 0.12731481481481483, "grad_norm": 0.501923143863678, "learning_rate": 0.0001995271794945786, "loss": 0.9886, "step": 715 }, { "epoch": 0.1274928774928775, "grad_norm": 0.42266538739204407, "learning_rate": 0.00019952581895826276, "loss": 1.2033, "step": 716 }, { "epoch": 0.12767094017094016, "grad_norm": 0.37770554423332214, "learning_rate": 0.00019952445647194523, "loss": 1.0164, "step": 717 }, { "epoch": 0.12784900284900286, "grad_norm": 0.369266152381897, "learning_rate": 0.00019952309203565268, "loss": 0.9186, "step": 718 }, { "epoch": 0.12802706552706553, "grad_norm": 0.40446221828460693, "learning_rate": 0.00019952172564941193, "loss": 1.1576, "step": 719 }, { "epoch": 0.1282051282051282, "grad_norm": 0.504172146320343, "learning_rate": 0.00019952035731324967, "loss": 1.2695, "step": 720 }, { "epoch": 0.1283831908831909, "grad_norm": 0.37284108996391296, "learning_rate": 0.0001995189870271928, "loss": 1.0288, "step": 721 }, { "epoch": 0.12856125356125356, "grad_norm": 0.41811618208885193, "learning_rate": 0.00019951761479126805, "loss": 1.2241, "step": 722 }, { "epoch": 0.12873931623931623, "grad_norm": 0.44706249237060547, "learning_rate": 0.0001995162406055024, "loss": 1.0831, "step": 723 }, { "epoch": 0.12891737891737892, "grad_norm": 0.426572322845459, "learning_rate": 0.00019951486446992273, "loss": 1.0047, "step": 724 }, { "epoch": 0.1290954415954416, "grad_norm": 0.4446277618408203, "learning_rate": 0.00019951348638455602, "loss": 1.0827, "step": 725 }, { "epoch": 0.12927350427350429, "grad_norm": 0.3934919834136963, "learning_rate": 0.00019951210634942926, "loss": 0.9808, "step": 726 }, { "epoch": 0.12945156695156695, "grad_norm": 
0.4316558241844177, "learning_rate": 0.0001995107243645695, "loss": 1.3341, "step": 727 }, { "epoch": 0.12962962962962962, "grad_norm": 0.43074217438697815, "learning_rate": 0.00019950934043000382, "loss": 1.007, "step": 728 }, { "epoch": 0.12980769230769232, "grad_norm": 0.5212171673774719, "learning_rate": 0.0001995079545457593, "loss": 1.1822, "step": 729 }, { "epoch": 0.129985754985755, "grad_norm": 0.3749600946903229, "learning_rate": 0.00019950656671186313, "loss": 0.9657, "step": 730 }, { "epoch": 0.13016381766381765, "grad_norm": 0.36626043915748596, "learning_rate": 0.00019950517692834252, "loss": 1.1274, "step": 731 }, { "epoch": 0.13034188034188035, "grad_norm": 0.4635467529296875, "learning_rate": 0.00019950378519522467, "loss": 1.2305, "step": 732 }, { "epoch": 0.13051994301994302, "grad_norm": 0.4077455699443817, "learning_rate": 0.00019950239151253683, "loss": 0.9485, "step": 733 }, { "epoch": 0.1306980056980057, "grad_norm": 0.4222758114337921, "learning_rate": 0.0001995009958803063, "loss": 1.0376, "step": 734 }, { "epoch": 0.13087606837606838, "grad_norm": 0.4330402612686157, "learning_rate": 0.0001994995982985605, "loss": 1.1774, "step": 735 }, { "epoch": 0.13105413105413105, "grad_norm": 0.42275673151016235, "learning_rate": 0.00019949819876732673, "loss": 1.1238, "step": 736 }, { "epoch": 0.13123219373219372, "grad_norm": 0.45576968789100647, "learning_rate": 0.00019949679728663246, "loss": 1.0428, "step": 737 }, { "epoch": 0.13141025641025642, "grad_norm": 0.5508752465248108, "learning_rate": 0.00019949539385650514, "loss": 1.3221, "step": 738 }, { "epoch": 0.13158831908831908, "grad_norm": 0.4115872383117676, "learning_rate": 0.00019949398847697225, "loss": 1.0301, "step": 739 }, { "epoch": 0.13176638176638178, "grad_norm": 0.4662442207336426, "learning_rate": 0.00019949258114806132, "loss": 1.3263, "step": 740 }, { "epoch": 0.13194444444444445, "grad_norm": 0.6077266931533813, "learning_rate": 0.00019949117186979999, "loss": 1.0269, "step": 
741 }, { "epoch": 0.13212250712250712, "grad_norm": 0.47039318084716797, "learning_rate": 0.00019948976064221579, "loss": 1.3782, "step": 742 }, { "epoch": 0.1323005698005698, "grad_norm": 0.4773450493812561, "learning_rate": 0.0001994883474653364, "loss": 1.289, "step": 743 }, { "epoch": 0.13247863247863248, "grad_norm": 0.40180155634880066, "learning_rate": 0.00019948693233918952, "loss": 0.8691, "step": 744 }, { "epoch": 0.13265669515669515, "grad_norm": 0.45216289162635803, "learning_rate": 0.00019948551526380288, "loss": 1.071, "step": 745 }, { "epoch": 0.13283475783475784, "grad_norm": 0.4289272427558899, "learning_rate": 0.0001994840962392042, "loss": 1.0422, "step": 746 }, { "epoch": 0.1330128205128205, "grad_norm": 0.4617730379104614, "learning_rate": 0.00019948267526542134, "loss": 1.0835, "step": 747 }, { "epoch": 0.13319088319088318, "grad_norm": 0.42710617184638977, "learning_rate": 0.00019948125234248208, "loss": 1.0535, "step": 748 }, { "epoch": 0.13336894586894588, "grad_norm": 0.43433234095573425, "learning_rate": 0.0001994798274704144, "loss": 0.9313, "step": 749 }, { "epoch": 0.13354700854700854, "grad_norm": 0.46270284056663513, "learning_rate": 0.0001994784006492461, "loss": 1.0903, "step": 750 }, { "epoch": 0.1337250712250712, "grad_norm": 0.5319814682006836, "learning_rate": 0.00019947697187900517, "loss": 1.2329, "step": 751 }, { "epoch": 0.1339031339031339, "grad_norm": 0.3511372208595276, "learning_rate": 0.00019947554115971967, "loss": 0.7116, "step": 752 }, { "epoch": 0.13408119658119658, "grad_norm": 0.4103890359401703, "learning_rate": 0.00019947410849141756, "loss": 1.1527, "step": 753 }, { "epoch": 0.13425925925925927, "grad_norm": 0.5390757322311401, "learning_rate": 0.00019947267387412695, "loss": 1.1682, "step": 754 }, { "epoch": 0.13443732193732194, "grad_norm": 0.29939723014831543, "learning_rate": 0.0001994712373078759, "loss": 0.5848, "step": 755 }, { "epoch": 0.1346153846153846, "grad_norm": 0.4605920612812042, 
"learning_rate": 0.0001994697987926926, "loss": 0.9448, "step": 756 }, { "epoch": 0.1347934472934473, "grad_norm": 0.426213800907135, "learning_rate": 0.00019946835832860527, "loss": 1.0487, "step": 757 }, { "epoch": 0.13497150997150997, "grad_norm": 0.4209515154361725, "learning_rate": 0.00019946691591564203, "loss": 1.0951, "step": 758 }, { "epoch": 0.13514957264957264, "grad_norm": 0.39555591344833374, "learning_rate": 0.0001994654715538312, "loss": 0.8754, "step": 759 }, { "epoch": 0.13532763532763534, "grad_norm": 0.4065483510494232, "learning_rate": 0.0001994640252432011, "loss": 0.9451, "step": 760 }, { "epoch": 0.135505698005698, "grad_norm": 0.4489104151725769, "learning_rate": 0.00019946257698378003, "loss": 1.2031, "step": 761 }, { "epoch": 0.13568376068376067, "grad_norm": 0.39928409457206726, "learning_rate": 0.0001994611267755964, "loss": 1.1124, "step": 762 }, { "epoch": 0.13586182336182337, "grad_norm": 0.4145409166812897, "learning_rate": 0.00019945967461867858, "loss": 1.083, "step": 763 }, { "epoch": 0.13603988603988604, "grad_norm": 0.43508613109588623, "learning_rate": 0.00019945822051305507, "loss": 1.1119, "step": 764 }, { "epoch": 0.1362179487179487, "grad_norm": 0.5186598300933838, "learning_rate": 0.0001994567644587543, "loss": 1.3256, "step": 765 }, { "epoch": 0.1363960113960114, "grad_norm": 0.4615778625011444, "learning_rate": 0.00019945530645580487, "loss": 1.3906, "step": 766 }, { "epoch": 0.13657407407407407, "grad_norm": 0.4838152527809143, "learning_rate": 0.00019945384650423532, "loss": 0.8169, "step": 767 }, { "epoch": 0.13675213675213677, "grad_norm": 0.49253368377685547, "learning_rate": 0.0001994523846040742, "loss": 1.1613, "step": 768 }, { "epoch": 0.13693019943019943, "grad_norm": 0.4697009325027466, "learning_rate": 0.00019945092075535024, "loss": 1.1722, "step": 769 }, { "epoch": 0.1371082621082621, "grad_norm": 0.47162383794784546, "learning_rate": 0.00019944945495809204, "loss": 1.054, "step": 770 }, { "epoch": 
0.1372863247863248, "grad_norm": 0.4653547704219818, "learning_rate": 0.00019944798721232835, "loss": 1.1791, "step": 771 }, { "epoch": 0.13746438746438747, "grad_norm": 0.4244011640548706, "learning_rate": 0.000199446517518088, "loss": 1.1557, "step": 772 }, { "epoch": 0.13764245014245013, "grad_norm": 0.43812859058380127, "learning_rate": 0.00019944504587539967, "loss": 1.1567, "step": 773 }, { "epoch": 0.13782051282051283, "grad_norm": 0.3984275162220001, "learning_rate": 0.00019944357228429227, "loss": 1.0715, "step": 774 }, { "epoch": 0.1379985754985755, "grad_norm": 0.3794248104095459, "learning_rate": 0.0001994420967447946, "loss": 0.9377, "step": 775 }, { "epoch": 0.13817663817663817, "grad_norm": 0.4214578866958618, "learning_rate": 0.00019944061925693566, "loss": 1.0112, "step": 776 }, { "epoch": 0.13835470085470086, "grad_norm": 0.4738999605178833, "learning_rate": 0.00019943913982074435, "loss": 0.8718, "step": 777 }, { "epoch": 0.13853276353276353, "grad_norm": 0.43455326557159424, "learning_rate": 0.00019943765843624965, "loss": 1.1343, "step": 778 }, { "epoch": 0.1387108262108262, "grad_norm": 0.44973456859588623, "learning_rate": 0.00019943617510348062, "loss": 1.0487, "step": 779 }, { "epoch": 0.1388888888888889, "grad_norm": 0.4216597080230713, "learning_rate": 0.00019943468982246628, "loss": 1.0765, "step": 780 }, { "epoch": 0.13906695156695156, "grad_norm": 0.5089883208274841, "learning_rate": 0.00019943320259323578, "loss": 1.3137, "step": 781 }, { "epoch": 0.13924501424501423, "grad_norm": 0.4358222782611847, "learning_rate": 0.00019943171341581822, "loss": 1.1891, "step": 782 }, { "epoch": 0.13942307692307693, "grad_norm": 0.40918609499931335, "learning_rate": 0.00019943022229024275, "loss": 1.279, "step": 783 }, { "epoch": 0.1396011396011396, "grad_norm": 0.4614863395690918, "learning_rate": 0.00019942872921653866, "loss": 1.2477, "step": 784 }, { "epoch": 0.1397792022792023, "grad_norm": 0.4141528904438019, "learning_rate": 
0.00019942723419473515, "loss": 0.9622, "step": 785 }, { "epoch": 0.13995726495726496, "grad_norm": 0.536139726638794, "learning_rate": 0.00019942573722486154, "loss": 1.2127, "step": 786 }, { "epoch": 0.14013532763532763, "grad_norm": 0.4968845546245575, "learning_rate": 0.0001994242383069471, "loss": 1.2965, "step": 787 }, { "epoch": 0.14031339031339032, "grad_norm": 0.3897174894809723, "learning_rate": 0.00019942273744102132, "loss": 0.9907, "step": 788 }, { "epoch": 0.140491452991453, "grad_norm": 0.466307669878006, "learning_rate": 0.0001994212346271135, "loss": 1.2021, "step": 789 }, { "epoch": 0.14066951566951566, "grad_norm": 0.49283576011657715, "learning_rate": 0.0001994197298652531, "loss": 1.0969, "step": 790 }, { "epoch": 0.14084757834757836, "grad_norm": 0.4686102271080017, "learning_rate": 0.00019941822315546964, "loss": 1.0125, "step": 791 }, { "epoch": 0.14102564102564102, "grad_norm": 0.4389997124671936, "learning_rate": 0.0001994167144977926, "loss": 1.1294, "step": 792 }, { "epoch": 0.1412037037037037, "grad_norm": 0.38539355993270874, "learning_rate": 0.00019941520389225162, "loss": 1.1231, "step": 793 }, { "epoch": 0.1413817663817664, "grad_norm": 0.4860847592353821, "learning_rate": 0.00019941369133887618, "loss": 1.2268, "step": 794 }, { "epoch": 0.14155982905982906, "grad_norm": 0.4567467272281647, "learning_rate": 0.00019941217683769598, "loss": 1.1482, "step": 795 }, { "epoch": 0.14173789173789172, "grad_norm": 0.5549420714378357, "learning_rate": 0.00019941066038874067, "loss": 1.1899, "step": 796 }, { "epoch": 0.14191595441595442, "grad_norm": 0.3950003385543823, "learning_rate": 0.00019940914199204, "loss": 0.96, "step": 797 }, { "epoch": 0.1420940170940171, "grad_norm": 0.43845999240875244, "learning_rate": 0.00019940762164762373, "loss": 1.0338, "step": 798 }, { "epoch": 0.14227207977207978, "grad_norm": 0.468537300825119, "learning_rate": 0.00019940609935552157, "loss": 1.2416, "step": 799 }, { "epoch": 0.14245014245014245, 
"grad_norm": 0.4292038679122925, "learning_rate": 0.0001994045751157634, "loss": 1.1397, "step": 800 }, { "epoch": 0.14262820512820512, "grad_norm": 0.3800995647907257, "learning_rate": 0.00019940304892837908, "loss": 0.939, "step": 801 }, { "epoch": 0.14280626780626782, "grad_norm": 0.38004353642463684, "learning_rate": 0.00019940152079339852, "loss": 1.0485, "step": 802 }, { "epoch": 0.14298433048433049, "grad_norm": 0.4658142924308777, "learning_rate": 0.00019939999071085163, "loss": 1.1561, "step": 803 }, { "epoch": 0.14316239316239315, "grad_norm": 0.4235048294067383, "learning_rate": 0.0001993984586807684, "loss": 1.0516, "step": 804 }, { "epoch": 0.14334045584045585, "grad_norm": 0.42925819754600525, "learning_rate": 0.00019939692470317887, "loss": 1.2238, "step": 805 }, { "epoch": 0.14351851851851852, "grad_norm": 0.43701639771461487, "learning_rate": 0.00019939538877811308, "loss": 1.0129, "step": 806 }, { "epoch": 0.14369658119658119, "grad_norm": 0.42786353826522827, "learning_rate": 0.00019939385090560113, "loss": 1.1355, "step": 807 }, { "epoch": 0.14387464387464388, "grad_norm": 0.371218740940094, "learning_rate": 0.00019939231108567312, "loss": 0.9712, "step": 808 }, { "epoch": 0.14405270655270655, "grad_norm": 0.4834294617176056, "learning_rate": 0.00019939076931835926, "loss": 1.1375, "step": 809 }, { "epoch": 0.14423076923076922, "grad_norm": 0.4700150191783905, "learning_rate": 0.00019938922560368974, "loss": 1.1943, "step": 810 }, { "epoch": 0.14440883190883191, "grad_norm": 0.4430996775627136, "learning_rate": 0.0001993876799416948, "loss": 1.1976, "step": 811 }, { "epoch": 0.14458689458689458, "grad_norm": 0.4161672592163086, "learning_rate": 0.00019938613233240476, "loss": 1.0291, "step": 812 }, { "epoch": 0.14476495726495728, "grad_norm": 0.39838850498199463, "learning_rate": 0.0001993845827758499, "loss": 1.2103, "step": 813 }, { "epoch": 0.14494301994301995, "grad_norm": 0.429198294878006, "learning_rate": 0.00019938303127206057, "loss": 
0.9971, "step": 814 }, { "epoch": 0.14512108262108261, "grad_norm": 0.4589254856109619, "learning_rate": 0.00019938147782106719, "loss": 1.2392, "step": 815 }, { "epoch": 0.1452991452991453, "grad_norm": 0.42506635189056396, "learning_rate": 0.00019937992242290023, "loss": 1.0827, "step": 816 }, { "epoch": 0.14547720797720798, "grad_norm": 0.3778113126754761, "learning_rate": 0.00019937836507759012, "loss": 1.021, "step": 817 }, { "epoch": 0.14565527065527065, "grad_norm": 0.43071216344833374, "learning_rate": 0.0001993768057851674, "loss": 1.273, "step": 818 }, { "epoch": 0.14583333333333334, "grad_norm": 0.4944681227207184, "learning_rate": 0.00019937524454566262, "loss": 1.3037, "step": 819 }, { "epoch": 0.146011396011396, "grad_norm": 0.4438824951648712, "learning_rate": 0.00019937368135910632, "loss": 1.1383, "step": 820 }, { "epoch": 0.14618945868945868, "grad_norm": 0.400215744972229, "learning_rate": 0.0001993721162255292, "loss": 1.0669, "step": 821 }, { "epoch": 0.14636752136752137, "grad_norm": 0.4341452121734619, "learning_rate": 0.00019937054914496185, "loss": 1.1431, "step": 822 }, { "epoch": 0.14654558404558404, "grad_norm": 0.3941744267940521, "learning_rate": 0.00019936898011743503, "loss": 1.1593, "step": 823 }, { "epoch": 0.1467236467236467, "grad_norm": 0.4318541884422302, "learning_rate": 0.00019936740914297947, "loss": 1.2814, "step": 824 }, { "epoch": 0.1469017094017094, "grad_norm": 0.44488632678985596, "learning_rate": 0.00019936583622162595, "loss": 1.1054, "step": 825 }, { "epoch": 0.14707977207977208, "grad_norm": 0.38701096177101135, "learning_rate": 0.00019936426135340528, "loss": 1.1086, "step": 826 }, { "epoch": 0.14725783475783477, "grad_norm": 0.45794424414634705, "learning_rate": 0.0001993626845383483, "loss": 1.2395, "step": 827 }, { "epoch": 0.14743589743589744, "grad_norm": 0.49237680435180664, "learning_rate": 0.00019936110577648596, "loss": 1.3483, "step": 828 }, { "epoch": 0.1476139601139601, "grad_norm": 0.481666624546051, 
"learning_rate": 0.00019935952506784914, "loss": 1.1848, "step": 829 }, { "epoch": 0.1477920227920228, "grad_norm": 0.4015209376811981, "learning_rate": 0.00019935794241246883, "loss": 1.0624, "step": 830 }, { "epoch": 0.14797008547008547, "grad_norm": 0.47975999116897583, "learning_rate": 0.00019935635781037606, "loss": 1.1595, "step": 831 }, { "epoch": 0.14814814814814814, "grad_norm": 0.4440356492996216, "learning_rate": 0.00019935477126160181, "loss": 1.1325, "step": 832 }, { "epoch": 0.14832621082621084, "grad_norm": 0.4167410731315613, "learning_rate": 0.00019935318276617723, "loss": 1.0662, "step": 833 }, { "epoch": 0.1485042735042735, "grad_norm": 0.4107447862625122, "learning_rate": 0.0001993515923241334, "loss": 0.8816, "step": 834 }, { "epoch": 0.14868233618233617, "grad_norm": 0.4020158648490906, "learning_rate": 0.00019934999993550154, "loss": 0.9797, "step": 835 }, { "epoch": 0.14886039886039887, "grad_norm": 0.4186473786830902, "learning_rate": 0.0001993484056003128, "loss": 1.1243, "step": 836 }, { "epoch": 0.14903846153846154, "grad_norm": 0.5534794926643372, "learning_rate": 0.00019934680931859842, "loss": 1.1189, "step": 837 }, { "epoch": 0.1492165242165242, "grad_norm": 0.37901270389556885, "learning_rate": 0.0001993452110903897, "loss": 0.9241, "step": 838 }, { "epoch": 0.1493945868945869, "grad_norm": 0.41773587465286255, "learning_rate": 0.00019934361091571793, "loss": 0.9467, "step": 839 }, { "epoch": 0.14957264957264957, "grad_norm": 0.4962073564529419, "learning_rate": 0.00019934200879461448, "loss": 1.2423, "step": 840 }, { "epoch": 0.14975071225071226, "grad_norm": 0.38565897941589355, "learning_rate": 0.00019934040472711074, "loss": 1.1545, "step": 841 }, { "epoch": 0.14992877492877493, "grad_norm": 0.4295346736907959, "learning_rate": 0.0001993387987132381, "loss": 1.2482, "step": 842 }, { "epoch": 0.1501068376068376, "grad_norm": 0.4279189705848694, "learning_rate": 0.0001993371907530281, "loss": 1.1135, "step": 843 }, { "epoch": 
0.1502849002849003, "grad_norm": 0.44649168848991394, "learning_rate": 0.0001993355808465122, "loss": 1.0734, "step": 844 }, { "epoch": 0.15046296296296297, "grad_norm": 0.453707218170166, "learning_rate": 0.0001993339689937219, "loss": 1.0992, "step": 845 }, { "epoch": 0.15064102564102563, "grad_norm": 0.5113263726234436, "learning_rate": 0.00019933235519468886, "loss": 1.1792, "step": 846 }, { "epoch": 0.15081908831908833, "grad_norm": 0.5822970271110535, "learning_rate": 0.00019933073944944466, "loss": 1.367, "step": 847 }, { "epoch": 0.150997150997151, "grad_norm": 0.3946528732776642, "learning_rate": 0.00019932912175802097, "loss": 0.9781, "step": 848 }, { "epoch": 0.15117521367521367, "grad_norm": 0.5429860949516296, "learning_rate": 0.00019932750212044945, "loss": 0.9783, "step": 849 }, { "epoch": 0.15135327635327636, "grad_norm": 0.45847952365875244, "learning_rate": 0.0001993258805367619, "loss": 1.1352, "step": 850 }, { "epoch": 0.15153133903133903, "grad_norm": 0.42770692706108093, "learning_rate": 0.00019932425700699004, "loss": 1.2365, "step": 851 }, { "epoch": 0.1517094017094017, "grad_norm": 0.41845405101776123, "learning_rate": 0.00019932263153116565, "loss": 1.2642, "step": 852 }, { "epoch": 0.1518874643874644, "grad_norm": 0.4641731083393097, "learning_rate": 0.00019932100410932066, "loss": 1.2009, "step": 853 }, { "epoch": 0.15206552706552706, "grad_norm": 0.4128672778606415, "learning_rate": 0.00019931937474148689, "loss": 1.1981, "step": 854 }, { "epoch": 0.15224358974358973, "grad_norm": 0.4730764925479889, "learning_rate": 0.00019931774342769632, "loss": 1.2145, "step": 855 }, { "epoch": 0.15242165242165243, "grad_norm": 0.36611825227737427, "learning_rate": 0.00019931611016798089, "loss": 0.8504, "step": 856 }, { "epoch": 0.1525997150997151, "grad_norm": 0.40944692492485046, "learning_rate": 0.00019931447496237254, "loss": 1.2853, "step": 857 }, { "epoch": 0.1527777777777778, "grad_norm": 0.4521993398666382, "learning_rate": 
0.0001993128378109034, "loss": 1.0198, "step": 858 }, { "epoch": 0.15295584045584046, "grad_norm": 0.42113015055656433, "learning_rate": 0.0001993111987136055, "loss": 1.1284, "step": 859 }, { "epoch": 0.15313390313390313, "grad_norm": 0.4117624759674072, "learning_rate": 0.00019930955767051098, "loss": 1.0445, "step": 860 }, { "epoch": 0.15331196581196582, "grad_norm": 0.4807964265346527, "learning_rate": 0.00019930791468165197, "loss": 1.1378, "step": 861 }, { "epoch": 0.1534900284900285, "grad_norm": 0.4186483323574066, "learning_rate": 0.00019930626974706063, "loss": 1.1636, "step": 862 }, { "epoch": 0.15366809116809116, "grad_norm": 0.3764737844467163, "learning_rate": 0.00019930462286676926, "loss": 0.9523, "step": 863 }, { "epoch": 0.15384615384615385, "grad_norm": 0.4283556044101715, "learning_rate": 0.00019930297404081008, "loss": 1.1008, "step": 864 }, { "epoch": 0.15402421652421652, "grad_norm": 0.4485796093940735, "learning_rate": 0.00019930132326921541, "loss": 1.0834, "step": 865 }, { "epoch": 0.1542022792022792, "grad_norm": 0.3882720172405243, "learning_rate": 0.0001992996705520176, "loss": 1.1086, "step": 866 }, { "epoch": 0.1543803418803419, "grad_norm": 0.44698455929756165, "learning_rate": 0.00019929801588924902, "loss": 1.1437, "step": 867 }, { "epoch": 0.15455840455840456, "grad_norm": 0.46978411078453064, "learning_rate": 0.00019929635928094208, "loss": 1.091, "step": 868 }, { "epoch": 0.15473646723646722, "grad_norm": 0.4717854857444763, "learning_rate": 0.00019929470072712927, "loss": 1.1959, "step": 869 }, { "epoch": 0.15491452991452992, "grad_norm": 0.4324854016304016, "learning_rate": 0.00019929304022784305, "loss": 1.2062, "step": 870 }, { "epoch": 0.1550925925925926, "grad_norm": 0.3948180675506592, "learning_rate": 0.00019929137778311597, "loss": 1.1101, "step": 871 }, { "epoch": 0.15527065527065528, "grad_norm": 0.40345287322998047, "learning_rate": 0.0001992897133929806, "loss": 0.8894, "step": 872 }, { "epoch": 0.15544871794871795, 
"grad_norm": 0.44931963086128235, "learning_rate": 0.00019928804705746957, "loss": 0.9389, "step": 873 }, { "epoch": 0.15562678062678062, "grad_norm": 0.529196560382843, "learning_rate": 0.0001992863787766155, "loss": 1.3362, "step": 874 }, { "epoch": 0.15580484330484332, "grad_norm": 0.41218671202659607, "learning_rate": 0.0001992847085504511, "loss": 1.0727, "step": 875 }, { "epoch": 0.15598290598290598, "grad_norm": 0.44074541330337524, "learning_rate": 0.00019928303637900907, "loss": 1.1091, "step": 876 }, { "epoch": 0.15616096866096865, "grad_norm": 0.5264310240745544, "learning_rate": 0.00019928136226232218, "loss": 1.201, "step": 877 }, { "epoch": 0.15633903133903135, "grad_norm": 0.4255099594593048, "learning_rate": 0.00019927968620042324, "loss": 1.2514, "step": 878 }, { "epoch": 0.15651709401709402, "grad_norm": 0.4030280113220215, "learning_rate": 0.0001992780081933451, "loss": 1.0422, "step": 879 }, { "epoch": 0.15669515669515668, "grad_norm": 0.5270203948020935, "learning_rate": 0.00019927632824112058, "loss": 1.2476, "step": 880 }, { "epoch": 0.15687321937321938, "grad_norm": 0.37767237424850464, "learning_rate": 0.00019927464634378268, "loss": 1.0768, "step": 881 }, { "epoch": 0.15705128205128205, "grad_norm": 0.4535936415195465, "learning_rate": 0.0001992729625013643, "loss": 1.2097, "step": 882 }, { "epoch": 0.15722934472934472, "grad_norm": 0.4282119870185852, "learning_rate": 0.00019927127671389843, "loss": 1.0904, "step": 883 }, { "epoch": 0.1574074074074074, "grad_norm": 0.3924157917499542, "learning_rate": 0.0001992695889814181, "loss": 0.9692, "step": 884 }, { "epoch": 0.15758547008547008, "grad_norm": 0.525075376033783, "learning_rate": 0.0001992678993039564, "loss": 1.0292, "step": 885 }, { "epoch": 0.15776353276353278, "grad_norm": 0.4388505518436432, "learning_rate": 0.00019926620768154644, "loss": 1.1944, "step": 886 }, { "epoch": 0.15794159544159544, "grad_norm": 0.4362235963344574, "learning_rate": 0.00019926451411422132, "loss": 0.97, 
"step": 887 }, { "epoch": 0.1581196581196581, "grad_norm": 0.4265296459197998, "learning_rate": 0.0001992628186020143, "loss": 0.9196, "step": 888 }, { "epoch": 0.1582977207977208, "grad_norm": 0.4019876718521118, "learning_rate": 0.0001992611211449585, "loss": 1.1368, "step": 889 }, { "epoch": 0.15847578347578348, "grad_norm": 0.5003397464752197, "learning_rate": 0.00019925942174308726, "loss": 1.2582, "step": 890 }, { "epoch": 0.15865384615384615, "grad_norm": 0.4774404466152191, "learning_rate": 0.00019925772039643382, "loss": 1.2277, "step": 891 }, { "epoch": 0.15883190883190884, "grad_norm": 0.4590449333190918, "learning_rate": 0.00019925601710503153, "loss": 1.1679, "step": 892 }, { "epoch": 0.1590099715099715, "grad_norm": 0.4221442639827728, "learning_rate": 0.0001992543118689138, "loss": 1.1626, "step": 893 }, { "epoch": 0.15918803418803418, "grad_norm": 0.47613003849983215, "learning_rate": 0.00019925260468811403, "loss": 1.1509, "step": 894 }, { "epoch": 0.15936609686609687, "grad_norm": 0.41706812381744385, "learning_rate": 0.0001992508955626656, "loss": 1.0366, "step": 895 }, { "epoch": 0.15954415954415954, "grad_norm": 0.5064654350280762, "learning_rate": 0.00019924918449260205, "loss": 1.0729, "step": 896 }, { "epoch": 0.1597222222222222, "grad_norm": 0.5019610524177551, "learning_rate": 0.00019924747147795696, "loss": 1.0642, "step": 897 }, { "epoch": 0.1599002849002849, "grad_norm": 0.4345671534538269, "learning_rate": 0.00019924575651876378, "loss": 1.1747, "step": 898 }, { "epoch": 0.16007834757834757, "grad_norm": 0.4397568702697754, "learning_rate": 0.0001992440396150562, "loss": 1.282, "step": 899 }, { "epoch": 0.16025641025641027, "grad_norm": 0.520187497138977, "learning_rate": 0.0001992423207668678, "loss": 0.976, "step": 900 }, { "epoch": 0.16043447293447294, "grad_norm": 0.39329993724823, "learning_rate": 0.0001992405999742323, "loss": 0.9829, "step": 901 }, { "epoch": 0.1606125356125356, "grad_norm": 0.42361345887184143, "learning_rate": 
0.00019923887723718339, "loss": 1.139, "step": 902 }, { "epoch": 0.1607905982905983, "grad_norm": 0.3846314251422882, "learning_rate": 0.00019923715255575482, "loss": 0.8262, "step": 903 }, { "epoch": 0.16096866096866097, "grad_norm": 0.39258381724357605, "learning_rate": 0.0001992354259299804, "loss": 0.9638, "step": 904 }, { "epoch": 0.16114672364672364, "grad_norm": 0.4000850319862366, "learning_rate": 0.00019923369735989397, "loss": 0.91, "step": 905 }, { "epoch": 0.16132478632478633, "grad_norm": 0.46303513646125793, "learning_rate": 0.00019923196684552936, "loss": 1.1447, "step": 906 }, { "epoch": 0.161502849002849, "grad_norm": 0.38437438011169434, "learning_rate": 0.0001992302343869205, "loss": 1.0212, "step": 907 }, { "epoch": 0.16168091168091167, "grad_norm": 0.44585472345352173, "learning_rate": 0.00019922849998410135, "loss": 1.1964, "step": 908 }, { "epoch": 0.16185897435897437, "grad_norm": 0.41959813237190247, "learning_rate": 0.00019922676363710583, "loss": 0.9925, "step": 909 }, { "epoch": 0.16203703703703703, "grad_norm": 0.47442761063575745, "learning_rate": 0.00019922502534596803, "loss": 0.9237, "step": 910 }, { "epoch": 0.1622150997150997, "grad_norm": 0.5065128207206726, "learning_rate": 0.00019922328511072198, "loss": 1.2573, "step": 911 }, { "epoch": 0.1623931623931624, "grad_norm": 0.4739879369735718, "learning_rate": 0.0001992215429314018, "loss": 1.4416, "step": 912 }, { "epoch": 0.16257122507122507, "grad_norm": 0.48763832449913025, "learning_rate": 0.00019921979880804157, "loss": 1.0408, "step": 913 }, { "epoch": 0.16274928774928774, "grad_norm": 0.4841614067554474, "learning_rate": 0.0001992180527406755, "loss": 1.1826, "step": 914 }, { "epoch": 0.16292735042735043, "grad_norm": 0.49433308839797974, "learning_rate": 0.0001992163047293378, "loss": 1.3552, "step": 915 }, { "epoch": 0.1631054131054131, "grad_norm": 0.4985002875328064, "learning_rate": 0.0001992145547740627, "loss": 1.2639, "step": 916 }, { "epoch": 0.1632834757834758, 
"grad_norm": 0.40348032116889954, "learning_rate": 0.00019921280287488448, "loss": 1.1731, "step": 917 }, { "epoch": 0.16346153846153846, "grad_norm": 0.5166002511978149, "learning_rate": 0.0001992110490318375, "loss": 1.0692, "step": 918 }, { "epoch": 0.16363960113960113, "grad_norm": 0.44233468174934387, "learning_rate": 0.00019920929324495615, "loss": 1.0488, "step": 919 }, { "epoch": 0.16381766381766383, "grad_norm": 0.43709903955459595, "learning_rate": 0.00019920753551427476, "loss": 0.8884, "step": 920 }, { "epoch": 0.1639957264957265, "grad_norm": 0.4054167568683624, "learning_rate": 0.00019920577583982778, "loss": 0.9872, "step": 921 }, { "epoch": 0.16417378917378916, "grad_norm": 0.4657362997531891, "learning_rate": 0.0001992040142216497, "loss": 1.4402, "step": 922 }, { "epoch": 0.16435185185185186, "grad_norm": 0.42550426721572876, "learning_rate": 0.0001992022506597751, "loss": 1.0456, "step": 923 }, { "epoch": 0.16452991452991453, "grad_norm": 0.49346762895584106, "learning_rate": 0.00019920048515423842, "loss": 1.527, "step": 924 }, { "epoch": 0.1647079772079772, "grad_norm": 0.3970337510108948, "learning_rate": 0.0001991987177050743, "loss": 1.0363, "step": 925 }, { "epoch": 0.1648860398860399, "grad_norm": 0.4027378559112549, "learning_rate": 0.0001991969483123174, "loss": 0.8416, "step": 926 }, { "epoch": 0.16506410256410256, "grad_norm": 0.4181644916534424, "learning_rate": 0.00019919517697600237, "loss": 1.2253, "step": 927 }, { "epoch": 0.16524216524216523, "grad_norm": 0.43686383962631226, "learning_rate": 0.0001991934036961639, "loss": 1.0808, "step": 928 }, { "epoch": 0.16542022792022792, "grad_norm": 0.4242876172065735, "learning_rate": 0.0001991916284728367, "loss": 0.9483, "step": 929 }, { "epoch": 0.1655982905982906, "grad_norm": 0.3690609037876129, "learning_rate": 0.00019918985130605563, "loss": 0.9495, "step": 930 }, { "epoch": 0.1657763532763533, "grad_norm": 0.42184555530548096, "learning_rate": 0.00019918807219585546, "loss": 
1.0966, "step": 931 }, { "epoch": 0.16595441595441596, "grad_norm": 0.4342746138572693, "learning_rate": 0.00019918629114227106, "loss": 1.0875, "step": 932 }, { "epoch": 0.16613247863247863, "grad_norm": 0.4191494286060333, "learning_rate": 0.00019918450814533737, "loss": 1.0777, "step": 933 }, { "epoch": 0.16631054131054132, "grad_norm": 0.37124550342559814, "learning_rate": 0.00019918272320508922, "loss": 1.0131, "step": 934 }, { "epoch": 0.166488603988604, "grad_norm": 0.4475722014904022, "learning_rate": 0.00019918093632156168, "loss": 1.1185, "step": 935 }, { "epoch": 0.16666666666666666, "grad_norm": 0.4629058241844177, "learning_rate": 0.0001991791474947897, "loss": 1.0353, "step": 936 }, { "epoch": 0.16684472934472935, "grad_norm": 0.48192909359931946, "learning_rate": 0.00019917735672480834, "loss": 1.1628, "step": 937 }, { "epoch": 0.16702279202279202, "grad_norm": 0.5542252063751221, "learning_rate": 0.00019917556401165273, "loss": 1.3133, "step": 938 }, { "epoch": 0.1672008547008547, "grad_norm": 0.4172651171684265, "learning_rate": 0.00019917376935535796, "loss": 1.1733, "step": 939 }, { "epoch": 0.16737891737891739, "grad_norm": 0.4424920380115509, "learning_rate": 0.0001991719727559592, "loss": 1.0262, "step": 940 }, { "epoch": 0.16755698005698005, "grad_norm": 0.4551742970943451, "learning_rate": 0.00019917017421349162, "loss": 1.0883, "step": 941 }, { "epoch": 0.16773504273504272, "grad_norm": 0.45929640531539917, "learning_rate": 0.00019916837372799048, "loss": 1.1836, "step": 942 }, { "epoch": 0.16791310541310542, "grad_norm": 0.4609353542327881, "learning_rate": 0.0001991665712994911, "loss": 1.0682, "step": 943 }, { "epoch": 0.16809116809116809, "grad_norm": 0.42617303133010864, "learning_rate": 0.00019916476692802873, "loss": 1.074, "step": 944 }, { "epoch": 0.16826923076923078, "grad_norm": 0.41919493675231934, "learning_rate": 0.00019916296061363875, "loss": 1.0969, "step": 945 }, { "epoch": 0.16844729344729345, "grad_norm": 
0.450979083776474, "learning_rate": 0.00019916115235635656, "loss": 1.1686, "step": 946 }, { "epoch": 0.16862535612535612, "grad_norm": 0.42166751623153687, "learning_rate": 0.00019915934215621758, "loss": 0.9273, "step": 947 }, { "epoch": 0.16880341880341881, "grad_norm": 0.4404160976409912, "learning_rate": 0.00019915753001325729, "loss": 1.1663, "step": 948 }, { "epoch": 0.16898148148148148, "grad_norm": 0.42025226354599, "learning_rate": 0.0001991557159275111, "loss": 0.9433, "step": 949 }, { "epoch": 0.16915954415954415, "grad_norm": 0.4277796745300293, "learning_rate": 0.00019915389989901474, "loss": 0.8475, "step": 950 }, { "epoch": 0.16933760683760685, "grad_norm": 0.5162755250930786, "learning_rate": 0.00019915208192780365, "loss": 1.1155, "step": 951 }, { "epoch": 0.16951566951566951, "grad_norm": 0.4214856028556824, "learning_rate": 0.00019915026201391346, "loss": 1.173, "step": 952 }, { "epoch": 0.16969373219373218, "grad_norm": 0.4713292419910431, "learning_rate": 0.00019914844015737985, "loss": 1.1615, "step": 953 }, { "epoch": 0.16987179487179488, "grad_norm": 0.461179256439209, "learning_rate": 0.00019914661635823854, "loss": 1.1169, "step": 954 }, { "epoch": 0.17004985754985755, "grad_norm": 0.46200552582740784, "learning_rate": 0.00019914479061652527, "loss": 1.0274, "step": 955 }, { "epoch": 0.17022792022792022, "grad_norm": 0.40968334674835205, "learning_rate": 0.00019914296293227572, "loss": 1.066, "step": 956 }, { "epoch": 0.1704059829059829, "grad_norm": 0.40877434611320496, "learning_rate": 0.0001991411333055258, "loss": 1.1595, "step": 957 }, { "epoch": 0.17058404558404558, "grad_norm": 0.42940187454223633, "learning_rate": 0.00019913930173631132, "loss": 1.0364, "step": 958 }, { "epoch": 0.17076210826210828, "grad_norm": 0.49648910760879517, "learning_rate": 0.00019913746822466819, "loss": 1.0763, "step": 959 }, { "epoch": 0.17094017094017094, "grad_norm": 0.4353426396846771, "learning_rate": 0.00019913563277063228, "loss": 0.9698, "step": 
960 }, { "epoch": 0.1711182336182336, "grad_norm": 0.45079681277275085, "learning_rate": 0.00019913379537423958, "loss": 1.2244, "step": 961 }, { "epoch": 0.1712962962962963, "grad_norm": 0.4276828467845917, "learning_rate": 0.00019913195603552607, "loss": 0.9976, "step": 962 }, { "epoch": 0.17147435897435898, "grad_norm": 0.41122403740882874, "learning_rate": 0.00019913011475452785, "loss": 1.0077, "step": 963 }, { "epoch": 0.17165242165242164, "grad_norm": 0.43170276284217834, "learning_rate": 0.00019912827153128096, "loss": 1.1402, "step": 964 }, { "epoch": 0.17183048433048434, "grad_norm": 0.37950268387794495, "learning_rate": 0.0001991264263658215, "loss": 0.9818, "step": 965 }, { "epoch": 0.172008547008547, "grad_norm": 0.477333128452301, "learning_rate": 0.00019912457925818562, "loss": 1.1756, "step": 966 }, { "epoch": 0.17218660968660968, "grad_norm": 0.4326401352882385, "learning_rate": 0.00019912273020840954, "loss": 1.3718, "step": 967 }, { "epoch": 0.17236467236467237, "grad_norm": 0.37711042165756226, "learning_rate": 0.00019912087921652945, "loss": 0.9011, "step": 968 }, { "epoch": 0.17254273504273504, "grad_norm": 0.50013667345047, "learning_rate": 0.00019911902628258162, "loss": 1.1163, "step": 969 }, { "epoch": 0.1727207977207977, "grad_norm": 0.41913339495658875, "learning_rate": 0.0001991171714066024, "loss": 1.2614, "step": 970 }, { "epoch": 0.1728988603988604, "grad_norm": 0.4075855612754822, "learning_rate": 0.00019911531458862813, "loss": 0.8984, "step": 971 }, { "epoch": 0.17307692307692307, "grad_norm": 0.40277954936027527, "learning_rate": 0.00019911345582869513, "loss": 1.0851, "step": 972 }, { "epoch": 0.17325498575498577, "grad_norm": 0.4312847852706909, "learning_rate": 0.00019911159512683987, "loss": 1.1273, "step": 973 }, { "epoch": 0.17343304843304844, "grad_norm": 0.40303611755371094, "learning_rate": 0.0001991097324830988, "loss": 0.9645, "step": 974 }, { "epoch": 0.1736111111111111, "grad_norm": 0.45560577511787415, 
"learning_rate": 0.00019910786789750838, "loss": 1.0864, "step": 975 }, { "epoch": 0.1737891737891738, "grad_norm": 0.43775680661201477, "learning_rate": 0.00019910600137010517, "loss": 1.028, "step": 976 }, { "epoch": 0.17396723646723647, "grad_norm": 0.3917224407196045, "learning_rate": 0.00019910413290092572, "loss": 1.0491, "step": 977 }, { "epoch": 0.17414529914529914, "grad_norm": 0.4068751037120819, "learning_rate": 0.0001991022624900067, "loss": 1.0476, "step": 978 }, { "epoch": 0.17432336182336183, "grad_norm": 0.4463370144367218, "learning_rate": 0.0001991003901373847, "loss": 1.0612, "step": 979 }, { "epoch": 0.1745014245014245, "grad_norm": 0.46949052810668945, "learning_rate": 0.0001990985158430964, "loss": 1.3099, "step": 980 }, { "epoch": 0.17467948717948717, "grad_norm": 0.4250012934207916, "learning_rate": 0.00019909663960717856, "loss": 0.9903, "step": 981 }, { "epoch": 0.17485754985754987, "grad_norm": 0.5293903946876526, "learning_rate": 0.0001990947614296679, "loss": 0.9908, "step": 982 }, { "epoch": 0.17503561253561253, "grad_norm": 0.3838284909725189, "learning_rate": 0.0001990928813106013, "loss": 0.716, "step": 983 }, { "epoch": 0.1752136752136752, "grad_norm": 0.4597751200199127, "learning_rate": 0.0001990909992500155, "loss": 1.0126, "step": 984 }, { "epoch": 0.1753917378917379, "grad_norm": 0.4844081699848175, "learning_rate": 0.0001990891152479474, "loss": 1.1043, "step": 985 }, { "epoch": 0.17556980056980057, "grad_norm": 0.4763399660587311, "learning_rate": 0.00019908722930443392, "loss": 1.019, "step": 986 }, { "epoch": 0.17574786324786323, "grad_norm": 0.4670077860355377, "learning_rate": 0.00019908534141951204, "loss": 1.1382, "step": 987 }, { "epoch": 0.17592592592592593, "grad_norm": 0.39372730255126953, "learning_rate": 0.00019908345159321873, "loss": 1.1219, "step": 988 }, { "epoch": 0.1761039886039886, "grad_norm": 0.41869843006134033, "learning_rate": 0.00019908155982559098, "loss": 0.9461, "step": 989 }, { "epoch": 
0.1762820512820513, "grad_norm": 0.4398406147956848, "learning_rate": 0.00019907966611666593, "loss": 1.1328, "step": 990 }, { "epoch": 0.17646011396011396, "grad_norm": 0.4315733015537262, "learning_rate": 0.0001990777704664806, "loss": 1.0974, "step": 991 }, { "epoch": 0.17663817663817663, "grad_norm": 0.42859575152397156, "learning_rate": 0.00019907587287507222, "loss": 1.2637, "step": 992 }, { "epoch": 0.17681623931623933, "grad_norm": 0.47928622364997864, "learning_rate": 0.0001990739733424779, "loss": 1.0699, "step": 993 }, { "epoch": 0.176994301994302, "grad_norm": 0.4443826973438263, "learning_rate": 0.00019907207186873488, "loss": 1.0547, "step": 994 }, { "epoch": 0.17717236467236466, "grad_norm": 0.4108099937438965, "learning_rate": 0.00019907016845388043, "loss": 1.1401, "step": 995 }, { "epoch": 0.17735042735042736, "grad_norm": 0.4474675953388214, "learning_rate": 0.00019906826309795182, "loss": 1.0712, "step": 996 }, { "epoch": 0.17752849002849003, "grad_norm": 0.4149756133556366, "learning_rate": 0.00019906635580098638, "loss": 0.9585, "step": 997 }, { "epoch": 0.1777065527065527, "grad_norm": 0.4875968098640442, "learning_rate": 0.00019906444656302152, "loss": 1.0659, "step": 998 }, { "epoch": 0.1778846153846154, "grad_norm": 0.5494784116744995, "learning_rate": 0.0001990625353840946, "loss": 1.2858, "step": 999 }, { "epoch": 0.17806267806267806, "grad_norm": 0.425062358379364, "learning_rate": 0.0001990606222642431, "loss": 1.1826, "step": 1000 }, { "epoch": 0.17824074074074073, "grad_norm": 0.3890725374221802, "learning_rate": 0.00019905870720350445, "loss": 0.9568, "step": 1001 }, { "epoch": 0.17841880341880342, "grad_norm": 0.3884070813655853, "learning_rate": 0.00019905679020191624, "loss": 0.9674, "step": 1002 }, { "epoch": 0.1785968660968661, "grad_norm": 0.49496129155158997, "learning_rate": 0.00019905487125951597, "loss": 0.9143, "step": 1003 }, { "epoch": 0.1787749287749288, "grad_norm": 0.43448135256767273, "learning_rate": 
0.00019905295037634128, "loss": 1.2677, "step": 1004 }, { "epoch": 0.17895299145299146, "grad_norm": 0.47327905893325806, "learning_rate": 0.00019905102755242982, "loss": 0.9089, "step": 1005 }, { "epoch": 0.17913105413105412, "grad_norm": 0.4962378442287445, "learning_rate": 0.00019904910278781922, "loss": 1.1748, "step": 1006 }, { "epoch": 0.17930911680911682, "grad_norm": 0.4343934655189514, "learning_rate": 0.0001990471760825472, "loss": 1.2176, "step": 1007 }, { "epoch": 0.1794871794871795, "grad_norm": 0.4695793092250824, "learning_rate": 0.0001990452474366515, "loss": 1.1822, "step": 1008 }, { "epoch": 0.17966524216524216, "grad_norm": 0.4156060516834259, "learning_rate": 0.00019904331685016995, "loss": 0.8231, "step": 1009 }, { "epoch": 0.17984330484330485, "grad_norm": 0.5068191885948181, "learning_rate": 0.00019904138432314035, "loss": 1.1363, "step": 1010 }, { "epoch": 0.18002136752136752, "grad_norm": 0.5189786553382874, "learning_rate": 0.00019903944985560058, "loss": 1.3131, "step": 1011 }, { "epoch": 0.1801994301994302, "grad_norm": 0.5126828551292419, "learning_rate": 0.00019903751344758848, "loss": 1.0305, "step": 1012 }, { "epoch": 0.18037749287749288, "grad_norm": 0.41045933961868286, "learning_rate": 0.00019903557509914205, "loss": 1.2726, "step": 1013 }, { "epoch": 0.18055555555555555, "grad_norm": 0.4141713082790375, "learning_rate": 0.0001990336348102993, "loss": 0.9606, "step": 1014 }, { "epoch": 0.18073361823361822, "grad_norm": 0.42652079463005066, "learning_rate": 0.00019903169258109812, "loss": 1.0235, "step": 1015 }, { "epoch": 0.18091168091168092, "grad_norm": 0.42098379135131836, "learning_rate": 0.0001990297484115767, "loss": 1.0602, "step": 1016 }, { "epoch": 0.18108974358974358, "grad_norm": 0.49920013546943665, "learning_rate": 0.0001990278023017731, "loss": 1.3322, "step": 1017 }, { "epoch": 0.18126780626780628, "grad_norm": 0.412304550409317, "learning_rate": 0.00019902585425172537, "loss": 1.1011, "step": 1018 }, { "epoch": 
0.18144586894586895, "grad_norm": 0.44226935505867004, "learning_rate": 0.00019902390426147177, "loss": 0.9777, "step": 1019 }, { "epoch": 0.18162393162393162, "grad_norm": 0.4685269594192505, "learning_rate": 0.00019902195233105046, "loss": 1.3587, "step": 1020 }, { "epoch": 0.1818019943019943, "grad_norm": 0.4500584304332733, "learning_rate": 0.00019901999846049968, "loss": 0.9888, "step": 1021 }, { "epoch": 0.18198005698005698, "grad_norm": 0.48566994071006775, "learning_rate": 0.00019901804264985774, "loss": 1.2364, "step": 1022 }, { "epoch": 0.18215811965811965, "grad_norm": 0.4063156247138977, "learning_rate": 0.00019901608489916294, "loss": 1.2224, "step": 1023 }, { "epoch": 0.18233618233618235, "grad_norm": 0.471276193857193, "learning_rate": 0.00019901412520845367, "loss": 0.9926, "step": 1024 }, { "epoch": 0.182514245014245, "grad_norm": 0.5165421366691589, "learning_rate": 0.00019901216357776829, "loss": 0.9595, "step": 1025 }, { "epoch": 0.18269230769230768, "grad_norm": 0.4746754467487335, "learning_rate": 0.0001990102000071452, "loss": 1.2057, "step": 1026 }, { "epoch": 0.18287037037037038, "grad_norm": 0.44803035259246826, "learning_rate": 0.00019900823449662297, "loss": 1.2114, "step": 1027 }, { "epoch": 0.18304843304843305, "grad_norm": 0.47256240248680115, "learning_rate": 0.00019900626704624005, "loss": 1.112, "step": 1028 }, { "epoch": 0.18322649572649571, "grad_norm": 0.4253387153148651, "learning_rate": 0.000199004297656035, "loss": 0.9899, "step": 1029 }, { "epoch": 0.1834045584045584, "grad_norm": 0.44958099722862244, "learning_rate": 0.00019900232632604636, "loss": 1.1445, "step": 1030 }, { "epoch": 0.18358262108262108, "grad_norm": 0.5296537280082703, "learning_rate": 0.00019900035305631285, "loss": 1.2502, "step": 1031 }, { "epoch": 0.18376068376068377, "grad_norm": 0.5057148933410645, "learning_rate": 0.00019899837784687302, "loss": 1.1426, "step": 1032 }, { "epoch": 0.18393874643874644, "grad_norm": 0.41463762521743774, "learning_rate": 
0.00019899640069776566, "loss": 1.1854, "step": 1033 }, { "epoch": 0.1841168091168091, "grad_norm": 0.45800045132637024, "learning_rate": 0.00019899442160902945, "loss": 1.2438, "step": 1034 }, { "epoch": 0.1842948717948718, "grad_norm": 0.43450453877449036, "learning_rate": 0.00019899244058070324, "loss": 1.0598, "step": 1035 }, { "epoch": 0.18447293447293447, "grad_norm": 0.4141148626804352, "learning_rate": 0.00019899045761282577, "loss": 1.0465, "step": 1036 }, { "epoch": 0.18465099715099714, "grad_norm": 0.3938458263874054, "learning_rate": 0.0001989884727054359, "loss": 1.0142, "step": 1037 }, { "epoch": 0.18482905982905984, "grad_norm": 0.43898263573646545, "learning_rate": 0.00019898648585857257, "loss": 0.9212, "step": 1038 }, { "epoch": 0.1850071225071225, "grad_norm": 0.4425487816333771, "learning_rate": 0.00019898449707227465, "loss": 1.2987, "step": 1039 }, { "epoch": 0.18518518518518517, "grad_norm": 0.4537975490093231, "learning_rate": 0.00019898250634658115, "loss": 1.2023, "step": 1040 }, { "epoch": 0.18536324786324787, "grad_norm": 0.4107198119163513, "learning_rate": 0.00019898051368153104, "loss": 0.8443, "step": 1041 }, { "epoch": 0.18554131054131054, "grad_norm": 0.4389404058456421, "learning_rate": 0.0001989785190771634, "loss": 1.0502, "step": 1042 }, { "epoch": 0.1857193732193732, "grad_norm": 0.4288824796676636, "learning_rate": 0.00019897652253351726, "loss": 1.01, "step": 1043 }, { "epoch": 0.1858974358974359, "grad_norm": 0.50815349817276, "learning_rate": 0.00019897452405063178, "loss": 1.0308, "step": 1044 }, { "epoch": 0.18607549857549857, "grad_norm": 0.45252710580825806, "learning_rate": 0.0001989725236285461, "loss": 1.0967, "step": 1045 }, { "epoch": 0.18625356125356127, "grad_norm": 0.45049402117729187, "learning_rate": 0.00019897052126729943, "loss": 1.0141, "step": 1046 }, { "epoch": 0.18643162393162394, "grad_norm": 0.49637508392333984, "learning_rate": 0.00019896851696693098, "loss": 1.0997, "step": 1047 }, { "epoch": 
0.1866096866096866, "grad_norm": 0.4465886056423187, "learning_rate": 0.00019896651072748005, "loss": 1.1415, "step": 1048 }, { "epoch": 0.1867877492877493, "grad_norm": 0.5309500694274902, "learning_rate": 0.00019896450254898592, "loss": 1.1028, "step": 1049 }, { "epoch": 0.18696581196581197, "grad_norm": 0.3516653776168823, "learning_rate": 0.00019896249243148793, "loss": 0.9841, "step": 1050 }, { "epoch": 0.18714387464387464, "grad_norm": 0.4529176950454712, "learning_rate": 0.0001989604803750255, "loss": 1.1335, "step": 1051 }, { "epoch": 0.18732193732193733, "grad_norm": 0.47694942355155945, "learning_rate": 0.000198958466379638, "loss": 1.2383, "step": 1052 }, { "epoch": 0.1875, "grad_norm": 0.5524206757545471, "learning_rate": 0.0001989564504453649, "loss": 1.3668, "step": 1053 }, { "epoch": 0.18767806267806267, "grad_norm": 0.39203691482543945, "learning_rate": 0.00019895443257224576, "loss": 1.2203, "step": 1054 }, { "epoch": 0.18785612535612536, "grad_norm": 0.4164120852947235, "learning_rate": 0.00019895241276032005, "loss": 0.8954, "step": 1055 }, { "epoch": 0.18803418803418803, "grad_norm": 0.41217970848083496, "learning_rate": 0.0001989503910096274, "loss": 1.0238, "step": 1056 }, { "epoch": 0.1882122507122507, "grad_norm": 0.44038307666778564, "learning_rate": 0.00019894836732020735, "loss": 0.8159, "step": 1057 }, { "epoch": 0.1883903133903134, "grad_norm": 0.45780670642852783, "learning_rate": 0.0001989463416920996, "loss": 1.2864, "step": 1058 }, { "epoch": 0.18856837606837606, "grad_norm": 0.5197559595108032, "learning_rate": 0.00019894431412534384, "loss": 1.0756, "step": 1059 }, { "epoch": 0.18874643874643873, "grad_norm": 0.43283385038375854, "learning_rate": 0.00019894228461997979, "loss": 1.0642, "step": 1060 }, { "epoch": 0.18892450142450143, "grad_norm": 0.4657376706600189, "learning_rate": 0.00019894025317604717, "loss": 1.1159, "step": 1061 }, { "epoch": 0.1891025641025641, "grad_norm": 0.4474908113479614, "learning_rate": 
0.00019893821979358588, "loss": 1.2006, "step": 1062 }, { "epoch": 0.1892806267806268, "grad_norm": 0.43878164887428284, "learning_rate": 0.00019893618447263566, "loss": 1.1599, "step": 1063 }, { "epoch": 0.18945868945868946, "grad_norm": 0.4598735272884369, "learning_rate": 0.00019893414721323645, "loss": 1.3346, "step": 1064 }, { "epoch": 0.18963675213675213, "grad_norm": 0.3947420120239258, "learning_rate": 0.00019893210801542812, "loss": 1.1201, "step": 1065 }, { "epoch": 0.18981481481481483, "grad_norm": 0.3401558995246887, "learning_rate": 0.00019893006687925064, "loss": 0.7568, "step": 1066 }, { "epoch": 0.1899928774928775, "grad_norm": 0.4400341808795929, "learning_rate": 0.00019892802380474405, "loss": 1.1706, "step": 1067 }, { "epoch": 0.19017094017094016, "grad_norm": 0.42394164204597473, "learning_rate": 0.00019892597879194829, "loss": 1.0163, "step": 1068 }, { "epoch": 0.19034900284900286, "grad_norm": 0.42904096841812134, "learning_rate": 0.00019892393184090353, "loss": 0.9193, "step": 1069 }, { "epoch": 0.19052706552706553, "grad_norm": 0.497601181268692, "learning_rate": 0.00019892188295164977, "loss": 1.0377, "step": 1070 }, { "epoch": 0.1907051282051282, "grad_norm": 0.4536020755767822, "learning_rate": 0.00019891983212422723, "loss": 1.0946, "step": 1071 }, { "epoch": 0.1908831908831909, "grad_norm": 0.44916942715644836, "learning_rate": 0.00019891777935867607, "loss": 1.0563, "step": 1072 }, { "epoch": 0.19106125356125356, "grad_norm": 0.4256889820098877, "learning_rate": 0.0001989157246550365, "loss": 1.0988, "step": 1073 }, { "epoch": 0.19123931623931623, "grad_norm": 0.5559163689613342, "learning_rate": 0.0001989136680133488, "loss": 0.9155, "step": 1074 }, { "epoch": 0.19141737891737892, "grad_norm": 0.391804963350296, "learning_rate": 0.00019891160943365322, "loss": 0.9314, "step": 1075 }, { "epoch": 0.1915954415954416, "grad_norm": 0.4535716474056244, "learning_rate": 0.00019890954891599015, "loss": 1.0768, "step": 1076 }, { "epoch": 
0.19177350427350429, "grad_norm": 0.46770521998405457, "learning_rate": 0.00019890748646039991, "loss": 0.8406, "step": 1077 }, { "epoch": 0.19195156695156695, "grad_norm": 0.4875394403934479, "learning_rate": 0.00019890542206692295, "loss": 1.1055, "step": 1078 }, { "epoch": 0.19212962962962962, "grad_norm": 0.5072727203369141, "learning_rate": 0.0001989033557355997, "loss": 1.3093, "step": 1079 }, { "epoch": 0.19230769230769232, "grad_norm": 0.4419287443161011, "learning_rate": 0.00019890128746647068, "loss": 1.1916, "step": 1080 }, { "epoch": 0.192485754985755, "grad_norm": 0.45803651213645935, "learning_rate": 0.00019889921725957637, "loss": 1.2579, "step": 1081 }, { "epoch": 0.19266381766381765, "grad_norm": 0.4832262098789215, "learning_rate": 0.0001988971451149573, "loss": 1.3217, "step": 1082 }, { "epoch": 0.19284188034188035, "grad_norm": 0.4819786250591278, "learning_rate": 0.00019889507103265416, "loss": 1.0979, "step": 1083 }, { "epoch": 0.19301994301994302, "grad_norm": 0.49360713362693787, "learning_rate": 0.0001988929950127075, "loss": 1.0987, "step": 1084 }, { "epoch": 0.1931980056980057, "grad_norm": 0.44209200143814087, "learning_rate": 0.00019889091705515806, "loss": 1.2616, "step": 1085 }, { "epoch": 0.19337606837606838, "grad_norm": 0.41626206040382385, "learning_rate": 0.00019888883716004654, "loss": 1.0922, "step": 1086 }, { "epoch": 0.19355413105413105, "grad_norm": 0.4916635751724243, "learning_rate": 0.00019888675532741366, "loss": 0.9331, "step": 1087 }, { "epoch": 0.19373219373219372, "grad_norm": 0.4493125379085541, "learning_rate": 0.00019888467155730025, "loss": 1.1261, "step": 1088 }, { "epoch": 0.19391025641025642, "grad_norm": 0.3755671977996826, "learning_rate": 0.00019888258584974708, "loss": 0.9821, "step": 1089 }, { "epoch": 0.19408831908831908, "grad_norm": 0.41917556524276733, "learning_rate": 0.00019888049820479507, "loss": 1.251, "step": 1090 }, { "epoch": 0.19426638176638178, "grad_norm": 0.46184420585632324, 
"learning_rate": 0.0001988784086224851, "loss": 1.1731, "step": 1091 }, { "epoch": 0.19444444444444445, "grad_norm": 0.4783691465854645, "learning_rate": 0.00019887631710285812, "loss": 1.1635, "step": 1092 }, { "epoch": 0.19462250712250712, "grad_norm": 0.4710482060909271, "learning_rate": 0.00019887422364595512, "loss": 1.0229, "step": 1093 }, { "epoch": 0.1948005698005698, "grad_norm": 0.4738706648349762, "learning_rate": 0.00019887212825181707, "loss": 1.128, "step": 1094 }, { "epoch": 0.19497863247863248, "grad_norm": 0.45665010809898376, "learning_rate": 0.00019887003092048508, "loss": 1.0425, "step": 1095 }, { "epoch": 0.19515669515669515, "grad_norm": 0.42740485072135925, "learning_rate": 0.0001988679316520002, "loss": 1.0738, "step": 1096 }, { "epoch": 0.19533475783475784, "grad_norm": 0.5977092385292053, "learning_rate": 0.0001988658304464036, "loss": 1.2687, "step": 1097 }, { "epoch": 0.1955128205128205, "grad_norm": 0.4411074221134186, "learning_rate": 0.0001988637273037364, "loss": 1.287, "step": 1098 }, { "epoch": 0.19569088319088318, "grad_norm": 0.4409518539905548, "learning_rate": 0.00019886162222403986, "loss": 1.0515, "step": 1099 }, { "epoch": 0.19586894586894588, "grad_norm": 0.4926736652851105, "learning_rate": 0.0001988595152073552, "loss": 1.1388, "step": 1100 }, { "epoch": 0.19604700854700854, "grad_norm": 0.4607115387916565, "learning_rate": 0.00019885740625372368, "loss": 0.9803, "step": 1101 }, { "epoch": 0.1962250712250712, "grad_norm": 0.4725342094898224, "learning_rate": 0.0001988552953631867, "loss": 1.199, "step": 1102 }, { "epoch": 0.1964031339031339, "grad_norm": 0.48014503717422485, "learning_rate": 0.00019885318253578548, "loss": 1.1868, "step": 1103 }, { "epoch": 0.19658119658119658, "grad_norm": 0.3872644603252411, "learning_rate": 0.00019885106777156155, "loss": 0.9182, "step": 1104 }, { "epoch": 0.19675925925925927, "grad_norm": 0.4737720787525177, "learning_rate": 0.00019884895107055627, "loss": 1.1513, "step": 1105 }, { 
"epoch": 0.19693732193732194, "grad_norm": 0.4144562780857086, "learning_rate": 0.00019884683243281116, "loss": 1.1711, "step": 1106 }, { "epoch": 0.1971153846153846, "grad_norm": 0.4672079384326935, "learning_rate": 0.00019884471185836769, "loss": 1.0386, "step": 1107 }, { "epoch": 0.1972934472934473, "grad_norm": 0.4558824598789215, "learning_rate": 0.0001988425893472674, "loss": 1.0535, "step": 1108 }, { "epoch": 0.19747150997150997, "grad_norm": 0.5149834752082825, "learning_rate": 0.00019884046489955192, "loss": 1.0296, "step": 1109 }, { "epoch": 0.19764957264957264, "grad_norm": 0.43444496393203735, "learning_rate": 0.00019883833851526287, "loss": 1.1475, "step": 1110 }, { "epoch": 0.19782763532763534, "grad_norm": 0.46062374114990234, "learning_rate": 0.00019883621019444188, "loss": 1.183, "step": 1111 }, { "epoch": 0.198005698005698, "grad_norm": 0.4893282949924469, "learning_rate": 0.00019883407993713065, "loss": 1.3733, "step": 1112 }, { "epoch": 0.19818376068376067, "grad_norm": 0.5434843897819519, "learning_rate": 0.00019883194774337096, "loss": 1.2505, "step": 1113 }, { "epoch": 0.19836182336182337, "grad_norm": 0.4698035418987274, "learning_rate": 0.00019882981361320456, "loss": 1.0152, "step": 1114 }, { "epoch": 0.19853988603988604, "grad_norm": 0.4582163989543915, "learning_rate": 0.00019882767754667325, "loss": 1.1718, "step": 1115 }, { "epoch": 0.1987179487179487, "grad_norm": 0.48744696378707886, "learning_rate": 0.0001988255395438189, "loss": 1.2923, "step": 1116 }, { "epoch": 0.1988960113960114, "grad_norm": 0.4172030985355377, "learning_rate": 0.0001988233996046834, "loss": 0.8098, "step": 1117 }, { "epoch": 0.19907407407407407, "grad_norm": 0.4556557834148407, "learning_rate": 0.00019882125772930867, "loss": 0.9654, "step": 1118 }, { "epoch": 0.19925213675213677, "grad_norm": 0.4363219141960144, "learning_rate": 0.00019881911391773666, "loss": 1.0333, "step": 1119 }, { "epoch": 0.19943019943019943, "grad_norm": 0.4336536228656769, 
"learning_rate": 0.0001988169681700094, "loss": 1.091, "step": 1120 }, { "epoch": 0.1996082621082621, "grad_norm": 0.42073166370391846, "learning_rate": 0.00019881482048616893, "loss": 0.9687, "step": 1121 }, { "epoch": 0.1997863247863248, "grad_norm": 0.4330587685108185, "learning_rate": 0.00019881267086625733, "loss": 1.0512, "step": 1122 }, { "epoch": 0.19996438746438747, "grad_norm": 0.4602276682853699, "learning_rate": 0.0001988105193103167, "loss": 1.1806, "step": 1123 }, { "epoch": 0.20014245014245013, "grad_norm": 0.4271257817745209, "learning_rate": 0.0001988083658183892, "loss": 1.1079, "step": 1124 }, { "epoch": 0.20032051282051283, "grad_norm": 0.35446426272392273, "learning_rate": 0.00019880621039051707, "loss": 0.6769, "step": 1125 }, { "epoch": 0.2004985754985755, "grad_norm": 0.413753479719162, "learning_rate": 0.00019880405302674244, "loss": 1.1088, "step": 1126 }, { "epoch": 0.20067663817663817, "grad_norm": 0.4423675835132599, "learning_rate": 0.00019880189372710767, "loss": 1.1371, "step": 1127 }, { "epoch": 0.20085470085470086, "grad_norm": 0.41865605115890503, "learning_rate": 0.00019879973249165502, "loss": 1.0027, "step": 1128 }, { "epoch": 0.20103276353276353, "grad_norm": 0.4109594225883484, "learning_rate": 0.00019879756932042686, "loss": 0.8734, "step": 1129 }, { "epoch": 0.2012108262108262, "grad_norm": 0.42326363921165466, "learning_rate": 0.00019879540421346555, "loss": 0.9722, "step": 1130 }, { "epoch": 0.2013888888888889, "grad_norm": 0.4601542055606842, "learning_rate": 0.00019879323717081354, "loss": 1.1251, "step": 1131 }, { "epoch": 0.20156695156695156, "grad_norm": 0.4704367518424988, "learning_rate": 0.00019879106819251327, "loss": 0.9457, "step": 1132 }, { "epoch": 0.20174501424501423, "grad_norm": 0.465023934841156, "learning_rate": 0.00019878889727860724, "loss": 0.9633, "step": 1133 }, { "epoch": 0.20192307692307693, "grad_norm": 0.4572450518608093, "learning_rate": 0.00019878672442913796, "loss": 1.1965, "step": 1134 }, { 
"epoch": 0.2021011396011396, "grad_norm": 0.4323410391807556, "learning_rate": 0.00019878454964414807, "loss": 1.1296, "step": 1135 }, { "epoch": 0.2022792022792023, "grad_norm": 0.4513751268386841, "learning_rate": 0.00019878237292368013, "loss": 1.0571, "step": 1136 }, { "epoch": 0.20245726495726496, "grad_norm": 0.45504096150398254, "learning_rate": 0.00019878019426777677, "loss": 1.0316, "step": 1137 }, { "epoch": 0.20263532763532763, "grad_norm": 0.45715275406837463, "learning_rate": 0.0001987780136764807, "loss": 1.0528, "step": 1138 }, { "epoch": 0.20281339031339032, "grad_norm": 0.4934465289115906, "learning_rate": 0.00019877583114983466, "loss": 1.3238, "step": 1139 }, { "epoch": 0.202991452991453, "grad_norm": 0.4304082989692688, "learning_rate": 0.0001987736466878814, "loss": 1.1774, "step": 1140 }, { "epoch": 0.20316951566951566, "grad_norm": 0.49721968173980713, "learning_rate": 0.00019877146029066372, "loss": 1.1767, "step": 1141 }, { "epoch": 0.20334757834757836, "grad_norm": 0.3629468083381653, "learning_rate": 0.00019876927195822445, "loss": 0.8588, "step": 1142 }, { "epoch": 0.20352564102564102, "grad_norm": 0.49310383200645447, "learning_rate": 0.00019876708169060648, "loss": 1.0588, "step": 1143 }, { "epoch": 0.2037037037037037, "grad_norm": 0.4270328879356384, "learning_rate": 0.00019876488948785271, "loss": 1.1523, "step": 1144 }, { "epoch": 0.2038817663817664, "grad_norm": 0.4559730887413025, "learning_rate": 0.0001987626953500061, "loss": 1.1736, "step": 1145 }, { "epoch": 0.20405982905982906, "grad_norm": 0.5335259437561035, "learning_rate": 0.00019876049927710962, "loss": 0.991, "step": 1146 }, { "epoch": 0.20423789173789172, "grad_norm": 0.43500083684921265, "learning_rate": 0.0001987583012692063, "loss": 1.0631, "step": 1147 }, { "epoch": 0.20441595441595442, "grad_norm": 0.4135417938232422, "learning_rate": 0.00019875610132633927, "loss": 1.0896, "step": 1148 }, { "epoch": 0.2045940170940171, "grad_norm": 0.4078896641731262, 
"learning_rate": 0.00019875389944855153, "loss": 1.0395, "step": 1149 }, { "epoch": 0.20477207977207978, "grad_norm": 0.46612194180488586, "learning_rate": 0.00019875169563588632, "loss": 1.0541, "step": 1150 }, { "epoch": 0.20495014245014245, "grad_norm": 0.5093224048614502, "learning_rate": 0.00019874948988838674, "loss": 1.1486, "step": 1151 }, { "epoch": 0.20512820512820512, "grad_norm": 0.5079755187034607, "learning_rate": 0.00019874728220609607, "loss": 1.2614, "step": 1152 }, { "epoch": 0.20530626780626782, "grad_norm": 0.43663498759269714, "learning_rate": 0.0001987450725890575, "loss": 1.0683, "step": 1153 }, { "epoch": 0.20548433048433049, "grad_norm": 0.5029327273368835, "learning_rate": 0.00019874286103731435, "loss": 1.1934, "step": 1154 }, { "epoch": 0.20566239316239315, "grad_norm": 0.48770397901535034, "learning_rate": 0.00019874064755090999, "loss": 1.1634, "step": 1155 }, { "epoch": 0.20584045584045585, "grad_norm": 0.46826690435409546, "learning_rate": 0.00019873843212988776, "loss": 1.0621, "step": 1156 }, { "epoch": 0.20601851851851852, "grad_norm": 0.4810047149658203, "learning_rate": 0.00019873621477429105, "loss": 1.0879, "step": 1157 }, { "epoch": 0.20619658119658119, "grad_norm": 0.4769522249698639, "learning_rate": 0.00019873399548416335, "loss": 1.1365, "step": 1158 }, { "epoch": 0.20637464387464388, "grad_norm": 0.4221782982349396, "learning_rate": 0.00019873177425954806, "loss": 1.1168, "step": 1159 }, { "epoch": 0.20655270655270655, "grad_norm": 0.4084923565387726, "learning_rate": 0.00019872955110048876, "loss": 1.2364, "step": 1160 }, { "epoch": 0.20673076923076922, "grad_norm": 0.4781704545021057, "learning_rate": 0.00019872732600702904, "loss": 1.19, "step": 1161 }, { "epoch": 0.20690883190883191, "grad_norm": 0.3984242081642151, "learning_rate": 0.0001987250989792124, "loss": 1.0568, "step": 1162 }, { "epoch": 0.20708689458689458, "grad_norm": 0.4601972997188568, "learning_rate": 0.00019872287001708257, "loss": 1.1625, "step": 
1163 }, { "epoch": 0.20726495726495728, "grad_norm": 0.4853581190109253, "learning_rate": 0.00019872063912068316, "loss": 1.2304, "step": 1164 }, { "epoch": 0.20744301994301995, "grad_norm": 0.41779839992523193, "learning_rate": 0.0001987184062900579, "loss": 0.9807, "step": 1165 }, { "epoch": 0.20762108262108261, "grad_norm": 0.4945356249809265, "learning_rate": 0.00019871617152525056, "loss": 1.1861, "step": 1166 }, { "epoch": 0.2077991452991453, "grad_norm": 0.47432294487953186, "learning_rate": 0.00019871393482630487, "loss": 1.1448, "step": 1167 }, { "epoch": 0.20797720797720798, "grad_norm": 0.44647398591041565, "learning_rate": 0.00019871169619326473, "loss": 1.096, "step": 1168 }, { "epoch": 0.20815527065527065, "grad_norm": 0.4643072783946991, "learning_rate": 0.00019870945562617393, "loss": 1.1561, "step": 1169 }, { "epoch": 0.20833333333333334, "grad_norm": 0.4544340968132019, "learning_rate": 0.0001987072131250764, "loss": 1.0764, "step": 1170 }, { "epoch": 0.208511396011396, "grad_norm": 0.6036561727523804, "learning_rate": 0.00019870496869001607, "loss": 1.3961, "step": 1171 }, { "epoch": 0.20868945868945868, "grad_norm": 0.41348758339881897, "learning_rate": 0.00019870272232103695, "loss": 1.2219, "step": 1172 }, { "epoch": 0.20886752136752137, "grad_norm": 0.4184056222438812, "learning_rate": 0.000198700474018183, "loss": 1.1115, "step": 1173 }, { "epoch": 0.20904558404558404, "grad_norm": 0.41920599341392517, "learning_rate": 0.0001986982237814983, "loss": 0.9207, "step": 1174 }, { "epoch": 0.2092236467236467, "grad_norm": 0.4710249602794647, "learning_rate": 0.00019869597161102694, "loss": 1.1342, "step": 1175 }, { "epoch": 0.2094017094017094, "grad_norm": 0.46897777915000916, "learning_rate": 0.000198693717506813, "loss": 0.983, "step": 1176 }, { "epoch": 0.20957977207977208, "grad_norm": 0.4817039370536804, "learning_rate": 0.00019869146146890074, "loss": 1.0923, "step": 1177 }, { "epoch": 0.20975783475783477, "grad_norm": 0.4806751012802124, 
"learning_rate": 0.00019868920349733427, "loss": 1.2296, "step": 1178 }, { "epoch": 0.20993589743589744, "grad_norm": 0.44182994961738586, "learning_rate": 0.0001986869435921579, "loss": 1.1856, "step": 1179 }, { "epoch": 0.2101139601139601, "grad_norm": 0.4282805621623993, "learning_rate": 0.00019868468175341584, "loss": 1.0046, "step": 1180 }, { "epoch": 0.2102920227920228, "grad_norm": 0.5011838674545288, "learning_rate": 0.00019868241798115242, "loss": 1.2401, "step": 1181 }, { "epoch": 0.21047008547008547, "grad_norm": 0.4282447397708893, "learning_rate": 0.00019868015227541208, "loss": 0.9338, "step": 1182 }, { "epoch": 0.21064814814814814, "grad_norm": 0.4348810911178589, "learning_rate": 0.00019867788463623912, "loss": 0.926, "step": 1183 }, { "epoch": 0.21082621082621084, "grad_norm": 0.41518425941467285, "learning_rate": 0.00019867561506367799, "loss": 1.2723, "step": 1184 }, { "epoch": 0.2110042735042735, "grad_norm": 0.47346001863479614, "learning_rate": 0.00019867334355777315, "loss": 1.1931, "step": 1185 }, { "epoch": 0.21118233618233617, "grad_norm": 0.4071715474128723, "learning_rate": 0.00019867107011856914, "loss": 0.9619, "step": 1186 }, { "epoch": 0.21136039886039887, "grad_norm": 0.4803447425365448, "learning_rate": 0.00019866879474611046, "loss": 1.2, "step": 1187 }, { "epoch": 0.21153846153846154, "grad_norm": 0.4827699661254883, "learning_rate": 0.00019866651744044172, "loss": 1.0938, "step": 1188 }, { "epoch": 0.2117165242165242, "grad_norm": 0.4528424143791199, "learning_rate": 0.00019866423820160756, "loss": 0.9721, "step": 1189 }, { "epoch": 0.2118945868945869, "grad_norm": 0.43566834926605225, "learning_rate": 0.0001986619570296526, "loss": 1.0352, "step": 1190 }, { "epoch": 0.21207264957264957, "grad_norm": 0.4516540467739105, "learning_rate": 0.0001986596739246215, "loss": 1.1333, "step": 1191 }, { "epoch": 0.21225071225071226, "grad_norm": 0.4456641376018524, "learning_rate": 0.00019865738888655908, "loss": 1.2813, "step": 1192 }, { 
"epoch": 0.21242877492877493, "grad_norm": 0.47048309445381165, "learning_rate": 0.00019865510191551008, "loss": 1.1067, "step": 1193 }, { "epoch": 0.2126068376068376, "grad_norm": 0.4604061543941498, "learning_rate": 0.00019865281301151928, "loss": 0.925, "step": 1194 }, { "epoch": 0.2127849002849003, "grad_norm": 0.49341437220573425, "learning_rate": 0.00019865052217463153, "loss": 1.2319, "step": 1195 }, { "epoch": 0.21296296296296297, "grad_norm": 0.5099014639854431, "learning_rate": 0.00019864822940489173, "loss": 1.139, "step": 1196 }, { "epoch": 0.21314102564102563, "grad_norm": 0.41396936774253845, "learning_rate": 0.0001986459347023448, "loss": 1.0594, "step": 1197 }, { "epoch": 0.21331908831908833, "grad_norm": 0.46071869134902954, "learning_rate": 0.0001986436380670357, "loss": 1.0815, "step": 1198 }, { "epoch": 0.213497150997151, "grad_norm": 0.507882297039032, "learning_rate": 0.00019864133949900942, "loss": 1.3841, "step": 1199 }, { "epoch": 0.21367521367521367, "grad_norm": 0.45680439472198486, "learning_rate": 0.00019863903899831103, "loss": 1.0945, "step": 1200 }, { "epoch": 0.21385327635327636, "grad_norm": 0.44277429580688477, "learning_rate": 0.00019863673656498555, "loss": 1.1655, "step": 1201 }, { "epoch": 0.21403133903133903, "grad_norm": 0.43890756368637085, "learning_rate": 0.00019863443219907812, "loss": 1.1186, "step": 1202 }, { "epoch": 0.2142094017094017, "grad_norm": 0.3910178542137146, "learning_rate": 0.0001986321259006339, "loss": 1.0817, "step": 1203 }, { "epoch": 0.2143874643874644, "grad_norm": 0.3803878128528595, "learning_rate": 0.00019862981766969803, "loss": 0.8022, "step": 1204 }, { "epoch": 0.21456552706552706, "grad_norm": 0.4495108425617218, "learning_rate": 0.0001986275075063158, "loss": 1.2212, "step": 1205 }, { "epoch": 0.21474358974358973, "grad_norm": 0.5211976766586304, "learning_rate": 0.00019862519541053244, "loss": 1.2771, "step": 1206 }, { "epoch": 0.21492165242165243, "grad_norm": 0.4313061535358429, 
"learning_rate": 0.00019862288138239325, "loss": 1.1205, "step": 1207 }, { "epoch": 0.2150997150997151, "grad_norm": 0.47110888361930847, "learning_rate": 0.00019862056542194355, "loss": 1.1835, "step": 1208 }, { "epoch": 0.2152777777777778, "grad_norm": 0.5129403471946716, "learning_rate": 0.00019861824752922876, "loss": 1.1655, "step": 1209 }, { "epoch": 0.21545584045584046, "grad_norm": 0.4353938102722168, "learning_rate": 0.00019861592770429427, "loss": 1.2794, "step": 1210 }, { "epoch": 0.21563390313390313, "grad_norm": 0.48590636253356934, "learning_rate": 0.0001986136059471855, "loss": 1.2003, "step": 1211 }, { "epoch": 0.21581196581196582, "grad_norm": 0.4738406836986542, "learning_rate": 0.00019861128225794804, "loss": 1.2271, "step": 1212 }, { "epoch": 0.2159900284900285, "grad_norm": 0.45983126759529114, "learning_rate": 0.0001986089566366273, "loss": 1.1896, "step": 1213 }, { "epoch": 0.21616809116809116, "grad_norm": 0.37296006083488464, "learning_rate": 0.00019860662908326892, "loss": 1.079, "step": 1214 }, { "epoch": 0.21634615384615385, "grad_norm": 0.4442676305770874, "learning_rate": 0.00019860429959791845, "loss": 1.1754, "step": 1215 }, { "epoch": 0.21652421652421652, "grad_norm": 0.4950128495693207, "learning_rate": 0.0001986019681806216, "loss": 1.1571, "step": 1216 }, { "epoch": 0.2167022792022792, "grad_norm": 0.4374556541442871, "learning_rate": 0.000198599634831424, "loss": 1.1003, "step": 1217 }, { "epoch": 0.2168803418803419, "grad_norm": 0.47301414608955383, "learning_rate": 0.00019859729955037136, "loss": 1.1426, "step": 1218 }, { "epoch": 0.21705840455840456, "grad_norm": 0.41213178634643555, "learning_rate": 0.00019859496233750947, "loss": 1.0659, "step": 1219 }, { "epoch": 0.21723646723646722, "grad_norm": 0.41601964831352234, "learning_rate": 0.0001985926231928841, "loss": 1.0248, "step": 1220 }, { "epoch": 0.21741452991452992, "grad_norm": 0.46328839659690857, "learning_rate": 0.0001985902821165411, "loss": 1.0405, "step": 1221 }, 
{ "epoch": 0.2175925925925926, "grad_norm": 0.43287959694862366, "learning_rate": 0.0001985879391085263, "loss": 0.9202, "step": 1222 }, { "epoch": 0.21777065527065528, "grad_norm": 0.4770444631576538, "learning_rate": 0.00019858559416888568, "loss": 1.0911, "step": 1223 }, { "epoch": 0.21794871794871795, "grad_norm": 0.4756585955619812, "learning_rate": 0.00019858324729766507, "loss": 1.1566, "step": 1224 }, { "epoch": 0.21812678062678062, "grad_norm": 0.4337233006954193, "learning_rate": 0.00019858089849491054, "loss": 0.9084, "step": 1225 }, { "epoch": 0.21830484330484332, "grad_norm": 0.5165579319000244, "learning_rate": 0.00019857854776066813, "loss": 1.4154, "step": 1226 }, { "epoch": 0.21848290598290598, "grad_norm": 0.4280378520488739, "learning_rate": 0.00019857619509498382, "loss": 1.1291, "step": 1227 }, { "epoch": 0.21866096866096865, "grad_norm": 0.5375089049339294, "learning_rate": 0.00019857384049790376, "loss": 1.2985, "step": 1228 }, { "epoch": 0.21883903133903135, "grad_norm": 0.4708811640739441, "learning_rate": 0.00019857148396947401, "loss": 1.0589, "step": 1229 }, { "epoch": 0.21901709401709402, "grad_norm": 0.4744570255279541, "learning_rate": 0.00019856912550974084, "loss": 1.1269, "step": 1230 }, { "epoch": 0.21919515669515668, "grad_norm": 0.5355265736579895, "learning_rate": 0.00019856676511875043, "loss": 1.1441, "step": 1231 }, { "epoch": 0.21937321937321938, "grad_norm": 0.42718183994293213, "learning_rate": 0.00019856440279654897, "loss": 1.0244, "step": 1232 }, { "epoch": 0.21955128205128205, "grad_norm": 0.5162127614021301, "learning_rate": 0.00019856203854318283, "loss": 1.2674, "step": 1233 }, { "epoch": 0.21972934472934472, "grad_norm": 0.5180695652961731, "learning_rate": 0.00019855967235869827, "loss": 1.2472, "step": 1234 }, { "epoch": 0.2199074074074074, "grad_norm": 0.4290023744106293, "learning_rate": 0.00019855730424314167, "loss": 1.0502, "step": 1235 }, { "epoch": 0.22008547008547008, "grad_norm": 0.4418254792690277, 
"learning_rate": 0.00019855493419655945, "loss": 1.0589, "step": 1236 }, { "epoch": 0.22026353276353278, "grad_norm": 0.4074663817882538, "learning_rate": 0.000198552562218998, "loss": 0.9197, "step": 1237 }, { "epoch": 0.22044159544159544, "grad_norm": 0.4526660740375519, "learning_rate": 0.00019855018831050383, "loss": 1.2578, "step": 1238 }, { "epoch": 0.2206196581196581, "grad_norm": 0.4747827649116516, "learning_rate": 0.00019854781247112343, "loss": 1.0841, "step": 1239 }, { "epoch": 0.2207977207977208, "grad_norm": 0.41567128896713257, "learning_rate": 0.00019854543470090334, "loss": 1.0737, "step": 1240 }, { "epoch": 0.22097578347578348, "grad_norm": 0.4793100953102112, "learning_rate": 0.00019854305499989022, "loss": 1.1972, "step": 1241 }, { "epoch": 0.22115384615384615, "grad_norm": 0.41755473613739014, "learning_rate": 0.00019854067336813058, "loss": 1.2529, "step": 1242 }, { "epoch": 0.22133190883190884, "grad_norm": 0.40421152114868164, "learning_rate": 0.0001985382898056712, "loss": 1.0549, "step": 1243 }, { "epoch": 0.2215099715099715, "grad_norm": 0.45779645442962646, "learning_rate": 0.0001985359043125587, "loss": 1.1586, "step": 1244 }, { "epoch": 0.22168803418803418, "grad_norm": 0.4380546808242798, "learning_rate": 0.00019853351688883987, "loss": 1.1024, "step": 1245 }, { "epoch": 0.22186609686609687, "grad_norm": 0.39917269349098206, "learning_rate": 0.00019853112753456142, "loss": 0.9823, "step": 1246 }, { "epoch": 0.22204415954415954, "grad_norm": 0.4228038489818573, "learning_rate": 0.00019852873624977022, "loss": 1.1684, "step": 1247 }, { "epoch": 0.2222222222222222, "grad_norm": 0.4462146759033203, "learning_rate": 0.00019852634303451315, "loss": 0.9027, "step": 1248 }, { "epoch": 0.2224002849002849, "grad_norm": 0.5682163834571838, "learning_rate": 0.000198523947888837, "loss": 1.141, "step": 1249 }, { "epoch": 0.22257834757834757, "grad_norm": 0.44866830110549927, "learning_rate": 0.0001985215508127888, "loss": 1.0759, "step": 1250 }, { 
"epoch": 0.22275641025641027, "grad_norm": 0.4034106135368347, "learning_rate": 0.00019851915180641548, "loss": 1.0675, "step": 1251 }, { "epoch": 0.22293447293447294, "grad_norm": 0.4780726432800293, "learning_rate": 0.00019851675086976397, "loss": 1.0283, "step": 1252 }, { "epoch": 0.2231125356125356, "grad_norm": 0.48892372846603394, "learning_rate": 0.00019851434800288145, "loss": 1.1159, "step": 1253 }, { "epoch": 0.2232905982905983, "grad_norm": 0.42629215121269226, "learning_rate": 0.0001985119432058149, "loss": 1.0292, "step": 1254 }, { "epoch": 0.22346866096866097, "grad_norm": 0.4496444761753082, "learning_rate": 0.00019850953647861146, "loss": 1.0252, "step": 1255 }, { "epoch": 0.22364672364672364, "grad_norm": 0.4371408224105835, "learning_rate": 0.00019850712782131828, "loss": 1.1104, "step": 1256 }, { "epoch": 0.22382478632478633, "grad_norm": 0.4910794496536255, "learning_rate": 0.00019850471723398258, "loss": 1.1928, "step": 1257 }, { "epoch": 0.224002849002849, "grad_norm": 0.41235068440437317, "learning_rate": 0.00019850230471665157, "loss": 1.1261, "step": 1258 }, { "epoch": 0.22418091168091167, "grad_norm": 0.4507700502872467, "learning_rate": 0.0001984998902693725, "loss": 1.0602, "step": 1259 }, { "epoch": 0.22435897435897437, "grad_norm": 0.4654198884963989, "learning_rate": 0.00019849747389219272, "loss": 1.1258, "step": 1260 }, { "epoch": 0.22453703703703703, "grad_norm": 0.439807653427124, "learning_rate": 0.00019849505558515952, "loss": 1.2312, "step": 1261 }, { "epoch": 0.2247150997150997, "grad_norm": 0.4309258759021759, "learning_rate": 0.00019849263534832035, "loss": 1.0083, "step": 1262 }, { "epoch": 0.2248931623931624, "grad_norm": 0.4920141100883484, "learning_rate": 0.00019849021318172255, "loss": 1.0254, "step": 1263 }, { "epoch": 0.22507122507122507, "grad_norm": 0.5333457589149475, "learning_rate": 0.00019848778908541367, "loss": 1.3017, "step": 1264 }, { "epoch": 0.22524928774928774, "grad_norm": 0.4096757769584656, 
"learning_rate": 0.0001984853630594411, "loss": 0.9531, "step": 1265 }, { "epoch": 0.22542735042735043, "grad_norm": 0.5744075775146484, "learning_rate": 0.00019848293510385244, "loss": 1.1414, "step": 1266 }, { "epoch": 0.2256054131054131, "grad_norm": 0.44707193970680237, "learning_rate": 0.00019848050521869529, "loss": 1.1926, "step": 1267 }, { "epoch": 0.2257834757834758, "grad_norm": 0.4162999391555786, "learning_rate": 0.00019847807340401716, "loss": 1.1354, "step": 1268 }, { "epoch": 0.22596153846153846, "grad_norm": 0.4273204207420349, "learning_rate": 0.0001984756396598658, "loss": 0.9956, "step": 1269 }, { "epoch": 0.22613960113960113, "grad_norm": 0.5670466423034668, "learning_rate": 0.00019847320398628878, "loss": 1.2384, "step": 1270 }, { "epoch": 0.22631766381766383, "grad_norm": 0.424544095993042, "learning_rate": 0.00019847076638333395, "loss": 0.9963, "step": 1271 }, { "epoch": 0.2264957264957265, "grad_norm": 0.3716120719909668, "learning_rate": 0.000198468326851049, "loss": 0.865, "step": 1272 }, { "epoch": 0.22667378917378916, "grad_norm": 0.4472847282886505, "learning_rate": 0.00019846588538948172, "loss": 1.174, "step": 1273 }, { "epoch": 0.22685185185185186, "grad_norm": 0.4599195718765259, "learning_rate": 0.00019846344199867994, "loss": 1.289, "step": 1274 }, { "epoch": 0.22702991452991453, "grad_norm": 0.4303213357925415, "learning_rate": 0.0001984609966786916, "loss": 1.1606, "step": 1275 }, { "epoch": 0.2272079772079772, "grad_norm": 0.44893527030944824, "learning_rate": 0.00019845854942956455, "loss": 1.1043, "step": 1276 }, { "epoch": 0.2273860398860399, "grad_norm": 0.40033379197120667, "learning_rate": 0.00019845610025134676, "loss": 1.1434, "step": 1277 }, { "epoch": 0.22756410256410256, "grad_norm": 0.4385402202606201, "learning_rate": 0.00019845364914408616, "loss": 0.9943, "step": 1278 }, { "epoch": 0.22774216524216523, "grad_norm": 0.42123618721961975, "learning_rate": 0.0001984511961078309, "loss": 1.0911, "step": 1279 }, { 
"epoch": 0.22792022792022792, "grad_norm": 0.5558577179908752, "learning_rate": 0.00019844874114262893, "loss": 1.3893, "step": 1280 }, { "epoch": 0.2280982905982906, "grad_norm": 0.3996453583240509, "learning_rate": 0.00019844628424852835, "loss": 0.8951, "step": 1281 }, { "epoch": 0.2282763532763533, "grad_norm": 0.3943425714969635, "learning_rate": 0.0001984438254255774, "loss": 1.0595, "step": 1282 }, { "epoch": 0.22845441595441596, "grad_norm": 0.4429021179676056, "learning_rate": 0.00019844136467382414, "loss": 1.0853, "step": 1283 }, { "epoch": 0.22863247863247863, "grad_norm": 0.4515686631202698, "learning_rate": 0.00019843890199331687, "loss": 1.0829, "step": 1284 }, { "epoch": 0.22881054131054132, "grad_norm": 0.5157768726348877, "learning_rate": 0.00019843643738410378, "loss": 1.334, "step": 1285 }, { "epoch": 0.228988603988604, "grad_norm": 0.45833173394203186, "learning_rate": 0.0001984339708462332, "loss": 1.1353, "step": 1286 }, { "epoch": 0.22916666666666666, "grad_norm": 0.46610337495803833, "learning_rate": 0.00019843150237975344, "loss": 1.1338, "step": 1287 }, { "epoch": 0.22934472934472935, "grad_norm": 0.5076978802680969, "learning_rate": 0.00019842903198471286, "loss": 1.1811, "step": 1288 }, { "epoch": 0.22952279202279202, "grad_norm": 0.4297824800014496, "learning_rate": 0.00019842655966115986, "loss": 1.1799, "step": 1289 }, { "epoch": 0.2297008547008547, "grad_norm": 0.5304586291313171, "learning_rate": 0.0001984240854091429, "loss": 1.1315, "step": 1290 }, { "epoch": 0.22987891737891739, "grad_norm": 0.45359212160110474, "learning_rate": 0.00019842160922871042, "loss": 1.1037, "step": 1291 }, { "epoch": 0.23005698005698005, "grad_norm": 0.4416881203651428, "learning_rate": 0.00019841913111991096, "loss": 1.122, "step": 1292 }, { "epoch": 0.23023504273504272, "grad_norm": 0.46682995557785034, "learning_rate": 0.0001984166510827931, "loss": 0.9808, "step": 1293 }, { "epoch": 0.23041310541310542, "grad_norm": 0.44172337651252747, 
"learning_rate": 0.00019841416911740538, "loss": 0.9167, "step": 1294 }, { "epoch": 0.23059116809116809, "grad_norm": 0.40562742948532104, "learning_rate": 0.0001984116852237965, "loss": 0.9547, "step": 1295 }, { "epoch": 0.23076923076923078, "grad_norm": 0.4040384888648987, "learning_rate": 0.00019840919940201503, "loss": 1.1039, "step": 1296 }, { "epoch": 0.23094729344729345, "grad_norm": 0.5094077587127686, "learning_rate": 0.00019840671165210973, "loss": 1.2283, "step": 1297 }, { "epoch": 0.23112535612535612, "grad_norm": 0.48553213477134705, "learning_rate": 0.00019840422197412938, "loss": 1.0927, "step": 1298 }, { "epoch": 0.23130341880341881, "grad_norm": 0.5197509527206421, "learning_rate": 0.00019840173036812266, "loss": 1.2154, "step": 1299 }, { "epoch": 0.23148148148148148, "grad_norm": 0.42069005966186523, "learning_rate": 0.0001983992368341385, "loss": 1.0076, "step": 1300 }, { "epoch": 0.23165954415954415, "grad_norm": 0.475204735994339, "learning_rate": 0.00019839674137222567, "loss": 1.1682, "step": 1301 }, { "epoch": 0.23183760683760685, "grad_norm": 0.55730140209198, "learning_rate": 0.0001983942439824331, "loss": 1.2948, "step": 1302 }, { "epoch": 0.23201566951566951, "grad_norm": 0.4533313512802124, "learning_rate": 0.00019839174466480973, "loss": 1.2691, "step": 1303 }, { "epoch": 0.23219373219373218, "grad_norm": 0.4733520746231079, "learning_rate": 0.0001983892434194045, "loss": 1.2232, "step": 1304 }, { "epoch": 0.23237179487179488, "grad_norm": 0.5085756182670593, "learning_rate": 0.00019838674024626643, "loss": 1.1347, "step": 1305 }, { "epoch": 0.23254985754985755, "grad_norm": 0.4679976999759674, "learning_rate": 0.00019838423514544456, "loss": 1.0018, "step": 1306 }, { "epoch": 0.23272792022792022, "grad_norm": 0.4234481751918793, "learning_rate": 0.00019838172811698795, "loss": 1.0472, "step": 1307 }, { "epoch": 0.2329059829059829, "grad_norm": 0.5749204158782959, "learning_rate": 0.00019837921916094579, "loss": 1.2239, "step": 1308 }, 
{ "epoch": 0.23308404558404558, "grad_norm": 0.46715882420539856, "learning_rate": 0.0001983767082773672, "loss": 1.1924, "step": 1309 }, { "epoch": 0.23326210826210828, "grad_norm": 0.5079745054244995, "learning_rate": 0.00019837419546630137, "loss": 1.1086, "step": 1310 }, { "epoch": 0.23344017094017094, "grad_norm": 0.4419243037700653, "learning_rate": 0.0001983716807277975, "loss": 1.1911, "step": 1311 }, { "epoch": 0.2336182336182336, "grad_norm": 0.5107570290565491, "learning_rate": 0.00019836916406190493, "loss": 1.1071, "step": 1312 }, { "epoch": 0.2337962962962963, "grad_norm": 0.5295659303665161, "learning_rate": 0.00019836664546867293, "loss": 1.2905, "step": 1313 }, { "epoch": 0.23397435897435898, "grad_norm": 0.4844837784767151, "learning_rate": 0.00019836412494815084, "loss": 1.3507, "step": 1314 }, { "epoch": 0.23415242165242164, "grad_norm": 0.6166049242019653, "learning_rate": 0.00019836160250038808, "loss": 1.2822, "step": 1315 }, { "epoch": 0.23433048433048434, "grad_norm": 0.3229198753833771, "learning_rate": 0.00019835907812543402, "loss": 0.4959, "step": 1316 }, { "epoch": 0.234508547008547, "grad_norm": 0.5788772702217102, "learning_rate": 0.00019835655182333815, "loss": 1.0832, "step": 1317 }, { "epoch": 0.23468660968660968, "grad_norm": 0.525705099105835, "learning_rate": 0.00019835402359414997, "loss": 1.0968, "step": 1318 }, { "epoch": 0.23486467236467237, "grad_norm": 0.5007779002189636, "learning_rate": 0.000198351493437919, "loss": 1.2788, "step": 1319 }, { "epoch": 0.23504273504273504, "grad_norm": 0.4276871383190155, "learning_rate": 0.00019834896135469484, "loss": 1.0419, "step": 1320 }, { "epoch": 0.2352207977207977, "grad_norm": 0.5359070301055908, "learning_rate": 0.00019834642734452708, "loss": 1.1308, "step": 1321 }, { "epoch": 0.2353988603988604, "grad_norm": 0.4854908883571625, "learning_rate": 0.0001983438914074654, "loss": 1.1211, "step": 1322 }, { "epoch": 0.23557692307692307, "grad_norm": 0.4913707375526428, 
"learning_rate": 0.0001983413535435594, "loss": 1.2392, "step": 1323 }, { "epoch": 0.23575498575498577, "grad_norm": 0.46755748987197876, "learning_rate": 0.0001983388137528589, "loss": 0.9348, "step": 1324 }, { "epoch": 0.23593304843304844, "grad_norm": 0.4592570960521698, "learning_rate": 0.0001983362720354136, "loss": 1.1339, "step": 1325 }, { "epoch": 0.2361111111111111, "grad_norm": 0.5121711492538452, "learning_rate": 0.00019833372839127335, "loss": 1.2973, "step": 1326 }, { "epoch": 0.2362891737891738, "grad_norm": 0.4809017479419708, "learning_rate": 0.000198331182820488, "loss": 0.9849, "step": 1327 }, { "epoch": 0.23646723646723647, "grad_norm": 0.42340895533561707, "learning_rate": 0.00019832863532310733, "loss": 1.0731, "step": 1328 }, { "epoch": 0.23664529914529914, "grad_norm": 0.5388045310974121, "learning_rate": 0.00019832608589918135, "loss": 1.0729, "step": 1329 }, { "epoch": 0.23682336182336183, "grad_norm": 0.43075770139694214, "learning_rate": 0.00019832353454875992, "loss": 1.1684, "step": 1330 }, { "epoch": 0.2370014245014245, "grad_norm": 0.554927408695221, "learning_rate": 0.00019832098127189313, "loss": 1.0842, "step": 1331 }, { "epoch": 0.23717948717948717, "grad_norm": 0.5359260439872742, "learning_rate": 0.0001983184260686309, "loss": 1.2399, "step": 1332 }, { "epoch": 0.23735754985754987, "grad_norm": 0.5141251087188721, "learning_rate": 0.0001983158689390234, "loss": 1.3752, "step": 1333 }, { "epoch": 0.23753561253561253, "grad_norm": 0.4578750431537628, "learning_rate": 0.00019831330988312067, "loss": 1.0965, "step": 1334 }, { "epoch": 0.2377136752136752, "grad_norm": 0.47974497079849243, "learning_rate": 0.00019831074890097286, "loss": 1.3379, "step": 1335 }, { "epoch": 0.2378917378917379, "grad_norm": 0.4618176817893982, "learning_rate": 0.00019830818599263014, "loss": 1.274, "step": 1336 }, { "epoch": 0.23806980056980057, "grad_norm": 0.4279816448688507, "learning_rate": 0.00019830562115814276, "loss": 0.996, "step": 1337 }, { 
"epoch": 0.23824786324786323, "grad_norm": 0.4255026876926422, "learning_rate": 0.0001983030543975609, "loss": 0.969, "step": 1338 }, { "epoch": 0.23842592592592593, "grad_norm": 0.4551412761211395, "learning_rate": 0.00019830048571093493, "loss": 1.0204, "step": 1339 }, { "epoch": 0.2386039886039886, "grad_norm": 0.4747903048992157, "learning_rate": 0.00019829791509831513, "loss": 1.1816, "step": 1340 }, { "epoch": 0.2387820512820513, "grad_norm": 0.47187140583992004, "learning_rate": 0.00019829534255975188, "loss": 1.1205, "step": 1341 }, { "epoch": 0.23896011396011396, "grad_norm": 0.49332180619239807, "learning_rate": 0.0001982927680952956, "loss": 1.2657, "step": 1342 }, { "epoch": 0.23913817663817663, "grad_norm": 0.5162837505340576, "learning_rate": 0.0001982901917049967, "loss": 1.2247, "step": 1343 }, { "epoch": 0.23931623931623933, "grad_norm": 0.43407055735588074, "learning_rate": 0.0001982876133889057, "loss": 1.0038, "step": 1344 }, { "epoch": 0.239494301994302, "grad_norm": 0.5132251977920532, "learning_rate": 0.00019828503314707306, "loss": 1.0678, "step": 1345 }, { "epoch": 0.23967236467236466, "grad_norm": 0.46295464038848877, "learning_rate": 0.00019828245097954937, "loss": 1.1802, "step": 1346 }, { "epoch": 0.23985042735042736, "grad_norm": 0.4682658314704895, "learning_rate": 0.00019827986688638523, "loss": 1.0249, "step": 1347 }, { "epoch": 0.24002849002849003, "grad_norm": 0.49990561604499817, "learning_rate": 0.00019827728086763125, "loss": 1.0691, "step": 1348 }, { "epoch": 0.2402065527065527, "grad_norm": 0.39090847969055176, "learning_rate": 0.00019827469292333806, "loss": 0.8367, "step": 1349 }, { "epoch": 0.2403846153846154, "grad_norm": 0.5023905634880066, "learning_rate": 0.00019827210305355645, "loss": 1.0675, "step": 1350 }, { "epoch": 0.24056267806267806, "grad_norm": 0.4744076430797577, "learning_rate": 0.00019826951125833715, "loss": 1.3166, "step": 1351 }, { "epoch": 0.24074074074074073, "grad_norm": 0.44914689660072327, 
"learning_rate": 0.00019826691753773088, "loss": 0.9818, "step": 1352 }, { "epoch": 0.24091880341880342, "grad_norm": 0.44391971826553345, "learning_rate": 0.00019826432189178853, "loss": 1.0448, "step": 1353 }, { "epoch": 0.2410968660968661, "grad_norm": 0.46102839708328247, "learning_rate": 0.00019826172432056086, "loss": 0.9952, "step": 1354 }, { "epoch": 0.2412749287749288, "grad_norm": 0.4796878695487976, "learning_rate": 0.00019825912482409884, "loss": 1.0977, "step": 1355 }, { "epoch": 0.24145299145299146, "grad_norm": 0.5003768801689148, "learning_rate": 0.0001982565234024534, "loss": 1.3149, "step": 1356 }, { "epoch": 0.24163105413105412, "grad_norm": 0.43475663661956787, "learning_rate": 0.00019825392005567551, "loss": 1.0527, "step": 1357 }, { "epoch": 0.24180911680911682, "grad_norm": 0.46120527386665344, "learning_rate": 0.00019825131478381613, "loss": 1.2333, "step": 1358 }, { "epoch": 0.2419871794871795, "grad_norm": 0.43748101592063904, "learning_rate": 0.00019824870758692638, "loss": 0.9788, "step": 1359 }, { "epoch": 0.24216524216524216, "grad_norm": 0.5275192856788635, "learning_rate": 0.00019824609846505727, "loss": 1.1473, "step": 1360 }, { "epoch": 0.24234330484330485, "grad_norm": 0.346463143825531, "learning_rate": 0.00019824348741825993, "loss": 0.6824, "step": 1361 }, { "epoch": 0.24252136752136752, "grad_norm": 0.5004115700721741, "learning_rate": 0.00019824087444658556, "loss": 1.1853, "step": 1362 }, { "epoch": 0.2426994301994302, "grad_norm": 0.42746666073799133, "learning_rate": 0.00019823825955008533, "loss": 0.9355, "step": 1363 }, { "epoch": 0.24287749287749288, "grad_norm": 0.4099743068218231, "learning_rate": 0.00019823564272881047, "loss": 1.0753, "step": 1364 }, { "epoch": 0.24305555555555555, "grad_norm": 0.5262967944145203, "learning_rate": 0.00019823302398281226, "loss": 1.2324, "step": 1365 }, { "epoch": 0.24323361823361822, "grad_norm": 0.436069518327713, "learning_rate": 0.000198230403312142, "loss": 1.1887, "step": 1366 
}, { "epoch": 0.24341168091168092, "grad_norm": 0.38252368569374084, "learning_rate": 0.00019822778071685107, "loss": 1.0211, "step": 1367 }, { "epoch": 0.24358974358974358, "grad_norm": 0.48024141788482666, "learning_rate": 0.00019822515619699081, "loss": 1.065, "step": 1368 }, { "epoch": 0.24376780626780628, "grad_norm": 0.47421589493751526, "learning_rate": 0.00019822252975261267, "loss": 1.0433, "step": 1369 }, { "epoch": 0.24394586894586895, "grad_norm": 0.46094807982444763, "learning_rate": 0.00019821990138376808, "loss": 1.1427, "step": 1370 }, { "epoch": 0.24412393162393162, "grad_norm": 0.5093680620193481, "learning_rate": 0.00019821727109050856, "loss": 1.1086, "step": 1371 }, { "epoch": 0.2443019943019943, "grad_norm": 0.41084879636764526, "learning_rate": 0.00019821463887288566, "loss": 1.0068, "step": 1372 }, { "epoch": 0.24448005698005698, "grad_norm": 0.4991084635257721, "learning_rate": 0.0001982120047309509, "loss": 1.1884, "step": 1373 }, { "epoch": 0.24465811965811965, "grad_norm": 0.39198383688926697, "learning_rate": 0.00019820936866475595, "loss": 0.9776, "step": 1374 }, { "epoch": 0.24483618233618235, "grad_norm": 0.4517424702644348, "learning_rate": 0.00019820673067435244, "loss": 1.1491, "step": 1375 }, { "epoch": 0.245014245014245, "grad_norm": 0.45881983637809753, "learning_rate": 0.00019820409075979202, "loss": 1.1198, "step": 1376 }, { "epoch": 0.24519230769230768, "grad_norm": 0.4498792290687561, "learning_rate": 0.00019820144892112646, "loss": 1.0897, "step": 1377 }, { "epoch": 0.24537037037037038, "grad_norm": 0.4128037393093109, "learning_rate": 0.00019819880515840752, "loss": 0.9415, "step": 1378 }, { "epoch": 0.24554843304843305, "grad_norm": 0.4340885281562805, "learning_rate": 0.00019819615947168698, "loss": 1.201, "step": 1379 }, { "epoch": 0.24572649572649571, "grad_norm": 0.43814027309417725, "learning_rate": 0.00019819351186101667, "loss": 1.1039, "step": 1380 }, { "epoch": 0.2459045584045584, "grad_norm": 
0.40115082263946533, "learning_rate": 0.00019819086232644845, "loss": 1.2599, "step": 1381 }, { "epoch": 0.24608262108262108, "grad_norm": 0.4947351813316345, "learning_rate": 0.00019818821086803426, "loss": 1.252, "step": 1382 }, { "epoch": 0.24626068376068377, "grad_norm": 0.45179441571235657, "learning_rate": 0.0001981855574858261, "loss": 1.1323, "step": 1383 }, { "epoch": 0.24643874643874644, "grad_norm": 0.47159844636917114, "learning_rate": 0.00019818290217987587, "loss": 1.2053, "step": 1384 }, { "epoch": 0.2466168091168091, "grad_norm": 0.4358448386192322, "learning_rate": 0.0001981802449502356, "loss": 1.1174, "step": 1385 }, { "epoch": 0.2467948717948718, "grad_norm": 0.4588233530521393, "learning_rate": 0.00019817758579695745, "loss": 1.1098, "step": 1386 }, { "epoch": 0.24697293447293447, "grad_norm": 0.4955112636089325, "learning_rate": 0.00019817492472009338, "loss": 1.258, "step": 1387 }, { "epoch": 0.24715099715099714, "grad_norm": 0.4226941764354706, "learning_rate": 0.00019817226171969565, "loss": 1.0976, "step": 1388 }, { "epoch": 0.24732905982905984, "grad_norm": 0.4076840579509735, "learning_rate": 0.00019816959679581637, "loss": 1.0121, "step": 1389 }, { "epoch": 0.2475071225071225, "grad_norm": 0.4395063519477844, "learning_rate": 0.0001981669299485078, "loss": 1.3153, "step": 1390 }, { "epoch": 0.24768518518518517, "grad_norm": 0.41010400652885437, "learning_rate": 0.0001981642611778221, "loss": 1.0717, "step": 1391 }, { "epoch": 0.24786324786324787, "grad_norm": 0.43459352850914, "learning_rate": 0.00019816159048381167, "loss": 1.1077, "step": 1392 }, { "epoch": 0.24804131054131054, "grad_norm": 0.46291449666023254, "learning_rate": 0.00019815891786652875, "loss": 1.0257, "step": 1393 }, { "epoch": 0.2482193732193732, "grad_norm": 0.46408146619796753, "learning_rate": 0.00019815624332602578, "loss": 0.7899, "step": 1394 }, { "epoch": 0.2483974358974359, "grad_norm": 0.4763357937335968, "learning_rate": 0.00019815356686235508, "loss": 
0.9857, "step": 1395 }, { "epoch": 0.24857549857549857, "grad_norm": 0.4766457676887512, "learning_rate": 0.00019815088847556918, "loss": 1.0589, "step": 1396 }, { "epoch": 0.24875356125356127, "grad_norm": 0.4486583173274994, "learning_rate": 0.0001981482081657205, "loss": 1.2572, "step": 1397 }, { "epoch": 0.24893162393162394, "grad_norm": 0.468878835439682, "learning_rate": 0.00019814552593286155, "loss": 1.101, "step": 1398 }, { "epoch": 0.2491096866096866, "grad_norm": 0.4230278730392456, "learning_rate": 0.0001981428417770449, "loss": 0.9457, "step": 1399 }, { "epoch": 0.2492877492877493, "grad_norm": 0.45630761981010437, "learning_rate": 0.00019814015569832315, "loss": 1.0665, "step": 1400 }, { "epoch": 0.24946581196581197, "grad_norm": 0.5780113935470581, "learning_rate": 0.00019813746769674893, "loss": 1.1064, "step": 1401 }, { "epoch": 0.24964387464387464, "grad_norm": 0.4343436658382416, "learning_rate": 0.0001981347777723749, "loss": 1.1132, "step": 1402 }, { "epoch": 0.24982193732193733, "grad_norm": 0.4879056513309479, "learning_rate": 0.0001981320859252537, "loss": 1.1301, "step": 1403 }, { "epoch": 0.25, "grad_norm": 0.5248328447341919, "learning_rate": 0.00019812939215543818, "loss": 1.1468, "step": 1404 }, { "epoch": 0.25, "eval_loss": 1.115895390510559, "eval_runtime": 25.0474, "eval_samples_per_second": 41.561, "eval_steps_per_second": 20.801, "step": 1404 }, { "epoch": 0.2501780626780627, "grad_norm": 0.5076769590377808, "learning_rate": 0.00019812669646298106, "loss": 1.1428, "step": 1405 }, { "epoch": 0.25035612535612534, "grad_norm": 0.5510252714157104, "learning_rate": 0.00019812399884793514, "loss": 1.3383, "step": 1406 }, { "epoch": 0.25053418803418803, "grad_norm": 0.48918986320495605, "learning_rate": 0.0001981212993103533, "loss": 1.1507, "step": 1407 }, { "epoch": 0.25071225071225073, "grad_norm": 0.4678935110569, "learning_rate": 0.00019811859785028846, "loss": 1.13, "step": 1408 }, { "epoch": 0.25089031339031337, "grad_norm": 
0.5155254602432251, "learning_rate": 0.0001981158944677935, "loss": 1.1194, "step": 1409 }, { "epoch": 0.25106837606837606, "grad_norm": 0.4533839523792267, "learning_rate": 0.00019811318916292142, "loss": 0.9464, "step": 1410 }, { "epoch": 0.25124643874643876, "grad_norm": 0.5142433047294617, "learning_rate": 0.00019811048193572517, "loss": 1.0837, "step": 1411 }, { "epoch": 0.2514245014245014, "grad_norm": 0.4330446124076843, "learning_rate": 0.00019810777278625788, "loss": 0.9117, "step": 1412 }, { "epoch": 0.2516025641025641, "grad_norm": 0.44806256890296936, "learning_rate": 0.00019810506171457254, "loss": 1.1643, "step": 1413 }, { "epoch": 0.2517806267806268, "grad_norm": 0.43526285886764526, "learning_rate": 0.00019810234872072235, "loss": 0.9776, "step": 1414 }, { "epoch": 0.25195868945868943, "grad_norm": 0.47394511103630066, "learning_rate": 0.00019809963380476039, "loss": 1.0935, "step": 1415 }, { "epoch": 0.25213675213675213, "grad_norm": 0.48961278796195984, "learning_rate": 0.00019809691696673993, "loss": 1.179, "step": 1416 }, { "epoch": 0.2523148148148148, "grad_norm": 0.43153589963912964, "learning_rate": 0.00019809419820671412, "loss": 0.906, "step": 1417 }, { "epoch": 0.25249287749287747, "grad_norm": 0.41187527775764465, "learning_rate": 0.00019809147752473632, "loss": 0.899, "step": 1418 }, { "epoch": 0.25267094017094016, "grad_norm": 0.5003183484077454, "learning_rate": 0.00019808875492085973, "loss": 1.0606, "step": 1419 }, { "epoch": 0.25284900284900286, "grad_norm": 0.4430316984653473, "learning_rate": 0.00019808603039513778, "loss": 0.9167, "step": 1420 }, { "epoch": 0.25302706552706555, "grad_norm": 0.4577699601650238, "learning_rate": 0.00019808330394762382, "loss": 1.1184, "step": 1421 }, { "epoch": 0.2532051282051282, "grad_norm": 0.42656826972961426, "learning_rate": 0.0001980805755783713, "loss": 0.9335, "step": 1422 }, { "epoch": 0.2533831908831909, "grad_norm": 0.40980881452560425, "learning_rate": 0.0001980778452874336, "loss": 
0.9756, "step": 1423 }, { "epoch": 0.2535612535612536, "grad_norm": 0.5752090811729431, "learning_rate": 0.00019807511307486423, "loss": 1.1694, "step": 1424 }, { "epoch": 0.2537393162393162, "grad_norm": 0.5000349283218384, "learning_rate": 0.00019807237894071681, "loss": 0.9515, "step": 1425 }, { "epoch": 0.2539173789173789, "grad_norm": 0.5159069299697876, "learning_rate": 0.00019806964288504483, "loss": 1.4014, "step": 1426 }, { "epoch": 0.2540954415954416, "grad_norm": 0.5377941131591797, "learning_rate": 0.00019806690490790194, "loss": 1.2832, "step": 1427 }, { "epoch": 0.25427350427350426, "grad_norm": 0.4565938711166382, "learning_rate": 0.00019806416500934174, "loss": 1.0629, "step": 1428 }, { "epoch": 0.25445156695156695, "grad_norm": 0.49867144227027893, "learning_rate": 0.00019806142318941797, "loss": 1.2011, "step": 1429 }, { "epoch": 0.25462962962962965, "grad_norm": 0.5111994743347168, "learning_rate": 0.00019805867944818427, "loss": 0.8925, "step": 1430 }, { "epoch": 0.2548076923076923, "grad_norm": 0.5204268097877502, "learning_rate": 0.00019805593378569448, "loss": 1.2956, "step": 1431 }, { "epoch": 0.254985754985755, "grad_norm": 0.3889026939868927, "learning_rate": 0.00019805318620200234, "loss": 1.0355, "step": 1432 }, { "epoch": 0.2551638176638177, "grad_norm": 0.46825656294822693, "learning_rate": 0.00019805043669716174, "loss": 1.0444, "step": 1433 }, { "epoch": 0.2553418803418803, "grad_norm": 0.4509420394897461, "learning_rate": 0.00019804768527122648, "loss": 1.0423, "step": 1434 }, { "epoch": 0.255519943019943, "grad_norm": 0.4514774978160858, "learning_rate": 0.0001980449319242505, "loss": 1.1588, "step": 1435 }, { "epoch": 0.2556980056980057, "grad_norm": 0.43019044399261475, "learning_rate": 0.0001980421766562878, "loss": 0.9939, "step": 1436 }, { "epoch": 0.25587606837606836, "grad_norm": 0.5056091547012329, "learning_rate": 0.00019803941946739228, "loss": 1.1238, "step": 1437 }, { "epoch": 0.25605413105413105, "grad_norm": 
0.48664605617523193, "learning_rate": 0.000198036660357618, "loss": 1.0702, "step": 1438 }, { "epoch": 0.25623219373219375, "grad_norm": 0.4500972032546997, "learning_rate": 0.000198033899327019, "loss": 0.9365, "step": 1439 }, { "epoch": 0.2564102564102564, "grad_norm": 0.4800589382648468, "learning_rate": 0.0001980311363756494, "loss": 1.1159, "step": 1440 }, { "epoch": 0.2565883190883191, "grad_norm": 0.3486495316028595, "learning_rate": 0.0001980283715035633, "loss": 0.6029, "step": 1441 }, { "epoch": 0.2567663817663818, "grad_norm": 0.46258702874183655, "learning_rate": 0.00019802560471081493, "loss": 1.025, "step": 1442 }, { "epoch": 0.2569444444444444, "grad_norm": 0.4846673607826233, "learning_rate": 0.00019802283599745844, "loss": 1.1105, "step": 1443 }, { "epoch": 0.2571225071225071, "grad_norm": 0.4586990475654602, "learning_rate": 0.00019802006536354813, "loss": 0.9897, "step": 1444 }, { "epoch": 0.2573005698005698, "grad_norm": 0.5177786350250244, "learning_rate": 0.00019801729280913825, "loss": 1.2558, "step": 1445 }, { "epoch": 0.25747863247863245, "grad_norm": 0.43213751912117004, "learning_rate": 0.00019801451833428312, "loss": 1.0961, "step": 1446 }, { "epoch": 0.25765669515669515, "grad_norm": 0.42974478006362915, "learning_rate": 0.00019801174193903714, "loss": 1.0659, "step": 1447 }, { "epoch": 0.25783475783475784, "grad_norm": 0.4424504339694977, "learning_rate": 0.00019800896362345464, "loss": 0.9805, "step": 1448 }, { "epoch": 0.25801282051282054, "grad_norm": 0.4734833836555481, "learning_rate": 0.0001980061833875901, "loss": 1.255, "step": 1449 }, { "epoch": 0.2581908831908832, "grad_norm": 0.41024845838546753, "learning_rate": 0.000198003401231498, "loss": 1.0908, "step": 1450 }, { "epoch": 0.2583689458689459, "grad_norm": 0.43603816628456116, "learning_rate": 0.00019800061715523283, "loss": 1.0611, "step": 1451 }, { "epoch": 0.25854700854700857, "grad_norm": 0.4871339499950409, "learning_rate": 0.00019799783115884915, "loss": 1.1851, 
"step": 1452 }, { "epoch": 0.2587250712250712, "grad_norm": 0.49758270382881165, "learning_rate": 0.00019799504324240157, "loss": 1.1936, "step": 1453 }, { "epoch": 0.2589031339031339, "grad_norm": 0.4201010763645172, "learning_rate": 0.00019799225340594466, "loss": 1.1567, "step": 1454 }, { "epoch": 0.2590811965811966, "grad_norm": 0.4200313091278076, "learning_rate": 0.00019798946164953309, "loss": 0.9666, "step": 1455 }, { "epoch": 0.25925925925925924, "grad_norm": 0.43001702427864075, "learning_rate": 0.0001979866679732216, "loss": 1.0104, "step": 1456 }, { "epoch": 0.25943732193732194, "grad_norm": 0.46733465790748596, "learning_rate": 0.0001979838723770649, "loss": 1.0927, "step": 1457 }, { "epoch": 0.25961538461538464, "grad_norm": 0.4513280391693115, "learning_rate": 0.00019798107486111773, "loss": 1.0282, "step": 1458 }, { "epoch": 0.2597934472934473, "grad_norm": 0.40411749482154846, "learning_rate": 0.00019797827542543495, "loss": 1.0789, "step": 1459 }, { "epoch": 0.25997150997151, "grad_norm": 0.4359099268913269, "learning_rate": 0.0001979754740700714, "loss": 1.0616, "step": 1460 }, { "epoch": 0.26014957264957267, "grad_norm": 0.4979047477245331, "learning_rate": 0.00019797267079508198, "loss": 1.2948, "step": 1461 }, { "epoch": 0.2603276353276353, "grad_norm": 0.44698619842529297, "learning_rate": 0.0001979698656005216, "loss": 0.9198, "step": 1462 }, { "epoch": 0.260505698005698, "grad_norm": 0.48437631130218506, "learning_rate": 0.00019796705848644516, "loss": 1.3207, "step": 1463 }, { "epoch": 0.2606837606837607, "grad_norm": 0.4382587671279907, "learning_rate": 0.00019796424945290778, "loss": 1.1315, "step": 1464 }, { "epoch": 0.26086182336182334, "grad_norm": 0.4565944969654083, "learning_rate": 0.0001979614384999644, "loss": 1.1893, "step": 1465 }, { "epoch": 0.26103988603988604, "grad_norm": 0.4705163836479187, "learning_rate": 0.00019795862562767017, "loss": 1.1132, "step": 1466 }, { "epoch": 0.26121794871794873, "grad_norm": 
0.525184690952301, "learning_rate": 0.00019795581083608012, "loss": 1.2111, "step": 1467 }, { "epoch": 0.2613960113960114, "grad_norm": 0.45215457677841187, "learning_rate": 0.00019795299412524945, "loss": 1.1851, "step": 1468 }, { "epoch": 0.26157407407407407, "grad_norm": 0.4336663484573364, "learning_rate": 0.00019795017549523335, "loss": 1.0147, "step": 1469 }, { "epoch": 0.26175213675213677, "grad_norm": 0.5327649712562561, "learning_rate": 0.00019794735494608703, "loss": 1.1743, "step": 1470 }, { "epoch": 0.2619301994301994, "grad_norm": 0.49972307682037354, "learning_rate": 0.00019794453247786578, "loss": 1.1624, "step": 1471 }, { "epoch": 0.2621082621082621, "grad_norm": 0.43475785851478577, "learning_rate": 0.00019794170809062485, "loss": 0.9888, "step": 1472 }, { "epoch": 0.2622863247863248, "grad_norm": 0.428838849067688, "learning_rate": 0.0001979388817844196, "loss": 0.9154, "step": 1473 }, { "epoch": 0.26246438746438744, "grad_norm": 0.508568286895752, "learning_rate": 0.00019793605355930544, "loss": 1.1679, "step": 1474 }, { "epoch": 0.26264245014245013, "grad_norm": 0.47791770100593567, "learning_rate": 0.00019793322341533776, "loss": 1.1375, "step": 1475 }, { "epoch": 0.26282051282051283, "grad_norm": 0.41909220814704895, "learning_rate": 0.00019793039135257196, "loss": 1.0235, "step": 1476 }, { "epoch": 0.26299857549857547, "grad_norm": 0.5564408302307129, "learning_rate": 0.00019792755737106361, "loss": 1.0756, "step": 1477 }, { "epoch": 0.26317663817663817, "grad_norm": 0.42813625931739807, "learning_rate": 0.0001979247214708682, "loss": 0.8213, "step": 1478 }, { "epoch": 0.26335470085470086, "grad_norm": 0.44495970010757446, "learning_rate": 0.00019792188365204126, "loss": 0.9654, "step": 1479 }, { "epoch": 0.26353276353276356, "grad_norm": 0.47473424673080444, "learning_rate": 0.00019791904391463846, "loss": 1.1643, "step": 1480 }, { "epoch": 0.2637108262108262, "grad_norm": 0.40189051628112793, "learning_rate": 0.0001979162022587154, "loss": 
0.8687, "step": 1481 }, { "epoch": 0.2638888888888889, "grad_norm": 0.44629937410354614, "learning_rate": 0.00019791335868432776, "loss": 1.0284, "step": 1482 }, { "epoch": 0.2640669515669516, "grad_norm": 0.511275053024292, "learning_rate": 0.00019791051319153124, "loss": 1.2217, "step": 1483 }, { "epoch": 0.26424501424501423, "grad_norm": 0.5136445164680481, "learning_rate": 0.00019790766578038163, "loss": 1.1129, "step": 1484 }, { "epoch": 0.2644230769230769, "grad_norm": 0.4450451135635376, "learning_rate": 0.00019790481645093469, "loss": 0.9912, "step": 1485 }, { "epoch": 0.2646011396011396, "grad_norm": 0.39455199241638184, "learning_rate": 0.00019790196520324621, "loss": 1.0887, "step": 1486 }, { "epoch": 0.26477920227920226, "grad_norm": 0.4444045126438141, "learning_rate": 0.00019789911203737216, "loss": 1.1559, "step": 1487 }, { "epoch": 0.26495726495726496, "grad_norm": 0.4769677221775055, "learning_rate": 0.0001978962569533683, "loss": 1.147, "step": 1488 }, { "epoch": 0.26513532763532766, "grad_norm": 0.40226617455482483, "learning_rate": 0.0001978933999512907, "loss": 1.0966, "step": 1489 }, { "epoch": 0.2653133903133903, "grad_norm": 0.4640974700450897, "learning_rate": 0.00019789054103119526, "loss": 1.1002, "step": 1490 }, { "epoch": 0.265491452991453, "grad_norm": 0.48251107335090637, "learning_rate": 0.00019788768019313806, "loss": 1.07, "step": 1491 }, { "epoch": 0.2656695156695157, "grad_norm": 0.4836949408054352, "learning_rate": 0.00019788481743717506, "loss": 1.2992, "step": 1492 }, { "epoch": 0.26584757834757833, "grad_norm": 0.4253857135772705, "learning_rate": 0.00019788195276336244, "loss": 1.1326, "step": 1493 }, { "epoch": 0.266025641025641, "grad_norm": 0.5161862373352051, "learning_rate": 0.0001978790861717563, "loss": 1.2131, "step": 1494 }, { "epoch": 0.2662037037037037, "grad_norm": 0.5223346948623657, "learning_rate": 0.00019787621766241274, "loss": 1.0933, "step": 1495 }, { "epoch": 0.26638176638176636, "grad_norm": 
0.37622541189193726, "learning_rate": 0.000197873347235388, "loss": 0.8919, "step": 1496 }, { "epoch": 0.26655982905982906, "grad_norm": 0.4425419569015503, "learning_rate": 0.0001978704748907384, "loss": 1.0411, "step": 1497 }, { "epoch": 0.26673789173789175, "grad_norm": 0.4536985456943512, "learning_rate": 0.00019786760062852015, "loss": 1.2747, "step": 1498 }, { "epoch": 0.2669159544159544, "grad_norm": 0.4998049736022949, "learning_rate": 0.00019786472444878955, "loss": 1.3214, "step": 1499 }, { "epoch": 0.2670940170940171, "grad_norm": 0.42104312777519226, "learning_rate": 0.00019786184635160295, "loss": 0.7878, "step": 1500 }, { "epoch": 0.2672720797720798, "grad_norm": 0.5354288220405579, "learning_rate": 0.00019785896633701678, "loss": 1.0642, "step": 1501 }, { "epoch": 0.2674501424501424, "grad_norm": 0.4681485891342163, "learning_rate": 0.00019785608440508744, "loss": 1.1737, "step": 1502 }, { "epoch": 0.2676282051282051, "grad_norm": 0.49107062816619873, "learning_rate": 0.0001978532005558714, "loss": 1.1507, "step": 1503 }, { "epoch": 0.2678062678062678, "grad_norm": 0.4173283576965332, "learning_rate": 0.0001978503147894252, "loss": 1.0538, "step": 1504 }, { "epoch": 0.26798433048433046, "grad_norm": 0.49354055523872375, "learning_rate": 0.0001978474271058053, "loss": 1.1043, "step": 1505 }, { "epoch": 0.26816239316239315, "grad_norm": 0.5787215232849121, "learning_rate": 0.00019784453750506834, "loss": 0.9245, "step": 1506 }, { "epoch": 0.26834045584045585, "grad_norm": 0.48982590436935425, "learning_rate": 0.00019784164598727095, "loss": 1.2007, "step": 1507 }, { "epoch": 0.26851851851851855, "grad_norm": 0.4971007704734802, "learning_rate": 0.00019783875255246973, "loss": 1.1174, "step": 1508 }, { "epoch": 0.2686965811965812, "grad_norm": 0.5200340151786804, "learning_rate": 0.00019783585720072142, "loss": 1.1967, "step": 1509 }, { "epoch": 0.2688746438746439, "grad_norm": 0.47911885380744934, "learning_rate": 0.00019783295993208271, "loss": 1.162, 
"step": 1510 }, { "epoch": 0.2690527065527066, "grad_norm": 0.4764275848865509, "learning_rate": 0.00019783006074661037, "loss": 1.1358, "step": 1511 }, { "epoch": 0.2692307692307692, "grad_norm": 0.478545606136322, "learning_rate": 0.00019782715964436124, "loss": 1.0096, "step": 1512 }, { "epoch": 0.2694088319088319, "grad_norm": 0.5512787699699402, "learning_rate": 0.00019782425662539212, "loss": 1.1799, "step": 1513 }, { "epoch": 0.2695868945868946, "grad_norm": 0.5495108962059021, "learning_rate": 0.00019782135168975988, "loss": 1.0959, "step": 1514 }, { "epoch": 0.26976495726495725, "grad_norm": 0.42052868008613586, "learning_rate": 0.0001978184448375215, "loss": 1.1872, "step": 1515 }, { "epoch": 0.26994301994301995, "grad_norm": 0.4994426965713501, "learning_rate": 0.0001978155360687339, "loss": 1.0568, "step": 1516 }, { "epoch": 0.27012108262108264, "grad_norm": 0.459577351808548, "learning_rate": 0.00019781262538345402, "loss": 1.0315, "step": 1517 }, { "epoch": 0.2702991452991453, "grad_norm": 0.4792841374874115, "learning_rate": 0.00019780971278173895, "loss": 1.2055, "step": 1518 }, { "epoch": 0.270477207977208, "grad_norm": 0.5017708539962769, "learning_rate": 0.00019780679826364575, "loss": 1.157, "step": 1519 }, { "epoch": 0.2706552706552707, "grad_norm": 0.5197349786758423, "learning_rate": 0.00019780388182923152, "loss": 0.9101, "step": 1520 }, { "epoch": 0.2708333333333333, "grad_norm": 0.4226742684841156, "learning_rate": 0.00019780096347855338, "loss": 1.0525, "step": 1521 }, { "epoch": 0.271011396011396, "grad_norm": 0.5058164596557617, "learning_rate": 0.00019779804321166852, "loss": 0.931, "step": 1522 }, { "epoch": 0.2711894586894587, "grad_norm": 0.44492244720458984, "learning_rate": 0.00019779512102863418, "loss": 1.0641, "step": 1523 }, { "epoch": 0.27136752136752135, "grad_norm": 0.5348989963531494, "learning_rate": 0.00019779219692950758, "loss": 1.1692, "step": 1524 }, { "epoch": 0.27154558404558404, "grad_norm": 0.4631774425506592, 
"learning_rate": 0.00019778927091434602, "loss": 1.0876, "step": 1525 }, { "epoch": 0.27172364672364674, "grad_norm": 0.45957499742507935, "learning_rate": 0.00019778634298320684, "loss": 0.9527, "step": 1526 }, { "epoch": 0.2719017094017094, "grad_norm": 0.4506755769252777, "learning_rate": 0.00019778341313614743, "loss": 1.086, "step": 1527 }, { "epoch": 0.2720797720797721, "grad_norm": 0.4900587797164917, "learning_rate": 0.00019778048137322513, "loss": 0.9911, "step": 1528 }, { "epoch": 0.27225783475783477, "grad_norm": 0.478127658367157, "learning_rate": 0.00019777754769449745, "loss": 1.2083, "step": 1529 }, { "epoch": 0.2724358974358974, "grad_norm": 0.47220897674560547, "learning_rate": 0.00019777461210002183, "loss": 1.0313, "step": 1530 }, { "epoch": 0.2726139601139601, "grad_norm": 0.4526277184486389, "learning_rate": 0.0001977716745898558, "loss": 1.2648, "step": 1531 }, { "epoch": 0.2727920227920228, "grad_norm": 0.42907601594924927, "learning_rate": 0.00019776873516405688, "loss": 0.8645, "step": 1532 }, { "epoch": 0.27297008547008544, "grad_norm": 0.43440163135528564, "learning_rate": 0.00019776579382268272, "loss": 0.9702, "step": 1533 }, { "epoch": 0.27314814814814814, "grad_norm": 0.48213550448417664, "learning_rate": 0.0001977628505657909, "loss": 0.998, "step": 1534 }, { "epoch": 0.27332621082621084, "grad_norm": 0.43385565280914307, "learning_rate": 0.00019775990539343914, "loss": 1.0575, "step": 1535 }, { "epoch": 0.27350427350427353, "grad_norm": 0.45706847310066223, "learning_rate": 0.00019775695830568507, "loss": 1.3024, "step": 1536 }, { "epoch": 0.27368233618233617, "grad_norm": 0.45769137144088745, "learning_rate": 0.00019775400930258652, "loss": 1.0987, "step": 1537 }, { "epoch": 0.27386039886039887, "grad_norm": 0.44682395458221436, "learning_rate": 0.00019775105838420117, "loss": 1.1327, "step": 1538 }, { "epoch": 0.27403846153846156, "grad_norm": 0.5923072099685669, "learning_rate": 0.00019774810555058694, "loss": 1.4766, "step": 
1539 }, { "epoch": 0.2742165242165242, "grad_norm": 0.4327206015586853, "learning_rate": 0.0001977451508018016, "loss": 1.1175, "step": 1540 }, { "epoch": 0.2743945868945869, "grad_norm": 0.48036691546440125, "learning_rate": 0.00019774219413790315, "loss": 1.1189, "step": 1541 }, { "epoch": 0.2745726495726496, "grad_norm": 0.41371914744377136, "learning_rate": 0.00019773923555894935, "loss": 1.1366, "step": 1542 }, { "epoch": 0.27475071225071224, "grad_norm": 0.4452378749847412, "learning_rate": 0.00019773627506499832, "loss": 0.9517, "step": 1543 }, { "epoch": 0.27492877492877493, "grad_norm": 0.469098299741745, "learning_rate": 0.00019773331265610802, "loss": 1.0848, "step": 1544 }, { "epoch": 0.27510683760683763, "grad_norm": 0.5390294790267944, "learning_rate": 0.00019773034833233646, "loss": 0.8589, "step": 1545 }, { "epoch": 0.27528490028490027, "grad_norm": 0.5368238091468811, "learning_rate": 0.00019772738209374174, "loss": 1.2954, "step": 1546 }, { "epoch": 0.27546296296296297, "grad_norm": 0.4705318510532379, "learning_rate": 0.00019772441394038198, "loss": 1.2252, "step": 1547 }, { "epoch": 0.27564102564102566, "grad_norm": 0.4682813286781311, "learning_rate": 0.00019772144387231533, "loss": 1.0855, "step": 1548 }, { "epoch": 0.2758190883190883, "grad_norm": 0.46876460313796997, "learning_rate": 0.0001977184718896, "loss": 1.1959, "step": 1549 }, { "epoch": 0.275997150997151, "grad_norm": 0.4172806441783905, "learning_rate": 0.00019771549799229416, "loss": 1.2166, "step": 1550 }, { "epoch": 0.2761752136752137, "grad_norm": 0.5088075399398804, "learning_rate": 0.0001977125221804562, "loss": 1.1285, "step": 1551 }, { "epoch": 0.27635327635327633, "grad_norm": 0.4728628396987915, "learning_rate": 0.0001977095444541443, "loss": 1.2985, "step": 1552 }, { "epoch": 0.27653133903133903, "grad_norm": 0.4431236684322357, "learning_rate": 0.00019770656481341684, "loss": 1.1298, "step": 1553 }, { "epoch": 0.2767094017094017, "grad_norm": 0.474065363407135, 
"learning_rate": 0.00019770358325833223, "loss": 1.1915, "step": 1554 }, { "epoch": 0.27688746438746437, "grad_norm": 0.45718875527381897, "learning_rate": 0.00019770059978894885, "loss": 1.0626, "step": 1555 }, { "epoch": 0.27706552706552706, "grad_norm": 0.49300211668014526, "learning_rate": 0.00019769761440532522, "loss": 1.0134, "step": 1556 }, { "epoch": 0.27724358974358976, "grad_norm": 0.4389498829841614, "learning_rate": 0.00019769462710751974, "loss": 1.0292, "step": 1557 }, { "epoch": 0.2774216524216524, "grad_norm": 0.47330448031425476, "learning_rate": 0.000197691637895591, "loss": 1.1273, "step": 1558 }, { "epoch": 0.2775997150997151, "grad_norm": 0.5322058200836182, "learning_rate": 0.00019768864676959755, "loss": 1.059, "step": 1559 }, { "epoch": 0.2777777777777778, "grad_norm": 0.4714536964893341, "learning_rate": 0.000197685653729598, "loss": 1.1987, "step": 1560 }, { "epoch": 0.27795584045584043, "grad_norm": 0.48687809705734253, "learning_rate": 0.00019768265877565097, "loss": 1.3206, "step": 1561 }, { "epoch": 0.2781339031339031, "grad_norm": 0.46066713333129883, "learning_rate": 0.00019767966190781518, "loss": 1.0845, "step": 1562 }, { "epoch": 0.2783119658119658, "grad_norm": 0.44372090697288513, "learning_rate": 0.00019767666312614935, "loss": 1.0942, "step": 1563 }, { "epoch": 0.27849002849002846, "grad_norm": 0.4615907073020935, "learning_rate": 0.00019767366243071216, "loss": 1.071, "step": 1564 }, { "epoch": 0.27866809116809116, "grad_norm": 0.502097487449646, "learning_rate": 0.0001976706598215625, "loss": 1.1164, "step": 1565 }, { "epoch": 0.27884615384615385, "grad_norm": 0.4371815621852875, "learning_rate": 0.00019766765529875913, "loss": 1.0252, "step": 1566 }, { "epoch": 0.27902421652421655, "grad_norm": 0.43035808205604553, "learning_rate": 0.00019766464886236093, "loss": 1.073, "step": 1567 }, { "epoch": 0.2792022792022792, "grad_norm": 0.49721601605415344, "learning_rate": 0.00019766164051242683, "loss": 1.0316, "step": 1568 }, { 
"epoch": 0.2793803418803419, "grad_norm": 0.44866231083869934, "learning_rate": 0.00019765863024901576, "loss": 1.0951, "step": 1569 }, { "epoch": 0.2795584045584046, "grad_norm": 0.46318337321281433, "learning_rate": 0.0001976556180721867, "loss": 0.9836, "step": 1570 }, { "epoch": 0.2797364672364672, "grad_norm": 0.4227696657180786, "learning_rate": 0.00019765260398199868, "loss": 1.0414, "step": 1571 }, { "epoch": 0.2799145299145299, "grad_norm": 0.6062980890274048, "learning_rate": 0.00019764958797851073, "loss": 1.137, "step": 1572 }, { "epoch": 0.2800925925925926, "grad_norm": 0.4856833219528198, "learning_rate": 0.00019764657006178196, "loss": 1.1361, "step": 1573 }, { "epoch": 0.28027065527065526, "grad_norm": 0.45612895488739014, "learning_rate": 0.00019764355023187146, "loss": 1.0005, "step": 1574 }, { "epoch": 0.28044871794871795, "grad_norm": 0.4143696129322052, "learning_rate": 0.00019764052848883845, "loss": 1.051, "step": 1575 }, { "epoch": 0.28062678062678065, "grad_norm": 0.4532071352005005, "learning_rate": 0.00019763750483274212, "loss": 1.0595, "step": 1576 }, { "epoch": 0.2808048433048433, "grad_norm": 0.4940357208251953, "learning_rate": 0.0001976344792636417, "loss": 1.0983, "step": 1577 }, { "epoch": 0.280982905982906, "grad_norm": 0.44405099749565125, "learning_rate": 0.0001976314517815965, "loss": 1.0846, "step": 1578 }, { "epoch": 0.2811609686609687, "grad_norm": 0.5508625507354736, "learning_rate": 0.00019762842238666578, "loss": 1.1722, "step": 1579 }, { "epoch": 0.2813390313390313, "grad_norm": 0.5241084694862366, "learning_rate": 0.00019762539107890894, "loss": 1.351, "step": 1580 }, { "epoch": 0.281517094017094, "grad_norm": 0.5307353734970093, "learning_rate": 0.00019762235785838537, "loss": 1.1868, "step": 1581 }, { "epoch": 0.2816951566951567, "grad_norm": 0.45697924494743347, "learning_rate": 0.00019761932272515447, "loss": 1.1982, "step": 1582 }, { "epoch": 0.28187321937321935, "grad_norm": 0.412483811378479, "learning_rate": 
0.00019761628567927574, "loss": 1.0433, "step": 1583 }, { "epoch": 0.28205128205128205, "grad_norm": 0.4614165425300598, "learning_rate": 0.00019761324672080868, "loss": 1.104, "step": 1584 }, { "epoch": 0.28222934472934474, "grad_norm": 0.47644901275634766, "learning_rate": 0.00019761020584981284, "loss": 1.1037, "step": 1585 }, { "epoch": 0.2824074074074074, "grad_norm": 0.4985184669494629, "learning_rate": 0.00019760716306634773, "loss": 1.2213, "step": 1586 }, { "epoch": 0.2825854700854701, "grad_norm": 0.508301317691803, "learning_rate": 0.00019760411837047305, "loss": 1.1315, "step": 1587 }, { "epoch": 0.2827635327635328, "grad_norm": 0.5346587300300598, "learning_rate": 0.00019760107176224845, "loss": 1.2281, "step": 1588 }, { "epoch": 0.2829415954415954, "grad_norm": 0.5106825232505798, "learning_rate": 0.00019759802324173357, "loss": 1.2904, "step": 1589 }, { "epoch": 0.2831196581196581, "grad_norm": 0.46458688378334045, "learning_rate": 0.00019759497280898817, "loss": 1.0861, "step": 1590 }, { "epoch": 0.2832977207977208, "grad_norm": 0.49115365743637085, "learning_rate": 0.00019759192046407201, "loss": 1.0529, "step": 1591 }, { "epoch": 0.28347578347578345, "grad_norm": 0.5114167332649231, "learning_rate": 0.0001975888662070449, "loss": 1.2555, "step": 1592 }, { "epoch": 0.28365384615384615, "grad_norm": 0.45844775438308716, "learning_rate": 0.0001975858100379667, "loss": 1.0662, "step": 1593 }, { "epoch": 0.28383190883190884, "grad_norm": 0.4684161841869354, "learning_rate": 0.00019758275195689727, "loss": 1.0537, "step": 1594 }, { "epoch": 0.28400997150997154, "grad_norm": 0.4816220998764038, "learning_rate": 0.0001975796919638965, "loss": 1.126, "step": 1595 }, { "epoch": 0.2841880341880342, "grad_norm": 0.46578118205070496, "learning_rate": 0.0001975766300590244, "loss": 0.9651, "step": 1596 }, { "epoch": 0.2843660968660969, "grad_norm": 0.4181675612926483, "learning_rate": 0.0001975735662423409, "loss": 1.0888, "step": 1597 }, { "epoch": 
0.28454415954415957, "grad_norm": 0.49417954683303833, "learning_rate": 0.00019757050051390609, "loss": 1.1878, "step": 1598 }, { "epoch": 0.2847222222222222, "grad_norm": 0.47264960408210754, "learning_rate": 0.00019756743287377998, "loss": 1.027, "step": 1599 }, { "epoch": 0.2849002849002849, "grad_norm": 0.47686338424682617, "learning_rate": 0.0001975643633220227, "loss": 1.1307, "step": 1600 }, { "epoch": 0.2850783475783476, "grad_norm": 0.5571266412734985, "learning_rate": 0.00019756129185869443, "loss": 0.984, "step": 1601 }, { "epoch": 0.28525641025641024, "grad_norm": 0.46942809224128723, "learning_rate": 0.00019755821848385527, "loss": 1.0397, "step": 1602 }, { "epoch": 0.28543447293447294, "grad_norm": 0.6325890421867371, "learning_rate": 0.00019755514319756551, "loss": 1.0918, "step": 1603 }, { "epoch": 0.28561253561253563, "grad_norm": 0.5297608375549316, "learning_rate": 0.00019755206599988533, "loss": 0.9911, "step": 1604 }, { "epoch": 0.2857905982905983, "grad_norm": 0.4736945331096649, "learning_rate": 0.00019754898689087512, "loss": 1.0786, "step": 1605 }, { "epoch": 0.28596866096866097, "grad_norm": 0.5048685669898987, "learning_rate": 0.00019754590587059512, "loss": 0.9834, "step": 1606 }, { "epoch": 0.28614672364672367, "grad_norm": 0.3823149502277374, "learning_rate": 0.00019754282293910574, "loss": 0.8341, "step": 1607 }, { "epoch": 0.2863247863247863, "grad_norm": 0.44071945548057556, "learning_rate": 0.00019753973809646738, "loss": 1.131, "step": 1608 }, { "epoch": 0.286502849002849, "grad_norm": 0.44182759523391724, "learning_rate": 0.00019753665134274043, "loss": 1.0321, "step": 1609 }, { "epoch": 0.2866809116809117, "grad_norm": 0.4486250877380371, "learning_rate": 0.00019753356267798546, "loss": 0.9941, "step": 1610 }, { "epoch": 0.28685897435897434, "grad_norm": 0.42796584963798523, "learning_rate": 0.00019753047210226292, "loss": 1.0235, "step": 1611 }, { "epoch": 0.28703703703703703, "grad_norm": 0.47294023633003235, "learning_rate": 
0.00019752737961563336, "loss": 1.11, "step": 1612 }, { "epoch": 0.28721509971509973, "grad_norm": 0.44550734758377075, "learning_rate": 0.00019752428521815742, "loss": 1.0849, "step": 1613 }, { "epoch": 0.28739316239316237, "grad_norm": 0.44189929962158203, "learning_rate": 0.0001975211889098957, "loss": 0.8904, "step": 1614 }, { "epoch": 0.28757122507122507, "grad_norm": 0.5302733182907104, "learning_rate": 0.00019751809069090885, "loss": 1.2348, "step": 1615 }, { "epoch": 0.28774928774928776, "grad_norm": 0.5951390862464905, "learning_rate": 0.00019751499056125762, "loss": 1.3035, "step": 1616 }, { "epoch": 0.2879273504273504, "grad_norm": 0.5431534647941589, "learning_rate": 0.0001975118885210027, "loss": 1.0016, "step": 1617 }, { "epoch": 0.2881054131054131, "grad_norm": 0.47301986813545227, "learning_rate": 0.00019750878457020489, "loss": 1.2245, "step": 1618 }, { "epoch": 0.2882834757834758, "grad_norm": 0.44785359501838684, "learning_rate": 0.00019750567870892497, "loss": 1.122, "step": 1619 }, { "epoch": 0.28846153846153844, "grad_norm": 0.49494361877441406, "learning_rate": 0.00019750257093722383, "loss": 0.9421, "step": 1620 }, { "epoch": 0.28863960113960113, "grad_norm": 0.4484521150588989, "learning_rate": 0.00019749946125516242, "loss": 1.2146, "step": 1621 }, { "epoch": 0.28881766381766383, "grad_norm": 0.4635269343852997, "learning_rate": 0.00019749634966280156, "loss": 0.976, "step": 1622 }, { "epoch": 0.28899572649572647, "grad_norm": 0.5532249808311462, "learning_rate": 0.00019749323616020226, "loss": 1.1818, "step": 1623 }, { "epoch": 0.28917378917378916, "grad_norm": 0.4730629622936249, "learning_rate": 0.00019749012074742552, "loss": 1.0321, "step": 1624 }, { "epoch": 0.28935185185185186, "grad_norm": 0.47437289357185364, "learning_rate": 0.0001974870034245324, "loss": 1.1572, "step": 1625 }, { "epoch": 0.28952991452991456, "grad_norm": 0.4796304404735565, "learning_rate": 0.00019748388419158394, "loss": 1.1667, "step": 1626 }, { "epoch": 
0.2897079772079772, "grad_norm": 0.42686304450035095, "learning_rate": 0.0001974807630486413, "loss": 0.9824, "step": 1627 }, { "epoch": 0.2898860398860399, "grad_norm": 0.4444865584373474, "learning_rate": 0.00019747763999576558, "loss": 1.2789, "step": 1628 }, { "epoch": 0.2900641025641026, "grad_norm": 0.5039985179901123, "learning_rate": 0.000197474515033018, "loss": 1.1488, "step": 1629 }, { "epoch": 0.29024216524216523, "grad_norm": 0.581479549407959, "learning_rate": 0.00019747138816045978, "loss": 1.1232, "step": 1630 }, { "epoch": 0.2904202279202279, "grad_norm": 0.5415821075439453, "learning_rate": 0.00019746825937815222, "loss": 1.2326, "step": 1631 }, { "epoch": 0.2905982905982906, "grad_norm": 0.45528364181518555, "learning_rate": 0.00019746512868615656, "loss": 1.0246, "step": 1632 }, { "epoch": 0.29077635327635326, "grad_norm": 0.5255574584007263, "learning_rate": 0.00019746199608453418, "loss": 1.0592, "step": 1633 }, { "epoch": 0.29095441595441596, "grad_norm": 0.5064096450805664, "learning_rate": 0.00019745886157334646, "loss": 1.3439, "step": 1634 }, { "epoch": 0.29113247863247865, "grad_norm": 0.500848650932312, "learning_rate": 0.00019745572515265475, "loss": 1.1212, "step": 1635 }, { "epoch": 0.2913105413105413, "grad_norm": 0.5229088068008423, "learning_rate": 0.00019745258682252062, "loss": 1.1019, "step": 1636 }, { "epoch": 0.291488603988604, "grad_norm": 0.4494398832321167, "learning_rate": 0.00019744944658300545, "loss": 1.1298, "step": 1637 }, { "epoch": 0.2916666666666667, "grad_norm": 0.48383277654647827, "learning_rate": 0.00019744630443417082, "loss": 1.206, "step": 1638 }, { "epoch": 0.2918447293447293, "grad_norm": 0.4870131313800812, "learning_rate": 0.00019744316037607828, "loss": 1.2096, "step": 1639 }, { "epoch": 0.292022792022792, "grad_norm": 0.4153090715408325, "learning_rate": 0.00019744001440878944, "loss": 1.0478, "step": 1640 }, { "epoch": 0.2922008547008547, "grad_norm": 0.4262249171733856, "learning_rate": 
0.0001974368665323659, "loss": 1.0393, "step": 1641 }, { "epoch": 0.29237891737891736, "grad_norm": 0.46131134033203125, "learning_rate": 0.00019743371674686938, "loss": 1.0908, "step": 1642 }, { "epoch": 0.29255698005698005, "grad_norm": 0.44877463579177856, "learning_rate": 0.0001974305650523616, "loss": 1.1906, "step": 1643 }, { "epoch": 0.29273504273504275, "grad_norm": 0.5199326276779175, "learning_rate": 0.00019742741144890432, "loss": 1.1147, "step": 1644 }, { "epoch": 0.2929131054131054, "grad_norm": 0.48142504692077637, "learning_rate": 0.00019742425593655924, "loss": 1.1951, "step": 1645 }, { "epoch": 0.2930911680911681, "grad_norm": 0.5672988891601562, "learning_rate": 0.0001974210985153883, "loss": 1.1817, "step": 1646 }, { "epoch": 0.2932692307692308, "grad_norm": 0.38135233521461487, "learning_rate": 0.00019741793918545326, "loss": 0.8567, "step": 1647 }, { "epoch": 0.2934472934472934, "grad_norm": 0.6153588891029358, "learning_rate": 0.0001974147779468161, "loss": 1.0593, "step": 1648 }, { "epoch": 0.2936253561253561, "grad_norm": 0.38935527205467224, "learning_rate": 0.0001974116147995387, "loss": 0.9907, "step": 1649 }, { "epoch": 0.2938034188034188, "grad_norm": 0.467351496219635, "learning_rate": 0.0001974084497436831, "loss": 1.091, "step": 1650 }, { "epoch": 0.29398148148148145, "grad_norm": 0.45613420009613037, "learning_rate": 0.00019740528277931128, "loss": 0.6789, "step": 1651 }, { "epoch": 0.29415954415954415, "grad_norm": 0.4045158326625824, "learning_rate": 0.00019740211390648524, "loss": 1.0727, "step": 1652 }, { "epoch": 0.29433760683760685, "grad_norm": 0.5122803449630737, "learning_rate": 0.00019739894312526714, "loss": 1.2297, "step": 1653 }, { "epoch": 0.29451566951566954, "grad_norm": 0.44304123520851135, "learning_rate": 0.00019739577043571908, "loss": 0.9562, "step": 1654 }, { "epoch": 0.2946937321937322, "grad_norm": 0.6070618629455566, "learning_rate": 0.00019739259583790322, "loss": 1.2745, "step": 1655 }, { "epoch": 
0.2948717948717949, "grad_norm": 0.48815637826919556, "learning_rate": 0.00019738941933188176, "loss": 1.0574, "step": 1656 }, { "epoch": 0.2950498575498576, "grad_norm": 0.5067802667617798, "learning_rate": 0.00019738624091771693, "loss": 1.1874, "step": 1657 }, { "epoch": 0.2952279202279202, "grad_norm": 0.4956928491592407, "learning_rate": 0.000197383060595471, "loss": 1.1085, "step": 1658 }, { "epoch": 0.2954059829059829, "grad_norm": 0.46313008666038513, "learning_rate": 0.00019737987836520633, "loss": 1.0548, "step": 1659 }, { "epoch": 0.2955840455840456, "grad_norm": 0.49944064021110535, "learning_rate": 0.0001973766942269852, "loss": 1.1485, "step": 1660 }, { "epoch": 0.29576210826210825, "grad_norm": 0.4743517339229584, "learning_rate": 0.00019737350818087003, "loss": 0.9279, "step": 1661 }, { "epoch": 0.29594017094017094, "grad_norm": 0.45935431122779846, "learning_rate": 0.00019737032022692326, "loss": 0.9574, "step": 1662 }, { "epoch": 0.29611823361823364, "grad_norm": 0.4550873637199402, "learning_rate": 0.00019736713036520734, "loss": 1.1642, "step": 1663 }, { "epoch": 0.2962962962962963, "grad_norm": 0.45252951979637146, "learning_rate": 0.00019736393859578474, "loss": 1.0113, "step": 1664 }, { "epoch": 0.296474358974359, "grad_norm": 0.5147238969802856, "learning_rate": 0.00019736074491871804, "loss": 1.1604, "step": 1665 }, { "epoch": 0.29665242165242167, "grad_norm": 0.5122934579849243, "learning_rate": 0.00019735754933406977, "loss": 0.9525, "step": 1666 }, { "epoch": 0.2968304843304843, "grad_norm": 0.438620001077652, "learning_rate": 0.00019735435184190257, "loss": 1.0728, "step": 1667 }, { "epoch": 0.297008547008547, "grad_norm": 0.41970670223236084, "learning_rate": 0.00019735115244227908, "loss": 0.9782, "step": 1668 }, { "epoch": 0.2971866096866097, "grad_norm": 0.5447152256965637, "learning_rate": 0.000197347951135262, "loss": 1.0633, "step": 1669 }, { "epoch": 0.29736467236467234, "grad_norm": 0.4846996068954468, "learning_rate": 
0.00019734474792091407, "loss": 0.9019, "step": 1670 }, { "epoch": 0.29754273504273504, "grad_norm": 0.4721437990665436, "learning_rate": 0.00019734154279929796, "loss": 1.1793, "step": 1671 }, { "epoch": 0.29772079772079774, "grad_norm": 0.4659852385520935, "learning_rate": 0.00019733833577047655, "loss": 1.1503, "step": 1672 }, { "epoch": 0.2978988603988604, "grad_norm": 0.3733183443546295, "learning_rate": 0.00019733512683451268, "loss": 0.7763, "step": 1673 }, { "epoch": 0.2980769230769231, "grad_norm": 0.4898292124271393, "learning_rate": 0.0001973319159914692, "loss": 1.3146, "step": 1674 }, { "epoch": 0.29825498575498577, "grad_norm": 0.41774725914001465, "learning_rate": 0.00019732870324140899, "loss": 1.2069, "step": 1675 }, { "epoch": 0.2984330484330484, "grad_norm": 0.4607912003993988, "learning_rate": 0.000197325488584395, "loss": 1.2255, "step": 1676 }, { "epoch": 0.2986111111111111, "grad_norm": 0.4692424237728119, "learning_rate": 0.00019732227202049025, "loss": 1.0793, "step": 1677 }, { "epoch": 0.2987891737891738, "grad_norm": 0.5925022959709167, "learning_rate": 0.00019731905354975778, "loss": 1.0297, "step": 1678 }, { "epoch": 0.29896723646723644, "grad_norm": 0.44047990441322327, "learning_rate": 0.00019731583317226056, "loss": 1.0982, "step": 1679 }, { "epoch": 0.29914529914529914, "grad_norm": 0.5863066911697388, "learning_rate": 0.0001973126108880618, "loss": 1.0284, "step": 1680 }, { "epoch": 0.29932336182336183, "grad_norm": 0.48962152004241943, "learning_rate": 0.00019730938669722457, "loss": 1.1861, "step": 1681 }, { "epoch": 0.29950142450142453, "grad_norm": 0.5445577502250671, "learning_rate": 0.00019730616059981205, "loss": 1.2574, "step": 1682 }, { "epoch": 0.29967948717948717, "grad_norm": 0.49327564239501953, "learning_rate": 0.00019730293259588743, "loss": 0.9578, "step": 1683 }, { "epoch": 0.29985754985754987, "grad_norm": 0.4252840578556061, "learning_rate": 0.00019729970268551398, "loss": 1.0083, "step": 1684 }, { "epoch": 
0.30003561253561256, "grad_norm": 0.5140926241874695, "learning_rate": 0.000197296470868755, "loss": 1.3263, "step": 1685 }, { "epoch": 0.3002136752136752, "grad_norm": 0.5143948197364807, "learning_rate": 0.00019729323714567375, "loss": 1.0424, "step": 1686 }, { "epoch": 0.3003917378917379, "grad_norm": 0.3811354339122772, "learning_rate": 0.00019729000151633367, "loss": 0.6319, "step": 1687 }, { "epoch": 0.3005698005698006, "grad_norm": 0.5249716639518738, "learning_rate": 0.0001972867639807981, "loss": 1.0173, "step": 1688 }, { "epoch": 0.30074786324786323, "grad_norm": 0.41832098364830017, "learning_rate": 0.00019728352453913048, "loss": 1.0503, "step": 1689 }, { "epoch": 0.30092592592592593, "grad_norm": 0.5961149334907532, "learning_rate": 0.00019728028319139428, "loss": 1.1843, "step": 1690 }, { "epoch": 0.3011039886039886, "grad_norm": 0.44083690643310547, "learning_rate": 0.00019727703993765303, "loss": 1.1311, "step": 1691 }, { "epoch": 0.30128205128205127, "grad_norm": 0.4368111491203308, "learning_rate": 0.00019727379477797022, "loss": 0.9463, "step": 1692 }, { "epoch": 0.30146011396011396, "grad_norm": 0.5289376974105835, "learning_rate": 0.00019727054771240954, "loss": 0.9836, "step": 1693 }, { "epoch": 0.30163817663817666, "grad_norm": 0.4132843613624573, "learning_rate": 0.00019726729874103448, "loss": 1.1052, "step": 1694 }, { "epoch": 0.3018162393162393, "grad_norm": 0.4919086992740631, "learning_rate": 0.00019726404786390877, "loss": 1.2219, "step": 1695 }, { "epoch": 0.301994301994302, "grad_norm": 0.42561691999435425, "learning_rate": 0.0001972607950810961, "loss": 1.0756, "step": 1696 }, { "epoch": 0.3021723646723647, "grad_norm": 0.5030396580696106, "learning_rate": 0.0001972575403926602, "loss": 1.2207, "step": 1697 }, { "epoch": 0.30235042735042733, "grad_norm": 0.4779801666736603, "learning_rate": 0.0001972542837986648, "loss": 1.194, "step": 1698 }, { "epoch": 0.30252849002849, "grad_norm": 0.45395568013191223, "learning_rate": 
0.00019725102529917377, "loss": 1.0775, "step": 1699 }, { "epoch": 0.3027065527065527, "grad_norm": 0.6540699005126953, "learning_rate": 0.0001972477648942509, "loss": 1.181, "step": 1700 }, { "epoch": 0.30288461538461536, "grad_norm": 0.46281275153160095, "learning_rate": 0.00019724450258396008, "loss": 0.629, "step": 1701 }, { "epoch": 0.30306267806267806, "grad_norm": 0.3452845811843872, "learning_rate": 0.00019724123836836527, "loss": 0.51, "step": 1702 }, { "epoch": 0.30324074074074076, "grad_norm": 0.4507991671562195, "learning_rate": 0.00019723797224753038, "loss": 1.0258, "step": 1703 }, { "epoch": 0.3034188034188034, "grad_norm": 0.5385412573814392, "learning_rate": 0.0001972347042215194, "loss": 1.0232, "step": 1704 }, { "epoch": 0.3035968660968661, "grad_norm": 0.4460466504096985, "learning_rate": 0.00019723143429039642, "loss": 1.1307, "step": 1705 }, { "epoch": 0.3037749287749288, "grad_norm": 0.5229718685150146, "learning_rate": 0.00019722816245422545, "loss": 1.0964, "step": 1706 }, { "epoch": 0.30395299145299143, "grad_norm": 0.4776979088783264, "learning_rate": 0.00019722488871307058, "loss": 1.2678, "step": 1707 }, { "epoch": 0.3041310541310541, "grad_norm": 0.5371831655502319, "learning_rate": 0.00019722161306699601, "loss": 1.2808, "step": 1708 }, { "epoch": 0.3043091168091168, "grad_norm": 0.45322108268737793, "learning_rate": 0.0001972183355160659, "loss": 1.0775, "step": 1709 }, { "epoch": 0.30448717948717946, "grad_norm": 0.5036569833755493, "learning_rate": 0.00019721505606034448, "loss": 1.1859, "step": 1710 }, { "epoch": 0.30466524216524216, "grad_norm": 0.5425969958305359, "learning_rate": 0.00019721177469989593, "loss": 1.0173, "step": 1711 }, { "epoch": 0.30484330484330485, "grad_norm": 0.5638980269432068, "learning_rate": 0.00019720849143478462, "loss": 1.182, "step": 1712 }, { "epoch": 0.30502136752136755, "grad_norm": 0.5160546898841858, "learning_rate": 0.00019720520626507486, "loss": 0.9853, "step": 1713 }, { "epoch": 
0.3051994301994302, "grad_norm": 0.5079004168510437, "learning_rate": 0.000197201919190831, "loss": 1.3154, "step": 1714 }, { "epoch": 0.3053774928774929, "grad_norm": 0.4590355455875397, "learning_rate": 0.00019719863021211745, "loss": 1.007, "step": 1715 }, { "epoch": 0.3055555555555556, "grad_norm": 0.49656423926353455, "learning_rate": 0.00019719533932899865, "loss": 1.2187, "step": 1716 }, { "epoch": 0.3057336182336182, "grad_norm": 0.46426209807395935, "learning_rate": 0.0001971920465415391, "loss": 1.3007, "step": 1717 }, { "epoch": 0.3059116809116809, "grad_norm": 0.5211917757987976, "learning_rate": 0.00019718875184980328, "loss": 1.2256, "step": 1718 }, { "epoch": 0.3060897435897436, "grad_norm": 0.42953309416770935, "learning_rate": 0.00019718545525385578, "loss": 1.2838, "step": 1719 }, { "epoch": 0.30626780626780625, "grad_norm": 0.4893105924129486, "learning_rate": 0.00019718215675376116, "loss": 1.052, "step": 1720 }, { "epoch": 0.30644586894586895, "grad_norm": 0.4833602011203766, "learning_rate": 0.00019717885634958405, "loss": 1.069, "step": 1721 }, { "epoch": 0.30662393162393164, "grad_norm": 0.502176821231842, "learning_rate": 0.0001971755540413891, "loss": 1.1659, "step": 1722 }, { "epoch": 0.3068019943019943, "grad_norm": 0.4648856818675995, "learning_rate": 0.00019717224982924108, "loss": 1.1873, "step": 1723 }, { "epoch": 0.306980056980057, "grad_norm": 0.405429869890213, "learning_rate": 0.00019716894371320465, "loss": 0.99, "step": 1724 }, { "epoch": 0.3071581196581197, "grad_norm": 0.4306945204734802, "learning_rate": 0.00019716563569334463, "loss": 0.8751, "step": 1725 }, { "epoch": 0.3073361823361823, "grad_norm": 0.49424824118614197, "learning_rate": 0.00019716232576972583, "loss": 0.9205, "step": 1726 }, { "epoch": 0.307514245014245, "grad_norm": 0.5044034123420715, "learning_rate": 0.00019715901394241306, "loss": 1.2042, "step": 1727 }, { "epoch": 0.3076923076923077, "grad_norm": 0.512180507183075, "learning_rate": 
0.00019715570021147126, "loss": 1.1644, "step": 1728 }, { "epoch": 0.30787037037037035, "grad_norm": 0.4377981126308441, "learning_rate": 0.00019715238457696538, "loss": 1.1625, "step": 1729 }, { "epoch": 0.30804843304843305, "grad_norm": 0.49107855558395386, "learning_rate": 0.00019714906703896027, "loss": 1.1037, "step": 1730 }, { "epoch": 0.30822649572649574, "grad_norm": 0.47342559695243835, "learning_rate": 0.00019714574759752105, "loss": 1.3186, "step": 1731 }, { "epoch": 0.3084045584045584, "grad_norm": 0.487177312374115, "learning_rate": 0.0001971424262527127, "loss": 1.1196, "step": 1732 }, { "epoch": 0.3085826210826211, "grad_norm": 0.5290025472640991, "learning_rate": 0.0001971391030046003, "loss": 1.2103, "step": 1733 }, { "epoch": 0.3087606837606838, "grad_norm": 0.4587760269641876, "learning_rate": 0.00019713577785324896, "loss": 1.1017, "step": 1734 }, { "epoch": 0.3089387464387464, "grad_norm": 0.45323294401168823, "learning_rate": 0.00019713245079872388, "loss": 1.0, "step": 1735 }, { "epoch": 0.3091168091168091, "grad_norm": 0.43414804339408875, "learning_rate": 0.00019712912184109013, "loss": 1.0341, "step": 1736 }, { "epoch": 0.3092948717948718, "grad_norm": 0.49604663252830505, "learning_rate": 0.00019712579098041304, "loss": 0.9437, "step": 1737 }, { "epoch": 0.30947293447293445, "grad_norm": 0.48580703139305115, "learning_rate": 0.00019712245821675785, "loss": 1.2622, "step": 1738 }, { "epoch": 0.30965099715099714, "grad_norm": 0.45333603024482727, "learning_rate": 0.00019711912355018982, "loss": 1.2063, "step": 1739 }, { "epoch": 0.30982905982905984, "grad_norm": 0.5990764498710632, "learning_rate": 0.00019711578698077432, "loss": 1.5097, "step": 1740 }, { "epoch": 0.31000712250712253, "grad_norm": 0.4386102259159088, "learning_rate": 0.0001971124485085767, "loss": 1.1283, "step": 1741 }, { "epoch": 0.3101851851851852, "grad_norm": 0.4476035237312317, "learning_rate": 0.00019710910813366242, "loss": 0.8922, "step": 1742 }, { "epoch": 
0.31036324786324787, "grad_norm": 0.5276228785514832, "learning_rate": 0.00019710576585609685, "loss": 1.2373, "step": 1743 }, { "epoch": 0.31054131054131057, "grad_norm": 0.4885637164115906, "learning_rate": 0.00019710242167594557, "loss": 1.0881, "step": 1744 }, { "epoch": 0.3107193732193732, "grad_norm": 0.421132355928421, "learning_rate": 0.000197099075593274, "loss": 1.0544, "step": 1745 }, { "epoch": 0.3108974358974359, "grad_norm": 0.5257927179336548, "learning_rate": 0.00019709572760814777, "loss": 1.265, "step": 1746 }, { "epoch": 0.3110754985754986, "grad_norm": 0.5164850950241089, "learning_rate": 0.00019709237772063247, "loss": 0.9593, "step": 1747 }, { "epoch": 0.31125356125356124, "grad_norm": 0.5176383256912231, "learning_rate": 0.00019708902593079374, "loss": 1.0194, "step": 1748 }, { "epoch": 0.31143162393162394, "grad_norm": 0.4620790481567383, "learning_rate": 0.00019708567223869716, "loss": 0.9241, "step": 1749 }, { "epoch": 0.31160968660968663, "grad_norm": 0.48307979106903076, "learning_rate": 0.00019708231664440854, "loss": 1.2314, "step": 1750 }, { "epoch": 0.31178774928774927, "grad_norm": 0.4931468069553375, "learning_rate": 0.00019707895914799364, "loss": 1.2065, "step": 1751 }, { "epoch": 0.31196581196581197, "grad_norm": 0.5035979747772217, "learning_rate": 0.00019707559974951818, "loss": 1.1867, "step": 1752 }, { "epoch": 0.31214387464387466, "grad_norm": 0.47543632984161377, "learning_rate": 0.00019707223844904795, "loss": 1.0603, "step": 1753 }, { "epoch": 0.3123219373219373, "grad_norm": 0.49929797649383545, "learning_rate": 0.00019706887524664892, "loss": 1.0597, "step": 1754 }, { "epoch": 0.3125, "grad_norm": 0.5075222253799438, "learning_rate": 0.00019706551014238687, "loss": 1.1398, "step": 1755 }, { "epoch": 0.3126780626780627, "grad_norm": 0.5096884369850159, "learning_rate": 0.00019706214313632784, "loss": 1.1382, "step": 1756 }, { "epoch": 0.31285612535612534, "grad_norm": 0.4629988372325897, "learning_rate": 
0.0001970587742285377, "loss": 1.0009, "step": 1757 }, { "epoch": 0.31303418803418803, "grad_norm": 0.5244084596633911, "learning_rate": 0.00019705540341908253, "loss": 1.047, "step": 1758 }, { "epoch": 0.31321225071225073, "grad_norm": 0.5136716961860657, "learning_rate": 0.00019705203070802832, "loss": 1.29, "step": 1759 }, { "epoch": 0.31339031339031337, "grad_norm": 0.43991541862487793, "learning_rate": 0.0001970486560954412, "loss": 0.9605, "step": 1760 }, { "epoch": 0.31356837606837606, "grad_norm": 0.4633477032184601, "learning_rate": 0.00019704527958138725, "loss": 1.1507, "step": 1761 }, { "epoch": 0.31374643874643876, "grad_norm": 0.4419999420642853, "learning_rate": 0.00019704190116593266, "loss": 0.9262, "step": 1762 }, { "epoch": 0.3139245014245014, "grad_norm": 0.49359434843063354, "learning_rate": 0.00019703852084914357, "loss": 0.9348, "step": 1763 }, { "epoch": 0.3141025641025641, "grad_norm": 0.5072139501571655, "learning_rate": 0.00019703513863108627, "loss": 1.1592, "step": 1764 }, { "epoch": 0.3142806267806268, "grad_norm": 0.45969831943511963, "learning_rate": 0.00019703175451182698, "loss": 1.1519, "step": 1765 }, { "epoch": 0.31445868945868943, "grad_norm": 0.5148758292198181, "learning_rate": 0.00019702836849143208, "loss": 1.1673, "step": 1766 }, { "epoch": 0.31463675213675213, "grad_norm": 0.43033209443092346, "learning_rate": 0.0001970249805699678, "loss": 0.9256, "step": 1767 }, { "epoch": 0.3148148148148148, "grad_norm": 0.48143425583839417, "learning_rate": 0.00019702159074750058, "loss": 1.08, "step": 1768 }, { "epoch": 0.31499287749287747, "grad_norm": 0.4780619740486145, "learning_rate": 0.00019701819902409685, "loss": 1.1198, "step": 1769 }, { "epoch": 0.31517094017094016, "grad_norm": 0.4662075936794281, "learning_rate": 0.00019701480539982305, "loss": 0.8424, "step": 1770 }, { "epoch": 0.31534900284900286, "grad_norm": 0.503901481628418, "learning_rate": 0.00019701140987474566, "loss": 1.1026, "step": 1771 }, { "epoch": 
0.31552706552706555, "grad_norm": 0.5197132229804993, "learning_rate": 0.00019700801244893124, "loss": 1.2148, "step": 1772 }, { "epoch": 0.3157051282051282, "grad_norm": 0.4746309220790863, "learning_rate": 0.00019700461312244634, "loss": 1.0906, "step": 1773 }, { "epoch": 0.3158831908831909, "grad_norm": 0.5277339816093445, "learning_rate": 0.00019700121189535752, "loss": 1.0588, "step": 1774 }, { "epoch": 0.3160612535612536, "grad_norm": 0.436002254486084, "learning_rate": 0.00019699780876773147, "loss": 1.0341, "step": 1775 }, { "epoch": 0.3162393162393162, "grad_norm": 0.5171145796775818, "learning_rate": 0.00019699440373963486, "loss": 1.282, "step": 1776 }, { "epoch": 0.3164173789173789, "grad_norm": 0.38382846117019653, "learning_rate": 0.00019699099681113436, "loss": 0.8908, "step": 1777 }, { "epoch": 0.3165954415954416, "grad_norm": 0.4621630609035492, "learning_rate": 0.0001969875879822968, "loss": 1.1074, "step": 1778 }, { "epoch": 0.31677350427350426, "grad_norm": 0.5543130040168762, "learning_rate": 0.00019698417725318892, "loss": 0.9682, "step": 1779 }, { "epoch": 0.31695156695156695, "grad_norm": 0.49534836411476135, "learning_rate": 0.00019698076462387753, "loss": 1.107, "step": 1780 }, { "epoch": 0.31712962962962965, "grad_norm": 0.48844948410987854, "learning_rate": 0.00019697735009442956, "loss": 1.1295, "step": 1781 }, { "epoch": 0.3173076923076923, "grad_norm": 0.5070686936378479, "learning_rate": 0.00019697393366491185, "loss": 1.083, "step": 1782 }, { "epoch": 0.317485754985755, "grad_norm": 0.47817620635032654, "learning_rate": 0.00019697051533539134, "loss": 1.3014, "step": 1783 }, { "epoch": 0.3176638176638177, "grad_norm": 0.538488507270813, "learning_rate": 0.00019696709510593502, "loss": 1.0354, "step": 1784 }, { "epoch": 0.3178418803418803, "grad_norm": 0.5141439437866211, "learning_rate": 0.0001969636729766099, "loss": 1.2912, "step": 1785 }, { "epoch": 0.318019943019943, "grad_norm": 0.5009665489196777, "learning_rate": 
0.00019696024894748306, "loss": 0.9014, "step": 1786 }, { "epoch": 0.3181980056980057, "grad_norm": 0.46199744939804077, "learning_rate": 0.00019695682301862155, "loss": 1.0532, "step": 1787 }, { "epoch": 0.31837606837606836, "grad_norm": 0.4649423062801361, "learning_rate": 0.0001969533951900925, "loss": 0.8608, "step": 1788 }, { "epoch": 0.31855413105413105, "grad_norm": 0.516909658908844, "learning_rate": 0.0001969499654619631, "loss": 1.1385, "step": 1789 }, { "epoch": 0.31873219373219375, "grad_norm": 0.46016669273376465, "learning_rate": 0.00019694653383430048, "loss": 0.9168, "step": 1790 }, { "epoch": 0.3189102564102564, "grad_norm": 0.4794938564300537, "learning_rate": 0.00019694310030717193, "loss": 1.0244, "step": 1791 }, { "epoch": 0.3190883190883191, "grad_norm": 0.46577662229537964, "learning_rate": 0.00019693966488064471, "loss": 1.0954, "step": 1792 }, { "epoch": 0.3192663817663818, "grad_norm": 0.4866746962070465, "learning_rate": 0.00019693622755478614, "loss": 1.2925, "step": 1793 }, { "epoch": 0.3194444444444444, "grad_norm": 0.4841702878475189, "learning_rate": 0.00019693278832966357, "loss": 1.119, "step": 1794 }, { "epoch": 0.3196225071225071, "grad_norm": 0.4835243821144104, "learning_rate": 0.00019692934720534435, "loss": 1.1702, "step": 1795 }, { "epoch": 0.3198005698005698, "grad_norm": 0.5200608968734741, "learning_rate": 0.00019692590418189594, "loss": 1.1989, "step": 1796 }, { "epoch": 0.31997863247863245, "grad_norm": 0.5147821307182312, "learning_rate": 0.00019692245925938577, "loss": 1.1417, "step": 1797 }, { "epoch": 0.32015669515669515, "grad_norm": 0.5145614743232727, "learning_rate": 0.00019691901243788136, "loss": 1.0571, "step": 1798 }, { "epoch": 0.32033475783475784, "grad_norm": 0.5416026711463928, "learning_rate": 0.00019691556371745022, "loss": 1.188, "step": 1799 }, { "epoch": 0.32051282051282054, "grad_norm": 0.5140644311904907, "learning_rate": 0.00019691211309815995, "loss": 1.1795, "step": 1800 }, { "epoch": 
0.3206908831908832, "grad_norm": 0.44219106435775757, "learning_rate": 0.00019690866058007817, "loss": 0.9215, "step": 1801 }, { "epoch": 0.3208689458689459, "grad_norm": 0.49523603916168213, "learning_rate": 0.00019690520616327245, "loss": 1.1117, "step": 1802 }, { "epoch": 0.32104700854700857, "grad_norm": 0.5818293690681458, "learning_rate": 0.0001969017498478105, "loss": 1.16, "step": 1803 }, { "epoch": 0.3212250712250712, "grad_norm": 0.5175749659538269, "learning_rate": 0.0001968982916337601, "loss": 1.1999, "step": 1804 }, { "epoch": 0.3214031339031339, "grad_norm": 0.49916017055511475, "learning_rate": 0.00019689483152118898, "loss": 0.9505, "step": 1805 }, { "epoch": 0.3215811965811966, "grad_norm": 0.46849536895751953, "learning_rate": 0.00019689136951016488, "loss": 0.9627, "step": 1806 }, { "epoch": 0.32175925925925924, "grad_norm": 0.4226818382740021, "learning_rate": 0.00019688790560075568, "loss": 1.037, "step": 1807 }, { "epoch": 0.32193732193732194, "grad_norm": 0.4697103798389435, "learning_rate": 0.00019688443979302923, "loss": 1.1431, "step": 1808 }, { "epoch": 0.32211538461538464, "grad_norm": 0.4999365508556366, "learning_rate": 0.00019688097208705343, "loss": 1.171, "step": 1809 }, { "epoch": 0.3222934472934473, "grad_norm": 0.5229731798171997, "learning_rate": 0.00019687750248289625, "loss": 1.3395, "step": 1810 }, { "epoch": 0.32247150997151, "grad_norm": 0.512525737285614, "learning_rate": 0.00019687403098062566, "loss": 1.1438, "step": 1811 }, { "epoch": 0.32264957264957267, "grad_norm": 0.4558548927307129, "learning_rate": 0.00019687055758030967, "loss": 1.0012, "step": 1812 }, { "epoch": 0.3228276353276353, "grad_norm": 0.45195743441581726, "learning_rate": 0.00019686708228201636, "loss": 1.0222, "step": 1813 }, { "epoch": 0.323005698005698, "grad_norm": 0.5023126602172852, "learning_rate": 0.00019686360508581373, "loss": 1.2128, "step": 1814 }, { "epoch": 0.3231837606837607, "grad_norm": 0.46516045928001404, "learning_rate": 
0.00019686012599177003, "loss": 0.989, "step": 1815 }, { "epoch": 0.32336182336182334, "grad_norm": 0.4142672121524811, "learning_rate": 0.00019685664499995338, "loss": 1.0144, "step": 1816 }, { "epoch": 0.32353988603988604, "grad_norm": 0.4511009752750397, "learning_rate": 0.0001968531621104319, "loss": 0.885, "step": 1817 }, { "epoch": 0.32371794871794873, "grad_norm": 0.49583545327186584, "learning_rate": 0.00019684967732327396, "loss": 1.0986, "step": 1818 }, { "epoch": 0.3238960113960114, "grad_norm": 0.5872161388397217, "learning_rate": 0.0001968461906385478, "loss": 1.1482, "step": 1819 }, { "epoch": 0.32407407407407407, "grad_norm": 0.4509563148021698, "learning_rate": 0.00019684270205632168, "loss": 1.0578, "step": 1820 }, { "epoch": 0.32425213675213677, "grad_norm": 0.501345157623291, "learning_rate": 0.00019683921157666402, "loss": 1.1792, "step": 1821 }, { "epoch": 0.3244301994301994, "grad_norm": 0.48257577419281006, "learning_rate": 0.00019683571919964314, "loss": 1.0448, "step": 1822 }, { "epoch": 0.3246082621082621, "grad_norm": 0.5399422645568848, "learning_rate": 0.00019683222492532752, "loss": 1.0579, "step": 1823 }, { "epoch": 0.3247863247863248, "grad_norm": 0.4382506012916565, "learning_rate": 0.0001968287287537856, "loss": 1.0246, "step": 1824 }, { "epoch": 0.32496438746438744, "grad_norm": 0.49247491359710693, "learning_rate": 0.00019682523068508586, "loss": 1.318, "step": 1825 }, { "epoch": 0.32514245014245013, "grad_norm": 0.49067625403404236, "learning_rate": 0.0001968217307192969, "loss": 1.1028, "step": 1826 }, { "epoch": 0.32532051282051283, "grad_norm": 0.4832286238670349, "learning_rate": 0.00019681822885648723, "loss": 1.0996, "step": 1827 }, { "epoch": 0.32549857549857547, "grad_norm": 0.47144386172294617, "learning_rate": 0.0001968147250967255, "loss": 1.0707, "step": 1828 }, { "epoch": 0.32567663817663817, "grad_norm": 0.46299225091934204, "learning_rate": 0.0001968112194400803, "loss": 1.0461, "step": 1829 }, { "epoch": 
0.32585470085470086, "grad_norm": 0.4880816340446472, "learning_rate": 0.00019680771188662044, "loss": 1.1198, "step": 1830 }, { "epoch": 0.32603276353276356, "grad_norm": 0.43837276101112366, "learning_rate": 0.00019680420243641452, "loss": 1.0599, "step": 1831 }, { "epoch": 0.3262108262108262, "grad_norm": 0.453168660402298, "learning_rate": 0.0001968006910895314, "loss": 1.0327, "step": 1832 }, { "epoch": 0.3263888888888889, "grad_norm": 0.45183828473091125, "learning_rate": 0.00019679717784603975, "loss": 1.1381, "step": 1833 }, { "epoch": 0.3265669515669516, "grad_norm": 0.5326765775680542, "learning_rate": 0.00019679366270600852, "loss": 1.3169, "step": 1834 }, { "epoch": 0.32674501424501423, "grad_norm": 0.47468429803848267, "learning_rate": 0.00019679014566950653, "loss": 1.1816, "step": 1835 }, { "epoch": 0.3269230769230769, "grad_norm": 0.5096879005432129, "learning_rate": 0.0001967866267366027, "loss": 1.1162, "step": 1836 }, { "epoch": 0.3271011396011396, "grad_norm": 0.491514652967453, "learning_rate": 0.00019678310590736598, "loss": 1.2793, "step": 1837 }, { "epoch": 0.32727920227920226, "grad_norm": 0.601439356803894, "learning_rate": 0.00019677958318186533, "loss": 0.9851, "step": 1838 }, { "epoch": 0.32745726495726496, "grad_norm": 0.45270970463752747, "learning_rate": 0.0001967760585601698, "loss": 1.0042, "step": 1839 }, { "epoch": 0.32763532763532766, "grad_norm": 0.48864325881004333, "learning_rate": 0.00019677253204234847, "loss": 1.0835, "step": 1840 }, { "epoch": 0.3278133903133903, "grad_norm": 0.5855685472488403, "learning_rate": 0.00019676900362847037, "loss": 1.193, "step": 1841 }, { "epoch": 0.327991452991453, "grad_norm": 0.7181013822555542, "learning_rate": 0.00019676547331860466, "loss": 1.2028, "step": 1842 }, { "epoch": 0.3281695156695157, "grad_norm": 0.4517378807067871, "learning_rate": 0.00019676194111282054, "loss": 1.013, "step": 1843 }, { "epoch": 0.32834757834757833, "grad_norm": 0.5477756857872009, "learning_rate": 
0.00019675840701118718, "loss": 1.2311, "step": 1844 }, { "epoch": 0.328525641025641, "grad_norm": 0.5194997191429138, "learning_rate": 0.00019675487101377382, "loss": 1.0953, "step": 1845 }, { "epoch": 0.3287037037037037, "grad_norm": 0.44454067945480347, "learning_rate": 0.00019675133312064977, "loss": 0.8505, "step": 1846 }, { "epoch": 0.32888176638176636, "grad_norm": 0.3938713073730469, "learning_rate": 0.00019674779333188428, "loss": 0.8525, "step": 1847 }, { "epoch": 0.32905982905982906, "grad_norm": 0.4927884340286255, "learning_rate": 0.00019674425164754682, "loss": 1.2477, "step": 1848 }, { "epoch": 0.32923789173789175, "grad_norm": 0.4516635239124298, "learning_rate": 0.0001967407080677067, "loss": 0.8333, "step": 1849 }, { "epoch": 0.3294159544159544, "grad_norm": 0.47105780243873596, "learning_rate": 0.00019673716259243336, "loss": 1.0989, "step": 1850 }, { "epoch": 0.3295940170940171, "grad_norm": 0.5192127823829651, "learning_rate": 0.00019673361522179627, "loss": 1.1164, "step": 1851 }, { "epoch": 0.3297720797720798, "grad_norm": 0.5222696661949158, "learning_rate": 0.00019673006595586495, "loss": 1.3191, "step": 1852 }, { "epoch": 0.3299501424501424, "grad_norm": 0.6046679019927979, "learning_rate": 0.0001967265147947089, "loss": 0.9782, "step": 1853 }, { "epoch": 0.3301282051282051, "grad_norm": 0.47928622364997864, "learning_rate": 0.00019672296173839775, "loss": 1.2247, "step": 1854 }, { "epoch": 0.3303062678062678, "grad_norm": 0.5435982346534729, "learning_rate": 0.00019671940678700107, "loss": 1.1647, "step": 1855 }, { "epoch": 0.33048433048433046, "grad_norm": 0.46878984570503235, "learning_rate": 0.00019671584994058856, "loss": 1.132, "step": 1856 }, { "epoch": 0.33066239316239315, "grad_norm": 0.5336877107620239, "learning_rate": 0.00019671229119922986, "loss": 1.0583, "step": 1857 }, { "epoch": 0.33084045584045585, "grad_norm": 0.4811093807220459, "learning_rate": 0.0001967087305629947, "loss": 1.0089, "step": 1858 }, { "epoch": 
0.33101851851851855, "grad_norm": 0.5140184760093689, "learning_rate": 0.0001967051680319529, "loss": 1.2335, "step": 1859 }, { "epoch": 0.3311965811965812, "grad_norm": 0.5855883955955505, "learning_rate": 0.00019670160360617418, "loss": 1.1107, "step": 1860 }, { "epoch": 0.3313746438746439, "grad_norm": 0.5081531405448914, "learning_rate": 0.00019669803728572844, "loss": 1.0669, "step": 1861 }, { "epoch": 0.3315527065527066, "grad_norm": 0.48749417066574097, "learning_rate": 0.0001966944690706855, "loss": 1.1465, "step": 1862 }, { "epoch": 0.3317307692307692, "grad_norm": 0.5175687670707703, "learning_rate": 0.00019669089896111536, "loss": 1.254, "step": 1863 }, { "epoch": 0.3319088319088319, "grad_norm": 0.4198860824108124, "learning_rate": 0.0001966873269570879, "loss": 0.9811, "step": 1864 }, { "epoch": 0.3320868945868946, "grad_norm": 0.5220273733139038, "learning_rate": 0.0001966837530586731, "loss": 1.277, "step": 1865 }, { "epoch": 0.33226495726495725, "grad_norm": 0.551954448223114, "learning_rate": 0.00019668017726594101, "loss": 1.0627, "step": 1866 }, { "epoch": 0.33244301994301995, "grad_norm": 0.5289301872253418, "learning_rate": 0.00019667659957896166, "loss": 1.4525, "step": 1867 }, { "epoch": 0.33262108262108264, "grad_norm": 0.5190161466598511, "learning_rate": 0.00019667301999780522, "loss": 1.1064, "step": 1868 }, { "epoch": 0.3327991452991453, "grad_norm": 0.437637060880661, "learning_rate": 0.00019666943852254172, "loss": 1.1304, "step": 1869 }, { "epoch": 0.332977207977208, "grad_norm": 0.4801286458969116, "learning_rate": 0.00019666585515324138, "loss": 1.032, "step": 1870 }, { "epoch": 0.3331552706552707, "grad_norm": 0.5041908621788025, "learning_rate": 0.00019666226988997445, "loss": 1.2611, "step": 1871 }, { "epoch": 0.3333333333333333, "grad_norm": 0.4529375731945038, "learning_rate": 0.00019665868273281115, "loss": 1.1346, "step": 1872 }, { "epoch": 0.333511396011396, "grad_norm": 0.4797019064426422, "learning_rate": 
0.00019665509368182172, "loss": 1.1716, "step": 1873 }, { "epoch": 0.3336894586894587, "grad_norm": 0.5505055785179138, "learning_rate": 0.00019665150273707652, "loss": 0.9729, "step": 1874 }, { "epoch": 0.33386752136752135, "grad_norm": 0.4228051006793976, "learning_rate": 0.00019664790989864592, "loss": 0.9023, "step": 1875 }, { "epoch": 0.33404558404558404, "grad_norm": 0.4926959276199341, "learning_rate": 0.00019664431516660028, "loss": 1.0999, "step": 1876 }, { "epoch": 0.33422364672364674, "grad_norm": 0.4273219704627991, "learning_rate": 0.00019664071854101005, "loss": 1.1039, "step": 1877 }, { "epoch": 0.3344017094017094, "grad_norm": 0.48438936471939087, "learning_rate": 0.00019663712002194566, "loss": 1.1308, "step": 1878 }, { "epoch": 0.3345797720797721, "grad_norm": 0.5102053284645081, "learning_rate": 0.0001966335196094777, "loss": 1.0618, "step": 1879 }, { "epoch": 0.33475783475783477, "grad_norm": 0.4357300400733948, "learning_rate": 0.00019662991730367663, "loss": 1.0521, "step": 1880 }, { "epoch": 0.3349358974358974, "grad_norm": 0.5052695870399475, "learning_rate": 0.00019662631310461308, "loss": 0.9579, "step": 1881 }, { "epoch": 0.3351139601139601, "grad_norm": 0.4889117181301117, "learning_rate": 0.00019662270701235762, "loss": 1.0304, "step": 1882 }, { "epoch": 0.3352920227920228, "grad_norm": 0.4671195149421692, "learning_rate": 0.000196619099026981, "loss": 1.2228, "step": 1883 }, { "epoch": 0.33547008547008544, "grad_norm": 0.4700174331665039, "learning_rate": 0.0001966154891485538, "loss": 0.9634, "step": 1884 }, { "epoch": 0.33564814814814814, "grad_norm": 0.488817423582077, "learning_rate": 0.00019661187737714676, "loss": 1.2499, "step": 1885 }, { "epoch": 0.33582621082621084, "grad_norm": 0.5336169600486755, "learning_rate": 0.00019660826371283073, "loss": 1.251, "step": 1886 }, { "epoch": 0.33600427350427353, "grad_norm": 0.5054540038108826, "learning_rate": 0.00019660464815567642, "loss": 1.221, "step": 1887 }, { "epoch": 
0.33618233618233617, "grad_norm": 0.5078747868537903, "learning_rate": 0.00019660103070575472, "loss": 0.9792, "step": 1888 }, { "epoch": 0.33636039886039887, "grad_norm": 0.498571515083313, "learning_rate": 0.0001965974113631365, "loss": 1.1682, "step": 1889 }, { "epoch": 0.33653846153846156, "grad_norm": 0.49969518184661865, "learning_rate": 0.00019659379012789264, "loss": 1.0012, "step": 1890 }, { "epoch": 0.3367165242165242, "grad_norm": 0.4238094687461853, "learning_rate": 0.00019659016700009416, "loss": 1.0455, "step": 1891 }, { "epoch": 0.3368945868945869, "grad_norm": 0.5139104723930359, "learning_rate": 0.000196586541979812, "loss": 0.9979, "step": 1892 }, { "epoch": 0.3370726495726496, "grad_norm": 0.5446547269821167, "learning_rate": 0.00019658291506711715, "loss": 0.9271, "step": 1893 }, { "epoch": 0.33725071225071224, "grad_norm": 0.5284572839736938, "learning_rate": 0.00019657928626208077, "loss": 1.0356, "step": 1894 }, { "epoch": 0.33742877492877493, "grad_norm": 0.49936217069625854, "learning_rate": 0.00019657565556477387, "loss": 0.9785, "step": 1895 }, { "epoch": 0.33760683760683763, "grad_norm": 0.4678729772567749, "learning_rate": 0.00019657202297526763, "loss": 1.2135, "step": 1896 }, { "epoch": 0.33778490028490027, "grad_norm": 0.46844249963760376, "learning_rate": 0.0001965683884936332, "loss": 0.9369, "step": 1897 }, { "epoch": 0.33796296296296297, "grad_norm": 0.4307389557361603, "learning_rate": 0.0001965647521199418, "loss": 0.9301, "step": 1898 }, { "epoch": 0.33814102564102566, "grad_norm": 0.48227834701538086, "learning_rate": 0.00019656111385426468, "loss": 1.3169, "step": 1899 }, { "epoch": 0.3383190883190883, "grad_norm": 0.45860713720321655, "learning_rate": 0.00019655747369667315, "loss": 0.9835, "step": 1900 }, { "epoch": 0.338497150997151, "grad_norm": 0.5522414445877075, "learning_rate": 0.00019655383164723846, "loss": 1.363, "step": 1901 }, { "epoch": 0.3386752136752137, "grad_norm": 0.5283710360527039, "learning_rate": 
0.000196550187706032, "loss": 1.1499, "step": 1902 }, { "epoch": 0.33885327635327633, "grad_norm": 0.4419134259223938, "learning_rate": 0.00019654654187312525, "loss": 1.2039, "step": 1903 }, { "epoch": 0.33903133903133903, "grad_norm": 0.49066096544265747, "learning_rate": 0.00019654289414858952, "loss": 0.9707, "step": 1904 }, { "epoch": 0.3392094017094017, "grad_norm": 0.4619338810443878, "learning_rate": 0.00019653924453249633, "loss": 1.0849, "step": 1905 }, { "epoch": 0.33938746438746437, "grad_norm": 0.5191119313240051, "learning_rate": 0.0001965355930249172, "loss": 1.1387, "step": 1906 }, { "epoch": 0.33956552706552706, "grad_norm": 0.5245711207389832, "learning_rate": 0.00019653193962592368, "loss": 1.3435, "step": 1907 }, { "epoch": 0.33974358974358976, "grad_norm": 0.49562904238700867, "learning_rate": 0.0001965282843355873, "loss": 1.2781, "step": 1908 }, { "epoch": 0.3399216524216524, "grad_norm": 0.4661353826522827, "learning_rate": 0.0001965246271539797, "loss": 0.9317, "step": 1909 }, { "epoch": 0.3400997150997151, "grad_norm": 0.4723222851753235, "learning_rate": 0.00019652096808117254, "loss": 1.0733, "step": 1910 }, { "epoch": 0.3402777777777778, "grad_norm": 0.4358505308628082, "learning_rate": 0.00019651730711723754, "loss": 1.1461, "step": 1911 }, { "epoch": 0.34045584045584043, "grad_norm": 0.462422251701355, "learning_rate": 0.00019651364426224638, "loss": 1.0914, "step": 1912 }, { "epoch": 0.3406339031339031, "grad_norm": 0.47952914237976074, "learning_rate": 0.0001965099795162709, "loss": 1.0392, "step": 1913 }, { "epoch": 0.3408119658119658, "grad_norm": 0.5036373734474182, "learning_rate": 0.00019650631287938282, "loss": 1.4002, "step": 1914 }, { "epoch": 0.34099002849002846, "grad_norm": 0.5130090713500977, "learning_rate": 0.000196502644351654, "loss": 1.3499, "step": 1915 }, { "epoch": 0.34116809116809116, "grad_norm": 0.4426332414150238, "learning_rate": 0.00019649897393315635, "loss": 1.0726, "step": 1916 }, { "epoch": 
0.34134615384615385, "grad_norm": 0.5580727458000183, "learning_rate": 0.00019649530162396176, "loss": 1.1164, "step": 1917 }, { "epoch": 0.34152421652421655, "grad_norm": 0.545001745223999, "learning_rate": 0.00019649162742414218, "loss": 0.962, "step": 1918 }, { "epoch": 0.3417022792022792, "grad_norm": 0.5225808024406433, "learning_rate": 0.00019648795133376962, "loss": 1.1415, "step": 1919 }, { "epoch": 0.3418803418803419, "grad_norm": 0.48210129141807556, "learning_rate": 0.0001964842733529161, "loss": 1.1188, "step": 1920 }, { "epoch": 0.3420584045584046, "grad_norm": 0.4515395164489746, "learning_rate": 0.00019648059348165365, "loss": 1.0828, "step": 1921 }, { "epoch": 0.3422364672364672, "grad_norm": 0.5802633166313171, "learning_rate": 0.0001964769117200544, "loss": 1.3137, "step": 1922 }, { "epoch": 0.3424145299145299, "grad_norm": 0.4432032108306885, "learning_rate": 0.00019647322806819046, "loss": 1.0523, "step": 1923 }, { "epoch": 0.3425925925925926, "grad_norm": 0.4697614908218384, "learning_rate": 0.00019646954252613402, "loss": 0.8426, "step": 1924 }, { "epoch": 0.34277065527065526, "grad_norm": 0.4610968232154846, "learning_rate": 0.0001964658550939573, "loss": 0.9826, "step": 1925 }, { "epoch": 0.34294871794871795, "grad_norm": 0.5278257727622986, "learning_rate": 0.00019646216577173258, "loss": 1.1064, "step": 1926 }, { "epoch": 0.34312678062678065, "grad_norm": 0.5686144232749939, "learning_rate": 0.00019645847455953205, "loss": 0.9138, "step": 1927 }, { "epoch": 0.3433048433048433, "grad_norm": 0.42894792556762695, "learning_rate": 0.0001964547814574281, "loss": 1.0461, "step": 1928 }, { "epoch": 0.343482905982906, "grad_norm": 0.5567317605018616, "learning_rate": 0.0001964510864654931, "loss": 0.8787, "step": 1929 }, { "epoch": 0.3436609686609687, "grad_norm": 0.5015586614608765, "learning_rate": 0.0001964473895837994, "loss": 1.1406, "step": 1930 }, { "epoch": 0.3438390313390313, "grad_norm": 0.47391530871391296, "learning_rate": 
0.00019644369081241948, "loss": 1.0685, "step": 1931 }, { "epoch": 0.344017094017094, "grad_norm": 0.546037495136261, "learning_rate": 0.00019643999015142574, "loss": 1.2349, "step": 1932 }, { "epoch": 0.3441951566951567, "grad_norm": 0.4724953770637512, "learning_rate": 0.00019643628760089078, "loss": 1.0621, "step": 1933 }, { "epoch": 0.34437321937321935, "grad_norm": 0.5644593834877014, "learning_rate": 0.00019643258316088703, "loss": 1.2559, "step": 1934 }, { "epoch": 0.34455128205128205, "grad_norm": 0.500815749168396, "learning_rate": 0.00019642887683148718, "loss": 1.0439, "step": 1935 }, { "epoch": 0.34472934472934474, "grad_norm": 0.4932316541671753, "learning_rate": 0.0001964251686127638, "loss": 1.0404, "step": 1936 }, { "epoch": 0.3449074074074074, "grad_norm": 0.48494651913642883, "learning_rate": 0.00019642145850478954, "loss": 0.9951, "step": 1937 }, { "epoch": 0.3450854700854701, "grad_norm": 0.5191963315010071, "learning_rate": 0.00019641774650763706, "loss": 1.1258, "step": 1938 }, { "epoch": 0.3452635327635328, "grad_norm": 0.4439312815666199, "learning_rate": 0.00019641403262137918, "loss": 1.1158, "step": 1939 }, { "epoch": 0.3454415954415954, "grad_norm": 0.4829137921333313, "learning_rate": 0.0001964103168460886, "loss": 1.0531, "step": 1940 }, { "epoch": 0.3456196581196581, "grad_norm": 0.49433329701423645, "learning_rate": 0.00019640659918183811, "loss": 1.1295, "step": 1941 }, { "epoch": 0.3457977207977208, "grad_norm": 0.5351347923278809, "learning_rate": 0.00019640287962870062, "loss": 1.2379, "step": 1942 }, { "epoch": 0.34597578347578345, "grad_norm": 0.4845680892467499, "learning_rate": 0.00019639915818674895, "loss": 1.0197, "step": 1943 }, { "epoch": 0.34615384615384615, "grad_norm": 0.5312514901161194, "learning_rate": 0.00019639543485605604, "loss": 0.9734, "step": 1944 }, { "epoch": 0.34633190883190884, "grad_norm": 0.4571874737739563, "learning_rate": 0.00019639170963669478, "loss": 1.1012, "step": 1945 }, { "epoch": 
0.34650997150997154, "grad_norm": 0.4449031949043274, "learning_rate": 0.00019638798252873824, "loss": 1.1393, "step": 1946 }, { "epoch": 0.3466880341880342, "grad_norm": 0.47470834851264954, "learning_rate": 0.0001963842535322594, "loss": 0.981, "step": 1947 }, { "epoch": 0.3468660968660969, "grad_norm": 0.5386981964111328, "learning_rate": 0.00019638052264733132, "loss": 1.1247, "step": 1948 }, { "epoch": 0.34704415954415957, "grad_norm": 0.535589873790741, "learning_rate": 0.00019637678987402714, "loss": 1.3157, "step": 1949 }, { "epoch": 0.3472222222222222, "grad_norm": 0.49338245391845703, "learning_rate": 0.00019637305521242, "loss": 1.1066, "step": 1950 }, { "epoch": 0.3474002849002849, "grad_norm": 0.4247688353061676, "learning_rate": 0.00019636931866258298, "loss": 1.0039, "step": 1951 }, { "epoch": 0.3475783475783476, "grad_norm": 0.5351517200469971, "learning_rate": 0.00019636558022458934, "loss": 1.0344, "step": 1952 }, { "epoch": 0.34775641025641024, "grad_norm": 0.4633362889289856, "learning_rate": 0.00019636183989851238, "loss": 1.1383, "step": 1953 }, { "epoch": 0.34793447293447294, "grad_norm": 0.553709089756012, "learning_rate": 0.00019635809768442535, "loss": 1.0389, "step": 1954 }, { "epoch": 0.34811253561253563, "grad_norm": 0.479374498128891, "learning_rate": 0.00019635435358240154, "loss": 1.1774, "step": 1955 }, { "epoch": 0.3482905982905983, "grad_norm": 0.5274081230163574, "learning_rate": 0.0001963506075925143, "loss": 1.1809, "step": 1956 }, { "epoch": 0.34846866096866097, "grad_norm": 0.45398542284965515, "learning_rate": 0.0001963468597148371, "loss": 1.0502, "step": 1957 }, { "epoch": 0.34864672364672367, "grad_norm": 0.48201611638069153, "learning_rate": 0.00019634310994944332, "loss": 1.0557, "step": 1958 }, { "epoch": 0.3488247863247863, "grad_norm": 0.6407544016838074, "learning_rate": 0.00019633935829640642, "loss": 1.2138, "step": 1959 }, { "epoch": 0.349002849002849, "grad_norm": 0.5385687351226807, "learning_rate": 
0.00019633560475579995, "loss": 1.3496, "step": 1960 }, { "epoch": 0.3491809116809117, "grad_norm": 0.5260964035987854, "learning_rate": 0.0001963318493276974, "loss": 1.0253, "step": 1961 }, { "epoch": 0.34935897435897434, "grad_norm": 0.48478585481643677, "learning_rate": 0.00019632809201217238, "loss": 1.137, "step": 1962 }, { "epoch": 0.34953703703703703, "grad_norm": 0.620033860206604, "learning_rate": 0.0001963243328092985, "loss": 1.3445, "step": 1963 }, { "epoch": 0.34971509971509973, "grad_norm": 0.5149700045585632, "learning_rate": 0.00019632057171914942, "loss": 1.1042, "step": 1964 }, { "epoch": 0.34989316239316237, "grad_norm": 0.42695048451423645, "learning_rate": 0.0001963168087417988, "loss": 0.8789, "step": 1965 }, { "epoch": 0.35007122507122507, "grad_norm": 0.5281283855438232, "learning_rate": 0.00019631304387732044, "loss": 1.1155, "step": 1966 }, { "epoch": 0.35024928774928776, "grad_norm": 0.4994089901447296, "learning_rate": 0.00019630927712578804, "loss": 1.1226, "step": 1967 }, { "epoch": 0.3504273504273504, "grad_norm": 0.4433288276195526, "learning_rate": 0.0001963055084872754, "loss": 1.0262, "step": 1968 }, { "epoch": 0.3506054131054131, "grad_norm": 0.46541857719421387, "learning_rate": 0.0001963017379618564, "loss": 1.1438, "step": 1969 }, { "epoch": 0.3507834757834758, "grad_norm": 0.5097604393959045, "learning_rate": 0.00019629796554960488, "loss": 0.9641, "step": 1970 }, { "epoch": 0.35096153846153844, "grad_norm": 0.49461981654167175, "learning_rate": 0.00019629419125059478, "loss": 1.1765, "step": 1971 }, { "epoch": 0.35113960113960113, "grad_norm": 0.4763339161872864, "learning_rate": 0.00019629041506490005, "loss": 1.0527, "step": 1972 }, { "epoch": 0.35131766381766383, "grad_norm": 0.4528443217277527, "learning_rate": 0.00019628663699259463, "loss": 1.1409, "step": 1973 }, { "epoch": 0.35149572649572647, "grad_norm": 0.4436309039592743, "learning_rate": 0.00019628285703375258, "loss": 1.0459, "step": 1974 }, { "epoch": 
0.35167378917378916, "grad_norm": 0.5146129727363586, "learning_rate": 0.00019627907518844797, "loss": 1.2527, "step": 1975 }, { "epoch": 0.35185185185185186, "grad_norm": 0.5202171802520752, "learning_rate": 0.0001962752914567549, "loss": 1.226, "step": 1976 }, { "epoch": 0.35202991452991456, "grad_norm": 0.5267411470413208, "learning_rate": 0.00019627150583874747, "loss": 1.0898, "step": 1977 }, { "epoch": 0.3522079772079772, "grad_norm": 0.546840250492096, "learning_rate": 0.00019626771833449987, "loss": 1.1716, "step": 1978 }, { "epoch": 0.3523860398860399, "grad_norm": 0.5525290966033936, "learning_rate": 0.0001962639289440863, "loss": 1.1762, "step": 1979 }, { "epoch": 0.3525641025641026, "grad_norm": 0.48967215418815613, "learning_rate": 0.000196260137667581, "loss": 1.1884, "step": 1980 }, { "epoch": 0.35274216524216523, "grad_norm": 0.5908235907554626, "learning_rate": 0.0001962563445050583, "loss": 1.1887, "step": 1981 }, { "epoch": 0.3529202279202279, "grad_norm": 0.46708086133003235, "learning_rate": 0.00019625254945659245, "loss": 0.8842, "step": 1982 }, { "epoch": 0.3530982905982906, "grad_norm": 0.41652458906173706, "learning_rate": 0.00019624875252225788, "loss": 1.0268, "step": 1983 }, { "epoch": 0.35327635327635326, "grad_norm": 0.5084529519081116, "learning_rate": 0.00019624495370212892, "loss": 1.0547, "step": 1984 }, { "epoch": 0.35345441595441596, "grad_norm": 0.5667507648468018, "learning_rate": 0.00019624115299628003, "loss": 1.0656, "step": 1985 }, { "epoch": 0.35363247863247865, "grad_norm": 0.5022873282432556, "learning_rate": 0.00019623735040478568, "loss": 1.0627, "step": 1986 }, { "epoch": 0.3538105413105413, "grad_norm": 0.48342058062553406, "learning_rate": 0.00019623354592772035, "loss": 1.0976, "step": 1987 }, { "epoch": 0.353988603988604, "grad_norm": 0.48117366433143616, "learning_rate": 0.0001962297395651586, "loss": 1.0515, "step": 1988 }, { "epoch": 0.3541666666666667, "grad_norm": 0.492564857006073, "learning_rate": 
0.000196225931317175, "loss": 1.1957, "step": 1989 }, { "epoch": 0.3543447293447293, "grad_norm": 0.4756208658218384, "learning_rate": 0.00019622212118384417, "loss": 1.007, "step": 1990 }, { "epoch": 0.354522792022792, "grad_norm": 0.581930935382843, "learning_rate": 0.00019621830916524076, "loss": 1.232, "step": 1991 }, { "epoch": 0.3547008547008547, "grad_norm": 0.480064332485199, "learning_rate": 0.00019621449526143947, "loss": 1.2693, "step": 1992 }, { "epoch": 0.35487891737891736, "grad_norm": 0.5679123401641846, "learning_rate": 0.000196210679472515, "loss": 1.2985, "step": 1993 }, { "epoch": 0.35505698005698005, "grad_norm": 0.43757280707359314, "learning_rate": 0.00019620686179854213, "loss": 1.1387, "step": 1994 }, { "epoch": 0.35523504273504275, "grad_norm": 0.4950634837150574, "learning_rate": 0.00019620304223959566, "loss": 1.1809, "step": 1995 }, { "epoch": 0.3554131054131054, "grad_norm": 0.5574113726615906, "learning_rate": 0.00019619922079575043, "loss": 1.2434, "step": 1996 }, { "epoch": 0.3555911680911681, "grad_norm": 0.5154930949211121, "learning_rate": 0.00019619539746708128, "loss": 1.1747, "step": 1997 }, { "epoch": 0.3557692307692308, "grad_norm": 0.4377825856208801, "learning_rate": 0.00019619157225366315, "loss": 0.9547, "step": 1998 }, { "epoch": 0.3559472934472934, "grad_norm": 0.530714213848114, "learning_rate": 0.00019618774515557097, "loss": 1.2057, "step": 1999 }, { "epoch": 0.3561253561253561, "grad_norm": 0.5703464150428772, "learning_rate": 0.00019618391617287978, "loss": 1.3068, "step": 2000 }, { "epoch": 0.3563034188034188, "grad_norm": 0.4862228333950043, "learning_rate": 0.0001961800853056645, "loss": 1.0077, "step": 2001 }, { "epoch": 0.35648148148148145, "grad_norm": 0.5575395822525024, "learning_rate": 0.00019617625255400028, "loss": 1.03, "step": 2002 }, { "epoch": 0.35665954415954415, "grad_norm": 0.4826279580593109, "learning_rate": 0.0001961724179179622, "loss": 1.268, "step": 2003 }, { "epoch": 0.35683760683760685, 
"grad_norm": 0.49423274397850037, "learning_rate": 0.00019616858139762534, "loss": 1.1305, "step": 2004 }, { "epoch": 0.35701566951566954, "grad_norm": 0.5208541750907898, "learning_rate": 0.00019616474299306491, "loss": 1.1651, "step": 2005 }, { "epoch": 0.3571937321937322, "grad_norm": 0.5324164032936096, "learning_rate": 0.0001961609027043561, "loss": 1.1406, "step": 2006 }, { "epoch": 0.3573717948717949, "grad_norm": 0.45385462045669556, "learning_rate": 0.00019615706053157416, "loss": 1.0716, "step": 2007 }, { "epoch": 0.3575498575498576, "grad_norm": 0.5016173720359802, "learning_rate": 0.00019615321647479438, "loss": 1.0878, "step": 2008 }, { "epoch": 0.3577279202279202, "grad_norm": 0.5073097348213196, "learning_rate": 0.00019614937053409205, "loss": 1.237, "step": 2009 }, { "epoch": 0.3579059829059829, "grad_norm": 0.48880141973495483, "learning_rate": 0.00019614552270954256, "loss": 0.8794, "step": 2010 }, { "epoch": 0.3580840455840456, "grad_norm": 0.43902209401130676, "learning_rate": 0.00019614167300122126, "loss": 0.912, "step": 2011 }, { "epoch": 0.35826210826210825, "grad_norm": 0.42809322476387024, "learning_rate": 0.0001961378214092036, "loss": 0.7804, "step": 2012 }, { "epoch": 0.35844017094017094, "grad_norm": 0.4464281499385834, "learning_rate": 0.00019613396793356503, "loss": 1.0004, "step": 2013 }, { "epoch": 0.35861823361823364, "grad_norm": 0.49085676670074463, "learning_rate": 0.00019613011257438109, "loss": 1.1087, "step": 2014 }, { "epoch": 0.3587962962962963, "grad_norm": 0.4997732937335968, "learning_rate": 0.00019612625533172725, "loss": 0.9591, "step": 2015 }, { "epoch": 0.358974358974359, "grad_norm": 0.48442545533180237, "learning_rate": 0.00019612239620567912, "loss": 0.9744, "step": 2016 }, { "epoch": 0.35915242165242167, "grad_norm": 0.4989205002784729, "learning_rate": 0.00019611853519631233, "loss": 0.9844, "step": 2017 }, { "epoch": 0.3593304843304843, "grad_norm": 0.6107521653175354, "learning_rate": 0.00019611467230370248, 
"loss": 1.147, "step": 2018 }, { "epoch": 0.359508547008547, "grad_norm": 0.5594844818115234, "learning_rate": 0.00019611080752792535, "loss": 1.3195, "step": 2019 }, { "epoch": 0.3596866096866097, "grad_norm": 0.4786946475505829, "learning_rate": 0.00019610694086905656, "loss": 1.2108, "step": 2020 }, { "epoch": 0.35986467236467234, "grad_norm": 0.5186030268669128, "learning_rate": 0.0001961030723271719, "loss": 1.0008, "step": 2021 }, { "epoch": 0.36004273504273504, "grad_norm": 0.4520573318004608, "learning_rate": 0.0001960992019023472, "loss": 1.1307, "step": 2022 }, { "epoch": 0.36022079772079774, "grad_norm": 0.4983210563659668, "learning_rate": 0.00019609532959465823, "loss": 1.1486, "step": 2023 }, { "epoch": 0.3603988603988604, "grad_norm": 0.6209200024604797, "learning_rate": 0.00019609145540418094, "loss": 1.2566, "step": 2024 }, { "epoch": 0.3605769230769231, "grad_norm": 0.47047603130340576, "learning_rate": 0.00019608757933099117, "loss": 1.1588, "step": 2025 }, { "epoch": 0.36075498575498577, "grad_norm": 0.5147389769554138, "learning_rate": 0.0001960837013751649, "loss": 1.2113, "step": 2026 }, { "epoch": 0.3609330484330484, "grad_norm": 0.45826098322868347, "learning_rate": 0.00019607982153677808, "loss": 1.13, "step": 2027 }, { "epoch": 0.3611111111111111, "grad_norm": 0.5699561834335327, "learning_rate": 0.00019607593981590675, "loss": 1.2476, "step": 2028 }, { "epoch": 0.3612891737891738, "grad_norm": 0.5349239110946655, "learning_rate": 0.000196072056212627, "loss": 1.2295, "step": 2029 }, { "epoch": 0.36146723646723644, "grad_norm": 0.6212165355682373, "learning_rate": 0.00019606817072701484, "loss": 1.1965, "step": 2030 }, { "epoch": 0.36164529914529914, "grad_norm": 0.4870990216732025, "learning_rate": 0.00019606428335914645, "loss": 1.4464, "step": 2031 }, { "epoch": 0.36182336182336183, "grad_norm": 0.42427828907966614, "learning_rate": 0.00019606039410909797, "loss": 1.1546, "step": 2032 }, { "epoch": 0.36200142450142453, "grad_norm": 
0.5081788301467896, "learning_rate": 0.0001960565029769456, "loss": 1.1867, "step": 2033 }, { "epoch": 0.36217948717948717, "grad_norm": 0.4813104271888733, "learning_rate": 0.00019605260996276565, "loss": 1.3726, "step": 2034 }, { "epoch": 0.36235754985754987, "grad_norm": 0.4648851156234741, "learning_rate": 0.0001960487150666343, "loss": 1.2434, "step": 2035 }, { "epoch": 0.36253561253561256, "grad_norm": 0.484161913394928, "learning_rate": 0.00019604481828862792, "loss": 1.1309, "step": 2036 }, { "epoch": 0.3627136752136752, "grad_norm": 0.4929439127445221, "learning_rate": 0.00019604091962882283, "loss": 1.1007, "step": 2037 }, { "epoch": 0.3628917378917379, "grad_norm": 0.45599642395973206, "learning_rate": 0.00019603701908729544, "loss": 1.2628, "step": 2038 }, { "epoch": 0.3630698005698006, "grad_norm": 0.45295149087905884, "learning_rate": 0.00019603311666412213, "loss": 0.9808, "step": 2039 }, { "epoch": 0.36324786324786323, "grad_norm": 0.48681163787841797, "learning_rate": 0.00019602921235937942, "loss": 1.0574, "step": 2040 }, { "epoch": 0.36342592592592593, "grad_norm": 0.41232365369796753, "learning_rate": 0.00019602530617314378, "loss": 1.0454, "step": 2041 }, { "epoch": 0.3636039886039886, "grad_norm": 0.46214723587036133, "learning_rate": 0.00019602139810549174, "loss": 0.9985, "step": 2042 }, { "epoch": 0.36378205128205127, "grad_norm": 0.44307878613471985, "learning_rate": 0.00019601748815649989, "loss": 0.9683, "step": 2043 }, { "epoch": 0.36396011396011396, "grad_norm": 0.4809451401233673, "learning_rate": 0.00019601357632624477, "loss": 1.028, "step": 2044 }, { "epoch": 0.36413817663817666, "grad_norm": 0.4638497531414032, "learning_rate": 0.0001960096626148031, "loss": 0.9851, "step": 2045 }, { "epoch": 0.3643162393162393, "grad_norm": 0.5942164063453674, "learning_rate": 0.00019600574702225153, "loss": 1.1606, "step": 2046 }, { "epoch": 0.364494301994302, "grad_norm": 0.5171293616294861, "learning_rate": 0.00019600182954866675, "loss": 
1.2335, "step": 2047 }, { "epoch": 0.3646723646723647, "grad_norm": 0.5294404625892639, "learning_rate": 0.00019599791019412558, "loss": 1.0966, "step": 2048 }, { "epoch": 0.36485042735042733, "grad_norm": 0.46117448806762695, "learning_rate": 0.00019599398895870477, "loss": 1.0565, "step": 2049 }, { "epoch": 0.36502849002849, "grad_norm": 0.5385118126869202, "learning_rate": 0.00019599006584248118, "loss": 1.0076, "step": 2050 }, { "epoch": 0.3652065527065527, "grad_norm": 0.4915166199207306, "learning_rate": 0.00019598614084553165, "loss": 0.9686, "step": 2051 }, { "epoch": 0.36538461538461536, "grad_norm": 0.46769094467163086, "learning_rate": 0.00019598221396793303, "loss": 1.1217, "step": 2052 }, { "epoch": 0.36556267806267806, "grad_norm": 0.5440493822097778, "learning_rate": 0.00019597828520976236, "loss": 1.2344, "step": 2053 }, { "epoch": 0.36574074074074076, "grad_norm": 0.616727352142334, "learning_rate": 0.00019597435457109657, "loss": 1.2953, "step": 2054 }, { "epoch": 0.3659188034188034, "grad_norm": 0.4859183430671692, "learning_rate": 0.00019597042205201265, "loss": 1.16, "step": 2055 }, { "epoch": 0.3660968660968661, "grad_norm": 0.47056329250335693, "learning_rate": 0.0001959664876525877, "loss": 0.9982, "step": 2056 }, { "epoch": 0.3662749287749288, "grad_norm": 0.48347967863082886, "learning_rate": 0.00019596255137289875, "loss": 1.0966, "step": 2057 }, { "epoch": 0.36645299145299143, "grad_norm": 0.5068454742431641, "learning_rate": 0.00019595861321302296, "loss": 1.2891, "step": 2058 }, { "epoch": 0.3666310541310541, "grad_norm": 0.5702359080314636, "learning_rate": 0.00019595467317303747, "loss": 1.1394, "step": 2059 }, { "epoch": 0.3668091168091168, "grad_norm": 0.5028812885284424, "learning_rate": 0.0001959507312530195, "loss": 1.2324, "step": 2060 }, { "epoch": 0.36698717948717946, "grad_norm": 0.4672880172729492, "learning_rate": 0.00019594678745304628, "loss": 1.0581, "step": 2061 }, { "epoch": 0.36716524216524216, "grad_norm": 
0.5233900547027588, "learning_rate": 0.00019594284177319504, "loss": 1.138, "step": 2062 }, { "epoch": 0.36734330484330485, "grad_norm": 0.46871712803840637, "learning_rate": 0.00019593889421354316, "loss": 1.2159, "step": 2063 }, { "epoch": 0.36752136752136755, "grad_norm": 0.5180533528327942, "learning_rate": 0.00019593494477416793, "loss": 1.1116, "step": 2064 }, { "epoch": 0.3676994301994302, "grad_norm": 0.5398494005203247, "learning_rate": 0.0001959309934551467, "loss": 1.2038, "step": 2065 }, { "epoch": 0.3678774928774929, "grad_norm": 0.4850373864173889, "learning_rate": 0.000195927040256557, "loss": 1.4315, "step": 2066 }, { "epoch": 0.3680555555555556, "grad_norm": 0.49190905690193176, "learning_rate": 0.0001959230851784762, "loss": 0.9993, "step": 2067 }, { "epoch": 0.3682336182336182, "grad_norm": 0.4546903073787689, "learning_rate": 0.00019591912822098178, "loss": 1.0979, "step": 2068 }, { "epoch": 0.3684116809116809, "grad_norm": 0.4726468622684479, "learning_rate": 0.00019591516938415133, "loss": 1.1629, "step": 2069 }, { "epoch": 0.3685897435897436, "grad_norm": 0.47856009006500244, "learning_rate": 0.00019591120866806235, "loss": 1.2048, "step": 2070 }, { "epoch": 0.36876780626780625, "grad_norm": 0.46847718954086304, "learning_rate": 0.0001959072460727925, "loss": 1.0958, "step": 2071 }, { "epoch": 0.36894586894586895, "grad_norm": 0.47164350748062134, "learning_rate": 0.0001959032815984194, "loss": 1.1912, "step": 2072 }, { "epoch": 0.36912393162393164, "grad_norm": 0.4838213324546814, "learning_rate": 0.0001958993152450207, "loss": 1.1466, "step": 2073 }, { "epoch": 0.3693019943019943, "grad_norm": 0.47234636545181274, "learning_rate": 0.00019589534701267412, "loss": 0.9475, "step": 2074 }, { "epoch": 0.369480056980057, "grad_norm": 0.4913126826286316, "learning_rate": 0.00019589137690145746, "loss": 1.1571, "step": 2075 }, { "epoch": 0.3696581196581197, "grad_norm": 0.4696233570575714, "learning_rate": 0.00019588740491144842, "loss": 0.9797, 
"step": 2076 }, { "epoch": 0.3698361823361823, "grad_norm": 0.46146106719970703, "learning_rate": 0.00019588343104272492, "loss": 1.027, "step": 2077 }, { "epoch": 0.370014245014245, "grad_norm": 0.4920627176761627, "learning_rate": 0.00019587945529536474, "loss": 1.1008, "step": 2078 }, { "epoch": 0.3701923076923077, "grad_norm": 0.4854249954223633, "learning_rate": 0.0001958754776694458, "loss": 1.0759, "step": 2079 }, { "epoch": 0.37037037037037035, "grad_norm": 0.4884897768497467, "learning_rate": 0.00019587149816504608, "loss": 1.1403, "step": 2080 }, { "epoch": 0.37054843304843305, "grad_norm": 0.5062584280967712, "learning_rate": 0.00019586751678224345, "loss": 1.0185, "step": 2081 }, { "epoch": 0.37072649572649574, "grad_norm": 0.44697675108909607, "learning_rate": 0.000195863533521116, "loss": 1.0462, "step": 2082 }, { "epoch": 0.3709045584045584, "grad_norm": 0.5122885704040527, "learning_rate": 0.00019585954838174176, "loss": 1.108, "step": 2083 }, { "epoch": 0.3710826210826211, "grad_norm": 0.486650288105011, "learning_rate": 0.0001958555613641988, "loss": 1.126, "step": 2084 }, { "epoch": 0.3712606837606838, "grad_norm": 0.5296297669410706, "learning_rate": 0.00019585157246856523, "loss": 1.1757, "step": 2085 }, { "epoch": 0.3714387464387464, "grad_norm": 0.4935721457004547, "learning_rate": 0.0001958475816949192, "loss": 1.1654, "step": 2086 }, { "epoch": 0.3716168091168091, "grad_norm": 0.6226509213447571, "learning_rate": 0.00019584358904333891, "loss": 1.1981, "step": 2087 }, { "epoch": 0.3717948717948718, "grad_norm": 0.44094228744506836, "learning_rate": 0.0001958395945139026, "loss": 0.8468, "step": 2088 }, { "epoch": 0.37197293447293445, "grad_norm": 0.5335884690284729, "learning_rate": 0.00019583559810668858, "loss": 1.1597, "step": 2089 }, { "epoch": 0.37215099715099714, "grad_norm": 0.4585414528846741, "learning_rate": 0.000195831599821775, "loss": 0.9343, "step": 2090 }, { "epoch": 0.37232905982905984, "grad_norm": 0.533087432384491, 
"learning_rate": 0.00019582759965924035, "loss": 1.1209, "step": 2091 }, { "epoch": 0.37250712250712253, "grad_norm": 0.5302683711051941, "learning_rate": 0.00019582359761916295, "loss": 1.236, "step": 2092 }, { "epoch": 0.3726851851851852, "grad_norm": 0.4522508382797241, "learning_rate": 0.00019581959370162122, "loss": 1.0196, "step": 2093 }, { "epoch": 0.37286324786324787, "grad_norm": 0.52391517162323, "learning_rate": 0.00019581558790669358, "loss": 1.0077, "step": 2094 }, { "epoch": 0.37304131054131057, "grad_norm": 0.47144797444343567, "learning_rate": 0.00019581158023445854, "loss": 1.0956, "step": 2095 }, { "epoch": 0.3732193732193732, "grad_norm": 0.4486723244190216, "learning_rate": 0.00019580757068499459, "loss": 0.8697, "step": 2096 }, { "epoch": 0.3733974358974359, "grad_norm": 0.4626580476760864, "learning_rate": 0.00019580355925838034, "loss": 0.8489, "step": 2097 }, { "epoch": 0.3735754985754986, "grad_norm": 0.5647920370101929, "learning_rate": 0.00019579954595469438, "loss": 1.1458, "step": 2098 }, { "epoch": 0.37375356125356124, "grad_norm": 0.4734349846839905, "learning_rate": 0.00019579553077401528, "loss": 1.1036, "step": 2099 }, { "epoch": 0.37393162393162394, "grad_norm": 0.5624295473098755, "learning_rate": 0.00019579151371642176, "loss": 0.9793, "step": 2100 }, { "epoch": 0.37410968660968663, "grad_norm": 0.47507283091545105, "learning_rate": 0.00019578749478199256, "loss": 1.0371, "step": 2101 }, { "epoch": 0.37428774928774927, "grad_norm": 0.550865113735199, "learning_rate": 0.00019578347397080633, "loss": 1.046, "step": 2102 }, { "epoch": 0.37446581196581197, "grad_norm": 0.5249403715133667, "learning_rate": 0.00019577945128294193, "loss": 1.3185, "step": 2103 }, { "epoch": 0.37464387464387466, "grad_norm": 0.4921024739742279, "learning_rate": 0.00019577542671847815, "loss": 1.0758, "step": 2104 }, { "epoch": 0.3748219373219373, "grad_norm": 0.5351784825325012, "learning_rate": 0.00019577140027749384, "loss": 1.067, "step": 2105 }, { 
"epoch": 0.375, "grad_norm": 0.44420507550239563, "learning_rate": 0.00019576737196006787, "loss": 1.1065, "step": 2106 }, { "epoch": 0.3751780626780627, "grad_norm": 0.531384289264679, "learning_rate": 0.0001957633417662792, "loss": 1.1634, "step": 2107 }, { "epoch": 0.37535612535612534, "grad_norm": 0.5167618989944458, "learning_rate": 0.00019575930969620677, "loss": 1.1646, "step": 2108 }, { "epoch": 0.37553418803418803, "grad_norm": 0.41487228870391846, "learning_rate": 0.0001957552757499296, "loss": 0.793, "step": 2109 }, { "epoch": 0.37571225071225073, "grad_norm": 0.5110787153244019, "learning_rate": 0.00019575123992752672, "loss": 1.1752, "step": 2110 }, { "epoch": 0.37589031339031337, "grad_norm": 0.4422051012516022, "learning_rate": 0.00019574720222907717, "loss": 1.0102, "step": 2111 }, { "epoch": 0.37606837606837606, "grad_norm": 0.4757538139820099, "learning_rate": 0.0001957431626546601, "loss": 1.0467, "step": 2112 }, { "epoch": 0.37624643874643876, "grad_norm": 0.4736764430999756, "learning_rate": 0.00019573912120435466, "loss": 1.3048, "step": 2113 }, { "epoch": 0.3764245014245014, "grad_norm": 0.49894335865974426, "learning_rate": 0.00019573507787824004, "loss": 1.0502, "step": 2114 }, { "epoch": 0.3766025641025641, "grad_norm": 0.48120981454849243, "learning_rate": 0.00019573103267639543, "loss": 1.2405, "step": 2115 }, { "epoch": 0.3767806267806268, "grad_norm": 0.4826737642288208, "learning_rate": 0.0001957269855989001, "loss": 1.1189, "step": 2116 }, { "epoch": 0.37695868945868943, "grad_norm": 0.4736921489238739, "learning_rate": 0.0001957229366458333, "loss": 1.2862, "step": 2117 }, { "epoch": 0.37713675213675213, "grad_norm": 0.3895208537578583, "learning_rate": 0.00019571888581727446, "loss": 1.0573, "step": 2118 }, { "epoch": 0.3773148148148148, "grad_norm": 0.5107510089874268, "learning_rate": 0.00019571483311330284, "loss": 1.2913, "step": 2119 }, { "epoch": 0.37749287749287747, "grad_norm": 0.4543241262435913, "learning_rate": 
0.00019571077853399794, "loss": 0.949, "step": 2120 }, { "epoch": 0.37767094017094016, "grad_norm": 0.46897491812705994, "learning_rate": 0.00019570672207943913, "loss": 1.2235, "step": 2121 }, { "epoch": 0.37784900284900286, "grad_norm": 0.4812130630016327, "learning_rate": 0.0001957026637497059, "loss": 0.8857, "step": 2122 }, { "epoch": 0.37802706552706555, "grad_norm": 0.47452476620674133, "learning_rate": 0.00019569860354487782, "loss": 1.0549, "step": 2123 }, { "epoch": 0.3782051282051282, "grad_norm": 0.49879950284957886, "learning_rate": 0.00019569454146503438, "loss": 1.0475, "step": 2124 }, { "epoch": 0.3783831908831909, "grad_norm": 0.4246445894241333, "learning_rate": 0.00019569047751025518, "loss": 0.8788, "step": 2125 }, { "epoch": 0.3785612535612536, "grad_norm": 0.4868565499782562, "learning_rate": 0.00019568641168061986, "loss": 1.1801, "step": 2126 }, { "epoch": 0.3787393162393162, "grad_norm": 0.46723654866218567, "learning_rate": 0.0001956823439762081, "loss": 1.1661, "step": 2127 }, { "epoch": 0.3789173789173789, "grad_norm": 0.4989059269428253, "learning_rate": 0.00019567827439709954, "loss": 1.3037, "step": 2128 }, { "epoch": 0.3790954415954416, "grad_norm": 0.441307932138443, "learning_rate": 0.00019567420294337395, "loss": 1.0197, "step": 2129 }, { "epoch": 0.37927350427350426, "grad_norm": 0.5200160145759583, "learning_rate": 0.0001956701296151111, "loss": 1.3366, "step": 2130 }, { "epoch": 0.37945156695156695, "grad_norm": 0.43610256910324097, "learning_rate": 0.00019566605441239082, "loss": 1.0148, "step": 2131 }, { "epoch": 0.37962962962962965, "grad_norm": 0.4160982370376587, "learning_rate": 0.00019566197733529293, "loss": 1.0758, "step": 2132 }, { "epoch": 0.3798076923076923, "grad_norm": 0.5007950663566589, "learning_rate": 0.00019565789838389726, "loss": 1.1937, "step": 2133 }, { "epoch": 0.379985754985755, "grad_norm": 0.4991525113582611, "learning_rate": 0.00019565381755828385, "loss": 1.1788, "step": 2134 }, { "epoch": 
0.3801638176638177, "grad_norm": 0.6313113570213318, "learning_rate": 0.00019564973485853258, "loss": 1.1241, "step": 2135 }, { "epoch": 0.3803418803418803, "grad_norm": 0.49736538529396057, "learning_rate": 0.0001956456502847234, "loss": 1.0299, "step": 2136 }, { "epoch": 0.380519943019943, "grad_norm": 0.4384380578994751, "learning_rate": 0.00019564156383693643, "loss": 1.132, "step": 2137 }, { "epoch": 0.3806980056980057, "grad_norm": 0.4696183502674103, "learning_rate": 0.00019563747551525168, "loss": 1.1145, "step": 2138 }, { "epoch": 0.38087606837606836, "grad_norm": 0.42039749026298523, "learning_rate": 0.0001956333853197493, "loss": 0.9549, "step": 2139 }, { "epoch": 0.38105413105413105, "grad_norm": 0.5547221899032593, "learning_rate": 0.00019562929325050936, "loss": 1.0476, "step": 2140 }, { "epoch": 0.38123219373219375, "grad_norm": 0.4803301692008972, "learning_rate": 0.0001956251993076121, "loss": 1.1285, "step": 2141 }, { "epoch": 0.3814102564102564, "grad_norm": 0.609501838684082, "learning_rate": 0.00019562110349113766, "loss": 1.2375, "step": 2142 }, { "epoch": 0.3815883190883191, "grad_norm": 0.5134759545326233, "learning_rate": 0.00019561700580116639, "loss": 1.0895, "step": 2143 }, { "epoch": 0.3817663817663818, "grad_norm": 0.5086711049079895, "learning_rate": 0.00019561290623777846, "loss": 1.1139, "step": 2144 }, { "epoch": 0.3819444444444444, "grad_norm": 0.5371596813201904, "learning_rate": 0.00019560880480105428, "loss": 0.9302, "step": 2145 }, { "epoch": 0.3821225071225071, "grad_norm": 0.4966319799423218, "learning_rate": 0.00019560470149107418, "loss": 1.2485, "step": 2146 }, { "epoch": 0.3823005698005698, "grad_norm": 0.5296950340270996, "learning_rate": 0.00019560059630791855, "loss": 1.4449, "step": 2147 }, { "epoch": 0.38247863247863245, "grad_norm": 0.5564194321632385, "learning_rate": 0.00019559648925166783, "loss": 1.0817, "step": 2148 }, { "epoch": 0.38265669515669515, "grad_norm": 0.5763841867446899, "learning_rate": 
0.0001955923803224025, "loss": 1.1915, "step": 2149 }, { "epoch": 0.38283475783475784, "grad_norm": 0.4782295823097229, "learning_rate": 0.00019558826952020304, "loss": 1.1317, "step": 2150 }, { "epoch": 0.38301282051282054, "grad_norm": 0.4876856207847595, "learning_rate": 0.00019558415684515002, "loss": 1.2113, "step": 2151 }, { "epoch": 0.3831908831908832, "grad_norm": 0.4894421398639679, "learning_rate": 0.00019558004229732398, "loss": 1.0761, "step": 2152 }, { "epoch": 0.3833689458689459, "grad_norm": 0.47914227843284607, "learning_rate": 0.0001955759258768056, "loss": 1.0869, "step": 2153 }, { "epoch": 0.38354700854700857, "grad_norm": 0.43933629989624023, "learning_rate": 0.00019557180758367543, "loss": 1.0581, "step": 2154 }, { "epoch": 0.3837250712250712, "grad_norm": 0.4078103005886078, "learning_rate": 0.00019556768741801428, "loss": 1.065, "step": 2155 }, { "epoch": 0.3839031339031339, "grad_norm": 0.5112793445587158, "learning_rate": 0.00019556356537990278, "loss": 1.2023, "step": 2156 }, { "epoch": 0.3840811965811966, "grad_norm": 0.4699678122997284, "learning_rate": 0.00019555944146942177, "loss": 1.2459, "step": 2157 }, { "epoch": 0.38425925925925924, "grad_norm": 0.4723528027534485, "learning_rate": 0.00019555531568665198, "loss": 1.2204, "step": 2158 }, { "epoch": 0.38443732193732194, "grad_norm": 0.4648225009441376, "learning_rate": 0.00019555118803167432, "loss": 1.1355, "step": 2159 }, { "epoch": 0.38461538461538464, "grad_norm": 0.49861815571784973, "learning_rate": 0.00019554705850456961, "loss": 1.1301, "step": 2160 }, { "epoch": 0.3847934472934473, "grad_norm": 0.4076344966888428, "learning_rate": 0.00019554292710541874, "loss": 0.8997, "step": 2161 }, { "epoch": 0.38497150997151, "grad_norm": 0.5510796308517456, "learning_rate": 0.00019553879383430272, "loss": 1.0594, "step": 2162 }, { "epoch": 0.38514957264957267, "grad_norm": 0.55793696641922, "learning_rate": 0.00019553465869130249, "loss": 1.1284, "step": 2163 }, { "epoch": 
0.3853276353276353, "grad_norm": 0.5096491575241089, "learning_rate": 0.00019553052167649906, "loss": 1.0419, "step": 2164 }, { "epoch": 0.385505698005698, "grad_norm": 0.49077361822128296, "learning_rate": 0.0001955263827899735, "loss": 1.1632, "step": 2165 }, { "epoch": 0.3856837606837607, "grad_norm": 0.5546894073486328, "learning_rate": 0.00019552224203180693, "loss": 1.1487, "step": 2166 }, { "epoch": 0.38586182336182334, "grad_norm": 0.4930037260055542, "learning_rate": 0.00019551809940208047, "loss": 1.2668, "step": 2167 }, { "epoch": 0.38603988603988604, "grad_norm": 0.5600671172142029, "learning_rate": 0.00019551395490087525, "loss": 1.3988, "step": 2168 }, { "epoch": 0.38621794871794873, "grad_norm": 0.45897629857063293, "learning_rate": 0.0001955098085282725, "loss": 0.7792, "step": 2169 }, { "epoch": 0.3863960113960114, "grad_norm": 0.46138936281204224, "learning_rate": 0.00019550566028435346, "loss": 1.1749, "step": 2170 }, { "epoch": 0.38657407407407407, "grad_norm": 0.5136167407035828, "learning_rate": 0.0001955015101691994, "loss": 1.0153, "step": 2171 }, { "epoch": 0.38675213675213677, "grad_norm": 0.4886440336704254, "learning_rate": 0.00019549735818289165, "loss": 1.0006, "step": 2172 }, { "epoch": 0.3869301994301994, "grad_norm": 0.4339776635169983, "learning_rate": 0.00019549320432551154, "loss": 1.0109, "step": 2173 }, { "epoch": 0.3871082621082621, "grad_norm": 0.48729443550109863, "learning_rate": 0.00019548904859714044, "loss": 1.2016, "step": 2174 }, { "epoch": 0.3872863247863248, "grad_norm": 0.5128757357597351, "learning_rate": 0.0001954848909978598, "loss": 1.085, "step": 2175 }, { "epoch": 0.38746438746438744, "grad_norm": 0.49636292457580566, "learning_rate": 0.0001954807315277511, "loss": 1.0671, "step": 2176 }, { "epoch": 0.38764245014245013, "grad_norm": 0.4946988821029663, "learning_rate": 0.00019547657018689578, "loss": 1.2091, "step": 2177 }, { "epoch": 0.38782051282051283, "grad_norm": 0.49004554748535156, "learning_rate": 
0.00019547240697537544, "loss": 1.0241, "step": 2178 }, { "epoch": 0.38799857549857547, "grad_norm": 0.48750075697898865, "learning_rate": 0.00019546824189327157, "loss": 1.1082, "step": 2179 }, { "epoch": 0.38817663817663817, "grad_norm": 0.47726166248321533, "learning_rate": 0.00019546407494066585, "loss": 1.1275, "step": 2180 }, { "epoch": 0.38835470085470086, "grad_norm": 0.5253444910049438, "learning_rate": 0.00019545990611763986, "loss": 1.0164, "step": 2181 }, { "epoch": 0.38853276353276356, "grad_norm": 0.4470371603965759, "learning_rate": 0.00019545573542427533, "loss": 1.0138, "step": 2182 }, { "epoch": 0.3887108262108262, "grad_norm": 0.6645087599754333, "learning_rate": 0.00019545156286065397, "loss": 1.0884, "step": 2183 }, { "epoch": 0.3888888888888889, "grad_norm": 0.498775839805603, "learning_rate": 0.0001954473884268575, "loss": 1.1035, "step": 2184 }, { "epoch": 0.3890669515669516, "grad_norm": 0.5830566883087158, "learning_rate": 0.00019544321212296772, "loss": 1.1665, "step": 2185 }, { "epoch": 0.38924501424501423, "grad_norm": 0.48162809014320374, "learning_rate": 0.00019543903394906646, "loss": 1.1035, "step": 2186 }, { "epoch": 0.3894230769230769, "grad_norm": 0.46334075927734375, "learning_rate": 0.0001954348539052356, "loss": 0.9764, "step": 2187 }, { "epoch": 0.3896011396011396, "grad_norm": 0.6343515515327454, "learning_rate": 0.00019543067199155704, "loss": 0.9474, "step": 2188 }, { "epoch": 0.38977920227920226, "grad_norm": 0.4867806136608124, "learning_rate": 0.0001954264882081127, "loss": 1.1161, "step": 2189 }, { "epoch": 0.38995726495726496, "grad_norm": 0.49305734038352966, "learning_rate": 0.00019542230255498454, "loss": 1.1825, "step": 2190 }, { "epoch": 0.39013532763532766, "grad_norm": 0.518465518951416, "learning_rate": 0.00019541811503225457, "loss": 1.0695, "step": 2191 }, { "epoch": 0.3903133903133903, "grad_norm": 0.4892457127571106, "learning_rate": 0.00019541392564000488, "loss": 1.3113, "step": 2192 }, { "epoch": 
0.390491452991453, "grad_norm": 0.5150920152664185, "learning_rate": 0.00019540973437831753, "loss": 1.0735, "step": 2193 }, { "epoch": 0.3906695156695157, "grad_norm": 0.5414708256721497, "learning_rate": 0.00019540554124727462, "loss": 1.0773, "step": 2194 }, { "epoch": 0.39084757834757833, "grad_norm": 0.49826398491859436, "learning_rate": 0.0001954013462469583, "loss": 1.0542, "step": 2195 }, { "epoch": 0.391025641025641, "grad_norm": 0.5203596949577332, "learning_rate": 0.0001953971493774508, "loss": 1.178, "step": 2196 }, { "epoch": 0.3912037037037037, "grad_norm": 0.45095738768577576, "learning_rate": 0.00019539295063883432, "loss": 1.1254, "step": 2197 }, { "epoch": 0.39138176638176636, "grad_norm": 0.4938857853412628, "learning_rate": 0.00019538875003119113, "loss": 1.1061, "step": 2198 }, { "epoch": 0.39155982905982906, "grad_norm": 0.5260919332504272, "learning_rate": 0.00019538454755460354, "loss": 1.3292, "step": 2199 }, { "epoch": 0.39173789173789175, "grad_norm": 0.46527108550071716, "learning_rate": 0.00019538034320915388, "loss": 1.2074, "step": 2200 }, { "epoch": 0.3919159544159544, "grad_norm": 0.5608304738998413, "learning_rate": 0.00019537613699492453, "loss": 1.0385, "step": 2201 }, { "epoch": 0.3920940170940171, "grad_norm": 0.5056684613227844, "learning_rate": 0.00019537192891199792, "loss": 1.1513, "step": 2202 }, { "epoch": 0.3922720797720798, "grad_norm": 0.3764426112174988, "learning_rate": 0.00019536771896045644, "loss": 0.8966, "step": 2203 }, { "epoch": 0.3924501424501424, "grad_norm": 0.4983638823032379, "learning_rate": 0.0001953635071403827, "loss": 1.097, "step": 2204 }, { "epoch": 0.3926282051282051, "grad_norm": 0.5733919739723206, "learning_rate": 0.00019535929345185904, "loss": 1.4992, "step": 2205 }, { "epoch": 0.3928062678062678, "grad_norm": 0.632064163684845, "learning_rate": 0.00019535507789496817, "loss": 1.0611, "step": 2206 }, { "epoch": 0.39298433048433046, "grad_norm": 0.409978449344635, "learning_rate": 
0.00019535086046979262, "loss": 0.7172, "step": 2207 }, { "epoch": 0.39316239316239315, "grad_norm": 0.40910813212394714, "learning_rate": 0.00019534664117641502, "loss": 0.8803, "step": 2208 }, { "epoch": 0.39334045584045585, "grad_norm": 0.4696179926395416, "learning_rate": 0.00019534242001491807, "loss": 1.1551, "step": 2209 }, { "epoch": 0.39351851851851855, "grad_norm": 0.538425862789154, "learning_rate": 0.00019533819698538444, "loss": 1.1296, "step": 2210 }, { "epoch": 0.3936965811965812, "grad_norm": 0.5913630723953247, "learning_rate": 0.00019533397208789692, "loss": 0.9757, "step": 2211 }, { "epoch": 0.3938746438746439, "grad_norm": 0.5649870038032532, "learning_rate": 0.00019532974532253822, "loss": 0.9976, "step": 2212 }, { "epoch": 0.3940527065527066, "grad_norm": 0.5012063980102539, "learning_rate": 0.00019532551668939121, "loss": 0.9969, "step": 2213 }, { "epoch": 0.3942307692307692, "grad_norm": 0.5098594427108765, "learning_rate": 0.00019532128618853872, "loss": 1.1229, "step": 2214 }, { "epoch": 0.3944088319088319, "grad_norm": 0.4753342568874359, "learning_rate": 0.0001953170538200636, "loss": 1.0808, "step": 2215 }, { "epoch": 0.3945868945868946, "grad_norm": 0.4770098626613617, "learning_rate": 0.00019531281958404888, "loss": 1.0656, "step": 2216 }, { "epoch": 0.39476495726495725, "grad_norm": 0.6007979512214661, "learning_rate": 0.00019530858348057746, "loss": 1.0093, "step": 2217 }, { "epoch": 0.39494301994301995, "grad_norm": 0.4501650929450989, "learning_rate": 0.00019530434550973227, "loss": 0.8557, "step": 2218 }, { "epoch": 0.39512108262108264, "grad_norm": 0.5123980641365051, "learning_rate": 0.00019530010567159645, "loss": 0.9833, "step": 2219 }, { "epoch": 0.3952991452991453, "grad_norm": 0.4623969495296478, "learning_rate": 0.000195295863966253, "loss": 0.913, "step": 2220 }, { "epoch": 0.395477207977208, "grad_norm": 0.4341880679130554, "learning_rate": 0.0001952916203937851, "loss": 1.0234, "step": 2221 }, { "epoch": 
0.3956552706552707, "grad_norm": 0.5935006141662598, "learning_rate": 0.00019528737495427581, "loss": 1.061, "step": 2222 }, { "epoch": 0.3958333333333333, "grad_norm": 0.44835174083709717, "learning_rate": 0.00019528312764780837, "loss": 1.1567, "step": 2223 }, { "epoch": 0.396011396011396, "grad_norm": 0.5476976633071899, "learning_rate": 0.00019527887847446595, "loss": 1.2304, "step": 2224 }, { "epoch": 0.3961894586894587, "grad_norm": 0.4487939774990082, "learning_rate": 0.00019527462743433187, "loss": 1.1813, "step": 2225 }, { "epoch": 0.39636752136752135, "grad_norm": 0.4053241014480591, "learning_rate": 0.00019527037452748936, "loss": 0.7899, "step": 2226 }, { "epoch": 0.39654558404558404, "grad_norm": 0.534570574760437, "learning_rate": 0.00019526611975402176, "loss": 1.0681, "step": 2227 }, { "epoch": 0.39672364672364674, "grad_norm": 0.46096158027648926, "learning_rate": 0.00019526186311401246, "loss": 0.9234, "step": 2228 }, { "epoch": 0.3969017094017094, "grad_norm": 0.47363516688346863, "learning_rate": 0.00019525760460754483, "loss": 1.0197, "step": 2229 }, { "epoch": 0.3970797720797721, "grad_norm": 0.46317258477211, "learning_rate": 0.00019525334423470234, "loss": 1.2103, "step": 2230 }, { "epoch": 0.39725783475783477, "grad_norm": 0.4924237132072449, "learning_rate": 0.0001952490819955684, "loss": 1.3299, "step": 2231 }, { "epoch": 0.3974358974358974, "grad_norm": 0.5419978499412537, "learning_rate": 0.0001952448178902266, "loss": 1.2526, "step": 2232 }, { "epoch": 0.3976139601139601, "grad_norm": 0.5003267526626587, "learning_rate": 0.00019524055191876043, "loss": 1.1073, "step": 2233 }, { "epoch": 0.3977920227920228, "grad_norm": 0.621789276599884, "learning_rate": 0.00019523628408125347, "loss": 1.3409, "step": 2234 }, { "epoch": 0.39797008547008544, "grad_norm": 0.44235602021217346, "learning_rate": 0.0001952320143777894, "loss": 0.9799, "step": 2235 }, { "epoch": 0.39814814814814814, "grad_norm": 0.49954718351364136, "learning_rate": 
0.0001952277428084518, "loss": 1.2227, "step": 2236 }, { "epoch": 0.39832621082621084, "grad_norm": 0.5113739967346191, "learning_rate": 0.00019522346937332443, "loss": 1.1644, "step": 2237 }, { "epoch": 0.39850427350427353, "grad_norm": 0.5026139616966248, "learning_rate": 0.00019521919407249096, "loss": 1.0823, "step": 2238 }, { "epoch": 0.39868233618233617, "grad_norm": 0.4943205714225769, "learning_rate": 0.0001952149169060352, "loss": 1.0961, "step": 2239 }, { "epoch": 0.39886039886039887, "grad_norm": 0.4680631458759308, "learning_rate": 0.00019521063787404094, "loss": 0.9787, "step": 2240 }, { "epoch": 0.39903846153846156, "grad_norm": 0.5511566400527954, "learning_rate": 0.00019520635697659202, "loss": 1.2543, "step": 2241 }, { "epoch": 0.3992165242165242, "grad_norm": 0.5494263172149658, "learning_rate": 0.00019520207421377229, "loss": 1.1978, "step": 2242 }, { "epoch": 0.3993945868945869, "grad_norm": 0.4850340485572815, "learning_rate": 0.00019519778958566568, "loss": 0.8531, "step": 2243 }, { "epoch": 0.3995726495726496, "grad_norm": 0.47168150544166565, "learning_rate": 0.00019519350309235613, "loss": 1.0746, "step": 2244 }, { "epoch": 0.39975071225071224, "grad_norm": 0.571133553981781, "learning_rate": 0.00019518921473392765, "loss": 1.2984, "step": 2245 }, { "epoch": 0.39992877492877493, "grad_norm": 0.4636089503765106, "learning_rate": 0.00019518492451046427, "loss": 1.019, "step": 2246 }, { "epoch": 0.40010683760683763, "grad_norm": 0.4573518931865692, "learning_rate": 0.00019518063242205, "loss": 1.1042, "step": 2247 }, { "epoch": 0.40028490028490027, "grad_norm": 0.49098989367485046, "learning_rate": 0.00019517633846876894, "loss": 1.1224, "step": 2248 }, { "epoch": 0.40046296296296297, "grad_norm": 0.5475491881370544, "learning_rate": 0.00019517204265070523, "loss": 1.0984, "step": 2249 }, { "epoch": 0.40064102564102566, "grad_norm": 0.45498281717300415, "learning_rate": 0.00019516774496794307, "loss": 0.8883, "step": 2250 }, { "epoch": 
0.4008190883190883, "grad_norm": 0.4908423125743866, "learning_rate": 0.00019516344542056666, "loss": 1.328, "step": 2251 }, { "epoch": 0.400997150997151, "grad_norm": 0.5474920272827148, "learning_rate": 0.0001951591440086602, "loss": 1.3825, "step": 2252 }, { "epoch": 0.4011752136752137, "grad_norm": 0.5165615081787109, "learning_rate": 0.000195154840732308, "loss": 1.33, "step": 2253 }, { "epoch": 0.40135327635327633, "grad_norm": 0.5185585021972656, "learning_rate": 0.00019515053559159435, "loss": 1.1689, "step": 2254 }, { "epoch": 0.40153133903133903, "grad_norm": 0.5468854904174805, "learning_rate": 0.00019514622858660363, "loss": 1.2708, "step": 2255 }, { "epoch": 0.4017094017094017, "grad_norm": 0.47556906938552856, "learning_rate": 0.0001951419197174202, "loss": 1.0488, "step": 2256 }, { "epoch": 0.40188746438746437, "grad_norm": 0.5521323084831238, "learning_rate": 0.0001951376089841285, "loss": 1.0868, "step": 2257 }, { "epoch": 0.40206552706552706, "grad_norm": 0.6029638051986694, "learning_rate": 0.00019513329638681296, "loss": 1.1735, "step": 2258 }, { "epoch": 0.40224358974358976, "grad_norm": 0.4897766411304474, "learning_rate": 0.00019512898192555812, "loss": 1.1687, "step": 2259 }, { "epoch": 0.4024216524216524, "grad_norm": 0.45527184009552, "learning_rate": 0.00019512466560044848, "loss": 1.0352, "step": 2260 }, { "epoch": 0.4025997150997151, "grad_norm": 0.5025625824928284, "learning_rate": 0.00019512034741156863, "loss": 1.2503, "step": 2261 }, { "epoch": 0.4027777777777778, "grad_norm": 0.46415451169013977, "learning_rate": 0.00019511602735900317, "loss": 1.032, "step": 2262 }, { "epoch": 0.40295584045584043, "grad_norm": 0.4812934398651123, "learning_rate": 0.00019511170544283678, "loss": 1.0523, "step": 2263 }, { "epoch": 0.4031339031339031, "grad_norm": 0.49937039613723755, "learning_rate": 0.00019510738166315404, "loss": 1.2238, "step": 2264 }, { "epoch": 0.4033119658119658, "grad_norm": 0.5428698062896729, "learning_rate": 
0.00019510305602003975, "loss": 1.0361, "step": 2265 }, { "epoch": 0.40349002849002846, "grad_norm": 0.44836854934692383, "learning_rate": 0.0001950987285135786, "loss": 1.169, "step": 2266 }, { "epoch": 0.40366809116809116, "grad_norm": 0.5071489214897156, "learning_rate": 0.00019509439914385549, "loss": 1.1567, "step": 2267 }, { "epoch": 0.40384615384615385, "grad_norm": 0.5204613208770752, "learning_rate": 0.00019509006791095513, "loss": 0.9949, "step": 2268 }, { "epoch": 0.40402421652421655, "grad_norm": 0.4583234488964081, "learning_rate": 0.00019508573481496238, "loss": 0.9051, "step": 2269 }, { "epoch": 0.4042022792022792, "grad_norm": 0.5436791181564331, "learning_rate": 0.00019508139985596222, "loss": 1.3239, "step": 2270 }, { "epoch": 0.4043803418803419, "grad_norm": 0.48774269223213196, "learning_rate": 0.00019507706303403954, "loss": 1.2102, "step": 2271 }, { "epoch": 0.4045584045584046, "grad_norm": 0.4742540717124939, "learning_rate": 0.00019507272434927933, "loss": 1.1137, "step": 2272 }, { "epoch": 0.4047364672364672, "grad_norm": 0.531148374080658, "learning_rate": 0.00019506838380176658, "loss": 1.3162, "step": 2273 }, { "epoch": 0.4049145299145299, "grad_norm": 0.5002314448356628, "learning_rate": 0.0001950640413915863, "loss": 1.0743, "step": 2274 }, { "epoch": 0.4050925925925926, "grad_norm": 0.39826446771621704, "learning_rate": 0.00019505969711882366, "loss": 0.7698, "step": 2275 }, { "epoch": 0.40527065527065526, "grad_norm": 0.5177471041679382, "learning_rate": 0.00019505535098356371, "loss": 1.1821, "step": 2276 }, { "epoch": 0.40544871794871795, "grad_norm": 0.467241108417511, "learning_rate": 0.00019505100298589158, "loss": 0.8036, "step": 2277 }, { "epoch": 0.40562678062678065, "grad_norm": 0.43711844086647034, "learning_rate": 0.00019504665312589255, "loss": 0.8667, "step": 2278 }, { "epoch": 0.4058048433048433, "grad_norm": 0.4929116368293762, "learning_rate": 0.00019504230140365177, "loss": 1.1279, "step": 2279 }, { "epoch": 
0.405982905982906, "grad_norm": 0.5279183983802795, "learning_rate": 0.00019503794781925452, "loss": 1.1318, "step": 2280 }, { "epoch": 0.4061609686609687, "grad_norm": 0.549217939376831, "learning_rate": 0.00019503359237278608, "loss": 1.2007, "step": 2281 }, { "epoch": 0.4063390313390313, "grad_norm": 0.5485880374908447, "learning_rate": 0.00019502923506433187, "loss": 1.1079, "step": 2282 }, { "epoch": 0.406517094017094, "grad_norm": 0.48379644751548767, "learning_rate": 0.0001950248758939772, "loss": 0.9978, "step": 2283 }, { "epoch": 0.4066951566951567, "grad_norm": 0.5943657755851746, "learning_rate": 0.00019502051486180744, "loss": 1.0466, "step": 2284 }, { "epoch": 0.40687321937321935, "grad_norm": 0.5721273422241211, "learning_rate": 0.00019501615196790812, "loss": 1.2674, "step": 2285 }, { "epoch": 0.40705128205128205, "grad_norm": 0.47624221444129944, "learning_rate": 0.00019501178721236464, "loss": 1.089, "step": 2286 }, { "epoch": 0.40722934472934474, "grad_norm": 0.5091297030448914, "learning_rate": 0.0001950074205952626, "loss": 1.2035, "step": 2287 }, { "epoch": 0.4074074074074074, "grad_norm": 0.45206236839294434, "learning_rate": 0.0001950030521166875, "loss": 0.9188, "step": 2288 }, { "epoch": 0.4075854700854701, "grad_norm": 0.5563844442367554, "learning_rate": 0.00019499868177672497, "loss": 1.3444, "step": 2289 }, { "epoch": 0.4077635327635328, "grad_norm": 0.4971138536930084, "learning_rate": 0.00019499430957546055, "loss": 1.1615, "step": 2290 }, { "epoch": 0.4079415954415954, "grad_norm": 0.49355944991111755, "learning_rate": 0.00019498993551298, "loss": 1.1528, "step": 2291 }, { "epoch": 0.4081196581196581, "grad_norm": 0.534705638885498, "learning_rate": 0.000194985559589369, "loss": 1.197, "step": 2292 }, { "epoch": 0.4082977207977208, "grad_norm": 0.5113020539283752, "learning_rate": 0.0001949811818047133, "loss": 1.109, "step": 2293 }, { "epoch": 0.40847578347578345, "grad_norm": 0.4823366701602936, "learning_rate": 
0.00019497680215909858, "loss": 1.168, "step": 2294 }, { "epoch": 0.40865384615384615, "grad_norm": 0.500792920589447, "learning_rate": 0.00019497242065261077, "loss": 1.1567, "step": 2295 }, { "epoch": 0.40883190883190884, "grad_norm": 0.5047918558120728, "learning_rate": 0.00019496803728533566, "loss": 1.0515, "step": 2296 }, { "epoch": 0.40900997150997154, "grad_norm": 0.474624365568161, "learning_rate": 0.00019496365205735913, "loss": 1.1747, "step": 2297 }, { "epoch": 0.4091880341880342, "grad_norm": 0.5522183179855347, "learning_rate": 0.0001949592649687671, "loss": 1.1506, "step": 2298 }, { "epoch": 0.4093660968660969, "grad_norm": 0.4526083767414093, "learning_rate": 0.00019495487601964553, "loss": 0.9968, "step": 2299 }, { "epoch": 0.40954415954415957, "grad_norm": 0.545845091342926, "learning_rate": 0.00019495048521008044, "loss": 1.146, "step": 2300 }, { "epoch": 0.4097222222222222, "grad_norm": 0.5475544333457947, "learning_rate": 0.00019494609254015784, "loss": 1.0101, "step": 2301 }, { "epoch": 0.4099002849002849, "grad_norm": 0.43419042229652405, "learning_rate": 0.00019494169800996373, "loss": 1.065, "step": 2302 }, { "epoch": 0.4100783475783476, "grad_norm": 0.44998374581336975, "learning_rate": 0.00019493730161958435, "loss": 0.9948, "step": 2303 }, { "epoch": 0.41025641025641024, "grad_norm": 0.5401661992073059, "learning_rate": 0.0001949329033691057, "loss": 1.0473, "step": 2304 }, { "epoch": 0.41043447293447294, "grad_norm": 0.48064103722572327, "learning_rate": 0.00019492850325861404, "loss": 1.0486, "step": 2305 }, { "epoch": 0.41061253561253563, "grad_norm": 0.5398300290107727, "learning_rate": 0.00019492410128819557, "loss": 1.0314, "step": 2306 }, { "epoch": 0.4107905982905983, "grad_norm": 0.4771125912666321, "learning_rate": 0.0001949196974579365, "loss": 0.9855, "step": 2307 }, { "epoch": 0.41096866096866097, "grad_norm": 0.5375809669494629, "learning_rate": 0.00019491529176792315, "loss": 1.0777, "step": 2308 }, { "epoch": 
0.41114672364672367, "grad_norm": 0.48424094915390015, "learning_rate": 0.00019491088421824183, "loss": 1.0751, "step": 2309 }, { "epoch": 0.4113247863247863, "grad_norm": 0.5054880380630493, "learning_rate": 0.00019490647480897887, "loss": 1.2457, "step": 2310 }, { "epoch": 0.411502849002849, "grad_norm": 0.47118356823921204, "learning_rate": 0.0001949020635402207, "loss": 1.0445, "step": 2311 }, { "epoch": 0.4116809116809117, "grad_norm": 0.47171851992607117, "learning_rate": 0.00019489765041205375, "loss": 1.0062, "step": 2312 }, { "epoch": 0.41185897435897434, "grad_norm": 0.5703238844871521, "learning_rate": 0.00019489323542456447, "loss": 1.5639, "step": 2313 }, { "epoch": 0.41203703703703703, "grad_norm": 0.5045075416564941, "learning_rate": 0.00019488881857783935, "loss": 1.1665, "step": 2314 }, { "epoch": 0.41221509971509973, "grad_norm": 0.46835362911224365, "learning_rate": 0.00019488439987196495, "loss": 1.2078, "step": 2315 }, { "epoch": 0.41239316239316237, "grad_norm": 0.5187196731567383, "learning_rate": 0.00019487997930702785, "loss": 1.1049, "step": 2316 }, { "epoch": 0.41257122507122507, "grad_norm": 0.5190554857254028, "learning_rate": 0.00019487555688311463, "loss": 1.331, "step": 2317 }, { "epoch": 0.41274928774928776, "grad_norm": 0.7394969463348389, "learning_rate": 0.00019487113260031197, "loss": 0.9646, "step": 2318 }, { "epoch": 0.4129273504273504, "grad_norm": 0.532982349395752, "learning_rate": 0.00019486670645870656, "loss": 1.166, "step": 2319 }, { "epoch": 0.4131054131054131, "grad_norm": 0.48659515380859375, "learning_rate": 0.00019486227845838509, "loss": 1.0016, "step": 2320 }, { "epoch": 0.4132834757834758, "grad_norm": 0.5364453196525574, "learning_rate": 0.00019485784859943434, "loss": 1.3877, "step": 2321 }, { "epoch": 0.41346153846153844, "grad_norm": 0.49788740277290344, "learning_rate": 0.0001948534168819411, "loss": 1.2949, "step": 2322 }, { "epoch": 0.41363960113960113, "grad_norm": 0.5125377774238586, "learning_rate": 
0.00019484898330599217, "loss": 0.9769, "step": 2323 }, { "epoch": 0.41381766381766383, "grad_norm": 0.5434861779212952, "learning_rate": 0.00019484454787167447, "loss": 1.254, "step": 2324 }, { "epoch": 0.41399572649572647, "grad_norm": 0.5324583053588867, "learning_rate": 0.00019484011057907487, "loss": 0.9788, "step": 2325 }, { "epoch": 0.41417378917378916, "grad_norm": 0.4806961715221405, "learning_rate": 0.00019483567142828033, "loss": 1.0089, "step": 2326 }, { "epoch": 0.41435185185185186, "grad_norm": 0.5152947306632996, "learning_rate": 0.0001948312304193778, "loss": 1.15, "step": 2327 }, { "epoch": 0.41452991452991456, "grad_norm": 0.6030138731002808, "learning_rate": 0.0001948267875524543, "loss": 1.196, "step": 2328 }, { "epoch": 0.4147079772079772, "grad_norm": 0.4504946768283844, "learning_rate": 0.0001948223428275969, "loss": 0.8742, "step": 2329 }, { "epoch": 0.4148860398860399, "grad_norm": 0.5195745825767517, "learning_rate": 0.00019481789624489263, "loss": 1.0104, "step": 2330 }, { "epoch": 0.4150641025641026, "grad_norm": 0.5269250869750977, "learning_rate": 0.0001948134478044287, "loss": 1.2284, "step": 2331 }, { "epoch": 0.41524216524216523, "grad_norm": 0.5302315354347229, "learning_rate": 0.00019480899750629218, "loss": 1.1374, "step": 2332 }, { "epoch": 0.4154202279202279, "grad_norm": 0.5501471161842346, "learning_rate": 0.0001948045453505703, "loss": 1.214, "step": 2333 }, { "epoch": 0.4155982905982906, "grad_norm": 0.4674588739871979, "learning_rate": 0.0001948000913373503, "loss": 1.0568, "step": 2334 }, { "epoch": 0.41577635327635326, "grad_norm": 0.5262266993522644, "learning_rate": 0.0001947956354667195, "loss": 1.111, "step": 2335 }, { "epoch": 0.41595441595441596, "grad_norm": 0.4549071788787842, "learning_rate": 0.00019479117773876507, "loss": 1.2655, "step": 2336 }, { "epoch": 0.41613247863247865, "grad_norm": 0.48897311091423035, "learning_rate": 0.00019478671815357447, "loss": 1.0543, "step": 2337 }, { "epoch": 
0.4163105413105413, "grad_norm": 0.5544867515563965, "learning_rate": 0.000194782256711235, "loss": 1.2276, "step": 2338 }, { "epoch": 0.416488603988604, "grad_norm": 0.5050773024559021, "learning_rate": 0.0001947777934118341, "loss": 0.9781, "step": 2339 }, { "epoch": 0.4166666666666667, "grad_norm": 0.4831899106502533, "learning_rate": 0.00019477332825545925, "loss": 1.0213, "step": 2340 }, { "epoch": 0.4168447293447293, "grad_norm": 0.5392552614212036, "learning_rate": 0.0001947688612421979, "loss": 1.3251, "step": 2341 }, { "epoch": 0.417022792022792, "grad_norm": 0.5003608465194702, "learning_rate": 0.00019476439237213754, "loss": 1.0714, "step": 2342 }, { "epoch": 0.4172008547008547, "grad_norm": 0.5016986727714539, "learning_rate": 0.00019475992164536582, "loss": 1.0656, "step": 2343 }, { "epoch": 0.41737891737891736, "grad_norm": 0.5139234066009521, "learning_rate": 0.00019475544906197024, "loss": 1.1317, "step": 2344 }, { "epoch": 0.41755698005698005, "grad_norm": 0.582478940486908, "learning_rate": 0.00019475097462203847, "loss": 1.4209, "step": 2345 }, { "epoch": 0.41773504273504275, "grad_norm": 0.5248767137527466, "learning_rate": 0.00019474649832565823, "loss": 1.2965, "step": 2346 }, { "epoch": 0.4179131054131054, "grad_norm": 0.4977390170097351, "learning_rate": 0.00019474202017291713, "loss": 1.3319, "step": 2347 }, { "epoch": 0.4180911680911681, "grad_norm": 0.4868984818458557, "learning_rate": 0.00019473754016390298, "loss": 1.0595, "step": 2348 }, { "epoch": 0.4182692307692308, "grad_norm": 0.5965346693992615, "learning_rate": 0.00019473305829870353, "loss": 1.2289, "step": 2349 }, { "epoch": 0.4184472934472934, "grad_norm": 0.46590209007263184, "learning_rate": 0.0001947285745774066, "loss": 1.0468, "step": 2350 }, { "epoch": 0.4186253561253561, "grad_norm": 0.497811883687973, "learning_rate": 0.0001947240890001, "loss": 1.1247, "step": 2351 }, { "epoch": 0.4188034188034188, "grad_norm": 0.5348289012908936, "learning_rate": 
0.0001947196015668717, "loss": 0.9496, "step": 2352 }, { "epoch": 0.41898148148148145, "grad_norm": 0.5086174607276917, "learning_rate": 0.0001947151122778095, "loss": 0.8869, "step": 2353 }, { "epoch": 0.41915954415954415, "grad_norm": 0.4844677150249481, "learning_rate": 0.00019471062113300146, "loss": 0.847, "step": 2354 }, { "epoch": 0.41933760683760685, "grad_norm": 0.5395866632461548, "learning_rate": 0.00019470612813253556, "loss": 0.9684, "step": 2355 }, { "epoch": 0.41951566951566954, "grad_norm": 0.479403018951416, "learning_rate": 0.0001947016332764998, "loss": 1.0532, "step": 2356 }, { "epoch": 0.4196937321937322, "grad_norm": 0.5499961376190186, "learning_rate": 0.00019469713656498227, "loss": 1.2565, "step": 2357 }, { "epoch": 0.4198717948717949, "grad_norm": 0.5865352153778076, "learning_rate": 0.00019469263799807104, "loss": 1.1349, "step": 2358 }, { "epoch": 0.4200498575498576, "grad_norm": 0.4454309046268463, "learning_rate": 0.00019468813757585432, "loss": 0.9631, "step": 2359 }, { "epoch": 0.4202279202279202, "grad_norm": 0.48426875472068787, "learning_rate": 0.00019468363529842023, "loss": 0.9795, "step": 2360 }, { "epoch": 0.4204059829059829, "grad_norm": 0.47428226470947266, "learning_rate": 0.00019467913116585697, "loss": 0.9316, "step": 2361 }, { "epoch": 0.4205840455840456, "grad_norm": 0.5193758010864258, "learning_rate": 0.00019467462517825282, "loss": 1.235, "step": 2362 }, { "epoch": 0.42076210826210825, "grad_norm": 0.49845513701438904, "learning_rate": 0.00019467011733569607, "loss": 1.2413, "step": 2363 }, { "epoch": 0.42094017094017094, "grad_norm": 0.45483845472335815, "learning_rate": 0.00019466560763827502, "loss": 1.2817, "step": 2364 }, { "epoch": 0.42111823361823364, "grad_norm": 0.43345287442207336, "learning_rate": 0.00019466109608607806, "loss": 0.8568, "step": 2365 }, { "epoch": 0.4212962962962963, "grad_norm": 0.4467088282108307, "learning_rate": 0.00019465658267919352, "loss": 1.1408, "step": 2366 }, { "epoch": 
0.421474358974359, "grad_norm": 0.6705610156059265, "learning_rate": 0.00019465206741770992, "loss": 1.445, "step": 2367 }, { "epoch": 0.42165242165242167, "grad_norm": 0.5037859678268433, "learning_rate": 0.00019464755030171565, "loss": 0.8682, "step": 2368 }, { "epoch": 0.4218304843304843, "grad_norm": 0.49576324224472046, "learning_rate": 0.00019464303133129928, "loss": 0.8387, "step": 2369 }, { "epoch": 0.422008547008547, "grad_norm": 0.5222806334495544, "learning_rate": 0.00019463851050654927, "loss": 1.1443, "step": 2370 }, { "epoch": 0.4221866096866097, "grad_norm": 0.4966863989830017, "learning_rate": 0.00019463398782755426, "loss": 1.1555, "step": 2371 }, { "epoch": 0.42236467236467234, "grad_norm": 0.6140168309211731, "learning_rate": 0.00019462946329440285, "loss": 1.2264, "step": 2372 }, { "epoch": 0.42254273504273504, "grad_norm": 0.4906651973724365, "learning_rate": 0.0001946249369071837, "loss": 1.2459, "step": 2373 }, { "epoch": 0.42272079772079774, "grad_norm": 0.5956700444221497, "learning_rate": 0.00019462040866598544, "loss": 1.1521, "step": 2374 }, { "epoch": 0.4228988603988604, "grad_norm": 0.46044886112213135, "learning_rate": 0.00019461587857089687, "loss": 1.2084, "step": 2375 }, { "epoch": 0.4230769230769231, "grad_norm": 0.5109430551528931, "learning_rate": 0.00019461134662200668, "loss": 1.2684, "step": 2376 }, { "epoch": 0.42325498575498577, "grad_norm": 0.4373733103275299, "learning_rate": 0.0001946068128194037, "loss": 1.0451, "step": 2377 }, { "epoch": 0.4234330484330484, "grad_norm": 0.553817868232727, "learning_rate": 0.00019460227716317673, "loss": 1.1052, "step": 2378 }, { "epoch": 0.4236111111111111, "grad_norm": 0.5742647647857666, "learning_rate": 0.00019459773965341468, "loss": 1.1647, "step": 2379 }, { "epoch": 0.4237891737891738, "grad_norm": 0.5461940169334412, "learning_rate": 0.00019459320029020642, "loss": 1.0953, "step": 2380 }, { "epoch": 0.42396723646723644, "grad_norm": 0.5837802290916443, "learning_rate": 
0.0001945886590736409, "loss": 1.1303, "step": 2381 }, { "epoch": 0.42414529914529914, "grad_norm": 0.5316985249519348, "learning_rate": 0.0001945841160038071, "loss": 1.1204, "step": 2382 }, { "epoch": 0.42432336182336183, "grad_norm": 0.5846191048622131, "learning_rate": 0.00019457957108079404, "loss": 1.2622, "step": 2383 }, { "epoch": 0.42450142450142453, "grad_norm": 0.43266957998275757, "learning_rate": 0.00019457502430469075, "loss": 0.9834, "step": 2384 }, { "epoch": 0.42467948717948717, "grad_norm": 0.514081597328186, "learning_rate": 0.00019457047567558632, "loss": 0.8413, "step": 2385 }, { "epoch": 0.42485754985754987, "grad_norm": 0.4831700325012207, "learning_rate": 0.00019456592519356987, "loss": 0.9244, "step": 2386 }, { "epoch": 0.42503561253561256, "grad_norm": 0.5612850785255432, "learning_rate": 0.00019456137285873057, "loss": 0.9438, "step": 2387 }, { "epoch": 0.4252136752136752, "grad_norm": 0.5197352766990662, "learning_rate": 0.00019455681867115758, "loss": 1.1095, "step": 2388 }, { "epoch": 0.4253917378917379, "grad_norm": 0.5045261979103088, "learning_rate": 0.00019455226263094018, "loss": 1.0007, "step": 2389 }, { "epoch": 0.4255698005698006, "grad_norm": 0.5167570114135742, "learning_rate": 0.00019454770473816758, "loss": 1.1335, "step": 2390 }, { "epoch": 0.42574786324786323, "grad_norm": 0.49262070655822754, "learning_rate": 0.00019454314499292913, "loss": 1.0436, "step": 2391 }, { "epoch": 0.42592592592592593, "grad_norm": 0.4489207863807678, "learning_rate": 0.00019453858339531417, "loss": 1.0138, "step": 2392 }, { "epoch": 0.4261039886039886, "grad_norm": 0.6024920344352722, "learning_rate": 0.00019453401994541203, "loss": 1.1921, "step": 2393 }, { "epoch": 0.42628205128205127, "grad_norm": 0.46807861328125, "learning_rate": 0.00019452945464331215, "loss": 1.0947, "step": 2394 }, { "epoch": 0.42646011396011396, "grad_norm": 0.48776543140411377, "learning_rate": 0.00019452488748910397, "loss": 1.0029, "step": 2395 }, { "epoch": 
0.42663817663817666, "grad_norm": 0.4798663556575775, "learning_rate": 0.000194520318482877, "loss": 0.7863, "step": 2396 }, { "epoch": 0.4268162393162393, "grad_norm": 0.5067816972732544, "learning_rate": 0.0001945157476247207, "loss": 1.0049, "step": 2397 }, { "epoch": 0.426994301994302, "grad_norm": 0.5179638266563416, "learning_rate": 0.00019451117491472468, "loss": 1.1851, "step": 2398 }, { "epoch": 0.4271723646723647, "grad_norm": 0.4782430827617645, "learning_rate": 0.00019450660035297854, "loss": 1.125, "step": 2399 }, { "epoch": 0.42735042735042733, "grad_norm": 0.560077965259552, "learning_rate": 0.00019450202393957186, "loss": 1.1843, "step": 2400 }, { "epoch": 0.42752849002849, "grad_norm": 0.5247970223426819, "learning_rate": 0.00019449744567459436, "loss": 1.1576, "step": 2401 }, { "epoch": 0.4277065527065527, "grad_norm": 0.6414062976837158, "learning_rate": 0.00019449286555813568, "loss": 1.1833, "step": 2402 }, { "epoch": 0.42788461538461536, "grad_norm": 0.5006586909294128, "learning_rate": 0.00019448828359028563, "loss": 1.1778, "step": 2403 }, { "epoch": 0.42806267806267806, "grad_norm": 0.4946450889110565, "learning_rate": 0.0001944836997711339, "loss": 1.1611, "step": 2404 }, { "epoch": 0.42824074074074076, "grad_norm": 0.4601200222969055, "learning_rate": 0.00019447911410077037, "loss": 1.2456, "step": 2405 }, { "epoch": 0.4284188034188034, "grad_norm": 0.4653947651386261, "learning_rate": 0.00019447452657928485, "loss": 1.0941, "step": 2406 }, { "epoch": 0.4285968660968661, "grad_norm": 0.5015713572502136, "learning_rate": 0.00019446993720676726, "loss": 1.3113, "step": 2407 }, { "epoch": 0.4287749287749288, "grad_norm": 0.5803143978118896, "learning_rate": 0.0001944653459833075, "loss": 1.0568, "step": 2408 }, { "epoch": 0.42895299145299143, "grad_norm": 0.5259647965431213, "learning_rate": 0.0001944607529089955, "loss": 1.1243, "step": 2409 }, { "epoch": 0.4291310541310541, "grad_norm": 0.5150414109230042, "learning_rate": 
0.00019445615798392124, "loss": 1.0676, "step": 2410 }, { "epoch": 0.4293091168091168, "grad_norm": 0.5848649740219116, "learning_rate": 0.0001944515612081748, "loss": 1.0671, "step": 2411 }, { "epoch": 0.42948717948717946, "grad_norm": 0.5696990489959717, "learning_rate": 0.00019444696258184626, "loss": 1.3323, "step": 2412 }, { "epoch": 0.42966524216524216, "grad_norm": 0.49822330474853516, "learning_rate": 0.00019444236210502567, "loss": 1.1004, "step": 2413 }, { "epoch": 0.42984330484330485, "grad_norm": 0.4683490991592407, "learning_rate": 0.00019443775977780317, "loss": 0.9768, "step": 2414 }, { "epoch": 0.43002136752136755, "grad_norm": 0.5703811049461365, "learning_rate": 0.00019443315560026893, "loss": 1.154, "step": 2415 }, { "epoch": 0.4301994301994302, "grad_norm": 0.5121861100196838, "learning_rate": 0.0001944285495725132, "loss": 1.1388, "step": 2416 }, { "epoch": 0.4303774928774929, "grad_norm": 0.4864094853401184, "learning_rate": 0.00019442394169462619, "loss": 0.9214, "step": 2417 }, { "epoch": 0.4305555555555556, "grad_norm": 0.5234864354133606, "learning_rate": 0.0001944193319666982, "loss": 1.2787, "step": 2418 }, { "epoch": 0.4307336182336182, "grad_norm": 0.5137650370597839, "learning_rate": 0.00019441472038881955, "loss": 1.1406, "step": 2419 }, { "epoch": 0.4309116809116809, "grad_norm": 0.49687784910202026, "learning_rate": 0.00019441010696108054, "loss": 0.93, "step": 2420 }, { "epoch": 0.4310897435897436, "grad_norm": 0.5078722834587097, "learning_rate": 0.00019440549168357163, "loss": 1.1417, "step": 2421 }, { "epoch": 0.43126780626780625, "grad_norm": 0.4483391046524048, "learning_rate": 0.00019440087455638324, "loss": 0.9016, "step": 2422 }, { "epoch": 0.43144586894586895, "grad_norm": 0.5963045954704285, "learning_rate": 0.00019439625557960576, "loss": 1.1567, "step": 2423 }, { "epoch": 0.43162393162393164, "grad_norm": 0.5534471273422241, "learning_rate": 0.0001943916347533298, "loss": 1.1409, "step": 2424 }, { "epoch": 
0.4318019943019943, "grad_norm": 0.6400241851806641, "learning_rate": 0.0001943870120776458, "loss": 1.2041, "step": 2425 }, { "epoch": 0.431980056980057, "grad_norm": 0.4599420726299286, "learning_rate": 0.0001943823875526444, "loss": 1.023, "step": 2426 }, { "epoch": 0.4321581196581197, "grad_norm": 0.4799708425998688, "learning_rate": 0.00019437776117841614, "loss": 1.0872, "step": 2427 }, { "epoch": 0.4323361823361823, "grad_norm": 0.5138532519340515, "learning_rate": 0.00019437313295505172, "loss": 1.1175, "step": 2428 }, { "epoch": 0.432514245014245, "grad_norm": 0.538223147392273, "learning_rate": 0.00019436850288264183, "loss": 1.1203, "step": 2429 }, { "epoch": 0.4326923076923077, "grad_norm": 0.458044171333313, "learning_rate": 0.00019436387096127713, "loss": 1.0383, "step": 2430 }, { "epoch": 0.43287037037037035, "grad_norm": 0.5928303599357605, "learning_rate": 0.00019435923719104842, "loss": 1.1191, "step": 2431 }, { "epoch": 0.43304843304843305, "grad_norm": 0.5818437933921814, "learning_rate": 0.00019435460157204645, "loss": 1.0352, "step": 2432 }, { "epoch": 0.43322649572649574, "grad_norm": 0.487341046333313, "learning_rate": 0.0001943499641043621, "loss": 1.2608, "step": 2433 }, { "epoch": 0.4334045584045584, "grad_norm": 0.4737292230129242, "learning_rate": 0.0001943453247880862, "loss": 1.0084, "step": 2434 }, { "epoch": 0.4335826210826211, "grad_norm": 0.4251207709312439, "learning_rate": 0.0001943406836233096, "loss": 0.9163, "step": 2435 }, { "epoch": 0.4337606837606838, "grad_norm": 0.49468478560447693, "learning_rate": 0.00019433604061012331, "loss": 1.0293, "step": 2436 }, { "epoch": 0.4339387464387464, "grad_norm": 0.47120022773742676, "learning_rate": 0.00019433139574861826, "loss": 1.0097, "step": 2437 }, { "epoch": 0.4341168091168091, "grad_norm": 0.5060358047485352, "learning_rate": 0.00019432674903888548, "loss": 1.0683, "step": 2438 }, { "epoch": 0.4342948717948718, "grad_norm": 0.5455917119979858, "learning_rate": 
0.00019432210048101598, "loss": 0.8886, "step": 2439 }, { "epoch": 0.43447293447293445, "grad_norm": 0.7960546612739563, "learning_rate": 0.00019431745007510086, "loss": 0.8648, "step": 2440 }, { "epoch": 0.43465099715099714, "grad_norm": 0.5069689154624939, "learning_rate": 0.00019431279782123126, "loss": 1.1315, "step": 2441 }, { "epoch": 0.43482905982905984, "grad_norm": 0.5597776174545288, "learning_rate": 0.0001943081437194983, "loss": 1.2281, "step": 2442 }, { "epoch": 0.43500712250712253, "grad_norm": 0.4527420997619629, "learning_rate": 0.00019430348776999315, "loss": 0.7576, "step": 2443 }, { "epoch": 0.4351851851851852, "grad_norm": 0.5625936388969421, "learning_rate": 0.00019429882997280706, "loss": 1.0302, "step": 2444 }, { "epoch": 0.43536324786324787, "grad_norm": 0.5173513293266296, "learning_rate": 0.0001942941703280313, "loss": 1.2255, "step": 2445 }, { "epoch": 0.43554131054131057, "grad_norm": 0.45889151096343994, "learning_rate": 0.00019428950883575714, "loss": 0.9322, "step": 2446 }, { "epoch": 0.4357193732193732, "grad_norm": 0.5288477540016174, "learning_rate": 0.00019428484549607593, "loss": 1.0572, "step": 2447 }, { "epoch": 0.4358974358974359, "grad_norm": 0.48328033089637756, "learning_rate": 0.00019428018030907902, "loss": 1.1213, "step": 2448 }, { "epoch": 0.4360754985754986, "grad_norm": 0.5146737098693848, "learning_rate": 0.00019427551327485786, "loss": 0.9633, "step": 2449 }, { "epoch": 0.43625356125356124, "grad_norm": 0.5138360261917114, "learning_rate": 0.00019427084439350382, "loss": 1.0561, "step": 2450 }, { "epoch": 0.43643162393162394, "grad_norm": 0.5192533135414124, "learning_rate": 0.00019426617366510843, "loss": 1.1704, "step": 2451 }, { "epoch": 0.43660968660968663, "grad_norm": 0.4819495379924774, "learning_rate": 0.00019426150108976318, "loss": 1.0958, "step": 2452 }, { "epoch": 0.43678774928774927, "grad_norm": 0.4626680910587311, "learning_rate": 0.00019425682666755965, "loss": 1.1872, "step": 2453 }, { "epoch": 
0.43696581196581197, "grad_norm": 0.5773931741714478, "learning_rate": 0.00019425215039858937, "loss": 1.0722, "step": 2454 }, { "epoch": 0.43714387464387466, "grad_norm": 0.5003872513771057, "learning_rate": 0.00019424747228294402, "loss": 1.0561, "step": 2455 }, { "epoch": 0.4373219373219373, "grad_norm": 0.47370314598083496, "learning_rate": 0.0001942427923207152, "loss": 1.1619, "step": 2456 }, { "epoch": 0.4375, "grad_norm": 0.466421514749527, "learning_rate": 0.00019423811051199466, "loss": 1.1311, "step": 2457 }, { "epoch": 0.4376780626780627, "grad_norm": 0.44564682245254517, "learning_rate": 0.00019423342685687413, "loss": 1.1889, "step": 2458 }, { "epoch": 0.43785612535612534, "grad_norm": 0.40986698865890503, "learning_rate": 0.00019422874135544533, "loss": 0.7312, "step": 2459 }, { "epoch": 0.43803418803418803, "grad_norm": 0.4714358448982239, "learning_rate": 0.0001942240540078001, "loss": 0.9273, "step": 2460 }, { "epoch": 0.43821225071225073, "grad_norm": 0.5298398733139038, "learning_rate": 0.00019421936481403025, "loss": 1.3377, "step": 2461 }, { "epoch": 0.43839031339031337, "grad_norm": 0.6326695680618286, "learning_rate": 0.0001942146737742277, "loss": 1.0258, "step": 2462 }, { "epoch": 0.43856837606837606, "grad_norm": 0.5087653994560242, "learning_rate": 0.00019420998088848427, "loss": 1.0007, "step": 2463 }, { "epoch": 0.43874643874643876, "grad_norm": 0.4895429313182831, "learning_rate": 0.00019420528615689202, "loss": 1.0032, "step": 2464 }, { "epoch": 0.4389245014245014, "grad_norm": 0.5029937028884888, "learning_rate": 0.00019420058957954285, "loss": 1.2877, "step": 2465 }, { "epoch": 0.4391025641025641, "grad_norm": 0.4953192174434662, "learning_rate": 0.00019419589115652884, "loss": 1.0759, "step": 2466 }, { "epoch": 0.4392806267806268, "grad_norm": 0.5081778168678284, "learning_rate": 0.000194191190887942, "loss": 0.8816, "step": 2467 }, { "epoch": 0.43945868945868943, "grad_norm": 0.5065913200378418, "learning_rate": 
0.00019418648877387446, "loss": 1.0362, "step": 2468 }, { "epoch": 0.43963675213675213, "grad_norm": 0.540600061416626, "learning_rate": 0.00019418178481441832, "loss": 1.0911, "step": 2469 }, { "epoch": 0.4398148148148148, "grad_norm": 0.5122954845428467, "learning_rate": 0.00019417707900966572, "loss": 0.9866, "step": 2470 }, { "epoch": 0.43999287749287747, "grad_norm": 0.5380190014839172, "learning_rate": 0.00019417237135970893, "loss": 1.2775, "step": 2471 }, { "epoch": 0.44017094017094016, "grad_norm": 1.2977570295333862, "learning_rate": 0.00019416766186464016, "loss": 1.3993, "step": 2472 }, { "epoch": 0.44034900284900286, "grad_norm": 0.48105308413505554, "learning_rate": 0.00019416295052455165, "loss": 0.9369, "step": 2473 }, { "epoch": 0.44052706552706555, "grad_norm": 0.4742157459259033, "learning_rate": 0.00019415823733953574, "loss": 1.101, "step": 2474 }, { "epoch": 0.4407051282051282, "grad_norm": 0.4958631694316864, "learning_rate": 0.00019415352230968473, "loss": 0.9906, "step": 2475 }, { "epoch": 0.4408831908831909, "grad_norm": 0.5808146595954895, "learning_rate": 0.00019414880543509107, "loss": 1.2315, "step": 2476 }, { "epoch": 0.4410612535612536, "grad_norm": 0.4294755160808563, "learning_rate": 0.00019414408671584714, "loss": 0.8275, "step": 2477 }, { "epoch": 0.4412393162393162, "grad_norm": 0.5346055626869202, "learning_rate": 0.0001941393661520454, "loss": 1.2432, "step": 2478 }, { "epoch": 0.4414173789173789, "grad_norm": 0.5827590227127075, "learning_rate": 0.00019413464374377833, "loss": 1.3204, "step": 2479 }, { "epoch": 0.4415954415954416, "grad_norm": 0.45688143372535706, "learning_rate": 0.00019412991949113847, "loss": 0.9307, "step": 2480 }, { "epoch": 0.44177350427350426, "grad_norm": 0.512999415397644, "learning_rate": 0.0001941251933942184, "loss": 1.2808, "step": 2481 }, { "epoch": 0.44195156695156695, "grad_norm": 0.4546334445476532, "learning_rate": 0.00019412046545311064, "loss": 1.0156, "step": 2482 }, { "epoch": 
0.44212962962962965, "grad_norm": 0.48552581667900085, "learning_rate": 0.00019411573566790793, "loss": 1.3798, "step": 2483 }, { "epoch": 0.4423076923076923, "grad_norm": 0.511970579624176, "learning_rate": 0.00019411100403870287, "loss": 1.065, "step": 2484 }, { "epoch": 0.442485754985755, "grad_norm": 0.6367824077606201, "learning_rate": 0.00019410627056558815, "loss": 1.3242, "step": 2485 }, { "epoch": 0.4426638176638177, "grad_norm": 0.48913368582725525, "learning_rate": 0.00019410153524865659, "loss": 0.9761, "step": 2486 }, { "epoch": 0.4428418803418803, "grad_norm": 0.5077710151672363, "learning_rate": 0.0001940967980880009, "loss": 1.1023, "step": 2487 }, { "epoch": 0.443019943019943, "grad_norm": 0.4956335723400116, "learning_rate": 0.00019409205908371395, "loss": 1.1788, "step": 2488 }, { "epoch": 0.4431980056980057, "grad_norm": 0.4726616442203522, "learning_rate": 0.00019408731823588853, "loss": 1.1445, "step": 2489 }, { "epoch": 0.44337606837606836, "grad_norm": 0.5676438212394714, "learning_rate": 0.00019408257554461757, "loss": 1.0344, "step": 2490 }, { "epoch": 0.44355413105413105, "grad_norm": 0.537656843662262, "learning_rate": 0.000194077831009994, "loss": 0.9876, "step": 2491 }, { "epoch": 0.44373219373219375, "grad_norm": 0.517905592918396, "learning_rate": 0.00019407308463211074, "loss": 1.1389, "step": 2492 }, { "epoch": 0.4439102564102564, "grad_norm": 0.49227026104927063, "learning_rate": 0.0001940683364110608, "loss": 1.0351, "step": 2493 }, { "epoch": 0.4440883190883191, "grad_norm": 0.5131173729896545, "learning_rate": 0.00019406358634693725, "loss": 1.0351, "step": 2494 }, { "epoch": 0.4442663817663818, "grad_norm": 0.5064495205879211, "learning_rate": 0.0001940588344398331, "loss": 1.0248, "step": 2495 }, { "epoch": 0.4444444444444444, "grad_norm": 0.44107526540756226, "learning_rate": 0.00019405408068984148, "loss": 0.8068, "step": 2496 }, { "epoch": 0.4446225071225071, "grad_norm": 0.6711848378181458, "learning_rate": 
0.00019404932509705554, "loss": 1.059, "step": 2497 }, { "epoch": 0.4448005698005698, "grad_norm": 0.5862596035003662, "learning_rate": 0.00019404456766156845, "loss": 1.2012, "step": 2498 }, { "epoch": 0.44497863247863245, "grad_norm": 0.5528512001037598, "learning_rate": 0.0001940398083834734, "loss": 1.1121, "step": 2499 }, { "epoch": 0.44515669515669515, "grad_norm": 0.5326655507087708, "learning_rate": 0.0001940350472628637, "loss": 1.166, "step": 2500 }, { "epoch": 0.44533475783475784, "grad_norm": 0.5384873747825623, "learning_rate": 0.00019403028429983252, "loss": 1.4111, "step": 2501 }, { "epoch": 0.44551282051282054, "grad_norm": 0.5142310857772827, "learning_rate": 0.0001940255194944733, "loss": 1.3353, "step": 2502 }, { "epoch": 0.4456908831908832, "grad_norm": 0.49124231934547424, "learning_rate": 0.0001940207528468793, "loss": 1.1443, "step": 2503 }, { "epoch": 0.4458689458689459, "grad_norm": 0.509713888168335, "learning_rate": 0.000194015984357144, "loss": 1.1857, "step": 2504 }, { "epoch": 0.44604700854700857, "grad_norm": 0.5211275219917297, "learning_rate": 0.00019401121402536078, "loss": 0.9911, "step": 2505 }, { "epoch": 0.4462250712250712, "grad_norm": 0.480340838432312, "learning_rate": 0.00019400644185162312, "loss": 1.1018, "step": 2506 }, { "epoch": 0.4464031339031339, "grad_norm": 0.4212559163570404, "learning_rate": 0.00019400166783602448, "loss": 0.7501, "step": 2507 }, { "epoch": 0.4465811965811966, "grad_norm": 0.5110511183738708, "learning_rate": 0.00019399689197865846, "loss": 1.1244, "step": 2508 }, { "epoch": 0.44675925925925924, "grad_norm": 0.5604230165481567, "learning_rate": 0.0001939921142796186, "loss": 1.1066, "step": 2509 }, { "epoch": 0.44693732193732194, "grad_norm": 0.5578675270080566, "learning_rate": 0.0001939873347389985, "loss": 1.0514, "step": 2510 }, { "epoch": 0.44711538461538464, "grad_norm": 0.520908772945404, "learning_rate": 0.00019398255335689184, "loss": 1.1217, "step": 2511 }, { "epoch": 
0.4472934472934473, "grad_norm": 0.4405131936073303, "learning_rate": 0.00019397777013339224, "loss": 1.043, "step": 2512 }, { "epoch": 0.44747150997151, "grad_norm": 0.5217751860618591, "learning_rate": 0.0001939729850685935, "loss": 1.1301, "step": 2513 }, { "epoch": 0.44764957264957267, "grad_norm": 0.6151493191719055, "learning_rate": 0.00019396819816258932, "loss": 1.3498, "step": 2514 }, { "epoch": 0.4478276353276353, "grad_norm": 0.5622836947441101, "learning_rate": 0.0001939634094154735, "loss": 1.146, "step": 2515 }, { "epoch": 0.448005698005698, "grad_norm": 0.4671688973903656, "learning_rate": 0.00019395861882733984, "loss": 0.9456, "step": 2516 }, { "epoch": 0.4481837606837607, "grad_norm": 0.453951358795166, "learning_rate": 0.00019395382639828223, "loss": 1.0042, "step": 2517 }, { "epoch": 0.44836182336182334, "grad_norm": 0.5150699615478516, "learning_rate": 0.0001939490321283946, "loss": 1.166, "step": 2518 }, { "epoch": 0.44853988603988604, "grad_norm": 0.5718298554420471, "learning_rate": 0.0001939442360177708, "loss": 1.2033, "step": 2519 }, { "epoch": 0.44871794871794873, "grad_norm": 0.5306782126426697, "learning_rate": 0.00019393943806650488, "loss": 1.0765, "step": 2520 }, { "epoch": 0.4488960113960114, "grad_norm": 0.47633033990859985, "learning_rate": 0.0001939346382746908, "loss": 0.9957, "step": 2521 }, { "epoch": 0.44907407407407407, "grad_norm": 0.496441513299942, "learning_rate": 0.00019392983664242262, "loss": 1.2016, "step": 2522 }, { "epoch": 0.44925213675213677, "grad_norm": 0.45956477522850037, "learning_rate": 0.00019392503316979442, "loss": 1.026, "step": 2523 }, { "epoch": 0.4494301994301994, "grad_norm": 0.5400575995445251, "learning_rate": 0.0001939202278569003, "loss": 1.0785, "step": 2524 }, { "epoch": 0.4496082621082621, "grad_norm": 0.4847868084907532, "learning_rate": 0.00019391542070383442, "loss": 1.013, "step": 2525 }, { "epoch": 0.4497863247863248, "grad_norm": 0.4694063663482666, "learning_rate": 
0.00019391061171069094, "loss": 0.8793, "step": 2526 }, { "epoch": 0.44996438746438744, "grad_norm": 0.5158169269561768, "learning_rate": 0.00019390580087756413, "loss": 0.9602, "step": 2527 }, { "epoch": 0.45014245014245013, "grad_norm": 0.5404585003852844, "learning_rate": 0.00019390098820454822, "loss": 1.2247, "step": 2528 }, { "epoch": 0.45032051282051283, "grad_norm": 0.5302738547325134, "learning_rate": 0.00019389617369173752, "loss": 0.918, "step": 2529 }, { "epoch": 0.45049857549857547, "grad_norm": 0.5065485835075378, "learning_rate": 0.00019389135733922634, "loss": 1.0934, "step": 2530 }, { "epoch": 0.45067663817663817, "grad_norm": 0.5491471886634827, "learning_rate": 0.00019388653914710903, "loss": 1.0736, "step": 2531 }, { "epoch": 0.45085470085470086, "grad_norm": 0.4850206971168518, "learning_rate": 0.00019388171911548005, "loss": 1.2401, "step": 2532 }, { "epoch": 0.45103276353276356, "grad_norm": 0.5419789552688599, "learning_rate": 0.0001938768972444338, "loss": 1.269, "step": 2533 }, { "epoch": 0.4512108262108262, "grad_norm": 0.4209023714065552, "learning_rate": 0.00019387207353406476, "loss": 1.0544, "step": 2534 }, { "epoch": 0.4513888888888889, "grad_norm": 0.578588604927063, "learning_rate": 0.00019386724798446743, "loss": 1.0564, "step": 2535 }, { "epoch": 0.4515669515669516, "grad_norm": 0.5277524590492249, "learning_rate": 0.00019386242059573638, "loss": 1.1497, "step": 2536 }, { "epoch": 0.45174501424501423, "grad_norm": 0.5536073446273804, "learning_rate": 0.0001938575913679662, "loss": 1.2213, "step": 2537 }, { "epoch": 0.4519230769230769, "grad_norm": 0.5572254657745361, "learning_rate": 0.00019385276030125143, "loss": 1.0231, "step": 2538 }, { "epoch": 0.4521011396011396, "grad_norm": 0.493847131729126, "learning_rate": 0.00019384792739568686, "loss": 0.9385, "step": 2539 }, { "epoch": 0.45227920227920226, "grad_norm": 0.4641396403312683, "learning_rate": 0.00019384309265136707, "loss": 0.9332, "step": 2540 }, { "epoch": 
0.45245726495726496, "grad_norm": 0.5439442992210388, "learning_rate": 0.00019383825606838681, "loss": 1.317, "step": 2541 }, { "epoch": 0.45263532763532766, "grad_norm": 0.7050970792770386, "learning_rate": 0.00019383341764684086, "loss": 0.9508, "step": 2542 }, { "epoch": 0.4528133903133903, "grad_norm": 0.5013265013694763, "learning_rate": 0.000193828577386824, "loss": 1.2704, "step": 2543 }, { "epoch": 0.452991452991453, "grad_norm": 0.47641924023628235, "learning_rate": 0.0001938237352884311, "loss": 1.0101, "step": 2544 }, { "epoch": 0.4531695156695157, "grad_norm": 0.5223637819290161, "learning_rate": 0.000193818891351757, "loss": 1.0548, "step": 2545 }, { "epoch": 0.45334757834757833, "grad_norm": 0.49065667390823364, "learning_rate": 0.0001938140455768966, "loss": 1.0927, "step": 2546 }, { "epoch": 0.453525641025641, "grad_norm": 0.4808312654495239, "learning_rate": 0.0001938091979639449, "loss": 1.0599, "step": 2547 }, { "epoch": 0.4537037037037037, "grad_norm": 0.5157489776611328, "learning_rate": 0.0001938043485129968, "loss": 1.2596, "step": 2548 }, { "epoch": 0.45388176638176636, "grad_norm": 0.5983387231826782, "learning_rate": 0.0001937994972241474, "loss": 1.2276, "step": 2549 }, { "epoch": 0.45405982905982906, "grad_norm": 0.49776506423950195, "learning_rate": 0.00019379464409749163, "loss": 1.3666, "step": 2550 }, { "epoch": 0.45423789173789175, "grad_norm": 0.4693490266799927, "learning_rate": 0.00019378978913312471, "loss": 1.087, "step": 2551 }, { "epoch": 0.4544159544159544, "grad_norm": 0.4754335880279541, "learning_rate": 0.00019378493233114167, "loss": 1.1282, "step": 2552 }, { "epoch": 0.4545940170940171, "grad_norm": 0.5852862000465393, "learning_rate": 0.00019378007369163776, "loss": 1.1113, "step": 2553 }, { "epoch": 0.4547720797720798, "grad_norm": 0.47442635893821716, "learning_rate": 0.00019377521321470805, "loss": 0.983, "step": 2554 }, { "epoch": 0.4549501424501424, "grad_norm": 0.47432273626327515, "learning_rate": 
0.00019377035090044787, "loss": 1.0169, "step": 2555 }, { "epoch": 0.4551282051282051, "grad_norm": 0.4929196834564209, "learning_rate": 0.00019376548674895246, "loss": 1.0182, "step": 2556 }, { "epoch": 0.4553062678062678, "grad_norm": 0.5433184504508972, "learning_rate": 0.00019376062076031708, "loss": 1.1339, "step": 2557 }, { "epoch": 0.45548433048433046, "grad_norm": 0.47430408000946045, "learning_rate": 0.00019375575293463715, "loss": 1.1589, "step": 2558 }, { "epoch": 0.45566239316239315, "grad_norm": 0.46641045808792114, "learning_rate": 0.000193750883272008, "loss": 1.029, "step": 2559 }, { "epoch": 0.45584045584045585, "grad_norm": 0.44476228952407837, "learning_rate": 0.00019374601177252502, "loss": 0.8494, "step": 2560 }, { "epoch": 0.45601851851851855, "grad_norm": 0.4886183440685272, "learning_rate": 0.00019374113843628366, "loss": 1.1374, "step": 2561 }, { "epoch": 0.4561965811965812, "grad_norm": 0.4786703288555145, "learning_rate": 0.00019373626326337946, "loss": 1.2861, "step": 2562 }, { "epoch": 0.4563746438746439, "grad_norm": 0.5752716660499573, "learning_rate": 0.0001937313862539079, "loss": 1.2365, "step": 2563 }, { "epoch": 0.4565527065527066, "grad_norm": 0.519176185131073, "learning_rate": 0.00019372650740796452, "loss": 1.2264, "step": 2564 }, { "epoch": 0.4567307692307692, "grad_norm": 0.5927292704582214, "learning_rate": 0.00019372162672564493, "loss": 0.8979, "step": 2565 }, { "epoch": 0.4569088319088319, "grad_norm": 0.5467435121536255, "learning_rate": 0.00019371674420704478, "loss": 1.1016, "step": 2566 }, { "epoch": 0.4570868945868946, "grad_norm": 0.49593284726142883, "learning_rate": 0.00019371185985225968, "loss": 0.982, "step": 2567 }, { "epoch": 0.45726495726495725, "grad_norm": 0.5696587562561035, "learning_rate": 0.00019370697366138538, "loss": 0.979, "step": 2568 }, { "epoch": 0.45744301994301995, "grad_norm": 0.4455752968788147, "learning_rate": 0.00019370208563451757, "loss": 0.8832, "step": 2569 }, { "epoch": 
0.45762108262108264, "grad_norm": 0.5072923302650452, "learning_rate": 0.00019369719577175203, "loss": 1.1046, "step": 2570 }, { "epoch": 0.4577991452991453, "grad_norm": 0.45119982957839966, "learning_rate": 0.0001936923040731846, "loss": 1.0083, "step": 2571 }, { "epoch": 0.457977207977208, "grad_norm": 0.5062251091003418, "learning_rate": 0.00019368741053891108, "loss": 1.2771, "step": 2572 }, { "epoch": 0.4581552706552707, "grad_norm": 0.5511104464530945, "learning_rate": 0.0001936825151690274, "loss": 1.0039, "step": 2573 }, { "epoch": 0.4583333333333333, "grad_norm": 0.4721006453037262, "learning_rate": 0.0001936776179636294, "loss": 1.3246, "step": 2574 }, { "epoch": 0.458511396011396, "grad_norm": 0.5021488666534424, "learning_rate": 0.0001936727189228131, "loss": 1.1733, "step": 2575 }, { "epoch": 0.4586894586894587, "grad_norm": 0.5755292177200317, "learning_rate": 0.0001936678180466745, "loss": 1.2241, "step": 2576 }, { "epoch": 0.45886752136752135, "grad_norm": 0.4501610994338989, "learning_rate": 0.00019366291533530952, "loss": 1.0503, "step": 2577 }, { "epoch": 0.45904558404558404, "grad_norm": 0.4067458212375641, "learning_rate": 0.00019365801078881432, "loss": 0.8259, "step": 2578 }, { "epoch": 0.45922364672364674, "grad_norm": 0.539730429649353, "learning_rate": 0.0001936531044072849, "loss": 1.1964, "step": 2579 }, { "epoch": 0.4594017094017094, "grad_norm": 0.5624797344207764, "learning_rate": 0.0001936481961908175, "loss": 1.2059, "step": 2580 }, { "epoch": 0.4595797720797721, "grad_norm": 0.43679240345954895, "learning_rate": 0.00019364328613950824, "loss": 1.1371, "step": 2581 }, { "epoch": 0.45975783475783477, "grad_norm": 0.5214769244194031, "learning_rate": 0.00019363837425345328, "loss": 1.109, "step": 2582 }, { "epoch": 0.4599358974358974, "grad_norm": 0.4522894024848938, "learning_rate": 0.00019363346053274892, "loss": 1.0532, "step": 2583 }, { "epoch": 0.4601139601139601, "grad_norm": 0.44980281591415405, "learning_rate": 
0.0001936285449774914, "loss": 0.9352, "step": 2584 }, { "epoch": 0.4602920227920228, "grad_norm": 0.5697414875030518, "learning_rate": 0.00019362362758777705, "loss": 1.2171, "step": 2585 }, { "epoch": 0.46047008547008544, "grad_norm": 0.4636315107345581, "learning_rate": 0.00019361870836370217, "loss": 1.0662, "step": 2586 }, { "epoch": 0.46064814814814814, "grad_norm": 0.5144017338752747, "learning_rate": 0.00019361378730536321, "loss": 1.0681, "step": 2587 }, { "epoch": 0.46082621082621084, "grad_norm": 0.5007636547088623, "learning_rate": 0.00019360886441285654, "loss": 1.2058, "step": 2588 }, { "epoch": 0.46100427350427353, "grad_norm": 0.5024117231369019, "learning_rate": 0.00019360393968627864, "loss": 1.065, "step": 2589 }, { "epoch": 0.46118233618233617, "grad_norm": 0.48105588555336, "learning_rate": 0.00019359901312572596, "loss": 1.0887, "step": 2590 }, { "epoch": 0.46136039886039887, "grad_norm": 0.5381982326507568, "learning_rate": 0.00019359408473129506, "loss": 1.2754, "step": 2591 }, { "epoch": 0.46153846153846156, "grad_norm": 0.5051333904266357, "learning_rate": 0.0001935891545030825, "loss": 0.9334, "step": 2592 }, { "epoch": 0.4617165242165242, "grad_norm": 0.43818601965904236, "learning_rate": 0.0001935842224411849, "loss": 1.0967, "step": 2593 }, { "epoch": 0.4618945868945869, "grad_norm": 0.4727257490158081, "learning_rate": 0.0001935792885456988, "loss": 0.8136, "step": 2594 }, { "epoch": 0.4620726495726496, "grad_norm": 0.5505291223526001, "learning_rate": 0.00019357435281672098, "loss": 1.3113, "step": 2595 }, { "epoch": 0.46225071225071224, "grad_norm": 0.4705682396888733, "learning_rate": 0.0001935694152543481, "loss": 0.9863, "step": 2596 }, { "epoch": 0.46242877492877493, "grad_norm": 0.49653419852256775, "learning_rate": 0.0001935644758586769, "loss": 1.035, "step": 2597 }, { "epoch": 0.46260683760683763, "grad_norm": 0.4788367748260498, "learning_rate": 0.00019355953462980415, "loss": 1.1253, "step": 2598 }, { "epoch": 
0.46278490028490027, "grad_norm": 0.5295125842094421, "learning_rate": 0.00019355459156782668, "loss": 1.0853, "step": 2599 }, { "epoch": 0.46296296296296297, "grad_norm": 0.4878056049346924, "learning_rate": 0.00019354964667284133, "loss": 1.1381, "step": 2600 }, { "epoch": 0.46314102564102566, "grad_norm": 0.5442031025886536, "learning_rate": 0.00019354469994494497, "loss": 1.1349, "step": 2601 }, { "epoch": 0.4633190883190883, "grad_norm": 0.4845225214958191, "learning_rate": 0.00019353975138423457, "loss": 1.0538, "step": 2602 }, { "epoch": 0.463497150997151, "grad_norm": 0.4957871437072754, "learning_rate": 0.00019353480099080703, "loss": 1.2765, "step": 2603 }, { "epoch": 0.4636752136752137, "grad_norm": 0.5414339303970337, "learning_rate": 0.00019352984876475936, "loss": 1.1015, "step": 2604 }, { "epoch": 0.46385327635327633, "grad_norm": 0.5171043872833252, "learning_rate": 0.0001935248947061886, "loss": 0.9995, "step": 2605 }, { "epoch": 0.46403133903133903, "grad_norm": 0.46040529012680054, "learning_rate": 0.0001935199388151918, "loss": 1.1126, "step": 2606 }, { "epoch": 0.4642094017094017, "grad_norm": 0.5327033400535583, "learning_rate": 0.00019351498109186613, "loss": 1.1983, "step": 2607 }, { "epoch": 0.46438746438746437, "grad_norm": 0.4451361298561096, "learning_rate": 0.0001935100215363086, "loss": 0.9689, "step": 2608 }, { "epoch": 0.46456552706552706, "grad_norm": 0.5462809801101685, "learning_rate": 0.00019350506014861646, "loss": 1.036, "step": 2609 }, { "epoch": 0.46474358974358976, "grad_norm": 0.4907000958919525, "learning_rate": 0.00019350009692888694, "loss": 1.0724, "step": 2610 }, { "epoch": 0.4649216524216524, "grad_norm": 0.47523510456085205, "learning_rate": 0.00019349513187721723, "loss": 0.9214, "step": 2611 }, { "epoch": 0.4650997150997151, "grad_norm": 0.539732813835144, "learning_rate": 0.0001934901649937046, "loss": 1.1166, "step": 2612 }, { "epoch": 0.4652777777777778, "grad_norm": 0.4827860891819, "learning_rate": 
0.00019348519627844643, "loss": 1.1613, "step": 2613 }, { "epoch": 0.46545584045584043, "grad_norm": 0.5385223031044006, "learning_rate": 0.00019348022573154, "loss": 1.0105, "step": 2614 }, { "epoch": 0.4656339031339031, "grad_norm": 0.4629383087158203, "learning_rate": 0.0001934752533530828, "loss": 1.0298, "step": 2615 }, { "epoch": 0.4658119658119658, "grad_norm": 0.599371075630188, "learning_rate": 0.00019347027914317212, "loss": 1.3158, "step": 2616 }, { "epoch": 0.46599002849002846, "grad_norm": 0.5954698324203491, "learning_rate": 0.00019346530310190553, "loss": 1.1882, "step": 2617 }, { "epoch": 0.46616809116809116, "grad_norm": 0.49185171723365784, "learning_rate": 0.00019346032522938046, "loss": 1.0977, "step": 2618 }, { "epoch": 0.46634615384615385, "grad_norm": 0.5145422220230103, "learning_rate": 0.0001934553455256945, "loss": 0.9948, "step": 2619 }, { "epoch": 0.46652421652421655, "grad_norm": 0.6809412837028503, "learning_rate": 0.00019345036399094517, "loss": 1.5798, "step": 2620 }, { "epoch": 0.4667022792022792, "grad_norm": 0.4606841206550598, "learning_rate": 0.00019344538062523005, "loss": 0.7357, "step": 2621 }, { "epoch": 0.4668803418803419, "grad_norm": 0.49036628007888794, "learning_rate": 0.00019344039542864685, "loss": 1.1518, "step": 2622 }, { "epoch": 0.4670584045584046, "grad_norm": 0.47904539108276367, "learning_rate": 0.0001934354084012932, "loss": 0.9929, "step": 2623 }, { "epoch": 0.4672364672364672, "grad_norm": 0.5224666595458984, "learning_rate": 0.0001934304195432668, "loss": 1.2544, "step": 2624 }, { "epoch": 0.4674145299145299, "grad_norm": 0.4902483820915222, "learning_rate": 0.00019342542885466543, "loss": 1.0301, "step": 2625 }, { "epoch": 0.4675925925925926, "grad_norm": 0.46824702620506287, "learning_rate": 0.00019342043633558683, "loss": 0.9364, "step": 2626 }, { "epoch": 0.46777065527065526, "grad_norm": 0.46272051334381104, "learning_rate": 0.00019341544198612888, "loss": 1.056, "step": 2627 }, { "epoch": 
0.46794871794871795, "grad_norm": 0.6216606497764587, "learning_rate": 0.0001934104458063894, "loss": 1.0825, "step": 2628 }, { "epoch": 0.46812678062678065, "grad_norm": 0.5024014115333557, "learning_rate": 0.00019340544779646623, "loss": 1.1832, "step": 2629 }, { "epoch": 0.4683048433048433, "grad_norm": 0.5547130107879639, "learning_rate": 0.00019340044795645737, "loss": 1.1335, "step": 2630 }, { "epoch": 0.468482905982906, "grad_norm": 0.5439161658287048, "learning_rate": 0.0001933954462864608, "loss": 1.0229, "step": 2631 }, { "epoch": 0.4686609686609687, "grad_norm": 0.4782990515232086, "learning_rate": 0.0001933904427865744, "loss": 1.2318, "step": 2632 }, { "epoch": 0.4688390313390313, "grad_norm": 0.5872140526771545, "learning_rate": 0.00019338543745689633, "loss": 1.0132, "step": 2633 }, { "epoch": 0.469017094017094, "grad_norm": 0.44163307547569275, "learning_rate": 0.00019338043029752458, "loss": 1.0091, "step": 2634 }, { "epoch": 0.4691951566951567, "grad_norm": 0.541081428527832, "learning_rate": 0.0001933754213085573, "loss": 1.2155, "step": 2635 }, { "epoch": 0.46937321937321935, "grad_norm": 0.4761527478694916, "learning_rate": 0.00019337041049009255, "loss": 1.1138, "step": 2636 }, { "epoch": 0.46955128205128205, "grad_norm": 0.46414369344711304, "learning_rate": 0.0001933653978422286, "loss": 0.9903, "step": 2637 }, { "epoch": 0.46972934472934474, "grad_norm": 0.5337086915969849, "learning_rate": 0.00019336038336506363, "loss": 1.2873, "step": 2638 }, { "epoch": 0.4699074074074074, "grad_norm": 0.5065379738807678, "learning_rate": 0.00019335536705869592, "loss": 1.1436, "step": 2639 }, { "epoch": 0.4700854700854701, "grad_norm": 0.5539217591285706, "learning_rate": 0.0001933503489232237, "loss": 1.2881, "step": 2640 }, { "epoch": 0.4702635327635328, "grad_norm": 0.48303213715553284, "learning_rate": 0.0001933453289587453, "loss": 1.0209, "step": 2641 }, { "epoch": 0.4704415954415954, "grad_norm": 0.6986871957778931, "learning_rate": 
0.00019334030716535908, "loss": 1.1979, "step": 2642 }, { "epoch": 0.4706196581196581, "grad_norm": 0.46137234568595886, "learning_rate": 0.00019333528354316347, "loss": 1.0682, "step": 2643 }, { "epoch": 0.4707977207977208, "grad_norm": 0.4726654291152954, "learning_rate": 0.00019333025809225684, "loss": 1.1712, "step": 2644 }, { "epoch": 0.47097578347578345, "grad_norm": 0.46188637614250183, "learning_rate": 0.0001933252308127377, "loss": 1.0183, "step": 2645 }, { "epoch": 0.47115384615384615, "grad_norm": 0.5323259830474854, "learning_rate": 0.0001933202017047045, "loss": 0.935, "step": 2646 }, { "epoch": 0.47133190883190884, "grad_norm": 0.5004189014434814, "learning_rate": 0.00019331517076825582, "loss": 1.1331, "step": 2647 }, { "epoch": 0.47150997150997154, "grad_norm": 0.5443634986877441, "learning_rate": 0.0001933101380034902, "loss": 1.0514, "step": 2648 }, { "epoch": 0.4716880341880342, "grad_norm": 0.504180371761322, "learning_rate": 0.0001933051034105063, "loss": 1.3099, "step": 2649 }, { "epoch": 0.4718660968660969, "grad_norm": 0.5092344284057617, "learning_rate": 0.0001933000669894027, "loss": 1.0716, "step": 2650 }, { "epoch": 0.47204415954415957, "grad_norm": 0.5236422419548035, "learning_rate": 0.0001932950287402781, "loss": 1.0981, "step": 2651 }, { "epoch": 0.4722222222222222, "grad_norm": 0.6228063702583313, "learning_rate": 0.0001932899886632312, "loss": 1.3398, "step": 2652 }, { "epoch": 0.4724002849002849, "grad_norm": 0.5112748146057129, "learning_rate": 0.00019328494675836078, "loss": 1.0151, "step": 2653 }, { "epoch": 0.4725783475783476, "grad_norm": 0.5554201602935791, "learning_rate": 0.00019327990302576563, "loss": 1.404, "step": 2654 }, { "epoch": 0.47275641025641024, "grad_norm": 0.5050725340843201, "learning_rate": 0.0001932748574655445, "loss": 0.951, "step": 2655 }, { "epoch": 0.47293447293447294, "grad_norm": 0.5161749720573425, "learning_rate": 0.00019326981007779636, "loss": 1.2425, "step": 2656 }, { "epoch": 
0.47311253561253563, "grad_norm": 0.4865442216396332, "learning_rate": 0.00019326476086262002, "loss": 1.1175, "step": 2657 }, { "epoch": 0.4732905982905983, "grad_norm": 0.5276186466217041, "learning_rate": 0.0001932597098201144, "loss": 1.3687, "step": 2658 }, { "epoch": 0.47346866096866097, "grad_norm": 0.509139358997345, "learning_rate": 0.00019325465695037855, "loss": 1.0546, "step": 2659 }, { "epoch": 0.47364672364672367, "grad_norm": 0.49815434217453003, "learning_rate": 0.00019324960225351138, "loss": 1.0807, "step": 2660 }, { "epoch": 0.4738247863247863, "grad_norm": 0.5059618353843689, "learning_rate": 0.00019324454572961197, "loss": 1.0827, "step": 2661 }, { "epoch": 0.474002849002849, "grad_norm": 0.5698565244674683, "learning_rate": 0.00019323948737877942, "loss": 1.2019, "step": 2662 }, { "epoch": 0.4741809116809117, "grad_norm": 0.49661511182785034, "learning_rate": 0.00019323442720111276, "loss": 1.1447, "step": 2663 }, { "epoch": 0.47435897435897434, "grad_norm": 0.46442747116088867, "learning_rate": 0.0001932293651967112, "loss": 0.8796, "step": 2664 }, { "epoch": 0.47453703703703703, "grad_norm": 0.48306044936180115, "learning_rate": 0.00019322430136567388, "loss": 1.1358, "step": 2665 }, { "epoch": 0.47471509971509973, "grad_norm": 0.5677350759506226, "learning_rate": 0.00019321923570810005, "loss": 1.1026, "step": 2666 }, { "epoch": 0.47489316239316237, "grad_norm": 0.3700144588947296, "learning_rate": 0.0001932141682240889, "loss": 0.7514, "step": 2667 }, { "epoch": 0.47507122507122507, "grad_norm": 0.6003054976463318, "learning_rate": 0.0001932090989137398, "loss": 1.1591, "step": 2668 }, { "epoch": 0.47524928774928776, "grad_norm": 0.520298421382904, "learning_rate": 0.00019320402777715204, "loss": 1.339, "step": 2669 }, { "epoch": 0.4754273504273504, "grad_norm": 0.46453598141670227, "learning_rate": 0.00019319895481442493, "loss": 0.9879, "step": 2670 }, { "epoch": 0.4756054131054131, "grad_norm": 0.5247363448143005, "learning_rate": 
0.00019319388002565793, "loss": 0.9862, "step": 2671 }, { "epoch": 0.4757834757834758, "grad_norm": 0.5498613715171814, "learning_rate": 0.00019318880341095046, "loss": 1.2224, "step": 2672 }, { "epoch": 0.47596153846153844, "grad_norm": 0.565838098526001, "learning_rate": 0.00019318372497040192, "loss": 1.0712, "step": 2673 }, { "epoch": 0.47613960113960113, "grad_norm": 0.5797489881515503, "learning_rate": 0.00019317864470411191, "loss": 1.0176, "step": 2674 }, { "epoch": 0.47631766381766383, "grad_norm": 0.5114326477050781, "learning_rate": 0.0001931735626121799, "loss": 1.1027, "step": 2675 }, { "epoch": 0.47649572649572647, "grad_norm": 0.5396515727043152, "learning_rate": 0.00019316847869470547, "loss": 1.1782, "step": 2676 }, { "epoch": 0.47667378917378916, "grad_norm": 0.4812076985836029, "learning_rate": 0.00019316339295178824, "loss": 1.1196, "step": 2677 }, { "epoch": 0.47685185185185186, "grad_norm": 0.4875647723674774, "learning_rate": 0.00019315830538352787, "loss": 1.1407, "step": 2678 }, { "epoch": 0.47702991452991456, "grad_norm": 0.5036377906799316, "learning_rate": 0.00019315321599002404, "loss": 0.9842, "step": 2679 }, { "epoch": 0.4772079772079772, "grad_norm": 0.5054177641868591, "learning_rate": 0.00019314812477137645, "loss": 0.8196, "step": 2680 }, { "epoch": 0.4773860398860399, "grad_norm": 0.5050665736198425, "learning_rate": 0.00019314303172768483, "loss": 0.8463, "step": 2681 }, { "epoch": 0.4775641025641026, "grad_norm": 0.5179004669189453, "learning_rate": 0.000193137936859049, "loss": 1.2485, "step": 2682 }, { "epoch": 0.47774216524216523, "grad_norm": 0.44986143708229065, "learning_rate": 0.00019313284016556876, "loss": 0.9855, "step": 2683 }, { "epoch": 0.4779202279202279, "grad_norm": 0.5594347715377808, "learning_rate": 0.00019312774164734398, "loss": 1.0987, "step": 2684 }, { "epoch": 0.4780982905982906, "grad_norm": 0.4837244749069214, "learning_rate": 0.0001931226413044746, "loss": 1.1119, "step": 2685 }, { "epoch": 
0.47827635327635326, "grad_norm": 0.489145427942276, "learning_rate": 0.0001931175391370605, "loss": 1.1962, "step": 2686 }, { "epoch": 0.47845441595441596, "grad_norm": 0.503568708896637, "learning_rate": 0.00019311243514520164, "loss": 0.9668, "step": 2687 }, { "epoch": 0.47863247863247865, "grad_norm": 0.5401005744934082, "learning_rate": 0.00019310732932899805, "loss": 1.3072, "step": 2688 }, { "epoch": 0.4788105413105413, "grad_norm": 0.526523768901825, "learning_rate": 0.00019310222168854971, "loss": 1.1387, "step": 2689 }, { "epoch": 0.478988603988604, "grad_norm": 0.5223183631896973, "learning_rate": 0.00019309711222395678, "loss": 1.1391, "step": 2690 }, { "epoch": 0.4791666666666667, "grad_norm": 0.5840879082679749, "learning_rate": 0.00019309200093531933, "loss": 1.1543, "step": 2691 }, { "epoch": 0.4793447293447293, "grad_norm": 0.5173699259757996, "learning_rate": 0.00019308688782273753, "loss": 1.1889, "step": 2692 }, { "epoch": 0.479522792022792, "grad_norm": 0.5417894124984741, "learning_rate": 0.00019308177288631146, "loss": 1.299, "step": 2693 }, { "epoch": 0.4797008547008547, "grad_norm": 0.4890797734260559, "learning_rate": 0.0001930766561261415, "loss": 1.1516, "step": 2694 }, { "epoch": 0.47987891737891736, "grad_norm": 0.5422119498252869, "learning_rate": 0.00019307153754232772, "loss": 1.0301, "step": 2695 }, { "epoch": 0.48005698005698005, "grad_norm": 0.5838702917098999, "learning_rate": 0.00019306641713497057, "loss": 1.265, "step": 2696 }, { "epoch": 0.48023504273504275, "grad_norm": 0.5020943284034729, "learning_rate": 0.00019306129490417027, "loss": 1.1119, "step": 2697 }, { "epoch": 0.4804131054131054, "grad_norm": 0.412993460893631, "learning_rate": 0.00019305617085002723, "loss": 0.8083, "step": 2698 }, { "epoch": 0.4805911680911681, "grad_norm": 0.6270101070404053, "learning_rate": 0.00019305104497264184, "loss": 1.3355, "step": 2699 }, { "epoch": 0.4807692307692308, "grad_norm": 0.45256730914115906, "learning_rate": 
0.0001930459172721145, "loss": 1.0368, "step": 2700 }, { "epoch": 0.4809472934472934, "grad_norm": 0.5351749658584595, "learning_rate": 0.0001930407877485457, "loss": 1.135, "step": 2701 }, { "epoch": 0.4811253561253561, "grad_norm": 0.49324163794517517, "learning_rate": 0.00019303565640203593, "loss": 0.9383, "step": 2702 }, { "epoch": 0.4813034188034188, "grad_norm": 0.5434361100196838, "learning_rate": 0.00019303052323268576, "loss": 1.2605, "step": 2703 }, { "epoch": 0.48148148148148145, "grad_norm": 0.5858064889907837, "learning_rate": 0.00019302538824059572, "loss": 1.0846, "step": 2704 }, { "epoch": 0.48165954415954415, "grad_norm": 0.5753700733184814, "learning_rate": 0.00019302025142586647, "loss": 1.0371, "step": 2705 }, { "epoch": 0.48183760683760685, "grad_norm": 0.43102699518203735, "learning_rate": 0.00019301511278859858, "loss": 0.9189, "step": 2706 }, { "epoch": 0.48201566951566954, "grad_norm": 0.4731025993824005, "learning_rate": 0.0001930099723288928, "loss": 1.1291, "step": 2707 }, { "epoch": 0.4821937321937322, "grad_norm": 0.5685615539550781, "learning_rate": 0.00019300483004684987, "loss": 1.1006, "step": 2708 }, { "epoch": 0.4823717948717949, "grad_norm": 0.4368155896663666, "learning_rate": 0.00019299968594257044, "loss": 0.9959, "step": 2709 }, { "epoch": 0.4825498575498576, "grad_norm": 0.5594738125801086, "learning_rate": 0.00019299454001615537, "loss": 1.0826, "step": 2710 }, { "epoch": 0.4827279202279202, "grad_norm": 0.48876598477363586, "learning_rate": 0.00019298939226770548, "loss": 1.1556, "step": 2711 }, { "epoch": 0.4829059829059829, "grad_norm": 0.548039436340332, "learning_rate": 0.00019298424269732157, "loss": 1.158, "step": 2712 }, { "epoch": 0.4830840455840456, "grad_norm": 0.4957645535469055, "learning_rate": 0.00019297909130510464, "loss": 0.9824, "step": 2713 }, { "epoch": 0.48326210826210825, "grad_norm": 0.5197011232376099, "learning_rate": 0.00019297393809115555, "loss": 1.1074, "step": 2714 }, { "epoch": 
0.48344017094017094, "grad_norm": 0.5742064118385315, "learning_rate": 0.00019296878305557526, "loss": 1.0431, "step": 2715 }, { "epoch": 0.48361823361823364, "grad_norm": 0.5698413252830505, "learning_rate": 0.0001929636261984648, "loss": 1.0713, "step": 2716 }, { "epoch": 0.4837962962962963, "grad_norm": 0.48126333951950073, "learning_rate": 0.0001929584675199252, "loss": 0.9274, "step": 2717 }, { "epoch": 0.483974358974359, "grad_norm": 0.49299830198287964, "learning_rate": 0.00019295330702005754, "loss": 0.9392, "step": 2718 }, { "epoch": 0.48415242165242167, "grad_norm": 0.4780774414539337, "learning_rate": 0.0001929481446989629, "loss": 1.1459, "step": 2719 }, { "epoch": 0.4843304843304843, "grad_norm": 0.5462654829025269, "learning_rate": 0.00019294298055674248, "loss": 1.0635, "step": 2720 }, { "epoch": 0.484508547008547, "grad_norm": 0.5371061563491821, "learning_rate": 0.00019293781459349743, "loss": 1.3578, "step": 2721 }, { "epoch": 0.4846866096866097, "grad_norm": 0.46308520436286926, "learning_rate": 0.00019293264680932893, "loss": 0.9001, "step": 2722 }, { "epoch": 0.48486467236467234, "grad_norm": 0.5149807929992676, "learning_rate": 0.0001929274772043383, "loss": 0.6908, "step": 2723 }, { "epoch": 0.48504273504273504, "grad_norm": 0.5435031056404114, "learning_rate": 0.00019292230577862678, "loss": 1.2143, "step": 2724 }, { "epoch": 0.48522079772079774, "grad_norm": 0.44217726588249207, "learning_rate": 0.00019291713253229568, "loss": 0.9303, "step": 2725 }, { "epoch": 0.4853988603988604, "grad_norm": 0.6120226383209229, "learning_rate": 0.00019291195746544643, "loss": 1.3801, "step": 2726 }, { "epoch": 0.4855769230769231, "grad_norm": 0.5014316439628601, "learning_rate": 0.00019290678057818037, "loss": 1.0631, "step": 2727 }, { "epoch": 0.48575498575498577, "grad_norm": 0.5667829513549805, "learning_rate": 0.00019290160187059895, "loss": 1.3166, "step": 2728 }, { "epoch": 0.4859330484330484, "grad_norm": 0.5011509656906128, "learning_rate": 
0.0001928964213428036, "loss": 1.1887, "step": 2729 }, { "epoch": 0.4861111111111111, "grad_norm": 0.48317405581474304, "learning_rate": 0.00019289123899489586, "loss": 1.1125, "step": 2730 }, { "epoch": 0.4862891737891738, "grad_norm": 0.4669005870819092, "learning_rate": 0.00019288605482697726, "loss": 1.0091, "step": 2731 }, { "epoch": 0.48646723646723644, "grad_norm": 0.4330739974975586, "learning_rate": 0.00019288086883914937, "loss": 0.9789, "step": 2732 }, { "epoch": 0.48664529914529914, "grad_norm": 0.48482781648635864, "learning_rate": 0.0001928756810315138, "loss": 1.1922, "step": 2733 }, { "epoch": 0.48682336182336183, "grad_norm": 0.5781838297843933, "learning_rate": 0.0001928704914041722, "loss": 1.1793, "step": 2734 }, { "epoch": 0.48700142450142453, "grad_norm": 0.5955413579940796, "learning_rate": 0.00019286529995722623, "loss": 1.1001, "step": 2735 }, { "epoch": 0.48717948717948717, "grad_norm": 0.49204322695732117, "learning_rate": 0.00019286010669077763, "loss": 0.9219, "step": 2736 }, { "epoch": 0.48735754985754987, "grad_norm": 0.5853500962257385, "learning_rate": 0.00019285491160492813, "loss": 1.1133, "step": 2737 }, { "epoch": 0.48753561253561256, "grad_norm": 0.5555846095085144, "learning_rate": 0.0001928497146997795, "loss": 1.0915, "step": 2738 }, { "epoch": 0.4877136752136752, "grad_norm": 0.5166759490966797, "learning_rate": 0.00019284451597543364, "loss": 0.9349, "step": 2739 }, { "epoch": 0.4878917378917379, "grad_norm": 0.47816506028175354, "learning_rate": 0.00019283931543199234, "loss": 0.8978, "step": 2740 }, { "epoch": 0.4880698005698006, "grad_norm": 0.5632442831993103, "learning_rate": 0.0001928341130695575, "loss": 1.0491, "step": 2741 }, { "epoch": 0.48824786324786323, "grad_norm": 0.6532769799232483, "learning_rate": 0.00019282890888823107, "loss": 1.2779, "step": 2742 }, { "epoch": 0.48842592592592593, "grad_norm": 0.5733640789985657, "learning_rate": 0.000192823702888115, "loss": 1.4127, "step": 2743 }, { "epoch": 
0.4886039886039886, "grad_norm": 0.5701746344566345, "learning_rate": 0.00019281849506931132, "loss": 1.138, "step": 2744 }, { "epoch": 0.48878205128205127, "grad_norm": 0.5227449536323547, "learning_rate": 0.000192813285431922, "loss": 1.1831, "step": 2745 }, { "epoch": 0.48896011396011396, "grad_norm": 0.48457080125808716, "learning_rate": 0.00019280807397604915, "loss": 1.2468, "step": 2746 }, { "epoch": 0.48913817663817666, "grad_norm": 0.4596176743507385, "learning_rate": 0.0001928028607017949, "loss": 1.1098, "step": 2747 }, { "epoch": 0.4893162393162393, "grad_norm": 0.5204966068267822, "learning_rate": 0.00019279764560926142, "loss": 1.1501, "step": 2748 }, { "epoch": 0.489494301994302, "grad_norm": 0.5179490447044373, "learning_rate": 0.0001927924286985508, "loss": 1.2601, "step": 2749 }, { "epoch": 0.4896723646723647, "grad_norm": 0.4563423693180084, "learning_rate": 0.00019278720996976533, "loss": 1.081, "step": 2750 }, { "epoch": 0.48985042735042733, "grad_norm": 0.4906339943408966, "learning_rate": 0.00019278198942300717, "loss": 1.157, "step": 2751 }, { "epoch": 0.49002849002849, "grad_norm": 0.42241403460502625, "learning_rate": 0.00019277676705837873, "loss": 1.0333, "step": 2752 }, { "epoch": 0.4902065527065527, "grad_norm": 0.6310175657272339, "learning_rate": 0.00019277154287598226, "loss": 1.1225, "step": 2753 }, { "epoch": 0.49038461538461536, "grad_norm": 0.5109034776687622, "learning_rate": 0.0001927663168759201, "loss": 1.1619, "step": 2754 }, { "epoch": 0.49056267806267806, "grad_norm": 0.4809598922729492, "learning_rate": 0.00019276108905829465, "loss": 1.0423, "step": 2755 }, { "epoch": 0.49074074074074076, "grad_norm": 0.557502806186676, "learning_rate": 0.00019275585942320837, "loss": 0.8783, "step": 2756 }, { "epoch": 0.4909188034188034, "grad_norm": 0.5434393882751465, "learning_rate": 0.0001927506279707637, "loss": 1.1701, "step": 2757 }, { "epoch": 0.4910968660968661, "grad_norm": 0.49278944730758667, "learning_rate": 
0.00019274539470106317, "loss": 1.0447, "step": 2758 }, { "epoch": 0.4912749287749288, "grad_norm": 0.5634264349937439, "learning_rate": 0.00019274015961420927, "loss": 1.0639, "step": 2759 }, { "epoch": 0.49145299145299143, "grad_norm": 0.5632645487785339, "learning_rate": 0.00019273492271030464, "loss": 0.9223, "step": 2760 }, { "epoch": 0.4916310541310541, "grad_norm": 0.5949172377586365, "learning_rate": 0.00019272968398945177, "loss": 0.894, "step": 2761 }, { "epoch": 0.4918091168091168, "grad_norm": 0.5375374555587769, "learning_rate": 0.00019272444345175342, "loss": 1.0311, "step": 2762 }, { "epoch": 0.49198717948717946, "grad_norm": 0.5211305022239685, "learning_rate": 0.00019271920109731222, "loss": 1.1531, "step": 2763 }, { "epoch": 0.49216524216524216, "grad_norm": 0.44022253155708313, "learning_rate": 0.00019271395692623084, "loss": 0.9147, "step": 2764 }, { "epoch": 0.49234330484330485, "grad_norm": 0.4682174623012543, "learning_rate": 0.0001927087109386121, "loss": 1.081, "step": 2765 }, { "epoch": 0.49252136752136755, "grad_norm": 0.4971517324447632, "learning_rate": 0.0001927034631345588, "loss": 1.1017, "step": 2766 }, { "epoch": 0.4926994301994302, "grad_norm": 0.5015294551849365, "learning_rate": 0.00019269821351417364, "loss": 1.1093, "step": 2767 }, { "epoch": 0.4928774928774929, "grad_norm": 0.5512694716453552, "learning_rate": 0.00019269296207755958, "loss": 0.9657, "step": 2768 }, { "epoch": 0.4930555555555556, "grad_norm": 0.4914868474006653, "learning_rate": 0.00019268770882481948, "loss": 1.0379, "step": 2769 }, { "epoch": 0.4932336182336182, "grad_norm": 0.567337691783905, "learning_rate": 0.00019268245375605626, "loss": 1.004, "step": 2770 }, { "epoch": 0.4934116809116809, "grad_norm": 0.518489420413971, "learning_rate": 0.0001926771968713729, "loss": 1.0734, "step": 2771 }, { "epoch": 0.4935897435897436, "grad_norm": 0.567742109298706, "learning_rate": 0.00019267193817087237, "loss": 1.1276, "step": 2772 }, { "epoch": 
0.49376780626780625, "grad_norm": 0.5287964344024658, "learning_rate": 0.00019266667765465773, "loss": 1.1429, "step": 2773 }, { "epoch": 0.49394586894586895, "grad_norm": 0.5302085876464844, "learning_rate": 0.00019266141532283207, "loss": 1.0934, "step": 2774 }, { "epoch": 0.49412393162393164, "grad_norm": 0.5569987297058105, "learning_rate": 0.00019265615117549842, "loss": 1.1453, "step": 2775 }, { "epoch": 0.4943019943019943, "grad_norm": 0.519695520401001, "learning_rate": 0.00019265088521275997, "loss": 1.1255, "step": 2776 }, { "epoch": 0.494480056980057, "grad_norm": 0.5073211193084717, "learning_rate": 0.0001926456174347199, "loss": 1.0609, "step": 2777 }, { "epoch": 0.4946581196581197, "grad_norm": 0.45028239488601685, "learning_rate": 0.00019264034784148142, "loss": 0.9098, "step": 2778 }, { "epoch": 0.4948361823361823, "grad_norm": 0.6641215682029724, "learning_rate": 0.00019263507643314776, "loss": 0.8903, "step": 2779 }, { "epoch": 0.495014245014245, "grad_norm": 0.5281413793563843, "learning_rate": 0.00019262980320982224, "loss": 1.2906, "step": 2780 }, { "epoch": 0.4951923076923077, "grad_norm": 0.6256437301635742, "learning_rate": 0.0001926245281716081, "loss": 1.4142, "step": 2781 }, { "epoch": 0.49537037037037035, "grad_norm": 0.5422517657279968, "learning_rate": 0.00019261925131860877, "loss": 1.1606, "step": 2782 }, { "epoch": 0.49554843304843305, "grad_norm": 0.46938949823379517, "learning_rate": 0.0001926139726509276, "loss": 1.0333, "step": 2783 }, { "epoch": 0.49572649572649574, "grad_norm": 0.5799683928489685, "learning_rate": 0.000192608692168668, "loss": 1.0333, "step": 2784 }, { "epoch": 0.4959045584045584, "grad_norm": 0.5231602787971497, "learning_rate": 0.0001926034098719335, "loss": 1.1847, "step": 2785 }, { "epoch": 0.4960826210826211, "grad_norm": 0.477845698595047, "learning_rate": 0.00019259812576082752, "loss": 1.0746, "step": 2786 }, { "epoch": 0.4962606837606838, "grad_norm": 0.5490350723266602, "learning_rate": 
0.00019259283983545365, "loss": 1.2462, "step": 2787 }, { "epoch": 0.4964387464387464, "grad_norm": 0.5788847208023071, "learning_rate": 0.0001925875520959154, "loss": 1.3485, "step": 2788 }, { "epoch": 0.4966168091168091, "grad_norm": 0.46184736490249634, "learning_rate": 0.00019258226254231643, "loss": 0.8673, "step": 2789 }, { "epoch": 0.4967948717948718, "grad_norm": 0.4890633225440979, "learning_rate": 0.0001925769711747603, "loss": 0.9474, "step": 2790 }, { "epoch": 0.49697293447293445, "grad_norm": 0.5719282627105713, "learning_rate": 0.00019257167799335078, "loss": 1.2532, "step": 2791 }, { "epoch": 0.49715099715099714, "grad_norm": 0.5385584235191345, "learning_rate": 0.0001925663829981915, "loss": 1.1326, "step": 2792 }, { "epoch": 0.49732905982905984, "grad_norm": 0.5339545011520386, "learning_rate": 0.00019256108618938625, "loss": 1.1362, "step": 2793 }, { "epoch": 0.49750712250712253, "grad_norm": 0.5017803907394409, "learning_rate": 0.00019255578756703878, "loss": 1.0449, "step": 2794 }, { "epoch": 0.4976851851851852, "grad_norm": 0.6004226803779602, "learning_rate": 0.00019255048713125294, "loss": 0.9346, "step": 2795 }, { "epoch": 0.49786324786324787, "grad_norm": 0.44581490755081177, "learning_rate": 0.00019254518488213255, "loss": 1.038, "step": 2796 }, { "epoch": 0.49804131054131057, "grad_norm": 0.5180951356887817, "learning_rate": 0.00019253988081978151, "loss": 1.0479, "step": 2797 }, { "epoch": 0.4982193732193732, "grad_norm": 0.53944993019104, "learning_rate": 0.00019253457494430376, "loss": 1.2598, "step": 2798 }, { "epoch": 0.4983974358974359, "grad_norm": 0.5633010268211365, "learning_rate": 0.00019252926725580322, "loss": 1.205, "step": 2799 }, { "epoch": 0.4985754985754986, "grad_norm": 0.6653175950050354, "learning_rate": 0.0001925239577543839, "loss": 1.2383, "step": 2800 }, { "epoch": 0.49875356125356124, "grad_norm": 0.5083333849906921, "learning_rate": 0.00019251864644014984, "loss": 1.0649, "step": 2801 }, { "epoch": 
0.49893162393162394, "grad_norm": 0.4842020571231842, "learning_rate": 0.00019251333331320506, "loss": 1.1991, "step": 2802 }, { "epoch": 0.49910968660968663, "grad_norm": 0.47987112402915955, "learning_rate": 0.00019250801837365373, "loss": 1.1686, "step": 2803 }, { "epoch": 0.49928774928774927, "grad_norm": 0.5316333770751953, "learning_rate": 0.00019250270162159992, "loss": 1.1759, "step": 2804 }, { "epoch": 0.49946581196581197, "grad_norm": 0.5015079379081726, "learning_rate": 0.00019249738305714787, "loss": 0.9424, "step": 2805 }, { "epoch": 0.49964387464387466, "grad_norm": 0.6488274931907654, "learning_rate": 0.00019249206268040172, "loss": 1.066, "step": 2806 }, { "epoch": 0.4998219373219373, "grad_norm": 0.40364864468574524, "learning_rate": 0.00019248674049146574, "loss": 0.6998, "step": 2807 }, { "epoch": 0.5, "grad_norm": 0.5535672903060913, "learning_rate": 0.00019248141649044423, "loss": 1.2207, "step": 2808 }, { "epoch": 0.5, "eval_loss": 1.1072274446487427, "eval_runtime": 28.6913, "eval_samples_per_second": 36.283, "eval_steps_per_second": 18.159, "step": 2808 }, { "epoch": 0.5001780626780626, "grad_norm": 0.4834389090538025, "learning_rate": 0.00019247609067744143, "loss": 1.1686, "step": 2809 }, { "epoch": 0.5003561253561254, "grad_norm": 0.5007249712944031, "learning_rate": 0.00019247076305256176, "loss": 1.1343, "step": 2810 }, { "epoch": 0.500534188034188, "grad_norm": 0.4773348271846771, "learning_rate": 0.00019246543361590957, "loss": 0.9324, "step": 2811 }, { "epoch": 0.5007122507122507, "grad_norm": 0.47324609756469727, "learning_rate": 0.0001924601023675893, "loss": 1.0223, "step": 2812 }, { "epoch": 0.5008903133903134, "grad_norm": 0.5583845973014832, "learning_rate": 0.00019245476930770537, "loss": 1.1328, "step": 2813 }, { "epoch": 0.5010683760683761, "grad_norm": 0.4814579486846924, "learning_rate": 0.00019244943443636232, "loss": 1.0528, "step": 2814 }, { "epoch": 0.5012464387464387, "grad_norm": 0.4996104836463928, "learning_rate": 
0.00019244409775366465, "loss": 1.2482, "step": 2815 }, { "epoch": 0.5014245014245015, "grad_norm": 0.47870904207229614, "learning_rate": 0.0001924387592597169, "loss": 0.9452, "step": 2816 }, { "epoch": 0.5016025641025641, "grad_norm": 0.5617441534996033, "learning_rate": 0.0001924334189546237, "loss": 1.378, "step": 2817 }, { "epoch": 0.5017806267806267, "grad_norm": 0.4872083365917206, "learning_rate": 0.00019242807683848967, "loss": 1.1571, "step": 2818 }, { "epoch": 0.5019586894586895, "grad_norm": 0.5147804021835327, "learning_rate": 0.00019242273291141947, "loss": 1.1086, "step": 2819 }, { "epoch": 0.5021367521367521, "grad_norm": 0.4698995351791382, "learning_rate": 0.00019241738717351784, "loss": 1.1579, "step": 2820 }, { "epoch": 0.5023148148148148, "grad_norm": 0.5158926844596863, "learning_rate": 0.00019241203962488946, "loss": 1.2763, "step": 2821 }, { "epoch": 0.5024928774928775, "grad_norm": 0.5218976736068726, "learning_rate": 0.00019240669026563914, "loss": 1.0633, "step": 2822 }, { "epoch": 0.5026709401709402, "grad_norm": 0.5511452555656433, "learning_rate": 0.0001924013390958717, "loss": 0.9939, "step": 2823 }, { "epoch": 0.5028490028490028, "grad_norm": 0.5227555632591248, "learning_rate": 0.00019239598611569191, "loss": 1.2478, "step": 2824 }, { "epoch": 0.5030270655270656, "grad_norm": 0.5444719791412354, "learning_rate": 0.00019239063132520475, "loss": 1.1574, "step": 2825 }, { "epoch": 0.5032051282051282, "grad_norm": 0.4752781093120575, "learning_rate": 0.0001923852747245151, "loss": 0.9034, "step": 2826 }, { "epoch": 0.5033831908831908, "grad_norm": 0.5286496877670288, "learning_rate": 0.00019237991631372792, "loss": 1.1391, "step": 2827 }, { "epoch": 0.5035612535612536, "grad_norm": 0.5009933710098267, "learning_rate": 0.00019237455609294815, "loss": 1.2178, "step": 2828 }, { "epoch": 0.5037393162393162, "grad_norm": 0.5012276768684387, "learning_rate": 0.00019236919406228085, "loss": 0.9877, "step": 2829 }, { "epoch": 
0.5039173789173789, "grad_norm": 0.576508104801178, "learning_rate": 0.00019236383022183106, "loss": 1.1299, "step": 2830 }, { "epoch": 0.5040954415954416, "grad_norm": 0.4716590642929077, "learning_rate": 0.0001923584645717039, "loss": 1.0451, "step": 2831 }, { "epoch": 0.5042735042735043, "grad_norm": 0.5817418098449707, "learning_rate": 0.00019235309711200448, "loss": 1.0911, "step": 2832 }, { "epoch": 0.5044515669515669, "grad_norm": 0.5695745944976807, "learning_rate": 0.000192347727842838, "loss": 1.0229, "step": 2833 }, { "epoch": 0.5046296296296297, "grad_norm": 0.49127066135406494, "learning_rate": 0.00019234235676430958, "loss": 1.1377, "step": 2834 }, { "epoch": 0.5048076923076923, "grad_norm": 0.5426172614097595, "learning_rate": 0.00019233698387652453, "loss": 1.2427, "step": 2835 }, { "epoch": 0.5049857549857549, "grad_norm": 0.5342385172843933, "learning_rate": 0.0001923316091795881, "loss": 1.1427, "step": 2836 }, { "epoch": 0.5051638176638177, "grad_norm": 0.5480486750602722, "learning_rate": 0.00019232623267360558, "loss": 1.0647, "step": 2837 }, { "epoch": 0.5053418803418803, "grad_norm": 0.4584530293941498, "learning_rate": 0.00019232085435868235, "loss": 1.0461, "step": 2838 }, { "epoch": 0.5055199430199431, "grad_norm": 0.5992119908332825, "learning_rate": 0.00019231547423492371, "loss": 1.1456, "step": 2839 }, { "epoch": 0.5056980056980057, "grad_norm": 0.514018177986145, "learning_rate": 0.00019231009230243515, "loss": 1.2559, "step": 2840 }, { "epoch": 0.5058760683760684, "grad_norm": 0.5392283797264099, "learning_rate": 0.0001923047085613221, "loss": 1.044, "step": 2841 }, { "epoch": 0.5060541310541311, "grad_norm": 0.4486566483974457, "learning_rate": 0.00019229932301169, "loss": 1.0679, "step": 2842 }, { "epoch": 0.5062321937321937, "grad_norm": 0.4523460566997528, "learning_rate": 0.00019229393565364442, "loss": 1.1651, "step": 2843 }, { "epoch": 0.5064102564102564, "grad_norm": 0.6032688021659851, "learning_rate": 
0.0001922885464872909, "loss": 1.15, "step": 2844 }, { "epoch": 0.5065883190883191, "grad_norm": 0.5883688926696777, "learning_rate": 0.000192283155512735, "loss": 1.2179, "step": 2845 }, { "epoch": 0.5067663817663818, "grad_norm": 0.5534378886222839, "learning_rate": 0.00019227776273008238, "loss": 1.0387, "step": 2846 }, { "epoch": 0.5069444444444444, "grad_norm": 0.5899033546447754, "learning_rate": 0.00019227236813943872, "loss": 1.0812, "step": 2847 }, { "epoch": 0.5071225071225072, "grad_norm": 0.5718855261802673, "learning_rate": 0.00019226697174090965, "loss": 1.1375, "step": 2848 }, { "epoch": 0.5073005698005698, "grad_norm": 0.5080967545509338, "learning_rate": 0.00019226157353460094, "loss": 1.1421, "step": 2849 }, { "epoch": 0.5074786324786325, "grad_norm": 0.5253677368164062, "learning_rate": 0.0001922561735206184, "loss": 1.0166, "step": 2850 }, { "epoch": 0.5076566951566952, "grad_norm": 0.47797444462776184, "learning_rate": 0.00019225077169906772, "loss": 1.0504, "step": 2851 }, { "epoch": 0.5078347578347578, "grad_norm": 0.4911690652370453, "learning_rate": 0.0001922453680700548, "loss": 1.0629, "step": 2852 }, { "epoch": 0.5080128205128205, "grad_norm": 0.49678200483322144, "learning_rate": 0.00019223996263368557, "loss": 1.1672, "step": 2853 }, { "epoch": 0.5081908831908832, "grad_norm": 0.5451810359954834, "learning_rate": 0.00019223455539006586, "loss": 1.3031, "step": 2854 }, { "epoch": 0.5083689458689459, "grad_norm": 0.5708984136581421, "learning_rate": 0.00019222914633930166, "loss": 1.0986, "step": 2855 }, { "epoch": 0.5085470085470085, "grad_norm": 0.47232356667518616, "learning_rate": 0.00019222373548149888, "loss": 1.0449, "step": 2856 }, { "epoch": 0.5087250712250713, "grad_norm": 0.6027610898017883, "learning_rate": 0.0001922183228167636, "loss": 0.862, "step": 2857 }, { "epoch": 0.5089031339031339, "grad_norm": 0.5211802124977112, "learning_rate": 0.00019221290834520188, "loss": 1.1048, "step": 2858 }, { "epoch": 0.5090811965811965, 
"grad_norm": 0.45101237297058105, "learning_rate": 0.00019220749206691972, "loss": 1.0046, "step": 2859 }, { "epoch": 0.5092592592592593, "grad_norm": 0.5526158213615417, "learning_rate": 0.00019220207398202335, "loss": 1.2275, "step": 2860 }, { "epoch": 0.5094373219373219, "grad_norm": 0.48322010040283203, "learning_rate": 0.00019219665409061885, "loss": 0.9974, "step": 2861 }, { "epoch": 0.5096153846153846, "grad_norm": 0.4775219261646271, "learning_rate": 0.00019219123239281244, "loss": 1.1852, "step": 2862 }, { "epoch": 0.5097934472934473, "grad_norm": 0.46184200048446655, "learning_rate": 0.00019218580888871034, "loss": 0.9393, "step": 2863 }, { "epoch": 0.50997150997151, "grad_norm": 0.47495174407958984, "learning_rate": 0.00019218038357841883, "loss": 0.9631, "step": 2864 }, { "epoch": 0.5101495726495726, "grad_norm": 0.48600029945373535, "learning_rate": 0.00019217495646204418, "loss": 1.0498, "step": 2865 }, { "epoch": 0.5103276353276354, "grad_norm": 0.5801547169685364, "learning_rate": 0.00019216952753969274, "loss": 1.2181, "step": 2866 }, { "epoch": 0.510505698005698, "grad_norm": 0.5082106590270996, "learning_rate": 0.00019216409681147085, "loss": 1.2009, "step": 2867 }, { "epoch": 0.5106837606837606, "grad_norm": 0.4184330701828003, "learning_rate": 0.00019215866427748493, "loss": 0.8462, "step": 2868 }, { "epoch": 0.5108618233618234, "grad_norm": 0.518099844455719, "learning_rate": 0.00019215322993784147, "loss": 1.2091, "step": 2869 }, { "epoch": 0.511039886039886, "grad_norm": 0.569464921951294, "learning_rate": 0.0001921477937926469, "loss": 1.0264, "step": 2870 }, { "epoch": 0.5112179487179487, "grad_norm": 0.526767909526825, "learning_rate": 0.00019214235584200768, "loss": 1.1192, "step": 2871 }, { "epoch": 0.5113960113960114, "grad_norm": 0.6511057019233704, "learning_rate": 0.00019213691608603047, "loss": 1.3193, "step": 2872 }, { "epoch": 0.5115740740740741, "grad_norm": 0.48536401987075806, "learning_rate": 0.00019213147452482173, "loss": 
1.1671, "step": 2873 }, { "epoch": 0.5117521367521367, "grad_norm": 0.7972469329833984, "learning_rate": 0.00019212603115848818, "loss": 1.1393, "step": 2874 }, { "epoch": 0.5119301994301995, "grad_norm": 0.5543264746665955, "learning_rate": 0.00019212058598713642, "loss": 1.1436, "step": 2875 }, { "epoch": 0.5121082621082621, "grad_norm": 0.49688720703125, "learning_rate": 0.0001921151390108731, "loss": 1.0897, "step": 2876 }, { "epoch": 0.5122863247863247, "grad_norm": 0.4928736090660095, "learning_rate": 0.000192109690229805, "loss": 1.2426, "step": 2877 }, { "epoch": 0.5124643874643875, "grad_norm": 0.4917896091938019, "learning_rate": 0.0001921042396440389, "loss": 1.0047, "step": 2878 }, { "epoch": 0.5126424501424501, "grad_norm": 0.5485204458236694, "learning_rate": 0.00019209878725368152, "loss": 1.2615, "step": 2879 }, { "epoch": 0.5128205128205128, "grad_norm": 0.5229470133781433, "learning_rate": 0.0001920933330588397, "loss": 1.3249, "step": 2880 }, { "epoch": 0.5129985754985755, "grad_norm": 0.4783077538013458, "learning_rate": 0.00019208787705962037, "loss": 1.2004, "step": 2881 }, { "epoch": 0.5131766381766382, "grad_norm": 0.5106910467147827, "learning_rate": 0.00019208241925613035, "loss": 1.1745, "step": 2882 }, { "epoch": 0.5133547008547008, "grad_norm": 0.5308730006217957, "learning_rate": 0.00019207695964847666, "loss": 0.9706, "step": 2883 }, { "epoch": 0.5135327635327636, "grad_norm": 0.5489775538444519, "learning_rate": 0.00019207149823676617, "loss": 1.0073, "step": 2884 }, { "epoch": 0.5137108262108262, "grad_norm": 0.4992835521697998, "learning_rate": 0.00019206603502110596, "loss": 1.1053, "step": 2885 }, { "epoch": 0.5138888888888888, "grad_norm": 0.5304922461509705, "learning_rate": 0.00019206057000160302, "loss": 1.0565, "step": 2886 }, { "epoch": 0.5140669515669516, "grad_norm": 0.46411609649658203, "learning_rate": 0.00019205510317836448, "loss": 0.9202, "step": 2887 }, { "epoch": 0.5142450142450142, "grad_norm": 0.5236835479736328, 
"learning_rate": 0.0001920496345514974, "loss": 0.9075, "step": 2888 }, { "epoch": 0.5144230769230769, "grad_norm": 0.4416964054107666, "learning_rate": 0.00019204416412110895, "loss": 0.9225, "step": 2889 }, { "epoch": 0.5146011396011396, "grad_norm": 0.5470940470695496, "learning_rate": 0.00019203869188730633, "loss": 1.2195, "step": 2890 }, { "epoch": 0.5147792022792023, "grad_norm": 0.5380414128303528, "learning_rate": 0.0001920332178501967, "loss": 1.0731, "step": 2891 }, { "epoch": 0.5149572649572649, "grad_norm": 0.4405716359615326, "learning_rate": 0.00019202774200988737, "loss": 0.8739, "step": 2892 }, { "epoch": 0.5151353276353277, "grad_norm": 0.5222984552383423, "learning_rate": 0.0001920222643664856, "loss": 1.1806, "step": 2893 }, { "epoch": 0.5153133903133903, "grad_norm": 0.48545539379119873, "learning_rate": 0.0001920167849200987, "loss": 0.9939, "step": 2894 }, { "epoch": 0.5154914529914529, "grad_norm": 0.45078009366989136, "learning_rate": 0.0001920113036708341, "loss": 1.0085, "step": 2895 }, { "epoch": 0.5156695156695157, "grad_norm": 0.5029830932617188, "learning_rate": 0.00019200582061879913, "loss": 1.1095, "step": 2896 }, { "epoch": 0.5158475783475783, "grad_norm": 0.5316143035888672, "learning_rate": 0.00019200033576410118, "loss": 0.9883, "step": 2897 }, { "epoch": 0.5160256410256411, "grad_norm": 0.5282100439071655, "learning_rate": 0.0001919948491068478, "loss": 1.1441, "step": 2898 }, { "epoch": 0.5162037037037037, "grad_norm": 0.5145367980003357, "learning_rate": 0.00019198936064714647, "loss": 1.1999, "step": 2899 }, { "epoch": 0.5163817663817664, "grad_norm": 0.5385651588439941, "learning_rate": 0.00019198387038510468, "loss": 1.1831, "step": 2900 }, { "epoch": 0.5165598290598291, "grad_norm": 0.4971916377544403, "learning_rate": 0.00019197837832083002, "loss": 1.2518, "step": 2901 }, { "epoch": 0.5167378917378918, "grad_norm": 0.5253807306289673, "learning_rate": 0.00019197288445443016, "loss": 1.0788, "step": 2902 }, { "epoch": 
0.5169159544159544, "grad_norm": 0.49724945425987244, "learning_rate": 0.00019196738878601263, "loss": 1.0985, "step": 2903 }, { "epoch": 0.5170940170940171, "grad_norm": 0.5327325463294983, "learning_rate": 0.0001919618913156852, "loss": 1.2862, "step": 2904 }, { "epoch": 0.5172720797720798, "grad_norm": 0.639999270439148, "learning_rate": 0.00019195639204355554, "loss": 1.2052, "step": 2905 }, { "epoch": 0.5174501424501424, "grad_norm": 0.4630785584449768, "learning_rate": 0.0001919508909697314, "loss": 1.1157, "step": 2906 }, { "epoch": 0.5176282051282052, "grad_norm": 0.513949990272522, "learning_rate": 0.00019194538809432055, "loss": 1.0047, "step": 2907 }, { "epoch": 0.5178062678062678, "grad_norm": 0.488034725189209, "learning_rate": 0.0001919398834174308, "loss": 0.9008, "step": 2908 }, { "epoch": 0.5179843304843305, "grad_norm": 0.4892788529396057, "learning_rate": 0.00019193437693917006, "loss": 1.1024, "step": 2909 }, { "epoch": 0.5181623931623932, "grad_norm": 0.5503842830657959, "learning_rate": 0.00019192886865964618, "loss": 1.2283, "step": 2910 }, { "epoch": 0.5183404558404558, "grad_norm": 0.48885393142700195, "learning_rate": 0.00019192335857896707, "loss": 0.9522, "step": 2911 }, { "epoch": 0.5185185185185185, "grad_norm": 0.5479527115821838, "learning_rate": 0.00019191784669724072, "loss": 1.1616, "step": 2912 }, { "epoch": 0.5186965811965812, "grad_norm": 0.42701148986816406, "learning_rate": 0.00019191233301457506, "loss": 0.8434, "step": 2913 }, { "epoch": 0.5188746438746439, "grad_norm": 0.4273422658443451, "learning_rate": 0.00019190681753107822, "loss": 0.8316, "step": 2914 }, { "epoch": 0.5190527065527065, "grad_norm": 0.5047736763954163, "learning_rate": 0.00019190130024685818, "loss": 1.171, "step": 2915 }, { "epoch": 0.5192307692307693, "grad_norm": 0.5221177935600281, "learning_rate": 0.00019189578116202307, "loss": 1.0256, "step": 2916 }, { "epoch": 0.5194088319088319, "grad_norm": 0.4782322943210602, "learning_rate": 
0.00019189026027668105, "loss": 0.8598, "step": 2917 }, { "epoch": 0.5195868945868946, "grad_norm": 0.5627185702323914, "learning_rate": 0.00019188473759094022, "loss": 1.1825, "step": 2918 }, { "epoch": 0.5197649572649573, "grad_norm": 0.5036423206329346, "learning_rate": 0.00019187921310490888, "loss": 1.0881, "step": 2919 }, { "epoch": 0.51994301994302, "grad_norm": 0.4271143972873688, "learning_rate": 0.0001918736868186952, "loss": 0.9265, "step": 2920 }, { "epoch": 0.5201210826210826, "grad_norm": 0.5427432656288147, "learning_rate": 0.00019186815873240747, "loss": 1.196, "step": 2921 }, { "epoch": 0.5202991452991453, "grad_norm": 0.5494198203086853, "learning_rate": 0.00019186262884615402, "loss": 1.1207, "step": 2922 }, { "epoch": 0.520477207977208, "grad_norm": 0.5305119752883911, "learning_rate": 0.0001918570971600432, "loss": 1.0393, "step": 2923 }, { "epoch": 0.5206552706552706, "grad_norm": 0.46713170409202576, "learning_rate": 0.00019185156367418333, "loss": 0.9583, "step": 2924 }, { "epoch": 0.5208333333333334, "grad_norm": 0.597776472568512, "learning_rate": 0.00019184602838868292, "loss": 1.2978, "step": 2925 }, { "epoch": 0.521011396011396, "grad_norm": 0.520976722240448, "learning_rate": 0.00019184049130365036, "loss": 1.0515, "step": 2926 }, { "epoch": 0.5211894586894587, "grad_norm": 0.5266290307044983, "learning_rate": 0.00019183495241919415, "loss": 1.0437, "step": 2927 }, { "epoch": 0.5213675213675214, "grad_norm": 0.50911545753479, "learning_rate": 0.00019182941173542285, "loss": 0.9977, "step": 2928 }, { "epoch": 0.521545584045584, "grad_norm": 0.4924670457839966, "learning_rate": 0.00019182386925244496, "loss": 0.9309, "step": 2929 }, { "epoch": 0.5217236467236467, "grad_norm": 0.4979301393032074, "learning_rate": 0.00019181832497036912, "loss": 0.87, "step": 2930 }, { "epoch": 0.5219017094017094, "grad_norm": 0.6307916045188904, "learning_rate": 0.0001918127788893039, "loss": 1.2159, "step": 2931 }, { "epoch": 0.5220797720797721, 
"grad_norm": 0.4915660619735718, "learning_rate": 0.00019180723100935802, "loss": 1.0828, "step": 2932 }, { "epoch": 0.5222578347578347, "grad_norm": 0.4312742352485657, "learning_rate": 0.00019180168133064017, "loss": 1.0496, "step": 2933 }, { "epoch": 0.5224358974358975, "grad_norm": 0.6006124019622803, "learning_rate": 0.00019179612985325908, "loss": 1.0751, "step": 2934 }, { "epoch": 0.5226139601139601, "grad_norm": 0.5332220196723938, "learning_rate": 0.0001917905765773235, "loss": 1.2601, "step": 2935 }, { "epoch": 0.5227920227920227, "grad_norm": 0.4877954423427582, "learning_rate": 0.00019178502150294223, "loss": 1.2279, "step": 2936 }, { "epoch": 0.5229700854700855, "grad_norm": 0.5975968837738037, "learning_rate": 0.00019177946463022418, "loss": 1.3371, "step": 2937 }, { "epoch": 0.5231481481481481, "grad_norm": 0.5363923907279968, "learning_rate": 0.00019177390595927815, "loss": 1.0705, "step": 2938 }, { "epoch": 0.5233262108262108, "grad_norm": 0.4314909875392914, "learning_rate": 0.0001917683454902131, "loss": 0.9172, "step": 2939 }, { "epoch": 0.5235042735042735, "grad_norm": 0.46187883615493774, "learning_rate": 0.0001917627832231379, "loss": 1.1201, "step": 2940 }, { "epoch": 0.5236823361823362, "grad_norm": 0.4648260772228241, "learning_rate": 0.00019175721915816162, "loss": 1.1307, "step": 2941 }, { "epoch": 0.5238603988603988, "grad_norm": 0.4427165687084198, "learning_rate": 0.00019175165329539325, "loss": 0.9459, "step": 2942 }, { "epoch": 0.5240384615384616, "grad_norm": 0.4645056128501892, "learning_rate": 0.0001917460856349418, "loss": 0.9176, "step": 2943 }, { "epoch": 0.5242165242165242, "grad_norm": 0.4939568042755127, "learning_rate": 0.0001917405161769164, "loss": 1.1056, "step": 2944 }, { "epoch": 0.5243945868945868, "grad_norm": 0.6057310104370117, "learning_rate": 0.00019173494492142617, "loss": 1.2714, "step": 2945 }, { "epoch": 0.5245726495726496, "grad_norm": 0.5038546323776245, "learning_rate": 0.00019172937186858025, "loss": 
0.911, "step": 2946 }, { "epoch": 0.5247507122507122, "grad_norm": 0.5521321296691895, "learning_rate": 0.00019172379701848784, "loss": 1.0781, "step": 2947 }, { "epoch": 0.5249287749287749, "grad_norm": 0.516979455947876, "learning_rate": 0.00019171822037125817, "loss": 1.1051, "step": 2948 }, { "epoch": 0.5251068376068376, "grad_norm": 0.5443150997161865, "learning_rate": 0.0001917126419270005, "loss": 1.0802, "step": 2949 }, { "epoch": 0.5252849002849003, "grad_norm": 0.5373311042785645, "learning_rate": 0.00019170706168582412, "loss": 0.9313, "step": 2950 }, { "epoch": 0.5254629629629629, "grad_norm": 0.7511917948722839, "learning_rate": 0.0001917014796478384, "loss": 1.1958, "step": 2951 }, { "epoch": 0.5256410256410257, "grad_norm": 0.49893468618392944, "learning_rate": 0.00019169589581315263, "loss": 0.9387, "step": 2952 }, { "epoch": 0.5258190883190883, "grad_norm": 0.48010289669036865, "learning_rate": 0.00019169031018187628, "loss": 1.2459, "step": 2953 }, { "epoch": 0.5259971509971509, "grad_norm": 0.48768678307533264, "learning_rate": 0.0001916847227541188, "loss": 1.0127, "step": 2954 }, { "epoch": 0.5261752136752137, "grad_norm": 0.5973068475723267, "learning_rate": 0.00019167913352998963, "loss": 1.1685, "step": 2955 }, { "epoch": 0.5263532763532763, "grad_norm": 0.5567806959152222, "learning_rate": 0.00019167354250959826, "loss": 1.142, "step": 2956 }, { "epoch": 0.5265313390313391, "grad_norm": 0.47819700837135315, "learning_rate": 0.00019166794969305428, "loss": 0.712, "step": 2957 }, { "epoch": 0.5267094017094017, "grad_norm": 0.5191744565963745, "learning_rate": 0.00019166235508046725, "loss": 1.2208, "step": 2958 }, { "epoch": 0.5268874643874644, "grad_norm": 0.4987856149673462, "learning_rate": 0.00019165675867194675, "loss": 1.0466, "step": 2959 }, { "epoch": 0.5270655270655271, "grad_norm": 0.5017665028572083, "learning_rate": 0.0001916511604676025, "loss": 1.1236, "step": 2960 }, { "epoch": 0.5272435897435898, "grad_norm": 
0.5115348696708679, "learning_rate": 0.00019164556046754415, "loss": 1.1497, "step": 2961 }, { "epoch": 0.5274216524216524, "grad_norm": 0.4934345781803131, "learning_rate": 0.0001916399586718814, "loss": 1.0183, "step": 2962 }, { "epoch": 0.5275997150997151, "grad_norm": 0.5033719539642334, "learning_rate": 0.00019163435508072404, "loss": 1.0256, "step": 2963 }, { "epoch": 0.5277777777777778, "grad_norm": 0.5325372219085693, "learning_rate": 0.00019162874969418184, "loss": 1.1384, "step": 2964 }, { "epoch": 0.5279558404558404, "grad_norm": 0.4901772141456604, "learning_rate": 0.00019162314251236465, "loss": 1.0831, "step": 2965 }, { "epoch": 0.5281339031339032, "grad_norm": 0.4743805229663849, "learning_rate": 0.0001916175335353823, "loss": 1.1894, "step": 2966 }, { "epoch": 0.5283119658119658, "grad_norm": 0.5439450740814209, "learning_rate": 0.00019161192276334466, "loss": 1.2066, "step": 2967 }, { "epoch": 0.5284900284900285, "grad_norm": 0.5123090744018555, "learning_rate": 0.00019160631019636174, "loss": 1.1829, "step": 2968 }, { "epoch": 0.5286680911680912, "grad_norm": 0.5995343923568726, "learning_rate": 0.00019160069583454346, "loss": 1.4872, "step": 2969 }, { "epoch": 0.5288461538461539, "grad_norm": 0.4596657156944275, "learning_rate": 0.00019159507967799985, "loss": 0.8948, "step": 2970 }, { "epoch": 0.5290242165242165, "grad_norm": 0.5533682107925415, "learning_rate": 0.0001915894617268409, "loss": 1.1779, "step": 2971 }, { "epoch": 0.5292022792022792, "grad_norm": 0.3860718309879303, "learning_rate": 0.00019158384198117673, "loss": 0.6424, "step": 2972 }, { "epoch": 0.5293803418803419, "grad_norm": 0.47424063086509705, "learning_rate": 0.0001915782204411174, "loss": 1.1592, "step": 2973 }, { "epoch": 0.5295584045584045, "grad_norm": 0.5050228834152222, "learning_rate": 0.00019157259710677309, "loss": 1.1971, "step": 2974 }, { "epoch": 0.5297364672364673, "grad_norm": 0.6080113649368286, "learning_rate": 0.00019156697197825396, "loss": 1.1511, "step": 
2975 }, { "epoch": 0.5299145299145299, "grad_norm": 0.4805932641029358, "learning_rate": 0.00019156134505567024, "loss": 1.1033, "step": 2976 }, { "epoch": 0.5300925925925926, "grad_norm": 0.4835345447063446, "learning_rate": 0.00019155571633913215, "loss": 1.1832, "step": 2977 }, { "epoch": 0.5302706552706553, "grad_norm": 0.5183725953102112, "learning_rate": 0.00019155008582875, "loss": 0.9221, "step": 2978 }, { "epoch": 0.530448717948718, "grad_norm": 0.48015761375427246, "learning_rate": 0.00019154445352463412, "loss": 1.045, "step": 2979 }, { "epoch": 0.5306267806267806, "grad_norm": 0.4670043885707855, "learning_rate": 0.0001915388194268948, "loss": 0.9025, "step": 2980 }, { "epoch": 0.5308048433048433, "grad_norm": 0.5048824548721313, "learning_rate": 0.0001915331835356425, "loss": 1.0681, "step": 2981 }, { "epoch": 0.530982905982906, "grad_norm": 0.4785633981227875, "learning_rate": 0.00019152754585098758, "loss": 1.0097, "step": 2982 }, { "epoch": 0.5311609686609686, "grad_norm": 0.4829573333263397, "learning_rate": 0.00019152190637304056, "loss": 1.0856, "step": 2983 }, { "epoch": 0.5313390313390314, "grad_norm": 0.5425563454627991, "learning_rate": 0.00019151626510191189, "loss": 1.2313, "step": 2984 }, { "epoch": 0.531517094017094, "grad_norm": 0.5532251596450806, "learning_rate": 0.0001915106220377121, "loss": 1.0328, "step": 2985 }, { "epoch": 0.5316951566951567, "grad_norm": 0.47016972303390503, "learning_rate": 0.0001915049771805518, "loss": 1.2003, "step": 2986 }, { "epoch": 0.5318732193732194, "grad_norm": 0.5241743326187134, "learning_rate": 0.00019149933053054153, "loss": 1.046, "step": 2987 }, { "epoch": 0.532051282051282, "grad_norm": 0.5043526887893677, "learning_rate": 0.00019149368208779197, "loss": 1.0022, "step": 2988 }, { "epoch": 0.5322293447293447, "grad_norm": 0.5563312768936157, "learning_rate": 0.00019148803185241374, "loss": 1.1017, "step": 2989 }, { "epoch": 0.5324074074074074, "grad_norm": 0.5414231419563293, "learning_rate": 
0.00019148237982451763, "loss": 0.9649, "step": 2990 }, { "epoch": 0.5325854700854701, "grad_norm": 0.5452231764793396, "learning_rate": 0.0001914767260042143, "loss": 1.2281, "step": 2991 }, { "epoch": 0.5327635327635327, "grad_norm": 0.5500698685646057, "learning_rate": 0.00019147107039161454, "loss": 1.2865, "step": 2992 }, { "epoch": 0.5329415954415955, "grad_norm": 0.49747416377067566, "learning_rate": 0.00019146541298682918, "loss": 1.1296, "step": 2993 }, { "epoch": 0.5331196581196581, "grad_norm": 0.5684167742729187, "learning_rate": 0.00019145975378996903, "loss": 1.0685, "step": 2994 }, { "epoch": 0.5332977207977208, "grad_norm": 0.5411235690116882, "learning_rate": 0.00019145409280114502, "loss": 1.1372, "step": 2995 }, { "epoch": 0.5334757834757835, "grad_norm": 0.5006675720214844, "learning_rate": 0.00019144843002046806, "loss": 1.0688, "step": 2996 }, { "epoch": 0.5336538461538461, "grad_norm": 0.4591315686702728, "learning_rate": 0.00019144276544804908, "loss": 1.1071, "step": 2997 }, { "epoch": 0.5338319088319088, "grad_norm": 0.5615306496620178, "learning_rate": 0.000191437099083999, "loss": 1.1033, "step": 2998 }, { "epoch": 0.5340099715099715, "grad_norm": 0.4986817240715027, "learning_rate": 0.00019143143092842897, "loss": 1.176, "step": 2999 }, { "epoch": 0.5341880341880342, "grad_norm": 0.5017120242118835, "learning_rate": 0.00019142576098144995, "loss": 1.0174, "step": 3000 }, { "epoch": 0.5343660968660968, "grad_norm": 0.508298397064209, "learning_rate": 0.0001914200892431731, "loss": 1.164, "step": 3001 }, { "epoch": 0.5345441595441596, "grad_norm": 0.48068809509277344, "learning_rate": 0.0001914144157137095, "loss": 0.7959, "step": 3002 }, { "epoch": 0.5347222222222222, "grad_norm": 0.6347028017044067, "learning_rate": 0.0001914087403931703, "loss": 1.1727, "step": 3003 }, { "epoch": 0.5349002849002849, "grad_norm": 0.5558401942253113, "learning_rate": 0.00019140306328166676, "loss": 1.2282, "step": 3004 }, { "epoch": 0.5350783475783476, 
"grad_norm": 0.5093596577644348, "learning_rate": 0.00019139738437931004, "loss": 1.3258, "step": 3005 }, { "epoch": 0.5352564102564102, "grad_norm": 0.4653106927871704, "learning_rate": 0.0001913917036862114, "loss": 1.1062, "step": 3006 }, { "epoch": 0.5354344729344729, "grad_norm": 0.48085781931877136, "learning_rate": 0.00019138602120248222, "loss": 0.9019, "step": 3007 }, { "epoch": 0.5356125356125356, "grad_norm": 0.5174745321273804, "learning_rate": 0.0001913803369282338, "loss": 1.044, "step": 3008 }, { "epoch": 0.5357905982905983, "grad_norm": 0.5359669327735901, "learning_rate": 0.00019137465086357746, "loss": 1.0723, "step": 3009 }, { "epoch": 0.5359686609686609, "grad_norm": 0.5583470463752747, "learning_rate": 0.00019136896300862467, "loss": 1.2192, "step": 3010 }, { "epoch": 0.5361467236467237, "grad_norm": 0.4905693829059601, "learning_rate": 0.00019136327336348688, "loss": 1.2372, "step": 3011 }, { "epoch": 0.5363247863247863, "grad_norm": 0.5741264820098877, "learning_rate": 0.0001913575819282755, "loss": 1.1703, "step": 3012 }, { "epoch": 0.5365028490028491, "grad_norm": 0.577033281326294, "learning_rate": 0.0001913518887031021, "loss": 1.1555, "step": 3013 }, { "epoch": 0.5366809116809117, "grad_norm": 0.46795153617858887, "learning_rate": 0.00019134619368807822, "loss": 0.8583, "step": 3014 }, { "epoch": 0.5368589743589743, "grad_norm": 0.5973345637321472, "learning_rate": 0.0001913404968833154, "loss": 1.1509, "step": 3015 }, { "epoch": 0.5370370370370371, "grad_norm": 0.62020343542099, "learning_rate": 0.00019133479828892531, "loss": 1.0781, "step": 3016 }, { "epoch": 0.5372150997150997, "grad_norm": 0.5342286229133606, "learning_rate": 0.00019132909790501958, "loss": 1.1556, "step": 3017 }, { "epoch": 0.5373931623931624, "grad_norm": 0.49612846970558167, "learning_rate": 0.0001913233957317099, "loss": 0.9027, "step": 3018 }, { "epoch": 0.5375712250712251, "grad_norm": 0.5403908491134644, "learning_rate": 0.00019131769176910796, "loss": 
1.1125, "step": 3019 }, { "epoch": 0.5377492877492878, "grad_norm": 0.4952050447463989, "learning_rate": 0.0001913119860173256, "loss": 1.2329, "step": 3020 }, { "epoch": 0.5379273504273504, "grad_norm": 0.5877819657325745, "learning_rate": 0.0001913062784764745, "loss": 1.2855, "step": 3021 }, { "epoch": 0.5381054131054132, "grad_norm": 0.49312907457351685, "learning_rate": 0.00019130056914666655, "loss": 1.0212, "step": 3022 }, { "epoch": 0.5382834757834758, "grad_norm": 0.45544490218162537, "learning_rate": 0.00019129485802801366, "loss": 0.9748, "step": 3023 }, { "epoch": 0.5384615384615384, "grad_norm": 0.5535242557525635, "learning_rate": 0.00019128914512062762, "loss": 1.2134, "step": 3024 }, { "epoch": 0.5386396011396012, "grad_norm": 0.45369696617126465, "learning_rate": 0.00019128343042462044, "loss": 0.9964, "step": 3025 }, { "epoch": 0.5388176638176638, "grad_norm": 0.6240725517272949, "learning_rate": 0.00019127771394010406, "loss": 1.425, "step": 3026 }, { "epoch": 0.5389957264957265, "grad_norm": 0.4859573245048523, "learning_rate": 0.0001912719956671905, "loss": 1.087, "step": 3027 }, { "epoch": 0.5391737891737892, "grad_norm": 0.47529762983322144, "learning_rate": 0.0001912662756059918, "loss": 0.9517, "step": 3028 }, { "epoch": 0.5393518518518519, "grad_norm": 0.5317288637161255, "learning_rate": 0.00019126055375661997, "loss": 1.0945, "step": 3029 }, { "epoch": 0.5395299145299145, "grad_norm": 0.55974280834198, "learning_rate": 0.00019125483011918722, "loss": 1.0794, "step": 3030 }, { "epoch": 0.5397079772079773, "grad_norm": 0.48579123616218567, "learning_rate": 0.0001912491046938056, "loss": 1.1421, "step": 3031 }, { "epoch": 0.5398860398860399, "grad_norm": 0.4917181134223938, "learning_rate": 0.00019124337748058733, "loss": 0.9708, "step": 3032 }, { "epoch": 0.5400641025641025, "grad_norm": 0.525291383266449, "learning_rate": 0.00019123764847964466, "loss": 1.064, "step": 3033 }, { "epoch": 0.5402421652421653, "grad_norm": 0.5733301639556885, 
"learning_rate": 0.00019123191769108977, "loss": 1.2142, "step": 3034 }, { "epoch": 0.5404202279202279, "grad_norm": 0.5400987863540649, "learning_rate": 0.00019122618511503494, "loss": 1.1309, "step": 3035 }, { "epoch": 0.5405982905982906, "grad_norm": 0.6261051893234253, "learning_rate": 0.00019122045075159257, "loss": 1.2112, "step": 3036 }, { "epoch": 0.5407763532763533, "grad_norm": 0.5483576059341431, "learning_rate": 0.0001912147146008749, "loss": 1.2705, "step": 3037 }, { "epoch": 0.540954415954416, "grad_norm": 0.5442137122154236, "learning_rate": 0.00019120897666299443, "loss": 1.2512, "step": 3038 }, { "epoch": 0.5411324786324786, "grad_norm": 0.5680811405181885, "learning_rate": 0.00019120323693806355, "loss": 1.392, "step": 3039 }, { "epoch": 0.5413105413105413, "grad_norm": 0.5237287878990173, "learning_rate": 0.00019119749542619466, "loss": 1.1599, "step": 3040 }, { "epoch": 0.541488603988604, "grad_norm": 0.48119300603866577, "learning_rate": 0.00019119175212750032, "loss": 1.0976, "step": 3041 }, { "epoch": 0.5416666666666666, "grad_norm": 0.507033109664917, "learning_rate": 0.00019118600704209302, "loss": 1.0181, "step": 3042 }, { "epoch": 0.5418447293447294, "grad_norm": 0.484672874212265, "learning_rate": 0.00019118026017008531, "loss": 1.1636, "step": 3043 }, { "epoch": 0.542022792022792, "grad_norm": 0.4923502206802368, "learning_rate": 0.00019117451151158985, "loss": 1.0388, "step": 3044 }, { "epoch": 0.5422008547008547, "grad_norm": 0.4882057309150696, "learning_rate": 0.00019116876106671922, "loss": 1.131, "step": 3045 }, { "epoch": 0.5423789173789174, "grad_norm": 0.6068355441093445, "learning_rate": 0.0001911630088355861, "loss": 1.3218, "step": 3046 }, { "epoch": 0.54255698005698, "grad_norm": 0.5012881755828857, "learning_rate": 0.0001911572548183032, "loss": 1.0514, "step": 3047 }, { "epoch": 0.5427350427350427, "grad_norm": 0.49849793314933777, "learning_rate": 0.00019115149901498328, "loss": 1.0003, "step": 3048 }, { "epoch": 
0.5429131054131054, "grad_norm": 0.4934251010417938, "learning_rate": 0.00019114574142573904, "loss": 1.0319, "step": 3049 }, { "epoch": 0.5430911680911681, "grad_norm": 0.4947762191295624, "learning_rate": 0.00019113998205068334, "loss": 1.0906, "step": 3050 }, { "epoch": 0.5432692307692307, "grad_norm": 0.5449416041374207, "learning_rate": 0.00019113422088992907, "loss": 0.9093, "step": 3051 }, { "epoch": 0.5434472934472935, "grad_norm": 0.49395284056663513, "learning_rate": 0.00019112845794358902, "loss": 1.0071, "step": 3052 }, { "epoch": 0.5436253561253561, "grad_norm": 0.5478728413581848, "learning_rate": 0.00019112269321177613, "loss": 1.2124, "step": 3053 }, { "epoch": 0.5438034188034188, "grad_norm": 0.6205173134803772, "learning_rate": 0.0001911169266946034, "loss": 1.021, "step": 3054 }, { "epoch": 0.5439814814814815, "grad_norm": 0.4777783751487732, "learning_rate": 0.00019111115839218372, "loss": 0.9192, "step": 3055 }, { "epoch": 0.5441595441595442, "grad_norm": 0.5541689991950989, "learning_rate": 0.00019110538830463018, "loss": 1.1248, "step": 3056 }, { "epoch": 0.5443376068376068, "grad_norm": 0.4750942289829254, "learning_rate": 0.0001910996164320558, "loss": 1.3147, "step": 3057 }, { "epoch": 0.5445156695156695, "grad_norm": 0.6283948421478271, "learning_rate": 0.0001910938427745737, "loss": 1.0919, "step": 3058 }, { "epoch": 0.5446937321937322, "grad_norm": 0.552725076675415, "learning_rate": 0.00019108806733229698, "loss": 1.3807, "step": 3059 }, { "epoch": 0.5448717948717948, "grad_norm": 0.4832848310470581, "learning_rate": 0.0001910822901053388, "loss": 1.0705, "step": 3060 }, { "epoch": 0.5450498575498576, "grad_norm": 0.6468375325202942, "learning_rate": 0.00019107651109381233, "loss": 1.0766, "step": 3061 }, { "epoch": 0.5452279202279202, "grad_norm": 0.5464920401573181, "learning_rate": 0.00019107073029783083, "loss": 1.0453, "step": 3062 }, { "epoch": 0.5454059829059829, "grad_norm": 0.5321210026741028, "learning_rate": 
0.0001910649477175076, "loss": 1.2326, "step": 3063 }, { "epoch": 0.5455840455840456, "grad_norm": 0.5572962164878845, "learning_rate": 0.00019105916335295582, "loss": 1.0673, "step": 3064 }, { "epoch": 0.5457621082621082, "grad_norm": 0.5239177942276001, "learning_rate": 0.00019105337720428894, "loss": 1.04, "step": 3065 }, { "epoch": 0.5459401709401709, "grad_norm": 0.5633319616317749, "learning_rate": 0.00019104758927162023, "loss": 0.9606, "step": 3066 }, { "epoch": 0.5461182336182336, "grad_norm": 0.5317914485931396, "learning_rate": 0.0001910417995550632, "loss": 1.0651, "step": 3067 }, { "epoch": 0.5462962962962963, "grad_norm": 0.5126453638076782, "learning_rate": 0.00019103600805473118, "loss": 1.0316, "step": 3068 }, { "epoch": 0.5464743589743589, "grad_norm": 0.5262107253074646, "learning_rate": 0.00019103021477073773, "loss": 1.0752, "step": 3069 }, { "epoch": 0.5466524216524217, "grad_norm": 0.5384877324104309, "learning_rate": 0.0001910244197031963, "loss": 1.1731, "step": 3070 }, { "epoch": 0.5468304843304843, "grad_norm": 0.5126553773880005, "learning_rate": 0.00019101862285222048, "loss": 1.2229, "step": 3071 }, { "epoch": 0.5470085470085471, "grad_norm": 0.4841194450855255, "learning_rate": 0.0001910128242179238, "loss": 0.9955, "step": 3072 }, { "epoch": 0.5471866096866097, "grad_norm": 0.526546061038971, "learning_rate": 0.00019100702380041987, "loss": 1.2436, "step": 3073 }, { "epoch": 0.5473646723646723, "grad_norm": 0.5085833072662354, "learning_rate": 0.0001910012215998224, "loss": 1.011, "step": 3074 }, { "epoch": 0.5475427350427351, "grad_norm": 0.5149994492530823, "learning_rate": 0.000190995417616245, "loss": 0.8632, "step": 3075 }, { "epoch": 0.5477207977207977, "grad_norm": 0.48079630732536316, "learning_rate": 0.00019098961184980145, "loss": 1.1115, "step": 3076 }, { "epoch": 0.5478988603988604, "grad_norm": 0.5769477486610413, "learning_rate": 0.00019098380430060546, "loss": 0.9544, "step": 3077 }, { "epoch": 0.5480769230769231, 
"grad_norm": 0.5260093808174133, "learning_rate": 0.0001909779949687708, "loss": 1.2354, "step": 3078 }, { "epoch": 0.5482549857549858, "grad_norm": 0.5518734455108643, "learning_rate": 0.00019097218385441135, "loss": 1.1944, "step": 3079 }, { "epoch": 0.5484330484330484, "grad_norm": 0.5436808466911316, "learning_rate": 0.00019096637095764095, "loss": 1.0717, "step": 3080 }, { "epoch": 0.5486111111111112, "grad_norm": 0.4749584197998047, "learning_rate": 0.00019096055627857344, "loss": 1.0417, "step": 3081 }, { "epoch": 0.5487891737891738, "grad_norm": 0.5485591292381287, "learning_rate": 0.0001909547398173228, "loss": 1.2515, "step": 3082 }, { "epoch": 0.5489672364672364, "grad_norm": 0.5751016736030579, "learning_rate": 0.00019094892157400296, "loss": 1.2112, "step": 3083 }, { "epoch": 0.5491452991452992, "grad_norm": 0.5404475331306458, "learning_rate": 0.00019094310154872795, "loss": 0.4334, "step": 3084 }, { "epoch": 0.5493233618233618, "grad_norm": 0.5198020935058594, "learning_rate": 0.00019093727974161178, "loss": 0.9759, "step": 3085 }, { "epoch": 0.5495014245014245, "grad_norm": 0.4893439710140228, "learning_rate": 0.0001909314561527685, "loss": 1.1287, "step": 3086 }, { "epoch": 0.5496794871794872, "grad_norm": 0.5675956606864929, "learning_rate": 0.00019092563078231228, "loss": 1.234, "step": 3087 }, { "epoch": 0.5498575498575499, "grad_norm": 0.5539132356643677, "learning_rate": 0.00019091980363035714, "loss": 1.2378, "step": 3088 }, { "epoch": 0.5500356125356125, "grad_norm": 0.5194353461265564, "learning_rate": 0.00019091397469701735, "loss": 1.1338, "step": 3089 }, { "epoch": 0.5502136752136753, "grad_norm": 0.5143756866455078, "learning_rate": 0.0001909081439824071, "loss": 0.9118, "step": 3090 }, { "epoch": 0.5503917378917379, "grad_norm": 0.5624327659606934, "learning_rate": 0.0001909023114866406, "loss": 1.035, "step": 3091 }, { "epoch": 0.5505698005698005, "grad_norm": 0.5285067558288574, "learning_rate": 0.0001908964772098321, "loss": 1.0451, 
"step": 3092 }, { "epoch": 0.5507478632478633, "grad_norm": 0.5730587244033813, "learning_rate": 0.000190890641152096, "loss": 1.0672, "step": 3093 }, { "epoch": 0.5509259259259259, "grad_norm": 0.5822951197624207, "learning_rate": 0.0001908848033135466, "loss": 1.1791, "step": 3094 }, { "epoch": 0.5511039886039886, "grad_norm": 0.596161961555481, "learning_rate": 0.00019087896369429826, "loss": 1.0954, "step": 3095 }, { "epoch": 0.5512820512820513, "grad_norm": 0.5138190984725952, "learning_rate": 0.00019087312229446542, "loss": 0.896, "step": 3096 }, { "epoch": 0.551460113960114, "grad_norm": 0.5061872601509094, "learning_rate": 0.0001908672791141625, "loss": 1.1017, "step": 3097 }, { "epoch": 0.5516381766381766, "grad_norm": 0.5189547538757324, "learning_rate": 0.00019086143415350404, "loss": 1.2906, "step": 3098 }, { "epoch": 0.5518162393162394, "grad_norm": 0.5640039443969727, "learning_rate": 0.00019085558741260448, "loss": 1.1001, "step": 3099 }, { "epoch": 0.551994301994302, "grad_norm": 0.453867107629776, "learning_rate": 0.00019084973889157844, "loss": 0.9731, "step": 3100 }, { "epoch": 0.5521723646723646, "grad_norm": 0.5431303977966309, "learning_rate": 0.0001908438885905405, "loss": 1.3511, "step": 3101 }, { "epoch": 0.5523504273504274, "grad_norm": 0.47693368792533875, "learning_rate": 0.00019083803650960527, "loss": 1.0426, "step": 3102 }, { "epoch": 0.55252849002849, "grad_norm": 0.4663422703742981, "learning_rate": 0.00019083218264888743, "loss": 1.05, "step": 3103 }, { "epoch": 0.5527065527065527, "grad_norm": 0.561354398727417, "learning_rate": 0.00019082632700850164, "loss": 0.9608, "step": 3104 }, { "epoch": 0.5528846153846154, "grad_norm": 0.4981916844844818, "learning_rate": 0.00019082046958856266, "loss": 1.1935, "step": 3105 }, { "epoch": 0.5530626780626781, "grad_norm": 0.5301326513290405, "learning_rate": 0.0001908146103891852, "loss": 1.0646, "step": 3106 }, { "epoch": 0.5532407407407407, "grad_norm": 0.5023610591888428, "learning_rate": 
0.00019080874941048416, "loss": 1.127, "step": 3107 }, { "epoch": 0.5534188034188035, "grad_norm": 0.5172514319419861, "learning_rate": 0.00019080288665257426, "loss": 1.0435, "step": 3108 }, { "epoch": 0.5535968660968661, "grad_norm": 0.6340598464012146, "learning_rate": 0.00019079702211557048, "loss": 1.3528, "step": 3109 }, { "epoch": 0.5537749287749287, "grad_norm": 0.46882256865501404, "learning_rate": 0.0001907911557995876, "loss": 1.1361, "step": 3110 }, { "epoch": 0.5539529914529915, "grad_norm": 0.6401382088661194, "learning_rate": 0.00019078528770474068, "loss": 1.2415, "step": 3111 }, { "epoch": 0.5541310541310541, "grad_norm": 0.5141328573226929, "learning_rate": 0.00019077941783114463, "loss": 1.0505, "step": 3112 }, { "epoch": 0.5543091168091168, "grad_norm": 0.522318959236145, "learning_rate": 0.00019077354617891444, "loss": 1.0964, "step": 3113 }, { "epoch": 0.5544871794871795, "grad_norm": 0.539551854133606, "learning_rate": 0.00019076767274816517, "loss": 1.0735, "step": 3114 }, { "epoch": 0.5546652421652422, "grad_norm": 0.495320200920105, "learning_rate": 0.00019076179753901195, "loss": 0.9754, "step": 3115 }, { "epoch": 0.5548433048433048, "grad_norm": 0.5499199628829956, "learning_rate": 0.00019075592055156984, "loss": 1.0043, "step": 3116 }, { "epoch": 0.5550213675213675, "grad_norm": 0.5352509617805481, "learning_rate": 0.00019075004178595396, "loss": 1.1701, "step": 3117 }, { "epoch": 0.5551994301994302, "grad_norm": 0.5392300486564636, "learning_rate": 0.00019074416124227953, "loss": 1.1612, "step": 3118 }, { "epoch": 0.5553774928774928, "grad_norm": 0.5195050835609436, "learning_rate": 0.0001907382789206618, "loss": 1.0934, "step": 3119 }, { "epoch": 0.5555555555555556, "grad_norm": 0.5276884436607361, "learning_rate": 0.000190732394821216, "loss": 0.9011, "step": 3120 }, { "epoch": 0.5557336182336182, "grad_norm": 0.6115903258323669, "learning_rate": 0.00019072650894405734, "loss": 1.3065, "step": 3121 }, { "epoch": 0.5559116809116809, 
"grad_norm": 0.5752483010292053, "learning_rate": 0.00019072062128930127, "loss": 1.0063, "step": 3122 }, { "epoch": 0.5560897435897436, "grad_norm": 0.5508273243904114, "learning_rate": 0.00019071473185706302, "loss": 1.2598, "step": 3123 }, { "epoch": 0.5562678062678063, "grad_norm": 0.49712198972702026, "learning_rate": 0.00019070884064745808, "loss": 0.924, "step": 3124 }, { "epoch": 0.5564458689458689, "grad_norm": 0.572849452495575, "learning_rate": 0.00019070294766060185, "loss": 0.9683, "step": 3125 }, { "epoch": 0.5566239316239316, "grad_norm": 0.4807920753955841, "learning_rate": 0.00019069705289660976, "loss": 1.0998, "step": 3126 }, { "epoch": 0.5568019943019943, "grad_norm": 0.5543031096458435, "learning_rate": 0.0001906911563555973, "loss": 1.0878, "step": 3127 }, { "epoch": 0.5569800569800569, "grad_norm": 0.5710418820381165, "learning_rate": 0.00019068525803768007, "loss": 1.0381, "step": 3128 }, { "epoch": 0.5571581196581197, "grad_norm": 0.5169163346290588, "learning_rate": 0.00019067935794297357, "loss": 1.1149, "step": 3129 }, { "epoch": 0.5573361823361823, "grad_norm": 0.6474376916885376, "learning_rate": 0.00019067345607159345, "loss": 0.9828, "step": 3130 }, { "epoch": 0.5575142450142451, "grad_norm": 0.5029847621917725, "learning_rate": 0.0001906675524236553, "loss": 0.797, "step": 3131 }, { "epoch": 0.5576923076923077, "grad_norm": 0.5681431293487549, "learning_rate": 0.00019066164699927478, "loss": 1.1565, "step": 3132 }, { "epoch": 0.5578703703703703, "grad_norm": 0.5654549598693848, "learning_rate": 0.00019065573979856764, "loss": 1.2488, "step": 3133 }, { "epoch": 0.5580484330484331, "grad_norm": 0.47653043270111084, "learning_rate": 0.0001906498308216496, "loss": 1.0428, "step": 3134 }, { "epoch": 0.5582264957264957, "grad_norm": 0.5068467259407043, "learning_rate": 0.00019064392006863643, "loss": 0.9659, "step": 3135 }, { "epoch": 0.5584045584045584, "grad_norm": 0.7076661586761475, "learning_rate": 0.00019063800753964393, "loss": 
1.1289, "step": 3136 }, { "epoch": 0.5585826210826211, "grad_norm": 0.551456868648529, "learning_rate": 0.000190632093234788, "loss": 1.1925, "step": 3137 }, { "epoch": 0.5587606837606838, "grad_norm": 0.518276035785675, "learning_rate": 0.00019062617715418442, "loss": 0.8681, "step": 3138 }, { "epoch": 0.5589387464387464, "grad_norm": 0.5272278785705566, "learning_rate": 0.0001906202592979492, "loss": 1.0865, "step": 3139 }, { "epoch": 0.5591168091168092, "grad_norm": 0.5344942212104797, "learning_rate": 0.00019061433966619822, "loss": 1.1647, "step": 3140 }, { "epoch": 0.5592948717948718, "grad_norm": 0.5833460092544556, "learning_rate": 0.00019060841825904753, "loss": 1.3403, "step": 3141 }, { "epoch": 0.5594729344729344, "grad_norm": 0.5707054734230042, "learning_rate": 0.00019060249507661306, "loss": 1.1236, "step": 3142 }, { "epoch": 0.5596509971509972, "grad_norm": 0.5446065664291382, "learning_rate": 0.00019059657011901094, "loss": 1.017, "step": 3143 }, { "epoch": 0.5598290598290598, "grad_norm": 0.5285109281539917, "learning_rate": 0.0001905906433863572, "loss": 1.3186, "step": 3144 }, { "epoch": 0.5600071225071225, "grad_norm": 0.5308659672737122, "learning_rate": 0.00019058471487876802, "loss": 0.8464, "step": 3145 }, { "epoch": 0.5601851851851852, "grad_norm": 0.5218054056167603, "learning_rate": 0.00019057878459635948, "loss": 1.0219, "step": 3146 }, { "epoch": 0.5603632478632479, "grad_norm": 0.45067787170410156, "learning_rate": 0.00019057285253924785, "loss": 1.0364, "step": 3147 }, { "epoch": 0.5605413105413105, "grad_norm": 0.4856041669845581, "learning_rate": 0.0001905669187075493, "loss": 1.1928, "step": 3148 }, { "epoch": 0.5607193732193733, "grad_norm": 0.506912112236023, "learning_rate": 0.00019056098310138016, "loss": 1.119, "step": 3149 }, { "epoch": 0.5608974358974359, "grad_norm": 0.49049463868141174, "learning_rate": 0.00019055504572085662, "loss": 1.2165, "step": 3150 }, { "epoch": 0.5610754985754985, "grad_norm": 0.5250293612480164, 
"learning_rate": 0.0001905491065660951, "loss": 1.1427, "step": 3151 }, { "epoch": 0.5612535612535613, "grad_norm": 0.43438446521759033, "learning_rate": 0.00019054316563721195, "loss": 0.884, "step": 3152 }, { "epoch": 0.5614316239316239, "grad_norm": 0.5386807918548584, "learning_rate": 0.00019053722293432354, "loss": 1.1494, "step": 3153 }, { "epoch": 0.5616096866096866, "grad_norm": 0.5403809547424316, "learning_rate": 0.00019053127845754632, "loss": 1.1743, "step": 3154 }, { "epoch": 0.5617877492877493, "grad_norm": 0.4759823977947235, "learning_rate": 0.00019052533220699678, "loss": 1.0716, "step": 3155 }, { "epoch": 0.561965811965812, "grad_norm": 0.45332327485084534, "learning_rate": 0.0001905193841827914, "loss": 0.8405, "step": 3156 }, { "epoch": 0.5621438746438746, "grad_norm": 0.5617053508758545, "learning_rate": 0.00019051343438504671, "loss": 1.0422, "step": 3157 }, { "epoch": 0.5623219373219374, "grad_norm": 0.5088049173355103, "learning_rate": 0.00019050748281387931, "loss": 1.0067, "step": 3158 }, { "epoch": 0.5625, "grad_norm": 0.5174484848976135, "learning_rate": 0.00019050152946940578, "loss": 1.0623, "step": 3159 }, { "epoch": 0.5626780626780626, "grad_norm": 0.6093568801879883, "learning_rate": 0.0001904955743517428, "loss": 1.24, "step": 3160 }, { "epoch": 0.5628561253561254, "grad_norm": 0.49063584208488464, "learning_rate": 0.00019048961746100703, "loss": 0.8563, "step": 3161 }, { "epoch": 0.563034188034188, "grad_norm": 0.583940863609314, "learning_rate": 0.00019048365879731517, "loss": 1.0695, "step": 3162 }, { "epoch": 0.5632122507122507, "grad_norm": 0.4943268597126007, "learning_rate": 0.000190477698360784, "loss": 0.8606, "step": 3163 }, { "epoch": 0.5633903133903134, "grad_norm": 0.5050932168960571, "learning_rate": 0.00019047173615153028, "loss": 1.1591, "step": 3164 }, { "epoch": 0.5635683760683761, "grad_norm": 0.5445677638053894, "learning_rate": 0.0001904657721696708, "loss": 1.262, "step": 3165 }, { "epoch": 0.5637464387464387, 
"grad_norm": 0.5445297360420227, "learning_rate": 0.00019045980641532246, "loss": 1.223, "step": 3166 }, { "epoch": 0.5639245014245015, "grad_norm": 0.5098413228988647, "learning_rate": 0.00019045383888860213, "loss": 1.0829, "step": 3167 }, { "epoch": 0.5641025641025641, "grad_norm": 0.484998881816864, "learning_rate": 0.0001904478695896267, "loss": 1.0711, "step": 3168 }, { "epoch": 0.5642806267806267, "grad_norm": 0.5515334010124207, "learning_rate": 0.0001904418985185132, "loss": 1.1583, "step": 3169 }, { "epoch": 0.5644586894586895, "grad_norm": 0.545460045337677, "learning_rate": 0.00019043592567537853, "loss": 1.2321, "step": 3170 }, { "epoch": 0.5646367521367521, "grad_norm": 0.5463964343070984, "learning_rate": 0.0001904299510603398, "loss": 1.1019, "step": 3171 }, { "epoch": 0.5648148148148148, "grad_norm": 0.5619220733642578, "learning_rate": 0.000190423974673514, "loss": 1.1001, "step": 3172 }, { "epoch": 0.5649928774928775, "grad_norm": 0.4448916018009186, "learning_rate": 0.00019041799651501825, "loss": 1.057, "step": 3173 }, { "epoch": 0.5651709401709402, "grad_norm": 0.6073006987571716, "learning_rate": 0.00019041201658496975, "loss": 1.0306, "step": 3174 }, { "epoch": 0.5653490028490028, "grad_norm": 0.5342072248458862, "learning_rate": 0.0001904060348834855, "loss": 0.9231, "step": 3175 }, { "epoch": 0.5655270655270656, "grad_norm": 0.4505697786808014, "learning_rate": 0.0001904000514106829, "loss": 1.1134, "step": 3176 }, { "epoch": 0.5657051282051282, "grad_norm": 0.5627852082252502, "learning_rate": 0.00019039406616667902, "loss": 1.2138, "step": 3177 }, { "epoch": 0.5658831908831908, "grad_norm": 0.499734103679657, "learning_rate": 0.0001903880791515912, "loss": 1.1074, "step": 3178 }, { "epoch": 0.5660612535612536, "grad_norm": 0.4768189489841461, "learning_rate": 0.00019038209036553676, "loss": 0.9442, "step": 3179 }, { "epoch": 0.5662393162393162, "grad_norm": 0.5265373587608337, "learning_rate": 0.00019037609980863298, "loss": 1.0907, 
"step": 3180 }, { "epoch": 0.5664173789173789, "grad_norm": 0.5506128072738647, "learning_rate": 0.00019037010748099728, "loss": 1.2541, "step": 3181 }, { "epoch": 0.5665954415954416, "grad_norm": 0.44860872626304626, "learning_rate": 0.00019036411338274703, "loss": 0.893, "step": 3182 }, { "epoch": 0.5667735042735043, "grad_norm": 0.4901522994041443, "learning_rate": 0.00019035811751399973, "loss": 1.0469, "step": 3183 }, { "epoch": 0.5669515669515669, "grad_norm": 0.500868022441864, "learning_rate": 0.0001903521198748728, "loss": 1.0527, "step": 3184 }, { "epoch": 0.5671296296296297, "grad_norm": 0.5508102774620056, "learning_rate": 0.00019034612046548376, "loss": 1.283, "step": 3185 }, { "epoch": 0.5673076923076923, "grad_norm": 0.5079495906829834, "learning_rate": 0.0001903401192859502, "loss": 1.0808, "step": 3186 }, { "epoch": 0.5674857549857549, "grad_norm": 0.5758788585662842, "learning_rate": 0.00019033411633638964, "loss": 1.1301, "step": 3187 }, { "epoch": 0.5676638176638177, "grad_norm": 0.46557924151420593, "learning_rate": 0.00019032811161691972, "loss": 1.0205, "step": 3188 }, { "epoch": 0.5678418803418803, "grad_norm": 0.5665056109428406, "learning_rate": 0.0001903221051276581, "loss": 1.1926, "step": 3189 }, { "epoch": 0.5680199430199431, "grad_norm": 0.5948992967605591, "learning_rate": 0.00019031609686872246, "loss": 1.2724, "step": 3190 }, { "epoch": 0.5681980056980057, "grad_norm": 0.6189367771148682, "learning_rate": 0.00019031008684023055, "loss": 1.2762, "step": 3191 }, { "epoch": 0.5683760683760684, "grad_norm": 0.49511992931365967, "learning_rate": 0.00019030407504230006, "loss": 1.0117, "step": 3192 }, { "epoch": 0.5685541310541311, "grad_norm": 0.5358837842941284, "learning_rate": 0.00019029806147504878, "loss": 0.944, "step": 3193 }, { "epoch": 0.5687321937321937, "grad_norm": 0.458636999130249, "learning_rate": 0.00019029204613859463, "loss": 0.8174, "step": 3194 }, { "epoch": 0.5689102564102564, "grad_norm": 0.5168304443359375, 
"learning_rate": 0.00019028602903305535, "loss": 1.1533, "step": 3195 }, { "epoch": 0.5690883190883191, "grad_norm": 0.5334134697914124, "learning_rate": 0.00019028001015854892, "loss": 1.1868, "step": 3196 }, { "epoch": 0.5692663817663818, "grad_norm": 0.5649123191833496, "learning_rate": 0.0001902739895151932, "loss": 0.9876, "step": 3197 }, { "epoch": 0.5694444444444444, "grad_norm": 0.5647651553153992, "learning_rate": 0.0001902679671031062, "loss": 1.0805, "step": 3198 }, { "epoch": 0.5696225071225072, "grad_norm": 0.5251876711845398, "learning_rate": 0.00019026194292240587, "loss": 1.2335, "step": 3199 }, { "epoch": 0.5698005698005698, "grad_norm": 0.5268014669418335, "learning_rate": 0.0001902559169732103, "loss": 1.19, "step": 3200 }, { "epoch": 0.5699786324786325, "grad_norm": 0.5301041007041931, "learning_rate": 0.00019024988925563752, "loss": 1.1173, "step": 3201 }, { "epoch": 0.5701566951566952, "grad_norm": 0.4531562030315399, "learning_rate": 0.00019024385976980566, "loss": 0.7576, "step": 3202 }, { "epoch": 0.5703347578347578, "grad_norm": 0.5779716372489929, "learning_rate": 0.00019023782851583282, "loss": 1.1719, "step": 3203 }, { "epoch": 0.5705128205128205, "grad_norm": 0.4886093735694885, "learning_rate": 0.00019023179549383716, "loss": 1.085, "step": 3204 }, { "epoch": 0.5706908831908832, "grad_norm": 0.510117769241333, "learning_rate": 0.0001902257607039369, "loss": 0.8931, "step": 3205 }, { "epoch": 0.5708689458689459, "grad_norm": 0.5195479393005371, "learning_rate": 0.00019021972414625036, "loss": 0.9922, "step": 3206 }, { "epoch": 0.5710470085470085, "grad_norm": 0.5791407227516174, "learning_rate": 0.00019021368582089568, "loss": 1.112, "step": 3207 }, { "epoch": 0.5712250712250713, "grad_norm": 0.5056005716323853, "learning_rate": 0.00019020764572799122, "loss": 0.8474, "step": 3208 }, { "epoch": 0.5714031339031339, "grad_norm": 0.5060068964958191, "learning_rate": 0.00019020160386765537, "loss": 1.071, "step": 3209 }, { "epoch": 
0.5715811965811965, "grad_norm": 0.5396568775177002, "learning_rate": 0.00019019556024000648, "loss": 1.0436, "step": 3210 }, { "epoch": 0.5717592592592593, "grad_norm": 0.6552190780639648, "learning_rate": 0.0001901895148451629, "loss": 0.9869, "step": 3211 }, { "epoch": 0.5719373219373219, "grad_norm": 0.5177004337310791, "learning_rate": 0.00019018346768324314, "loss": 1.0193, "step": 3212 }, { "epoch": 0.5721153846153846, "grad_norm": 0.5192117094993591, "learning_rate": 0.0001901774187543657, "loss": 1.1263, "step": 3213 }, { "epoch": 0.5722934472934473, "grad_norm": 0.4857729971408844, "learning_rate": 0.00019017136805864906, "loss": 0.9808, "step": 3214 }, { "epoch": 0.57247150997151, "grad_norm": 0.5800918936729431, "learning_rate": 0.00019016531559621177, "loss": 1.2334, "step": 3215 }, { "epoch": 0.5726495726495726, "grad_norm": 0.4812086522579193, "learning_rate": 0.00019015926136717242, "loss": 1.2409, "step": 3216 }, { "epoch": 0.5728276353276354, "grad_norm": 0.5128398537635803, "learning_rate": 0.00019015320537164963, "loss": 0.9036, "step": 3217 }, { "epoch": 0.573005698005698, "grad_norm": 0.4761141538619995, "learning_rate": 0.00019014714760976205, "loss": 1.1058, "step": 3218 }, { "epoch": 0.5731837606837606, "grad_norm": 0.5850459933280945, "learning_rate": 0.0001901410880816284, "loss": 1.1011, "step": 3219 }, { "epoch": 0.5733618233618234, "grad_norm": 0.5648714303970337, "learning_rate": 0.00019013502678736738, "loss": 1.0479, "step": 3220 }, { "epoch": 0.573539886039886, "grad_norm": 0.5835902094841003, "learning_rate": 0.00019012896372709774, "loss": 1.0555, "step": 3221 }, { "epoch": 0.5737179487179487, "grad_norm": 0.5155113935470581, "learning_rate": 0.00019012289890093828, "loss": 0.9488, "step": 3222 }, { "epoch": 0.5738960113960114, "grad_norm": 0.5064889788627625, "learning_rate": 0.00019011683230900784, "loss": 0.9144, "step": 3223 }, { "epoch": 0.5740740740740741, "grad_norm": 0.53825843334198, "learning_rate": 
0.00019011076395142527, "loss": 1.0713, "step": 3224 }, { "epoch": 0.5742521367521367, "grad_norm": 0.5341386198997498, "learning_rate": 0.00019010469382830947, "loss": 1.1438, "step": 3225 }, { "epoch": 0.5744301994301995, "grad_norm": 0.5300050973892212, "learning_rate": 0.00019009862193977936, "loss": 1.0114, "step": 3226 }, { "epoch": 0.5746082621082621, "grad_norm": 0.6033682823181152, "learning_rate": 0.0001900925482859539, "loss": 1.0458, "step": 3227 }, { "epoch": 0.5747863247863247, "grad_norm": 0.5108983516693115, "learning_rate": 0.00019008647286695215, "loss": 1.1211, "step": 3228 }, { "epoch": 0.5749643874643875, "grad_norm": 0.5263782739639282, "learning_rate": 0.00019008039568289308, "loss": 0.8647, "step": 3229 }, { "epoch": 0.5751424501424501, "grad_norm": 0.47119566798210144, "learning_rate": 0.0001900743167338958, "loss": 1.019, "step": 3230 }, { "epoch": 0.5753205128205128, "grad_norm": 0.56391841173172, "learning_rate": 0.00019006823602007937, "loss": 0.9791, "step": 3231 }, { "epoch": 0.5754985754985755, "grad_norm": 0.5364985466003418, "learning_rate": 0.000190062153541563, "loss": 1.1355, "step": 3232 }, { "epoch": 0.5756766381766382, "grad_norm": 0.5098565220832825, "learning_rate": 0.00019005606929846578, "loss": 0.987, "step": 3233 }, { "epoch": 0.5758547008547008, "grad_norm": 0.6640968918800354, "learning_rate": 0.00019004998329090692, "loss": 1.1165, "step": 3234 }, { "epoch": 0.5760327635327636, "grad_norm": 0.5044721961021423, "learning_rate": 0.00019004389551900578, "loss": 0.8643, "step": 3235 }, { "epoch": 0.5762108262108262, "grad_norm": 0.4822785258293152, "learning_rate": 0.00019003780598288153, "loss": 1.0735, "step": 3236 }, { "epoch": 0.5763888888888888, "grad_norm": 0.505261242389679, "learning_rate": 0.00019003171468265348, "loss": 1.0001, "step": 3237 }, { "epoch": 0.5765669515669516, "grad_norm": 0.5020412802696228, "learning_rate": 0.00019002562161844102, "loss": 0.9601, "step": 3238 }, { "epoch": 0.5767450142450142, 
"grad_norm": 0.4920475482940674, "learning_rate": 0.00019001952679036354, "loss": 1.0111, "step": 3239 }, { "epoch": 0.5769230769230769, "grad_norm": 0.5638813376426697, "learning_rate": 0.00019001343019854042, "loss": 1.1456, "step": 3240 }, { "epoch": 0.5771011396011396, "grad_norm": 0.5519235134124756, "learning_rate": 0.0001900073318430911, "loss": 0.9258, "step": 3241 }, { "epoch": 0.5772792022792023, "grad_norm": 0.5207770466804504, "learning_rate": 0.0001900012317241351, "loss": 0.9859, "step": 3242 }, { "epoch": 0.5774572649572649, "grad_norm": 0.5493707656860352, "learning_rate": 0.00018999512984179195, "loss": 1.1183, "step": 3243 }, { "epoch": 0.5776353276353277, "grad_norm": 0.4504764676094055, "learning_rate": 0.00018998902619618116, "loss": 0.9363, "step": 3244 }, { "epoch": 0.5778133903133903, "grad_norm": 0.5232836604118347, "learning_rate": 0.00018998292078742233, "loss": 1.1887, "step": 3245 }, { "epoch": 0.5779914529914529, "grad_norm": 0.5715088248252869, "learning_rate": 0.0001899768136156351, "loss": 1.4524, "step": 3246 }, { "epoch": 0.5781695156695157, "grad_norm": 0.59555584192276, "learning_rate": 0.0001899707046809391, "loss": 1.0922, "step": 3247 }, { "epoch": 0.5783475783475783, "grad_norm": 0.4500894546508789, "learning_rate": 0.00018996459398345404, "loss": 1.0087, "step": 3248 }, { "epoch": 0.5785256410256411, "grad_norm": 0.49126625061035156, "learning_rate": 0.00018995848152329967, "loss": 1.1512, "step": 3249 }, { "epoch": 0.5787037037037037, "grad_norm": 0.4096335172653198, "learning_rate": 0.00018995236730059574, "loss": 0.7633, "step": 3250 }, { "epoch": 0.5788817663817664, "grad_norm": 0.5364313721656799, "learning_rate": 0.00018994625131546199, "loss": 1.295, "step": 3251 }, { "epoch": 0.5790598290598291, "grad_norm": 0.4897502660751343, "learning_rate": 0.00018994013356801834, "loss": 1.2197, "step": 3252 }, { "epoch": 0.5792378917378918, "grad_norm": 0.5101368427276611, "learning_rate": 0.00018993401405838456, "loss": 
1.1129, "step": 3253 }, { "epoch": 0.5794159544159544, "grad_norm": 0.5426377654075623, "learning_rate": 0.00018992789278668063, "loss": 1.188, "step": 3254 }, { "epoch": 0.5795940170940171, "grad_norm": 0.5066362023353577, "learning_rate": 0.00018992176975302644, "loss": 1.2802, "step": 3255 }, { "epoch": 0.5797720797720798, "grad_norm": 0.5418947339057922, "learning_rate": 0.00018991564495754196, "loss": 1.1675, "step": 3256 }, { "epoch": 0.5799501424501424, "grad_norm": 0.5139963626861572, "learning_rate": 0.0001899095184003472, "loss": 0.9717, "step": 3257 }, { "epoch": 0.5801282051282052, "grad_norm": 0.5167285799980164, "learning_rate": 0.00018990339008156219, "loss": 1.1529, "step": 3258 }, { "epoch": 0.5803062678062678, "grad_norm": 0.53471440076828, "learning_rate": 0.00018989726000130704, "loss": 1.0711, "step": 3259 }, { "epoch": 0.5804843304843305, "grad_norm": 0.49875229597091675, "learning_rate": 0.0001898911281597018, "loss": 1.1095, "step": 3260 }, { "epoch": 0.5806623931623932, "grad_norm": 0.4473155438899994, "learning_rate": 0.00018988499455686663, "loss": 0.836, "step": 3261 }, { "epoch": 0.5808404558404558, "grad_norm": 0.6181996464729309, "learning_rate": 0.00018987885919292174, "loss": 1.2787, "step": 3262 }, { "epoch": 0.5810185185185185, "grad_norm": 0.4996899664402008, "learning_rate": 0.00018987272206798733, "loss": 1.2132, "step": 3263 }, { "epoch": 0.5811965811965812, "grad_norm": 0.49979713559150696, "learning_rate": 0.00018986658318218358, "loss": 0.8388, "step": 3264 }, { "epoch": 0.5813746438746439, "grad_norm": 0.5288876295089722, "learning_rate": 0.00018986044253563084, "loss": 1.1871, "step": 3265 }, { "epoch": 0.5815527065527065, "grad_norm": 0.534063458442688, "learning_rate": 0.00018985430012844937, "loss": 0.96, "step": 3266 }, { "epoch": 0.5817307692307693, "grad_norm": 0.5081285834312439, "learning_rate": 0.00018984815596075953, "loss": 1.1577, "step": 3267 }, { "epoch": 0.5819088319088319, "grad_norm": 0.5648202896118164, 
"learning_rate": 0.00018984201003268176, "loss": 1.2235, "step": 3268 }, { "epoch": 0.5820868945868946, "grad_norm": 0.495061993598938, "learning_rate": 0.00018983586234433642, "loss": 1.056, "step": 3269 }, { "epoch": 0.5822649572649573, "grad_norm": 0.47149857878685, "learning_rate": 0.000189829712895844, "loss": 1.0844, "step": 3270 }, { "epoch": 0.58244301994302, "grad_norm": 0.6107062697410583, "learning_rate": 0.00018982356168732492, "loss": 0.9868, "step": 3271 }, { "epoch": 0.5826210826210826, "grad_norm": 0.7355940341949463, "learning_rate": 0.00018981740871889974, "loss": 1.1448, "step": 3272 }, { "epoch": 0.5827991452991453, "grad_norm": 0.5950441956520081, "learning_rate": 0.00018981125399068907, "loss": 0.9618, "step": 3273 }, { "epoch": 0.582977207977208, "grad_norm": 0.47607290744781494, "learning_rate": 0.0001898050975028134, "loss": 0.957, "step": 3274 }, { "epoch": 0.5831552706552706, "grad_norm": 0.541164755821228, "learning_rate": 0.00018979893925539338, "loss": 1.1426, "step": 3275 }, { "epoch": 0.5833333333333334, "grad_norm": 0.5240640044212341, "learning_rate": 0.00018979277924854974, "loss": 1.1421, "step": 3276 }, { "epoch": 0.583511396011396, "grad_norm": 0.48155727982521057, "learning_rate": 0.00018978661748240307, "loss": 1.0069, "step": 3277 }, { "epoch": 0.5836894586894587, "grad_norm": 0.5559938549995422, "learning_rate": 0.00018978045395707418, "loss": 1.1227, "step": 3278 }, { "epoch": 0.5838675213675214, "grad_norm": 0.5244291424751282, "learning_rate": 0.0001897742886726838, "loss": 1.1103, "step": 3279 }, { "epoch": 0.584045584045584, "grad_norm": 0.5277758240699768, "learning_rate": 0.00018976812162935268, "loss": 1.2125, "step": 3280 }, { "epoch": 0.5842236467236467, "grad_norm": 0.5415039658546448, "learning_rate": 0.00018976195282720173, "loss": 1.146, "step": 3281 }, { "epoch": 0.5844017094017094, "grad_norm": 0.5152051448822021, "learning_rate": 0.00018975578226635177, "loss": 1.0092, "step": 3282 }, { "epoch": 
0.5845797720797721, "grad_norm": 0.5489452481269836, "learning_rate": 0.00018974960994692371, "loss": 1.2425, "step": 3283 }, { "epoch": 0.5847578347578347, "grad_norm": 0.491274356842041, "learning_rate": 0.00018974343586903848, "loss": 0.9559, "step": 3284 }, { "epoch": 0.5849358974358975, "grad_norm": 0.5783739686012268, "learning_rate": 0.00018973726003281707, "loss": 1.1971, "step": 3285 }, { "epoch": 0.5851139601139601, "grad_norm": 0.5056472420692444, "learning_rate": 0.00018973108243838045, "loss": 1.0313, "step": 3286 }, { "epoch": 0.5852920227920227, "grad_norm": 0.4939729571342468, "learning_rate": 0.00018972490308584962, "loss": 1.1061, "step": 3287 }, { "epoch": 0.5854700854700855, "grad_norm": 0.4889580011367798, "learning_rate": 0.00018971872197534576, "loss": 0.9157, "step": 3288 }, { "epoch": 0.5856481481481481, "grad_norm": 0.40889349579811096, "learning_rate": 0.00018971253910698993, "loss": 0.8083, "step": 3289 }, { "epoch": 0.5858262108262108, "grad_norm": 0.5221503973007202, "learning_rate": 0.00018970635448090322, "loss": 0.9995, "step": 3290 }, { "epoch": 0.5860042735042735, "grad_norm": 0.47060561180114746, "learning_rate": 0.00018970016809720687, "loss": 0.9738, "step": 3291 }, { "epoch": 0.5861823361823362, "grad_norm": 0.6083170771598816, "learning_rate": 0.000189693979956022, "loss": 1.188, "step": 3292 }, { "epoch": 0.5863603988603988, "grad_norm": 0.4696751534938812, "learning_rate": 0.00018968779005746998, "loss": 1.089, "step": 3293 }, { "epoch": 0.5865384615384616, "grad_norm": 0.5081014633178711, "learning_rate": 0.00018968159840167202, "loss": 1.1869, "step": 3294 }, { "epoch": 0.5867165242165242, "grad_norm": 0.48042431473731995, "learning_rate": 0.0001896754049887494, "loss": 0.964, "step": 3295 }, { "epoch": 0.5868945868945868, "grad_norm": 0.5075193643569946, "learning_rate": 0.00018966920981882353, "loss": 1.1884, "step": 3296 }, { "epoch": 0.5870726495726496, "grad_norm": 0.5734842419624329, "learning_rate": 
0.00018966301289201576, "loss": 1.1475, "step": 3297 }, { "epoch": 0.5872507122507122, "grad_norm": 0.5525311231613159, "learning_rate": 0.00018965681420844753, "loss": 1.241, "step": 3298 }, { "epoch": 0.5874287749287749, "grad_norm": 0.48142680525779724, "learning_rate": 0.00018965061376824025, "loss": 1.0871, "step": 3299 }, { "epoch": 0.5876068376068376, "grad_norm": 0.5360350608825684, "learning_rate": 0.00018964441157151544, "loss": 1.1895, "step": 3300 }, { "epoch": 0.5877849002849003, "grad_norm": 0.5207685232162476, "learning_rate": 0.00018963820761839457, "loss": 0.9323, "step": 3301 }, { "epoch": 0.5879629629629629, "grad_norm": 0.453620970249176, "learning_rate": 0.00018963200190899926, "loss": 0.802, "step": 3302 }, { "epoch": 0.5881410256410257, "grad_norm": 0.5198796391487122, "learning_rate": 0.00018962579444345106, "loss": 1.0243, "step": 3303 }, { "epoch": 0.5883190883190883, "grad_norm": 0.5597525835037231, "learning_rate": 0.0001896195852218716, "loss": 0.9351, "step": 3304 }, { "epoch": 0.5884971509971509, "grad_norm": 0.5738299489021301, "learning_rate": 0.00018961337424438254, "loss": 1.3737, "step": 3305 }, { "epoch": 0.5886752136752137, "grad_norm": 0.5569949150085449, "learning_rate": 0.00018960716151110554, "loss": 1.0469, "step": 3306 }, { "epoch": 0.5888532763532763, "grad_norm": 0.5088010430335999, "learning_rate": 0.00018960094702216238, "loss": 1.0982, "step": 3307 }, { "epoch": 0.5890313390313391, "grad_norm": 0.5127636790275574, "learning_rate": 0.0001895947307776748, "loss": 0.9986, "step": 3308 }, { "epoch": 0.5892094017094017, "grad_norm": 0.5160682797431946, "learning_rate": 0.00018958851277776456, "loss": 1.0219, "step": 3309 }, { "epoch": 0.5893874643874644, "grad_norm": 0.5380711555480957, "learning_rate": 0.00018958229302255356, "loss": 1.118, "step": 3310 }, { "epoch": 0.5895655270655271, "grad_norm": 0.5571228861808777, "learning_rate": 0.0001895760715121636, "loss": 1.0302, "step": 3311 }, { "epoch": 0.5897435897435898, 
"grad_norm": 0.542266309261322, "learning_rate": 0.00018956984824671657, "loss": 1.0372, "step": 3312 }, { "epoch": 0.5899216524216524, "grad_norm": 0.48350459337234497, "learning_rate": 0.00018956362322633446, "loss": 1.2, "step": 3313 }, { "epoch": 0.5900997150997151, "grad_norm": 0.5001645088195801, "learning_rate": 0.0001895573964511392, "loss": 0.9749, "step": 3314 }, { "epoch": 0.5902777777777778, "grad_norm": 0.5227531790733337, "learning_rate": 0.00018955116792125276, "loss": 1.025, "step": 3315 }, { "epoch": 0.5904558404558404, "grad_norm": 0.522251546382904, "learning_rate": 0.00018954493763679727, "loss": 1.0821, "step": 3316 }, { "epoch": 0.5906339031339032, "grad_norm": 0.5423251390457153, "learning_rate": 0.00018953870559789467, "loss": 1.0961, "step": 3317 }, { "epoch": 0.5908119658119658, "grad_norm": 0.5615720748901367, "learning_rate": 0.0001895324718046672, "loss": 1.1209, "step": 3318 }, { "epoch": 0.5909900284900285, "grad_norm": 0.44746771454811096, "learning_rate": 0.00018952623625723692, "loss": 0.9935, "step": 3319 }, { "epoch": 0.5911680911680912, "grad_norm": 0.5993229150772095, "learning_rate": 0.00018951999895572597, "loss": 1.1409, "step": 3320 }, { "epoch": 0.5913461538461539, "grad_norm": 0.4969801902770996, "learning_rate": 0.00018951375990025666, "loss": 1.1568, "step": 3321 }, { "epoch": 0.5915242165242165, "grad_norm": 0.6001267433166504, "learning_rate": 0.00018950751909095116, "loss": 1.1135, "step": 3322 }, { "epoch": 0.5917022792022792, "grad_norm": 0.5386021733283997, "learning_rate": 0.00018950127652793172, "loss": 0.947, "step": 3323 }, { "epoch": 0.5918803418803419, "grad_norm": 0.49043843150138855, "learning_rate": 0.00018949503221132074, "loss": 0.9581, "step": 3324 }, { "epoch": 0.5920584045584045, "grad_norm": 0.5241141319274902, "learning_rate": 0.00018948878614124048, "loss": 1.0797, "step": 3325 }, { "epoch": 0.5922364672364673, "grad_norm": 0.5755026340484619, "learning_rate": 0.00018948253831781338, "loss": 
1.1046, "step": 3326 }, { "epoch": 0.5924145299145299, "grad_norm": 0.5004449486732483, "learning_rate": 0.00018947628874116179, "loss": 1.1416, "step": 3327 }, { "epoch": 0.5925925925925926, "grad_norm": 0.53347247838974, "learning_rate": 0.00018947003741140821, "loss": 1.2718, "step": 3328 }, { "epoch": 0.5927706552706553, "grad_norm": 0.6473469138145447, "learning_rate": 0.0001894637843286751, "loss": 1.2255, "step": 3329 }, { "epoch": 0.592948717948718, "grad_norm": 0.4750518798828125, "learning_rate": 0.00018945752949308498, "loss": 1.0537, "step": 3330 }, { "epoch": 0.5931267806267806, "grad_norm": 0.5636306405067444, "learning_rate": 0.00018945127290476043, "loss": 0.9906, "step": 3331 }, { "epoch": 0.5933048433048433, "grad_norm": 0.4871736466884613, "learning_rate": 0.00018944501456382397, "loss": 1.0549, "step": 3332 }, { "epoch": 0.593482905982906, "grad_norm": 0.5554637312889099, "learning_rate": 0.0001894387544703983, "loss": 1.1587, "step": 3333 }, { "epoch": 0.5936609686609686, "grad_norm": 0.5385799407958984, "learning_rate": 0.000189432492624606, "loss": 0.9565, "step": 3334 }, { "epoch": 0.5938390313390314, "grad_norm": 0.4996553063392639, "learning_rate": 0.00018942622902656976, "loss": 1.0456, "step": 3335 }, { "epoch": 0.594017094017094, "grad_norm": 0.46810707449913025, "learning_rate": 0.00018941996367641237, "loss": 1.119, "step": 3336 }, { "epoch": 0.5941951566951567, "grad_norm": 0.5672653913497925, "learning_rate": 0.0001894136965742565, "loss": 1.1317, "step": 3337 }, { "epoch": 0.5943732193732194, "grad_norm": 0.4790053367614746, "learning_rate": 0.00018940742772022504, "loss": 1.0967, "step": 3338 }, { "epoch": 0.594551282051282, "grad_norm": 0.5935906171798706, "learning_rate": 0.00018940115711444072, "loss": 1.3044, "step": 3339 }, { "epoch": 0.5947293447293447, "grad_norm": 0.4790516793727875, "learning_rate": 0.00018939488475702647, "loss": 1.074, "step": 3340 }, { "epoch": 0.5949074074074074, "grad_norm": 0.474588006734848, 
"learning_rate": 0.00018938861064810516, "loss": 1.1476, "step": 3341 }, { "epoch": 0.5950854700854701, "grad_norm": 0.4908665120601654, "learning_rate": 0.0001893823347877997, "loss": 1.216, "step": 3342 }, { "epoch": 0.5952635327635327, "grad_norm": 0.531650960445404, "learning_rate": 0.00018937605717623307, "loss": 1.1057, "step": 3343 }, { "epoch": 0.5954415954415955, "grad_norm": 0.5581082105636597, "learning_rate": 0.00018936977781352823, "loss": 0.7972, "step": 3344 }, { "epoch": 0.5956196581196581, "grad_norm": 0.42370662093162537, "learning_rate": 0.00018936349669980827, "loss": 0.8888, "step": 3345 }, { "epoch": 0.5957977207977208, "grad_norm": 0.5817318558692932, "learning_rate": 0.00018935721383519624, "loss": 1.2801, "step": 3346 }, { "epoch": 0.5959757834757835, "grad_norm": 0.4766376316547394, "learning_rate": 0.00018935092921981524, "loss": 1.0918, "step": 3347 }, { "epoch": 0.5961538461538461, "grad_norm": 0.5567346811294556, "learning_rate": 0.00018934464285378836, "loss": 1.0269, "step": 3348 }, { "epoch": 0.5963319088319088, "grad_norm": 0.5285565257072449, "learning_rate": 0.0001893383547372388, "loss": 1.1887, "step": 3349 }, { "epoch": 0.5965099715099715, "grad_norm": 0.49052694439888, "learning_rate": 0.00018933206487028979, "loss": 1.0773, "step": 3350 }, { "epoch": 0.5966880341880342, "grad_norm": 0.6175199151039124, "learning_rate": 0.0001893257732530645, "loss": 1.0192, "step": 3351 }, { "epoch": 0.5968660968660968, "grad_norm": 0.56049644947052, "learning_rate": 0.00018931947988568628, "loss": 0.9516, "step": 3352 }, { "epoch": 0.5970441595441596, "grad_norm": 0.47873660922050476, "learning_rate": 0.00018931318476827838, "loss": 0.8174, "step": 3353 }, { "epoch": 0.5972222222222222, "grad_norm": 0.4748854339122772, "learning_rate": 0.00018930688790096416, "loss": 1.0238, "step": 3354 }, { "epoch": 0.5974002849002849, "grad_norm": 0.5382232666015625, "learning_rate": 0.00018930058928386698, "loss": 1.0815, "step": 3355 }, { "epoch": 
0.5975783475783476, "grad_norm": 0.5038299560546875, "learning_rate": 0.00018929428891711027, "loss": 1.0472, "step": 3356 }, { "epoch": 0.5977564102564102, "grad_norm": 0.5185908079147339, "learning_rate": 0.00018928798680081744, "loss": 1.0435, "step": 3357 }, { "epoch": 0.5979344729344729, "grad_norm": 0.5169877409934998, "learning_rate": 0.00018928168293511202, "loss": 1.0437, "step": 3358 }, { "epoch": 0.5981125356125356, "grad_norm": 0.5218369960784912, "learning_rate": 0.00018927537732011749, "loss": 1.082, "step": 3359 }, { "epoch": 0.5982905982905983, "grad_norm": 0.5358219742774963, "learning_rate": 0.0001892690699559574, "loss": 1.2523, "step": 3360 }, { "epoch": 0.5984686609686609, "grad_norm": 0.47716647386550903, "learning_rate": 0.0001892627608427553, "loss": 1.2069, "step": 3361 }, { "epoch": 0.5986467236467237, "grad_norm": 0.5484169125556946, "learning_rate": 0.00018925644998063482, "loss": 1.2016, "step": 3362 }, { "epoch": 0.5988247863247863, "grad_norm": 0.46814846992492676, "learning_rate": 0.00018925013736971965, "loss": 0.7989, "step": 3363 }, { "epoch": 0.5990028490028491, "grad_norm": 0.5391258001327515, "learning_rate": 0.0001892438230101334, "loss": 1.224, "step": 3364 }, { "epoch": 0.5991809116809117, "grad_norm": 0.5248384475708008, "learning_rate": 0.00018923750690199987, "loss": 1.1532, "step": 3365 }, { "epoch": 0.5993589743589743, "grad_norm": 0.5074637532234192, "learning_rate": 0.00018923118904544273, "loss": 1.0968, "step": 3366 }, { "epoch": 0.5995370370370371, "grad_norm": 0.5260029435157776, "learning_rate": 0.00018922486944058581, "loss": 1.1311, "step": 3367 }, { "epoch": 0.5997150997150997, "grad_norm": 0.48497965931892395, "learning_rate": 0.00018921854808755294, "loss": 1.1208, "step": 3368 }, { "epoch": 0.5998931623931624, "grad_norm": 0.5108651518821716, "learning_rate": 0.00018921222498646792, "loss": 1.147, "step": 3369 }, { "epoch": 0.6000712250712251, "grad_norm": 0.5243437886238098, "learning_rate": 
0.00018920590013745471, "loss": 0.9614, "step": 3370 }, { "epoch": 0.6002492877492878, "grad_norm": 0.47022634744644165, "learning_rate": 0.00018919957354063719, "loss": 1.0579, "step": 3371 }, { "epoch": 0.6004273504273504, "grad_norm": 0.6461413502693176, "learning_rate": 0.00018919324519613931, "loss": 1.2126, "step": 3372 }, { "epoch": 0.6006054131054132, "grad_norm": 0.4654616713523865, "learning_rate": 0.00018918691510408508, "loss": 1.1476, "step": 3373 }, { "epoch": 0.6007834757834758, "grad_norm": 0.48571303486824036, "learning_rate": 0.00018918058326459854, "loss": 1.2093, "step": 3374 }, { "epoch": 0.6009615384615384, "grad_norm": 0.5255016684532166, "learning_rate": 0.00018917424967780368, "loss": 1.1538, "step": 3375 }, { "epoch": 0.6011396011396012, "grad_norm": 0.5059894323348999, "learning_rate": 0.00018916791434382468, "loss": 1.0556, "step": 3376 }, { "epoch": 0.6013176638176638, "grad_norm": 0.4581229090690613, "learning_rate": 0.00018916157726278561, "loss": 1.1468, "step": 3377 }, { "epoch": 0.6014957264957265, "grad_norm": 0.5701818466186523, "learning_rate": 0.00018915523843481067, "loss": 1.3641, "step": 3378 }, { "epoch": 0.6016737891737892, "grad_norm": 0.5007243752479553, "learning_rate": 0.00018914889786002403, "loss": 1.2705, "step": 3379 }, { "epoch": 0.6018518518518519, "grad_norm": 0.5192995071411133, "learning_rate": 0.0001891425555385499, "loss": 0.9922, "step": 3380 }, { "epoch": 0.6020299145299145, "grad_norm": 0.5880612134933472, "learning_rate": 0.00018913621147051258, "loss": 0.8783, "step": 3381 }, { "epoch": 0.6022079772079773, "grad_norm": 0.5161563158035278, "learning_rate": 0.0001891298656560364, "loss": 0.9634, "step": 3382 }, { "epoch": 0.6023860398860399, "grad_norm": 0.48450782895088196, "learning_rate": 0.00018912351809524563, "loss": 0.809, "step": 3383 }, { "epoch": 0.6025641025641025, "grad_norm": 0.621537983417511, "learning_rate": 0.00018911716878826465, "loss": 1.2031, "step": 3384 }, { "epoch": 
0.6027421652421653, "grad_norm": 0.6014544367790222, "learning_rate": 0.00018911081773521787, "loss": 1.1552, "step": 3385 }, { "epoch": 0.6029202279202279, "grad_norm": 0.49995481967926025, "learning_rate": 0.00018910446493622976, "loss": 0.8569, "step": 3386 }, { "epoch": 0.6030982905982906, "grad_norm": 0.5157307386398315, "learning_rate": 0.00018909811039142472, "loss": 0.9515, "step": 3387 }, { "epoch": 0.6032763532763533, "grad_norm": 0.5164140462875366, "learning_rate": 0.0001890917541009273, "loss": 0.9803, "step": 3388 }, { "epoch": 0.603454415954416, "grad_norm": 0.5555596947669983, "learning_rate": 0.00018908539606486206, "loss": 1.2994, "step": 3389 }, { "epoch": 0.6036324786324786, "grad_norm": 0.605697512626648, "learning_rate": 0.00018907903628335353, "loss": 1.2865, "step": 3390 }, { "epoch": 0.6038105413105413, "grad_norm": 0.5700713992118835, "learning_rate": 0.0001890726747565263, "loss": 1.2493, "step": 3391 }, { "epoch": 0.603988603988604, "grad_norm": 0.5516746044158936, "learning_rate": 0.0001890663114845051, "loss": 1.2743, "step": 3392 }, { "epoch": 0.6041666666666666, "grad_norm": 0.5233162641525269, "learning_rate": 0.0001890599464674145, "loss": 0.9237, "step": 3393 }, { "epoch": 0.6043447293447294, "grad_norm": 0.5709942579269409, "learning_rate": 0.00018905357970537925, "loss": 0.9922, "step": 3394 }, { "epoch": 0.604522792022792, "grad_norm": 0.48403796553611755, "learning_rate": 0.0001890472111985241, "loss": 1.1255, "step": 3395 }, { "epoch": 0.6047008547008547, "grad_norm": 0.628718376159668, "learning_rate": 0.00018904084094697386, "loss": 1.1458, "step": 3396 }, { "epoch": 0.6048789173789174, "grad_norm": 0.46822869777679443, "learning_rate": 0.00018903446895085328, "loss": 0.8727, "step": 3397 }, { "epoch": 0.60505698005698, "grad_norm": 0.505584180355072, "learning_rate": 0.00018902809521028724, "loss": 1.1595, "step": 3398 }, { "epoch": 0.6052350427350427, "grad_norm": 0.4494974911212921, "learning_rate": 
0.00018902171972540058, "loss": 0.6685, "step": 3399 }, { "epoch": 0.6054131054131054, "grad_norm": 0.5101519227027893, "learning_rate": 0.0001890153424963183, "loss": 0.9313, "step": 3400 }, { "epoch": 0.6055911680911681, "grad_norm": 0.5081079602241516, "learning_rate": 0.00018900896352316528, "loss": 1.2588, "step": 3401 }, { "epoch": 0.6057692307692307, "grad_norm": 0.5784309506416321, "learning_rate": 0.00018900258280606653, "loss": 1.2077, "step": 3402 }, { "epoch": 0.6059472934472935, "grad_norm": 0.4506312608718872, "learning_rate": 0.00018899620034514705, "loss": 1.05, "step": 3403 }, { "epoch": 0.6061253561253561, "grad_norm": 0.5243048071861267, "learning_rate": 0.0001889898161405319, "loss": 1.2295, "step": 3404 }, { "epoch": 0.6063034188034188, "grad_norm": 0.5447196364402771, "learning_rate": 0.00018898343019234615, "loss": 1.1476, "step": 3405 }, { "epoch": 0.6064814814814815, "grad_norm": 0.46813663840293884, "learning_rate": 0.00018897704250071492, "loss": 1.2113, "step": 3406 }, { "epoch": 0.6066595441595442, "grad_norm": 0.5340631604194641, "learning_rate": 0.00018897065306576342, "loss": 1.1656, "step": 3407 }, { "epoch": 0.6068376068376068, "grad_norm": 0.513708233833313, "learning_rate": 0.00018896426188761675, "loss": 1.1616, "step": 3408 }, { "epoch": 0.6070156695156695, "grad_norm": 0.594601035118103, "learning_rate": 0.00018895786896640023, "loss": 1.2564, "step": 3409 }, { "epoch": 0.6071937321937322, "grad_norm": 0.45067599415779114, "learning_rate": 0.000188951474302239, "loss": 1.0107, "step": 3410 }, { "epoch": 0.6073717948717948, "grad_norm": 0.5394250750541687, "learning_rate": 0.00018894507789525843, "loss": 1.4081, "step": 3411 }, { "epoch": 0.6075498575498576, "grad_norm": 0.5612049102783203, "learning_rate": 0.00018893867974558383, "loss": 1.1015, "step": 3412 }, { "epoch": 0.6077279202279202, "grad_norm": 0.4794061779975891, "learning_rate": 0.00018893227985334056, "loss": 1.2103, "step": 3413 }, { "epoch": 0.6079059829059829, 
"grad_norm": 0.6060562133789062, "learning_rate": 0.00018892587821865402, "loss": 1.3693, "step": 3414 }, { "epoch": 0.6080840455840456, "grad_norm": 0.44624534249305725, "learning_rate": 0.00018891947484164963, "loss": 0.8209, "step": 3415 }, { "epoch": 0.6082621082621082, "grad_norm": 0.49297213554382324, "learning_rate": 0.0001889130697224528, "loss": 1.2027, "step": 3416 }, { "epoch": 0.6084401709401709, "grad_norm": 0.4431746304035187, "learning_rate": 0.0001889066628611891, "loss": 1.0347, "step": 3417 }, { "epoch": 0.6086182336182336, "grad_norm": 0.5425933599472046, "learning_rate": 0.00018890025425798404, "loss": 1.0556, "step": 3418 }, { "epoch": 0.6087962962962963, "grad_norm": 0.5502763390541077, "learning_rate": 0.00018889384391296315, "loss": 1.2362, "step": 3419 }, { "epoch": 0.6089743589743589, "grad_norm": 0.5442292094230652, "learning_rate": 0.00018888743182625203, "loss": 1.1306, "step": 3420 }, { "epoch": 0.6091524216524217, "grad_norm": 0.4651123583316803, "learning_rate": 0.00018888101799797636, "loss": 0.9305, "step": 3421 }, { "epoch": 0.6093304843304843, "grad_norm": 0.4713892340660095, "learning_rate": 0.00018887460242826177, "loss": 1.0789, "step": 3422 }, { "epoch": 0.6095085470085471, "grad_norm": 0.5283244848251343, "learning_rate": 0.00018886818511723398, "loss": 1.345, "step": 3423 }, { "epoch": 0.6096866096866097, "grad_norm": 0.5527324080467224, "learning_rate": 0.0001888617660650187, "loss": 1.1297, "step": 3424 }, { "epoch": 0.6098646723646723, "grad_norm": 0.5412901043891907, "learning_rate": 0.00018885534527174168, "loss": 1.1213, "step": 3425 }, { "epoch": 0.6100427350427351, "grad_norm": 0.5295354127883911, "learning_rate": 0.00018884892273752878, "loss": 1.1217, "step": 3426 }, { "epoch": 0.6102207977207977, "grad_norm": 0.461900532245636, "learning_rate": 0.0001888424984625058, "loss": 0.827, "step": 3427 }, { "epoch": 0.6103988603988604, "grad_norm": 0.4922671616077423, "learning_rate": 0.00018883607244679865, "loss": 
1.2216, "step": 3428 }, { "epoch": 0.6105769230769231, "grad_norm": 0.5080927014350891, "learning_rate": 0.00018882964469053317, "loss": 1.2446, "step": 3429 }, { "epoch": 0.6107549857549858, "grad_norm": 0.5523943901062012, "learning_rate": 0.00018882321519383534, "loss": 1.3346, "step": 3430 }, { "epoch": 0.6109330484330484, "grad_norm": 0.5105271935462952, "learning_rate": 0.0001888167839568311, "loss": 1.1311, "step": 3431 }, { "epoch": 0.6111111111111112, "grad_norm": 0.5635872483253479, "learning_rate": 0.0001888103509796465, "loss": 1.1875, "step": 3432 }, { "epoch": 0.6112891737891738, "grad_norm": 0.4619547426700592, "learning_rate": 0.00018880391626240755, "loss": 0.9176, "step": 3433 }, { "epoch": 0.6114672364672364, "grad_norm": 0.5896356105804443, "learning_rate": 0.00018879747980524034, "loss": 1.0251, "step": 3434 }, { "epoch": 0.6116452991452992, "grad_norm": 0.49062737822532654, "learning_rate": 0.000188791041608271, "loss": 1.1598, "step": 3435 }, { "epoch": 0.6118233618233618, "grad_norm": 0.45717164874076843, "learning_rate": 0.00018878460167162558, "loss": 0.8647, "step": 3436 }, { "epoch": 0.6120014245014245, "grad_norm": 0.5903525352478027, "learning_rate": 0.00018877815999543038, "loss": 0.9671, "step": 3437 }, { "epoch": 0.6121794871794872, "grad_norm": 0.5315384268760681, "learning_rate": 0.00018877171657981153, "loss": 1.1759, "step": 3438 }, { "epoch": 0.6123575498575499, "grad_norm": 0.5650150775909424, "learning_rate": 0.0001887652714248953, "loss": 1.0128, "step": 3439 }, { "epoch": 0.6125356125356125, "grad_norm": 0.49841752648353577, "learning_rate": 0.000188758824530808, "loss": 1.1259, "step": 3440 }, { "epoch": 0.6127136752136753, "grad_norm": 0.4985620975494385, "learning_rate": 0.00018875237589767593, "loss": 1.0158, "step": 3441 }, { "epoch": 0.6128917378917379, "grad_norm": 0.45266565680503845, "learning_rate": 0.00018874592552562536, "loss": 0.93, "step": 3442 }, { "epoch": 0.6130698005698005, "grad_norm": 
0.5696130990982056, "learning_rate": 0.00018873947341478274, "loss": 1.1432, "step": 3443 }, { "epoch": 0.6132478632478633, "grad_norm": 0.5211645364761353, "learning_rate": 0.00018873301956527451, "loss": 1.1317, "step": 3444 }, { "epoch": 0.6134259259259259, "grad_norm": 0.4991866946220398, "learning_rate": 0.00018872656397722707, "loss": 1.0362, "step": 3445 }, { "epoch": 0.6136039886039886, "grad_norm": 0.5109508037567139, "learning_rate": 0.00018872010665076694, "loss": 1.2728, "step": 3446 }, { "epoch": 0.6137820512820513, "grad_norm": 0.5838373899459839, "learning_rate": 0.00018871364758602058, "loss": 1.1131, "step": 3447 }, { "epoch": 0.613960113960114, "grad_norm": 0.5139824151992798, "learning_rate": 0.00018870718678311462, "loss": 1.238, "step": 3448 }, { "epoch": 0.6141381766381766, "grad_norm": 0.4852082431316376, "learning_rate": 0.00018870072424217562, "loss": 1.0677, "step": 3449 }, { "epoch": 0.6143162393162394, "grad_norm": 0.5312315225601196, "learning_rate": 0.00018869425996333018, "loss": 1.178, "step": 3450 }, { "epoch": 0.614494301994302, "grad_norm": 0.6343565583229065, "learning_rate": 0.00018868779394670492, "loss": 0.8839, "step": 3451 }, { "epoch": 0.6146723646723646, "grad_norm": 0.6029773950576782, "learning_rate": 0.00018868132619242662, "loss": 1.1188, "step": 3452 }, { "epoch": 0.6148504273504274, "grad_norm": 0.5246016383171082, "learning_rate": 0.00018867485670062193, "loss": 1.0797, "step": 3453 }, { "epoch": 0.61502849002849, "grad_norm": 0.49307698011398315, "learning_rate": 0.00018866838547141763, "loss": 0.9749, "step": 3454 }, { "epoch": 0.6152065527065527, "grad_norm": 0.5232903361320496, "learning_rate": 0.00018866191250494052, "loss": 1.0785, "step": 3455 }, { "epoch": 0.6153846153846154, "grad_norm": 0.5545645356178284, "learning_rate": 0.0001886554378013174, "loss": 1.0496, "step": 3456 }, { "epoch": 0.6155626780626781, "grad_norm": 0.493945837020874, "learning_rate": 0.00018864896136067515, "loss": 0.9248, "step": 
3457 }, { "epoch": 0.6157407407407407, "grad_norm": 0.5223548412322998, "learning_rate": 0.00018864248318314065, "loss": 1.0617, "step": 3458 }, { "epoch": 0.6159188034188035, "grad_norm": 0.5666514039039612, "learning_rate": 0.00018863600326884082, "loss": 0.9981, "step": 3459 }, { "epoch": 0.6160968660968661, "grad_norm": 0.4648127257823944, "learning_rate": 0.00018862952161790265, "loss": 0.917, "step": 3460 }, { "epoch": 0.6162749287749287, "grad_norm": 0.590326189994812, "learning_rate": 0.0001886230382304531, "loss": 1.044, "step": 3461 }, { "epoch": 0.6164529914529915, "grad_norm": 0.5511625409126282, "learning_rate": 0.00018861655310661925, "loss": 1.0988, "step": 3462 }, { "epoch": 0.6166310541310541, "grad_norm": 0.567182183265686, "learning_rate": 0.0001886100662465281, "loss": 1.3017, "step": 3463 }, { "epoch": 0.6168091168091168, "grad_norm": 0.5708897709846497, "learning_rate": 0.0001886035776503068, "loss": 0.9123, "step": 3464 }, { "epoch": 0.6169871794871795, "grad_norm": 0.4945180416107178, "learning_rate": 0.0001885970873180824, "loss": 1.1645, "step": 3465 }, { "epoch": 0.6171652421652422, "grad_norm": 0.4713336229324341, "learning_rate": 0.00018859059524998215, "loss": 1.0546, "step": 3466 }, { "epoch": 0.6173433048433048, "grad_norm": 0.532859206199646, "learning_rate": 0.0001885841014461332, "loss": 1.0795, "step": 3467 }, { "epoch": 0.6175213675213675, "grad_norm": 0.5165733695030212, "learning_rate": 0.00018857760590666284, "loss": 1.1284, "step": 3468 }, { "epoch": 0.6176994301994302, "grad_norm": 0.48623126745224, "learning_rate": 0.00018857110863169826, "loss": 0.8618, "step": 3469 }, { "epoch": 0.6178774928774928, "grad_norm": 0.628559947013855, "learning_rate": 0.0001885646096213668, "loss": 1.1089, "step": 3470 }, { "epoch": 0.6180555555555556, "grad_norm": 0.503545880317688, "learning_rate": 0.0001885581088757958, "loss": 1.2311, "step": 3471 }, { "epoch": 0.6182336182336182, "grad_norm": 0.6172101497650146, "learning_rate": 
0.00018855160639511264, "loss": 1.2651, "step": 3472 }, { "epoch": 0.6184116809116809, "grad_norm": 0.49572527408599854, "learning_rate": 0.00018854510217944465, "loss": 1.1026, "step": 3473 }, { "epoch": 0.6185897435897436, "grad_norm": 0.5373549461364746, "learning_rate": 0.00018853859622891938, "loss": 1.2562, "step": 3474 }, { "epoch": 0.6187678062678063, "grad_norm": 0.5272396206855774, "learning_rate": 0.0001885320885436642, "loss": 1.1763, "step": 3475 }, { "epoch": 0.6189458689458689, "grad_norm": 0.46584269404411316, "learning_rate": 0.00018852557912380665, "loss": 1.1762, "step": 3476 }, { "epoch": 0.6191239316239316, "grad_norm": 0.4798245131969452, "learning_rate": 0.0001885190679694743, "loss": 0.9229, "step": 3477 }, { "epoch": 0.6193019943019943, "grad_norm": 0.5221366286277771, "learning_rate": 0.0001885125550807947, "loss": 1.1078, "step": 3478 }, { "epoch": 0.6194800569800569, "grad_norm": 0.5051897168159485, "learning_rate": 0.0001885060404578954, "loss": 1.0055, "step": 3479 }, { "epoch": 0.6196581196581197, "grad_norm": 0.492662250995636, "learning_rate": 0.00018849952410090413, "loss": 1.1172, "step": 3480 }, { "epoch": 0.6198361823361823, "grad_norm": 0.4906775951385498, "learning_rate": 0.00018849300600994853, "loss": 1.1223, "step": 3481 }, { "epoch": 0.6200142450142451, "grad_norm": 0.5032641291618347, "learning_rate": 0.0001884864861851563, "loss": 0.9541, "step": 3482 }, { "epoch": 0.6201923076923077, "grad_norm": 0.5262296795845032, "learning_rate": 0.00018847996462665521, "loss": 1.021, "step": 3483 }, { "epoch": 0.6203703703703703, "grad_norm": 0.5253522992134094, "learning_rate": 0.00018847344133457295, "loss": 0.9075, "step": 3484 }, { "epoch": 0.6205484330484331, "grad_norm": 0.4204299747943878, "learning_rate": 0.00018846691630903744, "loss": 0.895, "step": 3485 }, { "epoch": 0.6207264957264957, "grad_norm": 0.557604193687439, "learning_rate": 0.0001884603895501765, "loss": 1.1758, "step": 3486 }, { "epoch": 0.6209045584045584, 
"grad_norm": 0.5981321930885315, "learning_rate": 0.00018845386105811795, "loss": 1.1087, "step": 3487 }, { "epoch": 0.6210826210826211, "grad_norm": 0.5285581946372986, "learning_rate": 0.00018844733083298975, "loss": 1.0692, "step": 3488 }, { "epoch": 0.6212606837606838, "grad_norm": 0.5403170585632324, "learning_rate": 0.00018844079887491986, "loss": 1.1998, "step": 3489 }, { "epoch": 0.6214387464387464, "grad_norm": 0.5471615791320801, "learning_rate": 0.0001884342651840362, "loss": 0.9556, "step": 3490 }, { "epoch": 0.6216168091168092, "grad_norm": 0.6126871705055237, "learning_rate": 0.00018842772976046686, "loss": 1.2629, "step": 3491 }, { "epoch": 0.6217948717948718, "grad_norm": 0.45669353008270264, "learning_rate": 0.00018842119260433982, "loss": 1.0203, "step": 3492 }, { "epoch": 0.6219729344729344, "grad_norm": 0.4998520612716675, "learning_rate": 0.0001884146537157832, "loss": 1.0271, "step": 3493 }, { "epoch": 0.6221509971509972, "grad_norm": 0.5820242166519165, "learning_rate": 0.00018840811309492507, "loss": 1.0321, "step": 3494 }, { "epoch": 0.6223290598290598, "grad_norm": 0.581676185131073, "learning_rate": 0.00018840157074189367, "loss": 0.9219, "step": 3495 }, { "epoch": 0.6225071225071225, "grad_norm": 0.6044120788574219, "learning_rate": 0.0001883950266568171, "loss": 1.1621, "step": 3496 }, { "epoch": 0.6226851851851852, "grad_norm": 0.5448858737945557, "learning_rate": 0.0001883884808398236, "loss": 1.0686, "step": 3497 }, { "epoch": 0.6228632478632479, "grad_norm": 0.4921551048755646, "learning_rate": 0.00018838193329104143, "loss": 1.2259, "step": 3498 }, { "epoch": 0.6230413105413105, "grad_norm": 0.5374335646629333, "learning_rate": 0.00018837538401059888, "loss": 1.2608, "step": 3499 }, { "epoch": 0.6232193732193733, "grad_norm": 0.5123008489608765, "learning_rate": 0.0001883688329986243, "loss": 0.8682, "step": 3500 }, { "epoch": 0.6233974358974359, "grad_norm": 0.566145122051239, "learning_rate": 0.00018836228025524595, "loss": 
1.1807, "step": 3501 }, { "epoch": 0.6235754985754985, "grad_norm": 0.6658587455749512, "learning_rate": 0.00018835572578059233, "loss": 1.1641, "step": 3502 }, { "epoch": 0.6237535612535613, "grad_norm": 0.4992465078830719, "learning_rate": 0.00018834916957479177, "loss": 0.9125, "step": 3503 }, { "epoch": 0.6239316239316239, "grad_norm": 0.5081812739372253, "learning_rate": 0.00018834261163797278, "loss": 1.0939, "step": 3504 }, { "epoch": 0.6241096866096866, "grad_norm": 0.5168607234954834, "learning_rate": 0.0001883360519702638, "loss": 1.2382, "step": 3505 }, { "epoch": 0.6242877492877493, "grad_norm": 0.5517697334289551, "learning_rate": 0.00018832949057179344, "loss": 1.206, "step": 3506 }, { "epoch": 0.624465811965812, "grad_norm": 0.4505497217178345, "learning_rate": 0.00018832292744269013, "loss": 0.8485, "step": 3507 }, { "epoch": 0.6246438746438746, "grad_norm": 0.5230690240859985, "learning_rate": 0.0001883163625830826, "loss": 1.1701, "step": 3508 }, { "epoch": 0.6248219373219374, "grad_norm": 0.5062205195426941, "learning_rate": 0.00018830979599309937, "loss": 1.0602, "step": 3509 }, { "epoch": 0.625, "grad_norm": 0.49922460317611694, "learning_rate": 0.00018830322767286913, "loss": 1.1937, "step": 3510 }, { "epoch": 0.6251780626780626, "grad_norm": 0.4637366831302643, "learning_rate": 0.0001882966576225206, "loss": 1.038, "step": 3511 }, { "epoch": 0.6253561253561254, "grad_norm": 0.5330080389976501, "learning_rate": 0.00018829008584218246, "loss": 0.9308, "step": 3512 }, { "epoch": 0.625534188034188, "grad_norm": 0.5443428754806519, "learning_rate": 0.0001882835123319835, "loss": 1.0006, "step": 3513 }, { "epoch": 0.6257122507122507, "grad_norm": 0.5534018874168396, "learning_rate": 0.00018827693709205253, "loss": 1.2383, "step": 3514 }, { "epoch": 0.6258903133903134, "grad_norm": 0.49207547307014465, "learning_rate": 0.00018827036012251832, "loss": 0.9804, "step": 3515 }, { "epoch": 0.6260683760683761, "grad_norm": 0.4900086224079132, 
"learning_rate": 0.0001882637814235098, "loss": 1.012, "step": 3516 }, { "epoch": 0.6262464387464387, "grad_norm": 0.5267475247383118, "learning_rate": 0.00018825720099515585, "loss": 1.1104, "step": 3517 }, { "epoch": 0.6264245014245015, "grad_norm": 0.5711902379989624, "learning_rate": 0.00018825061883758534, "loss": 1.0616, "step": 3518 }, { "epoch": 0.6266025641025641, "grad_norm": 0.5007771849632263, "learning_rate": 0.0001882440349509273, "loss": 0.9578, "step": 3519 }, { "epoch": 0.6267806267806267, "grad_norm": 0.5657192468643188, "learning_rate": 0.00018823744933531075, "loss": 1.2768, "step": 3520 }, { "epoch": 0.6269586894586895, "grad_norm": 0.6077173352241516, "learning_rate": 0.00018823086199086462, "loss": 1.147, "step": 3521 }, { "epoch": 0.6271367521367521, "grad_norm": 0.5114718079566956, "learning_rate": 0.000188224272917718, "loss": 1.1176, "step": 3522 }, { "epoch": 0.6273148148148148, "grad_norm": 0.4831676185131073, "learning_rate": 0.0001882176821160001, "loss": 0.8021, "step": 3523 }, { "epoch": 0.6274928774928775, "grad_norm": 0.6327390670776367, "learning_rate": 0.00018821108958583994, "loss": 0.9449, "step": 3524 }, { "epoch": 0.6276709401709402, "grad_norm": 0.5541796684265137, "learning_rate": 0.00018820449532736672, "loss": 1.2018, "step": 3525 }, { "epoch": 0.6278490028490028, "grad_norm": 0.5224639773368835, "learning_rate": 0.00018819789934070968, "loss": 1.0138, "step": 3526 }, { "epoch": 0.6280270655270656, "grad_norm": 0.49359360337257385, "learning_rate": 0.00018819130162599798, "loss": 1.0768, "step": 3527 }, { "epoch": 0.6282051282051282, "grad_norm": 0.5525050759315491, "learning_rate": 0.00018818470218336092, "loss": 1.0883, "step": 3528 }, { "epoch": 0.6283831908831908, "grad_norm": 0.5563427209854126, "learning_rate": 0.00018817810101292787, "loss": 1.1491, "step": 3529 }, { "epoch": 0.6285612535612536, "grad_norm": 0.49363306164741516, "learning_rate": 0.00018817149811482803, "loss": 1.1409, "step": 3530 }, { "epoch": 
0.6287393162393162, "grad_norm": 0.5102340579032898, "learning_rate": 0.00018816489348919086, "loss": 1.1914, "step": 3531 }, { "epoch": 0.6289173789173789, "grad_norm": 0.5173332691192627, "learning_rate": 0.00018815828713614576, "loss": 0.9308, "step": 3532 }, { "epoch": 0.6290954415954416, "grad_norm": 0.5093010067939758, "learning_rate": 0.00018815167905582216, "loss": 0.9429, "step": 3533 }, { "epoch": 0.6292735042735043, "grad_norm": 0.5453153848648071, "learning_rate": 0.00018814506924834954, "loss": 1.0147, "step": 3534 }, { "epoch": 0.6294515669515669, "grad_norm": 0.5850773453712463, "learning_rate": 0.00018813845771385737, "loss": 1.3372, "step": 3535 }, { "epoch": 0.6296296296296297, "grad_norm": 0.5095621943473816, "learning_rate": 0.00018813184445247525, "loss": 1.0515, "step": 3536 }, { "epoch": 0.6298076923076923, "grad_norm": 0.6216054558753967, "learning_rate": 0.00018812522946433266, "loss": 0.8703, "step": 3537 }, { "epoch": 0.6299857549857549, "grad_norm": 0.4945531189441681, "learning_rate": 0.00018811861274955932, "loss": 1.1485, "step": 3538 }, { "epoch": 0.6301638176638177, "grad_norm": 0.47882601618766785, "learning_rate": 0.00018811199430828477, "loss": 1.1107, "step": 3539 }, { "epoch": 0.6303418803418803, "grad_norm": 0.5005326867103577, "learning_rate": 0.00018810537414063876, "loss": 1.0237, "step": 3540 }, { "epoch": 0.6305199430199431, "grad_norm": 0.5382370352745056, "learning_rate": 0.00018809875224675093, "loss": 0.9965, "step": 3541 }, { "epoch": 0.6306980056980057, "grad_norm": 0.47002625465393066, "learning_rate": 0.0001880921286267511, "loss": 1.065, "step": 3542 }, { "epoch": 0.6308760683760684, "grad_norm": 0.4519105851650238, "learning_rate": 0.00018808550328076897, "loss": 0.9312, "step": 3543 }, { "epoch": 0.6310541310541311, "grad_norm": 0.45360881090164185, "learning_rate": 0.0001880788762089344, "loss": 1.0739, "step": 3544 }, { "epoch": 0.6312321937321937, "grad_norm": 0.5578218698501587, "learning_rate": 
0.00018807224741137723, "loss": 1.2478, "step": 3545 }, { "epoch": 0.6314102564102564, "grad_norm": 0.4838615655899048, "learning_rate": 0.0001880656168882273, "loss": 1.0221, "step": 3546 }, { "epoch": 0.6315883190883191, "grad_norm": 0.5733556747436523, "learning_rate": 0.0001880589846396146, "loss": 1.1249, "step": 3547 }, { "epoch": 0.6317663817663818, "grad_norm": 0.4939686954021454, "learning_rate": 0.00018805235066566894, "loss": 0.8559, "step": 3548 }, { "epoch": 0.6319444444444444, "grad_norm": 0.5072234869003296, "learning_rate": 0.00018804571496652044, "loss": 1.0842, "step": 3549 }, { "epoch": 0.6321225071225072, "grad_norm": 0.4640493392944336, "learning_rate": 0.00018803907754229903, "loss": 1.0728, "step": 3550 }, { "epoch": 0.6323005698005698, "grad_norm": 0.5314788818359375, "learning_rate": 0.00018803243839313481, "loss": 1.0752, "step": 3551 }, { "epoch": 0.6324786324786325, "grad_norm": 0.5511462092399597, "learning_rate": 0.0001880257975191578, "loss": 1.0238, "step": 3552 }, { "epoch": 0.6326566951566952, "grad_norm": 0.4980711042881012, "learning_rate": 0.00018801915492049816, "loss": 1.0981, "step": 3553 }, { "epoch": 0.6328347578347578, "grad_norm": 0.7746123671531677, "learning_rate": 0.00018801251059728604, "loss": 1.0968, "step": 3554 }, { "epoch": 0.6330128205128205, "grad_norm": 0.5006106495857239, "learning_rate": 0.00018800586454965155, "loss": 1.1802, "step": 3555 }, { "epoch": 0.6331908831908832, "grad_norm": 0.49427780508995056, "learning_rate": 0.000187999216777725, "loss": 1.1257, "step": 3556 }, { "epoch": 0.6333689458689459, "grad_norm": 0.5484146475791931, "learning_rate": 0.00018799256728163662, "loss": 1.1344, "step": 3557 }, { "epoch": 0.6335470085470085, "grad_norm": 0.5007877349853516, "learning_rate": 0.00018798591606151662, "loss": 1.1328, "step": 3558 }, { "epoch": 0.6337250712250713, "grad_norm": 0.5068148970603943, "learning_rate": 0.00018797926311749544, "loss": 0.976, "step": 3559 }, { "epoch": 0.6339031339031339, 
"grad_norm": 0.44936859607696533, "learning_rate": 0.00018797260844970334, "loss": 0.9735, "step": 3560 }, { "epoch": 0.6340811965811965, "grad_norm": 0.4592931866645813, "learning_rate": 0.0001879659520582707, "loss": 1.1306, "step": 3561 }, { "epoch": 0.6342592592592593, "grad_norm": 0.4664020836353302, "learning_rate": 0.00018795929394332795, "loss": 1.0577, "step": 3562 }, { "epoch": 0.6344373219373219, "grad_norm": 0.5638116002082825, "learning_rate": 0.00018795263410500556, "loss": 1.1747, "step": 3563 }, { "epoch": 0.6346153846153846, "grad_norm": 0.524736225605011, "learning_rate": 0.00018794597254343401, "loss": 0.8964, "step": 3564 }, { "epoch": 0.6347934472934473, "grad_norm": 0.4645404517650604, "learning_rate": 0.00018793930925874386, "loss": 0.8673, "step": 3565 }, { "epoch": 0.63497150997151, "grad_norm": 0.4800064265727997, "learning_rate": 0.00018793264425106558, "loss": 1.0334, "step": 3566 }, { "epoch": 0.6351495726495726, "grad_norm": 0.6202501058578491, "learning_rate": 0.0001879259775205298, "loss": 1.1061, "step": 3567 }, { "epoch": 0.6353276353276354, "grad_norm": 0.503383457660675, "learning_rate": 0.00018791930906726718, "loss": 0.8545, "step": 3568 }, { "epoch": 0.635505698005698, "grad_norm": 0.5256780982017517, "learning_rate": 0.00018791263889140832, "loss": 1.0785, "step": 3569 }, { "epoch": 0.6356837606837606, "grad_norm": 0.47562023997306824, "learning_rate": 0.00018790596699308392, "loss": 1.0041, "step": 3570 }, { "epoch": 0.6358618233618234, "grad_norm": 0.5103238224983215, "learning_rate": 0.00018789929337242469, "loss": 1.1488, "step": 3571 }, { "epoch": 0.636039886039886, "grad_norm": 0.5023695826530457, "learning_rate": 0.0001878926180295614, "loss": 1.0696, "step": 3572 }, { "epoch": 0.6362179487179487, "grad_norm": 0.5302290916442871, "learning_rate": 0.00018788594096462487, "loss": 1.0554, "step": 3573 }, { "epoch": 0.6363960113960114, "grad_norm": 0.4798361361026764, "learning_rate": 0.00018787926217774588, "loss": 
0.8872, "step": 3574 }, { "epoch": 0.6365740740740741, "grad_norm": 0.5529209971427917, "learning_rate": 0.00018787258166905527, "loss": 1.0976, "step": 3575 }, { "epoch": 0.6367521367521367, "grad_norm": 0.49757125973701477, "learning_rate": 0.00018786589943868402, "loss": 1.0049, "step": 3576 }, { "epoch": 0.6369301994301995, "grad_norm": 0.5497848391532898, "learning_rate": 0.00018785921548676295, "loss": 1.2272, "step": 3577 }, { "epoch": 0.6371082621082621, "grad_norm": 0.5061752200126648, "learning_rate": 0.0001878525298134231, "loss": 1.0307, "step": 3578 }, { "epoch": 0.6372863247863247, "grad_norm": 0.5427432656288147, "learning_rate": 0.00018784584241879538, "loss": 1.1064, "step": 3579 }, { "epoch": 0.6374643874643875, "grad_norm": 0.48312774300575256, "learning_rate": 0.0001878391533030109, "loss": 1.078, "step": 3580 }, { "epoch": 0.6376424501424501, "grad_norm": 0.5059898495674133, "learning_rate": 0.00018783246246620067, "loss": 1.0922, "step": 3581 }, { "epoch": 0.6378205128205128, "grad_norm": 0.5144124031066895, "learning_rate": 0.00018782576990849581, "loss": 1.0909, "step": 3582 }, { "epoch": 0.6379985754985755, "grad_norm": 0.5535032153129578, "learning_rate": 0.0001878190756300274, "loss": 1.2579, "step": 3583 }, { "epoch": 0.6381766381766382, "grad_norm": 0.49145692586898804, "learning_rate": 0.00018781237963092667, "loss": 1.0823, "step": 3584 }, { "epoch": 0.6383547008547008, "grad_norm": 0.5245576500892639, "learning_rate": 0.00018780568191132472, "loss": 0.9595, "step": 3585 }, { "epoch": 0.6385327635327636, "grad_norm": 0.5026637315750122, "learning_rate": 0.00018779898247135287, "loss": 1.153, "step": 3586 }, { "epoch": 0.6387108262108262, "grad_norm": 0.5092771053314209, "learning_rate": 0.00018779228131114234, "loss": 1.0661, "step": 3587 }, { "epoch": 0.6388888888888888, "grad_norm": 0.517387330532074, "learning_rate": 0.00018778557843082444, "loss": 1.0113, "step": 3588 }, { "epoch": 0.6390669515669516, "grad_norm": 
0.5149948000907898, "learning_rate": 0.00018777887383053047, "loss": 0.9483, "step": 3589 }, { "epoch": 0.6392450142450142, "grad_norm": 0.4854544997215271, "learning_rate": 0.00018777216751039185, "loss": 1.22, "step": 3590 }, { "epoch": 0.6394230769230769, "grad_norm": 0.5317271947860718, "learning_rate": 0.0001877654594705399, "loss": 1.2483, "step": 3591 }, { "epoch": 0.6396011396011396, "grad_norm": 0.4554755687713623, "learning_rate": 0.0001877587497111061, "loss": 0.9864, "step": 3592 }, { "epoch": 0.6397792022792023, "grad_norm": 0.4833736717700958, "learning_rate": 0.0001877520382322219, "loss": 0.8895, "step": 3593 }, { "epoch": 0.6399572649572649, "grad_norm": 0.5018072724342346, "learning_rate": 0.00018774532503401878, "loss": 1.2523, "step": 3594 }, { "epoch": 0.6401353276353277, "grad_norm": 0.4478762447834015, "learning_rate": 0.00018773861011662832, "loss": 0.8833, "step": 3595 }, { "epoch": 0.6403133903133903, "grad_norm": 0.5686985850334167, "learning_rate": 0.00018773189348018205, "loss": 0.9934, "step": 3596 }, { "epoch": 0.6404914529914529, "grad_norm": 0.5144175291061401, "learning_rate": 0.00018772517512481157, "loss": 0.8149, "step": 3597 }, { "epoch": 0.6406695156695157, "grad_norm": 0.5359936356544495, "learning_rate": 0.00018771845505064852, "loss": 1.1822, "step": 3598 }, { "epoch": 0.6408475783475783, "grad_norm": 0.532573938369751, "learning_rate": 0.00018771173325782457, "loss": 1.0361, "step": 3599 }, { "epoch": 0.6410256410256411, "grad_norm": 0.46121537685394287, "learning_rate": 0.00018770500974647138, "loss": 1.0792, "step": 3600 }, { "epoch": 0.6412037037037037, "grad_norm": 0.4804821312427521, "learning_rate": 0.00018769828451672076, "loss": 1.1119, "step": 3601 }, { "epoch": 0.6413817663817664, "grad_norm": 0.4955114722251892, "learning_rate": 0.00018769155756870443, "loss": 0.9312, "step": 3602 }, { "epoch": 0.6415598290598291, "grad_norm": 0.4987298250198364, "learning_rate": 0.00018768482890255415, "loss": 1.2326, "step": 
3603 }, { "epoch": 0.6417378917378918, "grad_norm": 0.47216179966926575, "learning_rate": 0.0001876780985184018, "loss": 1.0114, "step": 3604 }, { "epoch": 0.6419159544159544, "grad_norm": 0.5891931653022766, "learning_rate": 0.0001876713664163793, "loss": 1.2963, "step": 3605 }, { "epoch": 0.6420940170940171, "grad_norm": 0.4645081162452698, "learning_rate": 0.00018766463259661846, "loss": 1.0874, "step": 3606 }, { "epoch": 0.6422720797720798, "grad_norm": 0.5275476574897766, "learning_rate": 0.00018765789705925125, "loss": 0.9453, "step": 3607 }, { "epoch": 0.6424501424501424, "grad_norm": 0.5884957313537598, "learning_rate": 0.00018765115980440964, "loss": 1.0796, "step": 3608 }, { "epoch": 0.6426282051282052, "grad_norm": 0.4843178987503052, "learning_rate": 0.00018764442083222567, "loss": 1.1657, "step": 3609 }, { "epoch": 0.6428062678062678, "grad_norm": 0.5188381671905518, "learning_rate": 0.00018763768014283126, "loss": 1.1109, "step": 3610 }, { "epoch": 0.6429843304843305, "grad_norm": 0.4101468324661255, "learning_rate": 0.00018763093773635863, "loss": 0.895, "step": 3611 }, { "epoch": 0.6431623931623932, "grad_norm": 0.4552084505558014, "learning_rate": 0.00018762419361293979, "loss": 0.9418, "step": 3612 }, { "epoch": 0.6433404558404558, "grad_norm": 0.5924661159515381, "learning_rate": 0.0001876174477727069, "loss": 1.2562, "step": 3613 }, { "epoch": 0.6435185185185185, "grad_norm": 0.5072348713874817, "learning_rate": 0.00018761070021579212, "loss": 1.1501, "step": 3614 }, { "epoch": 0.6436965811965812, "grad_norm": 0.5312697887420654, "learning_rate": 0.0001876039509423277, "loss": 1.0751, "step": 3615 }, { "epoch": 0.6438746438746439, "grad_norm": 0.6046462059020996, "learning_rate": 0.0001875971999524458, "loss": 1.0927, "step": 3616 }, { "epoch": 0.6440527065527065, "grad_norm": 0.4992375373840332, "learning_rate": 0.00018759044724627876, "loss": 0.96, "step": 3617 }, { "epoch": 0.6442307692307693, "grad_norm": 0.4983134865760803, "learning_rate": 
0.00018758369282395886, "loss": 1.0599, "step": 3618 }, { "epoch": 0.6444088319088319, "grad_norm": 0.5655683279037476, "learning_rate": 0.00018757693668561843, "loss": 1.2372, "step": 3619 }, { "epoch": 0.6445868945868946, "grad_norm": 0.4968827962875366, "learning_rate": 0.00018757017883138985, "loss": 1.1639, "step": 3620 }, { "epoch": 0.6447649572649573, "grad_norm": 0.5831420421600342, "learning_rate": 0.00018756341926140553, "loss": 0.9002, "step": 3621 }, { "epoch": 0.64494301994302, "grad_norm": 0.4828467071056366, "learning_rate": 0.0001875566579757979, "loss": 0.9201, "step": 3622 }, { "epoch": 0.6451210826210826, "grad_norm": 0.5067087411880493, "learning_rate": 0.00018754989497469943, "loss": 0.9874, "step": 3623 }, { "epoch": 0.6452991452991453, "grad_norm": 0.5182318091392517, "learning_rate": 0.00018754313025824267, "loss": 1.1291, "step": 3624 }, { "epoch": 0.645477207977208, "grad_norm": 0.472200483083725, "learning_rate": 0.0001875363638265601, "loss": 1.0286, "step": 3625 }, { "epoch": 0.6456552706552706, "grad_norm": 0.4597308039665222, "learning_rate": 0.0001875295956797843, "loss": 0.7517, "step": 3626 }, { "epoch": 0.6458333333333334, "grad_norm": 0.5358221530914307, "learning_rate": 0.00018752282581804798, "loss": 1.2264, "step": 3627 }, { "epoch": 0.646011396011396, "grad_norm": 0.5268992781639099, "learning_rate": 0.00018751605424148363, "loss": 1.0801, "step": 3628 }, { "epoch": 0.6461894586894587, "grad_norm": 0.5917379260063171, "learning_rate": 0.00018750928095022403, "loss": 0.9538, "step": 3629 }, { "epoch": 0.6463675213675214, "grad_norm": 0.44506707787513733, "learning_rate": 0.00018750250594440183, "loss": 0.9818, "step": 3630 }, { "epoch": 0.646545584045584, "grad_norm": 0.5578880906105042, "learning_rate": 0.00018749572922414982, "loss": 0.9958, "step": 3631 }, { "epoch": 0.6467236467236467, "grad_norm": 0.5155318975448608, "learning_rate": 0.00018748895078960076, "loss": 1.2888, "step": 3632 }, { "epoch": 0.6469017094017094, 
"grad_norm": 0.5117297768592834, "learning_rate": 0.0001874821706408874, "loss": 1.0452, "step": 3633 }, { "epoch": 0.6470797720797721, "grad_norm": 0.5169841647148132, "learning_rate": 0.00018747538877814267, "loss": 1.1649, "step": 3634 }, { "epoch": 0.6472578347578347, "grad_norm": 0.5001181960105896, "learning_rate": 0.00018746860520149942, "loss": 1.1472, "step": 3635 }, { "epoch": 0.6474358974358975, "grad_norm": 0.6289856433868408, "learning_rate": 0.00018746181991109056, "loss": 1.0351, "step": 3636 }, { "epoch": 0.6476139601139601, "grad_norm": 0.5490612983703613, "learning_rate": 0.00018745503290704897, "loss": 0.8938, "step": 3637 }, { "epoch": 0.6477920227920227, "grad_norm": 0.47378283739089966, "learning_rate": 0.00018744824418950775, "loss": 0.937, "step": 3638 }, { "epoch": 0.6479700854700855, "grad_norm": 0.6079059839248657, "learning_rate": 0.0001874414537585998, "loss": 1.0486, "step": 3639 }, { "epoch": 0.6481481481481481, "grad_norm": 0.5351769924163818, "learning_rate": 0.00018743466161445823, "loss": 1.0316, "step": 3640 }, { "epoch": 0.6483262108262108, "grad_norm": 0.5516425967216492, "learning_rate": 0.0001874278677572161, "loss": 1.1552, "step": 3641 }, { "epoch": 0.6485042735042735, "grad_norm": 0.5027523636817932, "learning_rate": 0.0001874210721870065, "loss": 1.0491, "step": 3642 }, { "epoch": 0.6486823361823362, "grad_norm": 0.5596168041229248, "learning_rate": 0.00018741427490396258, "loss": 1.0256, "step": 3643 }, { "epoch": 0.6488603988603988, "grad_norm": 0.5601046681404114, "learning_rate": 0.00018740747590821751, "loss": 1.1604, "step": 3644 }, { "epoch": 0.6490384615384616, "grad_norm": 0.49749523401260376, "learning_rate": 0.0001874006751999046, "loss": 1.0532, "step": 3645 }, { "epoch": 0.6492165242165242, "grad_norm": 0.6226113438606262, "learning_rate": 0.00018739387277915697, "loss": 1.1402, "step": 3646 }, { "epoch": 0.6493945868945868, "grad_norm": 0.6142009496688843, "learning_rate": 0.00018738706864610794, "loss": 
1.2437, "step": 3647 }, { "epoch": 0.6495726495726496, "grad_norm": 0.48814916610717773, "learning_rate": 0.00018738026280089084, "loss": 0.8429, "step": 3648 }, { "epoch": 0.6497507122507122, "grad_norm": 0.5717982053756714, "learning_rate": 0.00018737345524363902, "loss": 1.1095, "step": 3649 }, { "epoch": 0.6499287749287749, "grad_norm": 0.5150009989738464, "learning_rate": 0.00018736664597448582, "loss": 1.199, "step": 3650 }, { "epoch": 0.6501068376068376, "grad_norm": 0.58461594581604, "learning_rate": 0.00018735983499356472, "loss": 1.0704, "step": 3651 }, { "epoch": 0.6502849002849003, "grad_norm": 0.5108643770217896, "learning_rate": 0.0001873530223010091, "loss": 1.2039, "step": 3652 }, { "epoch": 0.6504629629629629, "grad_norm": 0.513306736946106, "learning_rate": 0.00018734620789695247, "loss": 1.1448, "step": 3653 }, { "epoch": 0.6506410256410257, "grad_norm": 0.5139986872673035, "learning_rate": 0.00018733939178152835, "loss": 1.0023, "step": 3654 }, { "epoch": 0.6508190883190883, "grad_norm": 0.5187703967094421, "learning_rate": 0.00018733257395487027, "loss": 1.1304, "step": 3655 }, { "epoch": 0.6509971509971509, "grad_norm": 0.5470501184463501, "learning_rate": 0.00018732575441711183, "loss": 1.0272, "step": 3656 }, { "epoch": 0.6511752136752137, "grad_norm": 0.537309467792511, "learning_rate": 0.00018731893316838665, "loss": 1.0806, "step": 3657 }, { "epoch": 0.6513532763532763, "grad_norm": 0.5187864899635315, "learning_rate": 0.00018731211020882836, "loss": 1.0154, "step": 3658 }, { "epoch": 0.6515313390313391, "grad_norm": 0.48373252153396606, "learning_rate": 0.00018730528553857062, "loss": 1.0135, "step": 3659 }, { "epoch": 0.6517094017094017, "grad_norm": 0.5645000338554382, "learning_rate": 0.00018729845915774716, "loss": 0.8924, "step": 3660 }, { "epoch": 0.6518874643874644, "grad_norm": 0.5722129940986633, "learning_rate": 0.00018729163106649178, "loss": 1.2416, "step": 3661 }, { "epoch": 0.6520655270655271, "grad_norm": 
0.5904877185821533, "learning_rate": 0.00018728480126493823, "loss": 0.9792, "step": 3662 }, { "epoch": 0.6522435897435898, "grad_norm": 0.5224713087081909, "learning_rate": 0.00018727796975322026, "loss": 1.079, "step": 3663 }, { "epoch": 0.6524216524216524, "grad_norm": 0.5667217969894409, "learning_rate": 0.00018727113653147184, "loss": 1.1397, "step": 3664 }, { "epoch": 0.6525997150997151, "grad_norm": 0.5274622440338135, "learning_rate": 0.00018726430159982677, "loss": 1.0569, "step": 3665 }, { "epoch": 0.6527777777777778, "grad_norm": 0.5745310187339783, "learning_rate": 0.00018725746495841896, "loss": 1.2129, "step": 3666 }, { "epoch": 0.6529558404558404, "grad_norm": 0.6123398542404175, "learning_rate": 0.0001872506266073824, "loss": 1.186, "step": 3667 }, { "epoch": 0.6531339031339032, "grad_norm": 0.4983387291431427, "learning_rate": 0.00018724378654685106, "loss": 1.1957, "step": 3668 }, { "epoch": 0.6533119658119658, "grad_norm": 0.5584192276000977, "learning_rate": 0.00018723694477695897, "loss": 1.0939, "step": 3669 }, { "epoch": 0.6534900284900285, "grad_norm": 0.5318745374679565, "learning_rate": 0.00018723010129784016, "loss": 1.1869, "step": 3670 }, { "epoch": 0.6536680911680912, "grad_norm": 0.4607617259025574, "learning_rate": 0.0001872232561096287, "loss": 0.8447, "step": 3671 }, { "epoch": 0.6538461538461539, "grad_norm": 0.5312213897705078, "learning_rate": 0.00018721640921245874, "loss": 1.0623, "step": 3672 }, { "epoch": 0.6540242165242165, "grad_norm": 0.5099136233329773, "learning_rate": 0.0001872095606064644, "loss": 0.7174, "step": 3673 }, { "epoch": 0.6542022792022792, "grad_norm": 0.6894404888153076, "learning_rate": 0.0001872027102917799, "loss": 1.0251, "step": 3674 }, { "epoch": 0.6543803418803419, "grad_norm": 0.5758535861968994, "learning_rate": 0.00018719585826853944, "loss": 1.1655, "step": 3675 }, { "epoch": 0.6545584045584045, "grad_norm": 0.521824061870575, "learning_rate": 0.0001871890045368773, "loss": 1.1653, "step": 3676 
}, { "epoch": 0.6547364672364673, "grad_norm": 0.5370712280273438, "learning_rate": 0.00018718214909692771, "loss": 1.3152, "step": 3677 }, { "epoch": 0.6549145299145299, "grad_norm": 0.4459827244281769, "learning_rate": 0.000187175291948825, "loss": 1.0953, "step": 3678 }, { "epoch": 0.6550925925925926, "grad_norm": 0.44131460785865784, "learning_rate": 0.00018716843309270353, "loss": 0.8568, "step": 3679 }, { "epoch": 0.6552706552706553, "grad_norm": 0.5529624819755554, "learning_rate": 0.00018716157252869772, "loss": 1.2085, "step": 3680 }, { "epoch": 0.655448717948718, "grad_norm": 0.44604751467704773, "learning_rate": 0.00018715471025694194, "loss": 0.9605, "step": 3681 }, { "epoch": 0.6556267806267806, "grad_norm": 0.4662449359893799, "learning_rate": 0.0001871478462775707, "loss": 1.2092, "step": 3682 }, { "epoch": 0.6558048433048433, "grad_norm": 0.42632922530174255, "learning_rate": 0.0001871409805907184, "loss": 0.9141, "step": 3683 }, { "epoch": 0.655982905982906, "grad_norm": 0.534009575843811, "learning_rate": 0.00018713411319651958, "loss": 1.0147, "step": 3684 }, { "epoch": 0.6561609686609686, "grad_norm": 0.5433241724967957, "learning_rate": 0.00018712724409510888, "loss": 1.1998, "step": 3685 }, { "epoch": 0.6563390313390314, "grad_norm": 0.4771319627761841, "learning_rate": 0.0001871203732866208, "loss": 1.0384, "step": 3686 }, { "epoch": 0.656517094017094, "grad_norm": 0.507641077041626, "learning_rate": 0.00018711350077119, "loss": 0.9608, "step": 3687 }, { "epoch": 0.6566951566951567, "grad_norm": 0.5069413185119629, "learning_rate": 0.00018710662654895108, "loss": 1.055, "step": 3688 }, { "epoch": 0.6568732193732194, "grad_norm": 0.512340247631073, "learning_rate": 0.00018709975062003876, "loss": 0.9506, "step": 3689 }, { "epoch": 0.657051282051282, "grad_norm": 0.5156390070915222, "learning_rate": 0.00018709287298458778, "loss": 1.0089, "step": 3690 }, { "epoch": 0.6572293447293447, "grad_norm": 0.5101696252822876, "learning_rate": 
0.0001870859936427329, "loss": 1.0441, "step": 3691 }, { "epoch": 0.6574074074074074, "grad_norm": 0.4394689202308655, "learning_rate": 0.00018707911259460884, "loss": 0.9124, "step": 3692 }, { "epoch": 0.6575854700854701, "grad_norm": 0.4842554032802582, "learning_rate": 0.00018707222984035043, "loss": 1.0051, "step": 3693 }, { "epoch": 0.6577635327635327, "grad_norm": 0.6418108344078064, "learning_rate": 0.00018706534538009262, "loss": 1.1165, "step": 3694 }, { "epoch": 0.6579415954415955, "grad_norm": 0.5596832036972046, "learning_rate": 0.00018705845921397022, "loss": 1.1127, "step": 3695 }, { "epoch": 0.6581196581196581, "grad_norm": 0.6692909002304077, "learning_rate": 0.00018705157134211813, "loss": 1.2403, "step": 3696 }, { "epoch": 0.6582977207977208, "grad_norm": 0.5046468377113342, "learning_rate": 0.00018704468176467134, "loss": 1.1016, "step": 3697 }, { "epoch": 0.6584757834757835, "grad_norm": 0.6723586320877075, "learning_rate": 0.00018703779048176485, "loss": 1.1777, "step": 3698 }, { "epoch": 0.6586538461538461, "grad_norm": 0.5269754528999329, "learning_rate": 0.00018703089749353365, "loss": 1.1441, "step": 3699 }, { "epoch": 0.6588319088319088, "grad_norm": 0.5303323268890381, "learning_rate": 0.0001870240028001128, "loss": 1.07, "step": 3700 }, { "epoch": 0.6590099715099715, "grad_norm": 0.4795511066913605, "learning_rate": 0.00018701710640163738, "loss": 1.0189, "step": 3701 }, { "epoch": 0.6591880341880342, "grad_norm": 0.514659583568573, "learning_rate": 0.00018701020829824255, "loss": 1.0792, "step": 3702 }, { "epoch": 0.6593660968660968, "grad_norm": 0.5407463312149048, "learning_rate": 0.0001870033084900634, "loss": 0.9346, "step": 3703 }, { "epoch": 0.6595441595441596, "grad_norm": 0.5358424186706543, "learning_rate": 0.0001869964069772352, "loss": 1.1242, "step": 3704 }, { "epoch": 0.6597222222222222, "grad_norm": 0.470825731754303, "learning_rate": 0.00018698950375989307, "loss": 0.9952, "step": 3705 }, { "epoch": 0.6599002849002849, 
"grad_norm": 0.5711592435836792, "learning_rate": 0.00018698259883817236, "loss": 1.1678, "step": 3706 }, { "epoch": 0.6600783475783476, "grad_norm": 0.5298995971679688, "learning_rate": 0.00018697569221220832, "loss": 0.869, "step": 3707 }, { "epoch": 0.6602564102564102, "grad_norm": 0.5453875064849854, "learning_rate": 0.00018696878388213626, "loss": 0.9706, "step": 3708 }, { "epoch": 0.6604344729344729, "grad_norm": 0.6219926476478577, "learning_rate": 0.00018696187384809154, "loss": 1.1902, "step": 3709 }, { "epoch": 0.6606125356125356, "grad_norm": 0.5972491502761841, "learning_rate": 0.00018695496211020953, "loss": 1.2054, "step": 3710 }, { "epoch": 0.6607905982905983, "grad_norm": 0.5048904418945312, "learning_rate": 0.0001869480486686257, "loss": 1.0405, "step": 3711 }, { "epoch": 0.6609686609686609, "grad_norm": 0.5474200248718262, "learning_rate": 0.00018694113352347546, "loss": 1.09, "step": 3712 }, { "epoch": 0.6611467236467237, "grad_norm": 0.5073318481445312, "learning_rate": 0.00018693421667489432, "loss": 1.0698, "step": 3713 }, { "epoch": 0.6613247863247863, "grad_norm": 0.5693208575248718, "learning_rate": 0.0001869272981230178, "loss": 0.9664, "step": 3714 }, { "epoch": 0.6615028490028491, "grad_norm": 0.5678503513336182, "learning_rate": 0.00018692037786798143, "loss": 1.0895, "step": 3715 }, { "epoch": 0.6616809116809117, "grad_norm": 0.4950976073741913, "learning_rate": 0.00018691345590992082, "loss": 0.9584, "step": 3716 }, { "epoch": 0.6618589743589743, "grad_norm": 0.4944666624069214, "learning_rate": 0.0001869065322489716, "loss": 0.8607, "step": 3717 }, { "epoch": 0.6620370370370371, "grad_norm": 0.5197804570198059, "learning_rate": 0.0001868996068852694, "loss": 1.2335, "step": 3718 }, { "epoch": 0.6622150997150997, "grad_norm": 0.6550365686416626, "learning_rate": 0.00018689267981894994, "loss": 1.0441, "step": 3719 }, { "epoch": 0.6623931623931624, "grad_norm": 0.5331503748893738, "learning_rate": 0.00018688575105014888, "loss": 
1.1696, "step": 3720 }, { "epoch": 0.6625712250712251, "grad_norm": 0.47304239869117737, "learning_rate": 0.00018687882057900207, "loss": 0.9695, "step": 3721 }, { "epoch": 0.6627492877492878, "grad_norm": 0.5653772354125977, "learning_rate": 0.00018687188840564524, "loss": 1.2082, "step": 3722 }, { "epoch": 0.6629273504273504, "grad_norm": 0.5323491096496582, "learning_rate": 0.00018686495453021417, "loss": 0.9106, "step": 3723 }, { "epoch": 0.6631054131054132, "grad_norm": 0.5612817406654358, "learning_rate": 0.00018685801895284483, "loss": 1.1302, "step": 3724 }, { "epoch": 0.6632834757834758, "grad_norm": 0.4562164545059204, "learning_rate": 0.000186851081673673, "loss": 0.8886, "step": 3725 }, { "epoch": 0.6634615384615384, "grad_norm": 0.5006430745124817, "learning_rate": 0.00018684414269283463, "loss": 0.9128, "step": 3726 }, { "epoch": 0.6636396011396012, "grad_norm": 0.5305442810058594, "learning_rate": 0.0001868372020104657, "loss": 1.1766, "step": 3727 }, { "epoch": 0.6638176638176638, "grad_norm": 0.6129274368286133, "learning_rate": 0.0001868302596267022, "loss": 1.04, "step": 3728 }, { "epoch": 0.6639957264957265, "grad_norm": 0.5530399084091187, "learning_rate": 0.00018682331554168013, "loss": 1.4114, "step": 3729 }, { "epoch": 0.6641737891737892, "grad_norm": 0.5397193431854248, "learning_rate": 0.00018681636975553557, "loss": 1.1945, "step": 3730 }, { "epoch": 0.6643518518518519, "grad_norm": 0.5510205030441284, "learning_rate": 0.00018680942226840456, "loss": 1.0489, "step": 3731 }, { "epoch": 0.6645299145299145, "grad_norm": 0.5519221425056458, "learning_rate": 0.00018680247308042324, "loss": 1.1633, "step": 3732 }, { "epoch": 0.6647079772079773, "grad_norm": 0.4848768711090088, "learning_rate": 0.00018679552219172784, "loss": 0.8716, "step": 3733 }, { "epoch": 0.6648860398860399, "grad_norm": 0.5490246415138245, "learning_rate": 0.0001867885696024544, "loss": 1.1347, "step": 3734 }, { "epoch": 0.6650641025641025, "grad_norm": 0.5281458497047424, 
"learning_rate": 0.00018678161531273928, "loss": 1.0987, "step": 3735 }, { "epoch": 0.6652421652421653, "grad_norm": 0.5313079953193665, "learning_rate": 0.00018677465932271867, "loss": 0.9705, "step": 3736 }, { "epoch": 0.6654202279202279, "grad_norm": 0.5425750017166138, "learning_rate": 0.0001867677016325289, "loss": 1.1847, "step": 3737 }, { "epoch": 0.6655982905982906, "grad_norm": 0.5796298980712891, "learning_rate": 0.0001867607422423062, "loss": 1.2639, "step": 3738 }, { "epoch": 0.6657763532763533, "grad_norm": 0.49738675355911255, "learning_rate": 0.00018675378115218702, "loss": 1.0536, "step": 3739 }, { "epoch": 0.665954415954416, "grad_norm": 0.665250301361084, "learning_rate": 0.0001867468183623077, "loss": 1.2836, "step": 3740 }, { "epoch": 0.6661324786324786, "grad_norm": 0.5184717178344727, "learning_rate": 0.00018673985387280469, "loss": 1.0497, "step": 3741 }, { "epoch": 0.6663105413105413, "grad_norm": 0.5129656791687012, "learning_rate": 0.00018673288768381442, "loss": 1.2041, "step": 3742 }, { "epoch": 0.666488603988604, "grad_norm": 0.5308768153190613, "learning_rate": 0.00018672591979547337, "loss": 1.2092, "step": 3743 }, { "epoch": 0.6666666666666666, "grad_norm": 0.5059141516685486, "learning_rate": 0.00018671895020791812, "loss": 1.1929, "step": 3744 }, { "epoch": 0.6668447293447294, "grad_norm": 0.5237857103347778, "learning_rate": 0.00018671197892128517, "loss": 1.2538, "step": 3745 }, { "epoch": 0.667022792022792, "grad_norm": 0.450000137090683, "learning_rate": 0.0001867050059357111, "loss": 0.7138, "step": 3746 }, { "epoch": 0.6672008547008547, "grad_norm": 0.5413795709609985, "learning_rate": 0.00018669803125133258, "loss": 1.1383, "step": 3747 }, { "epoch": 0.6673789173789174, "grad_norm": 0.4657825529575348, "learning_rate": 0.00018669105486828622, "loss": 1.0518, "step": 3748 }, { "epoch": 0.66755698005698, "grad_norm": 0.6198551654815674, "learning_rate": 0.00018668407678670875, "loss": 1.2697, "step": 3749 }, { "epoch": 
0.6677350427350427, "grad_norm": 0.5112186074256897, "learning_rate": 0.00018667709700673685, "loss": 0.9907, "step": 3750 }, { "epoch": 0.6679131054131054, "grad_norm": 0.5446593761444092, "learning_rate": 0.00018667011552850728, "loss": 1.0708, "step": 3751 }, { "epoch": 0.6680911680911681, "grad_norm": 0.5673866271972656, "learning_rate": 0.00018666313235215682, "loss": 1.05, "step": 3752 }, { "epoch": 0.6682692307692307, "grad_norm": 0.4821988046169281, "learning_rate": 0.00018665614747782235, "loss": 1.0543, "step": 3753 }, { "epoch": 0.6684472934472935, "grad_norm": 0.5158842206001282, "learning_rate": 0.00018664916090564067, "loss": 1.0331, "step": 3754 }, { "epoch": 0.6686253561253561, "grad_norm": 0.45486921072006226, "learning_rate": 0.00018664217263574865, "loss": 0.9262, "step": 3755 }, { "epoch": 0.6688034188034188, "grad_norm": 0.46193036437034607, "learning_rate": 0.00018663518266828327, "loss": 0.9858, "step": 3756 }, { "epoch": 0.6689814814814815, "grad_norm": 0.5144094824790955, "learning_rate": 0.00018662819100338148, "loss": 1.0302, "step": 3757 }, { "epoch": 0.6691595441595442, "grad_norm": 0.5246134400367737, "learning_rate": 0.0001866211976411802, "loss": 1.064, "step": 3758 }, { "epoch": 0.6693376068376068, "grad_norm": 0.4853166937828064, "learning_rate": 0.0001866142025818165, "loss": 0.9481, "step": 3759 }, { "epoch": 0.6695156695156695, "grad_norm": 0.5029586553573608, "learning_rate": 0.00018660720582542743, "loss": 0.9443, "step": 3760 }, { "epoch": 0.6696937321937322, "grad_norm": 0.5373172163963318, "learning_rate": 0.0001866002073721501, "loss": 1.1401, "step": 3761 }, { "epoch": 0.6698717948717948, "grad_norm": 0.6236287951469421, "learning_rate": 0.00018659320722212158, "loss": 1.1255, "step": 3762 }, { "epoch": 0.6700498575498576, "grad_norm": 0.5470684766769409, "learning_rate": 0.00018658620537547903, "loss": 1.0622, "step": 3763 }, { "epoch": 0.6702279202279202, "grad_norm": 0.63177090883255, "learning_rate": 
0.00018657920183235964, "loss": 0.9736, "step": 3764 }, { "epoch": 0.6704059829059829, "grad_norm": 0.5456309914588928, "learning_rate": 0.00018657219659290068, "loss": 1.027, "step": 3765 }, { "epoch": 0.6705840455840456, "grad_norm": 0.4816138744354248, "learning_rate": 0.00018656518965723935, "loss": 0.7801, "step": 3766 }, { "epoch": 0.6707621082621082, "grad_norm": 0.4811640679836273, "learning_rate": 0.00018655818102551294, "loss": 1.0535, "step": 3767 }, { "epoch": 0.6709401709401709, "grad_norm": 0.4677673280239105, "learning_rate": 0.00018655117069785884, "loss": 1.1043, "step": 3768 }, { "epoch": 0.6711182336182336, "grad_norm": 0.5628635883331299, "learning_rate": 0.0001865441586744143, "loss": 1.0392, "step": 3769 }, { "epoch": 0.6712962962962963, "grad_norm": 0.5484504103660583, "learning_rate": 0.00018653714495531673, "loss": 1.1533, "step": 3770 }, { "epoch": 0.6714743589743589, "grad_norm": 0.5830571055412292, "learning_rate": 0.0001865301295407036, "loss": 1.2479, "step": 3771 }, { "epoch": 0.6716524216524217, "grad_norm": 0.5516841411590576, "learning_rate": 0.00018652311243071235, "loss": 1.2152, "step": 3772 }, { "epoch": 0.6718304843304843, "grad_norm": 0.6360766291618347, "learning_rate": 0.0001865160936254804, "loss": 1.0752, "step": 3773 }, { "epoch": 0.6720085470085471, "grad_norm": 0.6038610935211182, "learning_rate": 0.00018650907312514533, "loss": 1.2425, "step": 3774 }, { "epoch": 0.6721866096866097, "grad_norm": 0.49572908878326416, "learning_rate": 0.0001865020509298447, "loss": 1.0057, "step": 3775 }, { "epoch": 0.6723646723646723, "grad_norm": 0.4551616311073303, "learning_rate": 0.00018649502703971607, "loss": 1.0763, "step": 3776 }, { "epoch": 0.6725427350427351, "grad_norm": 0.6621482372283936, "learning_rate": 0.00018648800145489706, "loss": 1.0306, "step": 3777 }, { "epoch": 0.6727207977207977, "grad_norm": 0.5523806810379028, "learning_rate": 0.0001864809741755253, "loss": 0.9906, "step": 3778 }, { "epoch": 0.6728988603988604, 
"grad_norm": 0.5527048110961914, "learning_rate": 0.00018647394520173856, "loss": 1.0734, "step": 3779 }, { "epoch": 0.6730769230769231, "grad_norm": 0.573573887348175, "learning_rate": 0.00018646691453367444, "loss": 1.1409, "step": 3780 }, { "epoch": 0.6732549857549858, "grad_norm": 0.6273239254951477, "learning_rate": 0.00018645988217147079, "loss": 0.9682, "step": 3781 }, { "epoch": 0.6734330484330484, "grad_norm": 0.4917762279510498, "learning_rate": 0.00018645284811526534, "loss": 0.9681, "step": 3782 }, { "epoch": 0.6736111111111112, "grad_norm": 0.4901154339313507, "learning_rate": 0.0001864458123651959, "loss": 1.1828, "step": 3783 }, { "epoch": 0.6737891737891738, "grad_norm": 0.6292546391487122, "learning_rate": 0.00018643877492140036, "loss": 1.1987, "step": 3784 }, { "epoch": 0.6739672364672364, "grad_norm": 0.5334137678146362, "learning_rate": 0.0001864317357840166, "loss": 1.0347, "step": 3785 }, { "epoch": 0.6741452991452992, "grad_norm": 0.6064338684082031, "learning_rate": 0.0001864246949531825, "loss": 1.4154, "step": 3786 }, { "epoch": 0.6743233618233618, "grad_norm": 0.5442034602165222, "learning_rate": 0.000186417652429036, "loss": 1.2604, "step": 3787 }, { "epoch": 0.6745014245014245, "grad_norm": 0.490858793258667, "learning_rate": 0.00018641060821171518, "loss": 1.1511, "step": 3788 }, { "epoch": 0.6746794871794872, "grad_norm": 0.571116030216217, "learning_rate": 0.00018640356230135798, "loss": 1.1479, "step": 3789 }, { "epoch": 0.6748575498575499, "grad_norm": 0.4857785105705261, "learning_rate": 0.00018639651469810247, "loss": 0.9, "step": 3790 }, { "epoch": 0.6750356125356125, "grad_norm": 0.5320703983306885, "learning_rate": 0.0001863894654020867, "loss": 1.2284, "step": 3791 }, { "epoch": 0.6752136752136753, "grad_norm": 0.5586925745010376, "learning_rate": 0.0001863824144134488, "loss": 1.1183, "step": 3792 }, { "epoch": 0.6753917378917379, "grad_norm": 0.47740885615348816, "learning_rate": 0.000186375361732327, "loss": 1.1512, 
"step": 3793 }, { "epoch": 0.6755698005698005, "grad_norm": 0.5867732167243958, "learning_rate": 0.00018636830735885935, "loss": 1.1903, "step": 3794 }, { "epoch": 0.6757478632478633, "grad_norm": 0.5013887882232666, "learning_rate": 0.0001863612512931842, "loss": 0.8581, "step": 3795 }, { "epoch": 0.6759259259259259, "grad_norm": 0.6026871204376221, "learning_rate": 0.0001863541935354397, "loss": 0.9581, "step": 3796 }, { "epoch": 0.6761039886039886, "grad_norm": 0.5238468647003174, "learning_rate": 0.00018634713408576415, "loss": 1.0949, "step": 3797 }, { "epoch": 0.6762820512820513, "grad_norm": 0.5128598213195801, "learning_rate": 0.00018634007294429585, "loss": 0.8992, "step": 3798 }, { "epoch": 0.676460113960114, "grad_norm": 0.5092771053314209, "learning_rate": 0.00018633301011117324, "loss": 1.0793, "step": 3799 }, { "epoch": 0.6766381766381766, "grad_norm": 0.592566728591919, "learning_rate": 0.00018632594558653457, "loss": 1.3242, "step": 3800 }, { "epoch": 0.6768162393162394, "grad_norm": 0.4953067898750305, "learning_rate": 0.0001863188793705184, "loss": 0.9925, "step": 3801 }, { "epoch": 0.676994301994302, "grad_norm": 0.4989747107028961, "learning_rate": 0.00018631181146326305, "loss": 1.0677, "step": 3802 }, { "epoch": 0.6771723646723646, "grad_norm": 0.5375261902809143, "learning_rate": 0.00018630474186490705, "loss": 1.0556, "step": 3803 }, { "epoch": 0.6773504273504274, "grad_norm": 0.6512624025344849, "learning_rate": 0.00018629767057558894, "loss": 1.2041, "step": 3804 }, { "epoch": 0.67752849002849, "grad_norm": 0.5428260564804077, "learning_rate": 0.00018629059759544723, "loss": 0.9645, "step": 3805 }, { "epoch": 0.6777065527065527, "grad_norm": 0.5598662495613098, "learning_rate": 0.00018628352292462052, "loss": 1.1683, "step": 3806 }, { "epoch": 0.6778846153846154, "grad_norm": 0.49351340532302856, "learning_rate": 0.0001862764465632474, "loss": 1.1622, "step": 3807 }, { "epoch": 0.6780626780626781, "grad_norm": 0.4796701669692993, 
"learning_rate": 0.00018626936851146657, "loss": 1.0017, "step": 3808 }, { "epoch": 0.6782407407407407, "grad_norm": 0.444533109664917, "learning_rate": 0.00018626228876941664, "loss": 0.9145, "step": 3809 }, { "epoch": 0.6784188034188035, "grad_norm": 0.5197392702102661, "learning_rate": 0.00018625520733723635, "loss": 1.283, "step": 3810 }, { "epoch": 0.6785968660968661, "grad_norm": 0.48785829544067383, "learning_rate": 0.00018624812421506447, "loss": 1.1084, "step": 3811 }, { "epoch": 0.6787749287749287, "grad_norm": 0.5083680152893066, "learning_rate": 0.00018624103940303974, "loss": 0.9071, "step": 3812 }, { "epoch": 0.6789529914529915, "grad_norm": 0.553819477558136, "learning_rate": 0.00018623395290130103, "loss": 0.9986, "step": 3813 }, { "epoch": 0.6791310541310541, "grad_norm": 0.5347508788108826, "learning_rate": 0.00018622686470998713, "loss": 1.0148, "step": 3814 }, { "epoch": 0.6793091168091168, "grad_norm": 0.5080769062042236, "learning_rate": 0.00018621977482923693, "loss": 1.0169, "step": 3815 }, { "epoch": 0.6794871794871795, "grad_norm": 0.5444077849388123, "learning_rate": 0.00018621268325918938, "loss": 1.172, "step": 3816 }, { "epoch": 0.6796652421652422, "grad_norm": 0.521946132183075, "learning_rate": 0.00018620558999998335, "loss": 1.0247, "step": 3817 }, { "epoch": 0.6798433048433048, "grad_norm": 0.5257413983345032, "learning_rate": 0.00018619849505175786, "loss": 1.1574, "step": 3818 }, { "epoch": 0.6800213675213675, "grad_norm": 0.5473007559776306, "learning_rate": 0.00018619139841465193, "loss": 1.1254, "step": 3819 }, { "epoch": 0.6801994301994302, "grad_norm": 0.5479872226715088, "learning_rate": 0.00018618430008880463, "loss": 1.0196, "step": 3820 }, { "epoch": 0.6803774928774928, "grad_norm": 0.5918973088264465, "learning_rate": 0.00018617720007435497, "loss": 1.082, "step": 3821 }, { "epoch": 0.6805555555555556, "grad_norm": 0.5411791801452637, "learning_rate": 0.0001861700983714421, "loss": 0.7723, "step": 3822 }, { "epoch": 
0.6807336182336182, "grad_norm": 0.5466326475143433, "learning_rate": 0.00018616299498020516, "loss": 1.0979, "step": 3823 }, { "epoch": 0.6809116809116809, "grad_norm": 0.5405182838439941, "learning_rate": 0.00018615588990078332, "loss": 0.8891, "step": 3824 }, { "epoch": 0.6810897435897436, "grad_norm": 0.5415780544281006, "learning_rate": 0.00018614878313331579, "loss": 1.0927, "step": 3825 }, { "epoch": 0.6812678062678063, "grad_norm": 0.5284909605979919, "learning_rate": 0.00018614167467794182, "loss": 1.0684, "step": 3826 }, { "epoch": 0.6814458689458689, "grad_norm": 0.4873995780944824, "learning_rate": 0.00018613456453480062, "loss": 1.1653, "step": 3827 }, { "epoch": 0.6816239316239316, "grad_norm": 0.5506551265716553, "learning_rate": 0.0001861274527040316, "loss": 0.9876, "step": 3828 }, { "epoch": 0.6818019943019943, "grad_norm": 0.5031297206878662, "learning_rate": 0.0001861203391857741, "loss": 1.067, "step": 3829 }, { "epoch": 0.6819800569800569, "grad_norm": 0.622346043586731, "learning_rate": 0.0001861132239801674, "loss": 1.1514, "step": 3830 }, { "epoch": 0.6821581196581197, "grad_norm": 0.47706183791160583, "learning_rate": 0.000186106107087351, "loss": 0.9857, "step": 3831 }, { "epoch": 0.6823361823361823, "grad_norm": 0.5082845091819763, "learning_rate": 0.00018609898850746424, "loss": 1.123, "step": 3832 }, { "epoch": 0.6825142450142451, "grad_norm": 0.5119805932044983, "learning_rate": 0.00018609186824064671, "loss": 1.1386, "step": 3833 }, { "epoch": 0.6826923076923077, "grad_norm": 0.5247541069984436, "learning_rate": 0.00018608474628703788, "loss": 0.9433, "step": 3834 }, { "epoch": 0.6828703703703703, "grad_norm": 0.4618282616138458, "learning_rate": 0.00018607762264677722, "loss": 0.8727, "step": 3835 }, { "epoch": 0.6830484330484331, "grad_norm": 0.6014040112495422, "learning_rate": 0.00018607049732000436, "loss": 1.1823, "step": 3836 }, { "epoch": 0.6832264957264957, "grad_norm": 0.6489043831825256, "learning_rate": 
0.00018606337030685892, "loss": 1.1466, "step": 3837 }, { "epoch": 0.6834045584045584, "grad_norm": 0.5527763366699219, "learning_rate": 0.00018605624160748053, "loss": 1.3015, "step": 3838 }, { "epoch": 0.6835826210826211, "grad_norm": 0.5628284215927124, "learning_rate": 0.0001860491112220088, "loss": 1.1504, "step": 3839 }, { "epoch": 0.6837606837606838, "grad_norm": 0.5414566993713379, "learning_rate": 0.00018604197915058355, "loss": 1.0155, "step": 3840 }, { "epoch": 0.6839387464387464, "grad_norm": 0.5378929376602173, "learning_rate": 0.00018603484539334443, "loss": 0.8917, "step": 3841 }, { "epoch": 0.6841168091168092, "grad_norm": 0.5953748822212219, "learning_rate": 0.00018602770995043125, "loss": 1.1971, "step": 3842 }, { "epoch": 0.6842948717948718, "grad_norm": 0.511813759803772, "learning_rate": 0.00018602057282198376, "loss": 1.1345, "step": 3843 }, { "epoch": 0.6844729344729344, "grad_norm": 0.5145484209060669, "learning_rate": 0.00018601343400814185, "loss": 1.0786, "step": 3844 }, { "epoch": 0.6846509971509972, "grad_norm": 0.5199604034423828, "learning_rate": 0.00018600629350904542, "loss": 1.2063, "step": 3845 }, { "epoch": 0.6848290598290598, "grad_norm": 0.5653825998306274, "learning_rate": 0.0001859991513248343, "loss": 1.0314, "step": 3846 }, { "epoch": 0.6850071225071225, "grad_norm": 0.5660843849182129, "learning_rate": 0.00018599200745564843, "loss": 1.2754, "step": 3847 }, { "epoch": 0.6851851851851852, "grad_norm": 0.5225719809532166, "learning_rate": 0.00018598486190162788, "loss": 1.0837, "step": 3848 }, { "epoch": 0.6853632478632479, "grad_norm": 0.5011669397354126, "learning_rate": 0.00018597771466291252, "loss": 1.1, "step": 3849 }, { "epoch": 0.6855413105413105, "grad_norm": 0.5923115015029907, "learning_rate": 0.00018597056573964245, "loss": 1.1875, "step": 3850 }, { "epoch": 0.6857193732193733, "grad_norm": 0.5666482448577881, "learning_rate": 0.00018596341513195776, "loss": 1.1663, "step": 3851 }, { "epoch": 0.6858974358974359, 
"grad_norm": 0.5396790504455566, "learning_rate": 0.0001859562628399985, "loss": 1.1179, "step": 3852 }, { "epoch": 0.6860754985754985, "grad_norm": 0.5709532499313354, "learning_rate": 0.00018594910886390485, "loss": 1.0369, "step": 3853 }, { "epoch": 0.6862535612535613, "grad_norm": 0.45524322986602783, "learning_rate": 0.00018594195320381692, "loss": 1.0171, "step": 3854 }, { "epoch": 0.6864316239316239, "grad_norm": 0.6130724549293518, "learning_rate": 0.00018593479585987498, "loss": 1.1944, "step": 3855 }, { "epoch": 0.6866096866096866, "grad_norm": 0.5079745054244995, "learning_rate": 0.0001859276368322192, "loss": 1.2567, "step": 3856 }, { "epoch": 0.6867877492877493, "grad_norm": 0.49919846653938293, "learning_rate": 0.00018592047612098992, "loss": 0.9459, "step": 3857 }, { "epoch": 0.686965811965812, "grad_norm": 0.5776857733726501, "learning_rate": 0.00018591331372632734, "loss": 1.2456, "step": 3858 }, { "epoch": 0.6871438746438746, "grad_norm": 0.4740692377090454, "learning_rate": 0.00018590614964837188, "loss": 1.0401, "step": 3859 }, { "epoch": 0.6873219373219374, "grad_norm": 0.5015742182731628, "learning_rate": 0.00018589898388726389, "loss": 1.2052, "step": 3860 }, { "epoch": 0.6875, "grad_norm": 0.4819730818271637, "learning_rate": 0.0001858918164431437, "loss": 1.007, "step": 3861 }, { "epoch": 0.6876780626780626, "grad_norm": 0.5510426163673401, "learning_rate": 0.00018588464731615184, "loss": 1.0123, "step": 3862 }, { "epoch": 0.6878561253561254, "grad_norm": 0.4950829744338989, "learning_rate": 0.00018587747650642867, "loss": 1.033, "step": 3863 }, { "epoch": 0.688034188034188, "grad_norm": 0.5278680920600891, "learning_rate": 0.0001858703040141148, "loss": 1.0912, "step": 3864 }, { "epoch": 0.6882122507122507, "grad_norm": 0.6359158158302307, "learning_rate": 0.00018586312983935068, "loss": 1.2868, "step": 3865 }, { "epoch": 0.6883903133903134, "grad_norm": 0.5098239183425903, "learning_rate": 0.0001858559539822769, "loss": 0.8364, "step": 
3866 }, { "epoch": 0.6885683760683761, "grad_norm": 0.5651038289070129, "learning_rate": 0.000185848776443034, "loss": 1.1983, "step": 3867 }, { "epoch": 0.6887464387464387, "grad_norm": 0.5305678248405457, "learning_rate": 0.00018584159722176272, "loss": 1.32, "step": 3868 }, { "epoch": 0.6889245014245015, "grad_norm": 0.5481845140457153, "learning_rate": 0.00018583441631860368, "loss": 1.013, "step": 3869 }, { "epoch": 0.6891025641025641, "grad_norm": 0.5214795470237732, "learning_rate": 0.00018582723373369753, "loss": 1.172, "step": 3870 }, { "epoch": 0.6892806267806267, "grad_norm": 0.6282780766487122, "learning_rate": 0.00018582004946718502, "loss": 1.7304, "step": 3871 }, { "epoch": 0.6894586894586895, "grad_norm": 0.5266988277435303, "learning_rate": 0.0001858128635192069, "loss": 1.1418, "step": 3872 }, { "epoch": 0.6896367521367521, "grad_norm": 0.4761001467704773, "learning_rate": 0.000185805675889904, "loss": 0.8585, "step": 3873 }, { "epoch": 0.6898148148148148, "grad_norm": 0.528779923915863, "learning_rate": 0.00018579848657941715, "loss": 1.0036, "step": 3874 }, { "epoch": 0.6899928774928775, "grad_norm": 0.5427684783935547, "learning_rate": 0.00018579129558788716, "loss": 0.9769, "step": 3875 }, { "epoch": 0.6901709401709402, "grad_norm": 0.6229544281959534, "learning_rate": 0.00018578410291545495, "loss": 1.2848, "step": 3876 }, { "epoch": 0.6903490028490028, "grad_norm": 0.6602693200111389, "learning_rate": 0.00018577690856226147, "loss": 1.2713, "step": 3877 }, { "epoch": 0.6905270655270656, "grad_norm": 0.45884042978286743, "learning_rate": 0.0001857697125284476, "loss": 0.9143, "step": 3878 }, { "epoch": 0.6907051282051282, "grad_norm": 0.4956444203853607, "learning_rate": 0.00018576251481415443, "loss": 0.9646, "step": 3879 }, { "epoch": 0.6908831908831908, "grad_norm": 0.473561555147171, "learning_rate": 0.00018575531541952292, "loss": 0.843, "step": 3880 }, { "epoch": 0.6910612535612536, "grad_norm": 0.4676312506198883, "learning_rate": 
0.00018574811434469415, "loss": 0.9464, "step": 3881 }, { "epoch": 0.6912393162393162, "grad_norm": 0.5452045202255249, "learning_rate": 0.00018574091158980922, "loss": 0.985, "step": 3882 }, { "epoch": 0.6914173789173789, "grad_norm": 0.6274946331977844, "learning_rate": 0.0001857337071550092, "loss": 1.0357, "step": 3883 }, { "epoch": 0.6915954415954416, "grad_norm": 0.5533788800239563, "learning_rate": 0.00018572650104043531, "loss": 1.2636, "step": 3884 }, { "epoch": 0.6917735042735043, "grad_norm": 0.48312318325042725, "learning_rate": 0.00018571929324622872, "loss": 1.2402, "step": 3885 }, { "epoch": 0.6919515669515669, "grad_norm": 0.6087453961372375, "learning_rate": 0.00018571208377253062, "loss": 1.2961, "step": 3886 }, { "epoch": 0.6921296296296297, "grad_norm": 0.49156486988067627, "learning_rate": 0.00018570487261948234, "loss": 0.9585, "step": 3887 }, { "epoch": 0.6923076923076923, "grad_norm": 0.5200015902519226, "learning_rate": 0.0001856976597872251, "loss": 0.9274, "step": 3888 }, { "epoch": 0.6924857549857549, "grad_norm": 0.5185118913650513, "learning_rate": 0.0001856904452759002, "loss": 1.0015, "step": 3889 }, { "epoch": 0.6926638176638177, "grad_norm": 0.5859049558639526, "learning_rate": 0.00018568322908564904, "loss": 1.0959, "step": 3890 }, { "epoch": 0.6928418803418803, "grad_norm": 0.5882301926612854, "learning_rate": 0.00018567601121661302, "loss": 1.3214, "step": 3891 }, { "epoch": 0.6930199430199431, "grad_norm": 0.6475503444671631, "learning_rate": 0.0001856687916689335, "loss": 1.3265, "step": 3892 }, { "epoch": 0.6931980056980057, "grad_norm": 0.46175432205200195, "learning_rate": 0.000185661570442752, "loss": 0.8547, "step": 3893 }, { "epoch": 0.6933760683760684, "grad_norm": 0.5362716913223267, "learning_rate": 0.00018565434753820998, "loss": 0.974, "step": 3894 }, { "epoch": 0.6935541310541311, "grad_norm": 0.4317963719367981, "learning_rate": 0.00018564712295544896, "loss": 0.7653, "step": 3895 }, { "epoch": 0.6937321937321937, 
"grad_norm": 0.5679717659950256, "learning_rate": 0.00018563989669461047, "loss": 1.0691, "step": 3896 }, { "epoch": 0.6939102564102564, "grad_norm": 0.5058363676071167, "learning_rate": 0.00018563266875583608, "loss": 1.0665, "step": 3897 }, { "epoch": 0.6940883190883191, "grad_norm": 0.5365496277809143, "learning_rate": 0.00018562543913926746, "loss": 0.9963, "step": 3898 }, { "epoch": 0.6942663817663818, "grad_norm": 0.49945300817489624, "learning_rate": 0.0001856182078450462, "loss": 0.8668, "step": 3899 }, { "epoch": 0.6944444444444444, "grad_norm": 0.5869430899620056, "learning_rate": 0.00018561097487331405, "loss": 1.1942, "step": 3900 }, { "epoch": 0.6946225071225072, "grad_norm": 0.5188950300216675, "learning_rate": 0.0001856037402242127, "loss": 0.9493, "step": 3901 }, { "epoch": 0.6948005698005698, "grad_norm": 0.510788083076477, "learning_rate": 0.00018559650389788384, "loss": 0.9989, "step": 3902 }, { "epoch": 0.6949786324786325, "grad_norm": 0.5360601544380188, "learning_rate": 0.0001855892658944693, "loss": 1.2766, "step": 3903 }, { "epoch": 0.6951566951566952, "grad_norm": 0.522502601146698, "learning_rate": 0.00018558202621411093, "loss": 0.8774, "step": 3904 }, { "epoch": 0.6953347578347578, "grad_norm": 0.5330635905265808, "learning_rate": 0.00018557478485695052, "loss": 0.972, "step": 3905 }, { "epoch": 0.6955128205128205, "grad_norm": 0.5387479066848755, "learning_rate": 0.00018556754182312996, "loss": 1.0574, "step": 3906 }, { "epoch": 0.6956908831908832, "grad_norm": 0.5357984900474548, "learning_rate": 0.00018556029711279116, "loss": 1.396, "step": 3907 }, { "epoch": 0.6958689458689459, "grad_norm": 0.5647178292274475, "learning_rate": 0.00018555305072607612, "loss": 1.3304, "step": 3908 }, { "epoch": 0.6960470085470085, "grad_norm": 0.46460914611816406, "learning_rate": 0.00018554580266312673, "loss": 0.9574, "step": 3909 }, { "epoch": 0.6962250712250713, "grad_norm": 0.6206206679344177, "learning_rate": 0.00018553855292408503, "loss": 
1.1637, "step": 3910 }, { "epoch": 0.6964031339031339, "grad_norm": 0.5899842977523804, "learning_rate": 0.00018553130150909312, "loss": 1.1067, "step": 3911 }, { "epoch": 0.6965811965811965, "grad_norm": 0.47294262051582336, "learning_rate": 0.000185524048418293, "loss": 1.1516, "step": 3912 }, { "epoch": 0.6967592592592593, "grad_norm": 0.5791197419166565, "learning_rate": 0.00018551679365182684, "loss": 1.0007, "step": 3913 }, { "epoch": 0.6969373219373219, "grad_norm": 0.5678651332855225, "learning_rate": 0.00018550953720983672, "loss": 1.2698, "step": 3914 }, { "epoch": 0.6971153846153846, "grad_norm": 0.6509683728218079, "learning_rate": 0.0001855022790924649, "loss": 1.0354, "step": 3915 }, { "epoch": 0.6972934472934473, "grad_norm": 0.5176648497581482, "learning_rate": 0.0001854950192998535, "loss": 1.1243, "step": 3916 }, { "epoch": 0.69747150997151, "grad_norm": 0.520631730556488, "learning_rate": 0.00018548775783214477, "loss": 1.1371, "step": 3917 }, { "epoch": 0.6976495726495726, "grad_norm": 0.5408333539962769, "learning_rate": 0.00018548049468948108, "loss": 1.1185, "step": 3918 }, { "epoch": 0.6978276353276354, "grad_norm": 0.5423790216445923, "learning_rate": 0.00018547322987200461, "loss": 1.1539, "step": 3919 }, { "epoch": 0.698005698005698, "grad_norm": 0.5422113537788391, "learning_rate": 0.0001854659633798578, "loss": 1.171, "step": 3920 }, { "epoch": 0.6981837606837606, "grad_norm": 0.5113416314125061, "learning_rate": 0.00018545869521318292, "loss": 1.0597, "step": 3921 }, { "epoch": 0.6983618233618234, "grad_norm": 0.49901214241981506, "learning_rate": 0.00018545142537212248, "loss": 1.1043, "step": 3922 }, { "epoch": 0.698539886039886, "grad_norm": 0.6606622338294983, "learning_rate": 0.00018544415385681885, "loss": 1.1797, "step": 3923 }, { "epoch": 0.6987179487179487, "grad_norm": 0.4786234498023987, "learning_rate": 0.00018543688066741454, "loss": 0.9532, "step": 3924 }, { "epoch": 0.6988960113960114, "grad_norm": 0.5900700688362122, 
"learning_rate": 0.00018542960580405203, "loss": 1.1171, "step": 3925 }, { "epoch": 0.6990740740740741, "grad_norm": 0.53485506772995, "learning_rate": 0.00018542232926687383, "loss": 1.1535, "step": 3926 }, { "epoch": 0.6992521367521367, "grad_norm": 0.5269177556037903, "learning_rate": 0.00018541505105602255, "loss": 1.0287, "step": 3927 }, { "epoch": 0.6994301994301995, "grad_norm": 0.5185505151748657, "learning_rate": 0.0001854077711716408, "loss": 1.2526, "step": 3928 }, { "epoch": 0.6996082621082621, "grad_norm": 0.5615512132644653, "learning_rate": 0.00018540048961387115, "loss": 1.0189, "step": 3929 }, { "epoch": 0.6997863247863247, "grad_norm": 0.4492493271827698, "learning_rate": 0.00018539320638285637, "loss": 0.8917, "step": 3930 }, { "epoch": 0.6999643874643875, "grad_norm": 0.5062302947044373, "learning_rate": 0.00018538592147873906, "loss": 1.053, "step": 3931 }, { "epoch": 0.7001424501424501, "grad_norm": 0.5508798956871033, "learning_rate": 0.000185378634901662, "loss": 0.9638, "step": 3932 }, { "epoch": 0.7003205128205128, "grad_norm": 0.463980108499527, "learning_rate": 0.00018537134665176793, "loss": 1.0945, "step": 3933 }, { "epoch": 0.7004985754985755, "grad_norm": 0.5027088522911072, "learning_rate": 0.0001853640567291997, "loss": 1.1745, "step": 3934 }, { "epoch": 0.7006766381766382, "grad_norm": 0.5006551146507263, "learning_rate": 0.00018535676513410009, "loss": 0.8521, "step": 3935 }, { "epoch": 0.7008547008547008, "grad_norm": 0.5870724320411682, "learning_rate": 0.000185349471866612, "loss": 0.9197, "step": 3936 }, { "epoch": 0.7010327635327636, "grad_norm": 0.5030696392059326, "learning_rate": 0.00018534217692687825, "loss": 1.1049, "step": 3937 }, { "epoch": 0.7012108262108262, "grad_norm": 0.5212681889533997, "learning_rate": 0.00018533488031504186, "loss": 1.3397, "step": 3938 }, { "epoch": 0.7013888888888888, "grad_norm": 0.5649709105491638, "learning_rate": 0.0001853275820312458, "loss": 1.1994, "step": 3939 }, { "epoch": 
0.7015669515669516, "grad_norm": 0.4892779290676117, "learning_rate": 0.00018532028207563297, "loss": 1.1511, "step": 3940 }, { "epoch": 0.7017450142450142, "grad_norm": 0.4929407835006714, "learning_rate": 0.00018531298044834643, "loss": 1.0792, "step": 3941 }, { "epoch": 0.7019230769230769, "grad_norm": 0.5645940899848938, "learning_rate": 0.00018530567714952932, "loss": 1.0937, "step": 3942 }, { "epoch": 0.7021011396011396, "grad_norm": 0.5471178293228149, "learning_rate": 0.00018529837217932466, "loss": 1.193, "step": 3943 }, { "epoch": 0.7022792022792023, "grad_norm": 0.576627790927887, "learning_rate": 0.00018529106553787558, "loss": 1.1032, "step": 3944 }, { "epoch": 0.7024572649572649, "grad_norm": 0.5015735626220703, "learning_rate": 0.00018528375722532526, "loss": 1.066, "step": 3945 }, { "epoch": 0.7026353276353277, "grad_norm": 0.5315404534339905, "learning_rate": 0.00018527644724181683, "loss": 1.2059, "step": 3946 }, { "epoch": 0.7028133903133903, "grad_norm": 0.5516065955162048, "learning_rate": 0.0001852691355874936, "loss": 1.161, "step": 3947 }, { "epoch": 0.7029914529914529, "grad_norm": 0.5026212930679321, "learning_rate": 0.0001852618222624988, "loss": 1.2616, "step": 3948 }, { "epoch": 0.7031695156695157, "grad_norm": 0.49874603748321533, "learning_rate": 0.0001852545072669757, "loss": 0.805, "step": 3949 }, { "epoch": 0.7033475783475783, "grad_norm": 0.47698748111724854, "learning_rate": 0.00018524719060106763, "loss": 1.2321, "step": 3950 }, { "epoch": 0.7035256410256411, "grad_norm": 0.5201322436332703, "learning_rate": 0.00018523987226491792, "loss": 1.1577, "step": 3951 }, { "epoch": 0.7037037037037037, "grad_norm": 0.5506543517112732, "learning_rate": 0.00018523255225867002, "loss": 1.2289, "step": 3952 }, { "epoch": 0.7038817663817664, "grad_norm": 0.5691256523132324, "learning_rate": 0.0001852252305824673, "loss": 1.1945, "step": 3953 }, { "epoch": 0.7040598290598291, "grad_norm": 0.5324838757514954, "learning_rate": 
0.00018521790723645322, "loss": 1.1037, "step": 3954 }, { "epoch": 0.7042378917378918, "grad_norm": 0.5238786339759827, "learning_rate": 0.00018521058222077127, "loss": 1.2075, "step": 3955 }, { "epoch": 0.7044159544159544, "grad_norm": 0.4936453402042389, "learning_rate": 0.00018520325553556498, "loss": 1.0537, "step": 3956 }, { "epoch": 0.7045940170940171, "grad_norm": 0.6198282837867737, "learning_rate": 0.00018519592718097791, "loss": 1.0728, "step": 3957 }, { "epoch": 0.7047720797720798, "grad_norm": 0.44729140400886536, "learning_rate": 0.0001851885971571536, "loss": 0.8432, "step": 3958 }, { "epoch": 0.7049501424501424, "grad_norm": 0.5884211659431458, "learning_rate": 0.00018518126546423572, "loss": 0.9515, "step": 3959 }, { "epoch": 0.7051282051282052, "grad_norm": 0.5293807983398438, "learning_rate": 0.00018517393210236788, "loss": 1.1178, "step": 3960 }, { "epoch": 0.7053062678062678, "grad_norm": 0.6036825180053711, "learning_rate": 0.00018516659707169374, "loss": 1.0408, "step": 3961 }, { "epoch": 0.7054843304843305, "grad_norm": 0.5157122015953064, "learning_rate": 0.0001851592603723571, "loss": 1.2136, "step": 3962 }, { "epoch": 0.7056623931623932, "grad_norm": 0.5354781150817871, "learning_rate": 0.00018515192200450163, "loss": 0.7165, "step": 3963 }, { "epoch": 0.7058404558404558, "grad_norm": 0.6073734760284424, "learning_rate": 0.00018514458196827111, "loss": 1.3079, "step": 3964 }, { "epoch": 0.7060185185185185, "grad_norm": 0.4324839413166046, "learning_rate": 0.0001851372402638094, "loss": 0.7903, "step": 3965 }, { "epoch": 0.7061965811965812, "grad_norm": 0.6530333161354065, "learning_rate": 0.00018512989689126034, "loss": 1.3179, "step": 3966 }, { "epoch": 0.7063746438746439, "grad_norm": 0.5500404238700867, "learning_rate": 0.00018512255185076782, "loss": 1.0624, "step": 3967 }, { "epoch": 0.7065527065527065, "grad_norm": 0.6277863383293152, "learning_rate": 0.00018511520514247567, "loss": 1.1056, "step": 3968 }, { "epoch": 
0.7067307692307693, "grad_norm": 0.580544650554657, "learning_rate": 0.0001851078567665279, "loss": 0.9849, "step": 3969 }, { "epoch": 0.7069088319088319, "grad_norm": 0.4880999028682709, "learning_rate": 0.00018510050672306848, "loss": 1.0185, "step": 3970 }, { "epoch": 0.7070868945868946, "grad_norm": 0.4919959306716919, "learning_rate": 0.0001850931550122414, "loss": 1.0334, "step": 3971 }, { "epoch": 0.7072649572649573, "grad_norm": 0.6001213192939758, "learning_rate": 0.0001850858016341907, "loss": 1.0729, "step": 3972 }, { "epoch": 0.70744301994302, "grad_norm": 0.538690447807312, "learning_rate": 0.00018507844658906052, "loss": 1.0733, "step": 3973 }, { "epoch": 0.7076210826210826, "grad_norm": 0.5427643656730652, "learning_rate": 0.00018507108987699487, "loss": 1.1207, "step": 3974 }, { "epoch": 0.7077991452991453, "grad_norm": 0.43014347553253174, "learning_rate": 0.00018506373149813795, "loss": 0.7958, "step": 3975 }, { "epoch": 0.707977207977208, "grad_norm": 0.56591796875, "learning_rate": 0.00018505637145263394, "loss": 1.2199, "step": 3976 }, { "epoch": 0.7081552706552706, "grad_norm": 0.59147047996521, "learning_rate": 0.000185049009740627, "loss": 1.2354, "step": 3977 }, { "epoch": 0.7083333333333334, "grad_norm": 0.5078346133232117, "learning_rate": 0.00018504164636226137, "loss": 0.976, "step": 3978 }, { "epoch": 0.708511396011396, "grad_norm": 0.533302366733551, "learning_rate": 0.00018503428131768135, "loss": 0.9653, "step": 3979 }, { "epoch": 0.7086894586894587, "grad_norm": 0.4985341727733612, "learning_rate": 0.00018502691460703122, "loss": 1.1485, "step": 3980 }, { "epoch": 0.7088675213675214, "grad_norm": 0.5143141150474548, "learning_rate": 0.00018501954623045532, "loss": 1.148, "step": 3981 }, { "epoch": 0.709045584045584, "grad_norm": 0.507189154624939, "learning_rate": 0.00018501217618809804, "loss": 0.9306, "step": 3982 }, { "epoch": 0.7092236467236467, "grad_norm": 0.5246604084968567, "learning_rate": 0.00018500480448010377, "loss": 
0.9116, "step": 3983 }, { "epoch": 0.7094017094017094, "grad_norm": 0.5321049094200134, "learning_rate": 0.00018499743110661693, "loss": 0.9607, "step": 3984 }, { "epoch": 0.7095797720797721, "grad_norm": 0.62645423412323, "learning_rate": 0.000184990056067782, "loss": 1.5834, "step": 3985 }, { "epoch": 0.7097578347578347, "grad_norm": 0.486557275056839, "learning_rate": 0.0001849826793637435, "loss": 1.0598, "step": 3986 }, { "epoch": 0.7099358974358975, "grad_norm": 0.5122783184051514, "learning_rate": 0.0001849753009946459, "loss": 1.2213, "step": 3987 }, { "epoch": 0.7101139601139601, "grad_norm": 0.4864068627357483, "learning_rate": 0.0001849679209606338, "loss": 1.2708, "step": 3988 }, { "epoch": 0.7102920227920227, "grad_norm": 0.5860990881919861, "learning_rate": 0.00018496053926185183, "loss": 1.2421, "step": 3989 }, { "epoch": 0.7104700854700855, "grad_norm": 0.471194326877594, "learning_rate": 0.00018495315589844453, "loss": 0.879, "step": 3990 }, { "epoch": 0.7106481481481481, "grad_norm": 0.5626323819160461, "learning_rate": 0.00018494577087055662, "loss": 1.1297, "step": 3991 }, { "epoch": 0.7108262108262108, "grad_norm": 0.4706762135028839, "learning_rate": 0.0001849383841783328, "loss": 1.0444, "step": 3992 }, { "epoch": 0.7110042735042735, "grad_norm": 0.5776444673538208, "learning_rate": 0.00018493099582191783, "loss": 1.1773, "step": 3993 }, { "epoch": 0.7111823361823362, "grad_norm": 0.5493253469467163, "learning_rate": 0.00018492360580145637, "loss": 1.0354, "step": 3994 }, { "epoch": 0.7113603988603988, "grad_norm": 0.5328514575958252, "learning_rate": 0.0001849162141170933, "loss": 0.9251, "step": 3995 }, { "epoch": 0.7115384615384616, "grad_norm": 0.5814893841743469, "learning_rate": 0.0001849088207689734, "loss": 1.1066, "step": 3996 }, { "epoch": 0.7117165242165242, "grad_norm": 0.5476071834564209, "learning_rate": 0.00018490142575724154, "loss": 1.1613, "step": 3997 }, { "epoch": 0.7118945868945868, "grad_norm": 0.5216463208198547, 
"learning_rate": 0.00018489402908204258, "loss": 1.2574, "step": 3998 }, { "epoch": 0.7120726495726496, "grad_norm": 0.5110020637512207, "learning_rate": 0.00018488663074352153, "loss": 1.0663, "step": 3999 }, { "epoch": 0.7122507122507122, "grad_norm": 0.448090523481369, "learning_rate": 0.00018487923074182326, "loss": 0.6687, "step": 4000 }, { "epoch": 0.7124287749287749, "grad_norm": 0.4980565011501312, "learning_rate": 0.00018487182907709279, "loss": 1.2365, "step": 4001 }, { "epoch": 0.7126068376068376, "grad_norm": 0.485831081867218, "learning_rate": 0.00018486442574947511, "loss": 1.0941, "step": 4002 }, { "epoch": 0.7127849002849003, "grad_norm": 0.4955040216445923, "learning_rate": 0.00018485702075911534, "loss": 1.248, "step": 4003 }, { "epoch": 0.7129629629629629, "grad_norm": 0.5168375968933105, "learning_rate": 0.00018484961410615845, "loss": 1.1118, "step": 4004 }, { "epoch": 0.7131410256410257, "grad_norm": 0.5255687832832336, "learning_rate": 0.00018484220579074968, "loss": 1.0558, "step": 4005 }, { "epoch": 0.7133190883190883, "grad_norm": 0.5502219796180725, "learning_rate": 0.00018483479581303416, "loss": 1.1604, "step": 4006 }, { "epoch": 0.7134971509971509, "grad_norm": 0.5155881643295288, "learning_rate": 0.000184827384173157, "loss": 0.8246, "step": 4007 }, { "epoch": 0.7136752136752137, "grad_norm": 0.5321542024612427, "learning_rate": 0.0001848199708712635, "loss": 1.2058, "step": 4008 }, { "epoch": 0.7138532763532763, "grad_norm": 0.4929848313331604, "learning_rate": 0.00018481255590749884, "loss": 1.4023, "step": 4009 }, { "epoch": 0.7140313390313391, "grad_norm": 0.5070937871932983, "learning_rate": 0.00018480513928200836, "loss": 1.0561, "step": 4010 }, { "epoch": 0.7142094017094017, "grad_norm": 0.5750083327293396, "learning_rate": 0.00018479772099493728, "loss": 1.0276, "step": 4011 }, { "epoch": 0.7143874643874644, "grad_norm": 0.5265933275222778, "learning_rate": 0.00018479030104643108, "loss": 1.0295, "step": 4012 }, { "epoch": 
0.7145655270655271, "grad_norm": 0.526830792427063, "learning_rate": 0.00018478287943663504, "loss": 1.0157, "step": 4013 }, { "epoch": 0.7147435897435898, "grad_norm": 0.5344091653823853, "learning_rate": 0.00018477545616569458, "loss": 1.1997, "step": 4014 }, { "epoch": 0.7149216524216524, "grad_norm": 0.4935445189476013, "learning_rate": 0.0001847680312337552, "loss": 1.1858, "step": 4015 }, { "epoch": 0.7150997150997151, "grad_norm": 0.5291212797164917, "learning_rate": 0.0001847606046409623, "loss": 0.926, "step": 4016 }, { "epoch": 0.7152777777777778, "grad_norm": 0.559050977230072, "learning_rate": 0.00018475317638746142, "loss": 1.0947, "step": 4017 }, { "epoch": 0.7154558404558404, "grad_norm": 0.4566570222377777, "learning_rate": 0.00018474574647339814, "loss": 1.0334, "step": 4018 }, { "epoch": 0.7156339031339032, "grad_norm": 0.5156155824661255, "learning_rate": 0.000184738314898918, "loss": 1.0076, "step": 4019 }, { "epoch": 0.7158119658119658, "grad_norm": 0.5008716583251953, "learning_rate": 0.00018473088166416662, "loss": 1.0378, "step": 4020 }, { "epoch": 0.7159900284900285, "grad_norm": 0.49556368589401245, "learning_rate": 0.0001847234467692896, "loss": 1.15, "step": 4021 }, { "epoch": 0.7161680911680912, "grad_norm": 0.5464680790901184, "learning_rate": 0.00018471601021443265, "loss": 1.2975, "step": 4022 }, { "epoch": 0.7163461538461539, "grad_norm": 0.6291980147361755, "learning_rate": 0.00018470857199974144, "loss": 1.05, "step": 4023 }, { "epoch": 0.7165242165242165, "grad_norm": 0.5566631555557251, "learning_rate": 0.00018470113212536176, "loss": 1.1296, "step": 4024 }, { "epoch": 0.7167022792022792, "grad_norm": 0.5569562911987305, "learning_rate": 0.00018469369059143933, "loss": 1.2484, "step": 4025 }, { "epoch": 0.7168803418803419, "grad_norm": 0.5804716944694519, "learning_rate": 0.00018468624739812, "loss": 1.0547, "step": 4026 }, { "epoch": 0.7170584045584045, "grad_norm": 0.6316802501678467, "learning_rate": 0.00018467880254554952, 
"loss": 1.1188, "step": 4027 }, { "epoch": 0.7172364672364673, "grad_norm": 0.6131419539451599, "learning_rate": 0.00018467135603387385, "loss": 1.1662, "step": 4028 }, { "epoch": 0.7174145299145299, "grad_norm": 0.4703124761581421, "learning_rate": 0.00018466390786323883, "loss": 1.038, "step": 4029 }, { "epoch": 0.7175925925925926, "grad_norm": 0.5718469023704529, "learning_rate": 0.0001846564580337904, "loss": 1.0786, "step": 4030 }, { "epoch": 0.7177706552706553, "grad_norm": 0.5227612853050232, "learning_rate": 0.00018464900654567457, "loss": 1.0561, "step": 4031 }, { "epoch": 0.717948717948718, "grad_norm": 0.5800358057022095, "learning_rate": 0.00018464155339903727, "loss": 1.0944, "step": 4032 }, { "epoch": 0.7181267806267806, "grad_norm": 0.5562314987182617, "learning_rate": 0.00018463409859402455, "loss": 0.8573, "step": 4033 }, { "epoch": 0.7183048433048433, "grad_norm": 0.6420153379440308, "learning_rate": 0.0001846266421307825, "loss": 1.088, "step": 4034 }, { "epoch": 0.718482905982906, "grad_norm": 0.4745902717113495, "learning_rate": 0.00018461918400945718, "loss": 1.1679, "step": 4035 }, { "epoch": 0.7186609686609686, "grad_norm": 0.5070300102233887, "learning_rate": 0.00018461172423019475, "loss": 1.1984, "step": 4036 }, { "epoch": 0.7188390313390314, "grad_norm": 0.5339375138282776, "learning_rate": 0.00018460426279314133, "loss": 1.3038, "step": 4037 }, { "epoch": 0.719017094017094, "grad_norm": 0.5947147607803345, "learning_rate": 0.00018459679969844313, "loss": 1.0103, "step": 4038 }, { "epoch": 0.7191951566951567, "grad_norm": 0.5493791699409485, "learning_rate": 0.00018458933494624642, "loss": 1.1001, "step": 4039 }, { "epoch": 0.7193732193732194, "grad_norm": 0.5700310468673706, "learning_rate": 0.00018458186853669736, "loss": 0.9006, "step": 4040 }, { "epoch": 0.719551282051282, "grad_norm": 0.60371994972229, "learning_rate": 0.0001845744004699423, "loss": 1.3001, "step": 4041 }, { "epoch": 0.7197293447293447, "grad_norm": 
0.5469261407852173, "learning_rate": 0.00018456693074612757, "loss": 1.1745, "step": 4042 }, { "epoch": 0.7199074074074074, "grad_norm": 0.5179165601730347, "learning_rate": 0.00018455945936539947, "loss": 0.9883, "step": 4043 }, { "epoch": 0.7200854700854701, "grad_norm": 0.5396696329116821, "learning_rate": 0.00018455198632790447, "loss": 1.1277, "step": 4044 }, { "epoch": 0.7202635327635327, "grad_norm": 0.4559909403324127, "learning_rate": 0.00018454451163378888, "loss": 0.9644, "step": 4045 }, { "epoch": 0.7204415954415955, "grad_norm": 0.49863892793655396, "learning_rate": 0.00018453703528319927, "loss": 1.1276, "step": 4046 }, { "epoch": 0.7206196581196581, "grad_norm": 0.4790710508823395, "learning_rate": 0.000184529557276282, "loss": 0.9443, "step": 4047 }, { "epoch": 0.7207977207977208, "grad_norm": 0.541999876499176, "learning_rate": 0.0001845220776131837, "loss": 1.0681, "step": 4048 }, { "epoch": 0.7209757834757835, "grad_norm": 0.5119109153747559, "learning_rate": 0.00018451459629405088, "loss": 1.2078, "step": 4049 }, { "epoch": 0.7211538461538461, "grad_norm": 0.6141307353973389, "learning_rate": 0.00018450711331903006, "loss": 1.1071, "step": 4050 }, { "epoch": 0.7213319088319088, "grad_norm": 0.48679864406585693, "learning_rate": 0.00018449962868826795, "loss": 0.9713, "step": 4051 }, { "epoch": 0.7215099715099715, "grad_norm": 0.5548661947250366, "learning_rate": 0.0001844921424019111, "loss": 1.2099, "step": 4052 }, { "epoch": 0.7216880341880342, "grad_norm": 0.5000107884407043, "learning_rate": 0.00018448465446010626, "loss": 1.0184, "step": 4053 }, { "epoch": 0.7218660968660968, "grad_norm": 0.6131454110145569, "learning_rate": 0.00018447716486300013, "loss": 1.2581, "step": 4054 }, { "epoch": 0.7220441595441596, "grad_norm": 0.5145987868309021, "learning_rate": 0.0001844696736107394, "loss": 1.1646, "step": 4055 }, { "epoch": 0.7222222222222222, "grad_norm": 0.4361337125301361, "learning_rate": 0.00018446218070347094, "loss": 0.8239, "step": 
4056 }, { "epoch": 0.7224002849002849, "grad_norm": 0.5549173355102539, "learning_rate": 0.00018445468614134146, "loss": 1.1935, "step": 4057 }, { "epoch": 0.7225783475783476, "grad_norm": 0.5569297671318054, "learning_rate": 0.00018444718992449789, "loss": 1.0137, "step": 4058 }, { "epoch": 0.7227564102564102, "grad_norm": 0.44866305589675903, "learning_rate": 0.00018443969205308704, "loss": 0.987, "step": 4059 }, { "epoch": 0.7229344729344729, "grad_norm": 0.5142943263053894, "learning_rate": 0.0001844321925272558, "loss": 1.0837, "step": 4060 }, { "epoch": 0.7231125356125356, "grad_norm": 0.4922119379043579, "learning_rate": 0.0001844246913471512, "loss": 0.8477, "step": 4061 }, { "epoch": 0.7232905982905983, "grad_norm": 0.5245375633239746, "learning_rate": 0.0001844171885129201, "loss": 0.9985, "step": 4062 }, { "epoch": 0.7234686609686609, "grad_norm": 0.45562678575515747, "learning_rate": 0.00018440968402470956, "loss": 0.8678, "step": 4063 }, { "epoch": 0.7236467236467237, "grad_norm": 0.5388376712799072, "learning_rate": 0.0001844021778826666, "loss": 1.0586, "step": 4064 }, { "epoch": 0.7238247863247863, "grad_norm": 0.48945263028144836, "learning_rate": 0.00018439467008693833, "loss": 1.0547, "step": 4065 }, { "epoch": 0.7240028490028491, "grad_norm": 0.5202330350875854, "learning_rate": 0.00018438716063767178, "loss": 1.3142, "step": 4066 }, { "epoch": 0.7241809116809117, "grad_norm": 0.5432567000389099, "learning_rate": 0.00018437964953501413, "loss": 1.0192, "step": 4067 }, { "epoch": 0.7243589743589743, "grad_norm": 0.5220325589179993, "learning_rate": 0.00018437213677911253, "loss": 1.0904, "step": 4068 }, { "epoch": 0.7245370370370371, "grad_norm": 0.45711690187454224, "learning_rate": 0.00018436462237011417, "loss": 1.0417, "step": 4069 }, { "epoch": 0.7247150997150997, "grad_norm": 0.560778021812439, "learning_rate": 0.0001843571063081663, "loss": 1.2316, "step": 4070 }, { "epoch": 0.7248931623931624, "grad_norm": 0.591533362865448, 
"learning_rate": 0.0001843495885934162, "loss": 1.0294, "step": 4071 }, { "epoch": 0.7250712250712251, "grad_norm": 0.5550443530082703, "learning_rate": 0.00018434206922601106, "loss": 1.0162, "step": 4072 }, { "epoch": 0.7252492877492878, "grad_norm": 0.5744053721427917, "learning_rate": 0.00018433454820609833, "loss": 1.2774, "step": 4073 }, { "epoch": 0.7254273504273504, "grad_norm": 0.6210703253746033, "learning_rate": 0.0001843270255338253, "loss": 1.2526, "step": 4074 }, { "epoch": 0.7256054131054132, "grad_norm": 0.49684277176856995, "learning_rate": 0.0001843195012093394, "loss": 1.0786, "step": 4075 }, { "epoch": 0.7257834757834758, "grad_norm": 0.5851606130599976, "learning_rate": 0.00018431197523278802, "loss": 1.14, "step": 4076 }, { "epoch": 0.7259615384615384, "grad_norm": 0.5494425296783447, "learning_rate": 0.00018430444760431862, "loss": 1.211, "step": 4077 }, { "epoch": 0.7261396011396012, "grad_norm": 0.5247658491134644, "learning_rate": 0.00018429691832407867, "loss": 0.8031, "step": 4078 }, { "epoch": 0.7263176638176638, "grad_norm": 0.5012249946594238, "learning_rate": 0.00018428938739221574, "loss": 1.1258, "step": 4079 }, { "epoch": 0.7264957264957265, "grad_norm": 0.5226427912712097, "learning_rate": 0.0001842818548088774, "loss": 1.0029, "step": 4080 }, { "epoch": 0.7266737891737892, "grad_norm": 0.45008543133735657, "learning_rate": 0.00018427432057421114, "loss": 1.0681, "step": 4081 }, { "epoch": 0.7268518518518519, "grad_norm": 0.5127285122871399, "learning_rate": 0.00018426678468836467, "loss": 1.1069, "step": 4082 }, { "epoch": 0.7270299145299145, "grad_norm": 0.5406150221824646, "learning_rate": 0.0001842592471514856, "loss": 1.052, "step": 4083 }, { "epoch": 0.7272079772079773, "grad_norm": 0.5001157522201538, "learning_rate": 0.0001842517079637216, "loss": 0.9157, "step": 4084 }, { "epoch": 0.7273860398860399, "grad_norm": 0.6169779300689697, "learning_rate": 0.00018424416712522042, "loss": 1.3133, "step": 4085 }, { "epoch": 
0.7275641025641025, "grad_norm": 0.4891316890716553, "learning_rate": 0.00018423662463612974, "loss": 0.9505, "step": 4086 }, { "epoch": 0.7277421652421653, "grad_norm": 0.5883708596229553, "learning_rate": 0.00018422908049659743, "loss": 1.2797, "step": 4087 }, { "epoch": 0.7279202279202279, "grad_norm": 0.6679072976112366, "learning_rate": 0.00018422153470677125, "loss": 1.1096, "step": 4088 }, { "epoch": 0.7280982905982906, "grad_norm": 0.5178479552268982, "learning_rate": 0.00018421398726679904, "loss": 1.0299, "step": 4089 }, { "epoch": 0.7282763532763533, "grad_norm": 0.6343900561332703, "learning_rate": 0.0001842064381768287, "loss": 1.2983, "step": 4090 }, { "epoch": 0.728454415954416, "grad_norm": 0.43816515803337097, "learning_rate": 0.0001841988874370081, "loss": 0.9452, "step": 4091 }, { "epoch": 0.7286324786324786, "grad_norm": 0.579790472984314, "learning_rate": 0.00018419133504748528, "loss": 1.1037, "step": 4092 }, { "epoch": 0.7288105413105413, "grad_norm": 0.571374773979187, "learning_rate": 0.00018418378100840807, "loss": 1.1655, "step": 4093 }, { "epoch": 0.728988603988604, "grad_norm": 0.5163514018058777, "learning_rate": 0.0001841762253199246, "loss": 1.1579, "step": 4094 }, { "epoch": 0.7291666666666666, "grad_norm": 0.6553022265434265, "learning_rate": 0.0001841686679821828, "loss": 0.9664, "step": 4095 }, { "epoch": 0.7293447293447294, "grad_norm": 0.5072969198226929, "learning_rate": 0.00018416110899533084, "loss": 0.9416, "step": 4096 }, { "epoch": 0.729522792022792, "grad_norm": 0.5103251338005066, "learning_rate": 0.00018415354835951675, "loss": 1.0715, "step": 4097 }, { "epoch": 0.7297008547008547, "grad_norm": 0.49752289056777954, "learning_rate": 0.00018414598607488874, "loss": 1.1848, "step": 4098 }, { "epoch": 0.7298789173789174, "grad_norm": 0.5361882448196411, "learning_rate": 0.00018413842214159488, "loss": 1.1035, "step": 4099 }, { "epoch": 0.73005698005698, "grad_norm": 0.5167670249938965, "learning_rate": 
0.00018413085655978343, "loss": 1.0015, "step": 4100 }, { "epoch": 0.7302350427350427, "grad_norm": 0.5930629372596741, "learning_rate": 0.00018412328932960263, "loss": 0.9766, "step": 4101 }, { "epoch": 0.7304131054131054, "grad_norm": 0.5234778523445129, "learning_rate": 0.00018411572045120073, "loss": 1.0317, "step": 4102 }, { "epoch": 0.7305911680911681, "grad_norm": 0.5361374020576477, "learning_rate": 0.000184108149924726, "loss": 1.1228, "step": 4103 }, { "epoch": 0.7307692307692307, "grad_norm": 0.5845770239830017, "learning_rate": 0.0001841005777503268, "loss": 0.9541, "step": 4104 }, { "epoch": 0.7309472934472935, "grad_norm": 0.49320483207702637, "learning_rate": 0.0001840930039281515, "loss": 0.9445, "step": 4105 }, { "epoch": 0.7311253561253561, "grad_norm": 0.5391250252723694, "learning_rate": 0.00018408542845834845, "loss": 1.1983, "step": 4106 }, { "epoch": 0.7313034188034188, "grad_norm": 0.4890393316745758, "learning_rate": 0.00018407785134106613, "loss": 0.8353, "step": 4107 }, { "epoch": 0.7314814814814815, "grad_norm": 0.5839747190475464, "learning_rate": 0.00018407027257645296, "loss": 1.4074, "step": 4108 }, { "epoch": 0.7316595441595442, "grad_norm": 0.5957708358764648, "learning_rate": 0.0001840626921646574, "loss": 1.1032, "step": 4109 }, { "epoch": 0.7318376068376068, "grad_norm": 0.5029017925262451, "learning_rate": 0.00018405511010582805, "loss": 1.095, "step": 4110 }, { "epoch": 0.7320156695156695, "grad_norm": 0.6054347157478333, "learning_rate": 0.00018404752640011345, "loss": 1.0366, "step": 4111 }, { "epoch": 0.7321937321937322, "grad_norm": 0.5476830005645752, "learning_rate": 0.00018403994104766212, "loss": 1.0976, "step": 4112 }, { "epoch": 0.7323717948717948, "grad_norm": 0.5000962615013123, "learning_rate": 0.00018403235404862277, "loss": 1.0809, "step": 4113 }, { "epoch": 0.7325498575498576, "grad_norm": 0.5119251012802124, "learning_rate": 0.00018402476540314394, "loss": 1.0176, "step": 4114 }, { "epoch": 0.7327279202279202, 
"grad_norm": 0.5825830698013306, "learning_rate": 0.00018401717511137445, "loss": 1.2357, "step": 4115 }, { "epoch": 0.7329059829059829, "grad_norm": 0.5702941417694092, "learning_rate": 0.0001840095831734629, "loss": 1.1549, "step": 4116 }, { "epoch": 0.7330840455840456, "grad_norm": 0.5660699605941772, "learning_rate": 0.00018400198958955807, "loss": 1.1778, "step": 4117 }, { "epoch": 0.7332621082621082, "grad_norm": 0.5241161584854126, "learning_rate": 0.0001839943943598088, "loss": 0.8587, "step": 4118 }, { "epoch": 0.7334401709401709, "grad_norm": 0.581194281578064, "learning_rate": 0.0001839867974843638, "loss": 1.2169, "step": 4119 }, { "epoch": 0.7336182336182336, "grad_norm": 0.4342379570007324, "learning_rate": 0.00018397919896337198, "loss": 0.9182, "step": 4120 }, { "epoch": 0.7337962962962963, "grad_norm": 0.5708567500114441, "learning_rate": 0.00018397159879698224, "loss": 1.1781, "step": 4121 }, { "epoch": 0.7339743589743589, "grad_norm": 0.5827265977859497, "learning_rate": 0.00018396399698534344, "loss": 1.2905, "step": 4122 }, { "epoch": 0.7341524216524217, "grad_norm": 0.5274056792259216, "learning_rate": 0.00018395639352860457, "loss": 1.1786, "step": 4123 }, { "epoch": 0.7343304843304843, "grad_norm": 0.5094266533851624, "learning_rate": 0.00018394878842691452, "loss": 1.2016, "step": 4124 }, { "epoch": 0.7345085470085471, "grad_norm": 0.48779475688934326, "learning_rate": 0.0001839411816804224, "loss": 1.0562, "step": 4125 }, { "epoch": 0.7346866096866097, "grad_norm": 0.5805709958076477, "learning_rate": 0.00018393357328927716, "loss": 1.1705, "step": 4126 }, { "epoch": 0.7348646723646723, "grad_norm": 0.4910700023174286, "learning_rate": 0.00018392596325362791, "loss": 1.0682, "step": 4127 }, { "epoch": 0.7350427350427351, "grad_norm": 0.5297428369522095, "learning_rate": 0.0001839183515736238, "loss": 0.9505, "step": 4128 }, { "epoch": 0.7352207977207977, "grad_norm": 0.45442086458206177, "learning_rate": 0.00018391073824941385, "loss": 
0.9548, "step": 4129 }, { "epoch": 0.7353988603988604, "grad_norm": 0.49299946427345276, "learning_rate": 0.00018390312328114733, "loss": 1.0868, "step": 4130 }, { "epoch": 0.7355769230769231, "grad_norm": 0.4839940369129181, "learning_rate": 0.0001838955066689734, "loss": 0.9565, "step": 4131 }, { "epoch": 0.7357549857549858, "grad_norm": 0.48600608110427856, "learning_rate": 0.00018388788841304128, "loss": 1.2353, "step": 4132 }, { "epoch": 0.7359330484330484, "grad_norm": 0.4893583357334137, "learning_rate": 0.0001838802685135003, "loss": 0.9595, "step": 4133 }, { "epoch": 0.7361111111111112, "grad_norm": 0.4587398171424866, "learning_rate": 0.00018387264697049963, "loss": 1.1222, "step": 4134 }, { "epoch": 0.7362891737891738, "grad_norm": 0.5361055731773376, "learning_rate": 0.00018386502378418872, "loss": 1.3304, "step": 4135 }, { "epoch": 0.7364672364672364, "grad_norm": 0.5556629300117493, "learning_rate": 0.00018385739895471686, "loss": 1.0358, "step": 4136 }, { "epoch": 0.7366452991452992, "grad_norm": 0.45555856823921204, "learning_rate": 0.00018384977248223346, "loss": 1.0081, "step": 4137 }, { "epoch": 0.7368233618233618, "grad_norm": 0.5606052875518799, "learning_rate": 0.00018384214436688797, "loss": 0.9367, "step": 4138 }, { "epoch": 0.7370014245014245, "grad_norm": 0.5428356528282166, "learning_rate": 0.00018383451460882982, "loss": 1.1391, "step": 4139 }, { "epoch": 0.7371794871794872, "grad_norm": 0.4891330897808075, "learning_rate": 0.00018382688320820853, "loss": 0.9805, "step": 4140 }, { "epoch": 0.7373575498575499, "grad_norm": 0.5407996773719788, "learning_rate": 0.0001838192501651736, "loss": 1.0532, "step": 4141 }, { "epoch": 0.7375356125356125, "grad_norm": 0.5241971611976624, "learning_rate": 0.00018381161547987454, "loss": 0.9509, "step": 4142 }, { "epoch": 0.7377136752136753, "grad_norm": 0.5370210409164429, "learning_rate": 0.000183803979152461, "loss": 1.2342, "step": 4143 }, { "epoch": 0.7378917378917379, "grad_norm": 
0.5470060706138611, "learning_rate": 0.00018379634118308259, "loss": 0.9621, "step": 4144 }, { "epoch": 0.7380698005698005, "grad_norm": 0.546313464641571, "learning_rate": 0.00018378870157188893, "loss": 1.1253, "step": 4145 }, { "epoch": 0.7382478632478633, "grad_norm": 0.502027153968811, "learning_rate": 0.00018378106031902974, "loss": 1.1919, "step": 4146 }, { "epoch": 0.7384259259259259, "grad_norm": 0.5282283425331116, "learning_rate": 0.0001837734174246547, "loss": 1.0088, "step": 4147 }, { "epoch": 0.7386039886039886, "grad_norm": 0.5152897238731384, "learning_rate": 0.00018376577288891355, "loss": 1.0813, "step": 4148 }, { "epoch": 0.7387820512820513, "grad_norm": 0.5002804398536682, "learning_rate": 0.0001837581267119561, "loss": 0.9797, "step": 4149 }, { "epoch": 0.738960113960114, "grad_norm": 0.5698176026344299, "learning_rate": 0.00018375047889393215, "loss": 1.1099, "step": 4150 }, { "epoch": 0.7391381766381766, "grad_norm": 0.5384604930877686, "learning_rate": 0.00018374282943499156, "loss": 1.1944, "step": 4151 }, { "epoch": 0.7393162393162394, "grad_norm": 0.5483044385910034, "learning_rate": 0.00018373517833528418, "loss": 1.1734, "step": 4152 }, { "epoch": 0.739494301994302, "grad_norm": 0.4824066162109375, "learning_rate": 0.0001837275255949599, "loss": 0.9515, "step": 4153 }, { "epoch": 0.7396723646723646, "grad_norm": 0.45413634181022644, "learning_rate": 0.00018371987121416873, "loss": 0.7534, "step": 4154 }, { "epoch": 0.7398504273504274, "grad_norm": 0.5874246954917908, "learning_rate": 0.00018371221519306055, "loss": 0.9464, "step": 4155 }, { "epoch": 0.74002849002849, "grad_norm": 0.5219913125038147, "learning_rate": 0.00018370455753178544, "loss": 1.0494, "step": 4156 }, { "epoch": 0.7402065527065527, "grad_norm": 0.5937709212303162, "learning_rate": 0.00018369689823049341, "loss": 1.0529, "step": 4157 }, { "epoch": 0.7403846153846154, "grad_norm": 0.5204295516014099, "learning_rate": 0.00018368923728933449, "loss": 1.0602, "step": 4158 
}, { "epoch": 0.7405626780626781, "grad_norm": 0.5422890186309814, "learning_rate": 0.00018368157470845885, "loss": 0.9261, "step": 4159 }, { "epoch": 0.7407407407407407, "grad_norm": 0.6163852214813232, "learning_rate": 0.00018367391048801655, "loss": 1.2771, "step": 4160 }, { "epoch": 0.7409188034188035, "grad_norm": 0.5070751309394836, "learning_rate": 0.00018366624462815785, "loss": 1.0401, "step": 4161 }, { "epoch": 0.7410968660968661, "grad_norm": 0.4477100968360901, "learning_rate": 0.00018365857712903283, "loss": 1.1463, "step": 4162 }, { "epoch": 0.7412749287749287, "grad_norm": 0.5421462655067444, "learning_rate": 0.0001836509079907918, "loss": 0.9373, "step": 4163 }, { "epoch": 0.7414529914529915, "grad_norm": 0.6162141561508179, "learning_rate": 0.000183643237213585, "loss": 1.1827, "step": 4164 }, { "epoch": 0.7416310541310541, "grad_norm": 0.5653836131095886, "learning_rate": 0.00018363556479756272, "loss": 1.0689, "step": 4165 }, { "epoch": 0.7418091168091168, "grad_norm": 0.57053542137146, "learning_rate": 0.00018362789074287527, "loss": 1.0289, "step": 4166 }, { "epoch": 0.7419871794871795, "grad_norm": 0.5603055953979492, "learning_rate": 0.00018362021504967304, "loss": 1.1926, "step": 4167 }, { "epoch": 0.7421652421652422, "grad_norm": 0.5460166335105896, "learning_rate": 0.0001836125377181064, "loss": 1.1488, "step": 4168 }, { "epoch": 0.7423433048433048, "grad_norm": 0.5097107887268066, "learning_rate": 0.00018360485874832579, "loss": 1.0781, "step": 4169 }, { "epoch": 0.7425213675213675, "grad_norm": 0.6280624270439148, "learning_rate": 0.00018359717814048164, "loss": 1.3625, "step": 4170 }, { "epoch": 0.7426994301994302, "grad_norm": 0.4528210759162903, "learning_rate": 0.0001835894958947244, "loss": 0.8417, "step": 4171 }, { "epoch": 0.7428774928774928, "grad_norm": 0.48735132813453674, "learning_rate": 0.00018358181201120468, "loss": 0.9544, "step": 4172 }, { "epoch": 0.7430555555555556, "grad_norm": 0.48388174176216125, "learning_rate": 
0.00018357412649007296, "loss": 1.0663, "step": 4173 }, { "epoch": 0.7432336182336182, "grad_norm": 0.5435357689857483, "learning_rate": 0.00018356643933147986, "loss": 1.2074, "step": 4174 }, { "epoch": 0.7434116809116809, "grad_norm": 0.49890074133872986, "learning_rate": 0.00018355875053557594, "loss": 1.1322, "step": 4175 }, { "epoch": 0.7435897435897436, "grad_norm": 0.5680708885192871, "learning_rate": 0.0001835510601025119, "loss": 1.1964, "step": 4176 }, { "epoch": 0.7437678062678063, "grad_norm": 0.5002360939979553, "learning_rate": 0.00018354336803243842, "loss": 1.1396, "step": 4177 }, { "epoch": 0.7439458689458689, "grad_norm": 0.5202965140342712, "learning_rate": 0.00018353567432550616, "loss": 1.1498, "step": 4178 }, { "epoch": 0.7441239316239316, "grad_norm": 0.514492928981781, "learning_rate": 0.00018352797898186588, "loss": 1.0959, "step": 4179 }, { "epoch": 0.7443019943019943, "grad_norm": 0.6395383477210999, "learning_rate": 0.0001835202820016684, "loss": 1.2867, "step": 4180 }, { "epoch": 0.7444800569800569, "grad_norm": 0.5489062070846558, "learning_rate": 0.00018351258338506447, "loss": 1.1638, "step": 4181 }, { "epoch": 0.7446581196581197, "grad_norm": 0.5705671906471252, "learning_rate": 0.00018350488313220498, "loss": 0.9493, "step": 4182 }, { "epoch": 0.7448361823361823, "grad_norm": 0.5404297709465027, "learning_rate": 0.00018349718124324076, "loss": 0.9876, "step": 4183 }, { "epoch": 0.7450142450142451, "grad_norm": 0.5841003060340881, "learning_rate": 0.0001834894777183227, "loss": 1.1225, "step": 4184 }, { "epoch": 0.7451923076923077, "grad_norm": 0.49774688482284546, "learning_rate": 0.00018348177255760178, "loss": 1.1442, "step": 4185 }, { "epoch": 0.7453703703703703, "grad_norm": 0.5212422609329224, "learning_rate": 0.00018347406576122894, "loss": 1.101, "step": 4186 }, { "epoch": 0.7455484330484331, "grad_norm": 0.615024983882904, "learning_rate": 0.00018346635732935517, "loss": 1.4188, "step": 4187 }, { "epoch": 
0.7457264957264957, "grad_norm": 0.46818843483924866, "learning_rate": 0.00018345864726213154, "loss": 1.0071, "step": 4188 }, { "epoch": 0.7459045584045584, "grad_norm": 0.4921121895313263, "learning_rate": 0.00018345093555970906, "loss": 1.015, "step": 4189 }, { "epoch": 0.7460826210826211, "grad_norm": 0.5042136311531067, "learning_rate": 0.00018344322222223889, "loss": 0.9974, "step": 4190 }, { "epoch": 0.7462606837606838, "grad_norm": 0.5872490406036377, "learning_rate": 0.0001834355072498721, "loss": 1.3166, "step": 4191 }, { "epoch": 0.7464387464387464, "grad_norm": 0.559117317199707, "learning_rate": 0.00018342779064275984, "loss": 1.2227, "step": 4192 }, { "epoch": 0.7466168091168092, "grad_norm": 0.5269635319709778, "learning_rate": 0.00018342007240105336, "loss": 1.0281, "step": 4193 }, { "epoch": 0.7467948717948718, "grad_norm": 0.4608335793018341, "learning_rate": 0.00018341235252490387, "loss": 0.98, "step": 4194 }, { "epoch": 0.7469729344729344, "grad_norm": 0.5818259119987488, "learning_rate": 0.00018340463101446255, "loss": 1.1544, "step": 4195 }, { "epoch": 0.7471509971509972, "grad_norm": 0.5577529668807983, "learning_rate": 0.00018339690786988079, "loss": 1.3059, "step": 4196 }, { "epoch": 0.7473290598290598, "grad_norm": 0.5430468320846558, "learning_rate": 0.00018338918309130983, "loss": 1.2766, "step": 4197 }, { "epoch": 0.7475071225071225, "grad_norm": 0.4941701591014862, "learning_rate": 0.0001833814566789011, "loss": 1.193, "step": 4198 }, { "epoch": 0.7476851851851852, "grad_norm": 0.5471884608268738, "learning_rate": 0.00018337372863280589, "loss": 1.2261, "step": 4199 }, { "epoch": 0.7478632478632479, "grad_norm": 0.4641438126564026, "learning_rate": 0.0001833659989531757, "loss": 0.7953, "step": 4200 }, { "epoch": 0.7480413105413105, "grad_norm": 0.5244714617729187, "learning_rate": 0.0001833582676401619, "loss": 0.9344, "step": 4201 }, { "epoch": 0.7482193732193733, "grad_norm": 0.5964360237121582, "learning_rate": 
0.00018335053469391603, "loss": 1.2072, "step": 4202 }, { "epoch": 0.7483974358974359, "grad_norm": 0.4929158091545105, "learning_rate": 0.00018334280011458954, "loss": 1.2183, "step": 4203 }, { "epoch": 0.7485754985754985, "grad_norm": 0.46221864223480225, "learning_rate": 0.00018333506390233405, "loss": 1.1957, "step": 4204 }, { "epoch": 0.7487535612535613, "grad_norm": 0.6301732659339905, "learning_rate": 0.0001833273260573011, "loss": 1.0582, "step": 4205 }, { "epoch": 0.7489316239316239, "grad_norm": 0.5606021881103516, "learning_rate": 0.0001833195865796423, "loss": 1.4034, "step": 4206 }, { "epoch": 0.7491096866096866, "grad_norm": 0.44856077432632446, "learning_rate": 0.00018331184546950926, "loss": 0.8421, "step": 4207 }, { "epoch": 0.7492877492877493, "grad_norm": 0.5487226247787476, "learning_rate": 0.00018330410272705366, "loss": 1.238, "step": 4208 }, { "epoch": 0.749465811965812, "grad_norm": 0.6043636798858643, "learning_rate": 0.00018329635835242724, "loss": 1.1215, "step": 4209 }, { "epoch": 0.7496438746438746, "grad_norm": 0.5145319104194641, "learning_rate": 0.00018328861234578173, "loss": 1.1002, "step": 4210 }, { "epoch": 0.7498219373219374, "grad_norm": 0.5667078495025635, "learning_rate": 0.00018328086470726884, "loss": 1.2994, "step": 4211 }, { "epoch": 0.75, "grad_norm": 0.5117634534835815, "learning_rate": 0.00018327311543704043, "loss": 0.9448, "step": 4212 }, { "epoch": 0.75, "eval_loss": 1.0982474088668823, "eval_runtime": 24.6617, "eval_samples_per_second": 42.211, "eval_steps_per_second": 21.126, "step": 4212 }, { "epoch": 0.7501780626780626, "grad_norm": 0.5451585054397583, "learning_rate": 0.00018326536453524826, "loss": 0.9023, "step": 4213 }, { "epoch": 0.7503561253561254, "grad_norm": 0.6585208773612976, "learning_rate": 0.0001832576120020443, "loss": 1.2798, "step": 4214 }, { "epoch": 0.750534188034188, "grad_norm": 0.6444812417030334, "learning_rate": 0.00018324985783758037, "loss": 1.3999, "step": 4215 }, { "epoch": 
0.7507122507122507, "grad_norm": 0.6178330779075623, "learning_rate": 0.0001832421020420084, "loss": 1.1846, "step": 4216 }, { "epoch": 0.7508903133903134, "grad_norm": 0.509969174861908, "learning_rate": 0.00018323434461548036, "loss": 1.1831, "step": 4217 }, { "epoch": 0.7510683760683761, "grad_norm": 0.5558911561965942, "learning_rate": 0.00018322658555814826, "loss": 1.1599, "step": 4218 }, { "epoch": 0.7512464387464387, "grad_norm": 0.5714917778968811, "learning_rate": 0.0001832188248701641, "loss": 0.9702, "step": 4219 }, { "epoch": 0.7514245014245015, "grad_norm": 0.6136442422866821, "learning_rate": 0.00018321106255167995, "loss": 0.9376, "step": 4220 }, { "epoch": 0.7516025641025641, "grad_norm": 0.5832077264785767, "learning_rate": 0.00018320329860284785, "loss": 1.2564, "step": 4221 }, { "epoch": 0.7517806267806267, "grad_norm": 0.45330923795700073, "learning_rate": 0.00018319553302381997, "loss": 0.9321, "step": 4222 }, { "epoch": 0.7519586894586895, "grad_norm": 0.5278468132019043, "learning_rate": 0.00018318776581474847, "loss": 1.1334, "step": 4223 }, { "epoch": 0.7521367521367521, "grad_norm": 0.49267473816871643, "learning_rate": 0.00018317999697578549, "loss": 1.1577, "step": 4224 }, { "epoch": 0.7523148148148148, "grad_norm": 0.5372124314308167, "learning_rate": 0.00018317222650708325, "loss": 1.037, "step": 4225 }, { "epoch": 0.7524928774928775, "grad_norm": 0.5879829525947571, "learning_rate": 0.000183164454408794, "loss": 1.1312, "step": 4226 }, { "epoch": 0.7526709401709402, "grad_norm": 0.5363932251930237, "learning_rate": 0.00018315668068107004, "loss": 1.174, "step": 4227 }, { "epoch": 0.7528490028490028, "grad_norm": 0.5585991740226746, "learning_rate": 0.00018314890532406366, "loss": 1.2106, "step": 4228 }, { "epoch": 0.7530270655270656, "grad_norm": 0.49395787715911865, "learning_rate": 0.0001831411283379272, "loss": 1.1163, "step": 4229 }, { "epoch": 0.7532051282051282, "grad_norm": 0.5081066489219666, "learning_rate": 
0.00018313334972281306, "loss": 1.184, "step": 4230 }, { "epoch": 0.7533831908831908, "grad_norm": 0.40304034948349, "learning_rate": 0.0001831255694788736, "loss": 0.7548, "step": 4231 }, { "epoch": 0.7535612535612536, "grad_norm": 0.4999815821647644, "learning_rate": 0.0001831177876062613, "loss": 1.0092, "step": 4232 }, { "epoch": 0.7537393162393162, "grad_norm": 0.48917025327682495, "learning_rate": 0.00018311000410512862, "loss": 1.0354, "step": 4233 }, { "epoch": 0.7539173789173789, "grad_norm": 0.475606769323349, "learning_rate": 0.00018310221897562806, "loss": 0.8728, "step": 4234 }, { "epoch": 0.7540954415954416, "grad_norm": 0.630439817905426, "learning_rate": 0.00018309443221791214, "loss": 1.1436, "step": 4235 }, { "epoch": 0.7542735042735043, "grad_norm": 0.524740993976593, "learning_rate": 0.00018308664383213344, "loss": 1.0487, "step": 4236 }, { "epoch": 0.7544515669515669, "grad_norm": 0.4734523892402649, "learning_rate": 0.0001830788538184445, "loss": 1.0681, "step": 4237 }, { "epoch": 0.7546296296296297, "grad_norm": 0.5767266750335693, "learning_rate": 0.00018307106217699807, "loss": 1.0599, "step": 4238 }, { "epoch": 0.7548076923076923, "grad_norm": 0.6276642084121704, "learning_rate": 0.0001830632689079467, "loss": 1.2837, "step": 4239 }, { "epoch": 0.7549857549857549, "grad_norm": 0.5539988279342651, "learning_rate": 0.00018305547401144316, "loss": 0.9072, "step": 4240 }, { "epoch": 0.7551638176638177, "grad_norm": 0.4551292061805725, "learning_rate": 0.00018304767748764014, "loss": 1.0204, "step": 4241 }, { "epoch": 0.7553418803418803, "grad_norm": 0.47344550490379333, "learning_rate": 0.00018303987933669034, "loss": 1.0473, "step": 4242 }, { "epoch": 0.7555199430199431, "grad_norm": 0.6050213575363159, "learning_rate": 0.00018303207955874665, "loss": 1.1552, "step": 4243 }, { "epoch": 0.7556980056980057, "grad_norm": 0.48943889141082764, "learning_rate": 0.00018302427815396186, "loss": 1.0002, "step": 4244 }, { "epoch": 0.7558760683760684, 
"grad_norm": 0.5664682984352112, "learning_rate": 0.00018301647512248878, "loss": 1.1865, "step": 4245 }, { "epoch": 0.7560541310541311, "grad_norm": 0.5702242255210876, "learning_rate": 0.00018300867046448034, "loss": 1.3029, "step": 4246 }, { "epoch": 0.7562321937321937, "grad_norm": 0.593207836151123, "learning_rate": 0.00018300086418008942, "loss": 1.109, "step": 4247 }, { "epoch": 0.7564102564102564, "grad_norm": 0.5887887477874756, "learning_rate": 0.000182993056269469, "loss": 1.3022, "step": 4248 }, { "epoch": 0.7565883190883191, "grad_norm": 0.5277966260910034, "learning_rate": 0.00018298524673277203, "loss": 1.1738, "step": 4249 }, { "epoch": 0.7567663817663818, "grad_norm": 0.589347779750824, "learning_rate": 0.00018297743557015155, "loss": 1.0185, "step": 4250 }, { "epoch": 0.7569444444444444, "grad_norm": 0.49920859932899475, "learning_rate": 0.0001829696227817606, "loss": 1.118, "step": 4251 }, { "epoch": 0.7571225071225072, "grad_norm": 0.502565324306488, "learning_rate": 0.0001829618083677522, "loss": 1.1856, "step": 4252 }, { "epoch": 0.7573005698005698, "grad_norm": 0.49814435839653015, "learning_rate": 0.00018295399232827955, "loss": 1.0432, "step": 4253 }, { "epoch": 0.7574786324786325, "grad_norm": 0.5087502598762512, "learning_rate": 0.00018294617466349574, "loss": 1.2325, "step": 4254 }, { "epoch": 0.7576566951566952, "grad_norm": 0.5107288956642151, "learning_rate": 0.00018293835537355394, "loss": 1.0487, "step": 4255 }, { "epoch": 0.7578347578347578, "grad_norm": 0.524725615978241, "learning_rate": 0.00018293053445860732, "loss": 1.1821, "step": 4256 }, { "epoch": 0.7580128205128205, "grad_norm": 0.5234082937240601, "learning_rate": 0.0001829227119188092, "loss": 0.8896, "step": 4257 }, { "epoch": 0.7581908831908832, "grad_norm": 0.5102918744087219, "learning_rate": 0.00018291488775431275, "loss": 1.0246, "step": 4258 }, { "epoch": 0.7583689458689459, "grad_norm": 0.5552714467048645, "learning_rate": 0.00018290706196527135, "loss": 1.0193, 
"step": 4259 }, { "epoch": 0.7585470085470085, "grad_norm": 0.5395022630691528, "learning_rate": 0.00018289923455183825, "loss": 1.3203, "step": 4260 }, { "epoch": 0.7587250712250713, "grad_norm": 0.7474865913391113, "learning_rate": 0.00018289140551416692, "loss": 1.182, "step": 4261 }, { "epoch": 0.7589031339031339, "grad_norm": 0.4892016649246216, "learning_rate": 0.00018288357485241066, "loss": 0.968, "step": 4262 }, { "epoch": 0.7590811965811965, "grad_norm": 0.4627816081047058, "learning_rate": 0.00018287574256672291, "loss": 0.6895, "step": 4263 }, { "epoch": 0.7592592592592593, "grad_norm": 0.6221280097961426, "learning_rate": 0.00018286790865725715, "loss": 0.9691, "step": 4264 }, { "epoch": 0.7594373219373219, "grad_norm": 0.5542295575141907, "learning_rate": 0.0001828600731241669, "loss": 0.9996, "step": 4265 }, { "epoch": 0.7596153846153846, "grad_norm": 0.5570770502090454, "learning_rate": 0.00018285223596760562, "loss": 1.1996, "step": 4266 }, { "epoch": 0.7597934472934473, "grad_norm": 0.5495262742042542, "learning_rate": 0.00018284439718772687, "loss": 1.1572, "step": 4267 }, { "epoch": 0.75997150997151, "grad_norm": 0.5006741881370544, "learning_rate": 0.00018283655678468427, "loss": 1.1215, "step": 4268 }, { "epoch": 0.7601495726495726, "grad_norm": 0.4682157635688782, "learning_rate": 0.00018282871475863144, "loss": 1.0547, "step": 4269 }, { "epoch": 0.7603276353276354, "grad_norm": 0.6275840997695923, "learning_rate": 0.00018282087110972197, "loss": 1.3855, "step": 4270 }, { "epoch": 0.760505698005698, "grad_norm": 0.5341474413871765, "learning_rate": 0.0001828130258381096, "loss": 1.2024, "step": 4271 }, { "epoch": 0.7606837606837606, "grad_norm": 0.4330833852291107, "learning_rate": 0.000182805178943948, "loss": 1.0508, "step": 4272 }, { "epoch": 0.7608618233618234, "grad_norm": 0.6276537179946899, "learning_rate": 0.00018279733042739094, "loss": 1.1635, "step": 4273 }, { "epoch": 0.761039886039886, "grad_norm": 0.5370199084281921, 
"learning_rate": 0.00018278948028859217, "loss": 1.0579, "step": 4274 }, { "epoch": 0.7612179487179487, "grad_norm": 0.524959921836853, "learning_rate": 0.00018278162852770552, "loss": 1.0972, "step": 4275 }, { "epoch": 0.7613960113960114, "grad_norm": 0.5029389262199402, "learning_rate": 0.00018277377514488486, "loss": 0.959, "step": 4276 }, { "epoch": 0.7615740740740741, "grad_norm": 0.49772894382476807, "learning_rate": 0.00018276592014028397, "loss": 1.2773, "step": 4277 }, { "epoch": 0.7617521367521367, "grad_norm": 0.5195719003677368, "learning_rate": 0.00018275806351405685, "loss": 1.0676, "step": 4278 }, { "epoch": 0.7619301994301995, "grad_norm": 0.5167942643165588, "learning_rate": 0.00018275020526635735, "loss": 1.0615, "step": 4279 }, { "epoch": 0.7621082621082621, "grad_norm": 0.4958035945892334, "learning_rate": 0.0001827423453973395, "loss": 0.9605, "step": 4280 }, { "epoch": 0.7622863247863247, "grad_norm": 0.6256808042526245, "learning_rate": 0.00018273448390715728, "loss": 1.2526, "step": 4281 }, { "epoch": 0.7624643874643875, "grad_norm": 0.5062580108642578, "learning_rate": 0.0001827266207959647, "loss": 1.0604, "step": 4282 }, { "epoch": 0.7626424501424501, "grad_norm": 0.5080778002738953, "learning_rate": 0.00018271875606391583, "loss": 1.1246, "step": 4283 }, { "epoch": 0.7628205128205128, "grad_norm": 0.5069389939308167, "learning_rate": 0.00018271088971116479, "loss": 1.3158, "step": 4284 }, { "epoch": 0.7629985754985755, "grad_norm": 0.7280121445655823, "learning_rate": 0.00018270302173786567, "loss": 1.2066, "step": 4285 }, { "epoch": 0.7631766381766382, "grad_norm": 0.6523470282554626, "learning_rate": 0.00018269515214417267, "loss": 1.3236, "step": 4286 }, { "epoch": 0.7633547008547008, "grad_norm": 0.5799322724342346, "learning_rate": 0.00018268728093023988, "loss": 0.9786, "step": 4287 }, { "epoch": 0.7635327635327636, "grad_norm": 0.46675166487693787, "learning_rate": 0.00018267940809622163, "loss": 0.8131, "step": 4288 }, { "epoch": 
0.7637108262108262, "grad_norm": 0.5566182732582092, "learning_rate": 0.00018267153364227214, "loss": 1.0565, "step": 4289 }, { "epoch": 0.7638888888888888, "grad_norm": 0.532028079032898, "learning_rate": 0.00018266365756854566, "loss": 0.952, "step": 4290 }, { "epoch": 0.7640669515669516, "grad_norm": 0.5082666873931885, "learning_rate": 0.00018265577987519653, "loss": 1.0704, "step": 4291 }, { "epoch": 0.7642450142450142, "grad_norm": 0.5223562717437744, "learning_rate": 0.00018264790056237912, "loss": 1.1161, "step": 4292 }, { "epoch": 0.7644230769230769, "grad_norm": 0.48472318053245544, "learning_rate": 0.00018264001963024778, "loss": 0.8784, "step": 4293 }, { "epoch": 0.7646011396011396, "grad_norm": 0.5901281833648682, "learning_rate": 0.0001826321370789569, "loss": 1.1031, "step": 4294 }, { "epoch": 0.7647792022792023, "grad_norm": 0.570350706577301, "learning_rate": 0.000182624252908661, "loss": 0.9047, "step": 4295 }, { "epoch": 0.7649572649572649, "grad_norm": 0.568373441696167, "learning_rate": 0.00018261636711951445, "loss": 1.0106, "step": 4296 }, { "epoch": 0.7651353276353277, "grad_norm": 0.6175880432128906, "learning_rate": 0.00018260847971167182, "loss": 1.3531, "step": 4297 }, { "epoch": 0.7653133903133903, "grad_norm": 0.5682594776153564, "learning_rate": 0.00018260059068528762, "loss": 1.1261, "step": 4298 }, { "epoch": 0.7654914529914529, "grad_norm": 0.5050225257873535, "learning_rate": 0.00018259270004051644, "loss": 1.0921, "step": 4299 }, { "epoch": 0.7656695156695157, "grad_norm": 0.5416565537452698, "learning_rate": 0.0001825848077775129, "loss": 1.0881, "step": 4300 }, { "epoch": 0.7658475783475783, "grad_norm": 0.5418867468833923, "learning_rate": 0.0001825769138964316, "loss": 1.2069, "step": 4301 }, { "epoch": 0.7660256410256411, "grad_norm": 0.5447866320610046, "learning_rate": 0.00018256901839742718, "loss": 1.1827, "step": 4302 }, { "epoch": 0.7662037037037037, "grad_norm": 0.5482802987098694, "learning_rate": 
0.00018256112128065439, "loss": 1.0492, "step": 4303 }, { "epoch": 0.7663817663817664, "grad_norm": 0.5059601664543152, "learning_rate": 0.0001825532225462679, "loss": 1.0996, "step": 4304 }, { "epoch": 0.7665598290598291, "grad_norm": 0.5153701901435852, "learning_rate": 0.00018254532219442258, "loss": 1.3237, "step": 4305 }, { "epoch": 0.7667378917378918, "grad_norm": 0.5370768904685974, "learning_rate": 0.0001825374202252731, "loss": 0.9925, "step": 4306 }, { "epoch": 0.7669159544159544, "grad_norm": 0.4516580402851105, "learning_rate": 0.00018252951663897432, "loss": 1.0749, "step": 4307 }, { "epoch": 0.7670940170940171, "grad_norm": 0.5565171837806702, "learning_rate": 0.0001825216114356811, "loss": 1.1617, "step": 4308 }, { "epoch": 0.7672720797720798, "grad_norm": 0.5212662220001221, "learning_rate": 0.00018251370461554834, "loss": 1.1108, "step": 4309 }, { "epoch": 0.7674501424501424, "grad_norm": 0.49061715602874756, "learning_rate": 0.00018250579617873095, "loss": 1.0881, "step": 4310 }, { "epoch": 0.7676282051282052, "grad_norm": 0.5535751581192017, "learning_rate": 0.00018249788612538387, "loss": 0.9341, "step": 4311 }, { "epoch": 0.7678062678062678, "grad_norm": 0.5425209403038025, "learning_rate": 0.00018248997445566208, "loss": 1.1858, "step": 4312 }, { "epoch": 0.7679843304843305, "grad_norm": 0.6224395036697388, "learning_rate": 0.0001824820611697206, "loss": 1.0836, "step": 4313 }, { "epoch": 0.7681623931623932, "grad_norm": 0.4895690977573395, "learning_rate": 0.00018247414626771445, "loss": 0.8598, "step": 4314 }, { "epoch": 0.7683404558404558, "grad_norm": 0.5279615521430969, "learning_rate": 0.00018246622974979877, "loss": 1.1742, "step": 4315 }, { "epoch": 0.7685185185185185, "grad_norm": 0.45300471782684326, "learning_rate": 0.0001824583116161286, "loss": 0.8872, "step": 4316 }, { "epoch": 0.7686965811965812, "grad_norm": 0.6499692797660828, "learning_rate": 0.00018245039186685916, "loss": 1.2495, "step": 4317 }, { "epoch": 
0.7688746438746439, "grad_norm": 0.48151278495788574, "learning_rate": 0.00018244247050214552, "loss": 1.2382, "step": 4318 }, { "epoch": 0.7690527065527065, "grad_norm": 0.6597028374671936, "learning_rate": 0.0001824345475221429, "loss": 1.3453, "step": 4319 }, { "epoch": 0.7692307692307693, "grad_norm": 0.4536992609500885, "learning_rate": 0.0001824266229270066, "loss": 1.1141, "step": 4320 }, { "epoch": 0.7694088319088319, "grad_norm": 0.5489405393600464, "learning_rate": 0.00018241869671689184, "loss": 1.0333, "step": 4321 }, { "epoch": 0.7695868945868946, "grad_norm": 0.5741586089134216, "learning_rate": 0.00018241076889195394, "loss": 0.9939, "step": 4322 }, { "epoch": 0.7697649572649573, "grad_norm": 0.47170960903167725, "learning_rate": 0.00018240283945234823, "loss": 0.9878, "step": 4323 }, { "epoch": 0.76994301994302, "grad_norm": 0.4729093313217163, "learning_rate": 0.00018239490839823004, "loss": 1.0087, "step": 4324 }, { "epoch": 0.7701210826210826, "grad_norm": 0.49869823455810547, "learning_rate": 0.0001823869757297548, "loss": 1.169, "step": 4325 }, { "epoch": 0.7702991452991453, "grad_norm": 0.5118468403816223, "learning_rate": 0.0001823790414470779, "loss": 1.1092, "step": 4326 }, { "epoch": 0.770477207977208, "grad_norm": 0.5076048970222473, "learning_rate": 0.0001823711055503548, "loss": 1.1028, "step": 4327 }, { "epoch": 0.7706552706552706, "grad_norm": 0.5661569237709045, "learning_rate": 0.00018236316803974098, "loss": 1.1114, "step": 4328 }, { "epoch": 0.7708333333333334, "grad_norm": 0.5542354583740234, "learning_rate": 0.000182355228915392, "loss": 1.0931, "step": 4329 }, { "epoch": 0.771011396011396, "grad_norm": 0.5476680994033813, "learning_rate": 0.0001823472881774634, "loss": 1.036, "step": 4330 }, { "epoch": 0.7711894586894587, "grad_norm": 0.5449798703193665, "learning_rate": 0.00018233934582611073, "loss": 1.0682, "step": 4331 }, { "epoch": 0.7713675213675214, "grad_norm": 0.61089026927948, "learning_rate": 0.00018233140186148963, 
"loss": 1.0748, "step": 4332 }, { "epoch": 0.771545584045584, "grad_norm": 0.5015206336975098, "learning_rate": 0.00018232345628375576, "loss": 1.2032, "step": 4333 }, { "epoch": 0.7717236467236467, "grad_norm": 0.579289972782135, "learning_rate": 0.00018231550909306475, "loss": 1.0764, "step": 4334 }, { "epoch": 0.7719017094017094, "grad_norm": 0.5889299511909485, "learning_rate": 0.00018230756028957235, "loss": 1.1768, "step": 4335 }, { "epoch": 0.7720797720797721, "grad_norm": 0.5328249335289001, "learning_rate": 0.00018229960987343428, "loss": 1.0055, "step": 4336 }, { "epoch": 0.7722578347578347, "grad_norm": 0.5766382217407227, "learning_rate": 0.0001822916578448063, "loss": 0.9923, "step": 4337 }, { "epoch": 0.7724358974358975, "grad_norm": 0.6448187828063965, "learning_rate": 0.00018228370420384423, "loss": 1.1135, "step": 4338 }, { "epoch": 0.7726139601139601, "grad_norm": 0.5505210757255554, "learning_rate": 0.00018227574895070394, "loss": 1.2048, "step": 4339 }, { "epoch": 0.7727920227920227, "grad_norm": 0.6278925538063049, "learning_rate": 0.00018226779208554126, "loss": 1.1045, "step": 4340 }, { "epoch": 0.7729700854700855, "grad_norm": 0.5345009565353394, "learning_rate": 0.00018225983360851207, "loss": 1.0102, "step": 4341 }, { "epoch": 0.7731481481481481, "grad_norm": 0.566633403301239, "learning_rate": 0.00018225187351977233, "loss": 1.0038, "step": 4342 }, { "epoch": 0.7733262108262108, "grad_norm": 0.5066078901290894, "learning_rate": 0.000182243911819478, "loss": 1.0339, "step": 4343 }, { "epoch": 0.7735042735042735, "grad_norm": 0.5614920258522034, "learning_rate": 0.00018223594850778503, "loss": 1.1021, "step": 4344 }, { "epoch": 0.7736823361823362, "grad_norm": 0.7747337818145752, "learning_rate": 0.0001822279835848495, "loss": 1.1129, "step": 4345 }, { "epoch": 0.7738603988603988, "grad_norm": 0.7066529989242554, "learning_rate": 0.00018222001705082744, "loss": 1.3234, "step": 4346 }, { "epoch": 0.7740384615384616, "grad_norm": 
0.6340884566307068, "learning_rate": 0.00018221204890587497, "loss": 1.0726, "step": 4347 }, { "epoch": 0.7742165242165242, "grad_norm": 0.5401145815849304, "learning_rate": 0.00018220407915014818, "loss": 0.9904, "step": 4348 }, { "epoch": 0.7743945868945868, "grad_norm": 0.5069159269332886, "learning_rate": 0.00018219610778380315, "loss": 1.0654, "step": 4349 }, { "epoch": 0.7745726495726496, "grad_norm": 0.5422839522361755, "learning_rate": 0.00018218813480699623, "loss": 1.1741, "step": 4350 }, { "epoch": 0.7747507122507122, "grad_norm": 0.5550300478935242, "learning_rate": 0.0001821801602198835, "loss": 1.0033, "step": 4351 }, { "epoch": 0.7749287749287749, "grad_norm": 0.5987736582756042, "learning_rate": 0.00018217218402262123, "loss": 0.935, "step": 4352 }, { "epoch": 0.7751068376068376, "grad_norm": 0.6137008666992188, "learning_rate": 0.00018216420621536573, "loss": 1.17, "step": 4353 }, { "epoch": 0.7752849002849003, "grad_norm": 0.47124359011650085, "learning_rate": 0.0001821562267982733, "loss": 0.8316, "step": 4354 }, { "epoch": 0.7754629629629629, "grad_norm": 0.5057868361473083, "learning_rate": 0.00018214824577150024, "loss": 1.0246, "step": 4355 }, { "epoch": 0.7756410256410257, "grad_norm": 0.604055643081665, "learning_rate": 0.00018214026313520299, "loss": 1.1272, "step": 4356 }, { "epoch": 0.7758190883190883, "grad_norm": 0.6690384149551392, "learning_rate": 0.0001821322788895379, "loss": 1.0464, "step": 4357 }, { "epoch": 0.7759971509971509, "grad_norm": 0.5458958745002747, "learning_rate": 0.0001821242930346614, "loss": 1.1712, "step": 4358 }, { "epoch": 0.7761752136752137, "grad_norm": 0.6448663473129272, "learning_rate": 0.00018211630557073, "loss": 1.1125, "step": 4359 }, { "epoch": 0.7763532763532763, "grad_norm": 0.49889448285102844, "learning_rate": 0.00018210831649790018, "loss": 1.097, "step": 4360 }, { "epoch": 0.7765313390313391, "grad_norm": 0.5118046998977661, "learning_rate": 0.00018210032581632843, "loss": 1.009, "step": 4361 }, 
{ "epoch": 0.7767094017094017, "grad_norm": 0.5450068116188049, "learning_rate": 0.00018209233352617135, "loss": 1.1138, "step": 4362 }, { "epoch": 0.7768874643874644, "grad_norm": 0.6147481203079224, "learning_rate": 0.00018208433962758558, "loss": 1.212, "step": 4363 }, { "epoch": 0.7770655270655271, "grad_norm": 0.554176926612854, "learning_rate": 0.00018207634412072764, "loss": 1.1271, "step": 4364 }, { "epoch": 0.7772435897435898, "grad_norm": 0.5872851014137268, "learning_rate": 0.00018206834700575426, "loss": 1.2793, "step": 4365 }, { "epoch": 0.7774216524216524, "grad_norm": 0.5135685205459595, "learning_rate": 0.00018206034828282207, "loss": 0.9642, "step": 4366 }, { "epoch": 0.7775997150997151, "grad_norm": 0.5699490308761597, "learning_rate": 0.00018205234795208786, "loss": 0.9086, "step": 4367 }, { "epoch": 0.7777777777777778, "grad_norm": 0.5908057689666748, "learning_rate": 0.00018204434601370832, "loss": 1.1973, "step": 4368 }, { "epoch": 0.7779558404558404, "grad_norm": 0.5777581334114075, "learning_rate": 0.00018203634246784025, "loss": 1.0447, "step": 4369 }, { "epoch": 0.7781339031339032, "grad_norm": 0.4822927713394165, "learning_rate": 0.00018202833731464048, "loss": 0.814, "step": 4370 }, { "epoch": 0.7783119658119658, "grad_norm": 0.5343610644340515, "learning_rate": 0.0001820203305542658, "loss": 1.2785, "step": 4371 }, { "epoch": 0.7784900284900285, "grad_norm": 0.5462222695350647, "learning_rate": 0.00018201232218687316, "loss": 1.1785, "step": 4372 }, { "epoch": 0.7786680911680912, "grad_norm": 0.5177609324455261, "learning_rate": 0.00018200431221261943, "loss": 1.111, "step": 4373 }, { "epoch": 0.7788461538461539, "grad_norm": 0.5324625968933105, "learning_rate": 0.00018199630063166157, "loss": 1.0738, "step": 4374 }, { "epoch": 0.7790242165242165, "grad_norm": 0.6392876505851746, "learning_rate": 0.0001819882874441565, "loss": 1.1758, "step": 4375 }, { "epoch": 0.7792022792022792, "grad_norm": 0.49964696168899536, "learning_rate": 
0.00018198027265026127, "loss": 1.0556, "step": 4376 }, { "epoch": 0.7793803418803419, "grad_norm": 0.6090660691261292, "learning_rate": 0.00018197225625013287, "loss": 1.0102, "step": 4377 }, { "epoch": 0.7795584045584045, "grad_norm": 0.5242345929145813, "learning_rate": 0.00018196423824392842, "loss": 0.8335, "step": 4378 }, { "epoch": 0.7797364672364673, "grad_norm": 0.5265036225318909, "learning_rate": 0.00018195621863180498, "loss": 1.0781, "step": 4379 }, { "epoch": 0.7799145299145299, "grad_norm": 0.5115378499031067, "learning_rate": 0.0001819481974139197, "loss": 1.1658, "step": 4380 }, { "epoch": 0.7800925925925926, "grad_norm": 0.6489549875259399, "learning_rate": 0.00018194017459042972, "loss": 1.0572, "step": 4381 }, { "epoch": 0.7802706552706553, "grad_norm": 0.5800202488899231, "learning_rate": 0.0001819321501614922, "loss": 0.9593, "step": 4382 }, { "epoch": 0.780448717948718, "grad_norm": 0.5608528256416321, "learning_rate": 0.00018192412412726443, "loss": 1.0324, "step": 4383 }, { "epoch": 0.7806267806267806, "grad_norm": 0.5596401691436768, "learning_rate": 0.00018191609648790362, "loss": 1.071, "step": 4384 }, { "epoch": 0.7808048433048433, "grad_norm": 0.5712903141975403, "learning_rate": 0.00018190806724356707, "loss": 0.9011, "step": 4385 }, { "epoch": 0.780982905982906, "grad_norm": 0.5079438090324402, "learning_rate": 0.0001819000363944121, "loss": 1.1194, "step": 4386 }, { "epoch": 0.7811609686609686, "grad_norm": 0.5785079598426819, "learning_rate": 0.00018189200394059602, "loss": 1.1703, "step": 4387 }, { "epoch": 0.7813390313390314, "grad_norm": 0.6901816129684448, "learning_rate": 0.00018188396988227625, "loss": 1.6689, "step": 4388 }, { "epoch": 0.781517094017094, "grad_norm": 0.48107922077178955, "learning_rate": 0.00018187593421961022, "loss": 1.0116, "step": 4389 }, { "epoch": 0.7816951566951567, "grad_norm": 0.5843084454536438, "learning_rate": 0.0001818678969527553, "loss": 1.1172, "step": 4390 }, { "epoch": 0.7818732193732194, 
"grad_norm": 0.479034423828125, "learning_rate": 0.00018185985808186902, "loss": 0.811, "step": 4391 }, { "epoch": 0.782051282051282, "grad_norm": 0.5864158272743225, "learning_rate": 0.00018185181760710888, "loss": 0.9522, "step": 4392 }, { "epoch": 0.7822293447293447, "grad_norm": 0.4824625551700592, "learning_rate": 0.00018184377552863242, "loss": 0.9039, "step": 4393 }, { "epoch": 0.7824074074074074, "grad_norm": 0.580102801322937, "learning_rate": 0.00018183573184659717, "loss": 1.2382, "step": 4394 }, { "epoch": 0.7825854700854701, "grad_norm": 0.5300056338310242, "learning_rate": 0.00018182768656116073, "loss": 1.2268, "step": 4395 }, { "epoch": 0.7827635327635327, "grad_norm": 0.5548123121261597, "learning_rate": 0.00018181963967248078, "loss": 1.0628, "step": 4396 }, { "epoch": 0.7829415954415955, "grad_norm": 0.5485070943832397, "learning_rate": 0.00018181159118071496, "loss": 0.9628, "step": 4397 }, { "epoch": 0.7831196581196581, "grad_norm": 0.47405415773391724, "learning_rate": 0.00018180354108602095, "loss": 1.1413, "step": 4398 }, { "epoch": 0.7832977207977208, "grad_norm": 0.5545752644538879, "learning_rate": 0.0001817954893885565, "loss": 1.3807, "step": 4399 }, { "epoch": 0.7834757834757835, "grad_norm": 0.5339497327804565, "learning_rate": 0.00018178743608847933, "loss": 0.9978, "step": 4400 }, { "epoch": 0.7836538461538461, "grad_norm": 0.5006352663040161, "learning_rate": 0.00018177938118594725, "loss": 0.8873, "step": 4401 }, { "epoch": 0.7838319088319088, "grad_norm": 0.4845179319381714, "learning_rate": 0.00018177132468111812, "loss": 0.8866, "step": 4402 }, { "epoch": 0.7840099715099715, "grad_norm": 0.5240967869758606, "learning_rate": 0.0001817632665741497, "loss": 1.0347, "step": 4403 }, { "epoch": 0.7841880341880342, "grad_norm": 0.5311884880065918, "learning_rate": 0.00018175520686519993, "loss": 1.2065, "step": 4404 }, { "epoch": 0.7843660968660968, "grad_norm": 0.5562815070152283, "learning_rate": 0.00018174714555442673, "loss": 
1.1272, "step": 4405 }, { "epoch": 0.7845441595441596, "grad_norm": 0.5524366497993469, "learning_rate": 0.00018173908264198802, "loss": 1.2337, "step": 4406 }, { "epoch": 0.7847222222222222, "grad_norm": 0.5612216591835022, "learning_rate": 0.0001817310181280418, "loss": 1.1809, "step": 4407 }, { "epoch": 0.7849002849002849, "grad_norm": 0.5315343737602234, "learning_rate": 0.000181722952012746, "loss": 1.0491, "step": 4408 }, { "epoch": 0.7850783475783476, "grad_norm": 0.5233435034751892, "learning_rate": 0.00018171488429625878, "loss": 1.0457, "step": 4409 }, { "epoch": 0.7852564102564102, "grad_norm": 0.7809093594551086, "learning_rate": 0.00018170681497873813, "loss": 1.1578, "step": 4410 }, { "epoch": 0.7854344729344729, "grad_norm": 0.49659839272499084, "learning_rate": 0.00018169874406034217, "loss": 1.0815, "step": 4411 }, { "epoch": 0.7856125356125356, "grad_norm": 0.5020765066146851, "learning_rate": 0.00018169067154122904, "loss": 1.1985, "step": 4412 }, { "epoch": 0.7857905982905983, "grad_norm": 0.6408432126045227, "learning_rate": 0.0001816825974215569, "loss": 1.2272, "step": 4413 }, { "epoch": 0.7859686609686609, "grad_norm": 0.5062605142593384, "learning_rate": 0.00018167452170148396, "loss": 0.9663, "step": 4414 }, { "epoch": 0.7861467236467237, "grad_norm": 0.5100119113922119, "learning_rate": 0.0001816664443811684, "loss": 1.0256, "step": 4415 }, { "epoch": 0.7863247863247863, "grad_norm": 0.5277643799781799, "learning_rate": 0.00018165836546076854, "loss": 1.2885, "step": 4416 }, { "epoch": 0.7865028490028491, "grad_norm": 0.5568150281906128, "learning_rate": 0.0001816502849404426, "loss": 1.2673, "step": 4417 }, { "epoch": 0.7866809116809117, "grad_norm": 0.5061392188072205, "learning_rate": 0.00018164220282034896, "loss": 1.072, "step": 4418 }, { "epoch": 0.7868589743589743, "grad_norm": 0.5383077263832092, "learning_rate": 0.00018163411910064597, "loss": 1.0621, "step": 4419 }, { "epoch": 0.7870370370370371, "grad_norm": 0.5167948007583618, 
"learning_rate": 0.00018162603378149198, "loss": 1.099, "step": 4420 }, { "epoch": 0.7872150997150997, "grad_norm": 0.5084534287452698, "learning_rate": 0.0001816179468630454, "loss": 1.3984, "step": 4421 }, { "epoch": 0.7873931623931624, "grad_norm": 0.608762264251709, "learning_rate": 0.00018160985834546475, "loss": 1.3553, "step": 4422 }, { "epoch": 0.7875712250712251, "grad_norm": 0.4900866746902466, "learning_rate": 0.00018160176822890842, "loss": 1.0009, "step": 4423 }, { "epoch": 0.7877492877492878, "grad_norm": 0.5928917527198792, "learning_rate": 0.00018159367651353496, "loss": 1.0523, "step": 4424 }, { "epoch": 0.7879273504273504, "grad_norm": 0.624422013759613, "learning_rate": 0.0001815855831995029, "loss": 1.0519, "step": 4425 }, { "epoch": 0.7881054131054132, "grad_norm": 0.5140150785446167, "learning_rate": 0.00018157748828697082, "loss": 1.048, "step": 4426 }, { "epoch": 0.7882834757834758, "grad_norm": 0.47006943821907043, "learning_rate": 0.00018156939177609732, "loss": 1.0067, "step": 4427 }, { "epoch": 0.7884615384615384, "grad_norm": 0.5178864002227783, "learning_rate": 0.00018156129366704105, "loss": 1.0583, "step": 4428 }, { "epoch": 0.7886396011396012, "grad_norm": 0.5279985666275024, "learning_rate": 0.00018155319395996066, "loss": 1.3023, "step": 4429 }, { "epoch": 0.7888176638176638, "grad_norm": 0.5238787531852722, "learning_rate": 0.00018154509265501482, "loss": 1.0851, "step": 4430 }, { "epoch": 0.7889957264957265, "grad_norm": 0.5914917588233948, "learning_rate": 0.00018153698975236228, "loss": 0.9291, "step": 4431 }, { "epoch": 0.7891737891737892, "grad_norm": 0.5046082735061646, "learning_rate": 0.00018152888525216183, "loss": 0.9951, "step": 4432 }, { "epoch": 0.7893518518518519, "grad_norm": 0.5042256116867065, "learning_rate": 0.00018152077915457225, "loss": 1.0243, "step": 4433 }, { "epoch": 0.7895299145299145, "grad_norm": 0.5950339436531067, "learning_rate": 0.0001815126714597523, "loss": 0.9803, "step": 4434 }, { "epoch": 
0.7897079772079773, "grad_norm": 0.5163764953613281, "learning_rate": 0.0001815045621678609, "loss": 1.0353, "step": 4435 }, { "epoch": 0.7898860398860399, "grad_norm": 0.5166211128234863, "learning_rate": 0.00018149645127905691, "loss": 0.9649, "step": 4436 }, { "epoch": 0.7900641025641025, "grad_norm": 0.5239769220352173, "learning_rate": 0.00018148833879349927, "loss": 0.9747, "step": 4437 }, { "epoch": 0.7902421652421653, "grad_norm": 0.5803237557411194, "learning_rate": 0.00018148022471134692, "loss": 1.315, "step": 4438 }, { "epoch": 0.7904202279202279, "grad_norm": 0.5141370296478271, "learning_rate": 0.00018147210903275877, "loss": 1.0547, "step": 4439 }, { "epoch": 0.7905982905982906, "grad_norm": 0.545788586139679, "learning_rate": 0.00018146399175789394, "loss": 1.0797, "step": 4440 }, { "epoch": 0.7907763532763533, "grad_norm": 0.5273314714431763, "learning_rate": 0.0001814558728869114, "loss": 0.7928, "step": 4441 }, { "epoch": 0.790954415954416, "grad_norm": 0.4614652693271637, "learning_rate": 0.00018144775241997024, "loss": 0.8826, "step": 4442 }, { "epoch": 0.7911324786324786, "grad_norm": 0.6203590631484985, "learning_rate": 0.00018143963035722958, "loss": 1.2891, "step": 4443 }, { "epoch": 0.7913105413105413, "grad_norm": 0.4870408773422241, "learning_rate": 0.0001814315066988485, "loss": 1.0717, "step": 4444 }, { "epoch": 0.791488603988604, "grad_norm": 0.6468982696533203, "learning_rate": 0.00018142338144498625, "loss": 1.3398, "step": 4445 }, { "epoch": 0.7916666666666666, "grad_norm": 0.4727918207645416, "learning_rate": 0.00018141525459580197, "loss": 1.0195, "step": 4446 }, { "epoch": 0.7918447293447294, "grad_norm": 0.5080479979515076, "learning_rate": 0.0001814071261514549, "loss": 1.0163, "step": 4447 }, { "epoch": 0.792022792022792, "grad_norm": 0.5380908250808716, "learning_rate": 0.0001813989961121043, "loss": 1.1673, "step": 4448 }, { "epoch": 0.7922008547008547, "grad_norm": 0.5020384192466736, "learning_rate": 
0.00018139086447790945, "loss": 0.8591, "step": 4449 }, { "epoch": 0.7923789173789174, "grad_norm": 0.5279949903488159, "learning_rate": 0.0001813827312490297, "loss": 1.1221, "step": 4450 }, { "epoch": 0.79255698005698, "grad_norm": 0.6739233732223511, "learning_rate": 0.00018137459642562437, "loss": 1.2704, "step": 4451 }, { "epoch": 0.7927350427350427, "grad_norm": 0.5112259984016418, "learning_rate": 0.00018136646000785288, "loss": 1.1161, "step": 4452 }, { "epoch": 0.7929131054131054, "grad_norm": 0.5244031548500061, "learning_rate": 0.00018135832199587463, "loss": 0.7866, "step": 4453 }, { "epoch": 0.7930911680911681, "grad_norm": 0.5803347229957581, "learning_rate": 0.0001813501823898491, "loss": 0.994, "step": 4454 }, { "epoch": 0.7932692307692307, "grad_norm": 0.6191152930259705, "learning_rate": 0.00018134204118993568, "loss": 1.0725, "step": 4455 }, { "epoch": 0.7934472934472935, "grad_norm": 0.549735963344574, "learning_rate": 0.00018133389839629396, "loss": 0.9915, "step": 4456 }, { "epoch": 0.7936253561253561, "grad_norm": 0.4940381646156311, "learning_rate": 0.00018132575400908347, "loss": 1.1815, "step": 4457 }, { "epoch": 0.7938034188034188, "grad_norm": 0.5009099245071411, "learning_rate": 0.00018131760802846377, "loss": 1.0833, "step": 4458 }, { "epoch": 0.7939814814814815, "grad_norm": 0.595853865146637, "learning_rate": 0.00018130946045459445, "loss": 1.2774, "step": 4459 }, { "epoch": 0.7941595441595442, "grad_norm": 0.534794807434082, "learning_rate": 0.00018130131128763513, "loss": 1.0891, "step": 4460 }, { "epoch": 0.7943376068376068, "grad_norm": 0.5828582048416138, "learning_rate": 0.00018129316052774557, "loss": 1.0786, "step": 4461 }, { "epoch": 0.7945156695156695, "grad_norm": 0.4750654697418213, "learning_rate": 0.00018128500817508533, "loss": 1.0818, "step": 4462 }, { "epoch": 0.7946937321937322, "grad_norm": 0.5626576542854309, "learning_rate": 0.00018127685422981426, "loss": 1.0807, "step": 4463 }, { "epoch": 0.7948717948717948, 
"grad_norm": 0.6434760093688965, "learning_rate": 0.00018126869869209203, "loss": 1.0908, "step": 4464 }, { "epoch": 0.7950498575498576, "grad_norm": 0.5577414631843567, "learning_rate": 0.00018126054156207853, "loss": 1.0281, "step": 4465 }, { "epoch": 0.7952279202279202, "grad_norm": 0.5001249313354492, "learning_rate": 0.00018125238283993347, "loss": 0.9083, "step": 4466 }, { "epoch": 0.7954059829059829, "grad_norm": 0.5298314690589905, "learning_rate": 0.00018124422252581676, "loss": 0.971, "step": 4467 }, { "epoch": 0.7955840455840456, "grad_norm": 0.4872737228870392, "learning_rate": 0.00018123606061988832, "loss": 1.0515, "step": 4468 }, { "epoch": 0.7957621082621082, "grad_norm": 0.5895398259162903, "learning_rate": 0.00018122789712230798, "loss": 1.0771, "step": 4469 }, { "epoch": 0.7959401709401709, "grad_norm": 0.5212514996528625, "learning_rate": 0.00018121973203323577, "loss": 1.0365, "step": 4470 }, { "epoch": 0.7961182336182336, "grad_norm": 0.4679451584815979, "learning_rate": 0.0001812115653528316, "loss": 0.9445, "step": 4471 }, { "epoch": 0.7962962962962963, "grad_norm": 0.5852653980255127, "learning_rate": 0.00018120339708125552, "loss": 1.1781, "step": 4472 }, { "epoch": 0.7964743589743589, "grad_norm": 0.6081342697143555, "learning_rate": 0.00018119522721866756, "loss": 1.3881, "step": 4473 }, { "epoch": 0.7966524216524217, "grad_norm": 0.5254155993461609, "learning_rate": 0.00018118705576522777, "loss": 1.2198, "step": 4474 }, { "epoch": 0.7968304843304843, "grad_norm": 0.5959419012069702, "learning_rate": 0.00018117888272109632, "loss": 1.0922, "step": 4475 }, { "epoch": 0.7970085470085471, "grad_norm": 0.6243147253990173, "learning_rate": 0.0001811707080864333, "loss": 1.1782, "step": 4476 }, { "epoch": 0.7971866096866097, "grad_norm": 0.5336906909942627, "learning_rate": 0.0001811625318613988, "loss": 1.167, "step": 4477 }, { "epoch": 0.7973646723646723, "grad_norm": 0.5287907719612122, "learning_rate": 0.00018115435404615315, "loss": 
0.9923, "step": 4478 }, { "epoch": 0.7975427350427351, "grad_norm": 0.48941442370414734, "learning_rate": 0.0001811461746408565, "loss": 0.863, "step": 4479 }, { "epoch": 0.7977207977207977, "grad_norm": 0.48465651273727417, "learning_rate": 0.0001811379936456691, "loss": 1.147, "step": 4480 }, { "epoch": 0.7978988603988604, "grad_norm": 0.5676067471504211, "learning_rate": 0.0001811298110607513, "loss": 1.3121, "step": 4481 }, { "epoch": 0.7980769230769231, "grad_norm": 0.4894018769264221, "learning_rate": 0.00018112162688626337, "loss": 1.1831, "step": 4482 }, { "epoch": 0.7982549857549858, "grad_norm": 0.5626382827758789, "learning_rate": 0.0001811134411223657, "loss": 1.1977, "step": 4483 }, { "epoch": 0.7984330484330484, "grad_norm": 0.564119815826416, "learning_rate": 0.00018110525376921862, "loss": 1.2686, "step": 4484 }, { "epoch": 0.7986111111111112, "grad_norm": 0.6385740041732788, "learning_rate": 0.00018109706482698256, "loss": 1.2418, "step": 4485 }, { "epoch": 0.7987891737891738, "grad_norm": 0.5550164580345154, "learning_rate": 0.00018108887429581802, "loss": 1.081, "step": 4486 }, { "epoch": 0.7989672364672364, "grad_norm": 0.5583973526954651, "learning_rate": 0.00018108068217588544, "loss": 1.1757, "step": 4487 }, { "epoch": 0.7991452991452992, "grad_norm": 0.5533342957496643, "learning_rate": 0.00018107248846734527, "loss": 1.1947, "step": 4488 }, { "epoch": 0.7993233618233618, "grad_norm": 0.5291479229927063, "learning_rate": 0.00018106429317035815, "loss": 1.2769, "step": 4489 }, { "epoch": 0.7995014245014245, "grad_norm": 0.4680160582065582, "learning_rate": 0.00018105609628508458, "loss": 0.7059, "step": 4490 }, { "epoch": 0.7996794871794872, "grad_norm": 0.5364881157875061, "learning_rate": 0.00018104789781168517, "loss": 1.0566, "step": 4491 }, { "epoch": 0.7998575498575499, "grad_norm": 0.5917307734489441, "learning_rate": 0.0001810396977503206, "loss": 1.2263, "step": 4492 }, { "epoch": 0.8000356125356125, "grad_norm": 0.6013199090957642, 
"learning_rate": 0.0001810314961011515, "loss": 1.2053, "step": 4493 }, { "epoch": 0.8002136752136753, "grad_norm": 0.6005663275718689, "learning_rate": 0.0001810232928643385, "loss": 1.2241, "step": 4494 }, { "epoch": 0.8003917378917379, "grad_norm": 0.49207603931427, "learning_rate": 0.00018101508804004246, "loss": 1.0661, "step": 4495 }, { "epoch": 0.8005698005698005, "grad_norm": 0.4834063947200775, "learning_rate": 0.00018100688162842401, "loss": 1.1745, "step": 4496 }, { "epoch": 0.8007478632478633, "grad_norm": 0.5347156524658203, "learning_rate": 0.000180998673629644, "loss": 1.0679, "step": 4497 }, { "epoch": 0.8009259259259259, "grad_norm": 0.5815600156784058, "learning_rate": 0.00018099046404386327, "loss": 1.2652, "step": 4498 }, { "epoch": 0.8011039886039886, "grad_norm": 0.5291135311126709, "learning_rate": 0.00018098225287124263, "loss": 1.2072, "step": 4499 }, { "epoch": 0.8012820512820513, "grad_norm": 0.5779497027397156, "learning_rate": 0.000180974040111943, "loss": 1.3277, "step": 4500 }, { "epoch": 0.801460113960114, "grad_norm": 0.44566696882247925, "learning_rate": 0.0001809658257661252, "loss": 0.7702, "step": 4501 }, { "epoch": 0.8016381766381766, "grad_norm": 0.5407577753067017, "learning_rate": 0.00018095760983395027, "loss": 1.2894, "step": 4502 }, { "epoch": 0.8018162393162394, "grad_norm": 0.4771903455257416, "learning_rate": 0.00018094939231557916, "loss": 1.045, "step": 4503 }, { "epoch": 0.801994301994302, "grad_norm": 0.5970945358276367, "learning_rate": 0.00018094117321117286, "loss": 1.2059, "step": 4504 }, { "epoch": 0.8021723646723646, "grad_norm": 0.4959338903427124, "learning_rate": 0.0001809329525208924, "loss": 1.155, "step": 4505 }, { "epoch": 0.8023504273504274, "grad_norm": 0.5142548084259033, "learning_rate": 0.00018092473024489887, "loss": 0.9413, "step": 4506 }, { "epoch": 0.80252849002849, "grad_norm": 0.5336433053016663, "learning_rate": 0.00018091650638335334, "loss": 1.0699, "step": 4507 }, { "epoch": 
0.8027065527065527, "grad_norm": 0.47770628333091736, "learning_rate": 0.00018090828093641698, "loss": 1.1515, "step": 4508 }, { "epoch": 0.8028846153846154, "grad_norm": 0.5443438291549683, "learning_rate": 0.00018090005390425091, "loss": 1.189, "step": 4509 }, { "epoch": 0.8030626780626781, "grad_norm": 0.523179829120636, "learning_rate": 0.00018089182528701632, "loss": 1.1272, "step": 4510 }, { "epoch": 0.8032407407407407, "grad_norm": 0.49628451466560364, "learning_rate": 0.00018088359508487448, "loss": 0.9754, "step": 4511 }, { "epoch": 0.8034188034188035, "grad_norm": 0.5933086276054382, "learning_rate": 0.00018087536329798663, "loss": 1.2111, "step": 4512 }, { "epoch": 0.8035968660968661, "grad_norm": 0.4565310776233673, "learning_rate": 0.00018086712992651402, "loss": 0.7729, "step": 4513 }, { "epoch": 0.8037749287749287, "grad_norm": 0.5013461112976074, "learning_rate": 0.00018085889497061798, "loss": 1.2178, "step": 4514 }, { "epoch": 0.8039529914529915, "grad_norm": 0.5170024633407593, "learning_rate": 0.00018085065843045987, "loss": 0.9181, "step": 4515 }, { "epoch": 0.8041310541310541, "grad_norm": 0.583363950252533, "learning_rate": 0.00018084242030620104, "loss": 1.1542, "step": 4516 }, { "epoch": 0.8043091168091168, "grad_norm": 0.46835777163505554, "learning_rate": 0.00018083418059800297, "loss": 0.8954, "step": 4517 }, { "epoch": 0.8044871794871795, "grad_norm": 0.5145657062530518, "learning_rate": 0.000180825939306027, "loss": 1.0417, "step": 4518 }, { "epoch": 0.8046652421652422, "grad_norm": 0.47216105461120605, "learning_rate": 0.00018081769643043467, "loss": 0.9516, "step": 4519 }, { "epoch": 0.8048433048433048, "grad_norm": 0.5059915781021118, "learning_rate": 0.0001808094519713875, "loss": 1.1643, "step": 4520 }, { "epoch": 0.8050213675213675, "grad_norm": 0.5406439900398254, "learning_rate": 0.00018080120592904692, "loss": 1.2038, "step": 4521 }, { "epoch": 0.8051994301994302, "grad_norm": 0.6123420000076294, "learning_rate": 
0.0001807929583035746, "loss": 1.4004, "step": 4522 }, { "epoch": 0.8053774928774928, "grad_norm": 0.49699845910072327, "learning_rate": 0.00018078470909513208, "loss": 1.0347, "step": 4523 }, { "epoch": 0.8055555555555556, "grad_norm": 0.5369421243667603, "learning_rate": 0.000180776458303881, "loss": 1.0418, "step": 4524 }, { "epoch": 0.8057336182336182, "grad_norm": 0.5407396554946899, "learning_rate": 0.00018076820592998301, "loss": 0.9546, "step": 4525 }, { "epoch": 0.8059116809116809, "grad_norm": 0.5749752521514893, "learning_rate": 0.00018075995197359984, "loss": 1.1438, "step": 4526 }, { "epoch": 0.8060897435897436, "grad_norm": 0.5523102283477783, "learning_rate": 0.00018075169643489317, "loss": 1.1312, "step": 4527 }, { "epoch": 0.8062678062678063, "grad_norm": 0.5767508149147034, "learning_rate": 0.00018074343931402472, "loss": 1.1951, "step": 4528 }, { "epoch": 0.8064458689458689, "grad_norm": 0.5262924432754517, "learning_rate": 0.00018073518061115633, "loss": 1.1985, "step": 4529 }, { "epoch": 0.8066239316239316, "grad_norm": 0.4742378294467926, "learning_rate": 0.0001807269203264498, "loss": 1.0126, "step": 4530 }, { "epoch": 0.8068019943019943, "grad_norm": 0.5190158486366272, "learning_rate": 0.00018071865846006692, "loss": 0.9985, "step": 4531 }, { "epoch": 0.8069800569800569, "grad_norm": 0.5910618305206299, "learning_rate": 0.00018071039501216964, "loss": 1.2776, "step": 4532 }, { "epoch": 0.8071581196581197, "grad_norm": 0.5363098382949829, "learning_rate": 0.00018070212998291983, "loss": 1.3346, "step": 4533 }, { "epoch": 0.8073361823361823, "grad_norm": 0.47711408138275146, "learning_rate": 0.0001806938633724794, "loss": 1.04, "step": 4534 }, { "epoch": 0.8075142450142451, "grad_norm": 0.5092964172363281, "learning_rate": 0.0001806855951810104, "loss": 1.1409, "step": 4535 }, { "epoch": 0.8076923076923077, "grad_norm": 0.5828777551651001, "learning_rate": 0.00018067732540867472, "loss": 1.3048, "step": 4536 }, { "epoch": 0.8078703703703703, 
"grad_norm": 0.5779826045036316, "learning_rate": 0.00018066905405563445, "loss": 1.1599, "step": 4537 }, { "epoch": 0.8080484330484331, "grad_norm": 0.49908435344696045, "learning_rate": 0.00018066078112205167, "loss": 1.1502, "step": 4538 }, { "epoch": 0.8082264957264957, "grad_norm": 0.4772704839706421, "learning_rate": 0.0001806525066080884, "loss": 0.7925, "step": 4539 }, { "epoch": 0.8084045584045584, "grad_norm": 0.4298383295536041, "learning_rate": 0.00018064423051390683, "loss": 0.7322, "step": 4540 }, { "epoch": 0.8085826210826211, "grad_norm": 0.49349579215049744, "learning_rate": 0.0001806359528396691, "loss": 1.0021, "step": 4541 }, { "epoch": 0.8087606837606838, "grad_norm": 0.4698609411716461, "learning_rate": 0.00018062767358553735, "loss": 0.9751, "step": 4542 }, { "epoch": 0.8089387464387464, "grad_norm": 0.4949014186859131, "learning_rate": 0.00018061939275167385, "loss": 0.9553, "step": 4543 }, { "epoch": 0.8091168091168092, "grad_norm": 0.5604463815689087, "learning_rate": 0.0001806111103382408, "loss": 0.9894, "step": 4544 }, { "epoch": 0.8092948717948718, "grad_norm": 0.5761561989784241, "learning_rate": 0.00018060282634540053, "loss": 1.258, "step": 4545 }, { "epoch": 0.8094729344729344, "grad_norm": 0.5239115357398987, "learning_rate": 0.00018059454077331527, "loss": 0.9189, "step": 4546 }, { "epoch": 0.8096509971509972, "grad_norm": 0.47902220487594604, "learning_rate": 0.00018058625362214742, "loss": 1.0389, "step": 4547 }, { "epoch": 0.8098290598290598, "grad_norm": 0.6274173259735107, "learning_rate": 0.00018057796489205936, "loss": 1.3368, "step": 4548 }, { "epoch": 0.8100071225071225, "grad_norm": 0.5789401531219482, "learning_rate": 0.00018056967458321345, "loss": 1.1473, "step": 4549 }, { "epoch": 0.8101851851851852, "grad_norm": 0.5850043296813965, "learning_rate": 0.0001805613826957721, "loss": 1.2224, "step": 4550 }, { "epoch": 0.8103632478632479, "grad_norm": 0.6310738921165466, "learning_rate": 0.00018055308922989788, "loss": 
1.0707, "step": 4551 }, { "epoch": 0.8105413105413105, "grad_norm": 0.5198429822921753, "learning_rate": 0.00018054479418575317, "loss": 0.8984, "step": 4552 }, { "epoch": 0.8107193732193733, "grad_norm": 0.5757743120193481, "learning_rate": 0.00018053649756350054, "loss": 1.2007, "step": 4553 }, { "epoch": 0.8108974358974359, "grad_norm": 0.5109567642211914, "learning_rate": 0.0001805281993633025, "loss": 1.0696, "step": 4554 }, { "epoch": 0.8110754985754985, "grad_norm": 0.5030225515365601, "learning_rate": 0.00018051989958532173, "loss": 0.9667, "step": 4555 }, { "epoch": 0.8112535612535613, "grad_norm": 0.5291743874549866, "learning_rate": 0.00018051159822972079, "loss": 1.0219, "step": 4556 }, { "epoch": 0.8114316239316239, "grad_norm": 0.5874896049499512, "learning_rate": 0.00018050329529666233, "loss": 0.8589, "step": 4557 }, { "epoch": 0.8116096866096866, "grad_norm": 0.673284113407135, "learning_rate": 0.000180494990786309, "loss": 1.1902, "step": 4558 }, { "epoch": 0.8117877492877493, "grad_norm": 0.4742524027824402, "learning_rate": 0.00018048668469882354, "loss": 1.0578, "step": 4559 }, { "epoch": 0.811965811965812, "grad_norm": 0.5519167184829712, "learning_rate": 0.0001804783770343687, "loss": 1.083, "step": 4560 }, { "epoch": 0.8121438746438746, "grad_norm": 0.5669941306114197, "learning_rate": 0.00018047006779310727, "loss": 1.0784, "step": 4561 }, { "epoch": 0.8123219373219374, "grad_norm": 0.512759804725647, "learning_rate": 0.000180461756975202, "loss": 1.0361, "step": 4562 }, { "epoch": 0.8125, "grad_norm": 0.5721749067306519, "learning_rate": 0.00018045344458081575, "loss": 1.0246, "step": 4563 }, { "epoch": 0.8126780626780626, "grad_norm": 0.566430389881134, "learning_rate": 0.00018044513061011137, "loss": 1.1452, "step": 4564 }, { "epoch": 0.8128561253561254, "grad_norm": 0.49391916394233704, "learning_rate": 0.00018043681506325177, "loss": 0.89, "step": 4565 }, { "epoch": 0.813034188034188, "grad_norm": 0.5379437804222107, "learning_rate": 
0.00018042849794039988, "loss": 1.1289, "step": 4566 }, { "epoch": 0.8132122507122507, "grad_norm": 0.5667982697486877, "learning_rate": 0.00018042017924171865, "loss": 1.1596, "step": 4567 }, { "epoch": 0.8133903133903134, "grad_norm": 0.6214209794998169, "learning_rate": 0.00018041185896737109, "loss": 1.0622, "step": 4568 }, { "epoch": 0.8135683760683761, "grad_norm": 0.5442491173744202, "learning_rate": 0.00018040353711752015, "loss": 1.0536, "step": 4569 }, { "epoch": 0.8137464387464387, "grad_norm": 0.5266172885894775, "learning_rate": 0.00018039521369232894, "loss": 1.0576, "step": 4570 }, { "epoch": 0.8139245014245015, "grad_norm": 0.6057912111282349, "learning_rate": 0.00018038688869196053, "loss": 1.3067, "step": 4571 }, { "epoch": 0.8141025641025641, "grad_norm": 0.489869087934494, "learning_rate": 0.00018037856211657803, "loss": 1.0279, "step": 4572 }, { "epoch": 0.8142806267806267, "grad_norm": 0.5497978329658508, "learning_rate": 0.00018037023396634457, "loss": 1.1568, "step": 4573 }, { "epoch": 0.8144586894586895, "grad_norm": 0.5243251919746399, "learning_rate": 0.0001803619042414233, "loss": 0.9767, "step": 4574 }, { "epoch": 0.8146367521367521, "grad_norm": 0.503032922744751, "learning_rate": 0.0001803535729419775, "loss": 1.065, "step": 4575 }, { "epoch": 0.8148148148148148, "grad_norm": 0.49955418705940247, "learning_rate": 0.00018034524006817034, "loss": 1.2752, "step": 4576 }, { "epoch": 0.8149928774928775, "grad_norm": 0.5746406316757202, "learning_rate": 0.00018033690562016508, "loss": 1.098, "step": 4577 }, { "epoch": 0.8151709401709402, "grad_norm": 0.5224192142486572, "learning_rate": 0.00018032856959812507, "loss": 1.1284, "step": 4578 }, { "epoch": 0.8153490028490028, "grad_norm": 0.5484535694122314, "learning_rate": 0.00018032023200221362, "loss": 0.9182, "step": 4579 }, { "epoch": 0.8155270655270656, "grad_norm": 0.5003355741500854, "learning_rate": 0.00018031189283259405, "loss": 1.136, "step": 4580 }, { "epoch": 0.8157051282051282, 
"grad_norm": 0.5395768284797668, "learning_rate": 0.00018030355208942977, "loss": 1.2349, "step": 4581 }, { "epoch": 0.8158831908831908, "grad_norm": 0.561966598033905, "learning_rate": 0.0001802952097728842, "loss": 0.999, "step": 4582 }, { "epoch": 0.8160612535612536, "grad_norm": 0.4886479675769806, "learning_rate": 0.00018028686588312083, "loss": 0.9165, "step": 4583 }, { "epoch": 0.8162393162393162, "grad_norm": 0.4769509732723236, "learning_rate": 0.00018027852042030307, "loss": 1.1377, "step": 4584 }, { "epoch": 0.8164173789173789, "grad_norm": 0.4723633825778961, "learning_rate": 0.00018027017338459448, "loss": 1.0274, "step": 4585 }, { "epoch": 0.8165954415954416, "grad_norm": 0.5773285627365112, "learning_rate": 0.00018026182477615859, "loss": 1.1468, "step": 4586 }, { "epoch": 0.8167735042735043, "grad_norm": 0.5529203414916992, "learning_rate": 0.00018025347459515895, "loss": 1.0815, "step": 4587 }, { "epoch": 0.8169515669515669, "grad_norm": 0.5449469685554504, "learning_rate": 0.00018024512284175922, "loss": 1.1637, "step": 4588 }, { "epoch": 0.8171296296296297, "grad_norm": 0.5155341625213623, "learning_rate": 0.00018023676951612298, "loss": 1.1842, "step": 4589 }, { "epoch": 0.8173076923076923, "grad_norm": 0.5569564700126648, "learning_rate": 0.00018022841461841393, "loss": 0.9254, "step": 4590 }, { "epoch": 0.8174857549857549, "grad_norm": 0.45203131437301636, "learning_rate": 0.00018022005814879573, "loss": 0.9561, "step": 4591 }, { "epoch": 0.8176638176638177, "grad_norm": 0.5735056400299072, "learning_rate": 0.00018021170010743218, "loss": 1.1402, "step": 4592 }, { "epoch": 0.8178418803418803, "grad_norm": 0.6075260043144226, "learning_rate": 0.00018020334049448697, "loss": 0.8601, "step": 4593 }, { "epoch": 0.8180199430199431, "grad_norm": 0.522682785987854, "learning_rate": 0.0001801949793101239, "loss": 1.0088, "step": 4594 }, { "epoch": 0.8181980056980057, "grad_norm": 0.5648437142372131, "learning_rate": 0.00018018661655450682, "loss": 
0.8359, "step": 4595 }, { "epoch": 0.8183760683760684, "grad_norm": 0.5406472086906433, "learning_rate": 0.00018017825222779954, "loss": 1.1553, "step": 4596 }, { "epoch": 0.8185541310541311, "grad_norm": 0.4917788803577423, "learning_rate": 0.000180169886330166, "loss": 1.2198, "step": 4597 }, { "epoch": 0.8187321937321937, "grad_norm": 0.6293069124221802, "learning_rate": 0.00018016151886177004, "loss": 1.0245, "step": 4598 }, { "epoch": 0.8189102564102564, "grad_norm": 0.47277843952178955, "learning_rate": 0.00018015314982277564, "loss": 1.1141, "step": 4599 }, { "epoch": 0.8190883190883191, "grad_norm": 0.6132395267486572, "learning_rate": 0.0001801447792133468, "loss": 1.1227, "step": 4600 }, { "epoch": 0.8192663817663818, "grad_norm": 0.46839597821235657, "learning_rate": 0.00018013640703364747, "loss": 0.9239, "step": 4601 }, { "epoch": 0.8194444444444444, "grad_norm": 0.5055009722709656, "learning_rate": 0.00018012803328384171, "loss": 0.8486, "step": 4602 }, { "epoch": 0.8196225071225072, "grad_norm": 0.5094841718673706, "learning_rate": 0.00018011965796409362, "loss": 0.9969, "step": 4603 }, { "epoch": 0.8198005698005698, "grad_norm": 0.6177363395690918, "learning_rate": 0.00018011128107456726, "loss": 1.242, "step": 4604 }, { "epoch": 0.8199786324786325, "grad_norm": 0.5280042290687561, "learning_rate": 0.00018010290261542676, "loss": 1.1569, "step": 4605 }, { "epoch": 0.8201566951566952, "grad_norm": 0.5259367227554321, "learning_rate": 0.00018009452258683625, "loss": 0.9993, "step": 4606 }, { "epoch": 0.8203347578347578, "grad_norm": 0.464469850063324, "learning_rate": 0.00018008614098896, "loss": 1.0288, "step": 4607 }, { "epoch": 0.8205128205128205, "grad_norm": 0.6136324405670166, "learning_rate": 0.00018007775782196214, "loss": 1.1541, "step": 4608 }, { "epoch": 0.8206908831908832, "grad_norm": 0.5376590490341187, "learning_rate": 0.000180069373086007, "loss": 1.0624, "step": 4609 }, { "epoch": 0.8208689458689459, "grad_norm": 0.662916362285614, 
"learning_rate": 0.0001800609867812588, "loss": 1.1502, "step": 4610 }, { "epoch": 0.8210470085470085, "grad_norm": 0.5153383612632751, "learning_rate": 0.00018005259890788188, "loss": 0.9789, "step": 4611 }, { "epoch": 0.8212250712250713, "grad_norm": 0.5042359232902527, "learning_rate": 0.00018004420946604057, "loss": 0.9585, "step": 4612 }, { "epoch": 0.8214031339031339, "grad_norm": 0.5395993590354919, "learning_rate": 0.00018003581845589927, "loss": 1.159, "step": 4613 }, { "epoch": 0.8215811965811965, "grad_norm": 0.5561928749084473, "learning_rate": 0.00018002742587762237, "loss": 1.1604, "step": 4614 }, { "epoch": 0.8217592592592593, "grad_norm": 0.5602710843086243, "learning_rate": 0.00018001903173137432, "loss": 0.9922, "step": 4615 }, { "epoch": 0.8219373219373219, "grad_norm": 0.5529088377952576, "learning_rate": 0.00018001063601731955, "loss": 1.0943, "step": 4616 }, { "epoch": 0.8221153846153846, "grad_norm": 0.5156456828117371, "learning_rate": 0.00018000223873562254, "loss": 1.1399, "step": 4617 }, { "epoch": 0.8222934472934473, "grad_norm": 0.4868306517601013, "learning_rate": 0.0001799938398864479, "loss": 1.0692, "step": 4618 }, { "epoch": 0.82247150997151, "grad_norm": 0.5372915267944336, "learning_rate": 0.0001799854394699601, "loss": 1.2675, "step": 4619 }, { "epoch": 0.8226495726495726, "grad_norm": 0.6101839542388916, "learning_rate": 0.0001799770374863238, "loss": 0.9586, "step": 4620 }, { "epoch": 0.8228276353276354, "grad_norm": 0.5034586787223816, "learning_rate": 0.00017996863393570357, "loss": 1.0885, "step": 4621 }, { "epoch": 0.823005698005698, "grad_norm": 0.5608823299407959, "learning_rate": 0.0001799602288182641, "loss": 1.0002, "step": 4622 }, { "epoch": 0.8231837606837606, "grad_norm": 0.5700048208236694, "learning_rate": 0.00017995182213417, "loss": 1.1484, "step": 4623 }, { "epoch": 0.8233618233618234, "grad_norm": 0.5283229351043701, "learning_rate": 0.00017994341388358608, "loss": 1.0744, "step": 4624 }, { "epoch": 
0.823539886039886, "grad_norm": 0.5215758681297302, "learning_rate": 0.00017993500406667703, "loss": 1.2686, "step": 4625 }, { "epoch": 0.8237179487179487, "grad_norm": 0.528883159160614, "learning_rate": 0.0001799265926836076, "loss": 1.1393, "step": 4626 }, { "epoch": 0.8238960113960114, "grad_norm": 0.5589834451675415, "learning_rate": 0.00017991817973454265, "loss": 1.1744, "step": 4627 }, { "epoch": 0.8240740740740741, "grad_norm": 0.49817174673080444, "learning_rate": 0.00017990976521964697, "loss": 1.0544, "step": 4628 }, { "epoch": 0.8242521367521367, "grad_norm": 0.613961398601532, "learning_rate": 0.00017990134913908542, "loss": 1.0951, "step": 4629 }, { "epoch": 0.8244301994301995, "grad_norm": 0.47278255224227905, "learning_rate": 0.00017989293149302295, "loss": 0.9742, "step": 4630 }, { "epoch": 0.8246082621082621, "grad_norm": 0.49807092547416687, "learning_rate": 0.00017988451228162443, "loss": 1.0985, "step": 4631 }, { "epoch": 0.8247863247863247, "grad_norm": 0.5624374747276306, "learning_rate": 0.00017987609150505485, "loss": 1.2446, "step": 4632 }, { "epoch": 0.8249643874643875, "grad_norm": 0.4863535761833191, "learning_rate": 0.00017986766916347916, "loss": 1.0239, "step": 4633 }, { "epoch": 0.8251424501424501, "grad_norm": 0.679585874080658, "learning_rate": 0.00017985924525706245, "loss": 1.1698, "step": 4634 }, { "epoch": 0.8253205128205128, "grad_norm": 0.5545455813407898, "learning_rate": 0.00017985081978596967, "loss": 1.0926, "step": 4635 }, { "epoch": 0.8254985754985755, "grad_norm": 0.5303109288215637, "learning_rate": 0.000179842392750366, "loss": 1.0978, "step": 4636 }, { "epoch": 0.8256766381766382, "grad_norm": 0.6053299307823181, "learning_rate": 0.00017983396415041644, "loss": 1.0596, "step": 4637 }, { "epoch": 0.8258547008547008, "grad_norm": 0.5241885185241699, "learning_rate": 0.00017982553398628625, "loss": 0.8541, "step": 4638 }, { "epoch": 0.8260327635327636, "grad_norm": 0.5934443473815918, "learning_rate": 
0.00017981710225814052, "loss": 1.145, "step": 4639 }, { "epoch": 0.8262108262108262, "grad_norm": 0.5341619849205017, "learning_rate": 0.00017980866896614447, "loss": 1.0745, "step": 4640 }, { "epoch": 0.8263888888888888, "grad_norm": 0.6732913851737976, "learning_rate": 0.00017980023411046336, "loss": 1.0775, "step": 4641 }, { "epoch": 0.8265669515669516, "grad_norm": 0.5134359002113342, "learning_rate": 0.0001797917976912624, "loss": 1.0298, "step": 4642 }, { "epoch": 0.8267450142450142, "grad_norm": 0.5234783887863159, "learning_rate": 0.00017978335970870698, "loss": 1.1069, "step": 4643 }, { "epoch": 0.8269230769230769, "grad_norm": 0.4776439964771271, "learning_rate": 0.00017977492016296232, "loss": 0.6367, "step": 4644 }, { "epoch": 0.8271011396011396, "grad_norm": 0.53763347864151, "learning_rate": 0.0001797664790541938, "loss": 1.1356, "step": 4645 }, { "epoch": 0.8272792022792023, "grad_norm": 0.5082212686538696, "learning_rate": 0.00017975803638256682, "loss": 0.7873, "step": 4646 }, { "epoch": 0.8274572649572649, "grad_norm": 0.5156424641609192, "learning_rate": 0.00017974959214824685, "loss": 1.084, "step": 4647 }, { "epoch": 0.8276353276353277, "grad_norm": 0.5275198817253113, "learning_rate": 0.00017974114635139926, "loss": 1.1219, "step": 4648 }, { "epoch": 0.8278133903133903, "grad_norm": 0.5548223257064819, "learning_rate": 0.00017973269899218956, "loss": 1.0808, "step": 4649 }, { "epoch": 0.8279914529914529, "grad_norm": 0.535347580909729, "learning_rate": 0.00017972425007078323, "loss": 1.1211, "step": 4650 }, { "epoch": 0.8281695156695157, "grad_norm": 0.5299580693244934, "learning_rate": 0.00017971579958734587, "loss": 0.9911, "step": 4651 }, { "epoch": 0.8283475783475783, "grad_norm": 0.4863550066947937, "learning_rate": 0.000179707347542043, "loss": 0.9122, "step": 4652 }, { "epoch": 0.8285256410256411, "grad_norm": 0.5284972190856934, "learning_rate": 0.00017969889393504022, "loss": 1.0424, "step": 4653 }, { "epoch": 0.8287037037037037, 
"grad_norm": 0.5305661559104919, "learning_rate": 0.00017969043876650317, "loss": 1.1122, "step": 4654 }, { "epoch": 0.8288817663817664, "grad_norm": 0.5645657777786255, "learning_rate": 0.00017968198203659755, "loss": 1.2195, "step": 4655 }, { "epoch": 0.8290598290598291, "grad_norm": 0.521649181842804, "learning_rate": 0.000179673523745489, "loss": 1.2684, "step": 4656 }, { "epoch": 0.8292378917378918, "grad_norm": 0.5984422564506531, "learning_rate": 0.00017966506389334322, "loss": 0.9894, "step": 4657 }, { "epoch": 0.8294159544159544, "grad_norm": 0.5318729281425476, "learning_rate": 0.00017965660248032603, "loss": 1.2929, "step": 4658 }, { "epoch": 0.8295940170940171, "grad_norm": 0.4666081368923187, "learning_rate": 0.0001796481395066032, "loss": 0.9646, "step": 4659 }, { "epoch": 0.8297720797720798, "grad_norm": 0.5780388116836548, "learning_rate": 0.00017963967497234054, "loss": 1.1043, "step": 4660 }, { "epoch": 0.8299501424501424, "grad_norm": 0.44089245796203613, "learning_rate": 0.00017963120887770387, "loss": 0.8932, "step": 4661 }, { "epoch": 0.8301282051282052, "grad_norm": 0.5198349356651306, "learning_rate": 0.0001796227412228591, "loss": 0.9378, "step": 4662 }, { "epoch": 0.8303062678062678, "grad_norm": 0.5298343896865845, "learning_rate": 0.00017961427200797206, "loss": 1.0272, "step": 4663 }, { "epoch": 0.8304843304843305, "grad_norm": 0.5087099671363831, "learning_rate": 0.0001796058012332088, "loss": 0.989, "step": 4664 }, { "epoch": 0.8306623931623932, "grad_norm": 0.504228949546814, "learning_rate": 0.0001795973288987352, "loss": 1.0134, "step": 4665 }, { "epoch": 0.8308404558404558, "grad_norm": 0.6788033843040466, "learning_rate": 0.00017958885500471728, "loss": 0.8856, "step": 4666 }, { "epoch": 0.8310185185185185, "grad_norm": 0.5166172385215759, "learning_rate": 0.00017958037955132113, "loss": 0.8711, "step": 4667 }, { "epoch": 0.8311965811965812, "grad_norm": 0.5712400078773499, "learning_rate": 0.00017957190253871272, "loss": 1.0418, 
"step": 4668 }, { "epoch": 0.8313746438746439, "grad_norm": 0.5531231164932251, "learning_rate": 0.0001795634239670582, "loss": 0.9021, "step": 4669 }, { "epoch": 0.8315527065527065, "grad_norm": 0.6165615916252136, "learning_rate": 0.00017955494383652365, "loss": 1.0927, "step": 4670 }, { "epoch": 0.8317307692307693, "grad_norm": 0.5920368432998657, "learning_rate": 0.00017954646214727525, "loss": 1.231, "step": 4671 }, { "epoch": 0.8319088319088319, "grad_norm": 0.5037244558334351, "learning_rate": 0.00017953797889947915, "loss": 0.85, "step": 4672 }, { "epoch": 0.8320868945868946, "grad_norm": 0.5618211627006531, "learning_rate": 0.0001795294940933016, "loss": 1.145, "step": 4673 }, { "epoch": 0.8322649572649573, "grad_norm": 0.6275593042373657, "learning_rate": 0.00017952100772890877, "loss": 0.9061, "step": 4674 }, { "epoch": 0.83244301994302, "grad_norm": 0.5376096367835999, "learning_rate": 0.00017951251980646702, "loss": 1.1948, "step": 4675 }, { "epoch": 0.8326210826210826, "grad_norm": 0.5162268877029419, "learning_rate": 0.0001795040303261426, "loss": 1.2158, "step": 4676 }, { "epoch": 0.8327991452991453, "grad_norm": 0.5730512142181396, "learning_rate": 0.0001794955392881019, "loss": 0.9962, "step": 4677 }, { "epoch": 0.832977207977208, "grad_norm": 0.5128712058067322, "learning_rate": 0.00017948704669251122, "loss": 1.2797, "step": 4678 }, { "epoch": 0.8331552706552706, "grad_norm": 0.5173979997634888, "learning_rate": 0.00017947855253953697, "loss": 1.1093, "step": 4679 }, { "epoch": 0.8333333333333334, "grad_norm": 0.504646897315979, "learning_rate": 0.0001794700568293456, "loss": 1.3171, "step": 4680 }, { "epoch": 0.833511396011396, "grad_norm": 0.5638105869293213, "learning_rate": 0.00017946155956210356, "loss": 0.9224, "step": 4681 }, { "epoch": 0.8336894586894587, "grad_norm": 0.5289680361747742, "learning_rate": 0.00017945306073797733, "loss": 0.8919, "step": 4682 }, { "epoch": 0.8338675213675214, "grad_norm": 0.5224629044532776, 
"learning_rate": 0.0001794445603571334, "loss": 1.0345, "step": 4683 }, { "epoch": 0.834045584045584, "grad_norm": 0.5342282056808472, "learning_rate": 0.00017943605841973836, "loss": 1.2305, "step": 4684 }, { "epoch": 0.8342236467236467, "grad_norm": 0.6118032336235046, "learning_rate": 0.00017942755492595874, "loss": 1.0316, "step": 4685 }, { "epoch": 0.8344017094017094, "grad_norm": 0.49112311005592346, "learning_rate": 0.00017941904987596121, "loss": 0.9809, "step": 4686 }, { "epoch": 0.8345797720797721, "grad_norm": 0.5044063925743103, "learning_rate": 0.0001794105432699124, "loss": 0.834, "step": 4687 }, { "epoch": 0.8347578347578347, "grad_norm": 0.4849987328052521, "learning_rate": 0.00017940203510797892, "loss": 0.9971, "step": 4688 }, { "epoch": 0.8349358974358975, "grad_norm": 0.5539469122886658, "learning_rate": 0.00017939352539032748, "loss": 1.1599, "step": 4689 }, { "epoch": 0.8351139601139601, "grad_norm": 0.5474258065223694, "learning_rate": 0.00017938501411712485, "loss": 1.25, "step": 4690 }, { "epoch": 0.8352920227920227, "grad_norm": 0.4880213737487793, "learning_rate": 0.0001793765012885378, "loss": 1.1471, "step": 4691 }, { "epoch": 0.8354700854700855, "grad_norm": 0.5602759718894958, "learning_rate": 0.00017936798690473309, "loss": 1.0723, "step": 4692 }, { "epoch": 0.8356481481481481, "grad_norm": 0.627775251865387, "learning_rate": 0.00017935947096587755, "loss": 1.3768, "step": 4693 }, { "epoch": 0.8358262108262108, "grad_norm": 0.5324847102165222, "learning_rate": 0.00017935095347213804, "loss": 0.9945, "step": 4694 }, { "epoch": 0.8360042735042735, "grad_norm": 0.5244048237800598, "learning_rate": 0.0001793424344236814, "loss": 1.1725, "step": 4695 }, { "epoch": 0.8361823361823362, "grad_norm": 0.5420708656311035, "learning_rate": 0.00017933391382067462, "loss": 1.1267, "step": 4696 }, { "epoch": 0.8363603988603988, "grad_norm": 0.5285456776618958, "learning_rate": 0.00017932539166328458, "loss": 1.0368, "step": 4697 }, { "epoch": 
0.8365384615384616, "grad_norm": 0.5330373048782349, "learning_rate": 0.00017931686795167825, "loss": 1.1082, "step": 4698 }, { "epoch": 0.8367165242165242, "grad_norm": 0.5516682267189026, "learning_rate": 0.0001793083426860227, "loss": 1.1833, "step": 4699 }, { "epoch": 0.8368945868945868, "grad_norm": 0.5229935646057129, "learning_rate": 0.0001792998158664849, "loss": 0.8527, "step": 4700 }, { "epoch": 0.8370726495726496, "grad_norm": 0.4821490943431854, "learning_rate": 0.00017929128749323195, "loss": 1.1201, "step": 4701 }, { "epoch": 0.8372507122507122, "grad_norm": 0.6276404857635498, "learning_rate": 0.0001792827575664309, "loss": 1.0986, "step": 4702 }, { "epoch": 0.8374287749287749, "grad_norm": 0.5681334733963013, "learning_rate": 0.00017927422608624897, "loss": 1.3821, "step": 4703 }, { "epoch": 0.8376068376068376, "grad_norm": 0.5257087349891663, "learning_rate": 0.00017926569305285324, "loss": 1.1033, "step": 4704 }, { "epoch": 0.8377849002849003, "grad_norm": 0.5665168166160583, "learning_rate": 0.0001792571584664109, "loss": 1.104, "step": 4705 }, { "epoch": 0.8379629629629629, "grad_norm": 0.5202076435089111, "learning_rate": 0.00017924862232708918, "loss": 1.052, "step": 4706 }, { "epoch": 0.8381410256410257, "grad_norm": 0.5103010535240173, "learning_rate": 0.00017924008463505534, "loss": 1.1348, "step": 4707 }, { "epoch": 0.8383190883190883, "grad_norm": 0.6811865568161011, "learning_rate": 0.00017923154539047667, "loss": 1.2804, "step": 4708 }, { "epoch": 0.8384971509971509, "grad_norm": 0.46808311343193054, "learning_rate": 0.00017922300459352042, "loss": 0.9302, "step": 4709 }, { "epoch": 0.8386752136752137, "grad_norm": 0.47713059186935425, "learning_rate": 0.00017921446224435398, "loss": 0.78, "step": 4710 }, { "epoch": 0.8388532763532763, "grad_norm": 0.7579890489578247, "learning_rate": 0.0001792059183431447, "loss": 1.4776, "step": 4711 }, { "epoch": 0.8390313390313391, "grad_norm": 0.6009423136711121, "learning_rate": 
0.00017919737289006, "loss": 1.2679, "step": 4712 }, { "epoch": 0.8392094017094017, "grad_norm": 0.56390780210495, "learning_rate": 0.00017918882588526729, "loss": 1.0402, "step": 4713 }, { "epoch": 0.8393874643874644, "grad_norm": 0.5698862075805664, "learning_rate": 0.00017918027732893404, "loss": 1.2336, "step": 4714 }, { "epoch": 0.8395655270655271, "grad_norm": 0.5016305446624756, "learning_rate": 0.0001791717272212277, "loss": 1.0373, "step": 4715 }, { "epoch": 0.8397435897435898, "grad_norm": 0.5886971950531006, "learning_rate": 0.0001791631755623159, "loss": 1.1062, "step": 4716 }, { "epoch": 0.8399216524216524, "grad_norm": 0.647833526134491, "learning_rate": 0.00017915462235236607, "loss": 1.0464, "step": 4717 }, { "epoch": 0.8400997150997151, "grad_norm": 0.4961194396018982, "learning_rate": 0.00017914606759154587, "loss": 1.0763, "step": 4718 }, { "epoch": 0.8402777777777778, "grad_norm": 0.47041359543800354, "learning_rate": 0.00017913751128002288, "loss": 1.0685, "step": 4719 }, { "epoch": 0.8404558404558404, "grad_norm": 0.5752858519554138, "learning_rate": 0.00017912895341796475, "loss": 1.0577, "step": 4720 }, { "epoch": 0.8406339031339032, "grad_norm": 0.5233224034309387, "learning_rate": 0.00017912039400553914, "loss": 1.1484, "step": 4721 }, { "epoch": 0.8408119658119658, "grad_norm": 0.5327485203742981, "learning_rate": 0.00017911183304291378, "loss": 1.0028, "step": 4722 }, { "epoch": 0.8409900284900285, "grad_norm": 0.5320752263069153, "learning_rate": 0.00017910327053025638, "loss": 1.1247, "step": 4723 }, { "epoch": 0.8411680911680912, "grad_norm": 0.529617965221405, "learning_rate": 0.00017909470646773477, "loss": 1.1698, "step": 4724 }, { "epoch": 0.8413461538461539, "grad_norm": 0.5055609345436096, "learning_rate": 0.00017908614085551664, "loss": 1.0925, "step": 4725 }, { "epoch": 0.8415242165242165, "grad_norm": 0.5356255769729614, "learning_rate": 0.00017907757369376985, "loss": 1.0354, "step": 4726 }, { "epoch": 0.8417022792022792, 
"grad_norm": 0.582834780216217, "learning_rate": 0.00017906900498266233, "loss": 1.1248, "step": 4727 }, { "epoch": 0.8418803418803419, "grad_norm": 0.5750834941864014, "learning_rate": 0.00017906043472236188, "loss": 1.0119, "step": 4728 }, { "epoch": 0.8420584045584045, "grad_norm": 0.5923320055007935, "learning_rate": 0.00017905186291303644, "loss": 1.0662, "step": 4729 }, { "epoch": 0.8422364672364673, "grad_norm": 0.4767811894416809, "learning_rate": 0.00017904328955485396, "loss": 1.0911, "step": 4730 }, { "epoch": 0.8424145299145299, "grad_norm": 0.5294556021690369, "learning_rate": 0.00017903471464798245, "loss": 1.2861, "step": 4731 }, { "epoch": 0.8425925925925926, "grad_norm": 0.599117636680603, "learning_rate": 0.00017902613819258985, "loss": 1.1707, "step": 4732 }, { "epoch": 0.8427706552706553, "grad_norm": 0.5912977457046509, "learning_rate": 0.00017901756018884424, "loss": 1.1884, "step": 4733 }, { "epoch": 0.842948717948718, "grad_norm": 0.587676465511322, "learning_rate": 0.0001790089806369137, "loss": 1.1054, "step": 4734 }, { "epoch": 0.8431267806267806, "grad_norm": 0.6271800398826599, "learning_rate": 0.0001790003995369663, "loss": 1.2094, "step": 4735 }, { "epoch": 0.8433048433048433, "grad_norm": 0.47198590636253357, "learning_rate": 0.00017899181688917017, "loss": 0.9561, "step": 4736 }, { "epoch": 0.843482905982906, "grad_norm": 0.690732479095459, "learning_rate": 0.00017898323269369351, "loss": 1.1629, "step": 4737 }, { "epoch": 0.8436609686609686, "grad_norm": 0.4926888048648834, "learning_rate": 0.00017897464695070445, "loss": 1.1097, "step": 4738 }, { "epoch": 0.8438390313390314, "grad_norm": 0.7071278691291809, "learning_rate": 0.00017896605966037128, "loss": 1.195, "step": 4739 }, { "epoch": 0.844017094017094, "grad_norm": 0.5650486350059509, "learning_rate": 0.00017895747082286216, "loss": 1.0107, "step": 4740 }, { "epoch": 0.8441951566951567, "grad_norm": 0.5291931629180908, "learning_rate": 0.00017894888043834545, "loss": 1.0104, 
"step": 4741 }, { "epoch": 0.8443732193732194, "grad_norm": 0.5751241445541382, "learning_rate": 0.00017894028850698942, "loss": 1.2482, "step": 4742 }, { "epoch": 0.844551282051282, "grad_norm": 0.5833632349967957, "learning_rate": 0.0001789316950289624, "loss": 1.0552, "step": 4743 }, { "epoch": 0.8447293447293447, "grad_norm": 0.543729841709137, "learning_rate": 0.00017892310000443282, "loss": 1.1453, "step": 4744 }, { "epoch": 0.8449074074074074, "grad_norm": 0.5674204230308533, "learning_rate": 0.00017891450343356902, "loss": 1.0757, "step": 4745 }, { "epoch": 0.8450854700854701, "grad_norm": 0.5161892771720886, "learning_rate": 0.00017890590531653946, "loss": 1.1163, "step": 4746 }, { "epoch": 0.8452635327635327, "grad_norm": 0.49907612800598145, "learning_rate": 0.00017889730565351258, "loss": 1.0356, "step": 4747 }, { "epoch": 0.8454415954415955, "grad_norm": 0.4994732439517975, "learning_rate": 0.00017888870444465692, "loss": 1.026, "step": 4748 }, { "epoch": 0.8456196581196581, "grad_norm": 0.6397520303726196, "learning_rate": 0.00017888010169014095, "loss": 0.957, "step": 4749 }, { "epoch": 0.8457977207977208, "grad_norm": 0.5379729270935059, "learning_rate": 0.00017887149739013327, "loss": 1.1664, "step": 4750 }, { "epoch": 0.8459757834757835, "grad_norm": 0.4487382769584656, "learning_rate": 0.00017886289154480246, "loss": 0.9377, "step": 4751 }, { "epoch": 0.8461538461538461, "grad_norm": 0.5645943880081177, "learning_rate": 0.00017885428415431707, "loss": 1.273, "step": 4752 }, { "epoch": 0.8463319088319088, "grad_norm": 0.5535289645195007, "learning_rate": 0.00017884567521884577, "loss": 1.1779, "step": 4753 }, { "epoch": 0.8465099715099715, "grad_norm": 0.5039721131324768, "learning_rate": 0.0001788370647385573, "loss": 1.0237, "step": 4754 }, { "epoch": 0.8466880341880342, "grad_norm": 0.4543854892253876, "learning_rate": 0.00017882845271362032, "loss": 0.8149, "step": 4755 }, { "epoch": 0.8468660968660968, "grad_norm": 0.5095639824867249, 
"learning_rate": 0.00017881983914420352, "loss": 1.0141, "step": 4756 }, { "epoch": 0.8470441595441596, "grad_norm": 0.5341798663139343, "learning_rate": 0.00017881122403047575, "loss": 1.1885, "step": 4757 }, { "epoch": 0.8472222222222222, "grad_norm": 0.5595062971115112, "learning_rate": 0.00017880260737260573, "loss": 0.8939, "step": 4758 }, { "epoch": 0.8474002849002849, "grad_norm": 0.5355880260467529, "learning_rate": 0.00017879398917076232, "loss": 1.2434, "step": 4759 }, { "epoch": 0.8475783475783476, "grad_norm": 0.49477261304855347, "learning_rate": 0.0001787853694251144, "loss": 0.979, "step": 4760 }, { "epoch": 0.8477564102564102, "grad_norm": 0.5154359340667725, "learning_rate": 0.00017877674813583078, "loss": 1.0957, "step": 4761 }, { "epoch": 0.8479344729344729, "grad_norm": 0.5651070475578308, "learning_rate": 0.00017876812530308046, "loss": 1.1884, "step": 4762 }, { "epoch": 0.8481125356125356, "grad_norm": 0.537277340888977, "learning_rate": 0.00017875950092703232, "loss": 1.0272, "step": 4763 }, { "epoch": 0.8482905982905983, "grad_norm": 0.5259691476821899, "learning_rate": 0.00017875087500785538, "loss": 1.1493, "step": 4764 }, { "epoch": 0.8484686609686609, "grad_norm": 0.5491300225257874, "learning_rate": 0.00017874224754571867, "loss": 0.8316, "step": 4765 }, { "epoch": 0.8486467236467237, "grad_norm": 0.5493744611740112, "learning_rate": 0.00017873361854079116, "loss": 1.2328, "step": 4766 }, { "epoch": 0.8488247863247863, "grad_norm": 0.571002185344696, "learning_rate": 0.00017872498799324197, "loss": 1.1384, "step": 4767 }, { "epoch": 0.8490028490028491, "grad_norm": 0.538152813911438, "learning_rate": 0.00017871635590324013, "loss": 1.0581, "step": 4768 }, { "epoch": 0.8491809116809117, "grad_norm": 0.5214923620223999, "learning_rate": 0.00017870772227095486, "loss": 1.0612, "step": 4769 }, { "epoch": 0.8493589743589743, "grad_norm": 0.5714883804321289, "learning_rate": 0.0001786990870965553, "loss": 0.9076, "step": 4770 }, { "epoch": 
0.8495370370370371, "grad_norm": 0.4181775450706482, "learning_rate": 0.00017869045038021054, "loss": 0.8366, "step": 4771 }, { "epoch": 0.8497150997150997, "grad_norm": 0.6266027688980103, "learning_rate": 0.00017868181212208993, "loss": 1.2047, "step": 4772 }, { "epoch": 0.8498931623931624, "grad_norm": 0.5423732399940491, "learning_rate": 0.0001786731723223626, "loss": 1.3878, "step": 4773 }, { "epoch": 0.8500712250712251, "grad_norm": 0.5512300133705139, "learning_rate": 0.00017866453098119793, "loss": 1.1132, "step": 4774 }, { "epoch": 0.8502492877492878, "grad_norm": 0.5767185688018799, "learning_rate": 0.00017865588809876519, "loss": 0.97, "step": 4775 }, { "epoch": 0.8504273504273504, "grad_norm": 0.5305790305137634, "learning_rate": 0.00017864724367523368, "loss": 1.1158, "step": 4776 }, { "epoch": 0.8506054131054132, "grad_norm": 0.49702391028404236, "learning_rate": 0.00017863859771077284, "loss": 0.9669, "step": 4777 }, { "epoch": 0.8507834757834758, "grad_norm": 0.5490063428878784, "learning_rate": 0.00017862995020555205, "loss": 1.0646, "step": 4778 }, { "epoch": 0.8509615384615384, "grad_norm": 0.5308689475059509, "learning_rate": 0.00017862130115974068, "loss": 0.8922, "step": 4779 }, { "epoch": 0.8511396011396012, "grad_norm": 0.5412983894348145, "learning_rate": 0.00017861265057350826, "loss": 1.1444, "step": 4780 }, { "epoch": 0.8513176638176638, "grad_norm": 0.5857377052307129, "learning_rate": 0.00017860399844702425, "loss": 1.1643, "step": 4781 }, { "epoch": 0.8514957264957265, "grad_norm": 0.599273681640625, "learning_rate": 0.00017859534478045815, "loss": 1.169, "step": 4782 }, { "epoch": 0.8516737891737892, "grad_norm": 0.5677087903022766, "learning_rate": 0.00017858668957397957, "loss": 1.0793, "step": 4783 }, { "epoch": 0.8518518518518519, "grad_norm": 0.5648362636566162, "learning_rate": 0.00017857803282775807, "loss": 1.1932, "step": 4784 }, { "epoch": 0.8520299145299145, "grad_norm": 0.5138826966285706, "learning_rate": 
0.00017856937454196323, "loss": 1.0011, "step": 4785 }, { "epoch": 0.8522079772079773, "grad_norm": 0.5951429009437561, "learning_rate": 0.0001785607147167647, "loss": 1.3198, "step": 4786 }, { "epoch": 0.8523860398860399, "grad_norm": 0.5341953039169312, "learning_rate": 0.00017855205335233216, "loss": 0.9094, "step": 4787 }, { "epoch": 0.8525641025641025, "grad_norm": 0.5193579196929932, "learning_rate": 0.00017854339044883535, "loss": 0.892, "step": 4788 }, { "epoch": 0.8527421652421653, "grad_norm": 0.5053097009658813, "learning_rate": 0.00017853472600644392, "loss": 1.0589, "step": 4789 }, { "epoch": 0.8529202279202279, "grad_norm": 0.5819617509841919, "learning_rate": 0.0001785260600253277, "loss": 1.2646, "step": 4790 }, { "epoch": 0.8530982905982906, "grad_norm": 0.5327470302581787, "learning_rate": 0.00017851739250565645, "loss": 1.056, "step": 4791 }, { "epoch": 0.8532763532763533, "grad_norm": 0.5131269097328186, "learning_rate": 0.0001785087234476, "loss": 1.1192, "step": 4792 }, { "epoch": 0.853454415954416, "grad_norm": 0.4698086977005005, "learning_rate": 0.00017850005285132821, "loss": 0.9849, "step": 4793 }, { "epoch": 0.8536324786324786, "grad_norm": 0.5503947734832764, "learning_rate": 0.00017849138071701092, "loss": 1.1139, "step": 4794 }, { "epoch": 0.8538105413105413, "grad_norm": 0.5120903849601746, "learning_rate": 0.0001784827070448181, "loss": 0.9801, "step": 4795 }, { "epoch": 0.853988603988604, "grad_norm": 0.47650405764579773, "learning_rate": 0.00017847403183491968, "loss": 1.0268, "step": 4796 }, { "epoch": 0.8541666666666666, "grad_norm": 0.5773387551307678, "learning_rate": 0.0001784653550874856, "loss": 1.0336, "step": 4797 }, { "epoch": 0.8543447293447294, "grad_norm": 0.545531153678894, "learning_rate": 0.00017845667680268593, "loss": 1.0532, "step": 4798 }, { "epoch": 0.854522792022792, "grad_norm": 0.533161461353302, "learning_rate": 0.0001784479969806906, "loss": 1.1964, "step": 4799 }, { "epoch": 0.8547008547008547, 
"grad_norm": 0.5880789160728455, "learning_rate": 0.00017843931562166977, "loss": 1.1588, "step": 4800 }, { "epoch": 0.8548789173789174, "grad_norm": 0.5381524562835693, "learning_rate": 0.00017843063272579346, "loss": 1.1533, "step": 4801 }, { "epoch": 0.85505698005698, "grad_norm": 0.6280176639556885, "learning_rate": 0.00017842194829323187, "loss": 1.0084, "step": 4802 }, { "epoch": 0.8552350427350427, "grad_norm": 0.5098552703857422, "learning_rate": 0.0001784132623241551, "loss": 1.0804, "step": 4803 }, { "epoch": 0.8554131054131054, "grad_norm": 0.5406526923179626, "learning_rate": 0.00017840457481873328, "loss": 1.2571, "step": 4804 }, { "epoch": 0.8555911680911681, "grad_norm": 0.5859003663063049, "learning_rate": 0.00017839588577713678, "loss": 1.2462, "step": 4805 }, { "epoch": 0.8557692307692307, "grad_norm": 0.6209002137184143, "learning_rate": 0.00017838719519953572, "loss": 1.307, "step": 4806 }, { "epoch": 0.8559472934472935, "grad_norm": 0.525753915309906, "learning_rate": 0.00017837850308610037, "loss": 1.2957, "step": 4807 }, { "epoch": 0.8561253561253561, "grad_norm": 0.5096195340156555, "learning_rate": 0.0001783698094370011, "loss": 1.1433, "step": 4808 }, { "epoch": 0.8563034188034188, "grad_norm": 0.5873076915740967, "learning_rate": 0.0001783611142524082, "loss": 1.2271, "step": 4809 }, { "epoch": 0.8564814814814815, "grad_norm": 0.5093944668769836, "learning_rate": 0.0001783524175324921, "loss": 0.8788, "step": 4810 }, { "epoch": 0.8566595441595442, "grad_norm": 0.5485084652900696, "learning_rate": 0.00017834371927742307, "loss": 1.256, "step": 4811 }, { "epoch": 0.8568376068376068, "grad_norm": 0.5808873772621155, "learning_rate": 0.00017833501948737163, "loss": 0.9287, "step": 4812 }, { "epoch": 0.8570156695156695, "grad_norm": 0.5113978385925293, "learning_rate": 0.00017832631816250822, "loss": 1.0372, "step": 4813 }, { "epoch": 0.8571937321937322, "grad_norm": 0.5877016186714172, "learning_rate": 0.0001783176153030033, "loss": 1.3023, 
"step": 4814 }, { "epoch": 0.8573717948717948, "grad_norm": 0.534328043460846, "learning_rate": 0.00017830891090902742, "loss": 1.1023, "step": 4815 }, { "epoch": 0.8575498575498576, "grad_norm": 0.5781638026237488, "learning_rate": 0.0001783002049807511, "loss": 0.9562, "step": 4816 }, { "epoch": 0.8577279202279202, "grad_norm": 0.5760263204574585, "learning_rate": 0.00017829149751834487, "loss": 0.8733, "step": 4817 }, { "epoch": 0.8579059829059829, "grad_norm": 0.3887255787849426, "learning_rate": 0.00017828278852197944, "loss": 0.5949, "step": 4818 }, { "epoch": 0.8580840455840456, "grad_norm": 0.47814446687698364, "learning_rate": 0.00017827407799182537, "loss": 1.0698, "step": 4819 }, { "epoch": 0.8582621082621082, "grad_norm": 0.5520272254943848, "learning_rate": 0.00017826536592805334, "loss": 1.1314, "step": 4820 }, { "epoch": 0.8584401709401709, "grad_norm": 0.5285319685935974, "learning_rate": 0.00017825665233083405, "loss": 1.1618, "step": 4821 }, { "epoch": 0.8586182336182336, "grad_norm": 0.6080102324485779, "learning_rate": 0.0001782479372003382, "loss": 1.3817, "step": 4822 }, { "epoch": 0.8587962962962963, "grad_norm": 0.7474410533905029, "learning_rate": 0.00017823922053673662, "loss": 1.1321, "step": 4823 }, { "epoch": 0.8589743589743589, "grad_norm": 0.559283435344696, "learning_rate": 0.0001782305023402, "loss": 1.1894, "step": 4824 }, { "epoch": 0.8591524216524217, "grad_norm": 0.5620571374893188, "learning_rate": 0.00017822178261089918, "loss": 1.134, "step": 4825 }, { "epoch": 0.8593304843304843, "grad_norm": 0.5553044676780701, "learning_rate": 0.00017821306134900504, "loss": 1.3222, "step": 4826 }, { "epoch": 0.8595085470085471, "grad_norm": 0.6177778244018555, "learning_rate": 0.00017820433855468846, "loss": 1.2545, "step": 4827 }, { "epoch": 0.8596866096866097, "grad_norm": 0.656233012676239, "learning_rate": 0.0001781956142281203, "loss": 1.1346, "step": 4828 }, { "epoch": 0.8598646723646723, "grad_norm": 0.6710973381996155, 
"learning_rate": 0.0001781868883694715, "loss": 1.1361, "step": 4829 }, { "epoch": 0.8600427350427351, "grad_norm": 0.5093601942062378, "learning_rate": 0.0001781781609789131, "loss": 1.0509, "step": 4830 }, { "epoch": 0.8602207977207977, "grad_norm": 0.5707578063011169, "learning_rate": 0.00017816943205661598, "loss": 1.0964, "step": 4831 }, { "epoch": 0.8603988603988604, "grad_norm": 0.6159597635269165, "learning_rate": 0.00017816070160275125, "loss": 1.0322, "step": 4832 }, { "epoch": 0.8605769230769231, "grad_norm": 0.5430580377578735, "learning_rate": 0.0001781519696174899, "loss": 1.2464, "step": 4833 }, { "epoch": 0.8607549857549858, "grad_norm": 0.48104700446128845, "learning_rate": 0.0001781432361010031, "loss": 1.1031, "step": 4834 }, { "epoch": 0.8609330484330484, "grad_norm": 0.5304946303367615, "learning_rate": 0.0001781345010534619, "loss": 1.0281, "step": 4835 }, { "epoch": 0.8611111111111112, "grad_norm": 0.5230711698532104, "learning_rate": 0.00017812576447503742, "loss": 0.9499, "step": 4836 }, { "epoch": 0.8612891737891738, "grad_norm": 0.5363606214523315, "learning_rate": 0.00017811702636590093, "loss": 1.1358, "step": 4837 }, { "epoch": 0.8614672364672364, "grad_norm": 0.5880044102668762, "learning_rate": 0.00017810828672622358, "loss": 1.1765, "step": 4838 }, { "epoch": 0.8616452991452992, "grad_norm": 0.5194395184516907, "learning_rate": 0.0001780995455561766, "loss": 1.1622, "step": 4839 }, { "epoch": 0.8618233618233618, "grad_norm": 0.5114264488220215, "learning_rate": 0.00017809080285593126, "loss": 1.0081, "step": 4840 }, { "epoch": 0.8620014245014245, "grad_norm": 0.6174240112304688, "learning_rate": 0.00017808205862565886, "loss": 1.0745, "step": 4841 }, { "epoch": 0.8621794871794872, "grad_norm": 0.5662630200386047, "learning_rate": 0.0001780733128655307, "loss": 1.3369, "step": 4842 }, { "epoch": 0.8623575498575499, "grad_norm": 0.5917882919311523, "learning_rate": 0.00017806456557571817, "loss": 1.1631, "step": 4843 }, { "epoch": 
0.8625356125356125, "grad_norm": 0.5305736660957336, "learning_rate": 0.00017805581675639265, "loss": 0.9875, "step": 4844 }, { "epoch": 0.8627136752136753, "grad_norm": 0.5181219577789307, "learning_rate": 0.00017804706640772556, "loss": 0.9918, "step": 4845 }, { "epoch": 0.8628917378917379, "grad_norm": 0.5467997789382935, "learning_rate": 0.00017803831452988832, "loss": 1.1395, "step": 4846 }, { "epoch": 0.8630698005698005, "grad_norm": 0.5494031310081482, "learning_rate": 0.00017802956112305241, "loss": 1.0312, "step": 4847 }, { "epoch": 0.8632478632478633, "grad_norm": 0.5804065465927124, "learning_rate": 0.00017802080618738931, "loss": 1.1555, "step": 4848 }, { "epoch": 0.8634259259259259, "grad_norm": 0.5424801111221313, "learning_rate": 0.00017801204972307067, "loss": 1.0215, "step": 4849 }, { "epoch": 0.8636039886039886, "grad_norm": 0.5321891903877258, "learning_rate": 0.0001780032917302679, "loss": 1.0187, "step": 4850 }, { "epoch": 0.8637820512820513, "grad_norm": 0.5543400049209595, "learning_rate": 0.0001779945322091527, "loss": 1.1972, "step": 4851 }, { "epoch": 0.863960113960114, "grad_norm": 0.566649317741394, "learning_rate": 0.00017798577115989668, "loss": 1.0758, "step": 4852 }, { "epoch": 0.8641381766381766, "grad_norm": 0.5538444519042969, "learning_rate": 0.00017797700858267145, "loss": 1.1338, "step": 4853 }, { "epoch": 0.8643162393162394, "grad_norm": 0.5641313791275024, "learning_rate": 0.0001779682444776487, "loss": 1.256, "step": 4854 }, { "epoch": 0.864494301994302, "grad_norm": 0.6377350091934204, "learning_rate": 0.00017795947884500016, "loss": 1.144, "step": 4855 }, { "epoch": 0.8646723646723646, "grad_norm": 0.5581876039505005, "learning_rate": 0.0001779507116848976, "loss": 1.3163, "step": 4856 }, { "epoch": 0.8648504273504274, "grad_norm": 0.5416772365570068, "learning_rate": 0.0001779419429975128, "loss": 1.0219, "step": 4857 }, { "epoch": 0.86502849002849, "grad_norm": 0.5450608730316162, "learning_rate": 0.0001779331727830175, 
"loss": 1.0093, "step": 4858 }, { "epoch": 0.8652065527065527, "grad_norm": 0.5151242017745972, "learning_rate": 0.00017792440104158358, "loss": 1.067, "step": 4859 }, { "epoch": 0.8653846153846154, "grad_norm": 0.5225046873092651, "learning_rate": 0.0001779156277733829, "loss": 1.0432, "step": 4860 }, { "epoch": 0.8655626780626781, "grad_norm": 0.5168602466583252, "learning_rate": 0.00017790685297858737, "loss": 0.9665, "step": 4861 }, { "epoch": 0.8657407407407407, "grad_norm": 0.5749059319496155, "learning_rate": 0.00017789807665736889, "loss": 1.1607, "step": 4862 }, { "epoch": 0.8659188034188035, "grad_norm": 0.45656394958496094, "learning_rate": 0.00017788929880989938, "loss": 0.8362, "step": 4863 }, { "epoch": 0.8660968660968661, "grad_norm": 0.5090615749359131, "learning_rate": 0.00017788051943635086, "loss": 0.9553, "step": 4864 }, { "epoch": 0.8662749287749287, "grad_norm": 0.5381240248680115, "learning_rate": 0.0001778717385368954, "loss": 1.1391, "step": 4865 }, { "epoch": 0.8664529914529915, "grad_norm": 0.522720456123352, "learning_rate": 0.00017786295611170493, "loss": 1.1869, "step": 4866 }, { "epoch": 0.8666310541310541, "grad_norm": 0.530986487865448, "learning_rate": 0.0001778541721609516, "loss": 1.1046, "step": 4867 }, { "epoch": 0.8668091168091168, "grad_norm": 0.5065864324569702, "learning_rate": 0.0001778453866848075, "loss": 1.008, "step": 4868 }, { "epoch": 0.8669871794871795, "grad_norm": 0.5541394352912903, "learning_rate": 0.00017783659968344476, "loss": 1.0004, "step": 4869 }, { "epoch": 0.8671652421652422, "grad_norm": 0.5059576630592346, "learning_rate": 0.00017782781115703556, "loss": 1.128, "step": 4870 }, { "epoch": 0.8673433048433048, "grad_norm": 0.5052187442779541, "learning_rate": 0.00017781902110575203, "loss": 0.8544, "step": 4871 }, { "epoch": 0.8675213675213675, "grad_norm": 0.5383397340774536, "learning_rate": 0.00017781022952976646, "loss": 1.1411, "step": 4872 }, { "epoch": 0.8676994301994302, "grad_norm": 
0.4760429859161377, "learning_rate": 0.00017780143642925106, "loss": 0.8246, "step": 4873 }, { "epoch": 0.8678774928774928, "grad_norm": 0.5480535626411438, "learning_rate": 0.00017779264180437817, "loss": 1.013, "step": 4874 }, { "epoch": 0.8680555555555556, "grad_norm": 0.5303317904472351, "learning_rate": 0.00017778384565532004, "loss": 1.0201, "step": 4875 }, { "epoch": 0.8682336182336182, "grad_norm": 0.5365355014801025, "learning_rate": 0.00017777504798224903, "loss": 1.1107, "step": 4876 }, { "epoch": 0.8684116809116809, "grad_norm": 0.5173360705375671, "learning_rate": 0.00017776624878533754, "loss": 1.0808, "step": 4877 }, { "epoch": 0.8685897435897436, "grad_norm": 0.5088842511177063, "learning_rate": 0.00017775744806475792, "loss": 0.995, "step": 4878 }, { "epoch": 0.8687678062678063, "grad_norm": 0.5796698927879333, "learning_rate": 0.00017774864582068264, "loss": 1.1485, "step": 4879 }, { "epoch": 0.8689458689458689, "grad_norm": 0.5719375610351562, "learning_rate": 0.00017773984205328417, "loss": 1.0133, "step": 4880 }, { "epoch": 0.8691239316239316, "grad_norm": 0.6396418213844299, "learning_rate": 0.00017773103676273498, "loss": 1.0932, "step": 4881 }, { "epoch": 0.8693019943019943, "grad_norm": 0.5602468252182007, "learning_rate": 0.00017772222994920763, "loss": 0.9702, "step": 4882 }, { "epoch": 0.8694800569800569, "grad_norm": 0.5167748332023621, "learning_rate": 0.00017771342161287457, "loss": 1.0528, "step": 4883 }, { "epoch": 0.8696581196581197, "grad_norm": 0.5572916865348816, "learning_rate": 0.00017770461175390848, "loss": 1.1341, "step": 4884 }, { "epoch": 0.8698361823361823, "grad_norm": 0.6666276454925537, "learning_rate": 0.00017769580037248195, "loss": 1.1948, "step": 4885 }, { "epoch": 0.8700142450142451, "grad_norm": 0.5348601937294006, "learning_rate": 0.0001776869874687676, "loss": 1.0562, "step": 4886 }, { "epoch": 0.8701923076923077, "grad_norm": 0.5449648499488831, "learning_rate": 0.00017767817304293812, "loss": 0.988, "step": 
4887 }, { "epoch": 0.8703703703703703, "grad_norm": 0.5995045304298401, "learning_rate": 0.0001776693570951662, "loss": 1.2526, "step": 4888 }, { "epoch": 0.8705484330484331, "grad_norm": 0.6575320959091187, "learning_rate": 0.00017766053962562457, "loss": 1.1717, "step": 4889 }, { "epoch": 0.8707264957264957, "grad_norm": 0.5882139801979065, "learning_rate": 0.00017765172063448597, "loss": 1.238, "step": 4890 }, { "epoch": 0.8709045584045584, "grad_norm": 0.5908389091491699, "learning_rate": 0.00017764290012192325, "loss": 1.0606, "step": 4891 }, { "epoch": 0.8710826210826211, "grad_norm": 0.6169339418411255, "learning_rate": 0.00017763407808810917, "loss": 1.1456, "step": 4892 }, { "epoch": 0.8712606837606838, "grad_norm": 0.5916035771369934, "learning_rate": 0.0001776252545332166, "loss": 1.0026, "step": 4893 }, { "epoch": 0.8714387464387464, "grad_norm": 0.539995551109314, "learning_rate": 0.00017761642945741843, "loss": 1.2397, "step": 4894 }, { "epoch": 0.8716168091168092, "grad_norm": 0.5346137881278992, "learning_rate": 0.00017760760286088755, "loss": 1.1232, "step": 4895 }, { "epoch": 0.8717948717948718, "grad_norm": 0.570202112197876, "learning_rate": 0.00017759877474379692, "loss": 1.0708, "step": 4896 }, { "epoch": 0.8719729344729344, "grad_norm": 0.5023398399353027, "learning_rate": 0.00017758994510631948, "loss": 1.1056, "step": 4897 }, { "epoch": 0.8721509971509972, "grad_norm": 0.5447137951850891, "learning_rate": 0.00017758111394862826, "loss": 0.8776, "step": 4898 }, { "epoch": 0.8723290598290598, "grad_norm": 0.5193906426429749, "learning_rate": 0.00017757228127089625, "loss": 0.9959, "step": 4899 }, { "epoch": 0.8725071225071225, "grad_norm": 0.5958787798881531, "learning_rate": 0.00017756344707329656, "loss": 1.092, "step": 4900 }, { "epoch": 0.8726851851851852, "grad_norm": 0.521045982837677, "learning_rate": 0.00017755461135600221, "loss": 0.9864, "step": 4901 }, { "epoch": 0.8728632478632479, "grad_norm": 0.5257635116577148, "learning_rate": 
0.00017754577411918638, "loss": 1.216, "step": 4902 }, { "epoch": 0.8730413105413105, "grad_norm": 0.5425964593887329, "learning_rate": 0.0001775369353630222, "loss": 1.1432, "step": 4903 }, { "epoch": 0.8732193732193733, "grad_norm": 0.47995322942733765, "learning_rate": 0.00017752809508768286, "loss": 1.0227, "step": 4904 }, { "epoch": 0.8733974358974359, "grad_norm": 0.5747429728507996, "learning_rate": 0.0001775192532933415, "loss": 0.9984, "step": 4905 }, { "epoch": 0.8735754985754985, "grad_norm": 0.5745723247528076, "learning_rate": 0.00017751040998017142, "loss": 1.2559, "step": 4906 }, { "epoch": 0.8737535612535613, "grad_norm": 0.6114141941070557, "learning_rate": 0.0001775015651483459, "loss": 1.3224, "step": 4907 }, { "epoch": 0.8739316239316239, "grad_norm": 0.4757187068462372, "learning_rate": 0.00017749271879803817, "loss": 1.0352, "step": 4908 }, { "epoch": 0.8741096866096866, "grad_norm": 0.48644450306892395, "learning_rate": 0.0001774838709294216, "loss": 1.0876, "step": 4909 }, { "epoch": 0.8742877492877493, "grad_norm": 0.5652037262916565, "learning_rate": 0.00017747502154266955, "loss": 0.9189, "step": 4910 }, { "epoch": 0.874465811965812, "grad_norm": 0.5289644002914429, "learning_rate": 0.00017746617063795538, "loss": 0.9431, "step": 4911 }, { "epoch": 0.8746438746438746, "grad_norm": 0.594656229019165, "learning_rate": 0.00017745731821545253, "loss": 1.2408, "step": 4912 }, { "epoch": 0.8748219373219374, "grad_norm": 0.5693240165710449, "learning_rate": 0.0001774484642753344, "loss": 1.347, "step": 4913 }, { "epoch": 0.875, "grad_norm": 0.5291008949279785, "learning_rate": 0.00017743960881777456, "loss": 1.161, "step": 4914 }, { "epoch": 0.8751780626780626, "grad_norm": 0.5958300232887268, "learning_rate": 0.00017743075184294642, "loss": 1.2058, "step": 4915 }, { "epoch": 0.8753561253561254, "grad_norm": 0.513884425163269, "learning_rate": 0.00017742189335102354, "loss": 1.0952, "step": 4916 }, { "epoch": 0.875534188034188, "grad_norm": 
0.5860681533813477, "learning_rate": 0.00017741303334217948, "loss": 1.1801, "step": 4917 }, { "epoch": 0.8757122507122507, "grad_norm": 0.47962820529937744, "learning_rate": 0.00017740417181658788, "loss": 1.0785, "step": 4918 }, { "epoch": 0.8758903133903134, "grad_norm": 0.5110440254211426, "learning_rate": 0.00017739530877442227, "loss": 1.1385, "step": 4919 }, { "epoch": 0.8760683760683761, "grad_norm": 0.5106285214424133, "learning_rate": 0.00017738644421585643, "loss": 1.1204, "step": 4920 }, { "epoch": 0.8762464387464387, "grad_norm": 0.5709205865859985, "learning_rate": 0.00017737757814106393, "loss": 1.0108, "step": 4921 }, { "epoch": 0.8764245014245015, "grad_norm": 0.5850250124931335, "learning_rate": 0.0001773687105502185, "loss": 1.0059, "step": 4922 }, { "epoch": 0.8766025641025641, "grad_norm": 0.5194727778434753, "learning_rate": 0.00017735984144349396, "loss": 0.9466, "step": 4923 }, { "epoch": 0.8767806267806267, "grad_norm": 0.5246787667274475, "learning_rate": 0.000177350970821064, "loss": 1.1336, "step": 4924 }, { "epoch": 0.8769586894586895, "grad_norm": 0.5798323154449463, "learning_rate": 0.00017734209868310244, "loss": 1.1641, "step": 4925 }, { "epoch": 0.8771367521367521, "grad_norm": 0.5188565850257874, "learning_rate": 0.00017733322502978314, "loss": 0.9959, "step": 4926 }, { "epoch": 0.8773148148148148, "grad_norm": 0.5969653725624084, "learning_rate": 0.00017732434986127995, "loss": 1.2162, "step": 4927 }, { "epoch": 0.8774928774928775, "grad_norm": 0.5520089268684387, "learning_rate": 0.00017731547317776674, "loss": 1.0163, "step": 4928 }, { "epoch": 0.8776709401709402, "grad_norm": 0.48789507150650024, "learning_rate": 0.00017730659497941745, "loss": 0.9757, "step": 4929 }, { "epoch": 0.8778490028490028, "grad_norm": 0.6034960746765137, "learning_rate": 0.000177297715266406, "loss": 1.1278, "step": 4930 }, { "epoch": 0.8780270655270656, "grad_norm": 0.53016597032547, "learning_rate": 0.00017728883403890638, "loss": 1.0637, "step": 
4931 }, { "epoch": 0.8782051282051282, "grad_norm": 0.5073726177215576, "learning_rate": 0.00017727995129709266, "loss": 1.1491, "step": 4932 }, { "epoch": 0.8783831908831908, "grad_norm": 0.540605366230011, "learning_rate": 0.00017727106704113878, "loss": 1.0133, "step": 4933 }, { "epoch": 0.8785612535612536, "grad_norm": 0.5346775054931641, "learning_rate": 0.0001772621812712189, "loss": 1.1781, "step": 4934 }, { "epoch": 0.8787393162393162, "grad_norm": 0.5659036040306091, "learning_rate": 0.00017725329398750702, "loss": 1.1023, "step": 4935 }, { "epoch": 0.8789173789173789, "grad_norm": 0.591063380241394, "learning_rate": 0.00017724440519017738, "loss": 1.0298, "step": 4936 }, { "epoch": 0.8790954415954416, "grad_norm": 0.5173781514167786, "learning_rate": 0.0001772355148794041, "loss": 1.0483, "step": 4937 }, { "epoch": 0.8792735042735043, "grad_norm": 0.5405352711677551, "learning_rate": 0.0001772266230553613, "loss": 1.0716, "step": 4938 }, { "epoch": 0.8794515669515669, "grad_norm": 0.518442690372467, "learning_rate": 0.00017721772971822323, "loss": 1.1373, "step": 4939 }, { "epoch": 0.8796296296296297, "grad_norm": 0.533673107624054, "learning_rate": 0.0001772088348681642, "loss": 1.0489, "step": 4940 }, { "epoch": 0.8798076923076923, "grad_norm": 0.46117857098579407, "learning_rate": 0.0001771999385053584, "loss": 1.0297, "step": 4941 }, { "epoch": 0.8799857549857549, "grad_norm": 0.4687997102737427, "learning_rate": 0.0001771910406299802, "loss": 1.071, "step": 4942 }, { "epoch": 0.8801638176638177, "grad_norm": 0.5064153075218201, "learning_rate": 0.0001771821412422039, "loss": 0.9518, "step": 4943 }, { "epoch": 0.8803418803418803, "grad_norm": 0.6561978459358215, "learning_rate": 0.00017717324034220385, "loss": 1.11, "step": 4944 }, { "epoch": 0.8805199430199431, "grad_norm": 0.5551498532295227, "learning_rate": 0.00017716433793015454, "loss": 0.9719, "step": 4945 }, { "epoch": 0.8806980056980057, "grad_norm": 0.47059500217437744, "learning_rate": 
0.00017715543400623025, "loss": 0.8891, "step": 4946 }, { "epoch": 0.8808760683760684, "grad_norm": 0.5035740733146667, "learning_rate": 0.00017714652857060554, "loss": 0.9671, "step": 4947 }, { "epoch": 0.8810541310541311, "grad_norm": 0.4599960446357727, "learning_rate": 0.00017713762162345487, "loss": 0.9588, "step": 4948 }, { "epoch": 0.8812321937321937, "grad_norm": 0.5087231397628784, "learning_rate": 0.0001771287131649527, "loss": 1.1433, "step": 4949 }, { "epoch": 0.8814102564102564, "grad_norm": 0.5609854459762573, "learning_rate": 0.00017711980319527366, "loss": 1.2022, "step": 4950 }, { "epoch": 0.8815883190883191, "grad_norm": 0.49460700154304504, "learning_rate": 0.00017711089171459227, "loss": 1.019, "step": 4951 }, { "epoch": 0.8817663817663818, "grad_norm": 0.5047259330749512, "learning_rate": 0.00017710197872308314, "loss": 0.8301, "step": 4952 }, { "epoch": 0.8819444444444444, "grad_norm": 0.5784406065940857, "learning_rate": 0.0001770930642209209, "loss": 0.9336, "step": 4953 }, { "epoch": 0.8821225071225072, "grad_norm": 0.5037121772766113, "learning_rate": 0.00017708414820828022, "loss": 1.0199, "step": 4954 }, { "epoch": 0.8823005698005698, "grad_norm": 0.5683804750442505, "learning_rate": 0.00017707523068533575, "loss": 0.9758, "step": 4955 }, { "epoch": 0.8824786324786325, "grad_norm": 0.5167922973632812, "learning_rate": 0.0001770663116522623, "loss": 1.0389, "step": 4956 }, { "epoch": 0.8826566951566952, "grad_norm": 0.5813606381416321, "learning_rate": 0.0001770573911092345, "loss": 1.3998, "step": 4957 }, { "epoch": 0.8828347578347578, "grad_norm": 0.5280475616455078, "learning_rate": 0.00017704846905642723, "loss": 1.0545, "step": 4958 }, { "epoch": 0.8830128205128205, "grad_norm": 0.5421732068061829, "learning_rate": 0.00017703954549401528, "loss": 0.899, "step": 4959 }, { "epoch": 0.8831908831908832, "grad_norm": 0.5177720189094543, "learning_rate": 0.00017703062042217344, "loss": 0.975, "step": 4960 }, { "epoch": 0.8833689458689459, 
"grad_norm": 0.639327883720398, "learning_rate": 0.00017702169384107666, "loss": 1.1936, "step": 4961 }, { "epoch": 0.8835470085470085, "grad_norm": 0.5201572179794312, "learning_rate": 0.00017701276575089975, "loss": 0.9891, "step": 4962 }, { "epoch": 0.8837250712250713, "grad_norm": 0.5304145216941833, "learning_rate": 0.00017700383615181767, "loss": 1.0569, "step": 4963 }, { "epoch": 0.8839031339031339, "grad_norm": 0.6068132519721985, "learning_rate": 0.00017699490504400538, "loss": 1.2653, "step": 4964 }, { "epoch": 0.8840811965811965, "grad_norm": 0.597895085811615, "learning_rate": 0.00017698597242763787, "loss": 1.2577, "step": 4965 }, { "epoch": 0.8842592592592593, "grad_norm": 0.5356902480125427, "learning_rate": 0.00017697703830289017, "loss": 1.1056, "step": 4966 }, { "epoch": 0.8844373219373219, "grad_norm": 0.5429540872573853, "learning_rate": 0.0001769681026699373, "loss": 1.0951, "step": 4967 }, { "epoch": 0.8846153846153846, "grad_norm": 0.5789309144020081, "learning_rate": 0.00017695916552895436, "loss": 1.0786, "step": 4968 }, { "epoch": 0.8847934472934473, "grad_norm": 0.5621341466903687, "learning_rate": 0.0001769502268801164, "loss": 1.0645, "step": 4969 }, { "epoch": 0.88497150997151, "grad_norm": 0.5879453420639038, "learning_rate": 0.00017694128672359865, "loss": 1.2171, "step": 4970 }, { "epoch": 0.8851495726495726, "grad_norm": 0.5005951523780823, "learning_rate": 0.0001769323450595762, "loss": 1.0725, "step": 4971 }, { "epoch": 0.8853276353276354, "grad_norm": 0.5439660549163818, "learning_rate": 0.00017692340188822425, "loss": 1.162, "step": 4972 }, { "epoch": 0.885505698005698, "grad_norm": 0.6309837698936462, "learning_rate": 0.00017691445720971802, "loss": 1.2861, "step": 4973 }, { "epoch": 0.8856837606837606, "grad_norm": 0.4997463822364807, "learning_rate": 0.00017690551102423282, "loss": 1.1887, "step": 4974 }, { "epoch": 0.8858618233618234, "grad_norm": 0.5430852174758911, "learning_rate": 0.00017689656333194385, "loss": 1.1231, 
"step": 4975 }, { "epoch": 0.886039886039886, "grad_norm": 0.5414215922355652, "learning_rate": 0.00017688761413302644, "loss": 1.2345, "step": 4976 }, { "epoch": 0.8862179487179487, "grad_norm": 0.5594443082809448, "learning_rate": 0.00017687866342765601, "loss": 1.0775, "step": 4977 }, { "epoch": 0.8863960113960114, "grad_norm": 0.5827134847640991, "learning_rate": 0.00017686971121600787, "loss": 1.0609, "step": 4978 }, { "epoch": 0.8865740740740741, "grad_norm": 0.5075414776802063, "learning_rate": 0.00017686075749825738, "loss": 0.796, "step": 4979 }, { "epoch": 0.8867521367521367, "grad_norm": 0.6007544994354248, "learning_rate": 0.00017685180227458003, "loss": 1.1716, "step": 4980 }, { "epoch": 0.8869301994301995, "grad_norm": 0.6458030343055725, "learning_rate": 0.00017684284554515128, "loss": 1.1945, "step": 4981 }, { "epoch": 0.8871082621082621, "grad_norm": 0.5519212484359741, "learning_rate": 0.00017683388731014657, "loss": 1.2571, "step": 4982 }, { "epoch": 0.8872863247863247, "grad_norm": 0.5079960227012634, "learning_rate": 0.00017682492756974146, "loss": 1.1186, "step": 4983 }, { "epoch": 0.8874643874643875, "grad_norm": 0.63576740026474, "learning_rate": 0.00017681596632411147, "loss": 1.389, "step": 4984 }, { "epoch": 0.8876424501424501, "grad_norm": 0.43325698375701904, "learning_rate": 0.0001768070035734322, "loss": 0.7757, "step": 4985 }, { "epoch": 0.8878205128205128, "grad_norm": 0.49492064118385315, "learning_rate": 0.00017679803931787923, "loss": 1.0096, "step": 4986 }, { "epoch": 0.8879985754985755, "grad_norm": 0.5561224222183228, "learning_rate": 0.00017678907355762825, "loss": 0.952, "step": 4987 }, { "epoch": 0.8881766381766382, "grad_norm": 0.5392457246780396, "learning_rate": 0.00017678010629285486, "loss": 1.0442, "step": 4988 }, { "epoch": 0.8883547008547008, "grad_norm": 0.4659234881401062, "learning_rate": 0.00017677113752373482, "loss": 0.8668, "step": 4989 }, { "epoch": 0.8885327635327636, "grad_norm": 0.5139175057411194, 
"learning_rate": 0.0001767621672504438, "loss": 0.8386, "step": 4990 }, { "epoch": 0.8887108262108262, "grad_norm": 0.5395823121070862, "learning_rate": 0.00017675319547315755, "loss": 0.9754, "step": 4991 }, { "epoch": 0.8888888888888888, "grad_norm": 0.4751867949962616, "learning_rate": 0.0001767442221920519, "loss": 0.8775, "step": 4992 }, { "epoch": 0.8890669515669516, "grad_norm": 0.5728281736373901, "learning_rate": 0.00017673524740730265, "loss": 1.2807, "step": 4993 }, { "epoch": 0.8892450142450142, "grad_norm": 0.5545622110366821, "learning_rate": 0.00017672627111908558, "loss": 1.0039, "step": 4994 }, { "epoch": 0.8894230769230769, "grad_norm": 0.5127374529838562, "learning_rate": 0.00017671729332757665, "loss": 1.0505, "step": 4995 }, { "epoch": 0.8896011396011396, "grad_norm": 0.5238714218139648, "learning_rate": 0.00017670831403295175, "loss": 1.1775, "step": 4996 }, { "epoch": 0.8897792022792023, "grad_norm": 0.5610160827636719, "learning_rate": 0.00017669933323538674, "loss": 1.0555, "step": 4997 }, { "epoch": 0.8899572649572649, "grad_norm": 0.5481634736061096, "learning_rate": 0.00017669035093505762, "loss": 1.0802, "step": 4998 }, { "epoch": 0.8901353276353277, "grad_norm": 0.4725174307823181, "learning_rate": 0.0001766813671321404, "loss": 0.9611, "step": 4999 }, { "epoch": 0.8903133903133903, "grad_norm": 0.5184635519981384, "learning_rate": 0.0001766723818268111, "loss": 1.1659, "step": 5000 }, { "epoch": 0.8904914529914529, "grad_norm": 0.5503578186035156, "learning_rate": 0.00017666339501924575, "loss": 1.2165, "step": 5001 }, { "epoch": 0.8906695156695157, "grad_norm": 0.5299594402313232, "learning_rate": 0.0001766544067096204, "loss": 1.0196, "step": 5002 }, { "epoch": 0.8908475783475783, "grad_norm": 0.5673944354057312, "learning_rate": 0.00017664541689811118, "loss": 1.2058, "step": 5003 }, { "epoch": 0.8910256410256411, "grad_norm": 0.6057320833206177, "learning_rate": 0.00017663642558489426, "loss": 1.0136, "step": 5004 }, { "epoch": 
0.8912037037037037, "grad_norm": 0.4767026901245117, "learning_rate": 0.00017662743277014578, "loss": 0.8522, "step": 5005 }, { "epoch": 0.8913817663817664, "grad_norm": 0.5346270203590393, "learning_rate": 0.00017661843845404192, "loss": 1.1568, "step": 5006 }, { "epoch": 0.8915598290598291, "grad_norm": 0.5365738868713379, "learning_rate": 0.00017660944263675891, "loss": 1.0488, "step": 5007 }, { "epoch": 0.8917378917378918, "grad_norm": 0.5536269545555115, "learning_rate": 0.00017660044531847305, "loss": 1.1216, "step": 5008 }, { "epoch": 0.8919159544159544, "grad_norm": 0.6325978636741638, "learning_rate": 0.00017659144649936055, "loss": 1.2843, "step": 5009 }, { "epoch": 0.8920940170940171, "grad_norm": 0.5890641212463379, "learning_rate": 0.00017658244617959777, "loss": 1.1976, "step": 5010 }, { "epoch": 0.8922720797720798, "grad_norm": 0.604870080947876, "learning_rate": 0.00017657344435936107, "loss": 1.2881, "step": 5011 }, { "epoch": 0.8924501424501424, "grad_norm": 0.49805206060409546, "learning_rate": 0.00017656444103882676, "loss": 0.8998, "step": 5012 }, { "epoch": 0.8926282051282052, "grad_norm": 0.506926953792572, "learning_rate": 0.0001765554362181713, "loss": 1.0731, "step": 5013 }, { "epoch": 0.8928062678062678, "grad_norm": 0.5353260636329651, "learning_rate": 0.0001765464298975711, "loss": 1.0676, "step": 5014 }, { "epoch": 0.8929843304843305, "grad_norm": 0.5641853213310242, "learning_rate": 0.0001765374220772026, "loss": 0.9606, "step": 5015 }, { "epoch": 0.8931623931623932, "grad_norm": 0.5049327611923218, "learning_rate": 0.00017652841275724233, "loss": 1.009, "step": 5016 }, { "epoch": 0.8933404558404558, "grad_norm": 0.6255155205726624, "learning_rate": 0.0001765194019378668, "loss": 1.138, "step": 5017 }, { "epoch": 0.8935185185185185, "grad_norm": 0.5816851854324341, "learning_rate": 0.00017651038961925247, "loss": 1.3398, "step": 5018 }, { "epoch": 0.8936965811965812, "grad_norm": 0.5188020467758179, "learning_rate": 
0.00017650137580157605, "loss": 1.0126, "step": 5019 }, { "epoch": 0.8938746438746439, "grad_norm": 0.5231554508209229, "learning_rate": 0.00017649236048501406, "loss": 1.0328, "step": 5020 }, { "epoch": 0.8940527065527065, "grad_norm": 0.7638634443283081, "learning_rate": 0.0001764833436697432, "loss": 1.3016, "step": 5021 }, { "epoch": 0.8942307692307693, "grad_norm": 0.5354094505310059, "learning_rate": 0.00017647432535594008, "loss": 1.0646, "step": 5022 }, { "epoch": 0.8944088319088319, "grad_norm": 0.6938086748123169, "learning_rate": 0.0001764653055437814, "loss": 1.2051, "step": 5023 }, { "epoch": 0.8945868945868946, "grad_norm": 0.5546849370002747, "learning_rate": 0.00017645628423344393, "loss": 1.0671, "step": 5024 }, { "epoch": 0.8947649572649573, "grad_norm": 0.49294665455818176, "learning_rate": 0.0001764472614251044, "loss": 1.0328, "step": 5025 }, { "epoch": 0.89494301994302, "grad_norm": 0.5965796113014221, "learning_rate": 0.00017643823711893956, "loss": 1.0741, "step": 5026 }, { "epoch": 0.8951210826210826, "grad_norm": 0.4846448302268982, "learning_rate": 0.00017642921131512626, "loss": 1.0409, "step": 5027 }, { "epoch": 0.8952991452991453, "grad_norm": 0.5767390131950378, "learning_rate": 0.00017642018401384135, "loss": 1.018, "step": 5028 }, { "epoch": 0.895477207977208, "grad_norm": 0.503027617931366, "learning_rate": 0.00017641115521526167, "loss": 1.0002, "step": 5029 }, { "epoch": 0.8956552706552706, "grad_norm": 0.6668619513511658, "learning_rate": 0.00017640212491956412, "loss": 1.2154, "step": 5030 }, { "epoch": 0.8958333333333334, "grad_norm": 0.5544148683547974, "learning_rate": 0.00017639309312692566, "loss": 1.2701, "step": 5031 }, { "epoch": 0.896011396011396, "grad_norm": 0.6026872992515564, "learning_rate": 0.00017638405983752323, "loss": 0.9335, "step": 5032 }, { "epoch": 0.8961894586894587, "grad_norm": 0.6288694143295288, "learning_rate": 0.00017637502505153384, "loss": 0.9075, "step": 5033 }, { "epoch": 0.8963675213675214, 
"grad_norm": 0.4890204966068268, "learning_rate": 0.00017636598876913446, "loss": 0.8492, "step": 5034 }, { "epoch": 0.896545584045584, "grad_norm": 0.5746598243713379, "learning_rate": 0.00017635695099050218, "loss": 1.1557, "step": 5035 }, { "epoch": 0.8967236467236467, "grad_norm": 0.5165683031082153, "learning_rate": 0.00017634791171581405, "loss": 1.0899, "step": 5036 }, { "epoch": 0.8969017094017094, "grad_norm": 0.4621037244796753, "learning_rate": 0.0001763388709452472, "loss": 1.0457, "step": 5037 }, { "epoch": 0.8970797720797721, "grad_norm": 0.532358705997467, "learning_rate": 0.00017632982867897876, "loss": 1.139, "step": 5038 }, { "epoch": 0.8972578347578347, "grad_norm": 0.5794399976730347, "learning_rate": 0.00017632078491718587, "loss": 1.031, "step": 5039 }, { "epoch": 0.8974358974358975, "grad_norm": 0.5031905174255371, "learning_rate": 0.00017631173966004576, "loss": 0.9508, "step": 5040 }, { "epoch": 0.8976139601139601, "grad_norm": 0.6528840065002441, "learning_rate": 0.00017630269290773564, "loss": 0.9974, "step": 5041 }, { "epoch": 0.8977920227920227, "grad_norm": 0.6007558703422546, "learning_rate": 0.00017629364466043273, "loss": 1.0993, "step": 5042 }, { "epoch": 0.8979700854700855, "grad_norm": 0.5104095339775085, "learning_rate": 0.00017628459491831437, "loss": 0.9175, "step": 5043 }, { "epoch": 0.8981481481481481, "grad_norm": 0.5285516977310181, "learning_rate": 0.00017627554368155782, "loss": 0.998, "step": 5044 }, { "epoch": 0.8983262108262108, "grad_norm": 0.5629046559333801, "learning_rate": 0.00017626649095034045, "loss": 1.2021, "step": 5045 }, { "epoch": 0.8985042735042735, "grad_norm": 0.57548987865448, "learning_rate": 0.00017625743672483962, "loss": 1.2076, "step": 5046 }, { "epoch": 0.8986823361823362, "grad_norm": 0.4883024990558624, "learning_rate": 0.0001762483810052327, "loss": 0.9761, "step": 5047 }, { "epoch": 0.8988603988603988, "grad_norm": 0.6378034949302673, "learning_rate": 0.0001762393237916972, "loss": 1.2266, 
"step": 5048 }, { "epoch": 0.8990384615384616, "grad_norm": 0.5201624035835266, "learning_rate": 0.0001762302650844105, "loss": 1.247, "step": 5049 }, { "epoch": 0.8992165242165242, "grad_norm": 0.5438048243522644, "learning_rate": 0.0001762212048835501, "loss": 0.993, "step": 5050 }, { "epoch": 0.8993945868945868, "grad_norm": 0.5928253531455994, "learning_rate": 0.00017621214318929354, "loss": 1.0469, "step": 5051 }, { "epoch": 0.8995726495726496, "grad_norm": 0.6437996625900269, "learning_rate": 0.00017620308000181831, "loss": 1.3136, "step": 5052 }, { "epoch": 0.8997507122507122, "grad_norm": 0.5961456298828125, "learning_rate": 0.00017619401532130208, "loss": 1.1495, "step": 5053 }, { "epoch": 0.8999287749287749, "grad_norm": 0.497388631105423, "learning_rate": 0.0001761849491479224, "loss": 0.7783, "step": 5054 }, { "epoch": 0.9001068376068376, "grad_norm": 0.5984451174736023, "learning_rate": 0.00017617588148185687, "loss": 1.3115, "step": 5055 }, { "epoch": 0.9002849002849003, "grad_norm": 0.549163818359375, "learning_rate": 0.0001761668123232832, "loss": 1.1649, "step": 5056 }, { "epoch": 0.9004629629629629, "grad_norm": 0.5831968188285828, "learning_rate": 0.00017615774167237903, "loss": 1.1749, "step": 5057 }, { "epoch": 0.9006410256410257, "grad_norm": 0.5111076235771179, "learning_rate": 0.00017614866952932214, "loss": 0.8936, "step": 5058 }, { "epoch": 0.9008190883190883, "grad_norm": 0.5740947723388672, "learning_rate": 0.00017613959589429028, "loss": 1.2606, "step": 5059 }, { "epoch": 0.9009971509971509, "grad_norm": 0.5881099700927734, "learning_rate": 0.0001761305207674612, "loss": 1.3682, "step": 5060 }, { "epoch": 0.9011752136752137, "grad_norm": 0.5007091760635376, "learning_rate": 0.00017612144414901268, "loss": 0.7788, "step": 5061 }, { "epoch": 0.9013532763532763, "grad_norm": 0.5127760171890259, "learning_rate": 0.00017611236603912262, "loss": 1.0519, "step": 5062 }, { "epoch": 0.9015313390313391, "grad_norm": 0.6185184121131897, 
"learning_rate": 0.00017610328643796882, "loss": 1.1672, "step": 5063 }, { "epoch": 0.9017094017094017, "grad_norm": 0.49707287549972534, "learning_rate": 0.00017609420534572926, "loss": 1.1865, "step": 5064 }, { "epoch": 0.9018874643874644, "grad_norm": 0.5667552351951599, "learning_rate": 0.0001760851227625818, "loss": 1.1388, "step": 5065 }, { "epoch": 0.9020655270655271, "grad_norm": 0.50298011302948, "learning_rate": 0.00017607603868870442, "loss": 0.9552, "step": 5066 }, { "epoch": 0.9022435897435898, "grad_norm": 0.5709219574928284, "learning_rate": 0.0001760669531242751, "loss": 1.2636, "step": 5067 }, { "epoch": 0.9024216524216524, "grad_norm": 0.4943496286869049, "learning_rate": 0.0001760578660694718, "loss": 0.8951, "step": 5068 }, { "epoch": 0.9025997150997151, "grad_norm": 0.5475931167602539, "learning_rate": 0.00017604877752447267, "loss": 1.1442, "step": 5069 }, { "epoch": 0.9027777777777778, "grad_norm": 0.5280239582061768, "learning_rate": 0.0001760396874894557, "loss": 0.9537, "step": 5070 }, { "epoch": 0.9029558404558404, "grad_norm": 0.5480797290802002, "learning_rate": 0.000176030595964599, "loss": 1.1557, "step": 5071 }, { "epoch": 0.9031339031339032, "grad_norm": 0.5232734680175781, "learning_rate": 0.00017602150295008073, "loss": 1.0219, "step": 5072 }, { "epoch": 0.9033119658119658, "grad_norm": 0.5448359251022339, "learning_rate": 0.000176012408446079, "loss": 1.1964, "step": 5073 }, { "epoch": 0.9034900284900285, "grad_norm": 0.4841914474964142, "learning_rate": 0.00017600331245277206, "loss": 1.0667, "step": 5074 }, { "epoch": 0.9036680911680912, "grad_norm": 0.5407083630561829, "learning_rate": 0.0001759942149703381, "loss": 1.1895, "step": 5075 }, { "epoch": 0.9038461538461539, "grad_norm": 0.5140416026115417, "learning_rate": 0.00017598511599895534, "loss": 0.9402, "step": 5076 }, { "epoch": 0.9040242165242165, "grad_norm": 0.6333765983581543, "learning_rate": 0.00017597601553880207, "loss": 1.239, "step": 5077 }, { "epoch": 
0.9042022792022792, "grad_norm": 0.4996028244495392, "learning_rate": 0.00017596691359005664, "loss": 1.0259, "step": 5078 }, { "epoch": 0.9043803418803419, "grad_norm": 0.591892421245575, "learning_rate": 0.00017595781015289732, "loss": 1.2148, "step": 5079 }, { "epoch": 0.9045584045584045, "grad_norm": 0.736499011516571, "learning_rate": 0.0001759487052275025, "loss": 1.1373, "step": 5080 }, { "epoch": 0.9047364672364673, "grad_norm": 0.5951572060585022, "learning_rate": 0.00017593959881405057, "loss": 1.1833, "step": 5081 }, { "epoch": 0.9049145299145299, "grad_norm": 0.5092006325721741, "learning_rate": 0.00017593049091271996, "loss": 0.8841, "step": 5082 }, { "epoch": 0.9050925925925926, "grad_norm": 0.5679013729095459, "learning_rate": 0.0001759213815236891, "loss": 1.1056, "step": 5083 }, { "epoch": 0.9052706552706553, "grad_norm": 0.5708174109458923, "learning_rate": 0.0001759122706471365, "loss": 1.1952, "step": 5084 }, { "epoch": 0.905448717948718, "grad_norm": 0.5726733803749084, "learning_rate": 0.00017590315828324067, "loss": 1.1013, "step": 5085 }, { "epoch": 0.9056267806267806, "grad_norm": 0.5821273326873779, "learning_rate": 0.00017589404443218008, "loss": 1.2323, "step": 5086 }, { "epoch": 0.9058048433048433, "grad_norm": 0.5811445713043213, "learning_rate": 0.00017588492909413337, "loss": 1.2241, "step": 5087 }, { "epoch": 0.905982905982906, "grad_norm": 0.5377545952796936, "learning_rate": 0.0001758758122692791, "loss": 0.9777, "step": 5088 }, { "epoch": 0.9061609686609686, "grad_norm": 0.5985640287399292, "learning_rate": 0.0001758666939577959, "loss": 0.9737, "step": 5089 }, { "epoch": 0.9063390313390314, "grad_norm": 0.6038222908973694, "learning_rate": 0.00017585757415986247, "loss": 1.2116, "step": 5090 }, { "epoch": 0.906517094017094, "grad_norm": 0.6752246022224426, "learning_rate": 0.00017584845287565743, "loss": 1.1975, "step": 5091 }, { "epoch": 0.9066951566951567, "grad_norm": 0.5400625467300415, "learning_rate": 
0.0001758393301053595, "loss": 0.9669, "step": 5092 }, { "epoch": 0.9068732193732194, "grad_norm": 0.5637784004211426, "learning_rate": 0.00017583020584914746, "loss": 1.2672, "step": 5093 }, { "epoch": 0.907051282051282, "grad_norm": 0.4825877249240875, "learning_rate": 0.00017582108010720006, "loss": 0.9719, "step": 5094 }, { "epoch": 0.9072293447293447, "grad_norm": 0.49902790784835815, "learning_rate": 0.00017581195287969613, "loss": 0.7941, "step": 5095 }, { "epoch": 0.9074074074074074, "grad_norm": 0.5991541743278503, "learning_rate": 0.0001758028241668144, "loss": 1.049, "step": 5096 }, { "epoch": 0.9075854700854701, "grad_norm": 0.5788859724998474, "learning_rate": 0.00017579369396873384, "loss": 1.0318, "step": 5097 }, { "epoch": 0.9077635327635327, "grad_norm": 0.5914160013198853, "learning_rate": 0.0001757845622856333, "loss": 1.1007, "step": 5098 }, { "epoch": 0.9079415954415955, "grad_norm": 0.5361711382865906, "learning_rate": 0.00017577542911769166, "loss": 1.0694, "step": 5099 }, { "epoch": 0.9081196581196581, "grad_norm": 0.5752849578857422, "learning_rate": 0.00017576629446508792, "loss": 1.1184, "step": 5100 }, { "epoch": 0.9082977207977208, "grad_norm": 0.6042249798774719, "learning_rate": 0.000175757158328001, "loss": 1.2808, "step": 5101 }, { "epoch": 0.9084757834757835, "grad_norm": 0.508352518081665, "learning_rate": 0.00017574802070661, "loss": 1.0038, "step": 5102 }, { "epoch": 0.9086538461538461, "grad_norm": 0.5667358040809631, "learning_rate": 0.00017573888160109385, "loss": 1.0208, "step": 5103 }, { "epoch": 0.9088319088319088, "grad_norm": 0.653619647026062, "learning_rate": 0.00017572974101163165, "loss": 1.2053, "step": 5104 }, { "epoch": 0.9090099715099715, "grad_norm": 0.5069597363471985, "learning_rate": 0.00017572059893840246, "loss": 0.8634, "step": 5105 }, { "epoch": 0.9091880341880342, "grad_norm": 0.6160602569580078, "learning_rate": 0.00017571145538158547, "loss": 1.2626, "step": 5106 }, { "epoch": 0.9093660968660968, 
"grad_norm": 0.6335833668708801, "learning_rate": 0.00017570231034135978, "loss": 1.3381, "step": 5107 }, { "epoch": 0.9095441595441596, "grad_norm": 0.5140398740768433, "learning_rate": 0.00017569316381790454, "loss": 1.1258, "step": 5108 }, { "epoch": 0.9097222222222222, "grad_norm": 0.5682975649833679, "learning_rate": 0.00017568401581139905, "loss": 1.3367, "step": 5109 }, { "epoch": 0.9099002849002849, "grad_norm": 0.49765729904174805, "learning_rate": 0.00017567486632202246, "loss": 1.1891, "step": 5110 }, { "epoch": 0.9100783475783476, "grad_norm": 0.5139224529266357, "learning_rate": 0.00017566571534995406, "loss": 0.9768, "step": 5111 }, { "epoch": 0.9102564102564102, "grad_norm": 0.5510922074317932, "learning_rate": 0.00017565656289537316, "loss": 1.1552, "step": 5112 }, { "epoch": 0.9104344729344729, "grad_norm": 0.6243364810943604, "learning_rate": 0.00017564740895845908, "loss": 1.1341, "step": 5113 }, { "epoch": 0.9106125356125356, "grad_norm": 0.5334977507591248, "learning_rate": 0.00017563825353939116, "loss": 1.0894, "step": 5114 }, { "epoch": 0.9107905982905983, "grad_norm": 0.5195826292037964, "learning_rate": 0.00017562909663834878, "loss": 1.1011, "step": 5115 }, { "epoch": 0.9109686609686609, "grad_norm": 0.5298168063163757, "learning_rate": 0.00017561993825551138, "loss": 1.0079, "step": 5116 }, { "epoch": 0.9111467236467237, "grad_norm": 0.5858965516090393, "learning_rate": 0.00017561077839105835, "loss": 1.2746, "step": 5117 }, { "epoch": 0.9113247863247863, "grad_norm": 0.5572476387023926, "learning_rate": 0.0001756016170451692, "loss": 0.8169, "step": 5118 }, { "epoch": 0.9115028490028491, "grad_norm": 0.5247095823287964, "learning_rate": 0.0001755924542180234, "loss": 1.1206, "step": 5119 }, { "epoch": 0.9116809116809117, "grad_norm": 0.5605118274688721, "learning_rate": 0.0001755832899098005, "loss": 1.371, "step": 5120 }, { "epoch": 0.9118589743589743, "grad_norm": 0.5732316970825195, "learning_rate": 0.00017557412412068005, "loss": 
1.1248, "step": 5121 }, { "epoch": 0.9120370370370371, "grad_norm": 0.6167279481887817, "learning_rate": 0.0001755649568508416, "loss": 0.94, "step": 5122 }, { "epoch": 0.9122150997150997, "grad_norm": 0.5497499108314514, "learning_rate": 0.00017555578810046483, "loss": 1.0112, "step": 5123 }, { "epoch": 0.9123931623931624, "grad_norm": 0.540762186050415, "learning_rate": 0.00017554661786972931, "loss": 1.1058, "step": 5124 }, { "epoch": 0.9125712250712251, "grad_norm": 0.5943556427955627, "learning_rate": 0.0001755374461588148, "loss": 0.9086, "step": 5125 }, { "epoch": 0.9127492877492878, "grad_norm": 0.5300756692886353, "learning_rate": 0.0001755282729679009, "loss": 1.1566, "step": 5126 }, { "epoch": 0.9129273504273504, "grad_norm": 0.5390434861183167, "learning_rate": 0.00017551909829716743, "loss": 1.1395, "step": 5127 }, { "epoch": 0.9131054131054132, "grad_norm": 0.627434492111206, "learning_rate": 0.00017550992214679405, "loss": 1.1537, "step": 5128 }, { "epoch": 0.9132834757834758, "grad_norm": 0.4806903302669525, "learning_rate": 0.00017550074451696063, "loss": 0.7905, "step": 5129 }, { "epoch": 0.9134615384615384, "grad_norm": 0.5714817047119141, "learning_rate": 0.00017549156540784696, "loss": 1.1042, "step": 5130 }, { "epoch": 0.9136396011396012, "grad_norm": 0.5839236378669739, "learning_rate": 0.0001754823848196329, "loss": 1.0383, "step": 5131 }, { "epoch": 0.9138176638176638, "grad_norm": 0.6089872717857361, "learning_rate": 0.0001754732027524983, "loss": 0.9399, "step": 5132 }, { "epoch": 0.9139957264957265, "grad_norm": 0.4937956631183624, "learning_rate": 0.00017546401920662307, "loss": 0.7382, "step": 5133 }, { "epoch": 0.9141737891737892, "grad_norm": 0.5918676257133484, "learning_rate": 0.00017545483418218716, "loss": 1.2207, "step": 5134 }, { "epoch": 0.9143518518518519, "grad_norm": 0.5825346112251282, "learning_rate": 0.0001754456476793705, "loss": 0.9669, "step": 5135 }, { "epoch": 0.9145299145299145, "grad_norm": 0.49829617142677307, 
"learning_rate": 0.0001754364596983531, "loss": 1.2247, "step": 5136 }, { "epoch": 0.9147079772079773, "grad_norm": 0.5128271579742432, "learning_rate": 0.00017542727023931497, "loss": 0.9563, "step": 5137 }, { "epoch": 0.9148860398860399, "grad_norm": 0.5789414644241333, "learning_rate": 0.00017541807930243622, "loss": 1.22, "step": 5138 }, { "epoch": 0.9150641025641025, "grad_norm": 0.44155433773994446, "learning_rate": 0.00017540888688789683, "loss": 0.9897, "step": 5139 }, { "epoch": 0.9152421652421653, "grad_norm": 0.550464391708374, "learning_rate": 0.00017539969299587696, "loss": 1.0624, "step": 5140 }, { "epoch": 0.9154202279202279, "grad_norm": 0.5019831657409668, "learning_rate": 0.0001753904976265567, "loss": 0.9045, "step": 5141 }, { "epoch": 0.9155982905982906, "grad_norm": 0.589658796787262, "learning_rate": 0.0001753813007801163, "loss": 1.0454, "step": 5142 }, { "epoch": 0.9157763532763533, "grad_norm": 0.5945459008216858, "learning_rate": 0.00017537210245673586, "loss": 1.0042, "step": 5143 }, { "epoch": 0.915954415954416, "grad_norm": 0.5409809947013855, "learning_rate": 0.00017536290265659566, "loss": 1.0609, "step": 5144 }, { "epoch": 0.9161324786324786, "grad_norm": 0.5302975177764893, "learning_rate": 0.00017535370137987597, "loss": 1.1394, "step": 5145 }, { "epoch": 0.9163105413105413, "grad_norm": 0.5253351330757141, "learning_rate": 0.00017534449862675698, "loss": 1.2249, "step": 5146 }, { "epoch": 0.916488603988604, "grad_norm": 0.6363829970359802, "learning_rate": 0.00017533529439741908, "loss": 1.1333, "step": 5147 }, { "epoch": 0.9166666666666666, "grad_norm": 0.4703354835510254, "learning_rate": 0.0001753260886920426, "loss": 0.9971, "step": 5148 }, { "epoch": 0.9168447293447294, "grad_norm": 0.6394907236099243, "learning_rate": 0.00017531688151080786, "loss": 1.5942, "step": 5149 }, { "epoch": 0.917022792022792, "grad_norm": 0.5573459267616272, "learning_rate": 0.00017530767285389527, "loss": 0.9669, "step": 5150 }, { "epoch": 
0.9172008547008547, "grad_norm": 0.5000962615013123, "learning_rate": 0.00017529846272148532, "loss": 1.2151, "step": 5151 }, { "epoch": 0.9173789173789174, "grad_norm": 0.5550395846366882, "learning_rate": 0.0001752892511137584, "loss": 1.1765, "step": 5152 }, { "epoch": 0.91755698005698, "grad_norm": 0.5461394786834717, "learning_rate": 0.00017528003803089496, "loss": 1.1136, "step": 5153 }, { "epoch": 0.9177350427350427, "grad_norm": 0.5512672662734985, "learning_rate": 0.00017527082347307558, "loss": 1.1727, "step": 5154 }, { "epoch": 0.9179131054131054, "grad_norm": 0.5210778713226318, "learning_rate": 0.0001752616074404808, "loss": 1.09, "step": 5155 }, { "epoch": 0.9180911680911681, "grad_norm": 0.5214943289756775, "learning_rate": 0.00017525238993329115, "loss": 0.9654, "step": 5156 }, { "epoch": 0.9182692307692307, "grad_norm": 0.5822862386703491, "learning_rate": 0.00017524317095168724, "loss": 1.0951, "step": 5157 }, { "epoch": 0.9184472934472935, "grad_norm": 0.43948012590408325, "learning_rate": 0.0001752339504958497, "loss": 0.6984, "step": 5158 }, { "epoch": 0.9186253561253561, "grad_norm": 0.5024449229240417, "learning_rate": 0.00017522472856595916, "loss": 0.983, "step": 5159 }, { "epoch": 0.9188034188034188, "grad_norm": 0.5815144181251526, "learning_rate": 0.00017521550516219636, "loss": 0.9784, "step": 5160 }, { "epoch": 0.9189814814814815, "grad_norm": 0.5519825220108032, "learning_rate": 0.00017520628028474197, "loss": 1.064, "step": 5161 }, { "epoch": 0.9191595441595442, "grad_norm": 0.5615749955177307, "learning_rate": 0.00017519705393377675, "loss": 1.1284, "step": 5162 }, { "epoch": 0.9193376068376068, "grad_norm": 0.5929917693138123, "learning_rate": 0.00017518782610948148, "loss": 1.1221, "step": 5163 }, { "epoch": 0.9195156695156695, "grad_norm": 0.7116361856460571, "learning_rate": 0.00017517859681203692, "loss": 1.0188, "step": 5164 }, { "epoch": 0.9196937321937322, "grad_norm": 0.5095893740653992, "learning_rate": 
0.00017516936604162396, "loss": 1.0724, "step": 5165 }, { "epoch": 0.9198717948717948, "grad_norm": 0.5701385736465454, "learning_rate": 0.00017516013379842337, "loss": 1.0572, "step": 5166 }, { "epoch": 0.9200498575498576, "grad_norm": 0.518412709236145, "learning_rate": 0.00017515090008261613, "loss": 1.0514, "step": 5167 }, { "epoch": 0.9202279202279202, "grad_norm": 0.5324261784553528, "learning_rate": 0.00017514166489438312, "loss": 1.1708, "step": 5168 }, { "epoch": 0.9204059829059829, "grad_norm": 0.5640990138053894, "learning_rate": 0.00017513242823390525, "loss": 1.2846, "step": 5169 }, { "epoch": 0.9205840455840456, "grad_norm": 0.510352373123169, "learning_rate": 0.00017512319010136356, "loss": 1.0763, "step": 5170 }, { "epoch": 0.9207621082621082, "grad_norm": 0.4994175136089325, "learning_rate": 0.00017511395049693898, "loss": 0.9665, "step": 5171 }, { "epoch": 0.9209401709401709, "grad_norm": 0.43196994066238403, "learning_rate": 0.00017510470942081258, "loss": 0.761, "step": 5172 }, { "epoch": 0.9211182336182336, "grad_norm": 0.558977484703064, "learning_rate": 0.00017509546687316543, "loss": 1.0758, "step": 5173 }, { "epoch": 0.9212962962962963, "grad_norm": 0.573302149772644, "learning_rate": 0.0001750862228541786, "loss": 0.9635, "step": 5174 }, { "epoch": 0.9214743589743589, "grad_norm": 0.5083786845207214, "learning_rate": 0.00017507697736403321, "loss": 1.0311, "step": 5175 }, { "epoch": 0.9216524216524217, "grad_norm": 0.5478954911231995, "learning_rate": 0.00017506773040291043, "loss": 1.074, "step": 5176 }, { "epoch": 0.9218304843304843, "grad_norm": 0.522376537322998, "learning_rate": 0.00017505848197099137, "loss": 1.1162, "step": 5177 }, { "epoch": 0.9220085470085471, "grad_norm": 0.5946292281150818, "learning_rate": 0.0001750492320684573, "loss": 0.9494, "step": 5178 }, { "epoch": 0.9221866096866097, "grad_norm": 0.5423247814178467, "learning_rate": 0.00017503998069548943, "loss": 1.0558, "step": 5179 }, { "epoch": 0.9223646723646723, 
"grad_norm": 0.49960651993751526, "learning_rate": 0.000175030727852269, "loss": 1.0748, "step": 5180 }, { "epoch": 0.9225427350427351, "grad_norm": 0.6066586375236511, "learning_rate": 0.00017502147353897732, "loss": 1.2066, "step": 5181 }, { "epoch": 0.9227207977207977, "grad_norm": 0.57244473695755, "learning_rate": 0.00017501221775579576, "loss": 1.048, "step": 5182 }, { "epoch": 0.9228988603988604, "grad_norm": 0.512464165687561, "learning_rate": 0.00017500296050290557, "loss": 1.1405, "step": 5183 }, { "epoch": 0.9230769230769231, "grad_norm": 0.5380734801292419, "learning_rate": 0.00017499370178048818, "loss": 1.0641, "step": 5184 }, { "epoch": 0.9232549857549858, "grad_norm": 0.47102874517440796, "learning_rate": 0.000174984441588725, "loss": 0.7948, "step": 5185 }, { "epoch": 0.9234330484330484, "grad_norm": 0.6702211499214172, "learning_rate": 0.00017497517992779747, "loss": 1.3009, "step": 5186 }, { "epoch": 0.9236111111111112, "grad_norm": 0.4685834050178528, "learning_rate": 0.000174965916797887, "loss": 0.8136, "step": 5187 }, { "epoch": 0.9237891737891738, "grad_norm": 0.5414277911186218, "learning_rate": 0.00017495665219917513, "loss": 0.9708, "step": 5188 }, { "epoch": 0.9239672364672364, "grad_norm": 0.5253050923347473, "learning_rate": 0.0001749473861318434, "loss": 1.0691, "step": 5189 }, { "epoch": 0.9241452991452992, "grad_norm": 0.6009906530380249, "learning_rate": 0.00017493811859607328, "loss": 1.2023, "step": 5190 }, { "epoch": 0.9243233618233618, "grad_norm": 0.5519336462020874, "learning_rate": 0.00017492884959204643, "loss": 1.189, "step": 5191 }, { "epoch": 0.9245014245014245, "grad_norm": 0.5024857521057129, "learning_rate": 0.0001749195791199444, "loss": 0.8685, "step": 5192 }, { "epoch": 0.9246794871794872, "grad_norm": 0.5735679864883423, "learning_rate": 0.00017491030717994887, "loss": 1.1903, "step": 5193 }, { "epoch": 0.9248575498575499, "grad_norm": 0.5338658094406128, "learning_rate": 0.00017490103377224147, "loss": 1.0442, 
"step": 5194 }, { "epoch": 0.9250356125356125, "grad_norm": 0.46669119596481323, "learning_rate": 0.0001748917588970039, "loss": 0.6343, "step": 5195 }, { "epoch": 0.9252136752136753, "grad_norm": 0.510910153388977, "learning_rate": 0.00017488248255441793, "loss": 0.9334, "step": 5196 }, { "epoch": 0.9253917378917379, "grad_norm": 0.5732216238975525, "learning_rate": 0.00017487320474466524, "loss": 1.0483, "step": 5197 }, { "epoch": 0.9255698005698005, "grad_norm": 0.5864318609237671, "learning_rate": 0.00017486392546792762, "loss": 1.0669, "step": 5198 }, { "epoch": 0.9257478632478633, "grad_norm": 0.5074281096458435, "learning_rate": 0.00017485464472438692, "loss": 1.0636, "step": 5199 }, { "epoch": 0.9259259259259259, "grad_norm": 0.5833215117454529, "learning_rate": 0.00017484536251422496, "loss": 1.2005, "step": 5200 }, { "epoch": 0.9261039886039886, "grad_norm": 0.5624990463256836, "learning_rate": 0.0001748360788376236, "loss": 1.1623, "step": 5201 }, { "epoch": 0.9262820512820513, "grad_norm": 0.5618230104446411, "learning_rate": 0.00017482679369476472, "loss": 1.0495, "step": 5202 }, { "epoch": 0.926460113960114, "grad_norm": 0.6254985332489014, "learning_rate": 0.00017481750708583024, "loss": 0.9521, "step": 5203 }, { "epoch": 0.9266381766381766, "grad_norm": 0.5488203763961792, "learning_rate": 0.00017480821901100216, "loss": 1.0689, "step": 5204 }, { "epoch": 0.9268162393162394, "grad_norm": 0.6157993674278259, "learning_rate": 0.00017479892947046245, "loss": 1.2852, "step": 5205 }, { "epoch": 0.926994301994302, "grad_norm": 0.49653390049934387, "learning_rate": 0.00017478963846439305, "loss": 0.8616, "step": 5206 }, { "epoch": 0.9271723646723646, "grad_norm": 0.5079081058502197, "learning_rate": 0.00017478034599297603, "loss": 1.0192, "step": 5207 }, { "epoch": 0.9273504273504274, "grad_norm": 0.5392495393753052, "learning_rate": 0.00017477105205639354, "loss": 1.115, "step": 5208 }, { "epoch": 0.92752849002849, "grad_norm": 0.5336191654205322, 
"learning_rate": 0.00017476175665482756, "loss": 1.1892, "step": 5209 }, { "epoch": 0.9277065527065527, "grad_norm": 0.631712019443512, "learning_rate": 0.00017475245978846026, "loss": 0.9619, "step": 5210 }, { "epoch": 0.9278846153846154, "grad_norm": 0.5123951435089111, "learning_rate": 0.0001747431614574738, "loss": 1.1477, "step": 5211 }, { "epoch": 0.9280626780626781, "grad_norm": 0.5045743584632874, "learning_rate": 0.00017473386166205038, "loss": 0.9749, "step": 5212 }, { "epoch": 0.9282407407407407, "grad_norm": 0.5296525359153748, "learning_rate": 0.00017472456040237217, "loss": 1.0736, "step": 5213 }, { "epoch": 0.9284188034188035, "grad_norm": 0.6304933428764343, "learning_rate": 0.00017471525767862145, "loss": 1.2444, "step": 5214 }, { "epoch": 0.9285968660968661, "grad_norm": 0.4851958155632019, "learning_rate": 0.00017470595349098044, "loss": 0.9049, "step": 5215 }, { "epoch": 0.9287749287749287, "grad_norm": 0.5730679631233215, "learning_rate": 0.00017469664783963148, "loss": 1.0773, "step": 5216 }, { "epoch": 0.9289529914529915, "grad_norm": 0.6020415425300598, "learning_rate": 0.00017468734072475684, "loss": 1.3247, "step": 5217 }, { "epoch": 0.9291310541310541, "grad_norm": 0.47981077432632446, "learning_rate": 0.00017467803214653893, "loss": 1.0009, "step": 5218 }, { "epoch": 0.9293091168091168, "grad_norm": 0.5787527561187744, "learning_rate": 0.0001746687221051601, "loss": 1.2523, "step": 5219 }, { "epoch": 0.9294871794871795, "grad_norm": 0.4495891332626343, "learning_rate": 0.00017465941060080278, "loss": 0.7364, "step": 5220 }, { "epoch": 0.9296652421652422, "grad_norm": 0.5721768140792847, "learning_rate": 0.0001746500976336494, "loss": 1.015, "step": 5221 }, { "epoch": 0.9298433048433048, "grad_norm": 0.5500208735466003, "learning_rate": 0.0001746407832038824, "loss": 1.053, "step": 5222 }, { "epoch": 0.9300213675213675, "grad_norm": 0.5784386992454529, "learning_rate": 0.00017463146731168437, "loss": 0.9784, "step": 5223 }, { "epoch": 
0.9301994301994302, "grad_norm": 0.4960322082042694, "learning_rate": 0.00017462214995723772, "loss": 0.8674, "step": 5224 }, { "epoch": 0.9303774928774928, "grad_norm": 0.5005537271499634, "learning_rate": 0.00017461283114072508, "loss": 1.0486, "step": 5225 }, { "epoch": 0.9305555555555556, "grad_norm": 0.5064167380332947, "learning_rate": 0.000174603510862329, "loss": 0.9722, "step": 5226 }, { "epoch": 0.9307336182336182, "grad_norm": 0.583558976650238, "learning_rate": 0.0001745941891222321, "loss": 0.9957, "step": 5227 }, { "epoch": 0.9309116809116809, "grad_norm": 0.4982515871524811, "learning_rate": 0.00017458486592061704, "loss": 0.958, "step": 5228 }, { "epoch": 0.9310897435897436, "grad_norm": 0.526549756526947, "learning_rate": 0.0001745755412576664, "loss": 1.1172, "step": 5229 }, { "epoch": 0.9312678062678063, "grad_norm": 0.6129719018936157, "learning_rate": 0.000174566215133563, "loss": 1.2524, "step": 5230 }, { "epoch": 0.9314458689458689, "grad_norm": 0.5385653972625732, "learning_rate": 0.00017455688754848948, "loss": 1.1655, "step": 5231 }, { "epoch": 0.9316239316239316, "grad_norm": 0.5646410584449768, "learning_rate": 0.0001745475585026287, "loss": 0.9026, "step": 5232 }, { "epoch": 0.9318019943019943, "grad_norm": 0.549223780632019, "learning_rate": 0.0001745382279961633, "loss": 0.804, "step": 5233 }, { "epoch": 0.9319800569800569, "grad_norm": 0.48547953367233276, "learning_rate": 0.0001745288960292762, "loss": 1.0224, "step": 5234 }, { "epoch": 0.9321581196581197, "grad_norm": 0.5260967016220093, "learning_rate": 0.00017451956260215016, "loss": 0.9688, "step": 5235 }, { "epoch": 0.9323361823361823, "grad_norm": 0.6261999011039734, "learning_rate": 0.00017451022771496812, "loss": 1.2539, "step": 5236 }, { "epoch": 0.9325142450142451, "grad_norm": 0.5801421999931335, "learning_rate": 0.00017450089136791298, "loss": 1.11, "step": 5237 }, { "epoch": 0.9326923076923077, "grad_norm": 0.5833573937416077, "learning_rate": 0.0001744915535611676, 
"loss": 0.9328, "step": 5238 }, { "epoch": 0.9328703703703703, "grad_norm": 0.5422634482383728, "learning_rate": 0.00017448221429491496, "loss": 1.034, "step": 5239 }, { "epoch": 0.9330484330484331, "grad_norm": 0.5105658769607544, "learning_rate": 0.00017447287356933808, "loss": 0.8924, "step": 5240 }, { "epoch": 0.9332264957264957, "grad_norm": 0.5114831924438477, "learning_rate": 0.00017446353138461995, "loss": 0.9328, "step": 5241 }, { "epoch": 0.9334045584045584, "grad_norm": 0.5105039477348328, "learning_rate": 0.00017445418774094358, "loss": 1.0468, "step": 5242 }, { "epoch": 0.9335826210826211, "grad_norm": 0.593250036239624, "learning_rate": 0.00017444484263849208, "loss": 1.0603, "step": 5243 }, { "epoch": 0.9337606837606838, "grad_norm": 0.600788414478302, "learning_rate": 0.00017443549607744853, "loss": 1.1506, "step": 5244 }, { "epoch": 0.9339387464387464, "grad_norm": 0.5394418239593506, "learning_rate": 0.00017442614805799605, "loss": 1.038, "step": 5245 }, { "epoch": 0.9341168091168092, "grad_norm": 0.5446375608444214, "learning_rate": 0.00017441679858031786, "loss": 1.079, "step": 5246 }, { "epoch": 0.9342948717948718, "grad_norm": 0.5859794616699219, "learning_rate": 0.00017440744764459702, "loss": 1.1453, "step": 5247 }, { "epoch": 0.9344729344729344, "grad_norm": 0.4899081289768219, "learning_rate": 0.00017439809525101688, "loss": 1.163, "step": 5248 }, { "epoch": 0.9346509971509972, "grad_norm": 0.652846097946167, "learning_rate": 0.00017438874139976055, "loss": 1.1819, "step": 5249 }, { "epoch": 0.9348290598290598, "grad_norm": 0.5402514934539795, "learning_rate": 0.00017437938609101138, "loss": 1.0159, "step": 5250 }, { "epoch": 0.9350071225071225, "grad_norm": 0.565864086151123, "learning_rate": 0.00017437002932495265, "loss": 1.1121, "step": 5251 }, { "epoch": 0.9351851851851852, "grad_norm": 0.611786425113678, "learning_rate": 0.0001743606711017677, "loss": 1.2511, "step": 5252 }, { "epoch": 0.9353632478632479, "grad_norm": 
0.5706882476806641, "learning_rate": 0.00017435131142163988, "loss": 1.128, "step": 5253 }, { "epoch": 0.9355413105413105, "grad_norm": 0.5369367003440857, "learning_rate": 0.00017434195028475253, "loss": 1.0562, "step": 5254 }, { "epoch": 0.9357193732193733, "grad_norm": 0.49957552552223206, "learning_rate": 0.0001743325876912891, "loss": 1.0568, "step": 5255 }, { "epoch": 0.9358974358974359, "grad_norm": 0.5398106575012207, "learning_rate": 0.00017432322364143305, "loss": 1.1502, "step": 5256 }, { "epoch": 0.9360754985754985, "grad_norm": 0.6522027254104614, "learning_rate": 0.00017431385813536783, "loss": 1.0591, "step": 5257 }, { "epoch": 0.9362535612535613, "grad_norm": 0.5872012972831726, "learning_rate": 0.00017430449117327693, "loss": 1.3737, "step": 5258 }, { "epoch": 0.9364316239316239, "grad_norm": 0.5124474167823792, "learning_rate": 0.00017429512275534382, "loss": 1.0727, "step": 5259 }, { "epoch": 0.9366096866096866, "grad_norm": 0.5103365778923035, "learning_rate": 0.00017428575288175218, "loss": 1.0339, "step": 5260 }, { "epoch": 0.9367877492877493, "grad_norm": 0.585483729839325, "learning_rate": 0.0001742763815526855, "loss": 1.1844, "step": 5261 }, { "epoch": 0.936965811965812, "grad_norm": 0.5855562090873718, "learning_rate": 0.00017426700876832746, "loss": 1.3234, "step": 5262 }, { "epoch": 0.9371438746438746, "grad_norm": 0.5774588584899902, "learning_rate": 0.00017425763452886162, "loss": 1.0937, "step": 5263 }, { "epoch": 0.9373219373219374, "grad_norm": 0.5718343257904053, "learning_rate": 0.00017424825883447168, "loss": 1.0783, "step": 5264 }, { "epoch": 0.9375, "grad_norm": 0.5414558053016663, "learning_rate": 0.00017423888168534136, "loss": 1.1244, "step": 5265 }, { "epoch": 0.9376780626780626, "grad_norm": 0.5818275809288025, "learning_rate": 0.00017422950308165438, "loss": 1.247, "step": 5266 }, { "epoch": 0.9378561253561254, "grad_norm": 0.586398184299469, "learning_rate": 0.00017422012302359448, "loss": 1.0515, "step": 5267 }, { 
"epoch": 0.938034188034188, "grad_norm": 0.5236606001853943, "learning_rate": 0.00017421074151134544, "loss": 1.1907, "step": 5268 }, { "epoch": 0.9382122507122507, "grad_norm": 0.5108010172843933, "learning_rate": 0.0001742013585450911, "loss": 1.1125, "step": 5269 }, { "epoch": 0.9383903133903134, "grad_norm": 0.4956454038619995, "learning_rate": 0.00017419197412501527, "loss": 1.0305, "step": 5270 }, { "epoch": 0.9385683760683761, "grad_norm": 0.5432302951812744, "learning_rate": 0.0001741825882513018, "loss": 1.1946, "step": 5271 }, { "epoch": 0.9387464387464387, "grad_norm": 0.5119295716285706, "learning_rate": 0.00017417320092413463, "loss": 0.875, "step": 5272 }, { "epoch": 0.9389245014245015, "grad_norm": 0.49740248918533325, "learning_rate": 0.0001741638121436977, "loss": 1.1093, "step": 5273 }, { "epoch": 0.9391025641025641, "grad_norm": 0.5069027543067932, "learning_rate": 0.00017415442191017491, "loss": 1.2498, "step": 5274 }, { "epoch": 0.9392806267806267, "grad_norm": 0.570264995098114, "learning_rate": 0.00017414503022375027, "loss": 1.0192, "step": 5275 }, { "epoch": 0.9394586894586895, "grad_norm": 0.48129352927207947, "learning_rate": 0.00017413563708460776, "loss": 0.8467, "step": 5276 }, { "epoch": 0.9396367521367521, "grad_norm": 0.5214534401893616, "learning_rate": 0.00017412624249293148, "loss": 0.9723, "step": 5277 }, { "epoch": 0.9398148148148148, "grad_norm": 0.5150161385536194, "learning_rate": 0.00017411684644890544, "loss": 1.0906, "step": 5278 }, { "epoch": 0.9399928774928775, "grad_norm": 0.5695852637290955, "learning_rate": 0.00017410744895271377, "loss": 1.2891, "step": 5279 }, { "epoch": 0.9401709401709402, "grad_norm": 0.5613594651222229, "learning_rate": 0.00017409805000454055, "loss": 1.1373, "step": 5280 }, { "epoch": 0.9403490028490028, "grad_norm": 0.5134239196777344, "learning_rate": 0.00017408864960457004, "loss": 1.1081, "step": 5281 }, { "epoch": 0.9405270655270656, "grad_norm": 0.5256397724151611, "learning_rate": 
0.00017407924775298628, "loss": 1.058, "step": 5282 }, { "epoch": 0.9407051282051282, "grad_norm": 0.5145402550697327, "learning_rate": 0.00017406984444997357, "loss": 1.0667, "step": 5283 }, { "epoch": 0.9408831908831908, "grad_norm": 0.5435704588890076, "learning_rate": 0.0001740604396957161, "loss": 1.2275, "step": 5284 }, { "epoch": 0.9410612535612536, "grad_norm": 0.5798762440681458, "learning_rate": 0.0001740510334903982, "loss": 1.2061, "step": 5285 }, { "epoch": 0.9412393162393162, "grad_norm": 0.5461057424545288, "learning_rate": 0.00017404162583420414, "loss": 1.1585, "step": 5286 }, { "epoch": 0.9414173789173789, "grad_norm": 0.5090487003326416, "learning_rate": 0.00017403221672731818, "loss": 1.2496, "step": 5287 }, { "epoch": 0.9415954415954416, "grad_norm": 0.5171035528182983, "learning_rate": 0.00017402280616992476, "loss": 1.1947, "step": 5288 }, { "epoch": 0.9417735042735043, "grad_norm": 0.5292364358901978, "learning_rate": 0.00017401339416220818, "loss": 1.0182, "step": 5289 }, { "epoch": 0.9419515669515669, "grad_norm": 0.5011499524116516, "learning_rate": 0.00017400398070435293, "loss": 1.3363, "step": 5290 }, { "epoch": 0.9421296296296297, "grad_norm": 0.4821554720401764, "learning_rate": 0.0001739945657965434, "loss": 0.9077, "step": 5291 }, { "epoch": 0.9423076923076923, "grad_norm": 0.5849515199661255, "learning_rate": 0.00017398514943896403, "loss": 1.1582, "step": 5292 }, { "epoch": 0.9424857549857549, "grad_norm": 0.49826139211654663, "learning_rate": 0.00017397573163179937, "loss": 1.1025, "step": 5293 }, { "epoch": 0.9426638176638177, "grad_norm": 0.6031842827796936, "learning_rate": 0.00017396631237523392, "loss": 1.1932, "step": 5294 }, { "epoch": 0.9428418803418803, "grad_norm": 0.6013330221176147, "learning_rate": 0.00017395689166945224, "loss": 1.2078, "step": 5295 }, { "epoch": 0.9430199430199431, "grad_norm": 0.5147021412849426, "learning_rate": 0.00017394746951463893, "loss": 0.9988, "step": 5296 }, { "epoch": 
0.9431980056980057, "grad_norm": 0.5721762776374817, "learning_rate": 0.0001739380459109785, "loss": 1.1442, "step": 5297 }, { "epoch": 0.9433760683760684, "grad_norm": 0.49272531270980835, "learning_rate": 0.0001739286208586557, "loss": 1.0481, "step": 5298 }, { "epoch": 0.9435541310541311, "grad_norm": 0.6545688509941101, "learning_rate": 0.00017391919435785514, "loss": 1.1393, "step": 5299 }, { "epoch": 0.9437321937321937, "grad_norm": 0.617756724357605, "learning_rate": 0.00017390976640876152, "loss": 1.1108, "step": 5300 }, { "epoch": 0.9439102564102564, "grad_norm": 0.4870470464229584, "learning_rate": 0.00017390033701155955, "loss": 0.9028, "step": 5301 }, { "epoch": 0.9440883190883191, "grad_norm": 0.5250138640403748, "learning_rate": 0.000173890906166434, "loss": 1.0326, "step": 5302 }, { "epoch": 0.9442663817663818, "grad_norm": 0.5879467129707336, "learning_rate": 0.00017388147387356964, "loss": 1.1569, "step": 5303 }, { "epoch": 0.9444444444444444, "grad_norm": 0.4790486991405487, "learning_rate": 0.00017387204013315127, "loss": 0.967, "step": 5304 }, { "epoch": 0.9446225071225072, "grad_norm": 0.5884372591972351, "learning_rate": 0.0001738626049453637, "loss": 1.1342, "step": 5305 }, { "epoch": 0.9448005698005698, "grad_norm": 0.4633975028991699, "learning_rate": 0.00017385316831039187, "loss": 0.8942, "step": 5306 }, { "epoch": 0.9449786324786325, "grad_norm": 0.5301823019981384, "learning_rate": 0.0001738437302284206, "loss": 1.1683, "step": 5307 }, { "epoch": 0.9451566951566952, "grad_norm": 0.5476770997047424, "learning_rate": 0.00017383429069963484, "loss": 1.1574, "step": 5308 }, { "epoch": 0.9453347578347578, "grad_norm": 0.47689101099967957, "learning_rate": 0.00017382484972421953, "loss": 1.0792, "step": 5309 }, { "epoch": 0.9455128205128205, "grad_norm": 0.526063084602356, "learning_rate": 0.00017381540730235963, "loss": 0.9012, "step": 5310 }, { "epoch": 0.9456908831908832, "grad_norm": 0.5667058229446411, "learning_rate": 
0.0001738059634342402, "loss": 1.0908, "step": 5311 }, { "epoch": 0.9458689458689459, "grad_norm": 0.5402196645736694, "learning_rate": 0.00017379651812004623, "loss": 0.943, "step": 5312 }, { "epoch": 0.9460470085470085, "grad_norm": 0.5288932919502258, "learning_rate": 0.00017378707135996276, "loss": 1.0055, "step": 5313 }, { "epoch": 0.9462250712250713, "grad_norm": 0.5607456564903259, "learning_rate": 0.00017377762315417492, "loss": 1.2073, "step": 5314 }, { "epoch": 0.9464031339031339, "grad_norm": 0.5737698674201965, "learning_rate": 0.00017376817350286781, "loss": 1.0001, "step": 5315 }, { "epoch": 0.9465811965811965, "grad_norm": 0.6562079787254333, "learning_rate": 0.00017375872240622657, "loss": 1.1503, "step": 5316 }, { "epoch": 0.9467592592592593, "grad_norm": 0.5407183170318604, "learning_rate": 0.0001737492698644364, "loss": 1.1169, "step": 5317 }, { "epoch": 0.9469373219373219, "grad_norm": 0.5504152178764343, "learning_rate": 0.00017373981587768248, "loss": 1.0468, "step": 5318 }, { "epoch": 0.9471153846153846, "grad_norm": 0.4813530743122101, "learning_rate": 0.00017373036044615006, "loss": 0.9707, "step": 5319 }, { "epoch": 0.9472934472934473, "grad_norm": 0.5810509920120239, "learning_rate": 0.00017372090357002437, "loss": 1.4949, "step": 5320 }, { "epoch": 0.94747150997151, "grad_norm": 0.5250222086906433, "learning_rate": 0.00017371144524949074, "loss": 1.0818, "step": 5321 }, { "epoch": 0.9476495726495726, "grad_norm": 0.4852280914783478, "learning_rate": 0.00017370198548473444, "loss": 1.1793, "step": 5322 }, { "epoch": 0.9478276353276354, "grad_norm": 0.5392420291900635, "learning_rate": 0.00017369252427594086, "loss": 1.153, "step": 5323 }, { "epoch": 0.948005698005698, "grad_norm": 0.521294116973877, "learning_rate": 0.00017368306162329533, "loss": 0.8572, "step": 5324 }, { "epoch": 0.9481837606837606, "grad_norm": 0.5579673647880554, "learning_rate": 0.0001736735975269833, "loss": 1.0452, "step": 5325 }, { "epoch": 0.9483618233618234, 
"grad_norm": 0.6027318835258484, "learning_rate": 0.0001736641319871901, "loss": 1.3475, "step": 5326 }, { "epoch": 0.948539886039886, "grad_norm": 0.5600738525390625, "learning_rate": 0.00017365466500410132, "loss": 1.0338, "step": 5327 }, { "epoch": 0.9487179487179487, "grad_norm": 0.5691532492637634, "learning_rate": 0.00017364519657790236, "loss": 1.129, "step": 5328 }, { "epoch": 0.9488960113960114, "grad_norm": 0.5161463022232056, "learning_rate": 0.0001736357267087788, "loss": 1.0438, "step": 5329 }, { "epoch": 0.9490740740740741, "grad_norm": 0.5049656629562378, "learning_rate": 0.0001736262553969161, "loss": 0.9484, "step": 5330 }, { "epoch": 0.9492521367521367, "grad_norm": 0.5477150678634644, "learning_rate": 0.00017361678264249988, "loss": 0.8995, "step": 5331 }, { "epoch": 0.9494301994301995, "grad_norm": 0.5679608583450317, "learning_rate": 0.0001736073084457157, "loss": 1.241, "step": 5332 }, { "epoch": 0.9496082621082621, "grad_norm": 0.5748196840286255, "learning_rate": 0.00017359783280674926, "loss": 1.0046, "step": 5333 }, { "epoch": 0.9497863247863247, "grad_norm": 0.5677094459533691, "learning_rate": 0.00017358835572578617, "loss": 1.2913, "step": 5334 }, { "epoch": 0.9499643874643875, "grad_norm": 0.49663659930229187, "learning_rate": 0.0001735788772030121, "loss": 1.0388, "step": 5335 }, { "epoch": 0.9501424501424501, "grad_norm": 0.5687218904495239, "learning_rate": 0.0001735693972386128, "loss": 1.1631, "step": 5336 }, { "epoch": 0.9503205128205128, "grad_norm": 0.520708441734314, "learning_rate": 0.00017355991583277395, "loss": 1.0744, "step": 5337 }, { "epoch": 0.9504985754985755, "grad_norm": 0.5738952159881592, "learning_rate": 0.00017355043298568137, "loss": 1.318, "step": 5338 }, { "epoch": 0.9506766381766382, "grad_norm": 0.5378455519676208, "learning_rate": 0.00017354094869752085, "loss": 0.9827, "step": 5339 }, { "epoch": 0.9508547008547008, "grad_norm": 0.5047366619110107, "learning_rate": 0.0001735314629684782, "loss": 1.0966, 
"step": 5340 }, { "epoch": 0.9510327635327636, "grad_norm": 0.5526043772697449, "learning_rate": 0.0001735219757987393, "loss": 1.059, "step": 5341 }, { "epoch": 0.9512108262108262, "grad_norm": 0.5741400718688965, "learning_rate": 0.00017351248718849003, "loss": 1.1232, "step": 5342 }, { "epoch": 0.9513888888888888, "grad_norm": 0.5421118140220642, "learning_rate": 0.00017350299713791626, "loss": 1.0427, "step": 5343 }, { "epoch": 0.9515669515669516, "grad_norm": 0.4857081472873688, "learning_rate": 0.00017349350564720392, "loss": 0.8663, "step": 5344 }, { "epoch": 0.9517450142450142, "grad_norm": 0.5411618947982788, "learning_rate": 0.00017348401271653904, "loss": 1.0317, "step": 5345 }, { "epoch": 0.9519230769230769, "grad_norm": 0.5246246457099915, "learning_rate": 0.00017347451834610756, "loss": 1.0076, "step": 5346 }, { "epoch": 0.9521011396011396, "grad_norm": 0.5278927683830261, "learning_rate": 0.00017346502253609556, "loss": 0.931, "step": 5347 }, { "epoch": 0.9522792022792023, "grad_norm": 0.5934548377990723, "learning_rate": 0.00017345552528668902, "loss": 1.3205, "step": 5348 }, { "epoch": 0.9524572649572649, "grad_norm": 0.5466100573539734, "learning_rate": 0.00017344602659807406, "loss": 0.8725, "step": 5349 }, { "epoch": 0.9526353276353277, "grad_norm": 0.5220118761062622, "learning_rate": 0.00017343652647043678, "loss": 1.1642, "step": 5350 }, { "epoch": 0.9528133903133903, "grad_norm": 0.6166301965713501, "learning_rate": 0.0001734270249039633, "loss": 0.8152, "step": 5351 }, { "epoch": 0.9529914529914529, "grad_norm": 0.5173428058624268, "learning_rate": 0.00017341752189883983, "loss": 0.9296, "step": 5352 }, { "epoch": 0.9531695156695157, "grad_norm": 0.5363461375236511, "learning_rate": 0.0001734080174552525, "loss": 1.3546, "step": 5353 }, { "epoch": 0.9533475783475783, "grad_norm": 0.5333831906318665, "learning_rate": 0.0001733985115733876, "loss": 1.0401, "step": 5354 }, { "epoch": 0.9535256410256411, "grad_norm": 0.5179334878921509, 
"learning_rate": 0.00017338900425343132, "loss": 1.1254, "step": 5355 }, { "epoch": 0.9537037037037037, "grad_norm": 0.5171303153038025, "learning_rate": 0.00017337949549556993, "loss": 1.0518, "step": 5356 }, { "epoch": 0.9538817663817664, "grad_norm": 0.5164596438407898, "learning_rate": 0.00017336998529998978, "loss": 0.8732, "step": 5357 }, { "epoch": 0.9540598290598291, "grad_norm": 0.5555717349052429, "learning_rate": 0.00017336047366687719, "loss": 1.2312, "step": 5358 }, { "epoch": 0.9542378917378918, "grad_norm": 0.45685622096061707, "learning_rate": 0.00017335096059641847, "loss": 0.8882, "step": 5359 }, { "epoch": 0.9544159544159544, "grad_norm": 0.5260133743286133, "learning_rate": 0.0001733414460888001, "loss": 1.0952, "step": 5360 }, { "epoch": 0.9545940170940171, "grad_norm": 0.4597703814506531, "learning_rate": 0.0001733319301442084, "loss": 1.0835, "step": 5361 }, { "epoch": 0.9547720797720798, "grad_norm": 0.5279495120048523, "learning_rate": 0.0001733224127628299, "loss": 1.0295, "step": 5362 }, { "epoch": 0.9549501424501424, "grad_norm": 0.48919400572776794, "learning_rate": 0.00017331289394485104, "loss": 0.9693, "step": 5363 }, { "epoch": 0.9551282051282052, "grad_norm": 0.5639515519142151, "learning_rate": 0.0001733033736904583, "loss": 1.0893, "step": 5364 }, { "epoch": 0.9553062678062678, "grad_norm": 0.49761319160461426, "learning_rate": 0.00017329385199983823, "loss": 1.038, "step": 5365 }, { "epoch": 0.9554843304843305, "grad_norm": 0.5503305792808533, "learning_rate": 0.0001732843288731774, "loss": 0.9976, "step": 5366 }, { "epoch": 0.9556623931623932, "grad_norm": 0.5633028745651245, "learning_rate": 0.00017327480431066235, "loss": 1.0602, "step": 5367 }, { "epoch": 0.9558404558404558, "grad_norm": 0.48074454069137573, "learning_rate": 0.00017326527831247973, "loss": 1.0286, "step": 5368 }, { "epoch": 0.9560185185185185, "grad_norm": 0.506597638130188, "learning_rate": 0.0001732557508788162, "loss": 0.9061, "step": 5369 }, { "epoch": 
0.9561965811965812, "grad_norm": 0.6570749282836914, "learning_rate": 0.0001732462220098584, "loss": 1.0852, "step": 5370 }, { "epoch": 0.9563746438746439, "grad_norm": 0.5607653856277466, "learning_rate": 0.00017323669170579302, "loss": 1.0486, "step": 5371 }, { "epoch": 0.9565527065527065, "grad_norm": 0.6047050356864929, "learning_rate": 0.0001732271599668068, "loss": 1.2175, "step": 5372 }, { "epoch": 0.9567307692307693, "grad_norm": 0.5506869554519653, "learning_rate": 0.00017321762679308651, "loss": 1.0114, "step": 5373 }, { "epoch": 0.9569088319088319, "grad_norm": 0.5868638157844543, "learning_rate": 0.00017320809218481891, "loss": 1.2983, "step": 5374 }, { "epoch": 0.9570868945868946, "grad_norm": 0.539619505405426, "learning_rate": 0.00017319855614219084, "loss": 1.2361, "step": 5375 }, { "epoch": 0.9572649572649573, "grad_norm": 0.5525495409965515, "learning_rate": 0.0001731890186653891, "loss": 1.1316, "step": 5376 }, { "epoch": 0.95744301994302, "grad_norm": 0.5549767017364502, "learning_rate": 0.0001731794797546006, "loss": 1.0547, "step": 5377 }, { "epoch": 0.9576210826210826, "grad_norm": 0.5356076955795288, "learning_rate": 0.00017316993941001222, "loss": 0.9942, "step": 5378 }, { "epoch": 0.9577991452991453, "grad_norm": 0.5365784168243408, "learning_rate": 0.00017316039763181084, "loss": 1.226, "step": 5379 }, { "epoch": 0.957977207977208, "grad_norm": 0.5190927386283875, "learning_rate": 0.00017315085442018343, "loss": 1.1704, "step": 5380 }, { "epoch": 0.9581552706552706, "grad_norm": 0.526658833026886, "learning_rate": 0.00017314130977531705, "loss": 1.109, "step": 5381 }, { "epoch": 0.9583333333333334, "grad_norm": 0.5373684763908386, "learning_rate": 0.0001731317636973986, "loss": 1.0018, "step": 5382 }, { "epoch": 0.958511396011396, "grad_norm": 0.5714904069900513, "learning_rate": 0.00017312221618661516, "loss": 1.1855, "step": 5383 }, { "epoch": 0.9586894586894587, "grad_norm": 0.5707863569259644, "learning_rate": 0.00017311266724315377, 
"loss": 0.9482, "step": 5384 }, { "epoch": 0.9588675213675214, "grad_norm": 0.5856872797012329, "learning_rate": 0.00017310311686720157, "loss": 0.9543, "step": 5385 }, { "epoch": 0.959045584045584, "grad_norm": 0.5041963458061218, "learning_rate": 0.00017309356505894568, "loss": 1.1427, "step": 5386 }, { "epoch": 0.9592236467236467, "grad_norm": 0.5409179925918579, "learning_rate": 0.00017308401181857316, "loss": 0.8432, "step": 5387 }, { "epoch": 0.9594017094017094, "grad_norm": 0.5248702764511108, "learning_rate": 0.00017307445714627128, "loss": 1.1403, "step": 5388 }, { "epoch": 0.9595797720797721, "grad_norm": 0.50718092918396, "learning_rate": 0.00017306490104222722, "loss": 0.9066, "step": 5389 }, { "epoch": 0.9597578347578347, "grad_norm": 0.5563821196556091, "learning_rate": 0.0001730553435066282, "loss": 1.0204, "step": 5390 }, { "epoch": 0.9599358974358975, "grad_norm": 0.5696987509727478, "learning_rate": 0.00017304578453966146, "loss": 1.1405, "step": 5391 }, { "epoch": 0.9601139601139601, "grad_norm": 0.5927395224571228, "learning_rate": 0.00017303622414151435, "loss": 1.0398, "step": 5392 }, { "epoch": 0.9602920227920227, "grad_norm": 0.5375707745552063, "learning_rate": 0.0001730266623123741, "loss": 0.9519, "step": 5393 }, { "epoch": 0.9604700854700855, "grad_norm": 0.457998126745224, "learning_rate": 0.00017301709905242815, "loss": 0.8743, "step": 5394 }, { "epoch": 0.9606481481481481, "grad_norm": 0.5427796244621277, "learning_rate": 0.00017300753436186382, "loss": 1.078, "step": 5395 }, { "epoch": 0.9608262108262108, "grad_norm": 0.5458595752716064, "learning_rate": 0.0001729979682408685, "loss": 1.1081, "step": 5396 }, { "epoch": 0.9610042735042735, "grad_norm": 0.5495280027389526, "learning_rate": 0.00017298840068962962, "loss": 1.0141, "step": 5397 }, { "epoch": 0.9611823361823362, "grad_norm": 0.5878560543060303, "learning_rate": 0.00017297883170833465, "loss": 1.302, "step": 5398 }, { "epoch": 0.9613603988603988, "grad_norm": 
0.5452881455421448, "learning_rate": 0.00017296926129717108, "loss": 0.9929, "step": 5399 }, { "epoch": 0.9615384615384616, "grad_norm": 0.6021811366081238, "learning_rate": 0.0001729596894563264, "loss": 1.2629, "step": 5400 }, { "epoch": 0.9617165242165242, "grad_norm": 0.5820204615592957, "learning_rate": 0.0001729501161859882, "loss": 1.0662, "step": 5401 }, { "epoch": 0.9618945868945868, "grad_norm": 0.4953218102455139, "learning_rate": 0.000172940541486344, "loss": 1.047, "step": 5402 }, { "epoch": 0.9620726495726496, "grad_norm": 0.5409793853759766, "learning_rate": 0.00017293096535758143, "loss": 1.1993, "step": 5403 }, { "epoch": 0.9622507122507122, "grad_norm": 0.49702873826026917, "learning_rate": 0.00017292138779988805, "loss": 1.2471, "step": 5404 }, { "epoch": 0.9624287749287749, "grad_norm": 0.5743489861488342, "learning_rate": 0.00017291180881345158, "loss": 1.0816, "step": 5405 }, { "epoch": 0.9626068376068376, "grad_norm": 0.5747945308685303, "learning_rate": 0.00017290222839845968, "loss": 1.3548, "step": 5406 }, { "epoch": 0.9627849002849003, "grad_norm": 0.5341345071792603, "learning_rate": 0.00017289264655510005, "loss": 1.0435, "step": 5407 }, { "epoch": 0.9629629629629629, "grad_norm": 0.5719689130783081, "learning_rate": 0.00017288306328356044, "loss": 1.2319, "step": 5408 }, { "epoch": 0.9631410256410257, "grad_norm": 0.4783279597759247, "learning_rate": 0.0001728734785840286, "loss": 0.9397, "step": 5409 }, { "epoch": 0.9633190883190883, "grad_norm": 0.4730507731437683, "learning_rate": 0.00017286389245669233, "loss": 0.9384, "step": 5410 }, { "epoch": 0.9634971509971509, "grad_norm": 0.5309939384460449, "learning_rate": 0.00017285430490173944, "loss": 1.098, "step": 5411 }, { "epoch": 0.9636752136752137, "grad_norm": 0.5177853107452393, "learning_rate": 0.0001728447159193578, "loss": 1.2777, "step": 5412 }, { "epoch": 0.9638532763532763, "grad_norm": 0.6437913775444031, "learning_rate": 0.00017283512550973526, "loss": 1.2661, "step": 
5413 }, { "epoch": 0.9640313390313391, "grad_norm": 0.6096072196960449, "learning_rate": 0.00017282553367305975, "loss": 0.9569, "step": 5414 }, { "epoch": 0.9642094017094017, "grad_norm": 0.5104934573173523, "learning_rate": 0.00017281594040951918, "loss": 0.9666, "step": 5415 }, { "epoch": 0.9643874643874644, "grad_norm": 0.6178240776062012, "learning_rate": 0.00017280634571930153, "loss": 1.1277, "step": 5416 }, { "epoch": 0.9645655270655271, "grad_norm": 0.5749034881591797, "learning_rate": 0.0001727967496025948, "loss": 1.245, "step": 5417 }, { "epoch": 0.9647435897435898, "grad_norm": 0.5036978721618652, "learning_rate": 0.00017278715205958694, "loss": 1.3049, "step": 5418 }, { "epoch": 0.9649216524216524, "grad_norm": 0.5593041777610779, "learning_rate": 0.00017277755309046605, "loss": 1.2304, "step": 5419 }, { "epoch": 0.9650997150997151, "grad_norm": 0.5446555614471436, "learning_rate": 0.0001727679526954202, "loss": 0.732, "step": 5420 }, { "epoch": 0.9652777777777778, "grad_norm": 0.6063070297241211, "learning_rate": 0.00017275835087463747, "loss": 1.3723, "step": 5421 }, { "epoch": 0.9654558404558404, "grad_norm": 0.4994211792945862, "learning_rate": 0.00017274874762830602, "loss": 1.0505, "step": 5422 }, { "epoch": 0.9656339031339032, "grad_norm": 0.49396973848342896, "learning_rate": 0.00017273914295661395, "loss": 0.8691, "step": 5423 }, { "epoch": 0.9658119658119658, "grad_norm": 0.5067027807235718, "learning_rate": 0.0001727295368597495, "loss": 0.9744, "step": 5424 }, { "epoch": 0.9659900284900285, "grad_norm": 0.6720643043518066, "learning_rate": 0.00017271992933790085, "loss": 1.1513, "step": 5425 }, { "epoch": 0.9661680911680912, "grad_norm": 0.5494341254234314, "learning_rate": 0.00017271032039125624, "loss": 0.8295, "step": 5426 }, { "epoch": 0.9663461538461539, "grad_norm": 0.644332230091095, "learning_rate": 0.00017270071002000394, "loss": 1.0043, "step": 5427 }, { "epoch": 0.9665242165242165, "grad_norm": 0.5658500790596008, 
"learning_rate": 0.00017269109822433225, "loss": 1.2575, "step": 5428 }, { "epoch": 0.9667022792022792, "grad_norm": 0.5163155794143677, "learning_rate": 0.00017268148500442952, "loss": 1.1391, "step": 5429 }, { "epoch": 0.9668803418803419, "grad_norm": 0.5113703608512878, "learning_rate": 0.00017267187036048404, "loss": 1.0819, "step": 5430 }, { "epoch": 0.9670584045584045, "grad_norm": 0.6339422464370728, "learning_rate": 0.00017266225429268426, "loss": 1.0733, "step": 5431 }, { "epoch": 0.9672364672364673, "grad_norm": 0.5158288478851318, "learning_rate": 0.0001726526368012185, "loss": 0.9518, "step": 5432 }, { "epoch": 0.9674145299145299, "grad_norm": 0.593717634677887, "learning_rate": 0.00017264301788627527, "loss": 0.9416, "step": 5433 }, { "epoch": 0.9675925925925926, "grad_norm": 0.49593186378479004, "learning_rate": 0.00017263339754804301, "loss": 1.0307, "step": 5434 }, { "epoch": 0.9677706552706553, "grad_norm": 0.44032949209213257, "learning_rate": 0.00017262377578671024, "loss": 0.7884, "step": 5435 }, { "epoch": 0.967948717948718, "grad_norm": 0.513073742389679, "learning_rate": 0.00017261415260246538, "loss": 0.9797, "step": 5436 }, { "epoch": 0.9681267806267806, "grad_norm": 0.5737422108650208, "learning_rate": 0.0001726045279954971, "loss": 1.0487, "step": 5437 }, { "epoch": 0.9683048433048433, "grad_norm": 0.5385867953300476, "learning_rate": 0.0001725949019659939, "loss": 1.4166, "step": 5438 }, { "epoch": 0.968482905982906, "grad_norm": 0.5224326848983765, "learning_rate": 0.00017258527451414438, "loss": 1.195, "step": 5439 }, { "epoch": 0.9686609686609686, "grad_norm": 0.5305148363113403, "learning_rate": 0.0001725756456401372, "loss": 1.0301, "step": 5440 }, { "epoch": 0.9688390313390314, "grad_norm": 0.532588005065918, "learning_rate": 0.000172566015344161, "loss": 1.1269, "step": 5441 }, { "epoch": 0.969017094017094, "grad_norm": 0.5812515020370483, "learning_rate": 0.0001725563836264045, "loss": 1.1787, "step": 5442 }, { "epoch": 
0.9691951566951567, "grad_norm": 0.4962109327316284, "learning_rate": 0.00017254675048705638, "loss": 1.0639, "step": 5443 }, { "epoch": 0.9693732193732194, "grad_norm": 0.5094883441925049, "learning_rate": 0.00017253711592630534, "loss": 1.0922, "step": 5444 }, { "epoch": 0.969551282051282, "grad_norm": 0.5728049874305725, "learning_rate": 0.00017252747994434025, "loss": 1.1237, "step": 5445 }, { "epoch": 0.9697293447293447, "grad_norm": 0.5406180620193481, "learning_rate": 0.00017251784254134983, "loss": 1.1161, "step": 5446 }, { "epoch": 0.9699074074074074, "grad_norm": 0.5724552869796753, "learning_rate": 0.00017250820371752292, "loss": 1.2205, "step": 5447 }, { "epoch": 0.9700854700854701, "grad_norm": 0.5698846578598022, "learning_rate": 0.0001724985634730484, "loss": 1.1472, "step": 5448 }, { "epoch": 0.9702635327635327, "grad_norm": 0.5315805673599243, "learning_rate": 0.0001724889218081151, "loss": 1.0253, "step": 5449 }, { "epoch": 0.9704415954415955, "grad_norm": 0.5970377326011658, "learning_rate": 0.000172479278722912, "loss": 1.3033, "step": 5450 }, { "epoch": 0.9706196581196581, "grad_norm": 0.6149488687515259, "learning_rate": 0.00017246963421762798, "loss": 1.0689, "step": 5451 }, { "epoch": 0.9707977207977208, "grad_norm": 0.4848574995994568, "learning_rate": 0.00017245998829245202, "loss": 0.8829, "step": 5452 }, { "epoch": 0.9709757834757835, "grad_norm": 0.6073294281959534, "learning_rate": 0.00017245034094757312, "loss": 1.2378, "step": 5453 }, { "epoch": 0.9711538461538461, "grad_norm": 0.6362034678459167, "learning_rate": 0.00017244069218318026, "loss": 1.3606, "step": 5454 }, { "epoch": 0.9713319088319088, "grad_norm": 0.5353880524635315, "learning_rate": 0.00017243104199946257, "loss": 1.1288, "step": 5455 }, { "epoch": 0.9715099715099715, "grad_norm": 0.5096352100372314, "learning_rate": 0.00017242139039660902, "loss": 1.0056, "step": 5456 }, { "epoch": 0.9716880341880342, "grad_norm": 0.5086682438850403, "learning_rate": 
0.00017241173737480884, "loss": 1.091, "step": 5457 }, { "epoch": 0.9718660968660968, "grad_norm": 0.5034295320510864, "learning_rate": 0.000172402082934251, "loss": 0.9749, "step": 5458 }, { "epoch": 0.9720441595441596, "grad_norm": 0.5205379724502563, "learning_rate": 0.0001723924270751248, "loss": 1.1068, "step": 5459 }, { "epoch": 0.9722222222222222, "grad_norm": 0.5904826521873474, "learning_rate": 0.00017238276979761937, "loss": 1.0613, "step": 5460 }, { "epoch": 0.9724002849002849, "grad_norm": 0.6415045261383057, "learning_rate": 0.0001723731111019239, "loss": 1.2126, "step": 5461 }, { "epoch": 0.9725783475783476, "grad_norm": 0.5769147872924805, "learning_rate": 0.0001723634509882277, "loss": 1.337, "step": 5462 }, { "epoch": 0.9727564102564102, "grad_norm": 0.5585111975669861, "learning_rate": 0.00017235378945671998, "loss": 1.3922, "step": 5463 }, { "epoch": 0.9729344729344729, "grad_norm": 0.5788411498069763, "learning_rate": 0.00017234412650759008, "loss": 0.8532, "step": 5464 }, { "epoch": 0.9731125356125356, "grad_norm": 0.5617673397064209, "learning_rate": 0.00017233446214102728, "loss": 1.2575, "step": 5465 }, { "epoch": 0.9732905982905983, "grad_norm": 0.4227815568447113, "learning_rate": 0.00017232479635722093, "loss": 1.0618, "step": 5466 }, { "epoch": 0.9734686609686609, "grad_norm": 0.49751797318458557, "learning_rate": 0.00017231512915636047, "loss": 0.7714, "step": 5467 }, { "epoch": 0.9736467236467237, "grad_norm": 0.5983800292015076, "learning_rate": 0.0001723054605386353, "loss": 1.2297, "step": 5468 }, { "epoch": 0.9738247863247863, "grad_norm": 0.543394923210144, "learning_rate": 0.0001722957905042348, "loss": 1.0078, "step": 5469 }, { "epoch": 0.9740028490028491, "grad_norm": 0.5633566975593567, "learning_rate": 0.00017228611905334846, "loss": 1.0938, "step": 5470 }, { "epoch": 0.9741809116809117, "grad_norm": 0.49377235770225525, "learning_rate": 0.00017227644618616578, "loss": 1.096, "step": 5471 }, { "epoch": 0.9743589743589743, 
"grad_norm": 0.4963362216949463, "learning_rate": 0.00017226677190287627, "loss": 1.0003, "step": 5472 }, { "epoch": 0.9745370370370371, "grad_norm": 0.4483006000518799, "learning_rate": 0.00017225709620366953, "loss": 0.8623, "step": 5473 }, { "epoch": 0.9747150997150997, "grad_norm": 0.5429352521896362, "learning_rate": 0.00017224741908873506, "loss": 1.1383, "step": 5474 }, { "epoch": 0.9748931623931624, "grad_norm": 0.5871657729148865, "learning_rate": 0.0001722377405582625, "loss": 1.2005, "step": 5475 }, { "epoch": 0.9750712250712251, "grad_norm": 0.6002383828163147, "learning_rate": 0.0001722280606124415, "loss": 1.0696, "step": 5476 }, { "epoch": 0.9752492877492878, "grad_norm": 0.5351617336273193, "learning_rate": 0.00017221837925146164, "loss": 1.243, "step": 5477 }, { "epoch": 0.9754273504273504, "grad_norm": 0.46613118052482605, "learning_rate": 0.00017220869647551268, "loss": 1.0344, "step": 5478 }, { "epoch": 0.9756054131054132, "grad_norm": 0.6015593409538269, "learning_rate": 0.00017219901228478432, "loss": 1.082, "step": 5479 }, { "epoch": 0.9757834757834758, "grad_norm": 0.5829521417617798, "learning_rate": 0.0001721893266794663, "loss": 0.8683, "step": 5480 }, { "epoch": 0.9759615384615384, "grad_norm": 0.6344960927963257, "learning_rate": 0.00017217963965974838, "loss": 1.1048, "step": 5481 }, { "epoch": 0.9761396011396012, "grad_norm": 0.5586308240890503, "learning_rate": 0.00017216995122582034, "loss": 0.9657, "step": 5482 }, { "epoch": 0.9763176638176638, "grad_norm": 0.48625239729881287, "learning_rate": 0.00017216026137787204, "loss": 1.1026, "step": 5483 }, { "epoch": 0.9764957264957265, "grad_norm": 0.5625223517417908, "learning_rate": 0.00017215057011609332, "loss": 1.1579, "step": 5484 }, { "epoch": 0.9766737891737892, "grad_norm": 0.6016653776168823, "learning_rate": 0.0001721408774406741, "loss": 1.1777, "step": 5485 }, { "epoch": 0.9768518518518519, "grad_norm": 0.5444921851158142, "learning_rate": 0.00017213118335180418, "loss": 
1.119, "step": 5486 }, { "epoch": 0.9770299145299145, "grad_norm": 0.5574755668640137, "learning_rate": 0.0001721214878496736, "loss": 1.1128, "step": 5487 }, { "epoch": 0.9772079772079773, "grad_norm": 0.5486113429069519, "learning_rate": 0.00017211179093447226, "loss": 1.1673, "step": 5488 }, { "epoch": 0.9773860398860399, "grad_norm": 0.5545483231544495, "learning_rate": 0.00017210209260639018, "loss": 1.1748, "step": 5489 }, { "epoch": 0.9775641025641025, "grad_norm": 0.5756667256355286, "learning_rate": 0.0001720923928656174, "loss": 1.2377, "step": 5490 }, { "epoch": 0.9777421652421653, "grad_norm": 0.5744972229003906, "learning_rate": 0.00017208269171234392, "loss": 1.1242, "step": 5491 }, { "epoch": 0.9779202279202279, "grad_norm": 0.6109468340873718, "learning_rate": 0.00017207298914675984, "loss": 1.1948, "step": 5492 }, { "epoch": 0.9780982905982906, "grad_norm": 0.5195167660713196, "learning_rate": 0.00017206328516905525, "loss": 1.0941, "step": 5493 }, { "epoch": 0.9782763532763533, "grad_norm": 0.5549042224884033, "learning_rate": 0.0001720535797794203, "loss": 1.1503, "step": 5494 }, { "epoch": 0.978454415954416, "grad_norm": 0.6317743062973022, "learning_rate": 0.0001720438729780451, "loss": 1.3468, "step": 5495 }, { "epoch": 0.9786324786324786, "grad_norm": 0.5932528972625732, "learning_rate": 0.0001720341647651199, "loss": 1.105, "step": 5496 }, { "epoch": 0.9788105413105413, "grad_norm": 0.607880175113678, "learning_rate": 0.00017202445514083488, "loss": 1.1465, "step": 5497 }, { "epoch": 0.978988603988604, "grad_norm": 0.49227309226989746, "learning_rate": 0.00017201474410538027, "loss": 0.9075, "step": 5498 }, { "epoch": 0.9791666666666666, "grad_norm": 0.5059443116188049, "learning_rate": 0.00017200503165894636, "loss": 1.0483, "step": 5499 }, { "epoch": 0.9793447293447294, "grad_norm": 0.5792799592018127, "learning_rate": 0.0001719953178017234, "loss": 1.0987, "step": 5500 }, { "epoch": 0.979522792022792, "grad_norm": 0.5010457038879395, 
"learning_rate": 0.00017198560253390177, "loss": 1.1051, "step": 5501 }, { "epoch": 0.9797008547008547, "grad_norm": 0.5866543054580688, "learning_rate": 0.0001719758858556718, "loss": 1.2824, "step": 5502 }, { "epoch": 0.9798789173789174, "grad_norm": 0.5392137169837952, "learning_rate": 0.00017196616776722382, "loss": 0.886, "step": 5503 }, { "epoch": 0.98005698005698, "grad_norm": 0.5200899839401245, "learning_rate": 0.00017195644826874834, "loss": 1.1504, "step": 5504 }, { "epoch": 0.9802350427350427, "grad_norm": 0.533159077167511, "learning_rate": 0.00017194672736043569, "loss": 1.1216, "step": 5505 }, { "epoch": 0.9804131054131054, "grad_norm": 0.5543524622917175, "learning_rate": 0.0001719370050424764, "loss": 1.0161, "step": 5506 }, { "epoch": 0.9805911680911681, "grad_norm": 0.5315365195274353, "learning_rate": 0.00017192728131506092, "loss": 1.0509, "step": 5507 }, { "epoch": 0.9807692307692307, "grad_norm": 0.5406147837638855, "learning_rate": 0.00017191755617837977, "loss": 1.0695, "step": 5508 }, { "epoch": 0.9809472934472935, "grad_norm": 0.4563386142253876, "learning_rate": 0.00017190782963262354, "loss": 0.995, "step": 5509 }, { "epoch": 0.9811253561253561, "grad_norm": 0.5456405282020569, "learning_rate": 0.00017189810167798274, "loss": 1.0546, "step": 5510 }, { "epoch": 0.9813034188034188, "grad_norm": 0.6275575160980225, "learning_rate": 0.00017188837231464795, "loss": 1.0432, "step": 5511 }, { "epoch": 0.9814814814814815, "grad_norm": 0.49735602736473083, "learning_rate": 0.0001718786415428099, "loss": 1.035, "step": 5512 }, { "epoch": 0.9816595441595442, "grad_norm": 0.5234259963035583, "learning_rate": 0.00017186890936265916, "loss": 1.0918, "step": 5513 }, { "epoch": 0.9818376068376068, "grad_norm": 0.5091170072555542, "learning_rate": 0.00017185917577438643, "loss": 1.0239, "step": 5514 }, { "epoch": 0.9820156695156695, "grad_norm": 0.6155703067779541, "learning_rate": 0.00017184944077818244, "loss": 1.2366, "step": 5515 }, { "epoch": 
0.9821937321937322, "grad_norm": 0.5074070692062378, "learning_rate": 0.0001718397043742379, "loss": 1.0318, "step": 5516 }, { "epoch": 0.9823717948717948, "grad_norm": 0.5234423279762268, "learning_rate": 0.0001718299665627436, "loss": 1.0322, "step": 5517 }, { "epoch": 0.9825498575498576, "grad_norm": 0.5783474445343018, "learning_rate": 0.0001718202273438903, "loss": 0.9486, "step": 5518 }, { "epoch": 0.9827279202279202, "grad_norm": 0.5708683133125305, "learning_rate": 0.00017181048671786886, "loss": 1.0785, "step": 5519 }, { "epoch": 0.9829059829059829, "grad_norm": 0.5985961556434631, "learning_rate": 0.00017180074468487009, "loss": 1.198, "step": 5520 }, { "epoch": 0.9830840455840456, "grad_norm": 0.5711352229118347, "learning_rate": 0.0001717910012450849, "loss": 1.0386, "step": 5521 }, { "epoch": 0.9832621082621082, "grad_norm": 0.5338063836097717, "learning_rate": 0.00017178125639870416, "loss": 1.1594, "step": 5522 }, { "epoch": 0.9834401709401709, "grad_norm": 0.6144943237304688, "learning_rate": 0.00017177151014591881, "loss": 1.1083, "step": 5523 }, { "epoch": 0.9836182336182336, "grad_norm": 0.547285795211792, "learning_rate": 0.00017176176248691983, "loss": 1.1507, "step": 5524 }, { "epoch": 0.9837962962962963, "grad_norm": 0.5807644724845886, "learning_rate": 0.00017175201342189817, "loss": 1.3044, "step": 5525 }, { "epoch": 0.9839743589743589, "grad_norm": 0.5229477882385254, "learning_rate": 0.00017174226295104485, "loss": 1.2622, "step": 5526 }, { "epoch": 0.9841524216524217, "grad_norm": 0.6100695133209229, "learning_rate": 0.00017173251107455094, "loss": 1.2026, "step": 5527 }, { "epoch": 0.9843304843304843, "grad_norm": 0.5410884618759155, "learning_rate": 0.00017172275779260744, "loss": 1.2964, "step": 5528 }, { "epoch": 0.9845085470085471, "grad_norm": 0.5937406420707703, "learning_rate": 0.00017171300310540554, "loss": 1.1435, "step": 5529 }, { "epoch": 0.9846866096866097, "grad_norm": 0.56817227602005, "learning_rate": 
0.00017170324701313634, "loss": 1.0099, "step": 5530 }, { "epoch": 0.9848646723646723, "grad_norm": 0.5776323080062866, "learning_rate": 0.00017169348951599092, "loss": 1.3539, "step": 5531 }, { "epoch": 0.9850427350427351, "grad_norm": 0.5208535194396973, "learning_rate": 0.0001716837306141605, "loss": 1.2306, "step": 5532 }, { "epoch": 0.9852207977207977, "grad_norm": 0.552173376083374, "learning_rate": 0.0001716739703078363, "loss": 1.0551, "step": 5533 }, { "epoch": 0.9853988603988604, "grad_norm": 0.5327515602111816, "learning_rate": 0.00017166420859720955, "loss": 1.2443, "step": 5534 }, { "epoch": 0.9855769230769231, "grad_norm": 0.5255244374275208, "learning_rate": 0.0001716544454824715, "loss": 1.005, "step": 5535 }, { "epoch": 0.9857549857549858, "grad_norm": 0.4753847122192383, "learning_rate": 0.00017164468096381343, "loss": 1.0081, "step": 5536 }, { "epoch": 0.9859330484330484, "grad_norm": 0.5261829495429993, "learning_rate": 0.00017163491504142665, "loss": 1.2249, "step": 5537 }, { "epoch": 0.9861111111111112, "grad_norm": 0.46499499678611755, "learning_rate": 0.00017162514771550255, "loss": 0.8759, "step": 5538 }, { "epoch": 0.9862891737891738, "grad_norm": 0.5233004689216614, "learning_rate": 0.00017161537898623247, "loss": 1.0474, "step": 5539 }, { "epoch": 0.9864672364672364, "grad_norm": 0.46905553340911865, "learning_rate": 0.00017160560885380778, "loss": 0.9033, "step": 5540 }, { "epoch": 0.9866452991452992, "grad_norm": 0.5816231369972229, "learning_rate": 0.00017159583731841998, "loss": 1.0628, "step": 5541 }, { "epoch": 0.9868233618233618, "grad_norm": 0.4575413167476654, "learning_rate": 0.00017158606438026045, "loss": 1.0446, "step": 5542 }, { "epoch": 0.9870014245014245, "grad_norm": 0.5968109965324402, "learning_rate": 0.00017157629003952067, "loss": 1.032, "step": 5543 }, { "epoch": 0.9871794871794872, "grad_norm": 0.5316148400306702, "learning_rate": 0.00017156651429639218, "loss": 0.9167, "step": 5544 }, { "epoch": 
0.9873575498575499, "grad_norm": 0.5185125470161438, "learning_rate": 0.00017155673715106651, "loss": 1.1527, "step": 5545 }, { "epoch": 0.9875356125356125, "grad_norm": 0.5167772769927979, "learning_rate": 0.00017154695860373525, "loss": 0.9954, "step": 5546 }, { "epoch": 0.9877136752136753, "grad_norm": 0.6406680345535278, "learning_rate": 0.00017153717865458994, "loss": 1.2758, "step": 5547 }, { "epoch": 0.9878917378917379, "grad_norm": 0.5223956108093262, "learning_rate": 0.00017152739730382223, "loss": 1.1526, "step": 5548 }, { "epoch": 0.9880698005698005, "grad_norm": 0.6131790280342102, "learning_rate": 0.00017151761455162375, "loss": 1.1024, "step": 5549 }, { "epoch": 0.9882478632478633, "grad_norm": 0.5574753880500793, "learning_rate": 0.00017150783039818616, "loss": 0.9733, "step": 5550 }, { "epoch": 0.9884259259259259, "grad_norm": 0.5417882800102234, "learning_rate": 0.0001714980448437012, "loss": 1.2244, "step": 5551 }, { "epoch": 0.9886039886039886, "grad_norm": 0.6217474341392517, "learning_rate": 0.0001714882578883606, "loss": 0.9224, "step": 5552 }, { "epoch": 0.9887820512820513, "grad_norm": 0.5846285223960876, "learning_rate": 0.00017147846953235606, "loss": 1.2429, "step": 5553 }, { "epoch": 0.988960113960114, "grad_norm": 0.5924782752990723, "learning_rate": 0.00017146867977587936, "loss": 0.9907, "step": 5554 }, { "epoch": 0.9891381766381766, "grad_norm": 0.5756853818893433, "learning_rate": 0.00017145888861912242, "loss": 1.1266, "step": 5555 }, { "epoch": 0.9893162393162394, "grad_norm": 0.5277376770973206, "learning_rate": 0.00017144909606227693, "loss": 1.1676, "step": 5556 }, { "epoch": 0.989494301994302, "grad_norm": 0.5138902068138123, "learning_rate": 0.00017143930210553485, "loss": 0.9864, "step": 5557 }, { "epoch": 0.9896723646723646, "grad_norm": 0.8072507977485657, "learning_rate": 0.00017142950674908805, "loss": 1.111, "step": 5558 }, { "epoch": 0.9898504273504274, "grad_norm": 0.5641721487045288, "learning_rate": 
0.00017141970999312844, "loss": 0.9106, "step": 5559 }, { "epoch": 0.99002849002849, "grad_norm": 0.5260798931121826, "learning_rate": 0.000171409911837848, "loss": 1.1609, "step": 5560 }, { "epoch": 0.9902065527065527, "grad_norm": 0.5398530960083008, "learning_rate": 0.00017140011228343864, "loss": 1.0368, "step": 5561 }, { "epoch": 0.9903846153846154, "grad_norm": 0.6011313199996948, "learning_rate": 0.00017139031133009245, "loss": 1.1314, "step": 5562 }, { "epoch": 0.9905626780626781, "grad_norm": 0.6194971203804016, "learning_rate": 0.00017138050897800135, "loss": 1.3493, "step": 5563 }, { "epoch": 0.9907407407407407, "grad_norm": 0.5779356956481934, "learning_rate": 0.0001713707052273575, "loss": 0.943, "step": 5564 }, { "epoch": 0.9909188034188035, "grad_norm": 0.5321127772331238, "learning_rate": 0.00017136090007835293, "loss": 0.7914, "step": 5565 }, { "epoch": 0.9910968660968661, "grad_norm": 0.5470426678657532, "learning_rate": 0.00017135109353117977, "loss": 1.2113, "step": 5566 }, { "epoch": 0.9912749287749287, "grad_norm": 0.5551436543464661, "learning_rate": 0.00017134128558603012, "loss": 0.8932, "step": 5567 }, { "epoch": 0.9914529914529915, "grad_norm": 0.45770928263664246, "learning_rate": 0.0001713314762430962, "loss": 1.0061, "step": 5568 }, { "epoch": 0.9916310541310541, "grad_norm": 0.5578967332839966, "learning_rate": 0.00017132166550257017, "loss": 1.148, "step": 5569 }, { "epoch": 0.9918091168091168, "grad_norm": 0.5086452960968018, "learning_rate": 0.0001713118533646443, "loss": 0.9803, "step": 5570 }, { "epoch": 0.9919871794871795, "grad_norm": 0.4714745879173279, "learning_rate": 0.00017130203982951078, "loss": 1.0176, "step": 5571 }, { "epoch": 0.9921652421652422, "grad_norm": 0.6254406571388245, "learning_rate": 0.0001712922248973619, "loss": 1.0932, "step": 5572 }, { "epoch": 0.9923433048433048, "grad_norm": 0.5005003809928894, "learning_rate": 0.00017128240856838998, "loss": 1.0783, "step": 5573 }, { "epoch": 0.9925213675213675, 
"grad_norm": 0.5668206214904785, "learning_rate": 0.00017127259084278733, "loss": 1.0404, "step": 5574 }, { "epoch": 0.9926994301994302, "grad_norm": 0.4976036250591278, "learning_rate": 0.00017126277172074632, "loss": 1.1437, "step": 5575 }, { "epoch": 0.9928774928774928, "grad_norm": 0.567546546459198, "learning_rate": 0.00017125295120245935, "loss": 1.2188, "step": 5576 }, { "epoch": 0.9930555555555556, "grad_norm": 0.5614372491836548, "learning_rate": 0.0001712431292881188, "loss": 0.9187, "step": 5577 }, { "epoch": 0.9932336182336182, "grad_norm": 0.6117973327636719, "learning_rate": 0.00017123330597791712, "loss": 1.1285, "step": 5578 }, { "epoch": 0.9934116809116809, "grad_norm": 0.6000342965126038, "learning_rate": 0.00017122348127204676, "loss": 0.9837, "step": 5579 }, { "epoch": 0.9935897435897436, "grad_norm": 0.5453050136566162, "learning_rate": 0.0001712136551707003, "loss": 0.8771, "step": 5580 }, { "epoch": 0.9937678062678063, "grad_norm": 0.49603891372680664, "learning_rate": 0.00017120382767407018, "loss": 1.0754, "step": 5581 }, { "epoch": 0.9939458689458689, "grad_norm": 0.48031488060951233, "learning_rate": 0.00017119399878234894, "loss": 0.6933, "step": 5582 }, { "epoch": 0.9941239316239316, "grad_norm": 0.6048742532730103, "learning_rate": 0.0001711841684957292, "loss": 0.9696, "step": 5583 }, { "epoch": 0.9943019943019943, "grad_norm": 0.5183123350143433, "learning_rate": 0.00017117433681440355, "loss": 1.1313, "step": 5584 }, { "epoch": 0.9944800569800569, "grad_norm": 0.504916250705719, "learning_rate": 0.00017116450373856466, "loss": 1.0273, "step": 5585 }, { "epoch": 0.9946581196581197, "grad_norm": 0.5804886817932129, "learning_rate": 0.0001711546692684051, "loss": 1.1162, "step": 5586 }, { "epoch": 0.9948361823361823, "grad_norm": 0.5531938672065735, "learning_rate": 0.0001711448334041176, "loss": 1.2893, "step": 5587 }, { "epoch": 0.9950142450142451, "grad_norm": 0.5079928636550903, "learning_rate": 0.00017113499614589492, "loss": 
1.0393, "step": 5588 }, { "epoch": 0.9951923076923077, "grad_norm": 0.5421964526176453, "learning_rate": 0.00017112515749392973, "loss": 0.8844, "step": 5589 }, { "epoch": 0.9953703703703703, "grad_norm": 0.4834558367729187, "learning_rate": 0.00017111531744841486, "loss": 1.0187, "step": 5590 }, { "epoch": 0.9955484330484331, "grad_norm": 0.6704340577125549, "learning_rate": 0.00017110547600954307, "loss": 0.8524, "step": 5591 }, { "epoch": 0.9957264957264957, "grad_norm": 0.4578927159309387, "learning_rate": 0.00017109563317750718, "loss": 1.059, "step": 5592 }, { "epoch": 0.9959045584045584, "grad_norm": 0.5563494563102722, "learning_rate": 0.00017108578895250006, "loss": 1.1211, "step": 5593 }, { "epoch": 0.9960826210826211, "grad_norm": 0.5272170901298523, "learning_rate": 0.00017107594333471454, "loss": 0.9224, "step": 5594 }, { "epoch": 0.9962606837606838, "grad_norm": 0.5697501301765442, "learning_rate": 0.00017106609632434357, "loss": 1.2223, "step": 5595 }, { "epoch": 0.9964387464387464, "grad_norm": 0.5385653376579285, "learning_rate": 0.00017105624792158007, "loss": 1.0809, "step": 5596 }, { "epoch": 0.9966168091168092, "grad_norm": 0.5608006119728088, "learning_rate": 0.000171046398126617, "loss": 1.3936, "step": 5597 }, { "epoch": 0.9967948717948718, "grad_norm": 0.5063132643699646, "learning_rate": 0.00017103654693964736, "loss": 1.2086, "step": 5598 }, { "epoch": 0.9969729344729344, "grad_norm": 0.6014235019683838, "learning_rate": 0.00017102669436086415, "loss": 1.1231, "step": 5599 }, { "epoch": 0.9971509971509972, "grad_norm": 0.49549567699432373, "learning_rate": 0.00017101684039046036, "loss": 1.0013, "step": 5600 }, { "epoch": 0.9973290598290598, "grad_norm": 0.517464816570282, "learning_rate": 0.00017100698502862916, "loss": 1.1143, "step": 5601 }, { "epoch": 0.9975071225071225, "grad_norm": 0.514281153678894, "learning_rate": 0.00017099712827556358, "loss": 1.0336, "step": 5602 }, { "epoch": 0.9976851851851852, "grad_norm": 
0.5378567576408386, "learning_rate": 0.00017098727013145672, "loss": 0.8278, "step": 5603 }, { "epoch": 0.9978632478632479, "grad_norm": 0.5098404884338379, "learning_rate": 0.0001709774105965018, "loss": 0.9902, "step": 5604 }, { "epoch": 0.9980413105413105, "grad_norm": 0.6231759190559387, "learning_rate": 0.00017096754967089198, "loss": 1.0564, "step": 5605 }, { "epoch": 0.9982193732193733, "grad_norm": 0.47434380650520325, "learning_rate": 0.00017095768735482042, "loss": 0.7457, "step": 5606 }, { "epoch": 0.9983974358974359, "grad_norm": 0.5771013498306274, "learning_rate": 0.00017094782364848035, "loss": 1.1191, "step": 5607 }, { "epoch": 0.9985754985754985, "grad_norm": 0.5617234706878662, "learning_rate": 0.00017093795855206508, "loss": 1.0779, "step": 5608 }, { "epoch": 0.9987535612535613, "grad_norm": 0.6573554873466492, "learning_rate": 0.00017092809206576792, "loss": 1.0191, "step": 5609 }, { "epoch": 0.9989316239316239, "grad_norm": 0.482834130525589, "learning_rate": 0.00017091822418978207, "loss": 1.0119, "step": 5610 }, { "epoch": 0.9991096866096866, "grad_norm": 0.47496405243873596, "learning_rate": 0.000170908354924301, "loss": 0.8297, "step": 5611 }, { "epoch": 0.9992877492877493, "grad_norm": 0.5013265013694763, "learning_rate": 0.00017089848426951796, "loss": 1.1511, "step": 5612 }, { "epoch": 0.999465811965812, "grad_norm": 0.5402522683143616, "learning_rate": 0.00017088861222562643, "loss": 1.1401, "step": 5613 }, { "epoch": 0.9996438746438746, "grad_norm": 0.546302318572998, "learning_rate": 0.00017087873879281977, "loss": 0.8611, "step": 5614 }, { "epoch": 0.9998219373219374, "grad_norm": 0.44279807806015015, "learning_rate": 0.0001708688639712915, "loss": 0.79, "step": 5615 }, { "epoch": 1.0, "grad_norm": 0.5514659285545349, "learning_rate": 0.00017085898776123502, "loss": 1.0709, "step": 5616 }, { "epoch": 1.0, "eval_loss": 1.093075156211853, "eval_runtime": 24.6155, "eval_samples_per_second": 42.29, "eval_steps_per_second": 21.166, 
"step": 5616 }, { "epoch": 1.0001780626780628, "grad_norm": 0.6290156841278076, "learning_rate": 0.0001708491101628439, "loss": 1.1786, "step": 5617 }, { "epoch": 1.0001780626780628, "grad_norm": 0.4703841209411621, "learning_rate": 0.00017083923117631162, "loss": 0.9548, "step": 5618 }, { "epoch": 1.0003561253561253, "grad_norm": 0.4518105089664459, "learning_rate": 0.0001708293508018318, "loss": 1.0089, "step": 5619 }, { "epoch": 1.000534188034188, "grad_norm": 0.5658619403839111, "learning_rate": 0.00017081946903959794, "loss": 0.9466, "step": 5620 }, { "epoch": 1.0007122507122508, "grad_norm": 0.6153838634490967, "learning_rate": 0.00017080958588980372, "loss": 1.2898, "step": 5621 }, { "epoch": 1.0008903133903133, "grad_norm": 0.5245628952980042, "learning_rate": 0.00017079970135264275, "loss": 1.1702, "step": 5622 }, { "epoch": 1.001068376068376, "grad_norm": 0.5291880965232849, "learning_rate": 0.00017078981542830875, "loss": 1.0779, "step": 5623 }, { "epoch": 1.0012464387464388, "grad_norm": 0.500579297542572, "learning_rate": 0.0001707799281169953, "loss": 0.9587, "step": 5624 }, { "epoch": 1.0014245014245013, "grad_norm": 0.45739707350730896, "learning_rate": 0.00017077003941889625, "loss": 0.9373, "step": 5625 }, { "epoch": 1.001602564102564, "grad_norm": 0.5513401031494141, "learning_rate": 0.00017076014933420526, "loss": 1.0368, "step": 5626 }, { "epoch": 1.0017806267806268, "grad_norm": 0.46513232588768005, "learning_rate": 0.00017075025786311612, "loss": 0.9422, "step": 5627 }, { "epoch": 1.0019586894586894, "grad_norm": 0.4530394673347473, "learning_rate": 0.00017074036500582267, "loss": 0.8211, "step": 5628 }, { "epoch": 1.0021367521367521, "grad_norm": 0.5612013339996338, "learning_rate": 0.00017073047076251872, "loss": 0.9466, "step": 5629 }, { "epoch": 1.0023148148148149, "grad_norm": 0.4976879954338074, "learning_rate": 0.00017072057513339812, "loss": 0.8059, "step": 5630 }, { "epoch": 1.0024928774928774, "grad_norm": 0.4842833876609802, 
"learning_rate": 0.00017071067811865476, "loss": 0.6554, "step": 5631 }, { "epoch": 1.0026709401709402, "grad_norm": 0.5446373224258423, "learning_rate": 0.00017070077971848257, "loss": 1.1001, "step": 5632 }, { "epoch": 1.002849002849003, "grad_norm": 0.5996584892272949, "learning_rate": 0.00017069087993307544, "loss": 1.0317, "step": 5633 }, { "epoch": 1.0030270655270654, "grad_norm": 0.5369443297386169, "learning_rate": 0.00017068097876262738, "loss": 0.8019, "step": 5634 }, { "epoch": 1.0032051282051282, "grad_norm": 0.4985966682434082, "learning_rate": 0.00017067107620733236, "loss": 1.0121, "step": 5635 }, { "epoch": 1.003383190883191, "grad_norm": 0.5262824892997742, "learning_rate": 0.0001706611722673844, "loss": 1.0157, "step": 5636 }, { "epoch": 1.0035612535612535, "grad_norm": 0.5912795066833496, "learning_rate": 0.00017065126694297756, "loss": 1.0327, "step": 5637 }, { "epoch": 1.0037393162393162, "grad_norm": 0.5866343379020691, "learning_rate": 0.00017064136023430595, "loss": 1.1194, "step": 5638 }, { "epoch": 1.003917378917379, "grad_norm": 0.5009918808937073, "learning_rate": 0.0001706314521415636, "loss": 1.0467, "step": 5639 }, { "epoch": 1.0040954415954415, "grad_norm": 0.5455304384231567, "learning_rate": 0.00017062154266494464, "loss": 0.8749, "step": 5640 }, { "epoch": 1.0042735042735043, "grad_norm": 0.5648258328437805, "learning_rate": 0.00017061163180464328, "loss": 0.9408, "step": 5641 }, { "epoch": 1.004451566951567, "grad_norm": 0.5276365876197815, "learning_rate": 0.00017060171956085368, "loss": 0.9681, "step": 5642 }, { "epoch": 1.0046296296296295, "grad_norm": 0.5212745070457458, "learning_rate": 0.00017059180593377007, "loss": 0.9188, "step": 5643 }, { "epoch": 1.0048076923076923, "grad_norm": 0.540626585483551, "learning_rate": 0.00017058189092358664, "loss": 1.0809, "step": 5644 }, { "epoch": 1.004985754985755, "grad_norm": 0.5592377781867981, "learning_rate": 0.00017057197453049767, "loss": 0.8589, "step": 5645 }, { "epoch": 
1.0051638176638176, "grad_norm": 0.5115051865577698, "learning_rate": 0.00017056205675469746, "loss": 0.8006, "step": 5646 }, { "epoch": 1.0053418803418803, "grad_norm": 0.5031117796897888, "learning_rate": 0.00017055213759638034, "loss": 0.9242, "step": 5647 }, { "epoch": 1.005519943019943, "grad_norm": 0.5342774987220764, "learning_rate": 0.00017054221705574066, "loss": 0.8268, "step": 5648 }, { "epoch": 1.0056980056980056, "grad_norm": 0.44480493664741516, "learning_rate": 0.00017053229513297276, "loss": 0.6892, "step": 5649 }, { "epoch": 1.0058760683760684, "grad_norm": 0.5032621622085571, "learning_rate": 0.00017052237182827105, "loss": 0.971, "step": 5650 }, { "epoch": 1.006054131054131, "grad_norm": 0.5611015558242798, "learning_rate": 0.00017051244714182996, "loss": 0.9403, "step": 5651 }, { "epoch": 1.0062321937321936, "grad_norm": 0.5064613223075867, "learning_rate": 0.00017050252107384393, "loss": 0.9718, "step": 5652 }, { "epoch": 1.0064102564102564, "grad_norm": 0.6458395719528198, "learning_rate": 0.0001704925936245075, "loss": 1.1161, "step": 5653 }, { "epoch": 1.0065883190883191, "grad_norm": 0.527418315410614, "learning_rate": 0.00017048266479401512, "loss": 0.9315, "step": 5654 }, { "epoch": 1.0067663817663817, "grad_norm": 0.5127941370010376, "learning_rate": 0.00017047273458256133, "loss": 0.8206, "step": 5655 }, { "epoch": 1.0069444444444444, "grad_norm": 0.6257100105285645, "learning_rate": 0.00017046280299034067, "loss": 0.9854, "step": 5656 }, { "epoch": 1.0071225071225072, "grad_norm": 0.5081700682640076, "learning_rate": 0.0001704528700175478, "loss": 0.9478, "step": 5657 }, { "epoch": 1.0073005698005697, "grad_norm": 0.598127543926239, "learning_rate": 0.00017044293566437725, "loss": 1.0721, "step": 5658 }, { "epoch": 1.0074786324786325, "grad_norm": 0.5429877638816833, "learning_rate": 0.00017043299993102376, "loss": 0.9732, "step": 5659 }, { "epoch": 1.0076566951566952, "grad_norm": 0.6006619334220886, "learning_rate": 
0.00017042306281768194, "loss": 1.1262, "step": 5660 }, { "epoch": 1.0078347578347577, "grad_norm": 0.48933324217796326, "learning_rate": 0.00017041312432454646, "loss": 0.8596, "step": 5661 }, { "epoch": 1.0080128205128205, "grad_norm": 0.5902166366577148, "learning_rate": 0.0001704031844518121, "loss": 1.1035, "step": 5662 }, { "epoch": 1.0081908831908832, "grad_norm": 0.523597776889801, "learning_rate": 0.0001703932431996736, "loss": 0.7117, "step": 5663 }, { "epoch": 1.0083689458689458, "grad_norm": 0.6313928365707397, "learning_rate": 0.00017038330056832573, "loss": 1.0204, "step": 5664 }, { "epoch": 1.0085470085470085, "grad_norm": 0.5627471804618835, "learning_rate": 0.00017037335655796328, "loss": 0.7648, "step": 5665 }, { "epoch": 1.0087250712250713, "grad_norm": 0.5817851424217224, "learning_rate": 0.0001703634111687811, "loss": 1.0452, "step": 5666 }, { "epoch": 1.0089031339031338, "grad_norm": 0.5143535137176514, "learning_rate": 0.00017035346440097407, "loss": 0.9788, "step": 5667 }, { "epoch": 1.0090811965811965, "grad_norm": 0.5331187844276428, "learning_rate": 0.000170343516254737, "loss": 0.7584, "step": 5668 }, { "epoch": 1.0092592592592593, "grad_norm": 0.5723634362220764, "learning_rate": 0.00017033356673026487, "loss": 0.9435, "step": 5669 }, { "epoch": 1.0094373219373218, "grad_norm": 0.6012297868728638, "learning_rate": 0.00017032361582775265, "loss": 1.142, "step": 5670 }, { "epoch": 1.0096153846153846, "grad_norm": 0.6161282658576965, "learning_rate": 0.00017031366354739523, "loss": 1.2823, "step": 5671 }, { "epoch": 1.0097934472934473, "grad_norm": 0.5088054537773132, "learning_rate": 0.00017030370988938763, "loss": 0.9743, "step": 5672 }, { "epoch": 1.0099715099715099, "grad_norm": 0.512003481388092, "learning_rate": 0.0001702937548539249, "loss": 0.9112, "step": 5673 }, { "epoch": 1.0101495726495726, "grad_norm": 0.5565149784088135, "learning_rate": 0.00017028379844120207, "loss": 1.0074, "step": 5674 }, { "epoch": 1.0103276353276354, 
"grad_norm": 0.6463099718093872, "learning_rate": 0.00017027384065141418, "loss": 1.175, "step": 5675 }, { "epoch": 1.010505698005698, "grad_norm": 0.46999064087867737, "learning_rate": 0.00017026388148475637, "loss": 0.8429, "step": 5676 }, { "epoch": 1.0106837606837606, "grad_norm": 0.5617384910583496, "learning_rate": 0.00017025392094142377, "loss": 1.045, "step": 5677 }, { "epoch": 1.0108618233618234, "grad_norm": 0.5156623721122742, "learning_rate": 0.00017024395902161154, "loss": 1.016, "step": 5678 }, { "epoch": 1.0110398860398861, "grad_norm": 0.5693390369415283, "learning_rate": 0.00017023399572551484, "loss": 0.8616, "step": 5679 }, { "epoch": 1.0112179487179487, "grad_norm": 0.5234879851341248, "learning_rate": 0.00017022403105332892, "loss": 0.9244, "step": 5680 }, { "epoch": 1.0113960113960114, "grad_norm": 0.6513097286224365, "learning_rate": 0.00017021406500524893, "loss": 0.9565, "step": 5681 }, { "epoch": 1.0115740740740742, "grad_norm": 0.5788878202438354, "learning_rate": 0.00017020409758147022, "loss": 0.8994, "step": 5682 }, { "epoch": 1.0117521367521367, "grad_norm": 0.5495247840881348, "learning_rate": 0.00017019412878218807, "loss": 0.9371, "step": 5683 }, { "epoch": 1.0119301994301995, "grad_norm": 0.639045238494873, "learning_rate": 0.00017018415860759777, "loss": 1.0297, "step": 5684 }, { "epoch": 1.0121082621082622, "grad_norm": 0.5167784690856934, "learning_rate": 0.0001701741870578947, "loss": 0.8974, "step": 5685 }, { "epoch": 1.0122863247863247, "grad_norm": 0.6131011247634888, "learning_rate": 0.00017016421413327417, "loss": 1.13, "step": 5686 }, { "epoch": 1.0124643874643875, "grad_norm": 0.4804688096046448, "learning_rate": 0.00017015423983393166, "loss": 1.0098, "step": 5687 }, { "epoch": 1.0126424501424502, "grad_norm": 0.6605221629142761, "learning_rate": 0.00017014426416006253, "loss": 1.1123, "step": 5688 }, { "epoch": 1.0128205128205128, "grad_norm": 0.5523666739463806, "learning_rate": 0.00017013428711186226, "loss": 
0.8226, "step": 5689 }, { "epoch": 1.0129985754985755, "grad_norm": 0.6012941598892212, "learning_rate": 0.00017012430868952632, "loss": 0.8915, "step": 5690 }, { "epoch": 1.0131766381766383, "grad_norm": 0.5830875039100647, "learning_rate": 0.00017011432889325022, "loss": 1.021, "step": 5691 }, { "epoch": 1.0133547008547008, "grad_norm": 0.5546056032180786, "learning_rate": 0.0001701043477232295, "loss": 0.7656, "step": 5692 }, { "epoch": 1.0135327635327636, "grad_norm": 0.5592601299285889, "learning_rate": 0.0001700943651796597, "loss": 1.0172, "step": 5693 }, { "epoch": 1.0137108262108263, "grad_norm": 0.5708866715431213, "learning_rate": 0.00017008438126273645, "loss": 1.0012, "step": 5694 }, { "epoch": 1.0138888888888888, "grad_norm": 0.6856338381767273, "learning_rate": 0.0001700743959726553, "loss": 1.1278, "step": 5695 }, { "epoch": 1.0140669515669516, "grad_norm": 0.6523802876472473, "learning_rate": 0.000170064409309612, "loss": 1.0406, "step": 5696 }, { "epoch": 1.0142450142450143, "grad_norm": 0.6653079986572266, "learning_rate": 0.00017005442127380208, "loss": 1.1086, "step": 5697 }, { "epoch": 1.0144230769230769, "grad_norm": 0.5841104388237, "learning_rate": 0.00017004443186542133, "loss": 0.9335, "step": 5698 }, { "epoch": 1.0146011396011396, "grad_norm": 0.5696784257888794, "learning_rate": 0.0001700344410846654, "loss": 1.0247, "step": 5699 }, { "epoch": 1.0147792022792024, "grad_norm": 0.7135653495788574, "learning_rate": 0.00017002444893173013, "loss": 1.0259, "step": 5700 }, { "epoch": 1.014957264957265, "grad_norm": 0.5806999802589417, "learning_rate": 0.00017001445540681124, "loss": 1.0053, "step": 5701 }, { "epoch": 1.0151353276353277, "grad_norm": 0.5298715829849243, "learning_rate": 0.0001700044605101045, "loss": 0.9415, "step": 5702 }, { "epoch": 1.0153133903133904, "grad_norm": 0.5817379951477051, "learning_rate": 0.0001699944642418058, "loss": 1.0906, "step": 5703 }, { "epoch": 1.015491452991453, "grad_norm": 0.6564923524856567, 
"learning_rate": 0.00016998446660211098, "loss": 0.9933, "step": 5704 }, { "epoch": 1.0156695156695157, "grad_norm": 0.6547308564186096, "learning_rate": 0.00016997446759121592, "loss": 1.0045, "step": 5705 }, { "epoch": 1.0158475783475784, "grad_norm": 0.5763013958930969, "learning_rate": 0.00016996446720931652, "loss": 1.0898, "step": 5706 }, { "epoch": 1.016025641025641, "grad_norm": 0.6118074059486389, "learning_rate": 0.00016995446545660871, "loss": 0.9398, "step": 5707 }, { "epoch": 1.0162037037037037, "grad_norm": 0.6810526251792908, "learning_rate": 0.0001699444623332885, "loss": 1.0968, "step": 5708 }, { "epoch": 1.0163817663817665, "grad_norm": 0.5292752981185913, "learning_rate": 0.00016993445783955184, "loss": 0.7549, "step": 5709 }, { "epoch": 1.016559829059829, "grad_norm": 0.6014277935028076, "learning_rate": 0.00016992445197559474, "loss": 1.1711, "step": 5710 }, { "epoch": 1.0167378917378918, "grad_norm": 0.5089772343635559, "learning_rate": 0.00016991444474161326, "loss": 0.9188, "step": 5711 }, { "epoch": 1.0169159544159545, "grad_norm": 0.567193865776062, "learning_rate": 0.0001699044361378035, "loss": 0.7462, "step": 5712 }, { "epoch": 1.017094017094017, "grad_norm": 0.5638598799705505, "learning_rate": 0.00016989442616436147, "loss": 0.9643, "step": 5713 }, { "epoch": 1.0172720797720798, "grad_norm": 0.5634039640426636, "learning_rate": 0.0001698844148214834, "loss": 1.0141, "step": 5714 }, { "epoch": 1.0174501424501425, "grad_norm": 0.5326652526855469, "learning_rate": 0.00016987440210936537, "loss": 0.865, "step": 5715 }, { "epoch": 1.017628205128205, "grad_norm": 0.5858046412467957, "learning_rate": 0.0001698643880282036, "loss": 0.9561, "step": 5716 }, { "epoch": 1.0178062678062678, "grad_norm": 0.6424698829650879, "learning_rate": 0.00016985437257819428, "loss": 1.0169, "step": 5717 }, { "epoch": 1.0179843304843306, "grad_norm": 0.6294280290603638, "learning_rate": 0.00016984435575953364, "loss": 1.0438, "step": 5718 }, { "epoch": 
1.018162393162393, "grad_norm": 0.5533088445663452, "learning_rate": 0.00016983433757241788, "loss": 0.8901, "step": 5719 }, { "epoch": 1.0183404558404558, "grad_norm": 0.5148718953132629, "learning_rate": 0.00016982431801704342, "loss": 0.9201, "step": 5720 }, { "epoch": 1.0185185185185186, "grad_norm": 0.5609371662139893, "learning_rate": 0.00016981429709360645, "loss": 0.9347, "step": 5721 }, { "epoch": 1.0186965811965811, "grad_norm": 0.5502731204032898, "learning_rate": 0.00016980427480230338, "loss": 1.0508, "step": 5722 }, { "epoch": 1.0188746438746439, "grad_norm": 0.5880394577980042, "learning_rate": 0.00016979425114333055, "loss": 1.1258, "step": 5723 }, { "epoch": 1.0190527065527066, "grad_norm": 0.5569866895675659, "learning_rate": 0.0001697842261168843, "loss": 0.9186, "step": 5724 }, { "epoch": 1.0192307692307692, "grad_norm": 0.7468093037605286, "learning_rate": 0.00016977419972316116, "loss": 1.2066, "step": 5725 }, { "epoch": 1.019408831908832, "grad_norm": 0.6041515469551086, "learning_rate": 0.00016976417196235753, "loss": 0.939, "step": 5726 }, { "epoch": 1.0195868945868947, "grad_norm": 0.6102641224861145, "learning_rate": 0.00016975414283466983, "loss": 0.8334, "step": 5727 }, { "epoch": 1.0197649572649572, "grad_norm": 0.5418640375137329, "learning_rate": 0.00016974411234029467, "loss": 0.8072, "step": 5728 }, { "epoch": 1.01994301994302, "grad_norm": 0.6569705605506897, "learning_rate": 0.00016973408047942843, "loss": 1.103, "step": 5729 }, { "epoch": 1.0201210826210827, "grad_norm": 0.5778102278709412, "learning_rate": 0.00016972404725226778, "loss": 0.9353, "step": 5730 }, { "epoch": 1.0202991452991452, "grad_norm": 0.5474382638931274, "learning_rate": 0.0001697140126590093, "loss": 1.0009, "step": 5731 }, { "epoch": 1.020477207977208, "grad_norm": 0.5869506597518921, "learning_rate": 0.00016970397669984947, "loss": 1.0027, "step": 5732 }, { "epoch": 1.0206552706552707, "grad_norm": 0.5078117251396179, "learning_rate": 
0.00016969393937498508, "loss": 0.8316, "step": 5733 }, { "epoch": 1.0208333333333333, "grad_norm": 0.5488452911376953, "learning_rate": 0.0001696839006846127, "loss": 0.8438, "step": 5734 }, { "epoch": 1.021011396011396, "grad_norm": 0.5921052098274231, "learning_rate": 0.00016967386062892908, "loss": 0.9147, "step": 5735 }, { "epoch": 1.0211894586894588, "grad_norm": 0.5486881136894226, "learning_rate": 0.00016966381920813085, "loss": 0.7619, "step": 5736 }, { "epoch": 1.0213675213675213, "grad_norm": 0.5250689387321472, "learning_rate": 0.00016965377642241483, "loss": 0.9192, "step": 5737 }, { "epoch": 1.021545584045584, "grad_norm": 0.5355087518692017, "learning_rate": 0.00016964373227197773, "loss": 0.954, "step": 5738 }, { "epoch": 1.0217236467236468, "grad_norm": 0.6758780479431152, "learning_rate": 0.0001696336867570164, "loss": 1.1257, "step": 5739 }, { "epoch": 1.0219017094017093, "grad_norm": 0.6361044049263, "learning_rate": 0.00016962363987772756, "loss": 1.0889, "step": 5740 }, { "epoch": 1.022079772079772, "grad_norm": 0.5802326798439026, "learning_rate": 0.00016961359163430819, "loss": 0.8966, "step": 5741 }, { "epoch": 1.0222578347578348, "grad_norm": 0.5535712242126465, "learning_rate": 0.00016960354202695508, "loss": 1.0007, "step": 5742 }, { "epoch": 1.0224358974358974, "grad_norm": 0.5469220280647278, "learning_rate": 0.00016959349105586516, "loss": 0.8202, "step": 5743 }, { "epoch": 1.02261396011396, "grad_norm": 0.5533008575439453, "learning_rate": 0.00016958343872123534, "loss": 0.9576, "step": 5744 }, { "epoch": 1.0227920227920229, "grad_norm": 0.615132749080658, "learning_rate": 0.00016957338502326258, "loss": 0.8719, "step": 5745 }, { "epoch": 1.0229700854700854, "grad_norm": 0.519075334072113, "learning_rate": 0.0001695633299621439, "loss": 0.8309, "step": 5746 }, { "epoch": 1.0231481481481481, "grad_norm": 0.6249759197235107, "learning_rate": 0.00016955327353807624, "loss": 1.151, "step": 5747 }, { "epoch": 1.023326210826211, 
"grad_norm": 0.560299277305603, "learning_rate": 0.00016954321575125668, "loss": 0.7889, "step": 5748 }, { "epoch": 1.0235042735042734, "grad_norm": 0.5735262036323547, "learning_rate": 0.0001695331566018823, "loss": 0.8794, "step": 5749 }, { "epoch": 1.0236823361823362, "grad_norm": 0.5893994569778442, "learning_rate": 0.00016952309609015012, "loss": 0.9696, "step": 5750 }, { "epoch": 1.023860398860399, "grad_norm": 0.6064512133598328, "learning_rate": 0.0001695130342162573, "loss": 0.9771, "step": 5751 }, { "epoch": 1.0240384615384615, "grad_norm": 0.5833427309989929, "learning_rate": 0.00016950297098040099, "loss": 1.1768, "step": 5752 }, { "epoch": 1.0242165242165242, "grad_norm": 0.5940282344818115, "learning_rate": 0.00016949290638277833, "loss": 1.0758, "step": 5753 }, { "epoch": 1.024394586894587, "grad_norm": 0.5267124772071838, "learning_rate": 0.00016948284042358656, "loss": 0.772, "step": 5754 }, { "epoch": 1.0245726495726495, "grad_norm": 0.6217982172966003, "learning_rate": 0.00016947277310302284, "loss": 0.8583, "step": 5755 }, { "epoch": 1.0247507122507122, "grad_norm": 0.6192215085029602, "learning_rate": 0.00016946270442128443, "loss": 0.9148, "step": 5756 }, { "epoch": 1.024928774928775, "grad_norm": 0.5337123870849609, "learning_rate": 0.00016945263437856867, "loss": 1.0054, "step": 5757 }, { "epoch": 1.0251068376068375, "grad_norm": 0.5462040901184082, "learning_rate": 0.00016944256297507276, "loss": 1.1097, "step": 5758 }, { "epoch": 1.0252849002849003, "grad_norm": 0.5606170892715454, "learning_rate": 0.00016943249021099415, "loss": 1.0192, "step": 5759 }, { "epoch": 1.025462962962963, "grad_norm": 0.636974573135376, "learning_rate": 0.00016942241608653008, "loss": 1.0241, "step": 5760 }, { "epoch": 1.0256410256410255, "grad_norm": 0.4895164966583252, "learning_rate": 0.00016941234060187797, "loss": 0.9057, "step": 5761 }, { "epoch": 1.0258190883190883, "grad_norm": 0.5810303092002869, "learning_rate": 0.00016940226375723527, "loss": 1.0809, 
"step": 5762 }, { "epoch": 1.025997150997151, "grad_norm": 0.6043853163719177, "learning_rate": 0.00016939218555279937, "loss": 1.0685, "step": 5763 }, { "epoch": 1.0261752136752136, "grad_norm": 0.5827188491821289, "learning_rate": 0.00016938210598876774, "loss": 1.0236, "step": 5764 }, { "epoch": 1.0263532763532763, "grad_norm": 0.6677887439727783, "learning_rate": 0.0001693720250653379, "loss": 1.0586, "step": 5765 }, { "epoch": 1.026531339031339, "grad_norm": 0.558051347732544, "learning_rate": 0.0001693619427827073, "loss": 0.745, "step": 5766 }, { "epoch": 1.0267094017094016, "grad_norm": 0.6336706280708313, "learning_rate": 0.0001693518591410735, "loss": 1.0658, "step": 5767 }, { "epoch": 1.0268874643874644, "grad_norm": 0.7077126502990723, "learning_rate": 0.00016934177414063416, "loss": 1.18, "step": 5768 }, { "epoch": 1.0270655270655271, "grad_norm": 0.5342326760292053, "learning_rate": 0.00016933168778158675, "loss": 0.8347, "step": 5769 }, { "epoch": 1.0272435897435896, "grad_norm": 0.6116416454315186, "learning_rate": 0.00016932160006412895, "loss": 1.0648, "step": 5770 }, { "epoch": 1.0274216524216524, "grad_norm": 0.5411320924758911, "learning_rate": 0.0001693115109884584, "loss": 1.0756, "step": 5771 }, { "epoch": 1.0275997150997151, "grad_norm": 0.5549847483634949, "learning_rate": 0.00016930142055477277, "loss": 0.7259, "step": 5772 }, { "epoch": 1.0277777777777777, "grad_norm": 0.549010694026947, "learning_rate": 0.00016929132876326977, "loss": 0.9488, "step": 5773 }, { "epoch": 1.0279558404558404, "grad_norm": 0.6302017569541931, "learning_rate": 0.00016928123561414714, "loss": 0.8851, "step": 5774 }, { "epoch": 1.0281339031339032, "grad_norm": 0.5831273198127747, "learning_rate": 0.00016927114110760257, "loss": 0.7841, "step": 5775 }, { "epoch": 1.0283119658119657, "grad_norm": 0.5528474450111389, "learning_rate": 0.00016926104524383394, "loss": 1.0108, "step": 5776 }, { "epoch": 1.0284900284900285, "grad_norm": 0.6279126405715942, 
"learning_rate": 0.00016925094802303897, "loss": 0.8632, "step": 5777 }, { "epoch": 1.0286680911680912, "grad_norm": 0.6783218383789062, "learning_rate": 0.00016924084944541554, "loss": 1.0746, "step": 5778 }, { "epoch": 1.0288461538461537, "grad_norm": 0.5823925137519836, "learning_rate": 0.00016923074951116153, "loss": 1.0486, "step": 5779 }, { "epoch": 1.0290242165242165, "grad_norm": 0.6095981597900391, "learning_rate": 0.00016922064822047473, "loss": 0.8113, "step": 5780 }, { "epoch": 1.0292022792022792, "grad_norm": 0.7887664437294006, "learning_rate": 0.00016921054557355317, "loss": 1.2411, "step": 5781 }, { "epoch": 1.0293803418803418, "grad_norm": 0.6511263251304626, "learning_rate": 0.00016920044157059475, "loss": 0.924, "step": 5782 }, { "epoch": 1.0295584045584045, "grad_norm": 0.6045661568641663, "learning_rate": 0.00016919033621179744, "loss": 0.8373, "step": 5783 }, { "epoch": 1.0297364672364673, "grad_norm": 0.6914188861846924, "learning_rate": 0.0001691802294973592, "loss": 0.9589, "step": 5784 }, { "epoch": 1.0299145299145298, "grad_norm": 0.6483730673789978, "learning_rate": 0.00016917012142747805, "loss": 0.9871, "step": 5785 }, { "epoch": 1.0300925925925926, "grad_norm": 0.5775033235549927, "learning_rate": 0.0001691600120023521, "loss": 1.0591, "step": 5786 }, { "epoch": 1.0302706552706553, "grad_norm": 0.6206814646720886, "learning_rate": 0.00016914990122217932, "loss": 0.9126, "step": 5787 }, { "epoch": 1.0304487179487178, "grad_norm": 0.5422028303146362, "learning_rate": 0.00016913978908715796, "loss": 0.8227, "step": 5788 }, { "epoch": 1.0306267806267806, "grad_norm": 0.5824416875839233, "learning_rate": 0.000169129675597486, "loss": 1.111, "step": 5789 }, { "epoch": 1.0308048433048433, "grad_norm": 0.5419015884399414, "learning_rate": 0.00016911956075336165, "loss": 0.8941, "step": 5790 }, { "epoch": 1.0309829059829059, "grad_norm": 0.6171557903289795, "learning_rate": 0.0001691094445549831, "loss": 0.8679, "step": 5791 }, { "epoch": 
1.0311609686609686, "grad_norm": 0.6136980056762695, "learning_rate": 0.00016909932700254855, "loss": 0.9266, "step": 5792 }, { "epoch": 1.0313390313390314, "grad_norm": 0.6275020241737366, "learning_rate": 0.00016908920809625624, "loss": 1.0828, "step": 5793 }, { "epoch": 1.0315170940170941, "grad_norm": 0.6538251638412476, "learning_rate": 0.0001690790878363044, "loss": 0.8413, "step": 5794 }, { "epoch": 1.0316951566951567, "grad_norm": 0.5981295108795166, "learning_rate": 0.00016906896622289136, "loss": 0.9845, "step": 5795 }, { "epoch": 1.0318732193732194, "grad_norm": 0.5390967130661011, "learning_rate": 0.00016905884325621538, "loss": 0.8755, "step": 5796 }, { "epoch": 1.032051282051282, "grad_norm": 0.5534448623657227, "learning_rate": 0.00016904871893647482, "loss": 1.1868, "step": 5797 }, { "epoch": 1.0322293447293447, "grad_norm": 0.664556086063385, "learning_rate": 0.00016903859326386806, "loss": 1.1418, "step": 5798 }, { "epoch": 1.0324074074074074, "grad_norm": 0.5737143158912659, "learning_rate": 0.00016902846623859346, "loss": 1.124, "step": 5799 }, { "epoch": 1.0325854700854702, "grad_norm": 0.6499935388565063, "learning_rate": 0.0001690183378608495, "loss": 1.0331, "step": 5800 }, { "epoch": 1.0327635327635327, "grad_norm": 0.5721518993377686, "learning_rate": 0.00016900820813083454, "loss": 0.8664, "step": 5801 }, { "epoch": 1.0329415954415955, "grad_norm": 0.5651140809059143, "learning_rate": 0.0001689980770487471, "loss": 1.1661, "step": 5802 }, { "epoch": 1.0331196581196582, "grad_norm": 0.5935871005058289, "learning_rate": 0.0001689879446147857, "loss": 0.8722, "step": 5803 }, { "epoch": 1.0332977207977208, "grad_norm": 0.5627842545509338, "learning_rate": 0.00016897781082914884, "loss": 1.0036, "step": 5804 }, { "epoch": 1.0334757834757835, "grad_norm": 0.5866895914077759, "learning_rate": 0.00016896767569203502, "loss": 0.9739, "step": 5805 }, { "epoch": 1.0336538461538463, "grad_norm": 0.5568059682846069, "learning_rate": 
0.0001689575392036429, "loss": 0.7081, "step": 5806 }, { "epoch": 1.0338319088319088, "grad_norm": 0.6054235100746155, "learning_rate": 0.00016894740136417103, "loss": 1.1168, "step": 5807 }, { "epoch": 1.0340099715099715, "grad_norm": 0.5215454697608948, "learning_rate": 0.00016893726217381805, "loss": 0.9172, "step": 5808 }, { "epoch": 1.0341880341880343, "grad_norm": 0.5415732860565186, "learning_rate": 0.00016892712163278263, "loss": 0.7812, "step": 5809 }, { "epoch": 1.0343660968660968, "grad_norm": 0.6341692805290222, "learning_rate": 0.00016891697974126345, "loss": 1.0658, "step": 5810 }, { "epoch": 1.0345441595441596, "grad_norm": 0.6326245665550232, "learning_rate": 0.00016890683649945922, "loss": 1.0134, "step": 5811 }, { "epoch": 1.0347222222222223, "grad_norm": 0.5729571580886841, "learning_rate": 0.00016889669190756868, "loss": 0.9139, "step": 5812 }, { "epoch": 1.0349002849002849, "grad_norm": 0.5912853479385376, "learning_rate": 0.00016888654596579054, "loss": 1.122, "step": 5813 }, { "epoch": 1.0350783475783476, "grad_norm": 0.8410450220108032, "learning_rate": 0.00016887639867432368, "loss": 1.3009, "step": 5814 }, { "epoch": 1.0352564102564104, "grad_norm": 0.5416620969772339, "learning_rate": 0.00016886625003336683, "loss": 0.8751, "step": 5815 }, { "epoch": 1.0354344729344729, "grad_norm": 0.6367851495742798, "learning_rate": 0.0001688561000431189, "loss": 0.956, "step": 5816 }, { "epoch": 1.0356125356125356, "grad_norm": 0.4618827700614929, "learning_rate": 0.0001688459487037787, "loss": 0.5313, "step": 5817 }, { "epoch": 1.0357905982905984, "grad_norm": 0.7139244079589844, "learning_rate": 0.00016883579601554516, "loss": 1.0787, "step": 5818 }, { "epoch": 1.035968660968661, "grad_norm": 0.6896135210990906, "learning_rate": 0.00016882564197861715, "loss": 0.932, "step": 5819 }, { "epoch": 1.0361467236467237, "grad_norm": 0.5889739394187927, "learning_rate": 0.00016881548659319372, "loss": 0.8852, "step": 5820 }, { "epoch": 1.0363247863247864, 
"grad_norm": 0.5954701900482178, "learning_rate": 0.00016880532985947375, "loss": 0.8192, "step": 5821 }, { "epoch": 1.036502849002849, "grad_norm": 0.6665091514587402, "learning_rate": 0.00016879517177765627, "loss": 0.9578, "step": 5822 }, { "epoch": 1.0366809116809117, "grad_norm": 0.5990539789199829, "learning_rate": 0.00016878501234794034, "loss": 0.9797, "step": 5823 }, { "epoch": 1.0368589743589745, "grad_norm": 0.596755862236023, "learning_rate": 0.00016877485157052496, "loss": 1.173, "step": 5824 }, { "epoch": 1.037037037037037, "grad_norm": 0.544658362865448, "learning_rate": 0.00016876468944560923, "loss": 1.0742, "step": 5825 }, { "epoch": 1.0372150997150997, "grad_norm": 0.5841910243034363, "learning_rate": 0.00016875452597339225, "loss": 1.029, "step": 5826 }, { "epoch": 1.0373931623931625, "grad_norm": 0.6508592963218689, "learning_rate": 0.00016874436115407317, "loss": 0.9883, "step": 5827 }, { "epoch": 1.037571225071225, "grad_norm": 0.590050458908081, "learning_rate": 0.00016873419498785114, "loss": 1.0713, "step": 5828 }, { "epoch": 1.0377492877492878, "grad_norm": 0.5386307239532471, "learning_rate": 0.00016872402747492534, "loss": 1.0159, "step": 5829 }, { "epoch": 1.0379273504273505, "grad_norm": 0.6173896193504333, "learning_rate": 0.00016871385861549497, "loss": 1.0056, "step": 5830 }, { "epoch": 1.038105413105413, "grad_norm": 0.5377787351608276, "learning_rate": 0.0001687036884097593, "loss": 0.8708, "step": 5831 }, { "epoch": 1.0382834757834758, "grad_norm": 0.5753569006919861, "learning_rate": 0.00016869351685791756, "loss": 1.0529, "step": 5832 }, { "epoch": 1.0384615384615385, "grad_norm": 0.6085895299911499, "learning_rate": 0.00016868334396016906, "loss": 1.1017, "step": 5833 }, { "epoch": 1.038639601139601, "grad_norm": 0.6320509910583496, "learning_rate": 0.0001686731697167131, "loss": 1.0543, "step": 5834 }, { "epoch": 1.0388176638176638, "grad_norm": 0.5691760778427124, "learning_rate": 0.00016866299412774907, "loss": 0.9975, 
"step": 5835 }, { "epoch": 1.0389957264957266, "grad_norm": 0.5990765690803528, "learning_rate": 0.0001686528171934763, "loss": 0.8776, "step": 5836 }, { "epoch": 1.039173789173789, "grad_norm": 0.6650477647781372, "learning_rate": 0.00016864263891409415, "loss": 1.0652, "step": 5837 }, { "epoch": 1.0393518518518519, "grad_norm": 0.6050353646278381, "learning_rate": 0.00016863245928980212, "loss": 0.9313, "step": 5838 }, { "epoch": 1.0395299145299146, "grad_norm": 0.587505578994751, "learning_rate": 0.0001686222783207996, "loss": 0.9892, "step": 5839 }, { "epoch": 1.0397079772079771, "grad_norm": 0.6310170292854309, "learning_rate": 0.00016861209600728608, "loss": 1.1045, "step": 5840 }, { "epoch": 1.03988603988604, "grad_norm": 0.5683430433273315, "learning_rate": 0.0001686019123494611, "loss": 1.0507, "step": 5841 }, { "epoch": 1.0400641025641026, "grad_norm": 0.6621488332748413, "learning_rate": 0.00016859172734752414, "loss": 0.9255, "step": 5842 }, { "epoch": 1.0402421652421652, "grad_norm": 0.6197706460952759, "learning_rate": 0.00016858154100167475, "loss": 1.0031, "step": 5843 }, { "epoch": 1.040420227920228, "grad_norm": 0.6805898547172546, "learning_rate": 0.00016857135331211257, "loss": 0.9901, "step": 5844 }, { "epoch": 1.0405982905982907, "grad_norm": 0.5512405633926392, "learning_rate": 0.00016856116427903714, "loss": 1.0033, "step": 5845 }, { "epoch": 1.0407763532763532, "grad_norm": 0.5643384456634521, "learning_rate": 0.00016855097390264815, "loss": 0.9136, "step": 5846 }, { "epoch": 1.040954415954416, "grad_norm": 0.48351922631263733, "learning_rate": 0.0001685407821831452, "loss": 0.6163, "step": 5847 }, { "epoch": 1.0411324786324787, "grad_norm": 0.6256039142608643, "learning_rate": 0.00016853058912072802, "loss": 0.9409, "step": 5848 }, { "epoch": 1.0413105413105412, "grad_norm": 0.6539996862411499, "learning_rate": 0.00016852039471559627, "loss": 0.9367, "step": 5849 }, { "epoch": 1.041488603988604, "grad_norm": 0.6192609667778015, 
"learning_rate": 0.00016851019896794975, "loss": 0.9631, "step": 5850 }, { "epoch": 1.0416666666666667, "grad_norm": 0.613563060760498, "learning_rate": 0.0001685000018779882, "loss": 0.9132, "step": 5851 }, { "epoch": 1.0418447293447293, "grad_norm": 0.6004200577735901, "learning_rate": 0.0001684898034459114, "loss": 1.1313, "step": 5852 }, { "epoch": 1.042022792022792, "grad_norm": 0.6158567070960999, "learning_rate": 0.0001684796036719192, "loss": 1.0253, "step": 5853 }, { "epoch": 1.0422008547008548, "grad_norm": 0.6362335085868835, "learning_rate": 0.00016846940255621143, "loss": 0.93, "step": 5854 }, { "epoch": 1.0423789173789173, "grad_norm": 0.6148427128791809, "learning_rate": 0.00016845920009898787, "loss": 0.9122, "step": 5855 }, { "epoch": 1.04255698005698, "grad_norm": 0.5119984149932861, "learning_rate": 0.00016844899630044858, "loss": 0.7954, "step": 5856 }, { "epoch": 1.0427350427350428, "grad_norm": 0.571849524974823, "learning_rate": 0.00016843879116079338, "loss": 0.8588, "step": 5857 }, { "epoch": 1.0429131054131053, "grad_norm": 0.6173384785652161, "learning_rate": 0.00016842858468022221, "loss": 1.0475, "step": 5858 }, { "epoch": 1.043091168091168, "grad_norm": 0.566114068031311, "learning_rate": 0.0001684183768589351, "loss": 0.8485, "step": 5859 }, { "epoch": 1.0432692307692308, "grad_norm": 0.653134286403656, "learning_rate": 0.000168408167697132, "loss": 0.9976, "step": 5860 }, { "epoch": 1.0434472934472934, "grad_norm": 0.63815838098526, "learning_rate": 0.00016839795719501296, "loss": 0.7091, "step": 5861 }, { "epoch": 1.0436253561253561, "grad_norm": 0.5109001994132996, "learning_rate": 0.00016838774535277805, "loss": 0.7668, "step": 5862 }, { "epoch": 1.0438034188034189, "grad_norm": 0.6741907596588135, "learning_rate": 0.0001683775321706273, "loss": 1.0493, "step": 5863 }, { "epoch": 1.0439814814814814, "grad_norm": 0.6006115674972534, "learning_rate": 0.0001683673176487609, "loss": 0.9784, "step": 5864 }, { "epoch": 
1.0441595441595442, "grad_norm": 0.5504778027534485, "learning_rate": 0.0001683571017873789, "loss": 0.9718, "step": 5865 }, { "epoch": 1.044337606837607, "grad_norm": 0.5713102221488953, "learning_rate": 0.00016834688458668148, "loss": 1.12, "step": 5866 }, { "epoch": 1.0445156695156694, "grad_norm": 0.7878454923629761, "learning_rate": 0.00016833666604686886, "loss": 1.1803, "step": 5867 }, { "epoch": 1.0446937321937322, "grad_norm": 0.582697331905365, "learning_rate": 0.00016832644616814122, "loss": 0.943, "step": 5868 }, { "epoch": 1.044871794871795, "grad_norm": 0.5300645232200623, "learning_rate": 0.00016831622495069878, "loss": 0.9087, "step": 5869 }, { "epoch": 1.0450498575498575, "grad_norm": 0.5627666115760803, "learning_rate": 0.00016830600239474186, "loss": 1.081, "step": 5870 }, { "epoch": 1.0452279202279202, "grad_norm": 0.6760496497154236, "learning_rate": 0.0001682957785004707, "loss": 1.1098, "step": 5871 }, { "epoch": 1.045405982905983, "grad_norm": 0.6424084901809692, "learning_rate": 0.00016828555326808565, "loss": 0.9657, "step": 5872 }, { "epoch": 1.0455840455840455, "grad_norm": 0.5523313283920288, "learning_rate": 0.000168275326697787, "loss": 1.0163, "step": 5873 }, { "epoch": 1.0457621082621082, "grad_norm": 0.5582337975502014, "learning_rate": 0.00016826509878977518, "loss": 0.8825, "step": 5874 }, { "epoch": 1.045940170940171, "grad_norm": 0.5603214502334595, "learning_rate": 0.00016825486954425055, "loss": 0.9032, "step": 5875 }, { "epoch": 1.0461182336182335, "grad_norm": 0.5944222807884216, "learning_rate": 0.00016824463896141355, "loss": 0.9384, "step": 5876 }, { "epoch": 1.0462962962962963, "grad_norm": 0.6220229268074036, "learning_rate": 0.00016823440704146457, "loss": 0.8962, "step": 5877 }, { "epoch": 1.046474358974359, "grad_norm": 0.5607972145080566, "learning_rate": 0.0001682241737846042, "loss": 0.9385, "step": 5878 }, { "epoch": 1.0466524216524216, "grad_norm": 0.6206870079040527, "learning_rate": 0.00016821393919103282, 
"loss": 1.0597, "step": 5879 }, { "epoch": 1.0468304843304843, "grad_norm": 0.5126399993896484, "learning_rate": 0.000168203703260951, "loss": 0.9403, "step": 5880 }, { "epoch": 1.047008547008547, "grad_norm": 0.6569282412528992, "learning_rate": 0.00016819346599455929, "loss": 0.8124, "step": 5881 }, { "epoch": 1.0471866096866096, "grad_norm": 0.6670137047767639, "learning_rate": 0.0001681832273920583, "loss": 1.1927, "step": 5882 }, { "epoch": 1.0473646723646723, "grad_norm": 0.5403243899345398, "learning_rate": 0.00016817298745364862, "loss": 0.8539, "step": 5883 }, { "epoch": 1.047542735042735, "grad_norm": 0.5500505566596985, "learning_rate": 0.00016816274617953086, "loss": 1.1064, "step": 5884 }, { "epoch": 1.0477207977207976, "grad_norm": 0.5482703447341919, "learning_rate": 0.00016815250356990566, "loss": 0.7276, "step": 5885 }, { "epoch": 1.0478988603988604, "grad_norm": 0.6290771961212158, "learning_rate": 0.00016814225962497373, "loss": 0.9018, "step": 5886 }, { "epoch": 1.0480769230769231, "grad_norm": 0.6404094696044922, "learning_rate": 0.00016813201434493578, "loss": 1.0638, "step": 5887 }, { "epoch": 1.0482549857549857, "grad_norm": 0.5484994053840637, "learning_rate": 0.0001681217677299926, "loss": 1.0033, "step": 5888 }, { "epoch": 1.0484330484330484, "grad_norm": 0.6474852561950684, "learning_rate": 0.0001681115197803448, "loss": 1.1017, "step": 5889 }, { "epoch": 1.0486111111111112, "grad_norm": 0.6186243295669556, "learning_rate": 0.0001681012704961933, "loss": 0.9978, "step": 5890 }, { "epoch": 1.0487891737891737, "grad_norm": 0.6244034767150879, "learning_rate": 0.00016809101987773887, "loss": 0.9906, "step": 5891 }, { "epoch": 1.0489672364672364, "grad_norm": 0.5893426537513733, "learning_rate": 0.00016808076792518235, "loss": 0.9345, "step": 5892 }, { "epoch": 1.0491452991452992, "grad_norm": 0.6283876299858093, "learning_rate": 0.0001680705146387246, "loss": 1.0041, "step": 5893 }, { "epoch": 1.0493233618233617, "grad_norm": 
0.6075255870819092, "learning_rate": 0.00016806026001856656, "loss": 1.0661, "step": 5894 }, { "epoch": 1.0495014245014245, "grad_norm": 0.5350496768951416, "learning_rate": 0.00016805000406490907, "loss": 0.6789, "step": 5895 }, { "epoch": 1.0496794871794872, "grad_norm": 0.5380373597145081, "learning_rate": 0.00016803974677795312, "loss": 0.8889, "step": 5896 }, { "epoch": 1.0498575498575498, "grad_norm": 0.6145668029785156, "learning_rate": 0.0001680294881578997, "loss": 0.8952, "step": 5897 }, { "epoch": 1.0500356125356125, "grad_norm": 0.5666532516479492, "learning_rate": 0.00016801922820494972, "loss": 0.9697, "step": 5898 }, { "epoch": 1.0502136752136753, "grad_norm": 0.5352747440338135, "learning_rate": 0.0001680089669193043, "loss": 0.9619, "step": 5899 }, { "epoch": 1.0503917378917378, "grad_norm": 0.5405527949333191, "learning_rate": 0.00016799870430116444, "loss": 0.8733, "step": 5900 }, { "epoch": 1.0505698005698005, "grad_norm": 0.5936748385429382, "learning_rate": 0.00016798844035073124, "loss": 0.8746, "step": 5901 }, { "epoch": 1.0507478632478633, "grad_norm": 0.539652943611145, "learning_rate": 0.00016797817506820578, "loss": 0.8743, "step": 5902 }, { "epoch": 1.0509259259259258, "grad_norm": 0.644528865814209, "learning_rate": 0.00016796790845378915, "loss": 0.9251, "step": 5903 }, { "epoch": 1.0511039886039886, "grad_norm": 0.5429201126098633, "learning_rate": 0.00016795764050768258, "loss": 0.747, "step": 5904 }, { "epoch": 1.0512820512820513, "grad_norm": 0.6432006359100342, "learning_rate": 0.00016794737123008725, "loss": 0.9166, "step": 5905 }, { "epoch": 1.0514601139601139, "grad_norm": 0.6084117293357849, "learning_rate": 0.00016793710062120427, "loss": 1.0778, "step": 5906 }, { "epoch": 1.0516381766381766, "grad_norm": 0.5351580381393433, "learning_rate": 0.00016792682868123495, "loss": 0.9124, "step": 5907 }, { "epoch": 1.0518162393162394, "grad_norm": 0.7078854441642761, "learning_rate": 0.00016791655541038053, "loss": 1.1209, "step": 
5908 }, { "epoch": 1.051994301994302, "grad_norm": 0.5943832993507385, "learning_rate": 0.0001679062808088423, "loss": 0.9077, "step": 5909 }, { "epoch": 1.0521723646723646, "grad_norm": 0.5216894745826721, "learning_rate": 0.00016789600487682156, "loss": 0.9866, "step": 5910 }, { "epoch": 1.0523504273504274, "grad_norm": 0.738451361656189, "learning_rate": 0.00016788572761451963, "loss": 1.1611, "step": 5911 }, { "epoch": 1.05252849002849, "grad_norm": 0.6411251425743103, "learning_rate": 0.00016787544902213791, "loss": 1.1481, "step": 5912 }, { "epoch": 1.0527065527065527, "grad_norm": 0.6768319010734558, "learning_rate": 0.00016786516909987774, "loss": 0.8614, "step": 5913 }, { "epoch": 1.0528846153846154, "grad_norm": 0.5838070511817932, "learning_rate": 0.0001678548878479406, "loss": 0.9719, "step": 5914 }, { "epoch": 1.0530626780626782, "grad_norm": 0.541522741317749, "learning_rate": 0.00016784460526652784, "loss": 0.767, "step": 5915 }, { "epoch": 1.0532407407407407, "grad_norm": 0.6064762473106384, "learning_rate": 0.000167834321355841, "loss": 1.0792, "step": 5916 }, { "epoch": 1.0534188034188035, "grad_norm": 0.5515492558479309, "learning_rate": 0.00016782403611608152, "loss": 0.7897, "step": 5917 }, { "epoch": 1.0535968660968662, "grad_norm": 0.6326262950897217, "learning_rate": 0.000167813749547451, "loss": 0.9279, "step": 5918 }, { "epoch": 1.0537749287749287, "grad_norm": 0.6262009739875793, "learning_rate": 0.0001678034616501509, "loss": 0.9752, "step": 5919 }, { "epoch": 1.0539529914529915, "grad_norm": 0.6049023270606995, "learning_rate": 0.00016779317242438278, "loss": 0.9167, "step": 5920 }, { "epoch": 1.0541310541310542, "grad_norm": 0.6286031007766724, "learning_rate": 0.0001677828818703483, "loss": 1.1277, "step": 5921 }, { "epoch": 1.0543091168091168, "grad_norm": 0.662086009979248, "learning_rate": 0.00016777258998824907, "loss": 1.0824, "step": 5922 }, { "epoch": 1.0544871794871795, "grad_norm": 0.5358783006668091, "learning_rate": 
0.00016776229677828672, "loss": 0.825, "step": 5923 }, { "epoch": 1.0546652421652423, "grad_norm": 0.490326464176178, "learning_rate": 0.00016775200224066294, "loss": 0.7916, "step": 5924 }, { "epoch": 1.0548433048433048, "grad_norm": 0.5940443277359009, "learning_rate": 0.0001677417063755794, "loss": 1.0121, "step": 5925 }, { "epoch": 1.0550213675213675, "grad_norm": 0.5974507927894592, "learning_rate": 0.00016773140918323787, "loss": 0.7629, "step": 5926 }, { "epoch": 1.0551994301994303, "grad_norm": 0.5747174024581909, "learning_rate": 0.00016772111066384003, "loss": 0.9373, "step": 5927 }, { "epoch": 1.0553774928774928, "grad_norm": 0.5998024940490723, "learning_rate": 0.00016771081081758772, "loss": 0.8543, "step": 5928 }, { "epoch": 1.0555555555555556, "grad_norm": 0.5771155953407288, "learning_rate": 0.00016770050964468275, "loss": 0.9108, "step": 5929 }, { "epoch": 1.0557336182336183, "grad_norm": 0.5695661306381226, "learning_rate": 0.00016769020714532692, "loss": 0.8055, "step": 5930 }, { "epoch": 1.0559116809116809, "grad_norm": 0.6164212226867676, "learning_rate": 0.0001676799033197221, "loss": 1.0917, "step": 5931 }, { "epoch": 1.0560897435897436, "grad_norm": 0.6092487573623657, "learning_rate": 0.00016766959816807018, "loss": 0.9276, "step": 5932 }, { "epoch": 1.0562678062678064, "grad_norm": 0.5595401525497437, "learning_rate": 0.00016765929169057305, "loss": 0.9435, "step": 5933 }, { "epoch": 1.056445868945869, "grad_norm": 0.5875109434127808, "learning_rate": 0.00016764898388743263, "loss": 0.959, "step": 5934 }, { "epoch": 1.0566239316239316, "grad_norm": 0.6045668721199036, "learning_rate": 0.00016763867475885088, "loss": 0.8636, "step": 5935 }, { "epoch": 1.0568019943019944, "grad_norm": 0.6088171005249023, "learning_rate": 0.00016762836430502987, "loss": 0.6807, "step": 5936 }, { "epoch": 1.056980056980057, "grad_norm": 0.6293274760246277, "learning_rate": 0.00016761805252617148, "loss": 1.042, "step": 5937 }, { "epoch": 1.0571581196581197, 
"grad_norm": 0.588472843170166, "learning_rate": 0.00016760773942247785, "loss": 0.8896, "step": 5938 }, { "epoch": 1.0573361823361824, "grad_norm": 0.4412326216697693, "learning_rate": 0.000167597424994151, "loss": 0.6727, "step": 5939 }, { "epoch": 1.057514245014245, "grad_norm": 0.6086825132369995, "learning_rate": 0.00016758710924139302, "loss": 0.9908, "step": 5940 }, { "epoch": 1.0576923076923077, "grad_norm": 0.6424705386161804, "learning_rate": 0.00016757679216440608, "loss": 1.0182, "step": 5941 }, { "epoch": 1.0578703703703705, "grad_norm": 0.6610676050186157, "learning_rate": 0.00016756647376339222, "loss": 0.9645, "step": 5942 }, { "epoch": 1.058048433048433, "grad_norm": 0.598292887210846, "learning_rate": 0.0001675561540385537, "loss": 0.9694, "step": 5943 }, { "epoch": 1.0582264957264957, "grad_norm": 0.6941167116165161, "learning_rate": 0.00016754583299009266, "loss": 1.0786, "step": 5944 }, { "epoch": 1.0584045584045585, "grad_norm": 0.6543232798576355, "learning_rate": 0.00016753551061821133, "loss": 1.0488, "step": 5945 }, { "epoch": 1.058582621082621, "grad_norm": 0.606159508228302, "learning_rate": 0.000167525186923112, "loss": 0.9448, "step": 5946 }, { "epoch": 1.0587606837606838, "grad_norm": 0.5051791071891785, "learning_rate": 0.00016751486190499685, "loss": 0.7485, "step": 5947 }, { "epoch": 1.0589387464387465, "grad_norm": 0.6459367275238037, "learning_rate": 0.00016750453556406826, "loss": 1.0055, "step": 5948 }, { "epoch": 1.059116809116809, "grad_norm": 0.551591157913208, "learning_rate": 0.00016749420790052852, "loss": 0.9717, "step": 5949 }, { "epoch": 1.0592948717948718, "grad_norm": 0.5899214148521423, "learning_rate": 0.00016748387891458, "loss": 0.7774, "step": 5950 }, { "epoch": 1.0594729344729346, "grad_norm": 0.582379162311554, "learning_rate": 0.00016747354860642503, "loss": 0.953, "step": 5951 }, { "epoch": 1.059650997150997, "grad_norm": 0.6035816073417664, "learning_rate": 0.00016746321697626605, "loss": 1.1175, "step": 
5952 }, { "epoch": 1.0598290598290598, "grad_norm": 0.6476401686668396, "learning_rate": 0.00016745288402430548, "loss": 0.9448, "step": 5953 }, { "epoch": 1.0600071225071226, "grad_norm": 0.6126405596733093, "learning_rate": 0.00016744254975074578, "loss": 0.882, "step": 5954 }, { "epoch": 1.0601851851851851, "grad_norm": 0.5333579182624817, "learning_rate": 0.0001674322141557894, "loss": 0.9539, "step": 5955 }, { "epoch": 1.0603632478632479, "grad_norm": 0.6085022687911987, "learning_rate": 0.0001674218772396389, "loss": 1.0028, "step": 5956 }, { "epoch": 1.0605413105413106, "grad_norm": 0.5809528827667236, "learning_rate": 0.0001674115390024967, "loss": 0.84, "step": 5957 }, { "epoch": 1.0607193732193732, "grad_norm": 0.5820229649543762, "learning_rate": 0.00016740119944456548, "loss": 0.9563, "step": 5958 }, { "epoch": 1.060897435897436, "grad_norm": 0.6349015831947327, "learning_rate": 0.00016739085856604775, "loss": 0.9739, "step": 5959 }, { "epoch": 1.0610754985754987, "grad_norm": 0.6346020102500916, "learning_rate": 0.00016738051636714616, "loss": 0.907, "step": 5960 }, { "epoch": 1.0612535612535612, "grad_norm": 0.5850573778152466, "learning_rate": 0.0001673701728480633, "loss": 1.0688, "step": 5961 }, { "epoch": 1.061431623931624, "grad_norm": 0.6258122324943542, "learning_rate": 0.00016735982800900184, "loss": 0.9997, "step": 5962 }, { "epoch": 1.0616096866096867, "grad_norm": 0.6744239330291748, "learning_rate": 0.00016734948185016452, "loss": 0.9431, "step": 5963 }, { "epoch": 1.0617877492877492, "grad_norm": 0.5769457817077637, "learning_rate": 0.000167339134371754, "loss": 0.9658, "step": 5964 }, { "epoch": 1.061965811965812, "grad_norm": 0.6385112404823303, "learning_rate": 0.000167328785573973, "loss": 1.0199, "step": 5965 }, { "epoch": 1.0621438746438747, "grad_norm": 0.536522388458252, "learning_rate": 0.00016731843545702435, "loss": 0.8496, "step": 5966 }, { "epoch": 1.0623219373219372, "grad_norm": 0.5978497862815857, "learning_rate": 
0.00016730808402111075, "loss": 0.8536, "step": 5967 }, { "epoch": 1.0625, "grad_norm": 0.6091681122779846, "learning_rate": 0.0001672977312664351, "loss": 1.0241, "step": 5968 }, { "epoch": 1.0626780626780628, "grad_norm": 0.5807273387908936, "learning_rate": 0.0001672873771932002, "loss": 1.0522, "step": 5969 }, { "epoch": 1.0628561253561253, "grad_norm": 0.6511965990066528, "learning_rate": 0.0001672770218016089, "loss": 0.8908, "step": 5970 }, { "epoch": 1.063034188034188, "grad_norm": 0.6241721510887146, "learning_rate": 0.00016726666509186416, "loss": 0.9854, "step": 5971 }, { "epoch": 1.0632122507122508, "grad_norm": 0.6112468242645264, "learning_rate": 0.0001672563070641688, "loss": 1.0091, "step": 5972 }, { "epoch": 1.0633903133903133, "grad_norm": 0.6135509014129639, "learning_rate": 0.00016724594771872587, "loss": 0.8891, "step": 5973 }, { "epoch": 1.063568376068376, "grad_norm": 0.608384370803833, "learning_rate": 0.00016723558705573823, "loss": 1.017, "step": 5974 }, { "epoch": 1.0637464387464388, "grad_norm": 0.6578485369682312, "learning_rate": 0.00016722522507540895, "loss": 0.9165, "step": 5975 }, { "epoch": 1.0639245014245013, "grad_norm": 0.562588095664978, "learning_rate": 0.00016721486177794106, "loss": 0.7989, "step": 5976 }, { "epoch": 1.064102564102564, "grad_norm": 0.5541409254074097, "learning_rate": 0.00016720449716353753, "loss": 0.8917, "step": 5977 }, { "epoch": 1.0642806267806268, "grad_norm": 0.551167905330658, "learning_rate": 0.0001671941312324015, "loss": 0.824, "step": 5978 }, { "epoch": 1.0644586894586894, "grad_norm": 0.6280582547187805, "learning_rate": 0.0001671837639847361, "loss": 0.9708, "step": 5979 }, { "epoch": 1.0646367521367521, "grad_norm": 0.6389226913452148, "learning_rate": 0.00016717339542074436, "loss": 1.0081, "step": 5980 }, { "epoch": 1.0648148148148149, "grad_norm": 0.6677889823913574, "learning_rate": 0.0001671630255406295, "loss": 1.2709, "step": 5981 }, { "epoch": 1.0649928774928774, "grad_norm": 
0.5748161673545837, "learning_rate": 0.00016715265434459465, "loss": 0.9157, "step": 5982 }, { "epoch": 1.0651709401709402, "grad_norm": 0.6677651405334473, "learning_rate": 0.00016714228183284304, "loss": 1.1097, "step": 5983 }, { "epoch": 1.065349002849003, "grad_norm": 0.6253604292869568, "learning_rate": 0.0001671319080055779, "loss": 0.9819, "step": 5984 }, { "epoch": 1.0655270655270654, "grad_norm": 0.5548844337463379, "learning_rate": 0.0001671215328630025, "loss": 0.9324, "step": 5985 }, { "epoch": 1.0657051282051282, "grad_norm": 0.622062623500824, "learning_rate": 0.00016711115640532004, "loss": 0.8749, "step": 5986 }, { "epoch": 1.065883190883191, "grad_norm": 0.6496043801307678, "learning_rate": 0.00016710077863273394, "loss": 1.0642, "step": 5987 }, { "epoch": 1.0660612535612535, "grad_norm": 0.6140534281730652, "learning_rate": 0.00016709039954544746, "loss": 0.8928, "step": 5988 }, { "epoch": 1.0662393162393162, "grad_norm": 0.6387218236923218, "learning_rate": 0.00016708001914366393, "loss": 0.9525, "step": 5989 }, { "epoch": 1.066417378917379, "grad_norm": 0.6119858026504517, "learning_rate": 0.0001670696374275868, "loss": 0.8663, "step": 5990 }, { "epoch": 1.0665954415954415, "grad_norm": 0.6722040772438049, "learning_rate": 0.00016705925439741947, "loss": 1.1173, "step": 5991 }, { "epoch": 1.0667735042735043, "grad_norm": 0.8226081132888794, "learning_rate": 0.00016704887005336534, "loss": 1.0572, "step": 5992 }, { "epoch": 1.066951566951567, "grad_norm": 0.7248596549034119, "learning_rate": 0.00016703848439562785, "loss": 1.0493, "step": 5993 }, { "epoch": 1.0671296296296295, "grad_norm": 0.7185787558555603, "learning_rate": 0.00016702809742441058, "loss": 1.1366, "step": 5994 }, { "epoch": 1.0673076923076923, "grad_norm": 0.6118780970573425, "learning_rate": 0.00016701770913991694, "loss": 0.9557, "step": 5995 }, { "epoch": 1.067485754985755, "grad_norm": 0.6472596526145935, "learning_rate": 0.0001670073195423505, "loss": 0.9977, "step": 5996 
}, { "epoch": 1.0676638176638176, "grad_norm": 0.7110133767127991, "learning_rate": 0.00016699692863191484, "loss": 1.1932, "step": 5997 }, { "epoch": 1.0678418803418803, "grad_norm": 0.5827305912971497, "learning_rate": 0.00016698653640881354, "loss": 0.7641, "step": 5998 }, { "epoch": 1.068019943019943, "grad_norm": 0.527208149433136, "learning_rate": 0.00016697614287325017, "loss": 0.7683, "step": 5999 }, { "epoch": 1.0681980056980056, "grad_norm": 0.6680626273155212, "learning_rate": 0.00016696574802542848, "loss": 1.1748, "step": 6000 }, { "epoch": 1.0683760683760684, "grad_norm": 0.5947227478027344, "learning_rate": 0.00016695535186555204, "loss": 1.0894, "step": 6001 }, { "epoch": 1.068554131054131, "grad_norm": 0.5828250646591187, "learning_rate": 0.00016694495439382456, "loss": 0.9895, "step": 6002 }, { "epoch": 1.0687321937321936, "grad_norm": 0.5897728204727173, "learning_rate": 0.00016693455561044978, "loss": 0.9686, "step": 6003 }, { "epoch": 1.0689102564102564, "grad_norm": 0.5441751480102539, "learning_rate": 0.0001669241555156314, "loss": 0.8948, "step": 6004 }, { "epoch": 1.0690883190883191, "grad_norm": 0.694199800491333, "learning_rate": 0.00016691375410957324, "loss": 1.0824, "step": 6005 }, { "epoch": 1.0692663817663817, "grad_norm": 0.6077630519866943, "learning_rate": 0.00016690335139247906, "loss": 1.0931, "step": 6006 }, { "epoch": 1.0694444444444444, "grad_norm": 0.6558539867401123, "learning_rate": 0.0001668929473645527, "loss": 1.0099, "step": 6007 }, { "epoch": 1.0696225071225072, "grad_norm": 0.5722812414169312, "learning_rate": 0.00016688254202599798, "loss": 0.7999, "step": 6008 }, { "epoch": 1.0698005698005697, "grad_norm": 0.5915400981903076, "learning_rate": 0.0001668721353770188, "loss": 0.7866, "step": 6009 }, { "epoch": 1.0699786324786325, "grad_norm": 0.5290952324867249, "learning_rate": 0.00016686172741781901, "loss": 0.793, "step": 6010 }, { "epoch": 1.0701566951566952, "grad_norm": 0.5501774549484253, "learning_rate": 
0.00016685131814860263, "loss": 0.8775, "step": 6011 }, { "epoch": 1.0703347578347577, "grad_norm": 0.6192594766616821, "learning_rate": 0.00016684090756957347, "loss": 1.1686, "step": 6012 }, { "epoch": 1.0705128205128205, "grad_norm": 0.6640267968177795, "learning_rate": 0.00016683049568093561, "loss": 1.1789, "step": 6013 }, { "epoch": 1.0706908831908832, "grad_norm": 0.552893877029419, "learning_rate": 0.00016682008248289303, "loss": 0.7957, "step": 6014 }, { "epoch": 1.0708689458689458, "grad_norm": 0.6406302452087402, "learning_rate": 0.00016680966797564972, "loss": 1.1174, "step": 6015 }, { "epoch": 1.0710470085470085, "grad_norm": null, "learning_rate": 0.00016680966797564972, "loss": 0.9168, "step": 6016 }, { "epoch": 1.0712250712250713, "grad_norm": 0.6384762525558472, "learning_rate": 0.00016679925215940975, "loss": 0.9831, "step": 6017 }, { "epoch": 1.071403133903134, "grad_norm": 0.5906224846839905, "learning_rate": 0.0001667888350343772, "loss": 0.9167, "step": 6018 }, { "epoch": 1.0715811965811965, "grad_norm": 0.658044695854187, "learning_rate": 0.00016677841660075617, "loss": 1.0075, "step": 6019 }, { "epoch": 1.0717592592592593, "grad_norm": 0.6313242316246033, "learning_rate": 0.00016676799685875078, "loss": 0.8551, "step": 6020 }, { "epoch": 1.0719373219373218, "grad_norm": 0.5891841053962708, "learning_rate": 0.00016675757580856518, "loss": 0.8475, "step": 6021 }, { "epoch": 1.0721153846153846, "grad_norm": 0.581317126750946, "learning_rate": 0.00016674715345040358, "loss": 0.9308, "step": 6022 }, { "epoch": 1.0722934472934473, "grad_norm": 0.5952537655830383, "learning_rate": 0.00016673672978447017, "loss": 0.9104, "step": 6023 }, { "epoch": 1.07247150997151, "grad_norm": 0.5934227705001831, "learning_rate": 0.00016672630481096915, "loss": 0.9882, "step": 6024 }, { "epoch": 1.0726495726495726, "grad_norm": 0.5867539048194885, "learning_rate": 0.00016671587853010482, "loss": 1.0186, "step": 6025 }, { "epoch": 1.0728276353276354,
"grad_norm": 0.6002280116081238, "learning_rate": 0.00016670545094208143, "loss": 0.92, "step": 6026 }, { "epoch": 1.073005698005698, "grad_norm": 0.6261683702468872, "learning_rate": 0.0001666950220471033, "loss": 0.9293, "step": 6027 }, { "epoch": 1.0731837606837606, "grad_norm": 0.6128147840499878, "learning_rate": 0.00016668459184537477, "loss": 1.0787, "step": 6028 }, { "epoch": 1.0733618233618234, "grad_norm": 0.62148118019104, "learning_rate": 0.00016667416033710016, "loss": 0.8843, "step": 6029 }, { "epoch": 1.0735398860398861, "grad_norm": 0.7166166305541992, "learning_rate": 0.0001666637275224839, "loss": 0.8877, "step": 6030 }, { "epoch": 1.0737179487179487, "grad_norm": 0.5275574922561646, "learning_rate": 0.0001666532934017304, "loss": 0.9604, "step": 6031 }, { "epoch": 1.0738960113960114, "grad_norm": 0.8132784962654114, "learning_rate": 0.00016664285797504406, "loss": 1.0203, "step": 6032 }, { "epoch": 1.074074074074074, "grad_norm": 0.5887695550918579, "learning_rate": 0.00016663242124262935, "loss": 0.8819, "step": 6033 }, { "epoch": 1.0742521367521367, "grad_norm": 0.5552900433540344, "learning_rate": 0.00016662198320469078, "loss": 0.7542, "step": 6034 }, { "epoch": 1.0744301994301995, "grad_norm": 0.6228970885276794, "learning_rate": 0.0001666115438614328, "loss": 1.0362, "step": 6035 }, { "epoch": 1.0746082621082622, "grad_norm": 0.7193471789360046, "learning_rate": 0.00016660110321306003, "loss": 1.3073, "step": 6036 }, { "epoch": 1.0747863247863247, "grad_norm": 0.6167412996292114, "learning_rate": 0.000166590661259777, "loss": 0.941, "step": 6037 }, { "epoch": 1.0749643874643875, "grad_norm": 0.5716922879219055, "learning_rate": 0.00016658021800178827, "loss": 0.83, "step": 6038 }, { "epoch": 1.0751424501424502, "grad_norm": 0.6404047012329102, "learning_rate": 0.00016656977343929848, "loss": 1.0617, "step": 6039 }, { "epoch": 1.0753205128205128, "grad_norm": 0.531395435333252, "learning_rate": 0.00016655932757251226, "loss": 0.7785, "step": 
6040 }, { "epoch": 1.0754985754985755, "grad_norm": 0.6468462347984314, "learning_rate": 0.0001665488804016343, "loss": 0.7893, "step": 6041 }, { "epoch": 1.0756766381766383, "grad_norm": 0.6539653539657593, "learning_rate": 0.00016653843192686925, "loss": 1.1011, "step": 6042 }, { "epoch": 1.0758547008547008, "grad_norm": 0.630107045173645, "learning_rate": 0.0001665279821484219, "loss": 0.9262, "step": 6043 }, { "epoch": 1.0760327635327636, "grad_norm": 0.5875992774963379, "learning_rate": 0.00016651753106649688, "loss": 1.0501, "step": 6044 }, { "epoch": 1.0762108262108263, "grad_norm": 0.573428750038147, "learning_rate": 0.00016650707868129904, "loss": 1.0672, "step": 6045 }, { "epoch": 1.0763888888888888, "grad_norm": 0.6215469241142273, "learning_rate": 0.00016649662499303316, "loss": 0.868, "step": 6046 }, { "epoch": 1.0765669515669516, "grad_norm": 0.6666893362998962, "learning_rate": 0.00016648617000190402, "loss": 1.0965, "step": 6047 }, { "epoch": 1.0767450142450143, "grad_norm": 0.8343498706817627, "learning_rate": 0.00016647571370811653, "loss": 1.2302, "step": 6048 }, { "epoch": 1.0769230769230769, "grad_norm": 0.591147780418396, "learning_rate": 0.0001664652561118755, "loss": 0.9698, "step": 6049 }, { "epoch": 1.0771011396011396, "grad_norm": 0.573375940322876, "learning_rate": 0.00016645479721338584, "loss": 0.8798, "step": 6050 }, { "epoch": 1.0772792022792024, "grad_norm": 0.4956737160682678, "learning_rate": 0.00016644433701285246, "loss": 0.6523, "step": 6051 }, { "epoch": 1.077457264957265, "grad_norm": 0.6896619200706482, "learning_rate": 0.00016643387551048034, "loss": 0.8911, "step": 6052 }, { "epoch": 1.0776353276353277, "grad_norm": 0.5820416808128357, "learning_rate": 0.00016642341270647445, "loss": 1.1486, "step": 6053 }, { "epoch": 1.0778133903133904, "grad_norm": 0.611132025718689, "learning_rate": 0.00016641294860103976, "loss": 1.0705, "step": 6054 }, { "epoch": 1.077991452991453, "grad_norm": 0.6705698370933533, "learning_rate": 
0.00016640248319438133, "loss": 0.9826, "step": 6055 }, { "epoch": 1.0781695156695157, "grad_norm": 0.5987013578414917, "learning_rate": 0.00016639201648670416, "loss": 1.0409, "step": 6056 }, { "epoch": 1.0783475783475784, "grad_norm": 0.6707149744033813, "learning_rate": 0.00016638154847821332, "loss": 1.1332, "step": 6057 }, { "epoch": 1.078525641025641, "grad_norm": 0.6400678157806396, "learning_rate": 0.00016637107916911393, "loss": 1.2559, "step": 6058 }, { "epoch": 1.0787037037037037, "grad_norm": 0.6370311379432678, "learning_rate": 0.00016636060855961115, "loss": 0.9752, "step": 6059 }, { "epoch": 1.0788817663817665, "grad_norm": 0.6116052269935608, "learning_rate": 0.00016635013664991012, "loss": 0.8364, "step": 6060 }, { "epoch": 1.079059829059829, "grad_norm": 0.7932127714157104, "learning_rate": 0.00016633966344021593, "loss": 0.939, "step": 6061 }, { "epoch": 1.0792378917378918, "grad_norm": 0.576249897480011, "learning_rate": 0.00016632918893073385, "loss": 0.8911, "step": 6062 }, { "epoch": 1.0794159544159545, "grad_norm": 0.5456888675689697, "learning_rate": 0.00016631871312166915, "loss": 0.8646, "step": 6063 }, { "epoch": 1.079594017094017, "grad_norm": 0.717522919178009, "learning_rate": 0.000166308236013227, "loss": 1.0814, "step": 6064 }, { "epoch": 1.0797720797720798, "grad_norm": 0.6637256145477295, "learning_rate": 0.0001662977576056127, "loss": 1.22, "step": 6065 }, { "epoch": 1.0799501424501425, "grad_norm": 0.5846666693687439, "learning_rate": 0.0001662872778990316, "loss": 1.1745, "step": 6066 }, { "epoch": 1.080128205128205, "grad_norm": 0.6611326336860657, "learning_rate": 0.00016627679689368895, "loss": 1.1262, "step": 6067 }, { "epoch": 1.0803062678062678, "grad_norm": 0.6022892594337463, "learning_rate": 0.00016626631458979015, "loss": 0.9741, "step": 6068 }, { "epoch": 1.0804843304843306, "grad_norm": 0.5862685441970825, "learning_rate": 0.00016625583098754058, "loss": 0.914, "step": 6069 }, { "epoch": 1.080662393162393, 
"grad_norm": 0.7089241147041321, "learning_rate": 0.00016624534608714563, "loss": 1.0614, "step": 6070 }, { "epoch": 1.0808404558404558, "grad_norm": 0.5286028981208801, "learning_rate": 0.00016623485988881076, "loss": 0.8756, "step": 6071 }, { "epoch": 1.0810185185185186, "grad_norm": 0.6437101364135742, "learning_rate": 0.00016622437239274137, "loss": 0.7222, "step": 6072 }, { "epoch": 1.0811965811965811, "grad_norm": 0.6197740435600281, "learning_rate": 0.000166213883599143, "loss": 0.7876, "step": 6073 }, { "epoch": 1.0813746438746439, "grad_norm": 0.5889328122138977, "learning_rate": 0.0001662033935082211, "loss": 0.9587, "step": 6074 }, { "epoch": 1.0815527065527066, "grad_norm": 0.5353847742080688, "learning_rate": 0.00016619290212018125, "loss": 0.8664, "step": 6075 }, { "epoch": 1.0817307692307692, "grad_norm": 0.7202061414718628, "learning_rate": 0.00016618240943522898, "loss": 1.0429, "step": 6076 }, { "epoch": 1.081908831908832, "grad_norm": 0.5831515192985535, "learning_rate": 0.0001661719154535699, "loss": 1.0323, "step": 6077 }, { "epoch": 1.0820868945868947, "grad_norm": 0.6270500421524048, "learning_rate": 0.00016616142017540953, "loss": 0.9272, "step": 6078 }, { "epoch": 1.0822649572649572, "grad_norm": 0.6064695119857788, "learning_rate": 0.00016615092360095364, "loss": 1.0629, "step": 6079 }, { "epoch": 1.08244301994302, "grad_norm": 0.5578122138977051, "learning_rate": 0.00016614042573040777, "loss": 0.8601, "step": 6080 }, { "epoch": 1.0826210826210827, "grad_norm": 0.5920688509941101, "learning_rate": 0.0001661299265639777, "loss": 1.0082, "step": 6081 }, { "epoch": 1.0827991452991452, "grad_norm": 0.6191682815551758, "learning_rate": 0.0001661194261018691, "loss": 0.9645, "step": 6082 }, { "epoch": 1.082977207977208, "grad_norm": 0.6403279304504395, "learning_rate": 0.00016610892434428765, "loss": 0.9263, "step": 6083 }, { "epoch": 1.0831552706552707, "grad_norm": 0.579502284526825, "learning_rate": 0.00016609842129143915, "loss": 0.8997, 
"step": 6084 }, { "epoch": 1.0833333333333333, "grad_norm": 0.5831437706947327, "learning_rate": 0.00016608791694352944, "loss": 1.0703, "step": 6085 }, { "epoch": 1.083511396011396, "grad_norm": 0.6188452243804932, "learning_rate": 0.00016607741130076424, "loss": 0.8856, "step": 6086 }, { "epoch": 1.0836894586894588, "grad_norm": 0.7413692474365234, "learning_rate": 0.00016606690436334946, "loss": 1.1995, "step": 6087 }, { "epoch": 1.0838675213675213, "grad_norm": 0.5552099347114563, "learning_rate": 0.00016605639613149093, "loss": 0.8514, "step": 6088 }, { "epoch": 1.084045584045584, "grad_norm": 0.5906503200531006, "learning_rate": 0.00016604588660539452, "loss": 0.9431, "step": 6089 }, { "epoch": 1.0842236467236468, "grad_norm": 0.5326111316680908, "learning_rate": 0.0001660353757852662, "loss": 0.8306, "step": 6090 }, { "epoch": 1.0844017094017093, "grad_norm": 0.7273091673851013, "learning_rate": 0.0001660248636713118, "loss": 1.1109, "step": 6091 }, { "epoch": 1.084579772079772, "grad_norm": 0.66513592004776, "learning_rate": 0.00016601435026373737, "loss": 1.0621, "step": 6092 }, { "epoch": 1.0847578347578348, "grad_norm": 0.6470831632614136, "learning_rate": 0.00016600383556274892, "loss": 1.1075, "step": 6093 }, { "epoch": 1.0849358974358974, "grad_norm": 0.6308658719062805, "learning_rate": 0.0001659933195685524, "loss": 0.9832, "step": 6094 }, { "epoch": 1.08511396011396, "grad_norm": 0.6569336652755737, "learning_rate": 0.00016598280228135388, "loss": 0.9754, "step": 6095 }, { "epoch": 1.0852920227920229, "grad_norm": 0.5672318339347839, "learning_rate": 0.0001659722837013594, "loss": 0.9075, "step": 6096 }, { "epoch": 1.0854700854700854, "grad_norm": 0.6397247314453125, "learning_rate": 0.00016596176382877506, "loss": 1.0358, "step": 6097 }, { "epoch": 1.0856481481481481, "grad_norm": 0.6046154499053955, "learning_rate": 0.000165951242663807, "loss": 0.9036, "step": 6098 }, { "epoch": 1.085826210826211, "grad_norm": 0.7190790176391602, 
"learning_rate": 0.00016594072020666134, "loss": 1.05, "step": 6099 }, { "epoch": 1.0860042735042734, "grad_norm": 0.636986255645752, "learning_rate": 0.00016593019645754425, "loss": 1.0648, "step": 6100 }, { "epoch": 1.0861823361823362, "grad_norm": 0.7239426374435425, "learning_rate": 0.00016591967141666193, "loss": 1.3332, "step": 6101 }, { "epoch": 1.086360398860399, "grad_norm": 0.5623281002044678, "learning_rate": 0.00016590914508422054, "loss": 0.997, "step": 6102 }, { "epoch": 1.0865384615384615, "grad_norm": 0.5559574365615845, "learning_rate": 0.00016589861746042642, "loss": 0.9309, "step": 6103 }, { "epoch": 1.0867165242165242, "grad_norm": 0.6056998372077942, "learning_rate": 0.00016588808854548574, "loss": 1.05, "step": 6104 }, { "epoch": 1.086894586894587, "grad_norm": 0.6419603228569031, "learning_rate": 0.00016587755833960487, "loss": 0.8933, "step": 6105 }, { "epoch": 1.0870726495726495, "grad_norm": 0.5236496329307556, "learning_rate": 0.00016586702684299006, "loss": 1.0061, "step": 6106 }, { "epoch": 1.0872507122507122, "grad_norm": 0.5764613747596741, "learning_rate": 0.0001658564940558477, "loss": 1.0218, "step": 6107 }, { "epoch": 1.087428774928775, "grad_norm": 0.6049391627311707, "learning_rate": 0.00016584595997838416, "loss": 0.8157, "step": 6108 }, { "epoch": 1.0876068376068375, "grad_norm": 0.585422933101654, "learning_rate": 0.0001658354246108058, "loss": 1.2761, "step": 6109 }, { "epoch": 1.0877849002849003, "grad_norm": 0.6420125365257263, "learning_rate": 0.00016582488795331907, "loss": 1.1978, "step": 6110 }, { "epoch": 1.087962962962963, "grad_norm": 0.646091878414154, "learning_rate": 0.00016581435000613038, "loss": 0.8946, "step": 6111 }, { "epoch": 1.0881410256410255, "grad_norm": 0.6563934087753296, "learning_rate": 0.00016580381076944625, "loss": 1.0625, "step": 6112 }, { "epoch": 1.0883190883190883, "grad_norm": 0.6796613931655884, "learning_rate": 0.0001657932702434731, "loss": 0.9401, "step": 6113 }, { "epoch": 
1.088497150997151, "grad_norm": 0.6248648762702942, "learning_rate": 0.00016578272842841753, "loss": 0.8558, "step": 6114 }, { "epoch": 1.0886752136752136, "grad_norm": 0.5136269330978394, "learning_rate": 0.00016577218532448605, "loss": 0.6424, "step": 6115 }, { "epoch": 1.0888532763532763, "grad_norm": 0.5581641793251038, "learning_rate": 0.00016576164093188523, "loss": 0.7923, "step": 6116 }, { "epoch": 1.089031339031339, "grad_norm": 0.630352258682251, "learning_rate": 0.0001657510952508216, "loss": 0.9115, "step": 6117 }, { "epoch": 1.0892094017094016, "grad_norm": 0.6167593002319336, "learning_rate": 0.0001657405482815019, "loss": 1.1112, "step": 6118 }, { "epoch": 1.0893874643874644, "grad_norm": 0.5908578634262085, "learning_rate": 0.00016573000002413271, "loss": 1.0359, "step": 6119 }, { "epoch": 1.0895655270655271, "grad_norm": 0.6326140761375427, "learning_rate": 0.00016571945047892073, "loss": 1.0459, "step": 6120 }, { "epoch": 1.0897435897435896, "grad_norm": 0.7273572683334351, "learning_rate": 0.00016570889964607262, "loss": 1.0901, "step": 6121 }, { "epoch": 1.0899216524216524, "grad_norm": 0.6168062090873718, "learning_rate": 0.00016569834752579513, "loss": 0.8739, "step": 6122 }, { "epoch": 1.0900997150997151, "grad_norm": 0.5620378255844116, "learning_rate": 0.00016568779411829497, "loss": 0.9614, "step": 6123 }, { "epoch": 1.0902777777777777, "grad_norm": 0.6319156885147095, "learning_rate": 0.00016567723942377899, "loss": 1.1031, "step": 6124 }, { "epoch": 1.0904558404558404, "grad_norm": 0.6590072512626648, "learning_rate": 0.00016566668344245388, "loss": 1.0086, "step": 6125 }, { "epoch": 1.0906339031339032, "grad_norm": 0.5823387503623962, "learning_rate": 0.00016565612617452656, "loss": 0.8886, "step": 6126 }, { "epoch": 1.0908119658119657, "grad_norm": 0.5795989632606506, "learning_rate": 0.00016564556762020381, "loss": 0.7683, "step": 6127 }, { "epoch": 1.0909900284900285, "grad_norm": 0.5940101742744446, "learning_rate": 
0.00016563500777969255, "loss": 0.8873, "step": 6128 }, { "epoch": 1.0911680911680912, "grad_norm": 0.5708247423171997, "learning_rate": 0.00016562444665319963, "loss": 0.7382, "step": 6129 }, { "epoch": 1.0913461538461537, "grad_norm": 0.6339239478111267, "learning_rate": 0.00016561388424093202, "loss": 0.9323, "step": 6130 }, { "epoch": 1.0915242165242165, "grad_norm": 0.720000147819519, "learning_rate": 0.00016560332054309663, "loss": 1.0437, "step": 6131 }, { "epoch": 1.0917022792022792, "grad_norm": 0.686580240726471, "learning_rate": 0.00016559275555990048, "loss": 0.9841, "step": 6132 }, { "epoch": 1.091880341880342, "grad_norm": 0.6067900061607361, "learning_rate": 0.00016558218929155053, "loss": 1.0862, "step": 6133 }, { "epoch": 1.0920584045584045, "grad_norm": 0.6678896546363831, "learning_rate": 0.00016557162173825384, "loss": 0.8509, "step": 6134 }, { "epoch": 1.0922364672364673, "grad_norm": 0.53044193983078, "learning_rate": 0.0001655610529002174, "loss": 0.9227, "step": 6135 }, { "epoch": 1.0924145299145298, "grad_norm": 0.6499412655830383, "learning_rate": 0.00016555048277764836, "loss": 1.0867, "step": 6136 }, { "epoch": 1.0925925925925926, "grad_norm": 0.6543099284172058, "learning_rate": 0.00016553991137075374, "loss": 0.849, "step": 6137 }, { "epoch": 1.0927706552706553, "grad_norm": 0.5772737860679626, "learning_rate": 0.0001655293386797407, "loss": 0.8475, "step": 6138 }, { "epoch": 1.092948717948718, "grad_norm": 0.616348385810852, "learning_rate": 0.00016551876470481642, "loss": 0.9205, "step": 6139 }, { "epoch": 1.0931267806267806, "grad_norm": 0.7151142954826355, "learning_rate": 0.00016550818944618801, "loss": 1.1389, "step": 6140 }, { "epoch": 1.0933048433048433, "grad_norm": 0.6566469669342041, "learning_rate": 0.00016549761290406275, "loss": 0.8216, "step": 6141 }, { "epoch": 1.0934829059829059, "grad_norm": 0.7075428366661072, "learning_rate": 0.00016548703507864783, "loss": 1.065, "step": 6142 }, { "epoch": 1.0936609686609686, 
"grad_norm": 0.6589360237121582, "learning_rate": 0.00016547645597015046, "loss": 0.9899, "step": 6143 }, { "epoch": 1.0938390313390314, "grad_norm": 0.6445585489273071, "learning_rate": 0.00016546587557877797, "loss": 1.1629, "step": 6144 }, { "epoch": 1.0940170940170941, "grad_norm": 0.6216462850570679, "learning_rate": 0.00016545529390473763, "loss": 0.9685, "step": 6145 }, { "epoch": 1.0941951566951567, "grad_norm": 0.6195303797721863, "learning_rate": 0.0001654447109482368, "loss": 1.144, "step": 6146 }, { "epoch": 1.0943732193732194, "grad_norm": 0.6625444293022156, "learning_rate": 0.0001654341267094828, "loss": 0.9886, "step": 6147 }, { "epoch": 1.094551282051282, "grad_norm": 0.6449851393699646, "learning_rate": 0.000165423541188683, "loss": 0.9568, "step": 6148 }, { "epoch": 1.0947293447293447, "grad_norm": 0.6490375995635986, "learning_rate": 0.00016541295438604484, "loss": 1.1304, "step": 6149 }, { "epoch": 1.0949074074074074, "grad_norm": 0.6771987676620483, "learning_rate": 0.00016540236630177574, "loss": 1.0426, "step": 6150 }, { "epoch": 1.0950854700854702, "grad_norm": 0.5214568376541138, "learning_rate": 0.00016539177693608307, "loss": 0.6742, "step": 6151 }, { "epoch": 1.0952635327635327, "grad_norm": 0.6005097031593323, "learning_rate": 0.00016538118628917442, "loss": 0.9901, "step": 6152 }, { "epoch": 1.0954415954415955, "grad_norm": 0.6449539065361023, "learning_rate": 0.0001653705943612572, "loss": 0.9654, "step": 6153 }, { "epoch": 1.095619658119658, "grad_norm": 0.6443646550178528, "learning_rate": 0.00016536000115253903, "loss": 0.9084, "step": 6154 }, { "epoch": 1.0957977207977208, "grad_norm": 0.6072495579719543, "learning_rate": 0.0001653494066632274, "loss": 0.6308, "step": 6155 }, { "epoch": 1.0959757834757835, "grad_norm": 0.5751157999038696, "learning_rate": 0.00016533881089352988, "loss": 0.96, "step": 6156 }, { "epoch": 1.0961538461538463, "grad_norm": 0.6310713291168213, "learning_rate": 0.0001653282138436541, "loss": 1.0997, 
"step": 6157 }, { "epoch": 1.0963319088319088, "grad_norm": 0.5573651790618896, "learning_rate": 0.00016531761551380765, "loss": 0.9738, "step": 6158 }, { "epoch": 1.0965099715099715, "grad_norm": 0.5615308880805969, "learning_rate": 0.00016530701590419824, "loss": 0.9658, "step": 6159 }, { "epoch": 1.0966880341880343, "grad_norm": 0.6471942663192749, "learning_rate": 0.0001652964150150335, "loss": 1.0763, "step": 6160 }, { "epoch": 1.0968660968660968, "grad_norm": 0.6305427551269531, "learning_rate": 0.00016528581284652117, "loss": 1.112, "step": 6161 }, { "epoch": 1.0970441595441596, "grad_norm": 0.6881145238876343, "learning_rate": 0.00016527520939886892, "loss": 0.8476, "step": 6162 }, { "epoch": 1.0972222222222223, "grad_norm": 0.6507891416549683, "learning_rate": 0.00016526460467228458, "loss": 1.1097, "step": 6163 }, { "epoch": 1.0974002849002849, "grad_norm": 0.5960137844085693, "learning_rate": 0.00016525399866697586, "loss": 0.9934, "step": 6164 }, { "epoch": 1.0975783475783476, "grad_norm": 0.6001808643341064, "learning_rate": 0.0001652433913831506, "loss": 1.0782, "step": 6165 }, { "epoch": 1.0977564102564104, "grad_norm": 0.5639005303382874, "learning_rate": 0.00016523278282101663, "loss": 1.0929, "step": 6166 }, { "epoch": 1.0979344729344729, "grad_norm": 0.5962058305740356, "learning_rate": 0.00016522217298078177, "loss": 1.0315, "step": 6167 }, { "epoch": 1.0981125356125356, "grad_norm": 0.6920329928398132, "learning_rate": 0.0001652115618626539, "loss": 0.9176, "step": 6168 }, { "epoch": 1.0982905982905984, "grad_norm": 0.6963527202606201, "learning_rate": 0.00016520094946684098, "loss": 1.2136, "step": 6169 }, { "epoch": 1.098468660968661, "grad_norm": 0.5855711102485657, "learning_rate": 0.00016519033579355093, "loss": 0.8453, "step": 6170 }, { "epoch": 1.0986467236467237, "grad_norm": 0.6454927325248718, "learning_rate": 0.0001651797208429916, "loss": 1.0747, "step": 6171 }, { "epoch": 1.0988247863247864, "grad_norm": 0.644585907459259, 
"learning_rate": 0.00016516910461537108, "loss": 0.8165, "step": 6172 }, { "epoch": 1.099002849002849, "grad_norm": 0.6488069891929626, "learning_rate": 0.00016515848711089732, "loss": 1.1048, "step": 6173 }, { "epoch": 1.0991809116809117, "grad_norm": 0.5867953896522522, "learning_rate": 0.00016514786832977834, "loss": 0.63, "step": 6174 }, { "epoch": 1.0993589743589745, "grad_norm": 0.560591459274292, "learning_rate": 0.00016513724827222227, "loss": 0.9255, "step": 6175 }, { "epoch": 1.099537037037037, "grad_norm": 0.675262451171875, "learning_rate": 0.00016512662693843707, "loss": 0.7637, "step": 6176 }, { "epoch": 1.0997150997150997, "grad_norm": 0.6515669822692871, "learning_rate": 0.00016511600432863091, "loss": 0.7579, "step": 6177 }, { "epoch": 1.0998931623931625, "grad_norm": 0.683409571647644, "learning_rate": 0.00016510538044301192, "loss": 0.9183, "step": 6178 }, { "epoch": 1.100071225071225, "grad_norm": 0.6194507479667664, "learning_rate": 0.00016509475528178827, "loss": 1.16, "step": 6179 }, { "epoch": 1.1002492877492878, "grad_norm": 0.6192209720611572, "learning_rate": 0.0001650841288451681, "loss": 1.1392, "step": 6180 }, { "epoch": 1.1004273504273505, "grad_norm": 0.6029189825057983, "learning_rate": 0.0001650735011333596, "loss": 1.1453, "step": 6181 }, { "epoch": 1.100605413105413, "grad_norm": 0.7040731310844421, "learning_rate": 0.00016506287214657105, "loss": 0.9367, "step": 6182 }, { "epoch": 1.1007834757834758, "grad_norm": 0.5909842252731323, "learning_rate": 0.00016505224188501067, "loss": 0.6463, "step": 6183 }, { "epoch": 1.1009615384615385, "grad_norm": 0.6129698157310486, "learning_rate": 0.00016504161034888674, "loss": 0.9432, "step": 6184 }, { "epoch": 1.101139601139601, "grad_norm": 0.6181607842445374, "learning_rate": 0.00016503097753840757, "loss": 0.9934, "step": 6185 }, { "epoch": 1.1013176638176638, "grad_norm": 0.6463226675987244, "learning_rate": 0.0001650203434537815, "loss": 0.8471, "step": 6186 }, { "epoch": 
1.1014957264957266, "grad_norm": 0.5999348163604736, "learning_rate": 0.00016500970809521688, "loss": 0.9418, "step": 6187 }, { "epoch": 1.101673789173789, "grad_norm": 0.629504919052124, "learning_rate": 0.00016499907146292204, "loss": 0.9699, "step": 6188 }, { "epoch": 1.1018518518518519, "grad_norm": 0.694767951965332, "learning_rate": 0.00016498843355710542, "loss": 0.8793, "step": 6189 }, { "epoch": 1.1020299145299146, "grad_norm": 0.6205509901046753, "learning_rate": 0.00016497779437797547, "loss": 0.8384, "step": 6190 }, { "epoch": 1.1022079772079771, "grad_norm": 0.6256579756736755, "learning_rate": 0.0001649671539257406, "loss": 0.9275, "step": 6191 }, { "epoch": 1.10238603988604, "grad_norm": 0.6593793034553528, "learning_rate": 0.00016495651220060933, "loss": 1.0495, "step": 6192 }, { "epoch": 1.1025641025641026, "grad_norm": 0.7809221148490906, "learning_rate": 0.00016494586920279012, "loss": 1.0485, "step": 6193 }, { "epoch": 1.1027421652421652, "grad_norm": 0.6147717833518982, "learning_rate": 0.0001649352249324915, "loss": 0.8739, "step": 6194 }, { "epoch": 1.102920227920228, "grad_norm": 0.565411388874054, "learning_rate": 0.00016492457938992208, "loss": 0.9759, "step": 6195 }, { "epoch": 1.1030982905982907, "grad_norm": 0.596370279788971, "learning_rate": 0.00016491393257529036, "loss": 0.9658, "step": 6196 }, { "epoch": 1.1032763532763532, "grad_norm": 0.6334326863288879, "learning_rate": 0.00016490328448880498, "loss": 0.8785, "step": 6197 }, { "epoch": 1.103454415954416, "grad_norm": 0.5538334846496582, "learning_rate": 0.0001648926351306746, "loss": 0.7174, "step": 6198 }, { "epoch": 1.1036324786324787, "grad_norm": 0.6249658465385437, "learning_rate": 0.00016488198450110778, "loss": 0.8579, "step": 6199 }, { "epoch": 1.1038105413105412, "grad_norm": 0.6128895878791809, "learning_rate": 0.00016487133260031329, "loss": 0.8538, "step": 6200 }, { "epoch": 1.103988603988604, "grad_norm": 0.5808702707290649, "learning_rate": 0.0001648606794284998, 
"loss": 0.8143, "step": 6201 }, { "epoch": 1.1041666666666667, "grad_norm": 0.671419084072113, "learning_rate": 0.00016485002498587602, "loss": 1.1268, "step": 6202 }, { "epoch": 1.1043447293447293, "grad_norm": 0.5706788897514343, "learning_rate": 0.00016483936927265075, "loss": 0.9558, "step": 6203 }, { "epoch": 1.104522792022792, "grad_norm": 0.5700307488441467, "learning_rate": 0.00016482871228903266, "loss": 0.9616, "step": 6204 }, { "epoch": 1.1047008547008548, "grad_norm": 0.5764816403388977, "learning_rate": 0.0001648180540352307, "loss": 0.8692, "step": 6205 }, { "epoch": 1.1048789173789173, "grad_norm": 0.5786563754081726, "learning_rate": 0.00016480739451145358, "loss": 0.9406, "step": 6206 }, { "epoch": 1.10505698005698, "grad_norm": 0.6112591624259949, "learning_rate": 0.0001647967337179102, "loss": 0.8999, "step": 6207 }, { "epoch": 1.1052350427350428, "grad_norm": 0.5708907246589661, "learning_rate": 0.00016478607165480944, "loss": 0.9236, "step": 6208 }, { "epoch": 1.1054131054131053, "grad_norm": 0.6742013692855835, "learning_rate": 0.00016477540832236014, "loss": 1.0911, "step": 6209 }, { "epoch": 1.105591168091168, "grad_norm": 0.6382617354393005, "learning_rate": 0.0001647647437207713, "loss": 0.7901, "step": 6210 }, { "epoch": 1.1057692307692308, "grad_norm": 0.6241547465324402, "learning_rate": 0.00016475407785025188, "loss": 1.0048, "step": 6211 }, { "epoch": 1.1059472934472934, "grad_norm": 0.6452877521514893, "learning_rate": 0.00016474341071101077, "loss": 0.8902, "step": 6212 }, { "epoch": 1.1061253561253561, "grad_norm": 0.6212326288223267, "learning_rate": 0.00016473274230325704, "loss": 1.078, "step": 6213 }, { "epoch": 1.1063034188034189, "grad_norm": 0.6870912909507751, "learning_rate": 0.00016472207262719968, "loss": 0.9127, "step": 6214 }, { "epoch": 1.1064814814814814, "grad_norm": 0.6286750435829163, "learning_rate": 0.00016471140168304777, "loss": 1.0271, "step": 6215 }, { "epoch": 1.1066595441595442, "grad_norm": 
0.645806074142456, "learning_rate": 0.00016470072947101036, "loss": 1.1514, "step": 6216 }, { "epoch": 1.106837606837607, "grad_norm": 0.6800320148468018, "learning_rate": 0.00016469005599129653, "loss": 0.9322, "step": 6217 }, { "epoch": 1.1070156695156694, "grad_norm": 0.5898309946060181, "learning_rate": 0.0001646793812441155, "loss": 1.065, "step": 6218 }, { "epoch": 1.1071937321937322, "grad_norm": 0.6000019907951355, "learning_rate": 0.00016466870522967634, "loss": 0.911, "step": 6219 }, { "epoch": 1.107371794871795, "grad_norm": 0.6164331436157227, "learning_rate": 0.0001646580279481882, "loss": 0.8421, "step": 6220 }, { "epoch": 1.1075498575498575, "grad_norm": 0.6410242319107056, "learning_rate": 0.00016464734939986036, "loss": 0.9688, "step": 6221 }, { "epoch": 1.1077279202279202, "grad_norm": 0.7153300046920776, "learning_rate": 0.00016463666958490197, "loss": 1.0722, "step": 6222 }, { "epoch": 1.107905982905983, "grad_norm": 0.6977026462554932, "learning_rate": 0.00016462598850352234, "loss": 1.0192, "step": 6223 }, { "epoch": 1.1080840455840455, "grad_norm": 0.6379461884498596, "learning_rate": 0.0001646153061559307, "loss": 1.0474, "step": 6224 }, { "epoch": 1.1082621082621082, "grad_norm": 0.6135090589523315, "learning_rate": 0.00016460462254233634, "loss": 1.0082, "step": 6225 }, { "epoch": 1.108440170940171, "grad_norm": 0.6326230764389038, "learning_rate": 0.00016459393766294866, "loss": 1.1097, "step": 6226 }, { "epoch": 1.1086182336182335, "grad_norm": 0.6636839509010315, "learning_rate": 0.0001645832515179769, "loss": 0.9689, "step": 6227 }, { "epoch": 1.1087962962962963, "grad_norm": 0.5713129043579102, "learning_rate": 0.00016457256410763052, "loss": 0.8642, "step": 6228 }, { "epoch": 1.108974358974359, "grad_norm": 0.584204912185669, "learning_rate": 0.00016456187543211888, "loss": 0.9957, "step": 6229 }, { "epoch": 1.1091524216524216, "grad_norm": 0.5920230746269226, "learning_rate": 0.0001645511854916514, "loss": 0.7297, "step": 6230 }, { 
"epoch": 1.1093304843304843, "grad_norm": 0.6207385063171387, "learning_rate": 0.0001645404942864375, "loss": 0.868, "step": 6231 }, { "epoch": 1.109508547008547, "grad_norm": 0.7267234921455383, "learning_rate": 0.00016452980181668673, "loss": 1.0248, "step": 6232 }, { "epoch": 1.1096866096866096, "grad_norm": 0.5925650596618652, "learning_rate": 0.00016451910808260852, "loss": 1.1075, "step": 6233 }, { "epoch": 1.1098646723646723, "grad_norm": 0.5632196664810181, "learning_rate": 0.00016450841308441244, "loss": 0.9865, "step": 6234 }, { "epoch": 1.110042735042735, "grad_norm": 0.6115161180496216, "learning_rate": 0.000164497716822308, "loss": 1.1343, "step": 6235 }, { "epoch": 1.1102207977207976, "grad_norm": 0.634398341178894, "learning_rate": 0.00016448701929650477, "loss": 1.1039, "step": 6236 }, { "epoch": 1.1103988603988604, "grad_norm": 0.5843468308448792, "learning_rate": 0.00016447632050721237, "loss": 0.8462, "step": 6237 }, { "epoch": 1.1105769230769231, "grad_norm": 0.799375593662262, "learning_rate": 0.0001644656204546404, "loss": 0.9861, "step": 6238 }, { "epoch": 1.1107549857549857, "grad_norm": 0.600289523601532, "learning_rate": 0.0001644549191389985, "loss": 1.0323, "step": 6239 }, { "epoch": 1.1109330484330484, "grad_norm": 0.6154919266700745, "learning_rate": 0.00016444421656049637, "loss": 0.9158, "step": 6240 }, { "epoch": 1.1111111111111112, "grad_norm": 0.6685689687728882, "learning_rate": 0.00016443351271934367, "loss": 1.0429, "step": 6241 }, { "epoch": 1.1112891737891737, "grad_norm": 0.699978232383728, "learning_rate": 0.00016442280761575016, "loss": 1.072, "step": 6242 }, { "epoch": 1.1114672364672364, "grad_norm": 0.6461396217346191, "learning_rate": 0.00016441210124992556, "loss": 0.9758, "step": 6243 }, { "epoch": 1.1116452991452992, "grad_norm": 0.6463284492492676, "learning_rate": 0.00016440139362207962, "loss": 0.9205, "step": 6244 }, { "epoch": 1.1118233618233617, "grad_norm": 0.6587556004524231, "learning_rate": 
0.00016439068473242217, "loss": 1.0027, "step": 6245 }, { "epoch": 1.1120014245014245, "grad_norm": 0.6896520256996155, "learning_rate": 0.000164379974581163, "loss": 0.9788, "step": 6246 }, { "epoch": 1.1121794871794872, "grad_norm": 0.6766142845153809, "learning_rate": 0.000164369263168512, "loss": 0.9647, "step": 6247 }, { "epoch": 1.1123575498575498, "grad_norm": 0.7024297118186951, "learning_rate": 0.00016435855049467898, "loss": 1.1163, "step": 6248 }, { "epoch": 1.1125356125356125, "grad_norm": 0.6654963493347168, "learning_rate": 0.00016434783655987385, "loss": 0.9302, "step": 6249 }, { "epoch": 1.1127136752136753, "grad_norm": 0.6973692774772644, "learning_rate": 0.0001643371213643065, "loss": 0.9585, "step": 6250 }, { "epoch": 1.1128917378917378, "grad_norm": 0.7153545022010803, "learning_rate": 0.000164326404908187, "loss": 1.0485, "step": 6251 }, { "epoch": 1.1130698005698005, "grad_norm": 0.6114685535430908, "learning_rate": 0.00016431568719172516, "loss": 0.8881, "step": 6252 }, { "epoch": 1.1132478632478633, "grad_norm": 0.6500731706619263, "learning_rate": 0.00016430496821513103, "loss": 1.0658, "step": 6253 }, { "epoch": 1.113425925925926, "grad_norm": 0.5800092220306396, "learning_rate": 0.00016429424797861466, "loss": 0.9158, "step": 6254 }, { "epoch": 1.1136039886039886, "grad_norm": 0.6653759479522705, "learning_rate": 0.00016428352648238602, "loss": 0.9762, "step": 6255 }, { "epoch": 1.1137820512820513, "grad_norm": 0.649208128452301, "learning_rate": 0.00016427280372665525, "loss": 1.1184, "step": 6256 }, { "epoch": 1.1139601139601139, "grad_norm": 0.6665199398994446, "learning_rate": 0.00016426207971163238, "loss": 0.9417, "step": 6257 }, { "epoch": 1.1141381766381766, "grad_norm": 0.6110978126525879, "learning_rate": 0.00016425135443752758, "loss": 1.1531, "step": 6258 }, { "epoch": 1.1143162393162394, "grad_norm": 0.6517077088356018, "learning_rate": 0.00016424062790455093, "loss": 0.9055, "step": 6259 }, { "epoch": 1.114494301994302, 
"grad_norm": 0.6278966665267944, "learning_rate": 0.00016422990011291265, "loss": 1.0087, "step": 6260 }, { "epoch": 1.1146723646723646, "grad_norm": 0.5818809270858765, "learning_rate": 0.00016421917106282288, "loss": 1.0202, "step": 6261 }, { "epoch": 1.1148504273504274, "grad_norm": 0.5670005679130554, "learning_rate": 0.00016420844075449187, "loss": 0.841, "step": 6262 }, { "epoch": 1.11502849002849, "grad_norm": 0.6584762334823608, "learning_rate": 0.00016419770918812984, "loss": 1.0322, "step": 6263 }, { "epoch": 1.1152065527065527, "grad_norm": 0.6023790836334229, "learning_rate": 0.00016418697636394705, "loss": 0.9152, "step": 6264 }, { "epoch": 1.1153846153846154, "grad_norm": 0.6234691739082336, "learning_rate": 0.00016417624228215382, "loss": 0.9555, "step": 6265 }, { "epoch": 1.1155626780626782, "grad_norm": 0.6690816879272461, "learning_rate": 0.00016416550694296045, "loss": 0.9341, "step": 6266 }, { "epoch": 1.1157407407407407, "grad_norm": 0.6030237078666687, "learning_rate": 0.00016415477034657723, "loss": 1.0442, "step": 6267 }, { "epoch": 1.1159188034188035, "grad_norm": 0.5954633951187134, "learning_rate": 0.00016414403249321455, "loss": 0.9132, "step": 6268 }, { "epoch": 1.116096866096866, "grad_norm": 0.7876830101013184, "learning_rate": 0.0001641332933830828, "loss": 0.9456, "step": 6269 }, { "epoch": 1.1162749287749287, "grad_norm": 0.6776009798049927, "learning_rate": 0.00016412255301639244, "loss": 0.9022, "step": 6270 }, { "epoch": 1.1164529914529915, "grad_norm": 0.6094426512718201, "learning_rate": 0.0001641118113933538, "loss": 0.9629, "step": 6271 }, { "epoch": 1.1166310541310542, "grad_norm": 0.5818213820457458, "learning_rate": 0.00016410106851417742, "loss": 0.9049, "step": 6272 }, { "epoch": 1.1168091168091168, "grad_norm": 0.5668078064918518, "learning_rate": 0.00016409032437907377, "loss": 1.0011, "step": 6273 }, { "epoch": 1.1169871794871795, "grad_norm": 0.6984922289848328, "learning_rate": 0.00016407957898825334, "loss": 
0.9454, "step": 6274 }, { "epoch": 1.1171652421652423, "grad_norm": 0.5509830117225647, "learning_rate": 0.00016406883234192668, "loss": 0.9132, "step": 6275 }, { "epoch": 1.1173433048433048, "grad_norm": 0.5117461681365967, "learning_rate": 0.00016405808444030435, "loss": 0.7675, "step": 6276 }, { "epoch": 1.1175213675213675, "grad_norm": 0.6358339786529541, "learning_rate": 0.00016404733528359688, "loss": 0.9777, "step": 6277 }, { "epoch": 1.1176994301994303, "grad_norm": 0.5870591402053833, "learning_rate": 0.00016403658487201494, "loss": 0.8576, "step": 6278 }, { "epoch": 1.1178774928774928, "grad_norm": 0.6532407999038696, "learning_rate": 0.00016402583320576915, "loss": 1.1787, "step": 6279 }, { "epoch": 1.1180555555555556, "grad_norm": 0.6374639272689819, "learning_rate": 0.00016401508028507017, "loss": 0.9298, "step": 6280 }, { "epoch": 1.1182336182336183, "grad_norm": 0.7280316352844238, "learning_rate": 0.00016400432611012869, "loss": 1.1081, "step": 6281 }, { "epoch": 1.1184116809116809, "grad_norm": 0.6070699095726013, "learning_rate": 0.00016399357068115538, "loss": 0.9107, "step": 6282 }, { "epoch": 1.1185897435897436, "grad_norm": 0.6701489686965942, "learning_rate": 0.00016398281399836097, "loss": 1.0879, "step": 6283 }, { "epoch": 1.1187678062678064, "grad_norm": 0.6343162655830383, "learning_rate": 0.00016397205606195626, "loss": 0.8552, "step": 6284 }, { "epoch": 1.118945868945869, "grad_norm": 0.6450608968734741, "learning_rate": 0.00016396129687215198, "loss": 1.1119, "step": 6285 }, { "epoch": 1.1191239316239316, "grad_norm": 0.7219904661178589, "learning_rate": 0.00016395053642915896, "loss": 0.9081, "step": 6286 }, { "epoch": 1.1193019943019944, "grad_norm": 0.6189733147621155, "learning_rate": 0.00016393977473318802, "loss": 0.9818, "step": 6287 }, { "epoch": 1.119480056980057, "grad_norm": 0.6310907602310181, "learning_rate": 0.00016392901178445004, "loss": 1.0334, "step": 6288 }, { "epoch": 1.1196581196581197, "grad_norm": 
0.6556720733642578, "learning_rate": 0.00016391824758315587, "loss": 1.0452, "step": 6289 }, { "epoch": 1.1198361823361824, "grad_norm": 0.6697782278060913, "learning_rate": 0.00016390748212951638, "loss": 0.9627, "step": 6290 }, { "epoch": 1.120014245014245, "grad_norm": 0.6341549754142761, "learning_rate": 0.00016389671542374256, "loss": 1.112, "step": 6291 }, { "epoch": 1.1201923076923077, "grad_norm": 0.6913946270942688, "learning_rate": 0.00016388594746604535, "loss": 0.9622, "step": 6292 }, { "epoch": 1.1203703703703705, "grad_norm": 0.695488691329956, "learning_rate": 0.0001638751782566357, "loss": 1.0951, "step": 6293 }, { "epoch": 1.120548433048433, "grad_norm": 0.6965359449386597, "learning_rate": 0.00016386440779572463, "loss": 1.1742, "step": 6294 }, { "epoch": 1.1207264957264957, "grad_norm": 0.624679684638977, "learning_rate": 0.00016385363608352314, "loss": 0.9756, "step": 6295 }, { "epoch": 1.1209045584045585, "grad_norm": 0.7511318922042847, "learning_rate": 0.0001638428631202423, "loss": 0.907, "step": 6296 }, { "epoch": 1.121082621082621, "grad_norm": 0.5334641337394714, "learning_rate": 0.00016383208890609317, "loss": 0.7932, "step": 6297 }, { "epoch": 1.1212606837606838, "grad_norm": 0.7518552541732788, "learning_rate": 0.00016382131344128687, "loss": 1.1556, "step": 6298 }, { "epoch": 1.1214387464387465, "grad_norm": 0.618618369102478, "learning_rate": 0.00016381053672603449, "loss": 1.1027, "step": 6299 }, { "epoch": 1.121616809116809, "grad_norm": 0.638956606388092, "learning_rate": 0.00016379975876054724, "loss": 1.0377, "step": 6300 }, { "epoch": 1.1217948717948718, "grad_norm": 0.8031370639801025, "learning_rate": 0.0001637889795450362, "loss": 1.0821, "step": 6301 }, { "epoch": 1.1219729344729346, "grad_norm": 0.6710168123245239, "learning_rate": 0.00016377819907971265, "loss": 1.2896, "step": 6302 }, { "epoch": 1.122150997150997, "grad_norm": 0.5850739479064941, "learning_rate": 0.00016376741736478777, "loss": 1.0836, "step": 6303 }, { 
"epoch": 1.1223290598290598, "grad_norm": 0.6410611271858215, "learning_rate": 0.0001637566344004728, "loss": 1.0395, "step": 6304 }, { "epoch": 1.1225071225071226, "grad_norm": 0.6884660720825195, "learning_rate": 0.00016374585018697903, "loss": 0.871, "step": 6305 }, { "epoch": 1.1226851851851851, "grad_norm": 0.622207522392273, "learning_rate": 0.00016373506472451777, "loss": 0.9897, "step": 6306 }, { "epoch": 1.1228632478632479, "grad_norm": 0.6018275618553162, "learning_rate": 0.00016372427801330028, "loss": 0.8398, "step": 6307 }, { "epoch": 1.1230413105413106, "grad_norm": 0.6451539993286133, "learning_rate": 0.00016371349005353796, "loss": 0.9878, "step": 6308 }, { "epoch": 1.1232193732193732, "grad_norm": 0.5549424886703491, "learning_rate": 0.00016370270084544215, "loss": 0.844, "step": 6309 }, { "epoch": 1.123397435897436, "grad_norm": 0.6082940697669983, "learning_rate": 0.00016369191038922423, "loss": 1.0704, "step": 6310 }, { "epoch": 1.1235754985754987, "grad_norm": 0.6423100829124451, "learning_rate": 0.00016368111868509563, "loss": 1.0639, "step": 6311 }, { "epoch": 1.1237535612535612, "grad_norm": 0.6274200081825256, "learning_rate": 0.00016367032573326784, "loss": 0.9996, "step": 6312 }, { "epoch": 1.123931623931624, "grad_norm": 0.6618558168411255, "learning_rate": 0.00016365953153395227, "loss": 0.8074, "step": 6313 }, { "epoch": 1.1241096866096867, "grad_norm": 0.7624069452285767, "learning_rate": 0.00016364873608736038, "loss": 0.9741, "step": 6314 }, { "epoch": 1.1242877492877492, "grad_norm": 0.5391361117362976, "learning_rate": 0.00016363793939370375, "loss": 0.6992, "step": 6315 }, { "epoch": 1.124465811965812, "grad_norm": 0.7564396858215332, "learning_rate": 0.0001636271414531939, "loss": 1.1971, "step": 6316 }, { "epoch": 1.1246438746438747, "grad_norm": 0.6584066152572632, "learning_rate": 0.00016361634226604239, "loss": 1.0842, "step": 6317 }, { "epoch": 1.1248219373219372, "grad_norm": 0.6851227283477783, "learning_rate": 
0.00016360554183246078, "loss": 1.0879, "step": 6318 }, { "epoch": 1.125, "grad_norm": 0.5699417591094971, "learning_rate": 0.00016359474015266074, "loss": 0.782, "step": 6319 }, { "epoch": 1.1251780626780628, "grad_norm": 0.5495570302009583, "learning_rate": 0.00016358393722685385, "loss": 1.076, "step": 6320 }, { "epoch": 1.1253561253561253, "grad_norm": 0.5872206091880798, "learning_rate": 0.0001635731330552518, "loss": 0.8601, "step": 6321 }, { "epoch": 1.125534188034188, "grad_norm": 0.7012827396392822, "learning_rate": 0.00016356232763806627, "loss": 1.0443, "step": 6322 }, { "epoch": 1.1257122507122508, "grad_norm": 0.6645881533622742, "learning_rate": 0.00016355152097550897, "loss": 1.0027, "step": 6323 }, { "epoch": 1.1258903133903133, "grad_norm": 0.7376120090484619, "learning_rate": 0.00016354071306779163, "loss": 1.1941, "step": 6324 }, { "epoch": 1.126068376068376, "grad_norm": 0.648932695388794, "learning_rate": 0.000163529903915126, "loss": 1.096, "step": 6325 }, { "epoch": 1.1262464387464388, "grad_norm": 0.6186314821243286, "learning_rate": 0.0001635190935177239, "loss": 1.011, "step": 6326 }, { "epoch": 1.1264245014245013, "grad_norm": 0.5964710116386414, "learning_rate": 0.0001635082818757971, "loss": 0.8893, "step": 6327 }, { "epoch": 1.126602564102564, "grad_norm": 0.5264934301376343, "learning_rate": 0.00016349746898955747, "loss": 0.7325, "step": 6328 }, { "epoch": 1.1267806267806268, "grad_norm": 0.6523048877716064, "learning_rate": 0.00016348665485921678, "loss": 1.0488, "step": 6329 }, { "epoch": 1.1269586894586894, "grad_norm": 0.6878600120544434, "learning_rate": 0.00016347583948498703, "loss": 1.0926, "step": 6330 }, { "epoch": 1.1271367521367521, "grad_norm": 0.592656672000885, "learning_rate": 0.00016346502286708004, "loss": 0.978, "step": 6331 }, { "epoch": 1.1273148148148149, "grad_norm": 0.6338315606117249, "learning_rate": 0.00016345420500570777, "loss": 1.1048, "step": 6332 }, { "epoch": 1.1274928774928774, "grad_norm": 
0.5955204367637634, "learning_rate": 0.00016344338590108218, "loss": 0.88, "step": 6333 }, { "epoch": 1.1276709401709402, "grad_norm": 0.690448522567749, "learning_rate": 0.0001634325655534152, "loss": 1.0564, "step": 6334 }, { "epoch": 1.127849002849003, "grad_norm": 0.6125795841217041, "learning_rate": 0.00016342174396291888, "loss": 1.0608, "step": 6335 }, { "epoch": 1.1280270655270654, "grad_norm": 0.6387807726860046, "learning_rate": 0.00016341092112980523, "loss": 0.9581, "step": 6336 }, { "epoch": 1.1282051282051282, "grad_norm": 0.6247823238372803, "learning_rate": 0.0001634000970542863, "loss": 0.932, "step": 6337 }, { "epoch": 1.128383190883191, "grad_norm": 0.5928077697753906, "learning_rate": 0.0001633892717365742, "loss": 0.8963, "step": 6338 }, { "epoch": 1.1285612535612535, "grad_norm": 0.5922074913978577, "learning_rate": 0.000163378445176881, "loss": 0.9772, "step": 6339 }, { "epoch": 1.1287393162393162, "grad_norm": 0.6573056578636169, "learning_rate": 0.00016336761737541878, "loss": 0.8233, "step": 6340 }, { "epoch": 1.128917378917379, "grad_norm": 0.627772867679596, "learning_rate": 0.0001633567883323998, "loss": 0.9618, "step": 6341 }, { "epoch": 1.1290954415954415, "grad_norm": 0.6066579818725586, "learning_rate": 0.0001633459580480361, "loss": 0.9066, "step": 6342 }, { "epoch": 1.1292735042735043, "grad_norm": 0.670295000076294, "learning_rate": 0.00016333512652253997, "loss": 0.8003, "step": 6343 }, { "epoch": 1.129451566951567, "grad_norm": 0.6402488946914673, "learning_rate": 0.0001633242937561236, "loss": 0.998, "step": 6344 }, { "epoch": 1.1296296296296295, "grad_norm": 0.7224995493888855, "learning_rate": 0.00016331345974899923, "loss": 1.0308, "step": 6345 }, { "epoch": 1.1298076923076923, "grad_norm": 0.5019716620445251, "learning_rate": 0.00016330262450137917, "loss": 0.6874, "step": 6346 }, { "epoch": 1.129985754985755, "grad_norm": 0.5774167776107788, "learning_rate": 0.00016329178801347566, "loss": 0.8287, "step": 6347 }, { 
"epoch": 1.1301638176638176, "grad_norm": 0.7797795534133911, "learning_rate": 0.00016328095028550103, "loss": 1.2145, "step": 6348 }, { "epoch": 1.1303418803418803, "grad_norm": 0.5384017825126648, "learning_rate": 0.00016327011131766765, "loss": 0.8022, "step": 6349 }, { "epoch": 1.130519943019943, "grad_norm": 0.6350888609886169, "learning_rate": 0.00016325927111018786, "loss": 1.1178, "step": 6350 }, { "epoch": 1.1306980056980056, "grad_norm": 0.6386831998825073, "learning_rate": 0.0001632484296632741, "loss": 0.967, "step": 6351 }, { "epoch": 1.1308760683760684, "grad_norm": 0.6214167475700378, "learning_rate": 0.0001632375869771387, "loss": 0.9416, "step": 6352 }, { "epoch": 1.131054131054131, "grad_norm": 0.6145567297935486, "learning_rate": 0.00016322674305199416, "loss": 0.9175, "step": 6353 }, { "epoch": 1.1312321937321936, "grad_norm": 0.7027857303619385, "learning_rate": 0.00016321589788805297, "loss": 1.0063, "step": 6354 }, { "epoch": 1.1314102564102564, "grad_norm": 0.6942669153213501, "learning_rate": 0.00016320505148552755, "loss": 0.9191, "step": 6355 }, { "epoch": 1.1315883190883191, "grad_norm": 0.6388658285140991, "learning_rate": 0.0001631942038446304, "loss": 0.993, "step": 6356 }, { "epoch": 1.131766381766382, "grad_norm": 0.6627292633056641, "learning_rate": 0.00016318335496557415, "loss": 1.0055, "step": 6357 }, { "epoch": 1.1319444444444444, "grad_norm": 0.7997342944145203, "learning_rate": 0.0001631725048485713, "loss": 0.9019, "step": 6358 }, { "epoch": 1.1321225071225072, "grad_norm": 0.8817830681800842, "learning_rate": 0.00016316165349383445, "loss": 0.9793, "step": 6359 }, { "epoch": 1.1323005698005697, "grad_norm": 0.5629408955574036, "learning_rate": 0.00016315080090157621, "loss": 0.6139, "step": 6360 }, { "epoch": 1.1324786324786325, "grad_norm": 0.647220253944397, "learning_rate": 0.0001631399470720092, "loss": 0.9776, "step": 6361 }, { "epoch": 1.1326566951566952, "grad_norm": 0.6762630939483643, "learning_rate": 
0.0001631290920053461, "loss": 1.1027, "step": 6362 }, { "epoch": 1.132834757834758, "grad_norm": 0.5862727761268616, "learning_rate": 0.00016311823570179957, "loss": 1.1359, "step": 6363 }, { "epoch": 1.1330128205128205, "grad_norm": 0.7042981386184692, "learning_rate": 0.00016310737816158235, "loss": 1.142, "step": 6364 }, { "epoch": 1.1331908831908832, "grad_norm": 0.5990639328956604, "learning_rate": 0.00016309651938490712, "loss": 0.9306, "step": 6365 }, { "epoch": 1.1333689458689458, "grad_norm": 0.5894871950149536, "learning_rate": 0.00016308565937198669, "loss": 0.8343, "step": 6366 }, { "epoch": 1.1335470085470085, "grad_norm": 0.6863628029823303, "learning_rate": 0.0001630747981230338, "loss": 0.9552, "step": 6367 }, { "epoch": 1.1337250712250713, "grad_norm": 0.7438958287239075, "learning_rate": 0.00016306393563826128, "loss": 1.0422, "step": 6368 }, { "epoch": 1.133903133903134, "grad_norm": 0.5695775747299194, "learning_rate": 0.00016305307191788194, "loss": 0.8633, "step": 6369 }, { "epoch": 1.1340811965811965, "grad_norm": 0.6257741451263428, "learning_rate": 0.00016304220696210863, "loss": 1.0333, "step": 6370 }, { "epoch": 1.1342592592592593, "grad_norm": 0.6366072297096252, "learning_rate": 0.00016303134077115425, "loss": 1.1452, "step": 6371 }, { "epoch": 1.1344373219373218, "grad_norm": 0.624569296836853, "learning_rate": 0.00016302047334523168, "loss": 1.0569, "step": 6372 }, { "epoch": 1.1346153846153846, "grad_norm": 0.5585938096046448, "learning_rate": 0.00016300960468455382, "loss": 0.9612, "step": 6373 }, { "epoch": 1.1347934472934473, "grad_norm": 0.5738831162452698, "learning_rate": 0.00016299873478933368, "loss": 0.9206, "step": 6374 }, { "epoch": 1.13497150997151, "grad_norm": 0.6797143220901489, "learning_rate": 0.00016298786365978417, "loss": 1.0748, "step": 6375 }, { "epoch": 1.1351495726495726, "grad_norm": 0.6341326832771301, "learning_rate": 0.00016297699129611833, "loss": 0.9901, "step": 6376 }, { "epoch": 1.1353276353276354, 
"grad_norm": 0.6568490862846375, "learning_rate": 0.00016296611769854916, "loss": 1.0598, "step": 6377 }, { "epoch": 1.135505698005698, "grad_norm": 0.6151928901672363, "learning_rate": 0.00016295524286728973, "loss": 0.8352, "step": 6378 }, { "epoch": 1.1356837606837606, "grad_norm": 0.7209593057632446, "learning_rate": 0.0001629443668025531, "loss": 0.9945, "step": 6379 }, { "epoch": 1.1358618233618234, "grad_norm": 0.6600689888000488, "learning_rate": 0.00016293348950455235, "loss": 1.0572, "step": 6380 }, { "epoch": 1.1360398860398861, "grad_norm": 0.5587523579597473, "learning_rate": 0.0001629226109735006, "loss": 0.8526, "step": 6381 }, { "epoch": 1.1362179487179487, "grad_norm": 0.6184542775154114, "learning_rate": 0.00016291173120961102, "loss": 0.8246, "step": 6382 }, { "epoch": 1.1363960113960114, "grad_norm": 0.6604713797569275, "learning_rate": 0.00016290085021309673, "loss": 1.0349, "step": 6383 }, { "epoch": 1.136574074074074, "grad_norm": 0.5880835056304932, "learning_rate": 0.00016288996798417097, "loss": 0.8726, "step": 6384 }, { "epoch": 1.1367521367521367, "grad_norm": 0.5770880579948425, "learning_rate": 0.00016287908452304692, "loss": 0.7639, "step": 6385 }, { "epoch": 1.1369301994301995, "grad_norm": 0.5719713568687439, "learning_rate": 0.00016286819982993782, "loss": 0.9717, "step": 6386 }, { "epoch": 1.1371082621082622, "grad_norm": 0.7028461694717407, "learning_rate": 0.00016285731390505695, "loss": 1.0147, "step": 6387 }, { "epoch": 1.1372863247863247, "grad_norm": 0.5396828651428223, "learning_rate": 0.00016284642674861756, "loss": 0.8119, "step": 6388 }, { "epoch": 1.1374643874643875, "grad_norm": 0.592580258846283, "learning_rate": 0.00016283553836083303, "loss": 1.0914, "step": 6389 }, { "epoch": 1.13764245014245, "grad_norm": 0.634596586227417, "learning_rate": 0.00016282464874191663, "loss": 1.1037, "step": 6390 }, { "epoch": 1.1378205128205128, "grad_norm": 0.6462705731391907, "learning_rate": 0.00016281375789208176, "loss": 1.1523, 
"step": 6391 }, { "epoch": 1.1379985754985755, "grad_norm": 0.6527917385101318, "learning_rate": 0.0001628028658115418, "loss": 1.0415, "step": 6392 }, { "epoch": 1.1381766381766383, "grad_norm": 0.6309964060783386, "learning_rate": 0.00016279197250051013, "loss": 0.9747, "step": 6393 }, { "epoch": 1.1383547008547008, "grad_norm": 0.6342993974685669, "learning_rate": 0.00016278107795920018, "loss": 0.9897, "step": 6394 }, { "epoch": 1.1385327635327636, "grad_norm": 0.7149887084960938, "learning_rate": 0.00016277018218782544, "loss": 0.9659, "step": 6395 }, { "epoch": 1.138710826210826, "grad_norm": 0.7219462394714355, "learning_rate": 0.00016275928518659938, "loss": 0.9301, "step": 6396 }, { "epoch": 1.1388888888888888, "grad_norm": 0.6649485230445862, "learning_rate": 0.0001627483869557355, "loss": 0.9012, "step": 6397 }, { "epoch": 1.1390669515669516, "grad_norm": 0.6910027861595154, "learning_rate": 0.00016273748749544731, "loss": 0.956, "step": 6398 }, { "epoch": 1.1392450142450143, "grad_norm": 0.6369016766548157, "learning_rate": 0.00016272658680594837, "loss": 0.8027, "step": 6399 }, { "epoch": 1.1394230769230769, "grad_norm": 0.6540524959564209, "learning_rate": 0.00016271568488745227, "loss": 1.2397, "step": 6400 }, { "epoch": 1.1396011396011396, "grad_norm": 0.5912376046180725, "learning_rate": 0.00016270478174017263, "loss": 0.8453, "step": 6401 }, { "epoch": 1.1397792022792024, "grad_norm": 0.6847240924835205, "learning_rate": 0.00016269387736432303, "loss": 0.9776, "step": 6402 }, { "epoch": 1.139957264957265, "grad_norm": 0.6465024352073669, "learning_rate": 0.00016268297176011716, "loss": 0.8971, "step": 6403 }, { "epoch": 1.1401353276353277, "grad_norm": 0.6639063954353333, "learning_rate": 0.00016267206492776866, "loss": 0.9756, "step": 6404 }, { "epoch": 1.1403133903133904, "grad_norm": 0.6343763470649719, "learning_rate": 0.00016266115686749123, "loss": 0.9368, "step": 6405 }, { "epoch": 1.140491452991453, "grad_norm": 0.7144993543624878, 
"learning_rate": 0.0001626502475794986, "loss": 0.9285, "step": 6406 }, { "epoch": 1.1406695156695157, "grad_norm": 0.6217414736747742, "learning_rate": 0.00016263933706400451, "loss": 0.8867, "step": 6407 }, { "epoch": 1.1408475783475784, "grad_norm": 0.6843730807304382, "learning_rate": 0.00016262842532122274, "loss": 0.9863, "step": 6408 }, { "epoch": 1.141025641025641, "grad_norm": 0.6866166591644287, "learning_rate": 0.00016261751235136705, "loss": 1.0517, "step": 6409 }, { "epoch": 1.1412037037037037, "grad_norm": 0.6650584936141968, "learning_rate": 0.0001626065981546513, "loss": 1.0629, "step": 6410 }, { "epoch": 1.1413817663817665, "grad_norm": 0.5805012583732605, "learning_rate": 0.00016259568273128933, "loss": 0.8175, "step": 6411 }, { "epoch": 1.141559829059829, "grad_norm": 0.7005903124809265, "learning_rate": 0.00016258476608149497, "loss": 1.0267, "step": 6412 }, { "epoch": 1.1417378917378918, "grad_norm": 0.6293461322784424, "learning_rate": 0.00016257384820548217, "loss": 1.1034, "step": 6413 }, { "epoch": 1.1419159544159545, "grad_norm": 0.6281774640083313, "learning_rate": 0.00016256292910346476, "loss": 1.0775, "step": 6414 }, { "epoch": 1.142094017094017, "grad_norm": 0.5912862420082092, "learning_rate": 0.0001625520087756567, "loss": 0.9589, "step": 6415 }, { "epoch": 1.1422720797720798, "grad_norm": 0.5813978314399719, "learning_rate": 0.00016254108722227198, "loss": 0.9195, "step": 6416 }, { "epoch": 1.1424501424501425, "grad_norm": 0.650805652141571, "learning_rate": 0.00016253016444352458, "loss": 1.0207, "step": 6417 }, { "epoch": 1.142628205128205, "grad_norm": 0.6909520030021667, "learning_rate": 0.00016251924043962851, "loss": 0.9854, "step": 6418 }, { "epoch": 1.1428062678062678, "grad_norm": 0.6054595112800598, "learning_rate": 0.0001625083152107978, "loss": 0.852, "step": 6419 }, { "epoch": 1.1429843304843306, "grad_norm": 0.601078987121582, "learning_rate": 0.00016249738875724647, "loss": 0.9609, "step": 6420 }, { "epoch": 
1.143162393162393, "grad_norm": 0.5340180397033691, "learning_rate": 0.00016248646107918868, "loss": 0.8364, "step": 6421 }, { "epoch": 1.1433404558404558, "grad_norm": 0.6687821745872498, "learning_rate": 0.00016247553217683846, "loss": 1.005, "step": 6422 }, { "epoch": 1.1435185185185186, "grad_norm": 0.6347902417182922, "learning_rate": 0.00016246460205040998, "loss": 1.026, "step": 6423 }, { "epoch": 1.1436965811965811, "grad_norm": 0.6136734485626221, "learning_rate": 0.00016245367070011736, "loss": 0.7811, "step": 6424 }, { "epoch": 1.1438746438746439, "grad_norm": 0.6591334342956543, "learning_rate": 0.00016244273812617482, "loss": 0.991, "step": 6425 }, { "epoch": 1.1440527065527066, "grad_norm": 0.6062475442886353, "learning_rate": 0.00016243180432879656, "loss": 0.9879, "step": 6426 }, { "epoch": 1.1442307692307692, "grad_norm": 0.5941380858421326, "learning_rate": 0.00016242086930819678, "loss": 0.9771, "step": 6427 }, { "epoch": 1.144408831908832, "grad_norm": 0.7320533990859985, "learning_rate": 0.00016240993306458973, "loss": 1.0919, "step": 6428 }, { "epoch": 1.1445868945868947, "grad_norm": 0.6998075246810913, "learning_rate": 0.00016239899559818962, "loss": 1.0721, "step": 6429 }, { "epoch": 1.1447649572649572, "grad_norm": 0.847931444644928, "learning_rate": 0.0001623880569092109, "loss": 0.8759, "step": 6430 }, { "epoch": 1.14494301994302, "grad_norm": 0.6670104265213013, "learning_rate": 0.00016237711699786775, "loss": 1.0515, "step": 6431 }, { "epoch": 1.1451210826210827, "grad_norm": 0.601759672164917, "learning_rate": 0.00016236617586437463, "loss": 0.7298, "step": 6432 }, { "epoch": 1.1452991452991452, "grad_norm": 0.6411594152450562, "learning_rate": 0.00016235523350894578, "loss": 0.9336, "step": 6433 }, { "epoch": 1.145477207977208, "grad_norm": 0.6485120058059692, "learning_rate": 0.0001623442899317957, "loss": 1.1215, "step": 6434 }, { "epoch": 1.1456552706552707, "grad_norm": 0.6041508316993713, "learning_rate": 0.00016233334513313875, 
"loss": 0.8917, "step": 6435 }, { "epoch": 1.1458333333333333, "grad_norm": 0.6292745471000671, "learning_rate": 0.0001623223991131894, "loss": 0.9976, "step": 6436 }, { "epoch": 1.146011396011396, "grad_norm": 0.5442200303077698, "learning_rate": 0.0001623114518721621, "loss": 0.8072, "step": 6437 }, { "epoch": 1.1461894586894588, "grad_norm": 0.6668170094490051, "learning_rate": 0.00016230050341027136, "loss": 0.9641, "step": 6438 }, { "epoch": 1.1463675213675213, "grad_norm": 0.644186794757843, "learning_rate": 0.00016228955372773164, "loss": 0.9248, "step": 6439 }, { "epoch": 1.146545584045584, "grad_norm": 0.6661991477012634, "learning_rate": 0.00016227860282475753, "loss": 0.8719, "step": 6440 }, { "epoch": 1.1467236467236468, "grad_norm": 0.5232062935829163, "learning_rate": 0.00016226765070156355, "loss": 0.5418, "step": 6441 }, { "epoch": 1.1469017094017093, "grad_norm": 0.573176383972168, "learning_rate": 0.00016225669735836436, "loss": 1.0858, "step": 6442 }, { "epoch": 1.147079772079772, "grad_norm": 0.6137439608573914, "learning_rate": 0.00016224574279537446, "loss": 1.1205, "step": 6443 }, { "epoch": 1.1472578347578348, "grad_norm": 0.6328136920928955, "learning_rate": 0.00016223478701280855, "loss": 0.8957, "step": 6444 }, { "epoch": 1.1474358974358974, "grad_norm": 0.6687374114990234, "learning_rate": 0.00016222383001088126, "loss": 1.0318, "step": 6445 }, { "epoch": 1.14761396011396, "grad_norm": 0.6057115793228149, "learning_rate": 0.0001622128717898073, "loss": 0.9575, "step": 6446 }, { "epoch": 1.1477920227920229, "grad_norm": 0.6758735775947571, "learning_rate": 0.0001622019123498013, "loss": 1.2273, "step": 6447 }, { "epoch": 1.1479700854700854, "grad_norm": 0.6233550310134888, "learning_rate": 0.0001621909516910781, "loss": 0.7875, "step": 6448 }, { "epoch": 1.1481481481481481, "grad_norm": 0.6371827721595764, "learning_rate": 0.0001621799898138524, "loss": 1.0488, "step": 6449 }, { "epoch": 1.148326210826211, "grad_norm": 0.6179831624031067, 
"learning_rate": 0.00016216902671833892, "loss": 0.9792, "step": 6450 }, { "epoch": 1.1485042735042734, "grad_norm": 0.6234193444252014, "learning_rate": 0.00016215806240475256, "loss": 0.927, "step": 6451 }, { "epoch": 1.1486823361823362, "grad_norm": 0.6940563917160034, "learning_rate": 0.00016214709687330803, "loss": 1.047, "step": 6452 }, { "epoch": 1.148860398860399, "grad_norm": 0.6567606925964355, "learning_rate": 0.00016213613012422027, "loss": 0.9695, "step": 6453 }, { "epoch": 1.1490384615384615, "grad_norm": 0.7374183535575867, "learning_rate": 0.0001621251621577041, "loss": 1.0443, "step": 6454 }, { "epoch": 1.1492165242165242, "grad_norm": 0.6789869666099548, "learning_rate": 0.00016211419297397443, "loss": 1.0319, "step": 6455 }, { "epoch": 1.149394586894587, "grad_norm": 0.6225521564483643, "learning_rate": 0.00016210322257324619, "loss": 1.0529, "step": 6456 }, { "epoch": 1.1495726495726495, "grad_norm": 0.619701623916626, "learning_rate": 0.00016209225095573432, "loss": 0.962, "step": 6457 }, { "epoch": 1.1497507122507122, "grad_norm": 0.6132834553718567, "learning_rate": 0.00016208127812165375, "loss": 0.9588, "step": 6458 }, { "epoch": 1.149928774928775, "grad_norm": 0.6005367040634155, "learning_rate": 0.00016207030407121954, "loss": 0.9497, "step": 6459 }, { "epoch": 1.1501068376068375, "grad_norm": 0.575309157371521, "learning_rate": 0.00016205932880464664, "loss": 1.0035, "step": 6460 }, { "epoch": 1.1502849002849003, "grad_norm": 0.5958710312843323, "learning_rate": 0.0001620483523221501, "loss": 1.0004, "step": 6461 }, { "epoch": 1.150462962962963, "grad_norm": 0.5934719443321228, "learning_rate": 0.000162037374623945, "loss": 0.8694, "step": 6462 }, { "epoch": 1.1506410256410255, "grad_norm": 0.6042510271072388, "learning_rate": 0.00016202639571024643, "loss": 0.8598, "step": 6463 }, { "epoch": 1.1508190883190883, "grad_norm": 0.6206158399581909, "learning_rate": 0.00016201541558126946, "loss": 0.961, "step": 6464 }, { "epoch": 
1.150997150997151, "grad_norm": 0.5997715592384338, "learning_rate": 0.00016200443423722925, "loss": 0.8686, "step": 6465 }, { "epoch": 1.1511752136752136, "grad_norm": 0.742457926273346, "learning_rate": 0.00016199345167834098, "loss": 1.1113, "step": 6466 }, { "epoch": 1.1513532763532763, "grad_norm": 0.6772766709327698, "learning_rate": 0.00016198246790481976, "loss": 1.0717, "step": 6467 }, { "epoch": 1.151531339031339, "grad_norm": 0.6127712726593018, "learning_rate": 0.0001619714829168809, "loss": 0.8887, "step": 6468 }, { "epoch": 1.1517094017094016, "grad_norm": 0.5585067272186279, "learning_rate": 0.00016196049671473954, "loss": 1.0144, "step": 6469 }, { "epoch": 1.1518874643874644, "grad_norm": 0.6269431710243225, "learning_rate": 0.00016194950929861092, "loss": 1.0206, "step": 6470 }, { "epoch": 1.1520655270655271, "grad_norm": 0.6270785331726074, "learning_rate": 0.0001619385206687104, "loss": 1.0517, "step": 6471 }, { "epoch": 1.1522435897435896, "grad_norm": 0.744712233543396, "learning_rate": 0.00016192753082525322, "loss": 1.0699, "step": 6472 }, { "epoch": 1.1524216524216524, "grad_norm": 0.7025929689407349, "learning_rate": 0.00016191653976845474, "loss": 0.951, "step": 6473 }, { "epoch": 1.1525997150997151, "grad_norm": 0.6175379753112793, "learning_rate": 0.00016190554749853024, "loss": 1.2153, "step": 6474 }, { "epoch": 1.1527777777777777, "grad_norm": 0.6212149858474731, "learning_rate": 0.00016189455401569513, "loss": 1.0428, "step": 6475 }, { "epoch": 1.1529558404558404, "grad_norm": 0.6716817617416382, "learning_rate": 0.00016188355932016484, "loss": 1.179, "step": 6476 }, { "epoch": 1.1531339031339032, "grad_norm": 0.6247739791870117, "learning_rate": 0.00016187256341215476, "loss": 0.9451, "step": 6477 }, { "epoch": 1.153311965811966, "grad_norm": 0.6223008036613464, "learning_rate": 0.00016186156629188032, "loss": 0.9915, "step": 6478 }, { "epoch": 1.1534900284900285, "grad_norm": 0.5610866546630859, "learning_rate": 
0.000161850567959557, "loss": 0.7741, "step": 6479 }, { "epoch": 1.1536680911680912, "grad_norm": 0.6241226196289062, "learning_rate": 0.0001618395684154003, "loss": 1.2193, "step": 6480 }, { "epoch": 1.1538461538461537, "grad_norm": 0.703789472579956, "learning_rate": 0.00016182856765962567, "loss": 1.0725, "step": 6481 }, { "epoch": 1.1540242165242165, "grad_norm": 0.6802006959915161, "learning_rate": 0.00016181756569244872, "loss": 1.0908, "step": 6482 }, { "epoch": 1.1542022792022792, "grad_norm": 0.6504136919975281, "learning_rate": 0.000161806562514085, "loss": 0.9706, "step": 6483 }, { "epoch": 1.154380341880342, "grad_norm": 0.7217034101486206, "learning_rate": 0.00016179555812475003, "loss": 0.9084, "step": 6484 }, { "epoch": 1.1545584045584045, "grad_norm": 0.5919039249420166, "learning_rate": 0.0001617845525246595, "loss": 0.949, "step": 6485 }, { "epoch": 1.1547364672364673, "grad_norm": 0.6160184741020203, "learning_rate": 0.00016177354571402902, "loss": 0.8144, "step": 6486 }, { "epoch": 1.1549145299145298, "grad_norm": 0.7323806285858154, "learning_rate": 0.00016176253769307426, "loss": 1.0528, "step": 6487 }, { "epoch": 1.1550925925925926, "grad_norm": 0.6051317453384399, "learning_rate": 0.0001617515284620108, "loss": 0.9558, "step": 6488 }, { "epoch": 1.1552706552706553, "grad_norm": 0.6418905258178711, "learning_rate": 0.00016174051802105447, "loss": 1.062, "step": 6489 }, { "epoch": 1.155448717948718, "grad_norm": 0.6914883852005005, "learning_rate": 0.00016172950637042096, "loss": 0.9999, "step": 6490 }, { "epoch": 1.1556267806267806, "grad_norm": 0.5558316707611084, "learning_rate": 0.000161718493510326, "loss": 0.9561, "step": 6491 }, { "epoch": 1.1558048433048433, "grad_norm": 0.6632496118545532, "learning_rate": 0.00016170747944098531, "loss": 1.0133, "step": 6492 }, { "epoch": 1.1559829059829059, "grad_norm": 0.6407149434089661, "learning_rate": 0.00016169646416261478, "loss": 1.0563, "step": 6493 }, { "epoch": 1.1561609686609686, 
"grad_norm": 0.8128494024276733, "learning_rate": 0.0001616854476754302, "loss": 1.1559, "step": 6494 }, { "epoch": 1.1563390313390314, "grad_norm": 0.6403429508209229, "learning_rate": 0.00016167442997964742, "loss": 1.0983, "step": 6495 }, { "epoch": 1.1565170940170941, "grad_norm": 0.76612788438797, "learning_rate": 0.0001616634110754823, "loss": 0.973, "step": 6496 }, { "epoch": 1.1566951566951567, "grad_norm": 0.6914355754852295, "learning_rate": 0.0001616523909631507, "loss": 0.9307, "step": 6497 }, { "epoch": 1.1568732193732194, "grad_norm": 0.546602725982666, "learning_rate": 0.00016164136964286863, "loss": 1.0328, "step": 6498 }, { "epoch": 1.157051282051282, "grad_norm": 0.5695818662643433, "learning_rate": 0.00016163034711485193, "loss": 0.9607, "step": 6499 }, { "epoch": 1.1572293447293447, "grad_norm": 0.5649738311767578, "learning_rate": 0.00016161932337931662, "loss": 1.1521, "step": 6500 }, { "epoch": 1.1574074074074074, "grad_norm": 0.6437582969665527, "learning_rate": 0.00016160829843647867, "loss": 0.9613, "step": 6501 }, { "epoch": 1.1575854700854702, "grad_norm": 0.5841929316520691, "learning_rate": 0.0001615972722865541, "loss": 0.8187, "step": 6502 }, { "epoch": 1.1577635327635327, "grad_norm": 0.6481246948242188, "learning_rate": 0.00016158624492975892, "loss": 1.0447, "step": 6503 }, { "epoch": 1.1579415954415955, "grad_norm": 0.629804790019989, "learning_rate": 0.0001615752163663092, "loss": 0.9034, "step": 6504 }, { "epoch": 1.158119658119658, "grad_norm": 0.5797054171562195, "learning_rate": 0.00016156418659642104, "loss": 0.8168, "step": 6505 }, { "epoch": 1.1582977207977208, "grad_norm": 0.588424563407898, "learning_rate": 0.00016155315562031052, "loss": 0.828, "step": 6506 }, { "epoch": 1.1584757834757835, "grad_norm": 0.7120068669319153, "learning_rate": 0.0001615421234381938, "loss": 1.0637, "step": 6507 }, { "epoch": 1.1586538461538463, "grad_norm": 0.6635081768035889, "learning_rate": 0.00016153109005028702, "loss": 0.9838, 
"step": 6508 }, { "epoch": 1.1588319088319088, "grad_norm": 0.6080414056777954, "learning_rate": 0.00016152005545680634, "loss": 0.983, "step": 6509 }, { "epoch": 1.1590099715099715, "grad_norm": 0.7131237983703613, "learning_rate": 0.00016150901965796796, "loss": 1.1053, "step": 6510 }, { "epoch": 1.159188034188034, "grad_norm": 0.6051005125045776, "learning_rate": 0.00016149798265398813, "loss": 0.9903, "step": 6511 }, { "epoch": 1.1593660968660968, "grad_norm": 0.6193733811378479, "learning_rate": 0.00016148694444508306, "loss": 1.0478, "step": 6512 }, { "epoch": 1.1595441595441596, "grad_norm": 0.567888081073761, "learning_rate": 0.00016147590503146905, "loss": 0.7995, "step": 6513 }, { "epoch": 1.1597222222222223, "grad_norm": 0.6889783143997192, "learning_rate": 0.00016146486441336242, "loss": 0.9684, "step": 6514 }, { "epoch": 1.1599002849002849, "grad_norm": 0.6470308303833008, "learning_rate": 0.0001614538225909794, "loss": 0.9824, "step": 6515 }, { "epoch": 1.1600783475783476, "grad_norm": 0.6833886504173279, "learning_rate": 0.00016144277956453638, "loss": 0.9845, "step": 6516 }, { "epoch": 1.1602564102564104, "grad_norm": 0.5827815532684326, "learning_rate": 0.00016143173533424978, "loss": 0.9476, "step": 6517 }, { "epoch": 1.1604344729344729, "grad_norm": 0.6701242327690125, "learning_rate": 0.00016142068990033593, "loss": 1.0839, "step": 6518 }, { "epoch": 1.1606125356125356, "grad_norm": 0.5844996571540833, "learning_rate": 0.00016140964326301122, "loss": 0.8861, "step": 6519 }, { "epoch": 1.1607905982905984, "grad_norm": 0.5831994414329529, "learning_rate": 0.00016139859542249214, "loss": 0.9817, "step": 6520 }, { "epoch": 1.160968660968661, "grad_norm": 0.6830124855041504, "learning_rate": 0.0001613875463789951, "loss": 0.8749, "step": 6521 }, { "epoch": 1.1611467236467237, "grad_norm": 0.6003018021583557, "learning_rate": 0.00016137649613273667, "loss": 0.9593, "step": 6522 }, { "epoch": 1.1613247863247864, "grad_norm": 0.5973994731903076, 
"learning_rate": 0.00016136544468393327, "loss": 1.0384, "step": 6523 }, { "epoch": 1.161502849002849, "grad_norm": 0.6702523827552795, "learning_rate": 0.00016135439203280143, "loss": 1.0431, "step": 6524 }, { "epoch": 1.1616809116809117, "grad_norm": 0.6160697937011719, "learning_rate": 0.00016134333817955775, "loss": 1.0339, "step": 6525 }, { "epoch": 1.1618589743589745, "grad_norm": 0.7078264355659485, "learning_rate": 0.0001613322831244188, "loss": 1.0285, "step": 6526 }, { "epoch": 1.162037037037037, "grad_norm": 0.5744216442108154, "learning_rate": 0.00016132122686760117, "loss": 0.6589, "step": 6527 }, { "epoch": 1.1622150997150997, "grad_norm": 0.6802098155021667, "learning_rate": 0.00016131016940932146, "loss": 0.9532, "step": 6528 }, { "epoch": 1.1623931623931625, "grad_norm": 0.6523237228393555, "learning_rate": 0.00016129911074979635, "loss": 0.9409, "step": 6529 }, { "epoch": 1.162571225071225, "grad_norm": 0.710307776927948, "learning_rate": 0.00016128805088924252, "loss": 1.2536, "step": 6530 }, { "epoch": 1.1627492877492878, "grad_norm": 0.6349819898605347, "learning_rate": 0.0001612769898278766, "loss": 1.0857, "step": 6531 }, { "epoch": 1.1629273504273505, "grad_norm": 0.5348139405250549, "learning_rate": 0.00016126592756591542, "loss": 0.5969, "step": 6532 }, { "epoch": 1.163105413105413, "grad_norm": 0.635619580745697, "learning_rate": 0.00016125486410357564, "loss": 0.9885, "step": 6533 }, { "epoch": 1.1632834757834758, "grad_norm": 0.6434559226036072, "learning_rate": 0.000161243799441074, "loss": 0.8377, "step": 6534 }, { "epoch": 1.1634615384615385, "grad_norm": 0.6509647369384766, "learning_rate": 0.00016123273357862737, "loss": 0.8393, "step": 6535 }, { "epoch": 1.163639601139601, "grad_norm": 0.6179081797599792, "learning_rate": 0.0001612216665164525, "loss": 0.9143, "step": 6536 }, { "epoch": 1.1638176638176638, "grad_norm": 0.5923223495483398, "learning_rate": 0.0001612105982547663, "loss": 1.0185, "step": 6537 }, { "epoch": 
1.1639957264957266, "grad_norm": 0.702150285243988, "learning_rate": 0.00016119952879378556, "loss": 0.863, "step": 6538 }, { "epoch": 1.164173789173789, "grad_norm": 0.6596643328666687, "learning_rate": 0.00016118845813372715, "loss": 1.0089, "step": 6539 }, { "epoch": 1.1643518518518519, "grad_norm": 0.7675769329071045, "learning_rate": 0.00016117738627480804, "loss": 1.0179, "step": 6540 }, { "epoch": 1.1645299145299146, "grad_norm": 0.6742541193962097, "learning_rate": 0.00016116631321724513, "loss": 1.0663, "step": 6541 }, { "epoch": 1.1647079772079771, "grad_norm": 0.7379785776138306, "learning_rate": 0.0001611552389612554, "loss": 1.0162, "step": 6542 }, { "epoch": 1.16488603988604, "grad_norm": 0.5729365944862366, "learning_rate": 0.00016114416350705577, "loss": 0.8146, "step": 6543 }, { "epoch": 1.1650641025641026, "grad_norm": 0.6481349468231201, "learning_rate": 0.00016113308685486327, "loss": 1.0748, "step": 6544 }, { "epoch": 1.1652421652421652, "grad_norm": 0.5588181018829346, "learning_rate": 0.00016112200900489493, "loss": 0.7511, "step": 6545 }, { "epoch": 1.165420227920228, "grad_norm": 0.674363911151886, "learning_rate": 0.0001611109299573678, "loss": 0.9852, "step": 6546 }, { "epoch": 1.1655982905982907, "grad_norm": 0.6712620854377747, "learning_rate": 0.00016109984971249893, "loss": 0.9558, "step": 6547 }, { "epoch": 1.1657763532763532, "grad_norm": 0.5260626077651978, "learning_rate": 0.00016108876827050544, "loss": 0.7008, "step": 6548 }, { "epoch": 1.165954415954416, "grad_norm": 0.6056292057037354, "learning_rate": 0.00016107768563160445, "loss": 0.7756, "step": 6549 }, { "epoch": 1.1661324786324787, "grad_norm": 0.5725821256637573, "learning_rate": 0.00016106660179601308, "loss": 0.8228, "step": 6550 }, { "epoch": 1.1663105413105412, "grad_norm": 0.6708397269248962, "learning_rate": 0.00016105551676394848, "loss": 1.0711, "step": 6551 }, { "epoch": 1.166488603988604, "grad_norm": 0.645453155040741, "learning_rate": 0.00016104443053562787, 
"loss": 0.9299, "step": 6552 }, { "epoch": 1.1666666666666667, "grad_norm": 0.6743524074554443, "learning_rate": 0.00016103334311126847, "loss": 0.8977, "step": 6553 }, { "epoch": 1.1668447293447293, "grad_norm": 0.7248545289039612, "learning_rate": 0.0001610222544910875, "loss": 1.2135, "step": 6554 }, { "epoch": 1.167022792022792, "grad_norm": 0.5798853635787964, "learning_rate": 0.00016101116467530217, "loss": 0.857, "step": 6555 }, { "epoch": 1.1672008547008548, "grad_norm": 0.6828082799911499, "learning_rate": 0.00016100007366412985, "loss": 0.9405, "step": 6556 }, { "epoch": 1.1673789173789173, "grad_norm": 0.6820163130760193, "learning_rate": 0.0001609889814577878, "loss": 0.9144, "step": 6557 }, { "epoch": 1.16755698005698, "grad_norm": 0.6482275128364563, "learning_rate": 0.00016097788805649333, "loss": 0.8586, "step": 6558 }, { "epoch": 1.1677350427350428, "grad_norm": 0.6404715180397034, "learning_rate": 0.00016096679346046385, "loss": 0.7018, "step": 6559 }, { "epoch": 1.1679131054131053, "grad_norm": 0.6315203309059143, "learning_rate": 0.0001609556976699167, "loss": 0.9602, "step": 6560 }, { "epoch": 1.168091168091168, "grad_norm": 0.5521387457847595, "learning_rate": 0.00016094460068506925, "loss": 0.9294, "step": 6561 }, { "epoch": 1.1682692307692308, "grad_norm": 0.583372175693512, "learning_rate": 0.00016093350250613895, "loss": 1.077, "step": 6562 }, { "epoch": 1.1684472934472934, "grad_norm": 0.5990512371063232, "learning_rate": 0.00016092240313334325, "loss": 1.0102, "step": 6563 }, { "epoch": 1.1686253561253561, "grad_norm": 0.675128161907196, "learning_rate": 0.00016091130256689964, "loss": 1.0407, "step": 6564 }, { "epoch": 1.1688034188034189, "grad_norm": 0.48797324299812317, "learning_rate": 0.00016090020080702556, "loss": 0.7821, "step": 6565 }, { "epoch": 1.1689814814814814, "grad_norm": 0.7487484216690063, "learning_rate": 0.00016088909785393857, "loss": 1.0444, "step": 6566 }, { "epoch": 1.1691595441595442, "grad_norm": 
0.6288858652114868, "learning_rate": 0.00016087799370785618, "loss": 1.1854, "step": 6567 }, { "epoch": 1.169337606837607, "grad_norm": 0.6639021635055542, "learning_rate": 0.000160866888368996, "loss": 0.9632, "step": 6568 }, { "epoch": 1.1695156695156694, "grad_norm": 0.6553738713264465, "learning_rate": 0.00016085578183757556, "loss": 1.2765, "step": 6569 }, { "epoch": 1.1696937321937322, "grad_norm": 0.7489066123962402, "learning_rate": 0.00016084467411381248, "loss": 1.0705, "step": 6570 }, { "epoch": 1.169871794871795, "grad_norm": 0.7079828381538391, "learning_rate": 0.00016083356519792444, "loss": 0.8256, "step": 6571 }, { "epoch": 1.1700498575498575, "grad_norm": 0.7065926790237427, "learning_rate": 0.00016082245509012902, "loss": 1.0439, "step": 6572 }, { "epoch": 1.1702279202279202, "grad_norm": 0.6113346815109253, "learning_rate": 0.00016081134379064395, "loss": 0.9153, "step": 6573 }, { "epoch": 1.170405982905983, "grad_norm": 0.6094171404838562, "learning_rate": 0.0001608002312996869, "loss": 0.9723, "step": 6574 }, { "epoch": 1.1705840455840455, "grad_norm": 0.6208072900772095, "learning_rate": 0.00016078911761747565, "loss": 0.948, "step": 6575 }, { "epoch": 1.1707621082621082, "grad_norm": 0.5736680626869202, "learning_rate": 0.00016077800274422792, "loss": 0.9155, "step": 6576 }, { "epoch": 1.170940170940171, "grad_norm": 0.6793957948684692, "learning_rate": 0.0001607668866801615, "loss": 0.9574, "step": 6577 }, { "epoch": 1.1711182336182335, "grad_norm": 0.6251805424690247, "learning_rate": 0.00016075576942549413, "loss": 1.0319, "step": 6578 }, { "epoch": 1.1712962962962963, "grad_norm": 0.628882110118866, "learning_rate": 0.0001607446509804437, "loss": 0.9336, "step": 6579 }, { "epoch": 1.171474358974359, "grad_norm": 0.6712356805801392, "learning_rate": 0.000160733531345228, "loss": 1.0958, "step": 6580 }, { "epoch": 1.1716524216524216, "grad_norm": 0.599365770816803, "learning_rate": 0.0001607224105200649, "loss": 0.9814, "step": 6581 }, { 
"epoch": 1.1718304843304843, "grad_norm": 0.5798245668411255, "learning_rate": 0.00016071128850517235, "loss": 1.0355, "step": 6582 }, { "epoch": 1.172008547008547, "grad_norm": 0.7646229863166809, "learning_rate": 0.00016070016530076817, "loss": 0.9976, "step": 6583 }, { "epoch": 1.1721866096866096, "grad_norm": 0.6371127367019653, "learning_rate": 0.0001606890409070704, "loss": 0.9588, "step": 6584 }, { "epoch": 1.1723646723646723, "grad_norm": 0.6497066617012024, "learning_rate": 0.0001606779153242969, "loss": 0.8817, "step": 6585 }, { "epoch": 1.172542735042735, "grad_norm": 0.7255781888961792, "learning_rate": 0.0001606667885526657, "loss": 1.1319, "step": 6586 }, { "epoch": 1.1727207977207976, "grad_norm": 0.67711341381073, "learning_rate": 0.00016065566059239483, "loss": 1.0755, "step": 6587 }, { "epoch": 1.1728988603988604, "grad_norm": 0.6159650087356567, "learning_rate": 0.00016064453144370227, "loss": 0.9892, "step": 6588 }, { "epoch": 1.1730769230769231, "grad_norm": 0.658938467502594, "learning_rate": 0.00016063340110680609, "loss": 0.9131, "step": 6589 }, { "epoch": 1.1732549857549857, "grad_norm": 0.6754795908927917, "learning_rate": 0.00016062226958192438, "loss": 1.0119, "step": 6590 }, { "epoch": 1.1734330484330484, "grad_norm": 0.6453405022621155, "learning_rate": 0.00016061113686927523, "loss": 0.997, "step": 6591 }, { "epoch": 1.1736111111111112, "grad_norm": 0.6580284237861633, "learning_rate": 0.00016060000296907675, "loss": 0.8432, "step": 6592 }, { "epoch": 1.173789173789174, "grad_norm": 0.6588153839111328, "learning_rate": 0.00016058886788154712, "loss": 1.0725, "step": 6593 }, { "epoch": 1.1739672364672364, "grad_norm": 0.6247910857200623, "learning_rate": 0.00016057773160690447, "loss": 0.8736, "step": 6594 }, { "epoch": 1.1741452991452992, "grad_norm": 0.579594075679779, "learning_rate": 0.000160566594145367, "loss": 0.8809, "step": 6595 }, { "epoch": 1.1743233618233617, "grad_norm": 0.6738116145133972, "learning_rate": 
0.00016055545549715293, "loss": 0.825, "step": 6596 }, { "epoch": 1.1745014245014245, "grad_norm": 0.6658982634544373, "learning_rate": 0.00016054431566248054, "loss": 1.0809, "step": 6597 }, { "epoch": 1.1746794871794872, "grad_norm": 0.5367915630340576, "learning_rate": 0.00016053317464156803, "loss": 0.9005, "step": 6598 }, { "epoch": 1.17485754985755, "grad_norm": 0.7243228554725647, "learning_rate": 0.00016052203243463372, "loss": 1.0573, "step": 6599 }, { "epoch": 1.1750356125356125, "grad_norm": 0.6359432935714722, "learning_rate": 0.0001605108890418959, "loss": 0.8569, "step": 6600 }, { "epoch": 1.1752136752136753, "grad_norm": 0.6565225720405579, "learning_rate": 0.0001604997444635729, "loss": 0.9748, "step": 6601 }, { "epoch": 1.1753917378917378, "grad_norm": 0.7124663591384888, "learning_rate": 0.0001604885986998831, "loss": 1.0271, "step": 6602 }, { "epoch": 1.1755698005698005, "grad_norm": 0.659766435623169, "learning_rate": 0.00016047745175104487, "loss": 1.0635, "step": 6603 }, { "epoch": 1.1757478632478633, "grad_norm": 0.5874318480491638, "learning_rate": 0.00016046630361727656, "loss": 0.9257, "step": 6604 }, { "epoch": 1.175925925925926, "grad_norm": 0.587345540523529, "learning_rate": 0.0001604551542987967, "loss": 1.0759, "step": 6605 }, { "epoch": 1.1761039886039886, "grad_norm": 0.733567476272583, "learning_rate": 0.00016044400379582364, "loss": 0.9877, "step": 6606 }, { "epoch": 1.1762820512820513, "grad_norm": 0.6538317203521729, "learning_rate": 0.0001604328521085759, "loss": 1.0094, "step": 6607 }, { "epoch": 1.1764601139601139, "grad_norm": 0.6279696822166443, "learning_rate": 0.00016042169923727195, "loss": 1.1049, "step": 6608 }, { "epoch": 1.1766381766381766, "grad_norm": 0.6949752569198608, "learning_rate": 0.00016041054518213033, "loss": 1.1418, "step": 6609 }, { "epoch": 1.1768162393162394, "grad_norm": 0.6144010424613953, "learning_rate": 0.00016039938994336957, "loss": 1.0306, "step": 6610 }, { "epoch": 1.176994301994302, 
"grad_norm": 0.5868683457374573, "learning_rate": 0.00016038823352120823, "loss": 0.9894, "step": 6611 }, { "epoch": 1.1771723646723646, "grad_norm": 0.7181115746498108, "learning_rate": 0.0001603770759158649, "loss": 1.1674, "step": 6612 }, { "epoch": 1.1773504273504274, "grad_norm": 0.6271308064460754, "learning_rate": 0.00016036591712755818, "loss": 0.9726, "step": 6613 }, { "epoch": 1.17752849002849, "grad_norm": 0.6922675371170044, "learning_rate": 0.00016035475715650668, "loss": 0.9142, "step": 6614 }, { "epoch": 1.1777065527065527, "grad_norm": 0.6838833689689636, "learning_rate": 0.00016034359600292913, "loss": 1.1627, "step": 6615 }, { "epoch": 1.1778846153846154, "grad_norm": 0.6628252267837524, "learning_rate": 0.00016033243366704418, "loss": 0.739, "step": 6616 }, { "epoch": 1.1780626780626782, "grad_norm": 0.6367576122283936, "learning_rate": 0.0001603212701490705, "loss": 0.9015, "step": 6617 }, { "epoch": 1.1782407407407407, "grad_norm": 0.6498967409133911, "learning_rate": 0.00016031010544922687, "loss": 0.9645, "step": 6618 }, { "epoch": 1.1784188034188035, "grad_norm": 0.468795508146286, "learning_rate": 0.00016029893956773198, "loss": 0.7305, "step": 6619 }, { "epoch": 1.178596866096866, "grad_norm": 0.6355500817298889, "learning_rate": 0.00016028777250480465, "loss": 0.9183, "step": 6620 }, { "epoch": 1.1787749287749287, "grad_norm": 0.7582615613937378, "learning_rate": 0.0001602766042606636, "loss": 1.1641, "step": 6621 }, { "epoch": 1.1789529914529915, "grad_norm": 0.580035924911499, "learning_rate": 0.00016026543483552776, "loss": 0.9164, "step": 6622 }, { "epoch": 1.1791310541310542, "grad_norm": 0.6198559999465942, "learning_rate": 0.00016025426422961592, "loss": 0.9803, "step": 6623 }, { "epoch": 1.1793091168091168, "grad_norm": 0.59112149477005, "learning_rate": 0.0001602430924431469, "loss": 0.8645, "step": 6624 }, { "epoch": 1.1794871794871795, "grad_norm": 0.6200533509254456, "learning_rate": 0.00016023191947633965, "loss": 1.068, 
"step": 6625 }, { "epoch": 1.179665242165242, "grad_norm": 0.6077516078948975, "learning_rate": 0.00016022074532941305, "loss": 1.0017, "step": 6626 }, { "epoch": 1.1798433048433048, "grad_norm": 0.6770145893096924, "learning_rate": 0.00016020957000258606, "loss": 0.9022, "step": 6627 }, { "epoch": 1.1800213675213675, "grad_norm": 0.6478054523468018, "learning_rate": 0.0001601983934960776, "loss": 0.8615, "step": 6628 }, { "epoch": 1.1801994301994303, "grad_norm": 0.6528988480567932, "learning_rate": 0.00016018721581010666, "loss": 1.0015, "step": 6629 }, { "epoch": 1.1803774928774928, "grad_norm": 0.6160712242126465, "learning_rate": 0.0001601760369448923, "loss": 0.9382, "step": 6630 }, { "epoch": 1.1805555555555556, "grad_norm": 0.5755789875984192, "learning_rate": 0.00016016485690065345, "loss": 1.0551, "step": 6631 }, { "epoch": 1.180733618233618, "grad_norm": 0.8495022654533386, "learning_rate": 0.00016015367567760925, "loss": 0.9295, "step": 6632 }, { "epoch": 1.1809116809116809, "grad_norm": 0.6010929346084595, "learning_rate": 0.0001601424932759787, "loss": 1.0413, "step": 6633 }, { "epoch": 1.1810897435897436, "grad_norm": 0.6953579187393188, "learning_rate": 0.00016013130969598093, "loss": 1.0149, "step": 6634 }, { "epoch": 1.1812678062678064, "grad_norm": 0.6949529647827148, "learning_rate": 0.0001601201249378351, "loss": 0.9992, "step": 6635 }, { "epoch": 1.181445868945869, "grad_norm": 0.6471893787384033, "learning_rate": 0.00016010893900176028, "loss": 0.7985, "step": 6636 }, { "epoch": 1.1816239316239316, "grad_norm": 0.6524858474731445, "learning_rate": 0.00016009775188797568, "loss": 0.9517, "step": 6637 }, { "epoch": 1.1818019943019944, "grad_norm": 0.639214038848877, "learning_rate": 0.00016008656359670046, "loss": 1.0357, "step": 6638 }, { "epoch": 1.181980056980057, "grad_norm": 0.6039628386497498, "learning_rate": 0.00016007537412815386, "loss": 1.0536, "step": 6639 }, { "epoch": 1.1821581196581197, "grad_norm": 0.653540313243866, 
"learning_rate": 0.00016006418348255507, "loss": 0.9414, "step": 6640 }, { "epoch": 1.1823361823361824, "grad_norm": 0.6331741809844971, "learning_rate": 0.0001600529916601234, "loss": 1.0352, "step": 6641 }, { "epoch": 1.182514245014245, "grad_norm": 0.7552719712257385, "learning_rate": 0.00016004179866107812, "loss": 1.1103, "step": 6642 }, { "epoch": 1.1826923076923077, "grad_norm": 0.6795875430107117, "learning_rate": 0.00016003060448563852, "loss": 1.1246, "step": 6643 }, { "epoch": 1.1828703703703705, "grad_norm": 0.6308842301368713, "learning_rate": 0.0001600194091340239, "loss": 0.9532, "step": 6644 }, { "epoch": 1.183048433048433, "grad_norm": 0.5640553832054138, "learning_rate": 0.00016000821260645366, "loss": 0.7491, "step": 6645 }, { "epoch": 1.1832264957264957, "grad_norm": 0.5611832141876221, "learning_rate": 0.00015999701490314712, "loss": 0.9239, "step": 6646 }, { "epoch": 1.1834045584045585, "grad_norm": 0.5881187915802002, "learning_rate": 0.00015998581602432374, "loss": 0.9246, "step": 6647 }, { "epoch": 1.183582621082621, "grad_norm": 0.7291010022163391, "learning_rate": 0.00015997461597020291, "loss": 1.0314, "step": 6648 }, { "epoch": 1.1837606837606838, "grad_norm": 0.6784794926643372, "learning_rate": 0.00015996341474100402, "loss": 1.0011, "step": 6649 }, { "epoch": 1.1839387464387465, "grad_norm": 0.7083746194839478, "learning_rate": 0.00015995221233694663, "loss": 1.0336, "step": 6650 }, { "epoch": 1.184116809116809, "grad_norm": 0.7081790566444397, "learning_rate": 0.00015994100875825015, "loss": 1.2386, "step": 6651 }, { "epoch": 1.1842948717948718, "grad_norm": 0.5938812494277954, "learning_rate": 0.00015992980400513415, "loss": 0.7549, "step": 6652 }, { "epoch": 1.1844729344729346, "grad_norm": 0.7084267139434814, "learning_rate": 0.00015991859807781811, "loss": 1.1194, "step": 6653 }, { "epoch": 1.184650997150997, "grad_norm": 0.6391362547874451, "learning_rate": 0.0001599073909765216, "loss": 1.0857, "step": 6654 }, { "epoch": 
1.1848290598290598, "grad_norm": 0.8074106574058533, "learning_rate": 0.00015989618270146423, "loss": 1.1715, "step": 6655 }, { "epoch": 1.1850071225071226, "grad_norm": 0.5778565406799316, "learning_rate": 0.0001598849732528656, "loss": 0.8843, "step": 6656 }, { "epoch": 1.1851851851851851, "grad_norm": 0.6955079436302185, "learning_rate": 0.00015987376263094526, "loss": 1.0281, "step": 6657 }, { "epoch": 1.1853632478632479, "grad_norm": 0.6789296269416809, "learning_rate": 0.00015986255083592297, "loss": 0.9739, "step": 6658 }, { "epoch": 1.1855413105413106, "grad_norm": 0.6294292211532593, "learning_rate": 0.00015985133786801834, "loss": 1.0692, "step": 6659 }, { "epoch": 1.1857193732193732, "grad_norm": 0.5604581832885742, "learning_rate": 0.00015984012372745107, "loss": 0.9059, "step": 6660 }, { "epoch": 1.185897435897436, "grad_norm": 0.6727550625801086, "learning_rate": 0.00015982890841444088, "loss": 1.049, "step": 6661 }, { "epoch": 1.1860754985754987, "grad_norm": 0.620914101600647, "learning_rate": 0.0001598176919292075, "loss": 1.1021, "step": 6662 }, { "epoch": 1.1862535612535612, "grad_norm": 0.6696683168411255, "learning_rate": 0.00015980647427197076, "loss": 0.9053, "step": 6663 }, { "epoch": 1.186431623931624, "grad_norm": 0.6713385581970215, "learning_rate": 0.00015979525544295036, "loss": 0.9596, "step": 6664 }, { "epoch": 1.1866096866096867, "grad_norm": 0.7643477320671082, "learning_rate": 0.00015978403544236614, "loss": 0.882, "step": 6665 }, { "epoch": 1.1867877492877492, "grad_norm": 0.5890966057777405, "learning_rate": 0.00015977281427043794, "loss": 1.0215, "step": 6666 }, { "epoch": 1.186965811965812, "grad_norm": 0.7287502288818359, "learning_rate": 0.0001597615919273856, "loss": 1.0111, "step": 6667 }, { "epoch": 1.1871438746438747, "grad_norm": 0.5713803172111511, "learning_rate": 0.00015975036841342903, "loss": 1.0068, "step": 6668 }, { "epoch": 1.1873219373219372, "grad_norm": 0.5113094449043274, "learning_rate": 
0.0001597391437287881, "loss": 0.9018, "step": 6669 }, { "epoch": 1.1875, "grad_norm": 0.585640013217926, "learning_rate": 0.00015972791787368276, "loss": 1.0375, "step": 6670 }, { "epoch": 1.1876780626780628, "grad_norm": 0.5778326392173767, "learning_rate": 0.00015971669084833293, "loss": 0.9975, "step": 6671 }, { "epoch": 1.1878561253561253, "grad_norm": 0.6707763075828552, "learning_rate": 0.0001597054626529586, "loss": 1.0048, "step": 6672 }, { "epoch": 1.188034188034188, "grad_norm": 0.6113292574882507, "learning_rate": 0.00015969423328777974, "loss": 1.1447, "step": 6673 }, { "epoch": 1.1882122507122508, "grad_norm": 0.6075651049613953, "learning_rate": 0.00015968300275301638, "loss": 0.9212, "step": 6674 }, { "epoch": 1.1883903133903133, "grad_norm": 0.6990494132041931, "learning_rate": 0.00015967177104888857, "loss": 0.9952, "step": 6675 }, { "epoch": 1.188568376068376, "grad_norm": 0.6228706240653992, "learning_rate": 0.00015966053817561638, "loss": 1.0187, "step": 6676 }, { "epoch": 1.1887464387464388, "grad_norm": 0.6387844085693359, "learning_rate": 0.00015964930413341985, "loss": 1.1614, "step": 6677 }, { "epoch": 1.1889245014245013, "grad_norm": 0.6501925587654114, "learning_rate": 0.00015963806892251915, "loss": 1.0366, "step": 6678 }, { "epoch": 1.189102564102564, "grad_norm": 0.6923739910125732, "learning_rate": 0.00015962683254313435, "loss": 1.1992, "step": 6679 }, { "epoch": 1.1892806267806268, "grad_norm": 0.6640275120735168, "learning_rate": 0.00015961559499548563, "loss": 0.8883, "step": 6680 }, { "epoch": 1.1894586894586894, "grad_norm": 0.6493857502937317, "learning_rate": 0.00015960435627979317, "loss": 1.1368, "step": 6681 }, { "epoch": 1.1896367521367521, "grad_norm": 0.6357189416885376, "learning_rate": 0.0001595931163962772, "loss": 1.0502, "step": 6682 }, { "epoch": 1.1898148148148149, "grad_norm": 0.5756343007087708, "learning_rate": 0.0001595818753451579, "loss": 0.9871, "step": 6683 }, { "epoch": 1.1899928774928774, "grad_norm": 
0.7369210124015808, "learning_rate": 0.0001595706331266555, "loss": 1.3229, "step": 6684 }, { "epoch": 1.1901709401709402, "grad_norm": 0.7140820622444153, "learning_rate": 0.0001595593897409903, "loss": 1.1154, "step": 6685 }, { "epoch": 1.190349002849003, "grad_norm": 0.696973443031311, "learning_rate": 0.00015954814518838255, "loss": 0.9806, "step": 6686 }, { "epoch": 1.1905270655270654, "grad_norm": 0.5299260020256042, "learning_rate": 0.00015953689946905262, "loss": 0.771, "step": 6687 }, { "epoch": 1.1907051282051282, "grad_norm": 0.6814879775047302, "learning_rate": 0.00015952565258322085, "loss": 0.8444, "step": 6688 }, { "epoch": 1.190883190883191, "grad_norm": 0.6215870976448059, "learning_rate": 0.00015951440453110754, "loss": 1.0743, "step": 6689 }, { "epoch": 1.1910612535612535, "grad_norm": 0.7017203569412231, "learning_rate": 0.00015950315531293308, "loss": 1.185, "step": 6690 }, { "epoch": 1.1912393162393162, "grad_norm": 0.7147250175476074, "learning_rate": 0.00015949190492891795, "loss": 1.0646, "step": 6691 }, { "epoch": 1.191417378917379, "grad_norm": 0.5867117047309875, "learning_rate": 0.00015948065337928252, "loss": 1.0554, "step": 6692 }, { "epoch": 1.1915954415954415, "grad_norm": 0.6813527345657349, "learning_rate": 0.0001594694006642472, "loss": 1.1451, "step": 6693 }, { "epoch": 1.1917735042735043, "grad_norm": 0.5192593932151794, "learning_rate": 0.00015945814678403256, "loss": 0.7886, "step": 6694 }, { "epoch": 1.191951566951567, "grad_norm": 0.6537744402885437, "learning_rate": 0.00015944689173885904, "loss": 0.9905, "step": 6695 }, { "epoch": 1.1921296296296295, "grad_norm": 0.7350276112556458, "learning_rate": 0.00015943563552894716, "loss": 0.9009, "step": 6696 }, { "epoch": 1.1923076923076923, "grad_norm": 0.7086381316184998, "learning_rate": 0.00015942437815451746, "loss": 0.9117, "step": 6697 }, { "epoch": 1.192485754985755, "grad_norm": 0.6774969696998596, "learning_rate": 0.00015941311961579054, "loss": 1.1172, "step": 6698 }, 
{ "epoch": 1.1926638176638176, "grad_norm": 0.7034362554550171, "learning_rate": 0.00015940185991298694, "loss": 0.8054, "step": 6699 }, { "epoch": 1.1928418803418803, "grad_norm": 0.66145920753479, "learning_rate": 0.00015939059904632728, "loss": 0.7417, "step": 6700 }, { "epoch": 1.193019943019943, "grad_norm": 0.6590890884399414, "learning_rate": 0.00015937933701603223, "loss": 0.9169, "step": 6701 }, { "epoch": 1.1931980056980056, "grad_norm": 0.7492850422859192, "learning_rate": 0.0001593680738223224, "loss": 1.0529, "step": 6702 }, { "epoch": 1.1933760683760684, "grad_norm": 0.7103236317634583, "learning_rate": 0.00015935680946541848, "loss": 1.1377, "step": 6703 }, { "epoch": 1.193554131054131, "grad_norm": 0.6164175868034363, "learning_rate": 0.00015934554394554122, "loss": 0.8636, "step": 6704 }, { "epoch": 1.1937321937321936, "grad_norm": 0.6667410135269165, "learning_rate": 0.0001593342772629113, "loss": 1.0073, "step": 6705 }, { "epoch": 1.1939102564102564, "grad_norm": 0.6785695552825928, "learning_rate": 0.00015932300941774944, "loss": 1.0752, "step": 6706 }, { "epoch": 1.1940883190883191, "grad_norm": 0.6446872353553772, "learning_rate": 0.0001593117404102765, "loss": 0.9509, "step": 6707 }, { "epoch": 1.194266381766382, "grad_norm": 0.6607686877250671, "learning_rate": 0.00015930047024071317, "loss": 1.0902, "step": 6708 }, { "epoch": 1.1944444444444444, "grad_norm": 0.664804995059967, "learning_rate": 0.0001592891989092803, "loss": 0.9783, "step": 6709 }, { "epoch": 1.1946225071225072, "grad_norm": 0.7147907018661499, "learning_rate": 0.00015927792641619876, "loss": 1.0558, "step": 6710 }, { "epoch": 1.1948005698005697, "grad_norm": 0.6858944296836853, "learning_rate": 0.0001592666527616894, "loss": 1.0514, "step": 6711 }, { "epoch": 1.1949786324786325, "grad_norm": 0.598463773727417, "learning_rate": 0.0001592553779459731, "loss": 0.8927, "step": 6712 }, { "epoch": 1.1951566951566952, "grad_norm": 0.6872668862342834, "learning_rate": 
0.00015924410196927076, "loss": 1.016, "step": 6713 }, { "epoch": 1.195334757834758, "grad_norm": 0.6547996401786804, "learning_rate": 0.00015923282483180326, "loss": 1.1573, "step": 6714 }, { "epoch": 1.1955128205128205, "grad_norm": 0.6254705786705017, "learning_rate": 0.00015922154653379167, "loss": 1.0179, "step": 6715 }, { "epoch": 1.1956908831908832, "grad_norm": 0.6049207448959351, "learning_rate": 0.00015921026707545684, "loss": 1.0713, "step": 6716 }, { "epoch": 1.1958689458689458, "grad_norm": 0.6042858958244324, "learning_rate": 0.0001591989864570199, "loss": 0.919, "step": 6717 }, { "epoch": 1.1960470085470085, "grad_norm": 0.6521187424659729, "learning_rate": 0.0001591877046787017, "loss": 1.0112, "step": 6718 }, { "epoch": 1.1962250712250713, "grad_norm": 0.766260027885437, "learning_rate": 0.00015917642174072348, "loss": 0.9774, "step": 6719 }, { "epoch": 1.196403133903134, "grad_norm": 0.7066532373428345, "learning_rate": 0.00015916513764330613, "loss": 1.1112, "step": 6720 }, { "epoch": 1.1965811965811965, "grad_norm": 0.7351508140563965, "learning_rate": 0.00015915385238667083, "loss": 0.9841, "step": 6721 }, { "epoch": 1.1967592592592593, "grad_norm": 0.6133812069892883, "learning_rate": 0.0001591425659710387, "loss": 0.8629, "step": 6722 }, { "epoch": 1.1969373219373218, "grad_norm": 0.7244157791137695, "learning_rate": 0.00015913127839663083, "loss": 1.1584, "step": 6723 }, { "epoch": 1.1971153846153846, "grad_norm": 0.5986210107803345, "learning_rate": 0.00015911998966366842, "loss": 0.8507, "step": 6724 }, { "epoch": 1.1972934472934473, "grad_norm": 0.6087439060211182, "learning_rate": 0.00015910869977237257, "loss": 0.884, "step": 6725 }, { "epoch": 1.19747150997151, "grad_norm": 0.7546007633209229, "learning_rate": 0.00015909740872296457, "loss": 1.1449, "step": 6726 }, { "epoch": 1.1976495726495726, "grad_norm": 0.6437731385231018, "learning_rate": 0.0001590861165156656, "loss": 0.7845, "step": 6727 }, { "epoch": 1.1978276353276354, 
"grad_norm": 0.6281737089157104, "learning_rate": 0.00015907482315069693, "loss": 0.8969, "step": 6728 }, { "epoch": 1.198005698005698, "grad_norm": 0.6196113228797913, "learning_rate": 0.00015906352862827983, "loss": 1.0264, "step": 6729 }, { "epoch": 1.1981837606837606, "grad_norm": 0.5990965962409973, "learning_rate": 0.00015905223294863553, "loss": 1.0017, "step": 6730 }, { "epoch": 1.1983618233618234, "grad_norm": 0.6509191393852234, "learning_rate": 0.00015904093611198542, "loss": 1.1066, "step": 6731 }, { "epoch": 1.1985398860398861, "grad_norm": 0.6648043990135193, "learning_rate": 0.00015902963811855085, "loss": 1.077, "step": 6732 }, { "epoch": 1.1987179487179487, "grad_norm": 0.7071963548660278, "learning_rate": 0.00015901833896855307, "loss": 1.1346, "step": 6733 }, { "epoch": 1.1988960113960114, "grad_norm": 0.5889959335327148, "learning_rate": 0.0001590070386622136, "loss": 0.9525, "step": 6734 }, { "epoch": 1.199074074074074, "grad_norm": 0.6233037710189819, "learning_rate": 0.00015899573719975376, "loss": 1.0513, "step": 6735 }, { "epoch": 1.1992521367521367, "grad_norm": 0.7912302613258362, "learning_rate": 0.000158984434581395, "loss": 0.8749, "step": 6736 }, { "epoch": 1.1994301994301995, "grad_norm": 0.5783160924911499, "learning_rate": 0.0001589731308073588, "loss": 0.7173, "step": 6737 }, { "epoch": 1.1996082621082622, "grad_norm": 0.718950092792511, "learning_rate": 0.00015896182587786658, "loss": 1.0815, "step": 6738 }, { "epoch": 1.1997863247863247, "grad_norm": 0.6700926423072815, "learning_rate": 0.0001589505197931399, "loss": 1.0817, "step": 6739 }, { "epoch": 1.1999643874643875, "grad_norm": 0.7614455223083496, "learning_rate": 0.0001589392125534002, "loss": 0.9707, "step": 6740 }, { "epoch": 1.20014245014245, "grad_norm": 0.6998619437217712, "learning_rate": 0.00015892790415886906, "loss": 1.0541, "step": 6741 }, { "epoch": 1.2003205128205128, "grad_norm": 0.6127668619155884, "learning_rate": 0.0001589165946097681, "loss": 0.9147, 
"step": 6742 }, { "epoch": 1.2004985754985755, "grad_norm": 0.7112005352973938, "learning_rate": 0.00015890528390631885, "loss": 0.868, "step": 6743 }, { "epoch": 1.2006766381766383, "grad_norm": 0.6631024479866028, "learning_rate": 0.0001588939720487429, "loss": 0.9277, "step": 6744 }, { "epoch": 1.2008547008547008, "grad_norm": 0.6106321215629578, "learning_rate": 0.00015888265903726188, "loss": 1.0223, "step": 6745 }, { "epoch": 1.2010327635327636, "grad_norm": 0.6400851607322693, "learning_rate": 0.00015887134487209753, "loss": 1.1279, "step": 6746 }, { "epoch": 1.201210826210826, "grad_norm": 0.6298650503158569, "learning_rate": 0.00015886002955347147, "loss": 0.9481, "step": 6747 }, { "epoch": 1.2013888888888888, "grad_norm": 0.647974967956543, "learning_rate": 0.00015884871308160538, "loss": 1.1513, "step": 6748 }, { "epoch": 1.2015669515669516, "grad_norm": 0.6770651936531067, "learning_rate": 0.000158837395456721, "loss": 0.9914, "step": 6749 }, { "epoch": 1.2017450142450143, "grad_norm": 0.6708947420120239, "learning_rate": 0.0001588260766790401, "loss": 1.1848, "step": 6750 }, { "epoch": 1.2019230769230769, "grad_norm": 0.5624440908432007, "learning_rate": 0.00015881475674878442, "loss": 0.9848, "step": 6751 }, { "epoch": 1.2021011396011396, "grad_norm": 0.5512633919715881, "learning_rate": 0.00015880343566617575, "loss": 1.0308, "step": 6752 }, { "epoch": 1.2022792022792024, "grad_norm": 0.5621042251586914, "learning_rate": 0.0001587921134314359, "loss": 0.8724, "step": 6753 }, { "epoch": 1.202457264957265, "grad_norm": 0.6881251931190491, "learning_rate": 0.00015878079004478675, "loss": 0.9771, "step": 6754 }, { "epoch": 1.2026353276353277, "grad_norm": 0.729998767375946, "learning_rate": 0.0001587694655064501, "loss": 1.002, "step": 6755 }, { "epoch": 1.2028133903133904, "grad_norm": 0.5972567200660706, "learning_rate": 0.00015875813981664787, "loss": 1.0571, "step": 6756 }, { "epoch": 1.202991452991453, "grad_norm": 0.6319229006767273, 
"learning_rate": 0.00015874681297560196, "loss": 0.9294, "step": 6757 }, { "epoch": 1.2031695156695157, "grad_norm": 0.6751521825790405, "learning_rate": 0.00015873548498353428, "loss": 0.783, "step": 6758 }, { "epoch": 1.2033475783475784, "grad_norm": 0.6476554870605469, "learning_rate": 0.00015872415584066677, "loss": 0.8939, "step": 6759 }, { "epoch": 1.203525641025641, "grad_norm": 0.6530960202217102, "learning_rate": 0.0001587128255472214, "loss": 0.9828, "step": 6760 }, { "epoch": 1.2037037037037037, "grad_norm": 0.6708502173423767, "learning_rate": 0.00015870149410342023, "loss": 0.9285, "step": 6761 }, { "epoch": 1.2038817663817665, "grad_norm": 0.7749543190002441, "learning_rate": 0.0001586901615094852, "loss": 1.1295, "step": 6762 }, { "epoch": 1.204059829059829, "grad_norm": 0.6750495433807373, "learning_rate": 0.00015867882776563836, "loss": 1.0562, "step": 6763 }, { "epoch": 1.2042378917378918, "grad_norm": 0.6892416477203369, "learning_rate": 0.00015866749287210178, "loss": 0.7207, "step": 6764 }, { "epoch": 1.2044159544159545, "grad_norm": 0.7066485285758972, "learning_rate": 0.00015865615682909758, "loss": 1.0489, "step": 6765 }, { "epoch": 1.204594017094017, "grad_norm": 0.5669938325881958, "learning_rate": 0.00015864481963684783, "loss": 0.8149, "step": 6766 }, { "epoch": 1.2047720797720798, "grad_norm": 0.6467341780662537, "learning_rate": 0.0001586334812955746, "loss": 0.9595, "step": 6767 }, { "epoch": 1.2049501424501425, "grad_norm": 0.6026045680046082, "learning_rate": 0.0001586221418055002, "loss": 0.9832, "step": 6768 }, { "epoch": 1.205128205128205, "grad_norm": 0.7655174732208252, "learning_rate": 0.00015861080116684665, "loss": 0.9796, "step": 6769 }, { "epoch": 1.2053062678062678, "grad_norm": 0.6386621594429016, "learning_rate": 0.00015859945937983624, "loss": 0.9368, "step": 6770 }, { "epoch": 1.2054843304843306, "grad_norm": 0.7088032364845276, "learning_rate": 0.0001585881164446911, "loss": 1.0167, "step": 6771 }, { "epoch": 
1.205662393162393, "grad_norm": 0.6015275716781616, "learning_rate": 0.0001585767723616336, "loss": 0.8551, "step": 6772 }, { "epoch": 1.2058404558404558, "grad_norm": 0.7013260722160339, "learning_rate": 0.00015856542713088583, "loss": 0.8009, "step": 6773 }, { "epoch": 1.2060185185185186, "grad_norm": 0.6931240558624268, "learning_rate": 0.00015855408075267024, "loss": 0.9964, "step": 6774 }, { "epoch": 1.2061965811965811, "grad_norm": 0.7274388670921326, "learning_rate": 0.00015854273322720908, "loss": 1.0991, "step": 6775 }, { "epoch": 1.2063746438746439, "grad_norm": 0.6353716254234314, "learning_rate": 0.00015853138455472466, "loss": 1.0893, "step": 6776 }, { "epoch": 1.2065527065527066, "grad_norm": 0.6958979368209839, "learning_rate": 0.00015852003473543932, "loss": 1.0238, "step": 6777 }, { "epoch": 1.2067307692307692, "grad_norm": 0.626838743686676, "learning_rate": 0.00015850868376957551, "loss": 0.9384, "step": 6778 }, { "epoch": 1.206908831908832, "grad_norm": 0.5455024242401123, "learning_rate": 0.00015849733165735556, "loss": 0.8068, "step": 6779 }, { "epoch": 1.2070868945868947, "grad_norm": 0.6337353587150574, "learning_rate": 0.0001584859783990019, "loss": 1.1341, "step": 6780 }, { "epoch": 1.2072649572649572, "grad_norm": 0.6318019032478333, "learning_rate": 0.000158474623994737, "loss": 1.1095, "step": 6781 }, { "epoch": 1.20744301994302, "grad_norm": 0.8183810710906982, "learning_rate": 0.00015846326844478332, "loss": 1.1471, "step": 6782 }, { "epoch": 1.2076210826210827, "grad_norm": 0.6140483021736145, "learning_rate": 0.00015845191174936334, "loss": 0.8538, "step": 6783 }, { "epoch": 1.2077991452991452, "grad_norm": 0.7570197582244873, "learning_rate": 0.0001584405539086996, "loss": 1.427, "step": 6784 }, { "epoch": 1.207977207977208, "grad_norm": 0.7616991996765137, "learning_rate": 0.00015842919492301455, "loss": 1.2214, "step": 6785 }, { "epoch": 1.2081552706552707, "grad_norm": 0.561996579170227, "learning_rate": 0.00015841783479253084, 
"loss": 0.8916, "step": 6786 }, { "epoch": 1.2083333333333333, "grad_norm": 0.6124222874641418, "learning_rate": 0.000158406473517471, "loss": 0.9637, "step": 6787 }, { "epoch": 1.208511396011396, "grad_norm": 0.6053098440170288, "learning_rate": 0.00015839511109805762, "loss": 1.0365, "step": 6788 }, { "epoch": 1.2086894586894588, "grad_norm": 0.6451675295829773, "learning_rate": 0.00015838374753451338, "loss": 1.0497, "step": 6789 }, { "epoch": 1.2088675213675213, "grad_norm": 0.6789399981498718, "learning_rate": 0.00015837238282706087, "loss": 0.9286, "step": 6790 }, { "epoch": 1.209045584045584, "grad_norm": 0.5742998123168945, "learning_rate": 0.0001583610169759228, "loss": 1.082, "step": 6791 }, { "epoch": 1.2092236467236468, "grad_norm": 0.6813693642616272, "learning_rate": 0.0001583496499813218, "loss": 0.9785, "step": 6792 }, { "epoch": 1.2094017094017093, "grad_norm": 0.6150603890419006, "learning_rate": 0.0001583382818434806, "loss": 0.9533, "step": 6793 }, { "epoch": 1.209579772079772, "grad_norm": 0.6905919909477234, "learning_rate": 0.000158326912562622, "loss": 1.0132, "step": 6794 }, { "epoch": 1.2097578347578348, "grad_norm": 0.5861411094665527, "learning_rate": 0.0001583155421389687, "loss": 0.7071, "step": 6795 }, { "epoch": 1.2099358974358974, "grad_norm": 0.6822740435600281, "learning_rate": 0.0001583041705727435, "loss": 1.1366, "step": 6796 }, { "epoch": 1.21011396011396, "grad_norm": 0.6013675928115845, "learning_rate": 0.00015829279786416916, "loss": 0.9232, "step": 6797 }, { "epoch": 1.2102920227920229, "grad_norm": 0.650675356388092, "learning_rate": 0.00015828142401346857, "loss": 0.887, "step": 6798 }, { "epoch": 1.2104700854700854, "grad_norm": 0.6764078736305237, "learning_rate": 0.00015827004902086456, "loss": 0.8423, "step": 6799 }, { "epoch": 1.2106481481481481, "grad_norm": 0.6460821628570557, "learning_rate": 0.00015825867288657994, "loss": 1.0074, "step": 6800 }, { "epoch": 1.210826210826211, "grad_norm": 0.692562997341156, 
"learning_rate": 0.00015824729561083768, "loss": 0.7978, "step": 6801 }, { "epoch": 1.2110042735042734, "grad_norm": 0.7255034446716309, "learning_rate": 0.00015823591719386066, "loss": 1.071, "step": 6802 }, { "epoch": 1.2111823361823362, "grad_norm": 0.6598904728889465, "learning_rate": 0.0001582245376358718, "loss": 0.9736, "step": 6803 }, { "epoch": 1.211360398860399, "grad_norm": 0.6372483968734741, "learning_rate": 0.0001582131569370941, "loss": 0.9029, "step": 6804 }, { "epoch": 1.2115384615384615, "grad_norm": 0.5907173156738281, "learning_rate": 0.00015820177509775048, "loss": 0.918, "step": 6805 }, { "epoch": 1.2117165242165242, "grad_norm": 0.6252630949020386, "learning_rate": 0.00015819039211806404, "loss": 0.7801, "step": 6806 }, { "epoch": 1.211894586894587, "grad_norm": 0.5793096423149109, "learning_rate": 0.0001581790079982577, "loss": 0.5769, "step": 6807 }, { "epoch": 1.2120726495726495, "grad_norm": 0.7267270684242249, "learning_rate": 0.00015816762273855454, "loss": 1.1428, "step": 6808 }, { "epoch": 1.2122507122507122, "grad_norm": 0.7481234073638916, "learning_rate": 0.00015815623633917767, "loss": 1.0209, "step": 6809 }, { "epoch": 1.212428774928775, "grad_norm": 0.6114386916160583, "learning_rate": 0.00015814484880035017, "loss": 0.9073, "step": 6810 }, { "epoch": 1.2126068376068375, "grad_norm": 0.6871182322502136, "learning_rate": 0.00015813346012229516, "loss": 1.151, "step": 6811 }, { "epoch": 1.2127849002849003, "grad_norm": 0.6380293965339661, "learning_rate": 0.0001581220703052357, "loss": 1.0981, "step": 6812 }, { "epoch": 1.212962962962963, "grad_norm": 0.6013718247413635, "learning_rate": 0.00015811067934939503, "loss": 0.8832, "step": 6813 }, { "epoch": 1.2131410256410255, "grad_norm": 0.5816897749900818, "learning_rate": 0.00015809928725499632, "loss": 1.063, "step": 6814 }, { "epoch": 1.2133190883190883, "grad_norm": 0.5970914363861084, "learning_rate": 0.00015808789402226278, "loss": 1.1177, "step": 6815 }, { "epoch": 
1.213497150997151, "grad_norm": 0.7624936103820801, "learning_rate": 0.00015807649965141762, "loss": 1.048, "step": 6816 }, { "epoch": 1.2136752136752136, "grad_norm": 0.636263906955719, "learning_rate": 0.0001580651041426841, "loss": 0.9743, "step": 6817 }, { "epoch": 1.2138532763532763, "grad_norm": 0.641090452671051, "learning_rate": 0.00015805370749628547, "loss": 1.0227, "step": 6818 }, { "epoch": 1.214031339031339, "grad_norm": 0.6484021544456482, "learning_rate": 0.00015804230971244504, "loss": 0.9615, "step": 6819 }, { "epoch": 1.2142094017094016, "grad_norm": 0.6473353505134583, "learning_rate": 0.00015803091079138613, "loss": 1.0507, "step": 6820 }, { "epoch": 1.2143874643874644, "grad_norm": 0.5477129220962524, "learning_rate": 0.00015801951073333206, "loss": 0.7928, "step": 6821 }, { "epoch": 1.2145655270655271, "grad_norm": 0.7256210446357727, "learning_rate": 0.0001580081095385062, "loss": 1.0172, "step": 6822 }, { "epoch": 1.2147435897435896, "grad_norm": 0.5785418748855591, "learning_rate": 0.00015799670720713195, "loss": 0.8478, "step": 6823 }, { "epoch": 1.2149216524216524, "grad_norm": 0.6782996654510498, "learning_rate": 0.00015798530373943267, "loss": 1.1819, "step": 6824 }, { "epoch": 1.2150997150997151, "grad_norm": 0.6513699293136597, "learning_rate": 0.00015797389913563186, "loss": 0.9626, "step": 6825 }, { "epoch": 1.2152777777777777, "grad_norm": 0.6503037214279175, "learning_rate": 0.0001579624933959529, "loss": 1.0282, "step": 6826 }, { "epoch": 1.2154558404558404, "grad_norm": 0.581501841545105, "learning_rate": 0.0001579510865206193, "loss": 0.8976, "step": 6827 }, { "epoch": 1.2156339031339032, "grad_norm": 0.6696721911430359, "learning_rate": 0.00015793967850985454, "loss": 0.6418, "step": 6828 }, { "epoch": 1.215811965811966, "grad_norm": 0.6577274203300476, "learning_rate": 0.00015792826936388213, "loss": 1.0615, "step": 6829 }, { "epoch": 1.2159900284900285, "grad_norm": 0.66291743516922, "learning_rate": 0.00015791685908292564, 
"loss": 0.8582, "step": 6830 }, { "epoch": 1.2161680911680912, "grad_norm": 0.6548362374305725, "learning_rate": 0.0001579054476672086, "loss": 1.0343, "step": 6831 }, { "epoch": 1.2163461538461537, "grad_norm": 0.6381218433380127, "learning_rate": 0.00015789403511695457, "loss": 0.8133, "step": 6832 }, { "epoch": 1.2165242165242165, "grad_norm": 0.7217492461204529, "learning_rate": 0.00015788262143238722, "loss": 0.9183, "step": 6833 }, { "epoch": 1.2167022792022792, "grad_norm": 0.610454797744751, "learning_rate": 0.00015787120661373013, "loss": 0.8488, "step": 6834 }, { "epoch": 1.216880341880342, "grad_norm": 0.592771053314209, "learning_rate": 0.00015785979066120696, "loss": 0.8673, "step": 6835 }, { "epoch": 1.2170584045584045, "grad_norm": 0.5787834525108337, "learning_rate": 0.00015784837357504138, "loss": 0.7945, "step": 6836 }, { "epoch": 1.2172364672364673, "grad_norm": 0.6814196109771729, "learning_rate": 0.0001578369553554571, "loss": 0.8906, "step": 6837 }, { "epoch": 1.2174145299145298, "grad_norm": 0.6383981108665466, "learning_rate": 0.00015782553600267787, "loss": 0.8962, "step": 6838 }, { "epoch": 1.2175925925925926, "grad_norm": 0.6733864545822144, "learning_rate": 0.0001578141155169273, "loss": 1.2077, "step": 6839 }, { "epoch": 1.2177706552706553, "grad_norm": 0.5891284346580505, "learning_rate": 0.0001578026938984293, "loss": 0.9477, "step": 6840 }, { "epoch": 1.217948717948718, "grad_norm": 0.7220266461372375, "learning_rate": 0.00015779127114740757, "loss": 1.0343, "step": 6841 }, { "epoch": 1.2181267806267806, "grad_norm": 0.6566546559333801, "learning_rate": 0.0001577798472640859, "loss": 0.9576, "step": 6842 }, { "epoch": 1.2183048433048433, "grad_norm": 0.6428449153900146, "learning_rate": 0.0001577684222486882, "loss": 0.8957, "step": 6843 }, { "epoch": 1.2184829059829059, "grad_norm": 0.6542909741401672, "learning_rate": 0.00015775699610143823, "loss": 0.9942, "step": 6844 }, { "epoch": 1.2186609686609686, "grad_norm": 
0.7101675868034363, "learning_rate": 0.00015774556882255992, "loss": 1.015, "step": 6845 }, { "epoch": 1.2188390313390314, "grad_norm": 0.6606267094612122, "learning_rate": 0.00015773414041227713, "loss": 1.1406, "step": 6846 }, { "epoch": 1.2190170940170941, "grad_norm": 0.67124342918396, "learning_rate": 0.00015772271087081383, "loss": 1.2392, "step": 6847 }, { "epoch": 1.2191951566951567, "grad_norm": 0.6615056991577148, "learning_rate": 0.0001577112801983939, "loss": 1.1583, "step": 6848 }, { "epoch": 1.2193732193732194, "grad_norm": 0.6941317319869995, "learning_rate": 0.0001576998483952413, "loss": 1.0255, "step": 6849 }, { "epoch": 1.219551282051282, "grad_norm": 0.5740683674812317, "learning_rate": 0.00015768841546158005, "loss": 1.0393, "step": 6850 }, { "epoch": 1.2197293447293447, "grad_norm": 0.7143667340278625, "learning_rate": 0.00015767698139763415, "loss": 0.7564, "step": 6851 }, { "epoch": 1.2199074074074074, "grad_norm": 0.6730484366416931, "learning_rate": 0.00015766554620362758, "loss": 1.2221, "step": 6852 }, { "epoch": 1.2200854700854702, "grad_norm": 0.6883087754249573, "learning_rate": 0.00015765410987978444, "loss": 1.0156, "step": 6853 }, { "epoch": 1.2202635327635327, "grad_norm": 0.6585961580276489, "learning_rate": 0.00015764267242632875, "loss": 1.0888, "step": 6854 }, { "epoch": 1.2204415954415955, "grad_norm": 0.6325246691703796, "learning_rate": 0.00015763123384348465, "loss": 0.973, "step": 6855 }, { "epoch": 1.220619658119658, "grad_norm": 0.5930588245391846, "learning_rate": 0.00015761979413147627, "loss": 0.8551, "step": 6856 }, { "epoch": 1.2207977207977208, "grad_norm": 0.6440611481666565, "learning_rate": 0.0001576083532905277, "loss": 0.8396, "step": 6857 }, { "epoch": 1.2209757834757835, "grad_norm": 0.6796659231185913, "learning_rate": 0.00015759691132086315, "loss": 1.0662, "step": 6858 }, { "epoch": 1.2211538461538463, "grad_norm": 0.6813400983810425, "learning_rate": 0.00015758546822270674, "loss": 1.0457, "step": 6859 
}, { "epoch": 1.2213319088319088, "grad_norm": 0.6871716976165771, "learning_rate": 0.00015757402399628272, "loss": 1.1675, "step": 6860 }, { "epoch": 1.2215099715099715, "grad_norm": 0.6431481838226318, "learning_rate": 0.00015756257864181524, "loss": 0.9366, "step": 6861 }, { "epoch": 1.221688034188034, "grad_norm": 0.6061800718307495, "learning_rate": 0.00015755113215952868, "loss": 0.9267, "step": 6862 }, { "epoch": 1.2218660968660968, "grad_norm": 0.5755770206451416, "learning_rate": 0.00015753968454964722, "loss": 0.7342, "step": 6863 }, { "epoch": 1.2220441595441596, "grad_norm": 0.571345329284668, "learning_rate": 0.00015752823581239515, "loss": 0.8943, "step": 6864 }, { "epoch": 1.2222222222222223, "grad_norm": 0.6925615668296814, "learning_rate": 0.0001575167859479968, "loss": 0.8801, "step": 6865 }, { "epoch": 1.2224002849002849, "grad_norm": 0.6812975406646729, "learning_rate": 0.00015750533495667655, "loss": 0.9567, "step": 6866 }, { "epoch": 1.2225783475783476, "grad_norm": 0.8216777443885803, "learning_rate": 0.00015749388283865868, "loss": 1.0908, "step": 6867 }, { "epoch": 1.2227564102564104, "grad_norm": 0.6051010489463806, "learning_rate": 0.00015748242959416763, "loss": 0.8851, "step": 6868 }, { "epoch": 1.2229344729344729, "grad_norm": 0.7750816345214844, "learning_rate": 0.00015747097522342775, "loss": 1.1526, "step": 6869 }, { "epoch": 1.2231125356125356, "grad_norm": 0.6240930557250977, "learning_rate": 0.00015745951972666355, "loss": 1.0603, "step": 6870 }, { "epoch": 1.2232905982905984, "grad_norm": 0.7228875160217285, "learning_rate": 0.00015744806310409937, "loss": 1.1028, "step": 6871 }, { "epoch": 1.223468660968661, "grad_norm": 0.724075436592102, "learning_rate": 0.00015743660535595978, "loss": 0.8983, "step": 6872 }, { "epoch": 1.2236467236467237, "grad_norm": 0.6398203372955322, "learning_rate": 0.00015742514648246916, "loss": 1.0548, "step": 6873 }, { "epoch": 1.2238247863247864, "grad_norm": 0.7024285793304443, "learning_rate": 
0.00015741368648385212, "loss": 1.0172, "step": 6874 }, { "epoch": 1.224002849002849, "grad_norm": 0.6717609763145447, "learning_rate": 0.00015740222536033316, "loss": 0.9002, "step": 6875 }, { "epoch": 1.2241809116809117, "grad_norm": 0.5886133313179016, "learning_rate": 0.00015739076311213686, "loss": 0.8614, "step": 6876 }, { "epoch": 1.2243589743589745, "grad_norm": 0.6856684684753418, "learning_rate": 0.00015737929973948776, "loss": 1.1633, "step": 6877 }, { "epoch": 1.224537037037037, "grad_norm": 0.6771421432495117, "learning_rate": 0.00015736783524261045, "loss": 1.0921, "step": 6878 }, { "epoch": 1.2247150997150997, "grad_norm": 0.5016412138938904, "learning_rate": 0.0001573563696217296, "loss": 0.6732, "step": 6879 }, { "epoch": 1.2248931623931625, "grad_norm": 0.7595276236534119, "learning_rate": 0.00015734490287706984, "loss": 1.0427, "step": 6880 }, { "epoch": 1.225071225071225, "grad_norm": 0.6664281487464905, "learning_rate": 0.00015733343500885582, "loss": 1.2836, "step": 6881 }, { "epoch": 1.2252492877492878, "grad_norm": 0.6662577390670776, "learning_rate": 0.00015732196601731224, "loss": 1.1288, "step": 6882 }, { "epoch": 1.2254273504273505, "grad_norm": 0.6238988041877747, "learning_rate": 0.00015731049590266385, "loss": 1.0809, "step": 6883 }, { "epoch": 1.225605413105413, "grad_norm": 0.6483062505722046, "learning_rate": 0.00015729902466513532, "loss": 0.9992, "step": 6884 }, { "epoch": 1.2257834757834758, "grad_norm": 0.6890861988067627, "learning_rate": 0.0001572875523049514, "loss": 1.1844, "step": 6885 }, { "epoch": 1.2259615384615385, "grad_norm": 0.7087607383728027, "learning_rate": 0.00015727607882233695, "loss": 1.013, "step": 6886 }, { "epoch": 1.226139601139601, "grad_norm": 0.709048867225647, "learning_rate": 0.00015726460421751668, "loss": 0.9748, "step": 6887 }, { "epoch": 1.2263176638176638, "grad_norm": 0.5918150544166565, "learning_rate": 0.00015725312849071546, "loss": 0.9978, "step": 6888 }, { "epoch": 1.2264957264957266, 
"grad_norm": 0.4343377947807312, "learning_rate": 0.0001572416516421581, "loss": 0.6233, "step": 6889 }, { "epoch": 1.226673789173789, "grad_norm": 0.6360403895378113, "learning_rate": 0.00015723017367206952, "loss": 0.9698, "step": 6890 }, { "epoch": 1.2268518518518519, "grad_norm": 0.7261984944343567, "learning_rate": 0.00015721869458067454, "loss": 1.0426, "step": 6891 }, { "epoch": 1.2270299145299146, "grad_norm": 0.6806774139404297, "learning_rate": 0.0001572072143681981, "loss": 0.9692, "step": 6892 }, { "epoch": 1.2272079772079771, "grad_norm": 0.7140612006187439, "learning_rate": 0.00015719573303486515, "loss": 1.0828, "step": 6893 }, { "epoch": 1.22738603988604, "grad_norm": 0.5383326411247253, "learning_rate": 0.0001571842505809006, "loss": 1.012, "step": 6894 }, { "epoch": 1.2275641025641026, "grad_norm": 0.5992259383201599, "learning_rate": 0.0001571727670065295, "loss": 0.876, "step": 6895 }, { "epoch": 1.2277421652421652, "grad_norm": 0.636696457862854, "learning_rate": 0.00015716128231197676, "loss": 1.1001, "step": 6896 }, { "epoch": 1.227920227920228, "grad_norm": 0.5980371236801147, "learning_rate": 0.00015714979649746744, "loss": 0.937, "step": 6897 }, { "epoch": 1.2280982905982907, "grad_norm": 0.7678794860839844, "learning_rate": 0.00015713830956322656, "loss": 1.1965, "step": 6898 }, { "epoch": 1.2282763532763532, "grad_norm": 0.6918835639953613, "learning_rate": 0.00015712682150947923, "loss": 0.8578, "step": 6899 }, { "epoch": 1.228454415954416, "grad_norm": 0.6463451385498047, "learning_rate": 0.00015711533233645048, "loss": 1.009, "step": 6900 }, { "epoch": 1.2286324786324787, "grad_norm": 0.6720646023750305, "learning_rate": 0.00015710384204436549, "loss": 1.0031, "step": 6901 }, { "epoch": 1.2288105413105412, "grad_norm": 0.6618736982345581, "learning_rate": 0.00015709235063344926, "loss": 0.9017, "step": 6902 }, { "epoch": 1.228988603988604, "grad_norm": 0.6789427399635315, "learning_rate": 0.0001570808581039271, "loss": 1.1289, "step": 
6903 }, { "epoch": 1.2291666666666667, "grad_norm": 0.6395950317382812, "learning_rate": 0.00015706936445602403, "loss": 1.1051, "step": 6904 }, { "epoch": 1.2293447293447293, "grad_norm": 0.7023917436599731, "learning_rate": 0.00015705786968996533, "loss": 1.2876, "step": 6905 }, { "epoch": 1.229522792022792, "grad_norm": 0.7473352551460266, "learning_rate": 0.00015704637380597623, "loss": 1.237, "step": 6906 }, { "epoch": 1.2297008547008548, "grad_norm": 0.6952672004699707, "learning_rate": 0.00015703487680428192, "loss": 1.0674, "step": 6907 }, { "epoch": 1.2298789173789173, "grad_norm": 0.5968644022941589, "learning_rate": 0.0001570233786851077, "loss": 0.9169, "step": 6908 }, { "epoch": 1.23005698005698, "grad_norm": 0.7219798564910889, "learning_rate": 0.0001570118794486788, "loss": 1.0556, "step": 6909 }, { "epoch": 1.2302350427350428, "grad_norm": 0.6603400707244873, "learning_rate": 0.0001570003790952206, "loss": 0.9596, "step": 6910 }, { "epoch": 1.2304131054131053, "grad_norm": 0.5972838401794434, "learning_rate": 0.0001569888776249583, "loss": 0.9168, "step": 6911 }, { "epoch": 1.230591168091168, "grad_norm": 0.792585551738739, "learning_rate": 0.00015697737503811738, "loss": 1.1074, "step": 6912 }, { "epoch": 1.2307692307692308, "grad_norm": 0.5845609903335571, "learning_rate": 0.00015696587133492314, "loss": 0.8413, "step": 6913 }, { "epoch": 1.2309472934472934, "grad_norm": 0.6603896021842957, "learning_rate": 0.000156954366515601, "loss": 0.9109, "step": 6914 }, { "epoch": 1.2311253561253561, "grad_norm": 0.6367142796516418, "learning_rate": 0.00015694286058037636, "loss": 1.0119, "step": 6915 }, { "epoch": 1.2313034188034189, "grad_norm": 0.693854570388794, "learning_rate": 0.00015693135352947465, "loss": 1.0925, "step": 6916 }, { "epoch": 1.2314814814814814, "grad_norm": 0.6570404171943665, "learning_rate": 0.00015691984536312135, "loss": 0.9731, "step": 6917 }, { "epoch": 1.2316595441595442, "grad_norm": 0.6778639554977417, "learning_rate": 
0.0001569083360815419, "loss": 1.1415, "step": 6918 }, { "epoch": 1.231837606837607, "grad_norm": 0.6656233668327332, "learning_rate": 0.00015689682568496182, "loss": 0.8603, "step": 6919 }, { "epoch": 1.2320156695156694, "grad_norm": 0.6569861173629761, "learning_rate": 0.00015688531417360665, "loss": 0.8374, "step": 6920 }, { "epoch": 1.2321937321937322, "grad_norm": 0.6746888160705566, "learning_rate": 0.0001568738015477019, "loss": 1.1395, "step": 6921 }, { "epoch": 1.232371794871795, "grad_norm": 0.6180813908576965, "learning_rate": 0.00015686228780747316, "loss": 1.0049, "step": 6922 }, { "epoch": 1.2325498575498575, "grad_norm": 0.7326146960258484, "learning_rate": 0.000156850772953146, "loss": 1.2389, "step": 6923 }, { "epoch": 1.2327279202279202, "grad_norm": 0.5912215709686279, "learning_rate": 0.00015683925698494608, "loss": 1.0174, "step": 6924 }, { "epoch": 1.232905982905983, "grad_norm": 0.5214745402336121, "learning_rate": 0.00015682773990309895, "loss": 0.5778, "step": 6925 }, { "epoch": 1.2330840455840455, "grad_norm": 0.6862079501152039, "learning_rate": 0.00015681622170783034, "loss": 0.896, "step": 6926 }, { "epoch": 1.2332621082621082, "grad_norm": 0.7858926057815552, "learning_rate": 0.00015680470239936586, "loss": 1.0714, "step": 6927 }, { "epoch": 1.233440170940171, "grad_norm": 0.6706146597862244, "learning_rate": 0.00015679318197793127, "loss": 1.0157, "step": 6928 }, { "epoch": 1.2336182336182335, "grad_norm": 0.6657105088233948, "learning_rate": 0.00015678166044375225, "loss": 0.9674, "step": 6929 }, { "epoch": 1.2337962962962963, "grad_norm": 0.6790838837623596, "learning_rate": 0.0001567701377970545, "loss": 0.9744, "step": 6930 }, { "epoch": 1.233974358974359, "grad_norm": 0.6469771862030029, "learning_rate": 0.00015675861403806386, "loss": 1.0205, "step": 6931 }, { "epoch": 1.2341524216524216, "grad_norm": 0.4926300346851349, "learning_rate": 0.0001567470891670061, "loss": 0.6336, "step": 6932 }, { "epoch": 1.2343304843304843, 
"grad_norm": 0.6762157082557678, "learning_rate": 0.000156735563184107, "loss": 1.059, "step": 6933 }, { "epoch": 1.234508547008547, "grad_norm": 0.6998521685600281, "learning_rate": 0.0001567240360895924, "loss": 1.0586, "step": 6934 }, { "epoch": 1.2346866096866096, "grad_norm": 0.5947706699371338, "learning_rate": 0.00015671250788368814, "loss": 0.8815, "step": 6935 }, { "epoch": 1.2348646723646723, "grad_norm": 0.6966122984886169, "learning_rate": 0.0001567009785666201, "loss": 1.0105, "step": 6936 }, { "epoch": 1.235042735042735, "grad_norm": 0.6747866272926331, "learning_rate": 0.0001566894481386142, "loss": 0.8783, "step": 6937 }, { "epoch": 1.2352207977207976, "grad_norm": 0.6348921060562134, "learning_rate": 0.0001566779165998963, "loss": 0.7813, "step": 6938 }, { "epoch": 1.2353988603988604, "grad_norm": 0.596466600894928, "learning_rate": 0.00015666638395069236, "loss": 0.8689, "step": 6939 }, { "epoch": 1.2355769230769231, "grad_norm": 0.6926795244216919, "learning_rate": 0.00015665485019122834, "loss": 1.0266, "step": 6940 }, { "epoch": 1.2357549857549857, "grad_norm": 0.6590100526809692, "learning_rate": 0.00015664331532173022, "loss": 1.128, "step": 6941 }, { "epoch": 1.2359330484330484, "grad_norm": 0.7422109246253967, "learning_rate": 0.00015663177934242402, "loss": 0.8495, "step": 6942 }, { "epoch": 1.2361111111111112, "grad_norm": 0.6463228464126587, "learning_rate": 0.0001566202422535357, "loss": 1.0941, "step": 6943 }, { "epoch": 1.236289173789174, "grad_norm": 0.7278686761856079, "learning_rate": 0.0001566087040552914, "loss": 1.2039, "step": 6944 }, { "epoch": 1.2364672364672364, "grad_norm": 0.6917086839675903, "learning_rate": 0.00015659716474791712, "loss": 1.042, "step": 6945 }, { "epoch": 1.2366452991452992, "grad_norm": 0.637205183506012, "learning_rate": 0.00015658562433163898, "loss": 1.0379, "step": 6946 }, { "epoch": 1.2368233618233617, "grad_norm": 0.6706623435020447, "learning_rate": 0.00015657408280668307, "loss": 1.0347, "step": 
6947 }, { "epoch": 1.2370014245014245, "grad_norm": 0.6435480713844299, "learning_rate": 0.00015656254017327553, "loss": 0.7708, "step": 6948 }, { "epoch": 1.2371794871794872, "grad_norm": 0.5703113675117493, "learning_rate": 0.0001565509964316425, "loss": 0.8786, "step": 6949 }, { "epoch": 1.23735754985755, "grad_norm": 0.6438127160072327, "learning_rate": 0.00015653945158201018, "loss": 0.9435, "step": 6950 }, { "epoch": 1.2375356125356125, "grad_norm": 0.68101966381073, "learning_rate": 0.00015652790562460474, "loss": 1.1062, "step": 6951 }, { "epoch": 1.2377136752136753, "grad_norm": 0.661230206489563, "learning_rate": 0.00015651635855965242, "loss": 1.0113, "step": 6952 }, { "epoch": 1.2378917378917378, "grad_norm": 0.6399117708206177, "learning_rate": 0.0001565048103873795, "loss": 1.1423, "step": 6953 }, { "epoch": 1.2380698005698005, "grad_norm": 0.7614672780036926, "learning_rate": 0.00015649326110801215, "loss": 1.0359, "step": 6954 }, { "epoch": 1.2382478632478633, "grad_norm": 0.6461986303329468, "learning_rate": 0.00015648171072177674, "loss": 1.0145, "step": 6955 }, { "epoch": 1.238425925925926, "grad_norm": 0.5902668833732605, "learning_rate": 0.0001564701592288995, "loss": 0.9451, "step": 6956 }, { "epoch": 1.2386039886039886, "grad_norm": 0.5686020255088806, "learning_rate": 0.00015645860662960682, "loss": 0.7512, "step": 6957 }, { "epoch": 1.2387820512820513, "grad_norm": 0.6640077829360962, "learning_rate": 0.00015644705292412503, "loss": 0.7133, "step": 6958 }, { "epoch": 1.2389601139601139, "grad_norm": 0.7402132749557495, "learning_rate": 0.00015643549811268049, "loss": 1.0903, "step": 6959 }, { "epoch": 1.2391381766381766, "grad_norm": 0.62332683801651, "learning_rate": 0.00015642394219549962, "loss": 0.9378, "step": 6960 }, { "epoch": 1.2393162393162394, "grad_norm": 0.6374901533126831, "learning_rate": 0.00015641238517280877, "loss": 1.0746, "step": 6961 }, { "epoch": 1.239494301994302, "grad_norm": 0.5939112901687622, "learning_rate": 
0.00015640082704483443, "loss": 0.7185, "step": 6962 }, { "epoch": 1.2396723646723646, "grad_norm": 0.8378096222877502, "learning_rate": 0.00015638926781180306, "loss": 1.1932, "step": 6963 }, { "epoch": 1.2398504273504274, "grad_norm": 0.5707982778549194, "learning_rate": 0.0001563777074739411, "loss": 0.9834, "step": 6964 }, { "epoch": 1.24002849002849, "grad_norm": 0.6339748501777649, "learning_rate": 0.00015636614603147512, "loss": 1.0307, "step": 6965 }, { "epoch": 1.2402065527065527, "grad_norm": 0.7353155016899109, "learning_rate": 0.00015635458348463156, "loss": 1.0311, "step": 6966 }, { "epoch": 1.2403846153846154, "grad_norm": 0.8307726979255676, "learning_rate": 0.00015634301983363704, "loss": 1.0673, "step": 6967 }, { "epoch": 1.2405626780626782, "grad_norm": 0.5299199819564819, "learning_rate": 0.00015633145507871807, "loss": 0.6649, "step": 6968 }, { "epoch": 1.2407407407407407, "grad_norm": 0.6162533760070801, "learning_rate": 0.00015631988922010126, "loss": 0.8096, "step": 6969 }, { "epoch": 1.2409188034188035, "grad_norm": 0.6212689876556396, "learning_rate": 0.0001563083222580132, "loss": 1.0371, "step": 6970 }, { "epoch": 1.241096866096866, "grad_norm": 0.6148123145103455, "learning_rate": 0.00015629675419268055, "loss": 1.0439, "step": 6971 }, { "epoch": 1.2412749287749287, "grad_norm": 0.6163684129714966, "learning_rate": 0.00015628518502432994, "loss": 0.9075, "step": 6972 }, { "epoch": 1.2414529914529915, "grad_norm": 0.5127472877502441, "learning_rate": 0.00015627361475318807, "loss": 0.6138, "step": 6973 }, { "epoch": 1.2416310541310542, "grad_norm": 0.6508103013038635, "learning_rate": 0.0001562620433794816, "loss": 0.9608, "step": 6974 }, { "epoch": 1.2418091168091168, "grad_norm": 0.6711046695709229, "learning_rate": 0.0001562504709034373, "loss": 1.1494, "step": 6975 }, { "epoch": 1.2419871794871795, "grad_norm": 0.6831514835357666, "learning_rate": 0.00015623889732528182, "loss": 0.9664, "step": 6976 }, { "epoch": 1.242165242165242, 
"grad_norm": 0.693732738494873, "learning_rate": 0.00015622732264524198, "loss": 0.9055, "step": 6977 }, { "epoch": 1.2423433048433048, "grad_norm": 0.8475173711776733, "learning_rate": 0.00015621574686354456, "loss": 1.2014, "step": 6978 }, { "epoch": 1.2425213675213675, "grad_norm": 0.6342347264289856, "learning_rate": 0.0001562041699804164, "loss": 1.0691, "step": 6979 }, { "epoch": 1.2426994301994303, "grad_norm": 0.620517373085022, "learning_rate": 0.00015619259199608422, "loss": 0.7318, "step": 6980 }, { "epoch": 1.2428774928774928, "grad_norm": 0.589567244052887, "learning_rate": 0.000156181012910775, "loss": 1.0656, "step": 6981 }, { "epoch": 1.2430555555555556, "grad_norm": 0.7570258975028992, "learning_rate": 0.00015616943272471546, "loss": 1.0517, "step": 6982 }, { "epoch": 1.243233618233618, "grad_norm": 0.6232032775878906, "learning_rate": 0.00015615785143813262, "loss": 0.8867, "step": 6983 }, { "epoch": 1.2434116809116809, "grad_norm": 0.630095899105072, "learning_rate": 0.0001561462690512533, "loss": 0.9287, "step": 6984 }, { "epoch": 1.2435897435897436, "grad_norm": 0.7410848140716553, "learning_rate": 0.00015613468556430454, "loss": 1.162, "step": 6985 }, { "epoch": 1.2437678062678064, "grad_norm": 0.7574684023857117, "learning_rate": 0.00015612310097751317, "loss": 1.2118, "step": 6986 }, { "epoch": 1.243945868945869, "grad_norm": 0.580760657787323, "learning_rate": 0.0001561115152911062, "loss": 1.0612, "step": 6987 }, { "epoch": 1.2441239316239316, "grad_norm": 0.6105104088783264, "learning_rate": 0.00015609992850531073, "loss": 0.9262, "step": 6988 }, { "epoch": 1.2443019943019944, "grad_norm": 0.669435441493988, "learning_rate": 0.00015608834062035362, "loss": 0.9595, "step": 6989 }, { "epoch": 1.244480056980057, "grad_norm": 0.6530314683914185, "learning_rate": 0.00015607675163646206, "loss": 0.7987, "step": 6990 }, { "epoch": 1.2446581196581197, "grad_norm": 0.5801477432250977, "learning_rate": 0.00015606516155386297, "loss": 0.7667, 
"step": 6991 }, { "epoch": 1.2448361823361824, "grad_norm": 0.5773885250091553, "learning_rate": 0.00015605357037278355, "loss": 0.847, "step": 6992 }, { "epoch": 1.245014245014245, "grad_norm": 0.5399810075759888, "learning_rate": 0.00015604197809345082, "loss": 0.9284, "step": 6993 }, { "epoch": 1.2451923076923077, "grad_norm": 0.5910452604293823, "learning_rate": 0.000156030384716092, "loss": 1.0004, "step": 6994 }, { "epoch": 1.2453703703703705, "grad_norm": 0.5979224443435669, "learning_rate": 0.00015601879024093414, "loss": 0.9027, "step": 6995 }, { "epoch": 1.245548433048433, "grad_norm": 0.6092126369476318, "learning_rate": 0.0001560071946682045, "loss": 0.9755, "step": 6996 }, { "epoch": 1.2457264957264957, "grad_norm": 0.6536708474159241, "learning_rate": 0.0001559955979981302, "loss": 1.1828, "step": 6997 }, { "epoch": 1.2459045584045585, "grad_norm": 0.6602030992507935, "learning_rate": 0.00015598400023093847, "loss": 1.0395, "step": 6998 }, { "epoch": 1.246082621082621, "grad_norm": 0.6864825487136841, "learning_rate": 0.00015597240136685657, "loss": 1.083, "step": 6999 }, { "epoch": 1.2462606837606838, "grad_norm": 0.6194674968719482, "learning_rate": 0.0001559608014061117, "loss": 1.0461, "step": 7000 }, { "epoch": 1.2464387464387465, "grad_norm": 0.5879074335098267, "learning_rate": 0.00015594920034893122, "loss": 1.076, "step": 7001 }, { "epoch": 1.246616809116809, "grad_norm": 0.6514387726783752, "learning_rate": 0.00015593759819554234, "loss": 1.0396, "step": 7002 }, { "epoch": 1.2467948717948718, "grad_norm": 0.5988301634788513, "learning_rate": 0.00015592599494617247, "loss": 0.9501, "step": 7003 }, { "epoch": 1.2469729344729346, "grad_norm": 0.6282773613929749, "learning_rate": 0.00015591439060104887, "loss": 1.1002, "step": 7004 }, { "epoch": 1.247150997150997, "grad_norm": 0.6910465955734253, "learning_rate": 0.00015590278516039896, "loss": 1.1771, "step": 7005 }, { "epoch": 1.2473290598290598, "grad_norm": 0.6097282767295837, 
"learning_rate": 0.00015589117862445007, "loss": 1.0707, "step": 7006 }, { "epoch": 1.2475071225071226, "grad_norm": 0.7076875567436218, "learning_rate": 0.00015587957099342967, "loss": 1.0078, "step": 7007 }, { "epoch": 1.2476851851851851, "grad_norm": 0.6776556372642517, "learning_rate": 0.00015586796226756518, "loss": 0.8971, "step": 7008 }, { "epoch": 1.2478632478632479, "grad_norm": 0.6506341695785522, "learning_rate": 0.00015585635244708398, "loss": 0.9727, "step": 7009 }, { "epoch": 1.2480413105413106, "grad_norm": 0.624724805355072, "learning_rate": 0.00015584474153221357, "loss": 0.9858, "step": 7010 }, { "epoch": 1.2482193732193732, "grad_norm": 0.6070096492767334, "learning_rate": 0.0001558331295231815, "loss": 0.9385, "step": 7011 }, { "epoch": 1.248397435897436, "grad_norm": 0.6948656439781189, "learning_rate": 0.00015582151642021524, "loss": 0.9425, "step": 7012 }, { "epoch": 1.2485754985754987, "grad_norm": 0.6559088230133057, "learning_rate": 0.0001558099022235423, "loss": 1.0002, "step": 7013 }, { "epoch": 1.2487535612535612, "grad_norm": 0.6097117066383362, "learning_rate": 0.00015579828693339026, "loss": 1.0234, "step": 7014 }, { "epoch": 1.248931623931624, "grad_norm": 0.6612260341644287, "learning_rate": 0.00015578667054998673, "loss": 1.1376, "step": 7015 }, { "epoch": 1.2491096866096867, "grad_norm": 0.6305607557296753, "learning_rate": 0.00015577505307355925, "loss": 0.9127, "step": 7016 }, { "epoch": 1.2492877492877492, "grad_norm": 0.6648319959640503, "learning_rate": 0.00015576343450433549, "loss": 0.8697, "step": 7017 }, { "epoch": 1.249465811965812, "grad_norm": 0.7642946839332581, "learning_rate": 0.00015575181484254303, "loss": 1.0998, "step": 7018 }, { "epoch": 1.2496438746438747, "grad_norm": 0.6775243282318115, "learning_rate": 0.00015574019408840962, "loss": 1.0186, "step": 7019 }, { "epoch": 1.2498219373219372, "grad_norm": 0.6075591444969177, "learning_rate": 0.00015572857224216286, "loss": 0.9592, "step": 7020 }, { "epoch": 
1.2498219373219372, "eval_loss": 1.105136752128601, "eval_runtime": 24.4793, "eval_samples_per_second": 42.526, "eval_steps_per_second": 21.283, "step": 7020 }, { "epoch": 1.25, "grad_norm": 0.5856962203979492, "learning_rate": 0.0001557169493040305, "loss": 0.8336, "step": 7021 }, { "epoch": 1.2501780626780628, "grad_norm": 0.6451364159584045, "learning_rate": 0.00015570532527424028, "loss": 0.8805, "step": 7022 }, { "epoch": 1.2503561253561253, "grad_norm": 0.6266474723815918, "learning_rate": 0.00015569370015301991, "loss": 1.0023, "step": 7023 }, { "epoch": 1.250534188034188, "grad_norm": 0.5547378063201904, "learning_rate": 0.00015568207394059722, "loss": 0.7385, "step": 7024 }, { "epoch": 1.2507122507122508, "grad_norm": 0.604169487953186, "learning_rate": 0.0001556704466371999, "loss": 0.9194, "step": 7025 }, { "epoch": 1.2508903133903133, "grad_norm": 0.7054405212402344, "learning_rate": 0.00015565881824305586, "loss": 1.1864, "step": 7026 }, { "epoch": 1.251068376068376, "grad_norm": 0.6429929733276367, "learning_rate": 0.0001556471887583929, "loss": 1.0129, "step": 7027 }, { "epoch": 1.2512464387464388, "grad_norm": 0.695957362651825, "learning_rate": 0.00015563555818343887, "loss": 1.2994, "step": 7028 }, { "epoch": 1.2514245014245013, "grad_norm": 0.5889938473701477, "learning_rate": 0.0001556239265184216, "loss": 1.0109, "step": 7029 }, { "epoch": 1.251602564102564, "grad_norm": 0.6424569487571716, "learning_rate": 0.0001556122937635691, "loss": 0.8585, "step": 7030 }, { "epoch": 1.2517806267806268, "grad_norm": 0.5561244487762451, "learning_rate": 0.0001556006599191092, "loss": 0.9994, "step": 7031 }, { "epoch": 1.2519586894586894, "grad_norm": 0.6355302333831787, "learning_rate": 0.00015558902498526988, "loss": 0.9495, "step": 7032 }, { "epoch": 1.2521367521367521, "grad_norm": 0.6272686719894409, "learning_rate": 0.00015557738896227908, "loss": 0.7611, "step": 7033 }, { "epoch": 1.2523148148148149, "grad_norm": 0.7069199085235596, "learning_rate": 
0.00015556575185036482, "loss": 1.0612, "step": 7034 }, { "epoch": 1.2524928774928774, "grad_norm": 0.6635094285011292, "learning_rate": 0.00015555411364975505, "loss": 1.1182, "step": 7035 }, { "epoch": 1.2526709401709402, "grad_norm": 0.6112014651298523, "learning_rate": 0.00015554247436067785, "loss": 0.8677, "step": 7036 }, { "epoch": 1.252849002849003, "grad_norm": 0.678963303565979, "learning_rate": 0.00015553083398336126, "loss": 1.1421, "step": 7037 }, { "epoch": 1.2530270655270654, "grad_norm": 0.6291939616203308, "learning_rate": 0.0001555191925180333, "loss": 0.9157, "step": 7038 }, { "epoch": 1.2532051282051282, "grad_norm": 0.6519795656204224, "learning_rate": 0.0001555075499649221, "loss": 1.0074, "step": 7039 }, { "epoch": 1.253383190883191, "grad_norm": 0.6063529849052429, "learning_rate": 0.00015549590632425576, "loss": 1.0205, "step": 7040 }, { "epoch": 1.2535612535612537, "grad_norm": 0.7055633664131165, "learning_rate": 0.00015548426159626242, "loss": 1.0254, "step": 7041 }, { "epoch": 1.2537393162393162, "grad_norm": 0.6783022880554199, "learning_rate": 0.00015547261578117025, "loss": 1.1017, "step": 7042 }, { "epoch": 1.253917378917379, "grad_norm": 0.7055003643035889, "learning_rate": 0.0001554609688792074, "loss": 1.0269, "step": 7043 }, { "epoch": 1.2540954415954415, "grad_norm": 0.6465007662773132, "learning_rate": 0.0001554493208906021, "loss": 1.0492, "step": 7044 }, { "epoch": 1.2542735042735043, "grad_norm": 0.6443775296211243, "learning_rate": 0.0001554376718155825, "loss": 0.9778, "step": 7045 }, { "epoch": 1.254451566951567, "grad_norm": 0.695214569568634, "learning_rate": 0.0001554260216543769, "loss": 0.8792, "step": 7046 }, { "epoch": 1.2546296296296298, "grad_norm": 0.6777814626693726, "learning_rate": 0.00015541437040721354, "loss": 0.8944, "step": 7047 }, { "epoch": 1.2548076923076923, "grad_norm": 0.6269369721412659, "learning_rate": 0.0001554027180743207, "loss": 0.8825, "step": 7048 }, { "epoch": 1.254985754985755, 
"grad_norm": 0.6197061538696289, "learning_rate": 0.0001553910646559267, "loss": 0.9823, "step": 7049 }, { "epoch": 1.2551638176638176, "grad_norm": 0.681347131729126, "learning_rate": 0.00015537941015225984, "loss": 0.995, "step": 7050 }, { "epoch": 1.2553418803418803, "grad_norm": 0.6224286556243896, "learning_rate": 0.00015536775456354848, "loss": 0.7714, "step": 7051 }, { "epoch": 1.255519943019943, "grad_norm": 0.6113278269767761, "learning_rate": 0.00015535609789002098, "loss": 0.9859, "step": 7052 }, { "epoch": 1.2556980056980058, "grad_norm": 0.6985422372817993, "learning_rate": 0.00015534444013190577, "loss": 0.8785, "step": 7053 }, { "epoch": 1.2558760683760684, "grad_norm": 0.5602933168411255, "learning_rate": 0.00015533278128943118, "loss": 0.8341, "step": 7054 }, { "epoch": 1.256054131054131, "grad_norm": 0.587684690952301, "learning_rate": 0.0001553211213628257, "loss": 0.7933, "step": 7055 }, { "epoch": 1.2562321937321936, "grad_norm": 0.692997932434082, "learning_rate": 0.0001553094603523178, "loss": 1.0957, "step": 7056 }, { "epoch": 1.2564102564102564, "grad_norm": 0.6925587058067322, "learning_rate": 0.00015529779825813588, "loss": 0.8602, "step": 7057 }, { "epoch": 1.2565883190883191, "grad_norm": 0.6383063197135925, "learning_rate": 0.0001552861350805085, "loss": 0.9933, "step": 7058 }, { "epoch": 1.256766381766382, "grad_norm": 0.6520544290542603, "learning_rate": 0.00015527447081966413, "loss": 0.9498, "step": 7059 }, { "epoch": 1.2569444444444444, "grad_norm": 0.7353914380073547, "learning_rate": 0.00015526280547583133, "loss": 1.1071, "step": 7060 }, { "epoch": 1.2571225071225072, "grad_norm": 0.7141618132591248, "learning_rate": 0.00015525113904923864, "loss": 0.8333, "step": 7061 }, { "epoch": 1.2573005698005697, "grad_norm": 0.6194499731063843, "learning_rate": 0.00015523947154011468, "loss": 0.9421, "step": 7062 }, { "epoch": 1.2574786324786325, "grad_norm": 0.7514514327049255, "learning_rate": 0.00015522780294868803, "loss": 1.226, 
"step": 7063 }, { "epoch": 1.2576566951566952, "grad_norm": 0.762923538684845, "learning_rate": 0.0001552161332751873, "loss": 1.1893, "step": 7064 }, { "epoch": 1.257834757834758, "grad_norm": 0.6265730261802673, "learning_rate": 0.00015520446251984113, "loss": 0.6604, "step": 7065 }, { "epoch": 1.2580128205128205, "grad_norm": 0.6447750329971313, "learning_rate": 0.0001551927906828782, "loss": 0.9814, "step": 7066 }, { "epoch": 1.2581908831908832, "grad_norm": 0.5791042447090149, "learning_rate": 0.00015518111776452722, "loss": 0.8283, "step": 7067 }, { "epoch": 1.2583689458689458, "grad_norm": 0.5267777442932129, "learning_rate": 0.00015516944376501682, "loss": 0.5748, "step": 7068 }, { "epoch": 1.2585470085470085, "grad_norm": 0.7343912720680237, "learning_rate": 0.0001551577686845758, "loss": 1.1777, "step": 7069 }, { "epoch": 1.2587250712250713, "grad_norm": 0.645746111869812, "learning_rate": 0.00015514609252343284, "loss": 0.9356, "step": 7070 }, { "epoch": 1.258903133903134, "grad_norm": 0.6993104219436646, "learning_rate": 0.0001551344152818168, "loss": 1.06, "step": 7071 }, { "epoch": 1.2590811965811965, "grad_norm": 0.6661365628242493, "learning_rate": 0.0001551227369599564, "loss": 1.061, "step": 7072 }, { "epoch": 1.2592592592592593, "grad_norm": 0.7833736538887024, "learning_rate": 0.0001551110575580805, "loss": 0.9674, "step": 7073 }, { "epoch": 1.2594373219373218, "grad_norm": 0.5878575444221497, "learning_rate": 0.00015509937707641787, "loss": 0.9002, "step": 7074 }, { "epoch": 1.2596153846153846, "grad_norm": 0.6402907371520996, "learning_rate": 0.00015508769551519745, "loss": 1.0157, "step": 7075 }, { "epoch": 1.2597934472934473, "grad_norm": 0.6794611215591431, "learning_rate": 0.00015507601287464805, "loss": 1.052, "step": 7076 }, { "epoch": 1.25997150997151, "grad_norm": 0.706922173500061, "learning_rate": 0.0001550643291549986, "loss": 1.0814, "step": 7077 }, { "epoch": 1.2601495726495726, "grad_norm": 0.6722953915596008, "learning_rate": 
0.000155052644356478, "loss": 1.1402, "step": 7078 }, { "epoch": 1.2603276353276354, "grad_norm": 0.6619611978530884, "learning_rate": 0.00015504095847931518, "loss": 0.9583, "step": 7079 }, { "epoch": 1.260505698005698, "grad_norm": 0.5645583271980286, "learning_rate": 0.00015502927152373914, "loss": 0.6746, "step": 7080 }, { "epoch": 1.2606837606837606, "grad_norm": 0.6634977459907532, "learning_rate": 0.00015501758348997882, "loss": 1.0451, "step": 7081 }, { "epoch": 1.2608618233618234, "grad_norm": 0.7167651057243347, "learning_rate": 0.00015500589437826326, "loss": 0.931, "step": 7082 }, { "epoch": 1.2610398860398861, "grad_norm": 0.6179340481758118, "learning_rate": 0.00015499420418882146, "loss": 1.0953, "step": 7083 }, { "epoch": 1.2612179487179487, "grad_norm": 0.6948468685150146, "learning_rate": 0.00015498251292188247, "loss": 1.0277, "step": 7084 }, { "epoch": 1.2613960113960114, "grad_norm": 0.6256045699119568, "learning_rate": 0.00015497082057767532, "loss": 1.0154, "step": 7085 }, { "epoch": 1.261574074074074, "grad_norm": 0.6457428336143494, "learning_rate": 0.0001549591271564292, "loss": 0.9693, "step": 7086 }, { "epoch": 1.2617521367521367, "grad_norm": 0.722259521484375, "learning_rate": 0.0001549474326583731, "loss": 0.9176, "step": 7087 }, { "epoch": 1.2619301994301995, "grad_norm": 0.742477297782898, "learning_rate": 0.0001549357370837362, "loss": 0.9813, "step": 7088 }, { "epoch": 1.2621082621082622, "grad_norm": 0.5981723666191101, "learning_rate": 0.0001549240404327477, "loss": 0.8943, "step": 7089 }, { "epoch": 1.2622863247863247, "grad_norm": 0.6266574859619141, "learning_rate": 0.00015491234270563665, "loss": 0.8439, "step": 7090 }, { "epoch": 1.2624643874643875, "grad_norm": 0.6723998188972473, "learning_rate": 0.00015490064390263238, "loss": 1.2278, "step": 7091 }, { "epoch": 1.26264245014245, "grad_norm": 0.6628100275993347, "learning_rate": 0.00015488894402396398, "loss": 0.9526, "step": 7092 }, { "epoch": 1.2628205128205128, 
"grad_norm": 0.6661350727081299, "learning_rate": 0.0001548772430698608, "loss": 0.974, "step": 7093 }, { "epoch": 1.2629985754985755, "grad_norm": 0.8210669755935669, "learning_rate": 0.000154865541040552, "loss": 1.1142, "step": 7094 }, { "epoch": 1.2631766381766383, "grad_norm": 0.6329003572463989, "learning_rate": 0.0001548538379362669, "loss": 0.8485, "step": 7095 }, { "epoch": 1.2633547008547008, "grad_norm": 0.6288384795188904, "learning_rate": 0.0001548421337572348, "loss": 0.816, "step": 7096 }, { "epoch": 1.2635327635327636, "grad_norm": 0.631060004234314, "learning_rate": 0.00015483042850368504, "loss": 0.8237, "step": 7097 }, { "epoch": 1.263710826210826, "grad_norm": 0.7343839406967163, "learning_rate": 0.0001548187221758469, "loss": 1.1507, "step": 7098 }, { "epoch": 1.2638888888888888, "grad_norm": 0.6313042640686035, "learning_rate": 0.0001548070147739498, "loss": 0.7762, "step": 7099 }, { "epoch": 1.2640669515669516, "grad_norm": 0.6449850797653198, "learning_rate": 0.00015479530629822308, "loss": 0.9225, "step": 7100 }, { "epoch": 1.2642450142450143, "grad_norm": 0.6371589303016663, "learning_rate": 0.00015478359674889617, "loss": 1.0088, "step": 7101 }, { "epoch": 1.2644230769230769, "grad_norm": 0.6483678221702576, "learning_rate": 0.00015477188612619849, "loss": 0.6234, "step": 7102 }, { "epoch": 1.2646011396011396, "grad_norm": 0.6945441365242004, "learning_rate": 0.00015476017443035947, "loss": 1.123, "step": 7103 }, { "epoch": 1.2647792022792022, "grad_norm": 0.6356340050697327, "learning_rate": 0.00015474846166160856, "loss": 0.9923, "step": 7104 }, { "epoch": 1.264957264957265, "grad_norm": 0.6774702668190002, "learning_rate": 0.00015473674782017532, "loss": 0.9694, "step": 7105 }, { "epoch": 1.2651353276353277, "grad_norm": 0.6332793831825256, "learning_rate": 0.0001547250329062892, "loss": 1.0633, "step": 7106 }, { "epoch": 1.2653133903133904, "grad_norm": 0.6563684344291687, "learning_rate": 0.00015471331692017972, "loss": 1.0893, 
"step": 7107 }, { "epoch": 1.265491452991453, "grad_norm": 0.7318371534347534, "learning_rate": 0.0001547015998620765, "loss": 1.1777, "step": 7108 }, { "epoch": 1.2656695156695157, "grad_norm": 0.7099173069000244, "learning_rate": 0.000154689881732209, "loss": 1.1717, "step": 7109 }, { "epoch": 1.2658475783475782, "grad_norm": 0.661078691482544, "learning_rate": 0.00015467816253080693, "loss": 1.0448, "step": 7110 }, { "epoch": 1.266025641025641, "grad_norm": 0.6206802129745483, "learning_rate": 0.0001546664422580998, "loss": 0.9334, "step": 7111 }, { "epoch": 1.2662037037037037, "grad_norm": 0.6514355540275574, "learning_rate": 0.00015465472091431728, "loss": 0.9533, "step": 7112 }, { "epoch": 1.2663817663817665, "grad_norm": 0.6090209484100342, "learning_rate": 0.0001546429984996891, "loss": 0.9206, "step": 7113 }, { "epoch": 1.266559829059829, "grad_norm": 0.6345987915992737, "learning_rate": 0.00015463127501444488, "loss": 1.0537, "step": 7114 }, { "epoch": 1.2667378917378918, "grad_norm": 0.6095160245895386, "learning_rate": 0.0001546195504588143, "loss": 0.8652, "step": 7115 }, { "epoch": 1.2669159544159543, "grad_norm": 0.6751621961593628, "learning_rate": 0.00015460782483302707, "loss": 0.9001, "step": 7116 }, { "epoch": 1.267094017094017, "grad_norm": 0.6261575222015381, "learning_rate": 0.00015459609813731295, "loss": 0.929, "step": 7117 }, { "epoch": 1.2672720797720798, "grad_norm": 0.589495837688446, "learning_rate": 0.0001545843703719017, "loss": 0.9023, "step": 7118 }, { "epoch": 1.2674501424501425, "grad_norm": 0.6364617943763733, "learning_rate": 0.00015457264153702311, "loss": 0.8261, "step": 7119 }, { "epoch": 1.267628205128205, "grad_norm": 0.6685599684715271, "learning_rate": 0.00015456091163290698, "loss": 1.1267, "step": 7120 }, { "epoch": 1.2678062678062678, "grad_norm": 0.6440932750701904, "learning_rate": 0.0001545491806597831, "loss": 0.9643, "step": 7121 }, { "epoch": 1.2679843304843303, "grad_norm": 0.7641597390174866, "learning_rate": 
0.00015453744861788137, "loss": 1.1577, "step": 7122 }, { "epoch": 1.268162393162393, "grad_norm": 0.6965937614440918, "learning_rate": 0.00015452571550743163, "loss": 0.7835, "step": 7123 }, { "epoch": 1.2683404558404558, "grad_norm": 0.6332844495773315, "learning_rate": 0.00015451398132866376, "loss": 0.9794, "step": 7124 }, { "epoch": 1.2685185185185186, "grad_norm": 0.6719903349876404, "learning_rate": 0.00015450224608180765, "loss": 0.9795, "step": 7125 }, { "epoch": 1.2686965811965811, "grad_norm": 0.567414402961731, "learning_rate": 0.00015449050976709328, "loss": 0.9737, "step": 7126 }, { "epoch": 1.2688746438746439, "grad_norm": 0.6810645461082458, "learning_rate": 0.0001544787723847505, "loss": 1.2358, "step": 7127 }, { "epoch": 1.2690527065527066, "grad_norm": 0.6693191528320312, "learning_rate": 0.00015446703393500938, "loss": 0.9475, "step": 7128 }, { "epoch": 1.2692307692307692, "grad_norm": 0.7077522277832031, "learning_rate": 0.00015445529441809988, "loss": 1.013, "step": 7129 }, { "epoch": 1.269408831908832, "grad_norm": 0.6596258878707886, "learning_rate": 0.000154443553834252, "loss": 1.1506, "step": 7130 }, { "epoch": 1.2695868945868947, "grad_norm": 0.6721500754356384, "learning_rate": 0.0001544318121836958, "loss": 0.8848, "step": 7131 }, { "epoch": 1.2697649572649572, "grad_norm": 0.6943998336791992, "learning_rate": 0.00015442006946666132, "loss": 1.1118, "step": 7132 }, { "epoch": 1.26994301994302, "grad_norm": 0.6132234930992126, "learning_rate": 0.0001544083256833786, "loss": 0.9932, "step": 7133 }, { "epoch": 1.2701210826210827, "grad_norm": 0.7337939739227295, "learning_rate": 0.00015439658083407775, "loss": 1.0973, "step": 7134 }, { "epoch": 1.2702991452991452, "grad_norm": 0.6551772356033325, "learning_rate": 0.00015438483491898893, "loss": 1.0006, "step": 7135 }, { "epoch": 1.270477207977208, "grad_norm": 0.660068929195404, "learning_rate": 0.00015437308793834223, "loss": 0.9291, "step": 7136 }, { "epoch": 1.2706552706552707, 
"grad_norm": 0.7622788548469543, "learning_rate": 0.00015436133989236783, "loss": 1.0782, "step": 7137 }, { "epoch": 1.2708333333333333, "grad_norm": 0.848494291305542, "learning_rate": 0.00015434959078129587, "loss": 1.2001, "step": 7138 }, { "epoch": 1.271011396011396, "grad_norm": 0.6222602725028992, "learning_rate": 0.0001543378406053566, "loss": 1.011, "step": 7139 }, { "epoch": 1.2711894586894588, "grad_norm": 0.6164663434028625, "learning_rate": 0.00015432608936478026, "loss": 1.0282, "step": 7140 }, { "epoch": 1.2713675213675213, "grad_norm": 0.7236546277999878, "learning_rate": 0.000154314337059797, "loss": 1.0112, "step": 7141 }, { "epoch": 1.271545584045584, "grad_norm": 0.6891111135482788, "learning_rate": 0.00015430258369063715, "loss": 1.1191, "step": 7142 }, { "epoch": 1.2717236467236468, "grad_norm": 0.6600295901298523, "learning_rate": 0.00015429082925753099, "loss": 0.9561, "step": 7143 }, { "epoch": 1.2719017094017093, "grad_norm": 0.6819902062416077, "learning_rate": 0.0001542790737607088, "loss": 1.0631, "step": 7144 }, { "epoch": 1.272079772079772, "grad_norm": 0.6518470644950867, "learning_rate": 0.0001542673172004009, "loss": 1.0806, "step": 7145 }, { "epoch": 1.2722578347578348, "grad_norm": 0.737501859664917, "learning_rate": 0.00015425555957683767, "loss": 1.0144, "step": 7146 }, { "epoch": 1.2724358974358974, "grad_norm": 0.6245740652084351, "learning_rate": 0.00015424380089024944, "loss": 1.0612, "step": 7147 }, { "epoch": 1.27261396011396, "grad_norm": 0.7118125557899475, "learning_rate": 0.0001542320411408666, "loss": 1.1458, "step": 7148 }, { "epoch": 1.2727920227920229, "grad_norm": 0.6965761780738831, "learning_rate": 0.00015422028032891958, "loss": 0.8052, "step": 7149 }, { "epoch": 1.2729700854700854, "grad_norm": 0.7661466598510742, "learning_rate": 0.0001542085184546388, "loss": 1.1245, "step": 7150 }, { "epoch": 1.2731481481481481, "grad_norm": 0.7238876223564148, "learning_rate": 0.00015419675551825475, "loss": 0.9346, 
"step": 7151 }, { "epoch": 1.273326210826211, "grad_norm": 0.669562041759491, "learning_rate": 0.0001541849915199978, "loss": 0.7816, "step": 7152 }, { "epoch": 1.2735042735042734, "grad_norm": 0.6799174547195435, "learning_rate": 0.00015417322646009855, "loss": 1.047, "step": 7153 }, { "epoch": 1.2736823361823362, "grad_norm": 0.6012796759605408, "learning_rate": 0.00015416146033878745, "loss": 1.0101, "step": 7154 }, { "epoch": 1.273860398860399, "grad_norm": 0.7008427977561951, "learning_rate": 0.00015414969315629505, "loss": 1.1321, "step": 7155 }, { "epoch": 1.2740384615384617, "grad_norm": 0.6555556058883667, "learning_rate": 0.0001541379249128519, "loss": 0.9926, "step": 7156 }, { "epoch": 1.2742165242165242, "grad_norm": 0.6324251294136047, "learning_rate": 0.00015412615560868854, "loss": 0.9051, "step": 7157 }, { "epoch": 1.274394586894587, "grad_norm": 0.6035568714141846, "learning_rate": 0.0001541143852440356, "loss": 0.8248, "step": 7158 }, { "epoch": 1.2745726495726495, "grad_norm": 0.6733569502830505, "learning_rate": 0.0001541026138191237, "loss": 0.9149, "step": 7159 }, { "epoch": 1.2747507122507122, "grad_norm": 0.8306798338890076, "learning_rate": 0.0001540908413341835, "loss": 1.0694, "step": 7160 }, { "epoch": 1.274928774928775, "grad_norm": 0.6649713516235352, "learning_rate": 0.00015407906778944563, "loss": 1.1358, "step": 7161 }, { "epoch": 1.2751068376068377, "grad_norm": 0.6889697909355164, "learning_rate": 0.00015406729318514074, "loss": 1.0096, "step": 7162 }, { "epoch": 1.2752849002849003, "grad_norm": 0.6948645114898682, "learning_rate": 0.0001540555175214996, "loss": 1.0649, "step": 7163 }, { "epoch": 1.275462962962963, "grad_norm": 0.6844844818115234, "learning_rate": 0.0001540437407987528, "loss": 0.884, "step": 7164 }, { "epoch": 1.2756410256410255, "grad_norm": 0.7124526500701904, "learning_rate": 0.00015403196301713124, "loss": 1.1307, "step": 7165 }, { "epoch": 1.2758190883190883, "grad_norm": 0.7328375577926636, "learning_rate": 
0.00015402018417686556, "loss": 1.0348, "step": 7166 }, { "epoch": 1.275997150997151, "grad_norm": 0.5872696042060852, "learning_rate": 0.00015400840427818663, "loss": 0.9827, "step": 7167 }, { "epoch": 1.2761752136752138, "grad_norm": 0.6370702385902405, "learning_rate": 0.00015399662332132519, "loss": 0.9171, "step": 7168 }, { "epoch": 1.2763532763532763, "grad_norm": 0.6481866240501404, "learning_rate": 0.00015398484130651205, "loss": 0.8704, "step": 7169 }, { "epoch": 1.276531339031339, "grad_norm": 0.598739743232727, "learning_rate": 0.00015397305823397812, "loss": 0.8097, "step": 7170 }, { "epoch": 1.2767094017094016, "grad_norm": 0.5941228270530701, "learning_rate": 0.00015396127410395423, "loss": 0.8853, "step": 7171 }, { "epoch": 1.2768874643874644, "grad_norm": 0.6485885381698608, "learning_rate": 0.00015394948891667127, "loss": 0.702, "step": 7172 }, { "epoch": 1.2770655270655271, "grad_norm": 0.5314942598342896, "learning_rate": 0.00015393770267236017, "loss": 0.7899, "step": 7173 }, { "epoch": 1.2772435897435899, "grad_norm": 0.6113781929016113, "learning_rate": 0.00015392591537125182, "loss": 0.9871, "step": 7174 }, { "epoch": 1.2774216524216524, "grad_norm": 0.5625866651535034, "learning_rate": 0.00015391412701357715, "loss": 0.8246, "step": 7175 }, { "epoch": 1.2775997150997151, "grad_norm": 0.6006998419761658, "learning_rate": 0.00015390233759956718, "loss": 0.899, "step": 7176 }, { "epoch": 1.2777777777777777, "grad_norm": 0.6916918158531189, "learning_rate": 0.0001538905471294529, "loss": 1.0443, "step": 7177 }, { "epoch": 1.2779558404558404, "grad_norm": 0.6263536810874939, "learning_rate": 0.00015387875560346525, "loss": 0.9159, "step": 7178 }, { "epoch": 1.2781339031339032, "grad_norm": 0.6563085913658142, "learning_rate": 0.00015386696302183535, "loss": 0.994, "step": 7179 }, { "epoch": 1.278311965811966, "grad_norm": 0.6312007904052734, "learning_rate": 0.00015385516938479416, "loss": 0.9148, "step": 7180 }, { "epoch": 1.2784900284900285, 
"grad_norm": 0.6408209204673767, "learning_rate": 0.00015384337469257284, "loss": 1.0508, "step": 7181 }, { "epoch": 1.2786680911680912, "grad_norm": 0.656234085559845, "learning_rate": 0.00015383157894540244, "loss": 0.9952, "step": 7182 }, { "epoch": 1.2788461538461537, "grad_norm": 0.7401639819145203, "learning_rate": 0.00015381978214351407, "loss": 1.1615, "step": 7183 }, { "epoch": 1.2790242165242165, "grad_norm": 0.5746055841445923, "learning_rate": 0.00015380798428713885, "loss": 0.9142, "step": 7184 }, { "epoch": 1.2792022792022792, "grad_norm": 0.8061720728874207, "learning_rate": 0.00015379618537650797, "loss": 1.13, "step": 7185 }, { "epoch": 1.279380341880342, "grad_norm": 0.6336073875427246, "learning_rate": 0.0001537843854118526, "loss": 1.0581, "step": 7186 }, { "epoch": 1.2795584045584045, "grad_norm": 0.6549856066703796, "learning_rate": 0.0001537725843934039, "loss": 1.09, "step": 7187 }, { "epoch": 1.2797364672364673, "grad_norm": 0.5759010910987854, "learning_rate": 0.00015376078232139315, "loss": 0.8441, "step": 7188 }, { "epoch": 1.2799145299145298, "grad_norm": 0.5733884572982788, "learning_rate": 0.00015374897919605152, "loss": 0.9086, "step": 7189 }, { "epoch": 1.2800925925925926, "grad_norm": 0.6505870819091797, "learning_rate": 0.0001537371750176103, "loss": 1.1683, "step": 7190 }, { "epoch": 1.2802706552706553, "grad_norm": 0.6744688153266907, "learning_rate": 0.00015372536978630077, "loss": 0.9483, "step": 7191 }, { "epoch": 1.280448717948718, "grad_norm": 0.598098874092102, "learning_rate": 0.0001537135635023542, "loss": 0.7747, "step": 7192 }, { "epoch": 1.2806267806267806, "grad_norm": 0.6711761951446533, "learning_rate": 0.00015370175616600195, "loss": 1.1897, "step": 7193 }, { "epoch": 1.2808048433048433, "grad_norm": 0.6207453608512878, "learning_rate": 0.00015368994777747536, "loss": 1.0063, "step": 7194 }, { "epoch": 1.2809829059829059, "grad_norm": 0.6701686382293701, "learning_rate": 0.00015367813833700575, "loss": 1.0864, 
"step": 7195 }, { "epoch": 1.2811609686609686, "grad_norm": 0.5916469693183899, "learning_rate": 0.00015366632784482456, "loss": 0.8786, "step": 7196 }, { "epoch": 1.2813390313390314, "grad_norm": 0.6567547917366028, "learning_rate": 0.00015365451630116312, "loss": 0.9977, "step": 7197 }, { "epoch": 1.2815170940170941, "grad_norm": 0.7287433743476868, "learning_rate": 0.00015364270370625294, "loss": 1.1248, "step": 7198 }, { "epoch": 1.2816951566951567, "grad_norm": 0.7736039161682129, "learning_rate": 0.0001536308900603254, "loss": 0.9832, "step": 7199 }, { "epoch": 1.2818732193732194, "grad_norm": 0.6799852252006531, "learning_rate": 0.00015361907536361194, "loss": 1.0275, "step": 7200 }, { "epoch": 1.282051282051282, "grad_norm": 0.5975812673568726, "learning_rate": 0.00015360725961634407, "loss": 1.0516, "step": 7201 }, { "epoch": 1.2822293447293447, "grad_norm": 0.616307258605957, "learning_rate": 0.00015359544281875337, "loss": 0.8095, "step": 7202 }, { "epoch": 1.2824074074074074, "grad_norm": 0.6357580423355103, "learning_rate": 0.00015358362497107126, "loss": 0.9186, "step": 7203 }, { "epoch": 1.2825854700854702, "grad_norm": 0.679333508014679, "learning_rate": 0.00015357180607352935, "loss": 0.9433, "step": 7204 }, { "epoch": 1.2827635327635327, "grad_norm": 0.6345439553260803, "learning_rate": 0.00015355998612635914, "loss": 0.9186, "step": 7205 }, { "epoch": 1.2829415954415955, "grad_norm": 0.6256508827209473, "learning_rate": 0.00015354816512979231, "loss": 0.9984, "step": 7206 }, { "epoch": 1.283119658119658, "grad_norm": 0.7973852753639221, "learning_rate": 0.00015353634308406044, "loss": 1.1145, "step": 7207 }, { "epoch": 1.2832977207977208, "grad_norm": 0.711125910282135, "learning_rate": 0.0001535245199893951, "loss": 1.1947, "step": 7208 }, { "epoch": 1.2834757834757835, "grad_norm": 0.6096055507659912, "learning_rate": 0.00015351269584602798, "loss": 1.0078, "step": 7209 }, { "epoch": 1.2836538461538463, "grad_norm": 0.7089232802391052, 
"learning_rate": 0.00015350087065419077, "loss": 1.112, "step": 7210 }, { "epoch": 1.2838319088319088, "grad_norm": 0.716199517250061, "learning_rate": 0.00015348904441411508, "loss": 1.1015, "step": 7211 }, { "epoch": 1.2840099715099715, "grad_norm": 0.6374632716178894, "learning_rate": 0.00015347721712603276, "loss": 1.0519, "step": 7212 }, { "epoch": 1.284188034188034, "grad_norm": 0.6500036716461182, "learning_rate": 0.0001534653887901754, "loss": 1.1719, "step": 7213 }, { "epoch": 1.2843660968660968, "grad_norm": 0.7249937653541565, "learning_rate": 0.00015345355940677485, "loss": 1.0188, "step": 7214 }, { "epoch": 1.2845441595441596, "grad_norm": 0.6645919680595398, "learning_rate": 0.00015344172897606285, "loss": 0.9788, "step": 7215 }, { "epoch": 1.2847222222222223, "grad_norm": 0.7032710313796997, "learning_rate": 0.00015342989749827113, "loss": 1.1093, "step": 7216 }, { "epoch": 1.2849002849002849, "grad_norm": 0.622767984867096, "learning_rate": 0.0001534180649736316, "loss": 0.8978, "step": 7217 }, { "epoch": 1.2850783475783476, "grad_norm": 0.7499693036079407, "learning_rate": 0.00015340623140237605, "loss": 1.2232, "step": 7218 }, { "epoch": 1.2852564102564101, "grad_norm": 0.6308625936508179, "learning_rate": 0.00015339439678473636, "loss": 0.8621, "step": 7219 }, { "epoch": 1.2854344729344729, "grad_norm": 0.6513667106628418, "learning_rate": 0.00015338256112094434, "loss": 1.0541, "step": 7220 }, { "epoch": 1.2856125356125356, "grad_norm": 0.6080937385559082, "learning_rate": 0.00015337072441123193, "loss": 0.8474, "step": 7221 }, { "epoch": 1.2857905982905984, "grad_norm": 0.6742652058601379, "learning_rate": 0.00015335888665583104, "loss": 1.0172, "step": 7222 }, { "epoch": 1.285968660968661, "grad_norm": 0.620810866355896, "learning_rate": 0.00015334704785497364, "loss": 1.049, "step": 7223 }, { "epoch": 1.2861467236467237, "grad_norm": 0.5733018517494202, "learning_rate": 0.00015333520800889165, "loss": 0.7371, "step": 7224 }, { "epoch": 
1.2863247863247862, "grad_norm": 0.6447640061378479, "learning_rate": 0.00015332336711781702, "loss": 0.9925, "step": 7225 }, { "epoch": 1.286502849002849, "grad_norm": 0.6764999628067017, "learning_rate": 0.00015331152518198183, "loss": 0.9052, "step": 7226 }, { "epoch": 1.2866809116809117, "grad_norm": 0.6492836475372314, "learning_rate": 0.00015329968220161803, "loss": 0.9493, "step": 7227 }, { "epoch": 1.2868589743589745, "grad_norm": 0.666157603263855, "learning_rate": 0.00015328783817695766, "loss": 1.0626, "step": 7228 }, { "epoch": 1.287037037037037, "grad_norm": 0.7098026871681213, "learning_rate": 0.00015327599310823283, "loss": 1.0461, "step": 7229 }, { "epoch": 1.2872150997150997, "grad_norm": 0.637778103351593, "learning_rate": 0.00015326414699567555, "loss": 0.9383, "step": 7230 }, { "epoch": 1.2873931623931623, "grad_norm": 0.6816399693489075, "learning_rate": 0.00015325229983951798, "loss": 1.0647, "step": 7231 }, { "epoch": 1.287571225071225, "grad_norm": 0.668689489364624, "learning_rate": 0.0001532404516399922, "loss": 1.0479, "step": 7232 }, { "epoch": 1.2877492877492878, "grad_norm": 0.6459103226661682, "learning_rate": 0.0001532286023973304, "loss": 1.1751, "step": 7233 }, { "epoch": 1.2879273504273505, "grad_norm": 0.679999589920044, "learning_rate": 0.00015321675211176468, "loss": 0.7541, "step": 7234 }, { "epoch": 1.288105413105413, "grad_norm": 0.5415067672729492, "learning_rate": 0.00015320490078352724, "loss": 0.822, "step": 7235 }, { "epoch": 1.2882834757834758, "grad_norm": 0.6817963719367981, "learning_rate": 0.00015319304841285032, "loss": 0.9424, "step": 7236 }, { "epoch": 1.2884615384615383, "grad_norm": 0.6187505125999451, "learning_rate": 0.0001531811949999661, "loss": 0.8596, "step": 7237 }, { "epoch": 1.288639601139601, "grad_norm": 0.6737838387489319, "learning_rate": 0.00015316934054510685, "loss": 1.0046, "step": 7238 }, { "epoch": 1.2888176638176638, "grad_norm": 0.6445996761322021, "learning_rate": 0.00015315748504850482, 
"loss": 1.01, "step": 7239 }, { "epoch": 1.2889957264957266, "grad_norm": 0.7279136180877686, "learning_rate": 0.0001531456285103923, "loss": 0.9066, "step": 7240 }, { "epoch": 1.289173789173789, "grad_norm": 0.6619178652763367, "learning_rate": 0.00015313377093100153, "loss": 0.8977, "step": 7241 }, { "epoch": 1.2893518518518519, "grad_norm": 0.7644323110580444, "learning_rate": 0.000153121912310565, "loss": 1.3085, "step": 7242 }, { "epoch": 1.2895299145299146, "grad_norm": 0.645882248878479, "learning_rate": 0.00015311005264931487, "loss": 1.0337, "step": 7243 }, { "epoch": 1.2897079772079771, "grad_norm": 0.6868017911911011, "learning_rate": 0.0001530981919474836, "loss": 0.9616, "step": 7244 }, { "epoch": 1.28988603988604, "grad_norm": 0.7176693677902222, "learning_rate": 0.00015308633020530362, "loss": 1.1975, "step": 7245 }, { "epoch": 1.2900641025641026, "grad_norm": 0.7358015775680542, "learning_rate": 0.00015307446742300718, "loss": 0.9308, "step": 7246 }, { "epoch": 1.2902421652421652, "grad_norm": 0.7330248355865479, "learning_rate": 0.00015306260360082688, "loss": 0.9518, "step": 7247 }, { "epoch": 1.290420227920228, "grad_norm": 0.6571981310844421, "learning_rate": 0.00015305073873899503, "loss": 0.9531, "step": 7248 }, { "epoch": 1.2905982905982907, "grad_norm": 0.5968486666679382, "learning_rate": 0.00015303887283774417, "loss": 0.9245, "step": 7249 }, { "epoch": 1.2907763532763532, "grad_norm": 0.6398176550865173, "learning_rate": 0.0001530270058973068, "loss": 1.0452, "step": 7250 }, { "epoch": 1.290954415954416, "grad_norm": 0.5462267994880676, "learning_rate": 0.00015301513791791542, "loss": 0.8451, "step": 7251 }, { "epoch": 1.2911324786324787, "grad_norm": 0.7536166906356812, "learning_rate": 0.00015300326889980252, "loss": 1.0086, "step": 7252 }, { "epoch": 1.2913105413105412, "grad_norm": 0.6208569407463074, "learning_rate": 0.00015299139884320065, "loss": 0.7437, "step": 7253 }, { "epoch": 1.291488603988604, "grad_norm": 0.7025452852249146, 
"learning_rate": 0.00015297952774834242, "loss": 0.8874, "step": 7254 }, { "epoch": 1.2916666666666667, "grad_norm": 0.6758308410644531, "learning_rate": 0.00015296765561546041, "loss": 1.0378, "step": 7255 }, { "epoch": 1.2918447293447293, "grad_norm": 0.7170431613922119, "learning_rate": 0.00015295578244478724, "loss": 1.0111, "step": 7256 }, { "epoch": 1.292022792022792, "grad_norm": 0.6263511180877686, "learning_rate": 0.00015294390823655544, "loss": 0.7836, "step": 7257 }, { "epoch": 1.2922008547008548, "grad_norm": 0.5887803435325623, "learning_rate": 0.0001529320329909978, "loss": 1.068, "step": 7258 }, { "epoch": 1.2923789173789173, "grad_norm": 0.5955889821052551, "learning_rate": 0.00015292015670834692, "loss": 0.8903, "step": 7259 }, { "epoch": 1.29255698005698, "grad_norm": 0.630449652671814, "learning_rate": 0.00015290827938883552, "loss": 1.1096, "step": 7260 }, { "epoch": 1.2927350427350428, "grad_norm": 0.7405480146408081, "learning_rate": 0.00015289640103269625, "loss": 1.0648, "step": 7261 }, { "epoch": 1.2929131054131053, "grad_norm": 0.6082221865653992, "learning_rate": 0.00015288452164016191, "loss": 0.9266, "step": 7262 }, { "epoch": 1.293091168091168, "grad_norm": 0.6211720108985901, "learning_rate": 0.00015287264121146524, "loss": 0.849, "step": 7263 }, { "epoch": 1.2932692307692308, "grad_norm": 0.6481043100357056, "learning_rate": 0.00015286075974683898, "loss": 0.7761, "step": 7264 }, { "epoch": 1.2934472934472934, "grad_norm": 0.5957167744636536, "learning_rate": 0.00015284887724651593, "loss": 0.8942, "step": 7265 }, { "epoch": 1.2936253561253561, "grad_norm": 0.7272268533706665, "learning_rate": 0.00015283699371072894, "loss": 1.0913, "step": 7266 }, { "epoch": 1.2938034188034189, "grad_norm": 0.5902758836746216, "learning_rate": 0.0001528251091397108, "loss": 1.1045, "step": 7267 }, { "epoch": 1.2939814814814814, "grad_norm": 0.6382482051849365, "learning_rate": 0.00015281322353369436, "loss": 0.9265, "step": 7268 }, { "epoch": 
1.2941595441595442, "grad_norm": 0.6556048393249512, "learning_rate": 0.00015280133689291256, "loss": 1.0536, "step": 7269 }, { "epoch": 1.294337606837607, "grad_norm": 0.680895209312439, "learning_rate": 0.00015278944921759822, "loss": 0.9996, "step": 7270 }, { "epoch": 1.2945156695156697, "grad_norm": 0.670317530632019, "learning_rate": 0.00015277756050798428, "loss": 1.1402, "step": 7271 }, { "epoch": 1.2946937321937322, "grad_norm": 0.6312688589096069, "learning_rate": 0.0001527656707643037, "loss": 1.0669, "step": 7272 }, { "epoch": 1.294871794871795, "grad_norm": 0.6267009973526001, "learning_rate": 0.0001527537799867894, "loss": 0.8985, "step": 7273 }, { "epoch": 1.2950498575498575, "grad_norm": 0.7069001197814941, "learning_rate": 0.00015274188817567436, "loss": 0.9478, "step": 7274 }, { "epoch": 1.2952279202279202, "grad_norm": 0.7229067087173462, "learning_rate": 0.00015272999533119162, "loss": 0.9005, "step": 7275 }, { "epoch": 1.295405982905983, "grad_norm": 0.6254632472991943, "learning_rate": 0.00015271810145357412, "loss": 0.9746, "step": 7276 }, { "epoch": 1.2955840455840457, "grad_norm": 0.6772669553756714, "learning_rate": 0.00015270620654305494, "loss": 1.1714, "step": 7277 }, { "epoch": 1.2957621082621082, "grad_norm": 0.605576753616333, "learning_rate": 0.00015269431059986713, "loss": 0.7735, "step": 7278 }, { "epoch": 1.295940170940171, "grad_norm": 0.7144771814346313, "learning_rate": 0.00015268241362424378, "loss": 0.9757, "step": 7279 }, { "epoch": 1.2961182336182335, "grad_norm": 0.5275486707687378, "learning_rate": 0.00015267051561641798, "loss": 0.5669, "step": 7280 }, { "epoch": 1.2962962962962963, "grad_norm": 0.6619452238082886, "learning_rate": 0.00015265861657662284, "loss": 0.9511, "step": 7281 }, { "epoch": 1.296474358974359, "grad_norm": 0.6788223385810852, "learning_rate": 0.00015264671650509147, "loss": 1.2649, "step": 7282 }, { "epoch": 1.2966524216524218, "grad_norm": 0.6198732852935791, "learning_rate": 
0.00015263481540205706, "loss": 1.0659, "step": 7283 }, { "epoch": 1.2968304843304843, "grad_norm": 0.6038815975189209, "learning_rate": 0.0001526229132677528, "loss": 1.0655, "step": 7284 }, { "epoch": 1.297008547008547, "grad_norm": 0.7616196870803833, "learning_rate": 0.00015261101010241186, "loss": 1.131, "step": 7285 }, { "epoch": 1.2971866096866096, "grad_norm": 0.7002527713775635, "learning_rate": 0.00015259910590626746, "loss": 1.1375, "step": 7286 }, { "epoch": 1.2973646723646723, "grad_norm": 0.6067437529563904, "learning_rate": 0.00015258720067955284, "loss": 0.9306, "step": 7287 }, { "epoch": 1.297542735042735, "grad_norm": 0.653232216835022, "learning_rate": 0.00015257529442250128, "loss": 1.107, "step": 7288 }, { "epoch": 1.2977207977207978, "grad_norm": 0.6969175934791565, "learning_rate": 0.00015256338713534603, "loss": 0.8365, "step": 7289 }, { "epoch": 1.2978988603988604, "grad_norm": 0.6176731586456299, "learning_rate": 0.00015255147881832043, "loss": 0.9707, "step": 7290 }, { "epoch": 1.2980769230769231, "grad_norm": 0.6543741822242737, "learning_rate": 0.00015253956947165772, "loss": 0.7714, "step": 7291 }, { "epoch": 1.2982549857549857, "grad_norm": 0.5224920511245728, "learning_rate": 0.00015252765909559135, "loss": 0.7469, "step": 7292 }, { "epoch": 1.2984330484330484, "grad_norm": 0.638708770275116, "learning_rate": 0.00015251574769035455, "loss": 1.0965, "step": 7293 }, { "epoch": 1.2986111111111112, "grad_norm": 0.6742943525314331, "learning_rate": 0.0001525038352561808, "loss": 1.1286, "step": 7294 }, { "epoch": 1.298789173789174, "grad_norm": 0.6027839183807373, "learning_rate": 0.00015249192179330346, "loss": 0.8824, "step": 7295 }, { "epoch": 1.2989672364672364, "grad_norm": 0.7462167143821716, "learning_rate": 0.00015248000730195597, "loss": 0.94, "step": 7296 }, { "epoch": 1.2991452991452992, "grad_norm": 0.6972534656524658, "learning_rate": 0.00015246809178237172, "loss": 1.0664, "step": 7297 }, { "epoch": 1.2993233618233617, 
"grad_norm": 0.569949209690094, "learning_rate": 0.0001524561752347842, "loss": 0.691, "step": 7298 }, { "epoch": 1.2995014245014245, "grad_norm": 0.6066586375236511, "learning_rate": 0.00015244425765942695, "loss": 1.083, "step": 7299 }, { "epoch": 1.2996794871794872, "grad_norm": 0.6927483677864075, "learning_rate": 0.00015243233905653337, "loss": 1.0068, "step": 7300 }, { "epoch": 1.29985754985755, "grad_norm": 0.752824604511261, "learning_rate": 0.00015242041942633704, "loss": 0.9946, "step": 7301 }, { "epoch": 1.3000356125356125, "grad_norm": 0.6532080173492432, "learning_rate": 0.0001524084987690715, "loss": 1.2326, "step": 7302 }, { "epoch": 1.3002136752136753, "grad_norm": 0.7954180836677551, "learning_rate": 0.0001523965770849703, "loss": 1.1105, "step": 7303 }, { "epoch": 1.3003917378917378, "grad_norm": 0.5971781015396118, "learning_rate": 0.000152384654374267, "loss": 1.0984, "step": 7304 }, { "epoch": 1.3005698005698005, "grad_norm": 0.7778682112693787, "learning_rate": 0.0001523727306371952, "loss": 1.0795, "step": 7305 }, { "epoch": 1.3007478632478633, "grad_norm": 0.6712004542350769, "learning_rate": 0.00015236080587398856, "loss": 1.0814, "step": 7306 }, { "epoch": 1.300925925925926, "grad_norm": 0.581048846244812, "learning_rate": 0.00015234888008488066, "loss": 0.9868, "step": 7307 }, { "epoch": 1.3011039886039886, "grad_norm": 0.697695791721344, "learning_rate": 0.00015233695327010523, "loss": 1.1045, "step": 7308 }, { "epoch": 1.3012820512820513, "grad_norm": 0.6858421564102173, "learning_rate": 0.00015232502542989593, "loss": 1.0769, "step": 7309 }, { "epoch": 1.3014601139601139, "grad_norm": 0.6312826871871948, "learning_rate": 0.00015231309656448642, "loss": 0.9523, "step": 7310 }, { "epoch": 1.3016381766381766, "grad_norm": 0.9243300557136536, "learning_rate": 0.0001523011666741105, "loss": 0.947, "step": 7311 }, { "epoch": 1.3018162393162394, "grad_norm": 0.6808217763900757, "learning_rate": 0.00015228923575900184, "loss": 0.8631, "step": 
7312 }, { "epoch": 1.301994301994302, "grad_norm": 0.6713891625404358, "learning_rate": 0.00015227730381939424, "loss": 0.9157, "step": 7313 }, { "epoch": 1.3021723646723646, "grad_norm": 0.6802582740783691, "learning_rate": 0.00015226537085552146, "loss": 1.041, "step": 7314 }, { "epoch": 1.3023504273504274, "grad_norm": 0.6543951034545898, "learning_rate": 0.0001522534368676173, "loss": 0.8709, "step": 7315 }, { "epoch": 1.30252849002849, "grad_norm": 0.6290678381919861, "learning_rate": 0.0001522415018559156, "loss": 1.0568, "step": 7316 }, { "epoch": 1.3027065527065527, "grad_norm": 0.6590015292167664, "learning_rate": 0.0001522295658206502, "loss": 0.9919, "step": 7317 }, { "epoch": 1.3028846153846154, "grad_norm": 0.6374103426933289, "learning_rate": 0.00015221762876205494, "loss": 0.878, "step": 7318 }, { "epoch": 1.3030626780626782, "grad_norm": 0.7247048616409302, "learning_rate": 0.00015220569068036372, "loss": 1.061, "step": 7319 }, { "epoch": 1.3032407407407407, "grad_norm": 0.6450991630554199, "learning_rate": 0.00015219375157581047, "loss": 0.9389, "step": 7320 }, { "epoch": 1.3034188034188035, "grad_norm": 0.8039840459823608, "learning_rate": 0.00015218181144862903, "loss": 1.0692, "step": 7321 }, { "epoch": 1.303596866096866, "grad_norm": 0.6539456248283386, "learning_rate": 0.00015216987029905346, "loss": 1.0478, "step": 7322 }, { "epoch": 1.3037749287749287, "grad_norm": 0.60880047082901, "learning_rate": 0.00015215792812731758, "loss": 0.8412, "step": 7323 }, { "epoch": 1.3039529914529915, "grad_norm": 0.6757258176803589, "learning_rate": 0.0001521459849336555, "loss": 0.896, "step": 7324 }, { "epoch": 1.3041310541310542, "grad_norm": 0.6735622882843018, "learning_rate": 0.00015213404071830116, "loss": 1.1078, "step": 7325 }, { "epoch": 1.3043091168091168, "grad_norm": 0.7321233749389648, "learning_rate": 0.00015212209548148858, "loss": 1.1021, "step": 7326 }, { "epoch": 1.3044871794871795, "grad_norm": 0.6678910851478577, "learning_rate": 
0.00015211014922345182, "loss": 1.0043, "step": 7327 }, { "epoch": 1.304665242165242, "grad_norm": 0.6876940727233887, "learning_rate": 0.0001520982019444249, "loss": 1.0376, "step": 7328 }, { "epoch": 1.3048433048433048, "grad_norm": 0.6171853542327881, "learning_rate": 0.00015208625364464195, "loss": 0.839, "step": 7329 }, { "epoch": 1.3050213675213675, "grad_norm": 0.6449569463729858, "learning_rate": 0.0001520743043243371, "loss": 1.0908, "step": 7330 }, { "epoch": 1.3051994301994303, "grad_norm": 0.6894628405570984, "learning_rate": 0.00015206235398374443, "loss": 1.0263, "step": 7331 }, { "epoch": 1.3053774928774928, "grad_norm": 0.5853552222251892, "learning_rate": 0.00015205040262309804, "loss": 0.8342, "step": 7332 }, { "epoch": 1.3055555555555556, "grad_norm": 0.5934799313545227, "learning_rate": 0.00015203845024263214, "loss": 0.9464, "step": 7333 }, { "epoch": 1.305733618233618, "grad_norm": 0.668927788734436, "learning_rate": 0.00015202649684258095, "loss": 0.9018, "step": 7334 }, { "epoch": 1.3059116809116809, "grad_norm": 0.676810085773468, "learning_rate": 0.0001520145424231786, "loss": 0.9284, "step": 7335 }, { "epoch": 1.3060897435897436, "grad_norm": 0.6223878264427185, "learning_rate": 0.00015200258698465935, "loss": 1.0779, "step": 7336 }, { "epoch": 1.3062678062678064, "grad_norm": 0.6092363595962524, "learning_rate": 0.00015199063052725745, "loss": 0.8602, "step": 7337 }, { "epoch": 1.306445868945869, "grad_norm": 0.7668731212615967, "learning_rate": 0.00015197867305120712, "loss": 1.0756, "step": 7338 }, { "epoch": 1.3066239316239316, "grad_norm": 0.6485331654548645, "learning_rate": 0.00015196671455674268, "loss": 1.0193, "step": 7339 }, { "epoch": 1.3068019943019942, "grad_norm": 0.5661036372184753, "learning_rate": 0.0001519547550440984, "loss": 0.8321, "step": 7340 }, { "epoch": 1.306980056980057, "grad_norm": 0.6270507574081421, "learning_rate": 0.00015194279451350866, "loss": 0.6403, "step": 7341 }, { "epoch": 1.3071581196581197, 
"grad_norm": 0.7283764481544495, "learning_rate": 0.00015193083296520773, "loss": 1.0401, "step": 7342 }, { "epoch": 1.3073361823361824, "grad_norm": 0.658835232257843, "learning_rate": 0.00015191887039943, "loss": 1.0172, "step": 7343 }, { "epoch": 1.307514245014245, "grad_norm": 0.6288984417915344, "learning_rate": 0.00015190690681640988, "loss": 0.8649, "step": 7344 }, { "epoch": 1.3076923076923077, "grad_norm": 0.666442334651947, "learning_rate": 0.00015189494221638176, "loss": 1.0757, "step": 7345 }, { "epoch": 1.3078703703703702, "grad_norm": 0.6116433143615723, "learning_rate": 0.00015188297659958003, "loss": 0.9244, "step": 7346 }, { "epoch": 1.308048433048433, "grad_norm": 0.6378964185714722, "learning_rate": 0.0001518710099662392, "loss": 0.9629, "step": 7347 }, { "epoch": 1.3082264957264957, "grad_norm": 0.6258945465087891, "learning_rate": 0.00015185904231659357, "loss": 0.8524, "step": 7348 }, { "epoch": 1.3084045584045585, "grad_norm": 0.6498504877090454, "learning_rate": 0.0001518470736508778, "loss": 0.9685, "step": 7349 }, { "epoch": 1.308582621082621, "grad_norm": 0.6928247809410095, "learning_rate": 0.00015183510396932635, "loss": 0.9054, "step": 7350 }, { "epoch": 1.3087606837606838, "grad_norm": 0.6350936889648438, "learning_rate": 0.0001518231332721737, "loss": 1.0039, "step": 7351 }, { "epoch": 1.3089387464387463, "grad_norm": 0.6652286648750305, "learning_rate": 0.00015181116155965437, "loss": 0.8946, "step": 7352 }, { "epoch": 1.309116809116809, "grad_norm": 0.6554864048957825, "learning_rate": 0.000151799188832003, "loss": 0.9518, "step": 7353 }, { "epoch": 1.3092948717948718, "grad_norm": 0.7523114085197449, "learning_rate": 0.0001517872150894541, "loss": 0.9462, "step": 7354 }, { "epoch": 1.3094729344729346, "grad_norm": 0.7113336324691772, "learning_rate": 0.0001517752403322423, "loss": 1.2347, "step": 7355 }, { "epoch": 1.309650997150997, "grad_norm": 0.6461622714996338, "learning_rate": 0.00015176326456060223, "loss": 0.8891, "step": 
7356 }, { "epoch": 1.3098290598290598, "grad_norm": 0.7429143190383911, "learning_rate": 0.00015175128777476852, "loss": 1.1944, "step": 7357 }, { "epoch": 1.3100071225071226, "grad_norm": 0.6816306114196777, "learning_rate": 0.00015173930997497585, "loss": 1.1445, "step": 7358 }, { "epoch": 1.3101851851851851, "grad_norm": 0.6644450426101685, "learning_rate": 0.00015172733116145884, "loss": 0.9808, "step": 7359 }, { "epoch": 1.3103632478632479, "grad_norm": 0.6921063661575317, "learning_rate": 0.00015171535133445225, "loss": 1.0162, "step": 7360 }, { "epoch": 1.3105413105413106, "grad_norm": 0.6386187672615051, "learning_rate": 0.00015170337049419082, "loss": 0.9951, "step": 7361 }, { "epoch": 1.3107193732193732, "grad_norm": 0.6505418419837952, "learning_rate": 0.0001516913886409092, "loss": 0.8872, "step": 7362 }, { "epoch": 1.310897435897436, "grad_norm": 0.6415576934814453, "learning_rate": 0.00015167940577484222, "loss": 1.056, "step": 7363 }, { "epoch": 1.3110754985754987, "grad_norm": 0.6691195964813232, "learning_rate": 0.00015166742189622458, "loss": 1.0561, "step": 7364 }, { "epoch": 1.3112535612535612, "grad_norm": 0.6376257538795471, "learning_rate": 0.00015165543700529122, "loss": 0.8499, "step": 7365 }, { "epoch": 1.311431623931624, "grad_norm": 0.6270790696144104, "learning_rate": 0.00015164345110227684, "loss": 1.0244, "step": 7366 }, { "epoch": 1.3116096866096867, "grad_norm": 0.7120122313499451, "learning_rate": 0.0001516314641874163, "loss": 1.0476, "step": 7367 }, { "epoch": 1.3117877492877492, "grad_norm": 0.6152660250663757, "learning_rate": 0.0001516194762609445, "loss": 0.897, "step": 7368 }, { "epoch": 1.311965811965812, "grad_norm": 0.7578088045120239, "learning_rate": 0.00015160748732309626, "loss": 1.1609, "step": 7369 }, { "epoch": 1.3121438746438747, "grad_norm": 0.6594924330711365, "learning_rate": 0.00015159549737410656, "loss": 1.1706, "step": 7370 }, { "epoch": 1.3123219373219372, "grad_norm": 0.6559173464775085, "learning_rate": 
0.00015158350641421024, "loss": 0.9452, "step": 7371 }, { "epoch": 1.3125, "grad_norm": 0.6667516231536865, "learning_rate": 0.00015157151444364226, "loss": 0.8153, "step": 7372 }, { "epoch": 1.3126780626780628, "grad_norm": 0.7054803371429443, "learning_rate": 0.00015155952146263761, "loss": 0.9887, "step": 7373 }, { "epoch": 1.3128561253561253, "grad_norm": 0.7035902142524719, "learning_rate": 0.00015154752747143123, "loss": 1.1832, "step": 7374 }, { "epoch": 1.313034188034188, "grad_norm": 0.6297488212585449, "learning_rate": 0.00015153553247025813, "loss": 0.9602, "step": 7375 }, { "epoch": 1.3132122507122508, "grad_norm": 0.6851378083229065, "learning_rate": 0.00015152353645935335, "loss": 1.0743, "step": 7376 }, { "epoch": 1.3133903133903133, "grad_norm": 0.6215537786483765, "learning_rate": 0.00015151153943895187, "loss": 0.9484, "step": 7377 }, { "epoch": 1.313568376068376, "grad_norm": 0.6848666071891785, "learning_rate": 0.0001514995414092888, "loss": 1.0978, "step": 7378 }, { "epoch": 1.3137464387464388, "grad_norm": 0.7527492642402649, "learning_rate": 0.00015148754237059918, "loss": 1.083, "step": 7379 }, { "epoch": 1.3139245014245013, "grad_norm": 0.6264588236808777, "learning_rate": 0.00015147554232311814, "loss": 0.9995, "step": 7380 }, { "epoch": 1.314102564102564, "grad_norm": 0.6666619181632996, "learning_rate": 0.00015146354126708075, "loss": 1.0156, "step": 7381 }, { "epoch": 1.3142806267806268, "grad_norm": 0.6626597046852112, "learning_rate": 0.00015145153920272222, "loss": 1.0047, "step": 7382 }, { "epoch": 1.3144586894586894, "grad_norm": 0.5975428223609924, "learning_rate": 0.0001514395361302776, "loss": 0.806, "step": 7383 }, { "epoch": 1.3146367521367521, "grad_norm": 0.6509957909584045, "learning_rate": 0.00015142753204998218, "loss": 0.8871, "step": 7384 }, { "epoch": 1.3148148148148149, "grad_norm": 0.6672926545143127, "learning_rate": 0.00015141552696207108, "loss": 0.9616, "step": 7385 }, { "epoch": 1.3149928774928774, "grad_norm": 
0.6965435147285461, "learning_rate": 0.00015140352086677954, "loss": 1.124, "step": 7386 }, { "epoch": 1.3151709401709402, "grad_norm": 0.6559258103370667, "learning_rate": 0.00015139151376434277, "loss": 1.0271, "step": 7387 }, { "epoch": 1.315349002849003, "grad_norm": 0.7613587379455566, "learning_rate": 0.00015137950565499608, "loss": 1.0349, "step": 7388 }, { "epoch": 1.3155270655270654, "grad_norm": 0.7001944780349731, "learning_rate": 0.0001513674965389747, "loss": 0.8551, "step": 7389 }, { "epoch": 1.3157051282051282, "grad_norm": 0.6087043285369873, "learning_rate": 0.0001513554864165139, "loss": 0.7118, "step": 7390 }, { "epoch": 1.315883190883191, "grad_norm": 0.71526700258255, "learning_rate": 0.00015134347528784908, "loss": 1.0478, "step": 7391 }, { "epoch": 1.3160612535612537, "grad_norm": 0.6182073950767517, "learning_rate": 0.00015133146315321548, "loss": 0.9474, "step": 7392 }, { "epoch": 1.3162393162393162, "grad_norm": 0.7771387696266174, "learning_rate": 0.0001513194500128485, "loss": 1.0544, "step": 7393 }, { "epoch": 1.316417378917379, "grad_norm": 0.7108260989189148, "learning_rate": 0.00015130743586698353, "loss": 0.8813, "step": 7394 }, { "epoch": 1.3165954415954415, "grad_norm": 0.7057309150695801, "learning_rate": 0.0001512954207158559, "loss": 0.899, "step": 7395 }, { "epoch": 1.3167735042735043, "grad_norm": 0.6139237880706787, "learning_rate": 0.00015128340455970106, "loss": 0.8885, "step": 7396 }, { "epoch": 1.316951566951567, "grad_norm": 0.7166598439216614, "learning_rate": 0.00015127138739875443, "loss": 0.9792, "step": 7397 }, { "epoch": 1.3171296296296298, "grad_norm": 0.6916186809539795, "learning_rate": 0.00015125936923325153, "loss": 0.8871, "step": 7398 }, { "epoch": 1.3173076923076923, "grad_norm": 0.7189087271690369, "learning_rate": 0.0001512473500634277, "loss": 0.8302, "step": 7399 }, { "epoch": 1.317485754985755, "grad_norm": 0.5739200115203857, "learning_rate": 0.00015123532988951853, "loss": 0.9137, "step": 7400 }, { 
"epoch": 1.3176638176638176, "grad_norm": 0.7661057114601135, "learning_rate": 0.00015122330871175952, "loss": 1.1255, "step": 7401 }, { "epoch": 1.3178418803418803, "grad_norm": 0.6487592458724976, "learning_rate": 0.00015121128653038617, "loss": 1.0519, "step": 7402 }, { "epoch": 1.318019943019943, "grad_norm": 0.693134605884552, "learning_rate": 0.00015119926334563406, "loss": 0.9585, "step": 7403 }, { "epoch": 1.3181980056980058, "grad_norm": 0.5895997285842896, "learning_rate": 0.0001511872391577387, "loss": 0.8033, "step": 7404 }, { "epoch": 1.3183760683760684, "grad_norm": 0.654876172542572, "learning_rate": 0.00015117521396693575, "loss": 1.0082, "step": 7405 }, { "epoch": 1.318554131054131, "grad_norm": 0.5877239108085632, "learning_rate": 0.0001511631877734608, "loss": 1.0147, "step": 7406 }, { "epoch": 1.3187321937321936, "grad_norm": 0.6109837889671326, "learning_rate": 0.00015115116057754944, "loss": 0.7498, "step": 7407 }, { "epoch": 1.3189102564102564, "grad_norm": 0.643856942653656, "learning_rate": 0.00015113913237943736, "loss": 1.0417, "step": 7408 }, { "epoch": 1.3190883190883191, "grad_norm": 0.654077410697937, "learning_rate": 0.00015112710317936022, "loss": 1.1809, "step": 7409 }, { "epoch": 1.319266381766382, "grad_norm": 0.6785375475883484, "learning_rate": 0.00015111507297755367, "loss": 0.9447, "step": 7410 }, { "epoch": 1.3194444444444444, "grad_norm": 0.6513382196426392, "learning_rate": 0.00015110304177425347, "loss": 0.8286, "step": 7411 }, { "epoch": 1.3196225071225072, "grad_norm": 0.6536405682563782, "learning_rate": 0.00015109100956969533, "loss": 1.1959, "step": 7412 }, { "epoch": 1.3198005698005697, "grad_norm": 0.6633172035217285, "learning_rate": 0.00015107897636411498, "loss": 0.8839, "step": 7413 }, { "epoch": 1.3199786324786325, "grad_norm": 0.5773791670799255, "learning_rate": 0.00015106694215774821, "loss": 0.9785, "step": 7414 }, { "epoch": 1.3201566951566952, "grad_norm": 0.7005468010902405, "learning_rate": 
0.00015105490695083078, "loss": 1.0752, "step": 7415 }, { "epoch": 1.320334757834758, "grad_norm": 0.6509538888931274, "learning_rate": 0.0001510428707435985, "loss": 0.9886, "step": 7416 }, { "epoch": 1.3205128205128205, "grad_norm": 0.6607788801193237, "learning_rate": 0.0001510308335362872, "loss": 0.9756, "step": 7417 }, { "epoch": 1.3206908831908832, "grad_norm": 0.5977858304977417, "learning_rate": 0.00015101879532913274, "loss": 1.0574, "step": 7418 }, { "epoch": 1.3208689458689458, "grad_norm": 0.6478607058525085, "learning_rate": 0.00015100675612237096, "loss": 1.0076, "step": 7419 }, { "epoch": 1.3210470085470085, "grad_norm": 0.6386681199073792, "learning_rate": 0.00015099471591623775, "loss": 0.9639, "step": 7420 }, { "epoch": 1.3212250712250713, "grad_norm": 0.6348143815994263, "learning_rate": 0.000150982674710969, "loss": 1.0226, "step": 7421 }, { "epoch": 1.321403133903134, "grad_norm": 0.6737388372421265, "learning_rate": 0.00015097063250680068, "loss": 0.9985, "step": 7422 }, { "epoch": 1.3215811965811965, "grad_norm": 0.7302656769752502, "learning_rate": 0.00015095858930396866, "loss": 0.9969, "step": 7423 }, { "epoch": 1.3217592592592593, "grad_norm": 0.7062691450119019, "learning_rate": 0.00015094654510270898, "loss": 0.9137, "step": 7424 }, { "epoch": 1.3219373219373218, "grad_norm": 0.6289888620376587, "learning_rate": 0.00015093449990325754, "loss": 0.9231, "step": 7425 }, { "epoch": 1.3221153846153846, "grad_norm": 0.643284261226654, "learning_rate": 0.0001509224537058504, "loss": 0.8981, "step": 7426 }, { "epoch": 1.3222934472934473, "grad_norm": 0.7019244432449341, "learning_rate": 0.00015091040651072355, "loss": 0.9994, "step": 7427 }, { "epoch": 1.32247150997151, "grad_norm": 0.5982088446617126, "learning_rate": 0.0001508983583181131, "loss": 0.9365, "step": 7428 }, { "epoch": 1.3226495726495726, "grad_norm": 0.6086063385009766, "learning_rate": 0.00015088630912825498, "loss": 0.8621, "step": 7429 }, { "epoch": 1.3228276353276354, 
"grad_norm": 0.6829213500022888, "learning_rate": 0.00015087425894138535, "loss": 1.1959, "step": 7430 }, { "epoch": 1.323005698005698, "grad_norm": 0.6538017392158508, "learning_rate": 0.00015086220775774033, "loss": 0.9412, "step": 7431 }, { "epoch": 1.3231837606837606, "grad_norm": 0.6334070563316345, "learning_rate": 0.00015085015557755597, "loss": 0.9044, "step": 7432 }, { "epoch": 1.3233618233618234, "grad_norm": 0.6514624357223511, "learning_rate": 0.00015083810240106845, "loss": 0.8859, "step": 7433 }, { "epoch": 1.3235398860398861, "grad_norm": 0.7130434513092041, "learning_rate": 0.00015082604822851397, "loss": 1.2845, "step": 7434 }, { "epoch": 1.3237179487179487, "grad_norm": 0.609419584274292, "learning_rate": 0.00015081399306012862, "loss": 1.0725, "step": 7435 }, { "epoch": 1.3238960113960114, "grad_norm": 0.586807370185852, "learning_rate": 0.0001508019368961486, "loss": 0.9032, "step": 7436 }, { "epoch": 1.324074074074074, "grad_norm": 0.6937291026115417, "learning_rate": 0.0001507898797368102, "loss": 0.7975, "step": 7437 }, { "epoch": 1.3242521367521367, "grad_norm": 0.6804966330528259, "learning_rate": 0.00015077782158234962, "loss": 1.1018, "step": 7438 }, { "epoch": 1.3244301994301995, "grad_norm": 0.6110677123069763, "learning_rate": 0.0001507657624330031, "loss": 0.7988, "step": 7439 }, { "epoch": 1.3246082621082622, "grad_norm": 0.6340961456298828, "learning_rate": 0.0001507537022890069, "loss": 0.844, "step": 7440 }, { "epoch": 1.3247863247863247, "grad_norm": 0.7291021943092346, "learning_rate": 0.00015074164115059735, "loss": 0.9867, "step": 7441 }, { "epoch": 1.3249643874643875, "grad_norm": 0.6818505525588989, "learning_rate": 0.00015072957901801076, "loss": 1.1541, "step": 7442 }, { "epoch": 1.32514245014245, "grad_norm": 0.6174707412719727, "learning_rate": 0.00015071751589148345, "loss": 1.1679, "step": 7443 }, { "epoch": 1.3253205128205128, "grad_norm": 0.6481367945671082, "learning_rate": 0.00015070545177125176, "loss": 1.0955, 
"step": 7444 }, { "epoch": 1.3254985754985755, "grad_norm": 0.6752339005470276, "learning_rate": 0.00015069338665755203, "loss": 0.8651, "step": 7445 }, { "epoch": 1.3256766381766383, "grad_norm": 0.6608055830001831, "learning_rate": 0.00015068132055062077, "loss": 0.9553, "step": 7446 }, { "epoch": 1.3258547008547008, "grad_norm": 0.5933246612548828, "learning_rate": 0.00015066925345069425, "loss": 0.8584, "step": 7447 }, { "epoch": 1.3260327635327636, "grad_norm": 0.6301844716072083, "learning_rate": 0.000150657185358009, "loss": 0.8583, "step": 7448 }, { "epoch": 1.326210826210826, "grad_norm": 0.7359434962272644, "learning_rate": 0.00015064511627280145, "loss": 1.0905, "step": 7449 }, { "epoch": 1.3263888888888888, "grad_norm": 0.6334579586982727, "learning_rate": 0.00015063304619530806, "loss": 0.9814, "step": 7450 }, { "epoch": 1.3265669515669516, "grad_norm": 0.6974197626113892, "learning_rate": 0.00015062097512576528, "loss": 0.9302, "step": 7451 }, { "epoch": 1.3267450142450143, "grad_norm": 0.6895849704742432, "learning_rate": 0.00015060890306440965, "loss": 1.0175, "step": 7452 }, { "epoch": 1.3269230769230769, "grad_norm": 0.5938003659248352, "learning_rate": 0.00015059683001147767, "loss": 0.8084, "step": 7453 }, { "epoch": 1.3271011396011396, "grad_norm": 0.6821470856666565, "learning_rate": 0.00015058475596720596, "loss": 0.9897, "step": 7454 }, { "epoch": 1.3272792022792022, "grad_norm": 0.5507164001464844, "learning_rate": 0.00015057268093183104, "loss": 0.7012, "step": 7455 }, { "epoch": 1.327457264957265, "grad_norm": 0.6216199398040771, "learning_rate": 0.00015056060490558945, "loss": 1.0281, "step": 7456 }, { "epoch": 1.3276353276353277, "grad_norm": 0.6674157977104187, "learning_rate": 0.00015054852788871787, "loss": 0.8776, "step": 7457 }, { "epoch": 1.3278133903133904, "grad_norm": 0.666963517665863, "learning_rate": 0.0001505364498814529, "loss": 1.0742, "step": 7458 }, { "epoch": 1.327991452991453, "grad_norm": 0.6205331683158875, 
"learning_rate": 0.00015052437088403114, "loss": 1.1109, "step": 7459 }, { "epoch": 1.3281695156695157, "grad_norm": 0.6402750611305237, "learning_rate": 0.00015051229089668933, "loss": 1.0648, "step": 7460 }, { "epoch": 1.3283475783475782, "grad_norm": 0.7445703744888306, "learning_rate": 0.00015050020991966406, "loss": 0.8989, "step": 7461 }, { "epoch": 1.328525641025641, "grad_norm": 0.8131299614906311, "learning_rate": 0.00015048812795319212, "loss": 0.9552, "step": 7462 }, { "epoch": 1.3287037037037037, "grad_norm": 0.7007313966751099, "learning_rate": 0.00015047604499751017, "loss": 0.9899, "step": 7463 }, { "epoch": 1.3288817663817665, "grad_norm": 0.60536789894104, "learning_rate": 0.000150463961052855, "loss": 0.7694, "step": 7464 }, { "epoch": 1.329059829059829, "grad_norm": 0.6910434365272522, "learning_rate": 0.00015045187611946331, "loss": 0.9575, "step": 7465 }, { "epoch": 1.3292378917378918, "grad_norm": 0.7693352103233337, "learning_rate": 0.00015043979019757194, "loss": 1.1987, "step": 7466 }, { "epoch": 1.3294159544159543, "grad_norm": 0.6675218939781189, "learning_rate": 0.00015042770328741763, "loss": 1.0099, "step": 7467 }, { "epoch": 1.329594017094017, "grad_norm": 0.8040883541107178, "learning_rate": 0.00015041561538923722, "loss": 0.9493, "step": 7468 }, { "epoch": 1.3297720797720798, "grad_norm": 0.6765826344490051, "learning_rate": 0.00015040352650326762, "loss": 1.1035, "step": 7469 }, { "epoch": 1.3299501424501425, "grad_norm": 0.7099924087524414, "learning_rate": 0.0001503914366297456, "loss": 0.9198, "step": 7470 }, { "epoch": 1.330128205128205, "grad_norm": 0.6673682928085327, "learning_rate": 0.00015037934576890804, "loss": 1.0234, "step": 7471 }, { "epoch": 1.3303062678062678, "grad_norm": 0.7022300958633423, "learning_rate": 0.00015036725392099184, "loss": 1.3875, "step": 7472 }, { "epoch": 1.3304843304843303, "grad_norm": 0.6997060179710388, "learning_rate": 0.00015035516108623394, "loss": 0.8114, "step": 7473 }, { "epoch": 
1.330662393162393, "grad_norm": 0.6262350678443909, "learning_rate": 0.00015034306726487127, "loss": 1.128, "step": 7474 }, { "epoch": 1.3308404558404558, "grad_norm": 0.6330382227897644, "learning_rate": 0.00015033097245714078, "loss": 0.9032, "step": 7475 }, { "epoch": 1.3310185185185186, "grad_norm": 0.6527551412582397, "learning_rate": 0.00015031887666327944, "loss": 0.9311, "step": 7476 }, { "epoch": 1.3311965811965811, "grad_norm": 0.6754798889160156, "learning_rate": 0.00015030677988352422, "loss": 1.0626, "step": 7477 }, { "epoch": 1.3313746438746439, "grad_norm": 0.6397945284843445, "learning_rate": 0.00015029468211811216, "loss": 0.9222, "step": 7478 }, { "epoch": 1.3315527065527066, "grad_norm": 0.8163481950759888, "learning_rate": 0.0001502825833672803, "loss": 1.1827, "step": 7479 }, { "epoch": 1.3317307692307692, "grad_norm": 0.6645621657371521, "learning_rate": 0.00015027048363126566, "loss": 0.9744, "step": 7480 }, { "epoch": 1.331908831908832, "grad_norm": 0.6943182349205017, "learning_rate": 0.0001502583829103053, "loss": 1.1597, "step": 7481 }, { "epoch": 1.3320868945868947, "grad_norm": 0.6283710598945618, "learning_rate": 0.00015024628120463636, "loss": 0.9514, "step": 7482 }, { "epoch": 1.3322649572649572, "grad_norm": 0.6159678101539612, "learning_rate": 0.0001502341785144959, "loss": 0.9752, "step": 7483 }, { "epoch": 1.33244301994302, "grad_norm": 0.6259802579879761, "learning_rate": 0.00015022207484012107, "loss": 0.9356, "step": 7484 }, { "epoch": 1.3326210826210827, "grad_norm": 0.7322365641593933, "learning_rate": 0.00015020997018174904, "loss": 1.2072, "step": 7485 }, { "epoch": 1.3327991452991452, "grad_norm": 0.6323443651199341, "learning_rate": 0.0001501978645396169, "loss": 1.1661, "step": 7486 }, { "epoch": 1.332977207977208, "grad_norm": 0.7811527848243713, "learning_rate": 0.00015018575791396187, "loss": 1.0304, "step": 7487 }, { "epoch": 1.3331552706552707, "grad_norm": 0.7221232056617737, "learning_rate": 
0.0001501736503050212, "loss": 0.8838, "step": 7488 }, { "epoch": 1.3333333333333333, "grad_norm": 0.6980099081993103, "learning_rate": 0.00015016154171303207, "loss": 1.1841, "step": 7489 }, { "epoch": 1.333511396011396, "grad_norm": 0.6802879571914673, "learning_rate": 0.00015014943213823175, "loss": 0.959, "step": 7490 }, { "epoch": 1.3336894586894588, "grad_norm": 0.637698233127594, "learning_rate": 0.00015013732158085746, "loss": 1.0517, "step": 7491 }, { "epoch": 1.3338675213675213, "grad_norm": 0.6386787295341492, "learning_rate": 0.0001501252100411465, "loss": 0.7125, "step": 7492 }, { "epoch": 1.334045584045584, "grad_norm": 0.6287358403205872, "learning_rate": 0.0001501130975193362, "loss": 0.8913, "step": 7493 }, { "epoch": 1.3342236467236468, "grad_norm": 0.6142337322235107, "learning_rate": 0.00015010098401566386, "loss": 0.8149, "step": 7494 }, { "epoch": 1.3344017094017093, "grad_norm": 0.6369916200637817, "learning_rate": 0.0001500888695303668, "loss": 1.0186, "step": 7495 }, { "epoch": 1.334579772079772, "grad_norm": 0.7526934146881104, "learning_rate": 0.0001500767540636824, "loss": 1.2421, "step": 7496 }, { "epoch": 1.3347578347578348, "grad_norm": 0.7278095483779907, "learning_rate": 0.00015006463761584802, "loss": 0.9856, "step": 7497 }, { "epoch": 1.3349358974358974, "grad_norm": 0.6165127158164978, "learning_rate": 0.00015005252018710104, "loss": 1.0041, "step": 7498 }, { "epoch": 1.33511396011396, "grad_norm": 0.637856662273407, "learning_rate": 0.00015004040177767896, "loss": 0.9134, "step": 7499 }, { "epoch": 1.3352920227920229, "grad_norm": 0.661227285861969, "learning_rate": 0.00015002828238781912, "loss": 1.0393, "step": 7500 }, { "epoch": 1.3354700854700854, "grad_norm": 0.6061869859695435, "learning_rate": 0.000150016162017759, "loss": 0.8453, "step": 7501 }, { "epoch": 1.3356481481481481, "grad_norm": 0.6938419938087463, "learning_rate": 0.0001500040406677361, "loss": 1.0338, "step": 7502 }, { "epoch": 1.335826210826211, "grad_norm": 
0.6672863960266113, "learning_rate": 0.0001499919183379879, "loss": 0.8765, "step": 7503 }, { "epoch": 1.3360042735042734, "grad_norm": 0.6200515031814575, "learning_rate": 0.00014997979502875193, "loss": 0.8286, "step": 7504 }, { "epoch": 1.3361823361823362, "grad_norm": 0.6287549138069153, "learning_rate": 0.00014996767074026567, "loss": 0.9761, "step": 7505 }, { "epoch": 1.336360398860399, "grad_norm": 0.6036837100982666, "learning_rate": 0.0001499555454727667, "loss": 1.0506, "step": 7506 }, { "epoch": 1.3365384615384617, "grad_norm": 0.6875260472297668, "learning_rate": 0.0001499434192264926, "loss": 1.001, "step": 7507 }, { "epoch": 1.3367165242165242, "grad_norm": 0.6558469533920288, "learning_rate": 0.00014993129200168096, "loss": 0.6874, "step": 7508 }, { "epoch": 1.336894586894587, "grad_norm": 0.604167103767395, "learning_rate": 0.00014991916379856934, "loss": 1.0173, "step": 7509 }, { "epoch": 1.3370726495726495, "grad_norm": 0.5941442251205444, "learning_rate": 0.00014990703461739544, "loss": 0.8569, "step": 7510 }, { "epoch": 1.3372507122507122, "grad_norm": 0.7645071148872375, "learning_rate": 0.00014989490445839687, "loss": 1.0172, "step": 7511 }, { "epoch": 1.337428774928775, "grad_norm": 0.5491678714752197, "learning_rate": 0.00014988277332181126, "loss": 0.8018, "step": 7512 }, { "epoch": 1.3376068376068377, "grad_norm": 0.583322286605835, "learning_rate": 0.00014987064120787635, "loss": 0.8704, "step": 7513 }, { "epoch": 1.3377849002849003, "grad_norm": 0.7385724186897278, "learning_rate": 0.00014985850811682984, "loss": 1.1121, "step": 7514 }, { "epoch": 1.337962962962963, "grad_norm": 0.6842585206031799, "learning_rate": 0.00014984637404890941, "loss": 0.914, "step": 7515 }, { "epoch": 1.3381410256410255, "grad_norm": 0.6771186590194702, "learning_rate": 0.00014983423900435285, "loss": 1.0838, "step": 7516 }, { "epoch": 1.3383190883190883, "grad_norm": 0.7562049031257629, "learning_rate": 0.00014982210298339788, "loss": 1.123, "step": 7517 }, 
{ "epoch": 1.338497150997151, "grad_norm": 0.7617804408073425, "learning_rate": 0.0001498099659862823, "loss": 0.9438, "step": 7518 }, { "epoch": 1.3386752136752138, "grad_norm": 0.561958909034729, "learning_rate": 0.00014979782801324392, "loss": 0.8739, "step": 7519 }, { "epoch": 1.3388532763532763, "grad_norm": 0.7726154923439026, "learning_rate": 0.00014978568906452052, "loss": 1.1306, "step": 7520 }, { "epoch": 1.339031339031339, "grad_norm": 0.6658660173416138, "learning_rate": 0.00014977354914035002, "loss": 1.0214, "step": 7521 }, { "epoch": 1.3392094017094016, "grad_norm": 0.6385402679443359, "learning_rate": 0.00014976140824097015, "loss": 0.8851, "step": 7522 }, { "epoch": 1.3393874643874644, "grad_norm": 0.6315767168998718, "learning_rate": 0.0001497492663666189, "loss": 0.986, "step": 7523 }, { "epoch": 1.3395655270655271, "grad_norm": 0.6379088759422302, "learning_rate": 0.0001497371235175341, "loss": 0.9322, "step": 7524 }, { "epoch": 1.3397435897435899, "grad_norm": 0.6605859994888306, "learning_rate": 0.0001497249796939537, "loss": 1.1112, "step": 7525 }, { "epoch": 1.3399216524216524, "grad_norm": 0.7342822551727295, "learning_rate": 0.0001497128348961156, "loss": 0.9798, "step": 7526 }, { "epoch": 1.3400997150997151, "grad_norm": 0.5667192935943604, "learning_rate": 0.0001497006891242578, "loss": 0.7493, "step": 7527 }, { "epoch": 1.3402777777777777, "grad_norm": 0.6106827855110168, "learning_rate": 0.0001496885423786182, "loss": 1.0924, "step": 7528 }, { "epoch": 1.3404558404558404, "grad_norm": 0.6207202076911926, "learning_rate": 0.00014967639465943486, "loss": 1.1123, "step": 7529 }, { "epoch": 1.3406339031339032, "grad_norm": 0.6272760033607483, "learning_rate": 0.00014966424596694574, "loss": 0.9275, "step": 7530 }, { "epoch": 1.340811965811966, "grad_norm": 0.6485986113548279, "learning_rate": 0.0001496520963013889, "loss": 1.1491, "step": 7531 }, { "epoch": 1.3409900284900285, "grad_norm": 0.5743561387062073, "learning_rate": 
0.00014963994566300238, "loss": 1.1101, "step": 7532 }, { "epoch": 1.3411680911680912, "grad_norm": 0.6508657336235046, "learning_rate": 0.00014962779405202424, "loss": 1.0368, "step": 7533 }, { "epoch": 1.3413461538461537, "grad_norm": 0.6598748564720154, "learning_rate": 0.00014961564146869259, "loss": 1.1064, "step": 7534 }, { "epoch": 1.3415242165242165, "grad_norm": 0.6722840070724487, "learning_rate": 0.00014960348791324547, "loss": 0.9758, "step": 7535 }, { "epoch": 1.3417022792022792, "grad_norm": 0.5807220935821533, "learning_rate": 0.00014959133338592108, "loss": 0.9936, "step": 7536 }, { "epoch": 1.341880341880342, "grad_norm": 0.6318647265434265, "learning_rate": 0.00014957917788695752, "loss": 0.907, "step": 7537 }, { "epoch": 1.3420584045584045, "grad_norm": 0.6725485324859619, "learning_rate": 0.00014956702141659295, "loss": 0.988, "step": 7538 }, { "epoch": 1.3422364672364673, "grad_norm": 0.6675217747688293, "learning_rate": 0.0001495548639750656, "loss": 1.0194, "step": 7539 }, { "epoch": 1.3424145299145298, "grad_norm": 0.6976884603500366, "learning_rate": 0.0001495427055626136, "loss": 1.2515, "step": 7540 }, { "epoch": 1.3425925925925926, "grad_norm": 0.654941737651825, "learning_rate": 0.0001495305461794752, "loss": 1.2072, "step": 7541 }, { "epoch": 1.3427706552706553, "grad_norm": 0.7085291743278503, "learning_rate": 0.00014951838582588864, "loss": 0.9772, "step": 7542 }, { "epoch": 1.342948717948718, "grad_norm": 0.6319566965103149, "learning_rate": 0.00014950622450209217, "loss": 1.0162, "step": 7543 }, { "epoch": 1.3431267806267806, "grad_norm": 0.6272495985031128, "learning_rate": 0.00014949406220832407, "loss": 0.7985, "step": 7544 }, { "epoch": 1.3433048433048433, "grad_norm": 0.6352069973945618, "learning_rate": 0.00014948189894482266, "loss": 1.0041, "step": 7545 }, { "epoch": 1.3434829059829059, "grad_norm": 0.6071867346763611, "learning_rate": 0.0001494697347118262, "loss": 0.9486, "step": 7546 }, { "epoch": 1.3436609686609686, 
"grad_norm": 0.6458829641342163, "learning_rate": 0.00014945756950957308, "loss": 0.9417, "step": 7547 }, { "epoch": 1.3438390313390314, "grad_norm": 0.6472262740135193, "learning_rate": 0.0001494454033383016, "loss": 1.056, "step": 7548 }, { "epoch": 1.3440170940170941, "grad_norm": 0.6985635161399841, "learning_rate": 0.00014943323619825017, "loss": 1.0483, "step": 7549 }, { "epoch": 1.3441951566951567, "grad_norm": 0.6379460096359253, "learning_rate": 0.00014942106808965718, "loss": 0.9552, "step": 7550 }, { "epoch": 1.3443732193732194, "grad_norm": 0.7036557793617249, "learning_rate": 0.00014940889901276098, "loss": 0.9647, "step": 7551 }, { "epoch": 1.344551282051282, "grad_norm": 0.6697289943695068, "learning_rate": 0.0001493967289678001, "loss": 0.9029, "step": 7552 }, { "epoch": 1.3447293447293447, "grad_norm": 0.6336250901222229, "learning_rate": 0.00014938455795501286, "loss": 0.9458, "step": 7553 }, { "epoch": 1.3449074074074074, "grad_norm": 0.7279673218727112, "learning_rate": 0.00014937238597463785, "loss": 1.0228, "step": 7554 }, { "epoch": 1.3450854700854702, "grad_norm": 0.6514406204223633, "learning_rate": 0.00014936021302691349, "loss": 0.8265, "step": 7555 }, { "epoch": 1.3452635327635327, "grad_norm": 0.6405338644981384, "learning_rate": 0.0001493480391120783, "loss": 0.9516, "step": 7556 }, { "epoch": 1.3454415954415955, "grad_norm": 0.6442672610282898, "learning_rate": 0.00014933586423037076, "loss": 0.9279, "step": 7557 }, { "epoch": 1.345619658119658, "grad_norm": 0.7588633894920349, "learning_rate": 0.00014932368838202945, "loss": 1.0976, "step": 7558 }, { "epoch": 1.3457977207977208, "grad_norm": 0.5536739230155945, "learning_rate": 0.00014931151156729296, "loss": 0.713, "step": 7559 }, { "epoch": 1.3459757834757835, "grad_norm": 0.6897570490837097, "learning_rate": 0.00014929933378639981, "loss": 0.9521, "step": 7560 }, { "epoch": 1.3461538461538463, "grad_norm": 0.6654927134513855, "learning_rate": 0.00014928715503958863, "loss": 
0.8506, "step": 7561 }, { "epoch": 1.3463319088319088, "grad_norm": 0.655806839466095, "learning_rate": 0.00014927497532709808, "loss": 0.8636, "step": 7562 }, { "epoch": 1.3465099715099715, "grad_norm": 0.6547064185142517, "learning_rate": 0.00014926279464916667, "loss": 0.9155, "step": 7563 }, { "epoch": 1.346688034188034, "grad_norm": 0.7555415034294128, "learning_rate": 0.00014925061300603316, "loss": 0.8791, "step": 7564 }, { "epoch": 1.3468660968660968, "grad_norm": 0.7439392805099487, "learning_rate": 0.0001492384303979362, "loss": 1.1669, "step": 7565 }, { "epoch": 1.3470441595441596, "grad_norm": 0.6016925573348999, "learning_rate": 0.0001492262468251145, "loss": 0.9811, "step": 7566 }, { "epoch": 1.3472222222222223, "grad_norm": 0.644652783870697, "learning_rate": 0.00014921406228780675, "loss": 0.7096, "step": 7567 }, { "epoch": 1.3474002849002849, "grad_norm": 0.721814751625061, "learning_rate": 0.00014920187678625166, "loss": 0.9933, "step": 7568 }, { "epoch": 1.3475783475783476, "grad_norm": 0.6212092638015747, "learning_rate": 0.000149189690320688, "loss": 0.8499, "step": 7569 }, { "epoch": 1.3477564102564101, "grad_norm": 0.6235958337783813, "learning_rate": 0.00014917750289135455, "loss": 0.9189, "step": 7570 }, { "epoch": 1.3479344729344729, "grad_norm": 0.6309674978256226, "learning_rate": 0.0001491653144984901, "loss": 0.9744, "step": 7571 }, { "epoch": 1.3481125356125356, "grad_norm": 0.7606496214866638, "learning_rate": 0.00014915312514233344, "loss": 1.0181, "step": 7572 }, { "epoch": 1.3482905982905984, "grad_norm": 0.6892654895782471, "learning_rate": 0.00014914093482312342, "loss": 0.9517, "step": 7573 }, { "epoch": 1.348468660968661, "grad_norm": 0.6746503114700317, "learning_rate": 0.0001491287435410988, "loss": 1.056, "step": 7574 }, { "epoch": 1.3486467236467237, "grad_norm": 0.5892919301986694, "learning_rate": 0.00014911655129649858, "loss": 1.0515, "step": 7575 }, { "epoch": 1.3488247863247862, "grad_norm": 0.6278096437454224, 
"learning_rate": 0.0001491043580895615, "loss": 0.864, "step": 7576 }, { "epoch": 1.349002849002849, "grad_norm": 0.7017706632614136, "learning_rate": 0.0001490921639205266, "loss": 1.0618, "step": 7577 }, { "epoch": 1.3491809116809117, "grad_norm": 0.7318746447563171, "learning_rate": 0.00014907996878963268, "loss": 0.9905, "step": 7578 }, { "epoch": 1.3493589743589745, "grad_norm": 0.6485885977745056, "learning_rate": 0.00014906777269711873, "loss": 1.0498, "step": 7579 }, { "epoch": 1.349537037037037, "grad_norm": 0.644902229309082, "learning_rate": 0.00014905557564322372, "loss": 0.885, "step": 7580 }, { "epoch": 1.3497150997150997, "grad_norm": 0.6567610502243042, "learning_rate": 0.0001490433776281866, "loss": 0.8938, "step": 7581 }, { "epoch": 1.3498931623931623, "grad_norm": 0.6233102679252625, "learning_rate": 0.0001490311786522464, "loss": 0.9007, "step": 7582 }, { "epoch": 1.350071225071225, "grad_norm": 0.6962146759033203, "learning_rate": 0.00014901897871564206, "loss": 0.9257, "step": 7583 }, { "epoch": 1.3502492877492878, "grad_norm": 0.6986933350563049, "learning_rate": 0.00014900677781861266, "loss": 1.0089, "step": 7584 }, { "epoch": 1.3504273504273505, "grad_norm": 0.7527925968170166, "learning_rate": 0.00014899457596139729, "loss": 1.0762, "step": 7585 }, { "epoch": 1.350605413105413, "grad_norm": 0.69191974401474, "learning_rate": 0.00014898237314423494, "loss": 0.9829, "step": 7586 }, { "epoch": 1.3507834757834758, "grad_norm": 0.7866443395614624, "learning_rate": 0.00014897016936736478, "loss": 1.0911, "step": 7587 }, { "epoch": 1.3509615384615383, "grad_norm": 0.7087522745132446, "learning_rate": 0.00014895796463102587, "loss": 1.0693, "step": 7588 }, { "epoch": 1.351139601139601, "grad_norm": 0.704276442527771, "learning_rate": 0.00014894575893545736, "loss": 0.9082, "step": 7589 }, { "epoch": 1.3513176638176638, "grad_norm": 0.7074487805366516, "learning_rate": 0.00014893355228089833, "loss": 0.8731, "step": 7590 }, { "epoch": 
1.3514957264957266, "grad_norm": 0.6542425155639648, "learning_rate": 0.00014892134466758803, "loss": 0.9325, "step": 7591 }, { "epoch": 1.351673789173789, "grad_norm": 0.6577230095863342, "learning_rate": 0.0001489091360957656, "loss": 0.8468, "step": 7592 }, { "epoch": 1.3518518518518519, "grad_norm": 0.638534426689148, "learning_rate": 0.00014889692656567025, "loss": 0.8598, "step": 7593 }, { "epoch": 1.3520299145299146, "grad_norm": 0.751133918762207, "learning_rate": 0.0001488847160775412, "loss": 1.0006, "step": 7594 }, { "epoch": 1.3522079772079771, "grad_norm": 0.6272708773612976, "learning_rate": 0.00014887250463161767, "loss": 0.8782, "step": 7595 }, { "epoch": 1.35238603988604, "grad_norm": 0.7242439985275269, "learning_rate": 0.00014886029222813897, "loss": 1.2443, "step": 7596 }, { "epoch": 1.3525641025641026, "grad_norm": 0.6199275851249695, "learning_rate": 0.0001488480788673443, "loss": 0.9211, "step": 7597 }, { "epoch": 1.3527421652421652, "grad_norm": 0.6401306986808777, "learning_rate": 0.00014883586454947305, "loss": 0.8808, "step": 7598 }, { "epoch": 1.352920227920228, "grad_norm": 0.6340938806533813, "learning_rate": 0.00014882364927476443, "loss": 0.9406, "step": 7599 }, { "epoch": 1.3530982905982907, "grad_norm": 0.6388604044914246, "learning_rate": 0.00014881143304345783, "loss": 1.0674, "step": 7600 }, { "epoch": 1.3532763532763532, "grad_norm": 0.7562061548233032, "learning_rate": 0.00014879921585579263, "loss": 1.0959, "step": 7601 }, { "epoch": 1.353454415954416, "grad_norm": 0.6303606033325195, "learning_rate": 0.00014878699771200815, "loss": 0.9641, "step": 7602 }, { "epoch": 1.3536324786324787, "grad_norm": 0.8623232841491699, "learning_rate": 0.00014877477861234382, "loss": 1.1529, "step": 7603 }, { "epoch": 1.3538105413105412, "grad_norm": 0.6607624888420105, "learning_rate": 0.00014876255855703896, "loss": 0.6291, "step": 7604 }, { "epoch": 1.353988603988604, "grad_norm": 0.6226931214332581, "learning_rate": 0.0001487503375463331, 
"loss": 0.7485, "step": 7605 }, { "epoch": 1.3541666666666667, "grad_norm": 0.7626705169677734, "learning_rate": 0.00014873811558046565, "loss": 0.9694, "step": 7606 }, { "epoch": 1.3543447293447293, "grad_norm": 0.5436057448387146, "learning_rate": 0.00014872589265967605, "loss": 0.6173, "step": 7607 }, { "epoch": 1.354522792022792, "grad_norm": 0.7822177410125732, "learning_rate": 0.00014871366878420382, "loss": 1.0048, "step": 7608 }, { "epoch": 1.3547008547008548, "grad_norm": 0.6955201625823975, "learning_rate": 0.00014870144395428848, "loss": 0.9487, "step": 7609 }, { "epoch": 1.3548789173789173, "grad_norm": 0.6625505685806274, "learning_rate": 0.00014868921817016943, "loss": 0.9389, "step": 7610 }, { "epoch": 1.35505698005698, "grad_norm": 0.6625354886054993, "learning_rate": 0.00014867699143208634, "loss": 0.9538, "step": 7611 }, { "epoch": 1.3552350427350428, "grad_norm": 0.7426592707633972, "learning_rate": 0.00014866476374027874, "loss": 1.2566, "step": 7612 }, { "epoch": 1.3554131054131053, "grad_norm": 0.6856544017791748, "learning_rate": 0.00014865253509498616, "loss": 0.9663, "step": 7613 }, { "epoch": 1.355591168091168, "grad_norm": 0.6343915462493896, "learning_rate": 0.00014864030549644825, "loss": 0.9416, "step": 7614 }, { "epoch": 1.3557692307692308, "grad_norm": 0.6319553256034851, "learning_rate": 0.00014862807494490454, "loss": 0.9335, "step": 7615 }, { "epoch": 1.3559472934472934, "grad_norm": 0.6919772624969482, "learning_rate": 0.00014861584344059476, "loss": 0.8516, "step": 7616 }, { "epoch": 1.3561253561253561, "grad_norm": 0.6405790448188782, "learning_rate": 0.00014860361098375851, "loss": 1.1278, "step": 7617 }, { "epoch": 1.3563034188034189, "grad_norm": 0.7591732144355774, "learning_rate": 0.00014859137757463548, "loss": 1.0961, "step": 7618 }, { "epoch": 1.3564814814814814, "grad_norm": 0.6166727542877197, "learning_rate": 0.0001485791432134653, "loss": 0.9358, "step": 7619 }, { "epoch": 1.3566595441595442, "grad_norm": 
0.7068707346916199, "learning_rate": 0.00014856690790048777, "loss": 0.8325, "step": 7620 }, { "epoch": 1.356837606837607, "grad_norm": 0.8465402722358704, "learning_rate": 0.00014855467163594257, "loss": 1.0047, "step": 7621 }, { "epoch": 1.3570156695156697, "grad_norm": 0.7403460741043091, "learning_rate": 0.00014854243442006943, "loss": 1.0907, "step": 7622 }, { "epoch": 1.3571937321937322, "grad_norm": 0.6939566135406494, "learning_rate": 0.00014853019625310813, "loss": 0.9156, "step": 7623 }, { "epoch": 1.357371794871795, "grad_norm": 0.6425924897193909, "learning_rate": 0.0001485179571352984, "loss": 0.8156, "step": 7624 }, { "epoch": 1.3575498575498575, "grad_norm": 0.7091902494430542, "learning_rate": 0.00014850571706688013, "loss": 1.0483, "step": 7625 }, { "epoch": 1.3577279202279202, "grad_norm": 0.663342297077179, "learning_rate": 0.00014849347604809312, "loss": 1.0405, "step": 7626 }, { "epoch": 1.357905982905983, "grad_norm": 0.6727671027183533, "learning_rate": 0.00014848123407917716, "loss": 1.0389, "step": 7627 }, { "epoch": 1.3580840455840457, "grad_norm": 0.6572692394256592, "learning_rate": 0.0001484689911603721, "loss": 1.0489, "step": 7628 }, { "epoch": 1.3582621082621082, "grad_norm": 0.7629066109657288, "learning_rate": 0.0001484567472919179, "loss": 1.0372, "step": 7629 }, { "epoch": 1.358440170940171, "grad_norm": 0.7848913669586182, "learning_rate": 0.00014844450247405435, "loss": 0.9437, "step": 7630 }, { "epoch": 1.3586182336182335, "grad_norm": 0.715949535369873, "learning_rate": 0.00014843225670702143, "loss": 1.1949, "step": 7631 }, { "epoch": 1.3587962962962963, "grad_norm": 0.6498245596885681, "learning_rate": 0.00014842000999105905, "loss": 0.8845, "step": 7632 }, { "epoch": 1.358974358974359, "grad_norm": 0.7251074910163879, "learning_rate": 0.00014840776232640716, "loss": 1.093, "step": 7633 }, { "epoch": 1.3591524216524218, "grad_norm": 0.6223580837249756, "learning_rate": 0.0001483955137133057, "loss": 1.0344, "step": 7634 }, 
{ "epoch": 1.3593304843304843, "grad_norm": 0.6504943370819092, "learning_rate": 0.00014838326415199472, "loss": 1.109, "step": 7635 }, { "epoch": 1.359508547008547, "grad_norm": 0.5912374258041382, "learning_rate": 0.00014837101364271416, "loss": 1.0756, "step": 7636 }, { "epoch": 1.3596866096866096, "grad_norm": 0.6116467714309692, "learning_rate": 0.00014835876218570408, "loss": 0.7871, "step": 7637 }, { "epoch": 1.3598646723646723, "grad_norm": 0.7013412117958069, "learning_rate": 0.0001483465097812045, "loss": 1.0003, "step": 7638 }, { "epoch": 1.360042735042735, "grad_norm": 0.5930750370025635, "learning_rate": 0.00014833425642945552, "loss": 0.9926, "step": 7639 }, { "epoch": 1.3602207977207978, "grad_norm": 0.732955276966095, "learning_rate": 0.00014832200213069717, "loss": 1.2801, "step": 7640 }, { "epoch": 1.3603988603988604, "grad_norm": 0.6836149096488953, "learning_rate": 0.00014830974688516958, "loss": 0.9292, "step": 7641 }, { "epoch": 1.3605769230769231, "grad_norm": 0.6531919836997986, "learning_rate": 0.00014829749069311283, "loss": 0.9551, "step": 7642 }, { "epoch": 1.3607549857549857, "grad_norm": 0.719093382358551, "learning_rate": 0.0001482852335547671, "loss": 0.8588, "step": 7643 }, { "epoch": 1.3609330484330484, "grad_norm": 0.6144105792045593, "learning_rate": 0.00014827297547037252, "loss": 0.9033, "step": 7644 }, { "epoch": 1.3611111111111112, "grad_norm": 0.789241373538971, "learning_rate": 0.00014826071644016926, "loss": 1.1916, "step": 7645 }, { "epoch": 1.361289173789174, "grad_norm": 0.6137418746948242, "learning_rate": 0.0001482484564643975, "loss": 0.9648, "step": 7646 }, { "epoch": 1.3614672364672364, "grad_norm": 0.6789261698722839, "learning_rate": 0.00014823619554329745, "loss": 0.829, "step": 7647 }, { "epoch": 1.3616452991452992, "grad_norm": 0.6508790254592896, "learning_rate": 0.0001482239336771094, "loss": 0.942, "step": 7648 }, { "epoch": 1.3618233618233617, "grad_norm": 0.6725571751594543, "learning_rate": 
0.00014821167086607353, "loss": 0.8884, "step": 7649 }, { "epoch": 1.3620014245014245, "grad_norm": 0.6252003908157349, "learning_rate": 0.00014819940711043012, "loss": 0.9778, "step": 7650 }, { "epoch": 1.3621794871794872, "grad_norm": 0.6950626969337463, "learning_rate": 0.00014818714241041943, "loss": 1.2104, "step": 7651 }, { "epoch": 1.36235754985755, "grad_norm": 0.6527379155158997, "learning_rate": 0.0001481748767662818, "loss": 0.7845, "step": 7652 }, { "epoch": 1.3625356125356125, "grad_norm": 0.7438235282897949, "learning_rate": 0.00014816261017825755, "loss": 0.9513, "step": 7653 }, { "epoch": 1.3627136752136753, "grad_norm": 0.6412696838378906, "learning_rate": 0.000148150342646587, "loss": 0.8478, "step": 7654 }, { "epoch": 1.3628917378917378, "grad_norm": 0.658481240272522, "learning_rate": 0.00014813807417151046, "loss": 0.6816, "step": 7655 }, { "epoch": 1.3630698005698005, "grad_norm": 0.6170126795768738, "learning_rate": 0.0001481258047532684, "loss": 0.8862, "step": 7656 }, { "epoch": 1.3632478632478633, "grad_norm": 0.7049173712730408, "learning_rate": 0.0001481135343921012, "loss": 1.0027, "step": 7657 }, { "epoch": 1.363425925925926, "grad_norm": 0.7780741453170776, "learning_rate": 0.0001481012630882492, "loss": 1.0183, "step": 7658 }, { "epoch": 1.3636039886039886, "grad_norm": 0.6658362746238708, "learning_rate": 0.00014808899084195286, "loss": 0.878, "step": 7659 }, { "epoch": 1.3637820512820513, "grad_norm": 0.7192076444625854, "learning_rate": 0.00014807671765345267, "loss": 1.2269, "step": 7660 }, { "epoch": 1.3639601139601139, "grad_norm": 0.7038660049438477, "learning_rate": 0.00014806444352298903, "loss": 0.889, "step": 7661 }, { "epoch": 1.3641381766381766, "grad_norm": 0.622803270816803, "learning_rate": 0.00014805216845080249, "loss": 0.9623, "step": 7662 }, { "epoch": 1.3643162393162394, "grad_norm": 0.9157076478004456, "learning_rate": 0.00014803989243713353, "loss": 1.106, "step": 7663 }, { "epoch": 1.364494301994302, 
"grad_norm": 0.6369999647140503, "learning_rate": 0.00014802761548222268, "loss": 0.9755, "step": 7664 }, { "epoch": 1.3646723646723646, "grad_norm": 0.8318394422531128, "learning_rate": 0.00014801533758631045, "loss": 1.1786, "step": 7665 }, { "epoch": 1.3648504273504274, "grad_norm": 0.7065796852111816, "learning_rate": 0.00014800305874963744, "loss": 1.2066, "step": 7666 }, { "epoch": 1.36502849002849, "grad_norm": 0.6570265293121338, "learning_rate": 0.0001479907789724442, "loss": 1.0084, "step": 7667 }, { "epoch": 1.3652065527065527, "grad_norm": 0.637321949005127, "learning_rate": 0.00014797849825497135, "loss": 0.9075, "step": 7668 }, { "epoch": 1.3653846153846154, "grad_norm": 0.7656470537185669, "learning_rate": 0.00014796621659745948, "loss": 1.1497, "step": 7669 }, { "epoch": 1.3655626780626782, "grad_norm": 0.6798120737075806, "learning_rate": 0.0001479539340001493, "loss": 0.8154, "step": 7670 }, { "epoch": 1.3657407407407407, "grad_norm": 0.7004328966140747, "learning_rate": 0.0001479416504632813, "loss": 1.0513, "step": 7671 }, { "epoch": 1.3659188034188035, "grad_norm": 0.6551713943481445, "learning_rate": 0.0001479293659870963, "loss": 0.8735, "step": 7672 }, { "epoch": 1.366096866096866, "grad_norm": 0.7685719132423401, "learning_rate": 0.00014791708057183494, "loss": 1.111, "step": 7673 }, { "epoch": 1.3662749287749287, "grad_norm": 0.673624575138092, "learning_rate": 0.0001479047942177379, "loss": 0.9418, "step": 7674 }, { "epoch": 1.3664529914529915, "grad_norm": 0.6281047463417053, "learning_rate": 0.00014789250692504597, "loss": 1.0938, "step": 7675 }, { "epoch": 1.3666310541310542, "grad_norm": 0.5846312642097473, "learning_rate": 0.0001478802186939998, "loss": 0.6352, "step": 7676 }, { "epoch": 1.3668091168091168, "grad_norm": 0.7037251591682434, "learning_rate": 0.00014786792952484025, "loss": 1.1775, "step": 7677 }, { "epoch": 1.3669871794871795, "grad_norm": 0.69822758436203, "learning_rate": 0.00014785563941780808, "loss": 1.0877, 
"step": 7678 }, { "epoch": 1.367165242165242, "grad_norm": 0.7229313254356384, "learning_rate": 0.000147843348373144, "loss": 1.0305, "step": 7679 }, { "epoch": 1.3673433048433048, "grad_norm": 0.665771484375, "learning_rate": 0.00014783105639108897, "loss": 0.9056, "step": 7680 }, { "epoch": 1.3675213675213675, "grad_norm": 0.6418357491493225, "learning_rate": 0.00014781876347188367, "loss": 0.9374, "step": 7681 }, { "epoch": 1.3676994301994303, "grad_norm": 0.7255483269691467, "learning_rate": 0.0001478064696157691, "loss": 0.8533, "step": 7682 }, { "epoch": 1.3678774928774928, "grad_norm": 0.668064534664154, "learning_rate": 0.00014779417482298603, "loss": 0.9002, "step": 7683 }, { "epoch": 1.3680555555555556, "grad_norm": 0.6797603368759155, "learning_rate": 0.0001477818790937754, "loss": 0.9733, "step": 7684 }, { "epoch": 1.368233618233618, "grad_norm": 0.6905350685119629, "learning_rate": 0.0001477695824283781, "loss": 0.7985, "step": 7685 }, { "epoch": 1.3684116809116809, "grad_norm": 0.6846137046813965, "learning_rate": 0.00014775728482703507, "loss": 0.9154, "step": 7686 }, { "epoch": 1.3685897435897436, "grad_norm": 0.6686832904815674, "learning_rate": 0.00014774498628998726, "loss": 0.926, "step": 7687 }, { "epoch": 1.3687678062678064, "grad_norm": 0.7050234079360962, "learning_rate": 0.00014773268681747561, "loss": 0.9386, "step": 7688 }, { "epoch": 1.368945868945869, "grad_norm": 0.7048354744911194, "learning_rate": 0.00014772038640974112, "loss": 1.1483, "step": 7689 }, { "epoch": 1.3691239316239316, "grad_norm": 0.698192298412323, "learning_rate": 0.0001477080850670248, "loss": 1.1452, "step": 7690 }, { "epoch": 1.3693019943019942, "grad_norm": 0.6838962435722351, "learning_rate": 0.00014769578278956766, "loss": 0.9789, "step": 7691 }, { "epoch": 1.369480056980057, "grad_norm": 0.6636955142021179, "learning_rate": 0.00014768347957761074, "loss": 0.931, "step": 7692 }, { "epoch": 1.3696581196581197, "grad_norm": 0.706030547618866, "learning_rate": 
0.0001476711754313951, "loss": 1.1096, "step": 7693 }, { "epoch": 1.3698361823361824, "grad_norm": 0.6771288514137268, "learning_rate": 0.00014765887035116178, "loss": 0.9641, "step": 7694 }, { "epoch": 1.370014245014245, "grad_norm": 0.6805008053779602, "learning_rate": 0.00014764656433715188, "loss": 0.8724, "step": 7695 }, { "epoch": 1.3701923076923077, "grad_norm": 0.6599233746528625, "learning_rate": 0.00014763425738960657, "loss": 0.8477, "step": 7696 }, { "epoch": 1.3703703703703702, "grad_norm": 0.7036116123199463, "learning_rate": 0.0001476219495087669, "loss": 1.0991, "step": 7697 }, { "epoch": 1.370548433048433, "grad_norm": 0.6677989363670349, "learning_rate": 0.0001476096406948741, "loss": 1.2397, "step": 7698 }, { "epoch": 1.3707264957264957, "grad_norm": 0.5652269721031189, "learning_rate": 0.00014759733094816928, "loss": 0.9302, "step": 7699 }, { "epoch": 1.3709045584045585, "grad_norm": 0.6670156121253967, "learning_rate": 0.00014758502026889362, "loss": 0.8362, "step": 7700 }, { "epoch": 1.371082621082621, "grad_norm": 0.6705406904220581, "learning_rate": 0.00014757270865728832, "loss": 0.876, "step": 7701 }, { "epoch": 1.3712606837606838, "grad_norm": 0.6020053625106812, "learning_rate": 0.00014756039611359465, "loss": 0.9182, "step": 7702 }, { "epoch": 1.3714387464387463, "grad_norm": 0.6370134949684143, "learning_rate": 0.0001475480826380538, "loss": 1.1063, "step": 7703 }, { "epoch": 1.371616809116809, "grad_norm": 0.6906460523605347, "learning_rate": 0.00014753576823090705, "loss": 0.988, "step": 7704 }, { "epoch": 1.3717948717948718, "grad_norm": 0.6047569513320923, "learning_rate": 0.00014752345289239567, "loss": 1.15, "step": 7705 }, { "epoch": 1.3719729344729346, "grad_norm": 0.7019868493080139, "learning_rate": 0.00014751113662276095, "loss": 1.1185, "step": 7706 }, { "epoch": 1.372150997150997, "grad_norm": 0.6534035801887512, "learning_rate": 0.00014749881942224417, "loss": 0.9006, "step": 7707 }, { "epoch": 1.3723290598290598, 
"grad_norm": 0.6111651659011841, "learning_rate": 0.00014748650129108674, "loss": 0.935, "step": 7708 }, { "epoch": 1.3725071225071226, "grad_norm": 0.6678512096405029, "learning_rate": 0.00014747418222952995, "loss": 0.8771, "step": 7709 }, { "epoch": 1.3726851851851851, "grad_norm": 0.607829749584198, "learning_rate": 0.00014746186223781518, "loss": 1.0509, "step": 7710 }, { "epoch": 1.3728632478632479, "grad_norm": 0.7274412512779236, "learning_rate": 0.00014744954131618382, "loss": 0.9545, "step": 7711 }, { "epoch": 1.3730413105413106, "grad_norm": 0.640333354473114, "learning_rate": 0.00014743721946487723, "loss": 1.018, "step": 7712 }, { "epoch": 1.3732193732193732, "grad_norm": 0.6772079467773438, "learning_rate": 0.0001474248966841369, "loss": 1.0983, "step": 7713 }, { "epoch": 1.373397435897436, "grad_norm": 0.49630534648895264, "learning_rate": 0.00014741257297420422, "loss": 0.5238, "step": 7714 }, { "epoch": 1.3735754985754987, "grad_norm": 0.6316596269607544, "learning_rate": 0.00014740024833532068, "loss": 1.1342, "step": 7715 }, { "epoch": 1.3737535612535612, "grad_norm": 0.5928404331207275, "learning_rate": 0.00014738792276772775, "loss": 0.7987, "step": 7716 }, { "epoch": 1.373931623931624, "grad_norm": 0.6773418188095093, "learning_rate": 0.00014737559627166688, "loss": 0.934, "step": 7717 }, { "epoch": 1.3741096866096867, "grad_norm": 0.7895028591156006, "learning_rate": 0.00014736326884737963, "loss": 0.984, "step": 7718 }, { "epoch": 1.3742877492877492, "grad_norm": 0.7074753046035767, "learning_rate": 0.00014735094049510752, "loss": 1.0093, "step": 7719 }, { "epoch": 1.374465811965812, "grad_norm": 0.5389847159385681, "learning_rate": 0.00014733861121509208, "loss": 0.8138, "step": 7720 }, { "epoch": 1.3746438746438747, "grad_norm": 0.6138495206832886, "learning_rate": 0.00014732628100757493, "loss": 0.9282, "step": 7721 }, { "epoch": 1.3748219373219372, "grad_norm": 0.7609560489654541, "learning_rate": 0.00014731394987279757, "loss": 0.9859, 
"step": 7722 }, { "epoch": 1.375, "grad_norm": 0.6806198954582214, "learning_rate": 0.00014730161781100165, "loss": 0.8932, "step": 7723 }, { "epoch": 1.3751780626780628, "grad_norm": 0.7229103446006775, "learning_rate": 0.0001472892848224288, "loss": 0.956, "step": 7724 }, { "epoch": 1.3753561253561253, "grad_norm": 0.6157994866371155, "learning_rate": 0.00014727695090732066, "loss": 1.0285, "step": 7725 }, { "epoch": 1.375534188034188, "grad_norm": 0.5885980129241943, "learning_rate": 0.00014726461606591885, "loss": 0.9174, "step": 7726 }, { "epoch": 1.3757122507122508, "grad_norm": 0.6655769944190979, "learning_rate": 0.0001472522802984651, "loss": 0.9059, "step": 7727 }, { "epoch": 1.3758903133903133, "grad_norm": 0.7075541019439697, "learning_rate": 0.00014723994360520105, "loss": 1.0055, "step": 7728 }, { "epoch": 1.376068376068376, "grad_norm": 0.6947159171104431, "learning_rate": 0.00014722760598636847, "loss": 0.9782, "step": 7729 }, { "epoch": 1.3762464387464388, "grad_norm": 0.6629964709281921, "learning_rate": 0.00014721526744220905, "loss": 0.9427, "step": 7730 }, { "epoch": 1.3764245014245013, "grad_norm": 0.7385284304618835, "learning_rate": 0.00014720292797296453, "loss": 0.9953, "step": 7731 }, { "epoch": 1.376602564102564, "grad_norm": 0.6123563051223755, "learning_rate": 0.0001471905875788767, "loss": 1.0103, "step": 7732 }, { "epoch": 1.3767806267806268, "grad_norm": 0.6457047462463379, "learning_rate": 0.00014717824626018732, "loss": 0.9779, "step": 7733 }, { "epoch": 1.3769586894586894, "grad_norm": 0.6196442246437073, "learning_rate": 0.00014716590401713824, "loss": 0.8747, "step": 7734 }, { "epoch": 1.3771367521367521, "grad_norm": 0.7932298183441162, "learning_rate": 0.00014715356084997122, "loss": 1.1617, "step": 7735 }, { "epoch": 1.3773148148148149, "grad_norm": 0.787304699420929, "learning_rate": 0.00014714121675892815, "loss": 1.1383, "step": 7736 }, { "epoch": 1.3774928774928774, "grad_norm": 0.672795295715332, "learning_rate": 
0.00014712887174425085, "loss": 1.2563, "step": 7737 }, { "epoch": 1.3776709401709402, "grad_norm": 0.6505744457244873, "learning_rate": 0.00014711652580618123, "loss": 0.9194, "step": 7738 }, { "epoch": 1.377849002849003, "grad_norm": 0.8141193985939026, "learning_rate": 0.00014710417894496115, "loss": 1.1428, "step": 7739 }, { "epoch": 1.3780270655270654, "grad_norm": 0.6269707679748535, "learning_rate": 0.00014709183116083253, "loss": 0.7164, "step": 7740 }, { "epoch": 1.3782051282051282, "grad_norm": 0.6737076640129089, "learning_rate": 0.0001470794824540373, "loss": 0.9965, "step": 7741 }, { "epoch": 1.378383190883191, "grad_norm": 0.6451728343963623, "learning_rate": 0.0001470671328248174, "loss": 1.0539, "step": 7742 }, { "epoch": 1.3785612535612537, "grad_norm": 0.6480295062065125, "learning_rate": 0.00014705478227341486, "loss": 0.9118, "step": 7743 }, { "epoch": 1.3787393162393162, "grad_norm": 0.7429090738296509, "learning_rate": 0.00014704243080007154, "loss": 1.0031, "step": 7744 }, { "epoch": 1.378917378917379, "grad_norm": 0.5601376891136169, "learning_rate": 0.00014703007840502955, "loss": 0.849, "step": 7745 }, { "epoch": 1.3790954415954415, "grad_norm": 0.7067657113075256, "learning_rate": 0.00014701772508853088, "loss": 1.3067, "step": 7746 }, { "epoch": 1.3792735042735043, "grad_norm": 0.7016390562057495, "learning_rate": 0.00014700537085081755, "loss": 1.0236, "step": 7747 }, { "epoch": 1.379451566951567, "grad_norm": 0.6505000591278076, "learning_rate": 0.0001469930156921316, "loss": 1.0121, "step": 7748 }, { "epoch": 1.3796296296296298, "grad_norm": 0.8515380620956421, "learning_rate": 0.00014698065961271512, "loss": 1.0413, "step": 7749 }, { "epoch": 1.3798076923076923, "grad_norm": 0.6322008371353149, "learning_rate": 0.00014696830261281025, "loss": 0.8306, "step": 7750 }, { "epoch": 1.379985754985755, "grad_norm": 0.7090431451797485, "learning_rate": 0.00014695594469265902, "loss": 1.1829, "step": 7751 }, { "epoch": 1.3801638176638176, 
"grad_norm": 0.5913167595863342, "learning_rate": 0.00014694358585250363, "loss": 0.9769, "step": 7752 }, { "epoch": 1.3803418803418803, "grad_norm": 0.7345432639122009, "learning_rate": 0.00014693122609258616, "loss": 0.9928, "step": 7753 }, { "epoch": 1.380519943019943, "grad_norm": 0.6158214211463928, "learning_rate": 0.00014691886541314884, "loss": 1.1166, "step": 7754 }, { "epoch": 1.3806980056980058, "grad_norm": 0.6874041557312012, "learning_rate": 0.0001469065038144338, "loss": 1.0808, "step": 7755 }, { "epoch": 1.3808760683760684, "grad_norm": 0.8135195970535278, "learning_rate": 0.00014689414129668326, "loss": 0.9482, "step": 7756 }, { "epoch": 1.381054131054131, "grad_norm": 0.6389174461364746, "learning_rate": 0.00014688177786013944, "loss": 1.039, "step": 7757 }, { "epoch": 1.3812321937321936, "grad_norm": 0.6953016519546509, "learning_rate": 0.00014686941350504454, "loss": 0.9426, "step": 7758 }, { "epoch": 1.3814102564102564, "grad_norm": 0.8171859383583069, "learning_rate": 0.00014685704823164087, "loss": 1.0393, "step": 7759 }, { "epoch": 1.3815883190883191, "grad_norm": 0.6968414783477783, "learning_rate": 0.0001468446820401707, "loss": 1.1167, "step": 7760 }, { "epoch": 1.381766381766382, "grad_norm": 0.6916623711585999, "learning_rate": 0.00014683231493087628, "loss": 1.1886, "step": 7761 }, { "epoch": 1.3819444444444444, "grad_norm": 0.7351683378219604, "learning_rate": 0.00014681994690399992, "loss": 0.9893, "step": 7762 }, { "epoch": 1.3821225071225072, "grad_norm": 0.6617491245269775, "learning_rate": 0.00014680757795978395, "loss": 1.0505, "step": 7763 }, { "epoch": 1.3823005698005697, "grad_norm": 0.6627485156059265, "learning_rate": 0.00014679520809847074, "loss": 0.9878, "step": 7764 }, { "epoch": 1.3824786324786325, "grad_norm": 0.704636812210083, "learning_rate": 0.00014678283732030264, "loss": 0.8332, "step": 7765 }, { "epoch": 1.3826566951566952, "grad_norm": 0.698853075504303, "learning_rate": 0.00014677046562552203, "loss": 1.0926, 
"step": 7766 }, { "epoch": 1.382834757834758, "grad_norm": 0.6695869565010071, "learning_rate": 0.0001467580930143713, "loss": 1.0626, "step": 7767 }, { "epoch": 1.3830128205128205, "grad_norm": 0.672173023223877, "learning_rate": 0.00014674571948709286, "loss": 0.8842, "step": 7768 }, { "epoch": 1.3831908831908832, "grad_norm": 0.6735473871231079, "learning_rate": 0.00014673334504392916, "loss": 0.9382, "step": 7769 }, { "epoch": 1.3833689458689458, "grad_norm": 0.6864013075828552, "learning_rate": 0.00014672096968512265, "loss": 1.1369, "step": 7770 }, { "epoch": 1.3835470085470085, "grad_norm": 0.7154954075813293, "learning_rate": 0.0001467085934109158, "loss": 1.1447, "step": 7771 }, { "epoch": 1.3837250712250713, "grad_norm": 0.5934487581253052, "learning_rate": 0.0001466962162215511, "loss": 0.8923, "step": 7772 }, { "epoch": 1.383903133903134, "grad_norm": 0.8116832971572876, "learning_rate": 0.00014668383811727097, "loss": 1.0997, "step": 7773 }, { "epoch": 1.3840811965811965, "grad_norm": 0.8661674857139587, "learning_rate": 0.00014667145909831808, "loss": 1.0112, "step": 7774 }, { "epoch": 1.3842592592592593, "grad_norm": 0.5173856616020203, "learning_rate": 0.00014665907916493488, "loss": 0.6571, "step": 7775 }, { "epoch": 1.3844373219373218, "grad_norm": 0.6165067553520203, "learning_rate": 0.00014664669831736395, "loss": 1.0992, "step": 7776 }, { "epoch": 1.3846153846153846, "grad_norm": 0.6564429998397827, "learning_rate": 0.00014663431655584787, "loss": 0.9103, "step": 7777 }, { "epoch": 1.3847934472934473, "grad_norm": 0.7162124514579773, "learning_rate": 0.00014662193388062923, "loss": 1.0645, "step": 7778 }, { "epoch": 1.38497150997151, "grad_norm": 0.6391215920448303, "learning_rate": 0.00014660955029195064, "loss": 0.902, "step": 7779 }, { "epoch": 1.3851495726495726, "grad_norm": 0.6876635551452637, "learning_rate": 0.00014659716579005475, "loss": 1.0924, "step": 7780 }, { "epoch": 1.3853276353276354, "grad_norm": 0.7254653573036194, 
"learning_rate": 0.00014658478037518418, "loss": 1.0135, "step": 7781 }, { "epoch": 1.385505698005698, "grad_norm": 0.6900535225868225, "learning_rate": 0.00014657239404758162, "loss": 0.983, "step": 7782 }, { "epoch": 1.3856837606837606, "grad_norm": 0.7477042078971863, "learning_rate": 0.00014656000680748975, "loss": 1.0707, "step": 7783 }, { "epoch": 1.3858618233618234, "grad_norm": 0.5756927132606506, "learning_rate": 0.00014654761865515124, "loss": 0.8881, "step": 7784 }, { "epoch": 1.3860398860398861, "grad_norm": 0.6736083626747131, "learning_rate": 0.00014653522959080884, "loss": 1.0193, "step": 7785 }, { "epoch": 1.3862179487179487, "grad_norm": 0.616179883480072, "learning_rate": 0.0001465228396147053, "loss": 0.8676, "step": 7786 }, { "epoch": 1.3863960113960114, "grad_norm": 0.7956456542015076, "learning_rate": 0.00014651044872708338, "loss": 0.9787, "step": 7787 }, { "epoch": 1.386574074074074, "grad_norm": 0.6613463163375854, "learning_rate": 0.00014649805692818578, "loss": 1.0032, "step": 7788 }, { "epoch": 1.3867521367521367, "grad_norm": 0.6215800642967224, "learning_rate": 0.0001464856642182554, "loss": 1.0123, "step": 7789 }, { "epoch": 1.3869301994301995, "grad_norm": 0.6701171398162842, "learning_rate": 0.00014647327059753496, "loss": 0.9108, "step": 7790 }, { "epoch": 1.3871082621082622, "grad_norm": 0.6213465929031372, "learning_rate": 0.00014646087606626736, "loss": 0.9313, "step": 7791 }, { "epoch": 1.3872863247863247, "grad_norm": 0.7535304427146912, "learning_rate": 0.00014644848062469535, "loss": 1.0813, "step": 7792 }, { "epoch": 1.3874643874643875, "grad_norm": 0.6778230667114258, "learning_rate": 0.0001464360842730619, "loss": 1.0405, "step": 7793 }, { "epoch": 1.38764245014245, "grad_norm": 0.7816025614738464, "learning_rate": 0.0001464236870116098, "loss": 0.9228, "step": 7794 }, { "epoch": 1.3878205128205128, "grad_norm": 0.6815229058265686, "learning_rate": 0.00014641128884058203, "loss": 0.9607, "step": 7795 }, { "epoch": 
1.3879985754985755, "grad_norm": 0.7027714848518372, "learning_rate": 0.00014639888976022145, "loss": 0.9379, "step": 7796 }, { "epoch": 1.3881766381766383, "grad_norm": 0.7636353373527527, "learning_rate": 0.00014638648977077104, "loss": 1.1186, "step": 7797 }, { "epoch": 1.3883547008547008, "grad_norm": 0.6732974052429199, "learning_rate": 0.00014637408887247365, "loss": 1.1378, "step": 7798 }, { "epoch": 1.3885327635327636, "grad_norm": 0.7539397478103638, "learning_rate": 0.0001463616870655724, "loss": 0.999, "step": 7799 }, { "epoch": 1.388710826210826, "grad_norm": 0.6872972846031189, "learning_rate": 0.00014634928435031013, "loss": 0.9564, "step": 7800 }, { "epoch": 1.3888888888888888, "grad_norm": 0.6823115348815918, "learning_rate": 0.00014633688072693, "loss": 0.9745, "step": 7801 }, { "epoch": 1.3890669515669516, "grad_norm": 0.6462571620941162, "learning_rate": 0.00014632447619567488, "loss": 0.8314, "step": 7802 }, { "epoch": 1.3892450142450143, "grad_norm": 0.7245402932167053, "learning_rate": 0.0001463120707567879, "loss": 0.8291, "step": 7803 }, { "epoch": 1.3894230769230769, "grad_norm": 0.697179913520813, "learning_rate": 0.00014629966441051208, "loss": 1.017, "step": 7804 }, { "epoch": 1.3896011396011396, "grad_norm": 0.6304250359535217, "learning_rate": 0.00014628725715709053, "loss": 0.9262, "step": 7805 }, { "epoch": 1.3897792022792022, "grad_norm": 0.5780240297317505, "learning_rate": 0.00014627484899676634, "loss": 0.6596, "step": 7806 }, { "epoch": 1.389957264957265, "grad_norm": 0.8030684590339661, "learning_rate": 0.0001462624399297826, "loss": 0.9977, "step": 7807 }, { "epoch": 1.3901353276353277, "grad_norm": 0.7999774813652039, "learning_rate": 0.00014625002995638246, "loss": 1.1036, "step": 7808 }, { "epoch": 1.3903133903133904, "grad_norm": 0.7054862976074219, "learning_rate": 0.00014623761907680904, "loss": 1.1435, "step": 7809 }, { "epoch": 1.390491452991453, "grad_norm": 0.6660647392272949, "learning_rate": 0.00014622520729130556, 
"loss": 0.703, "step": 7810 }, { "epoch": 1.3906695156695157, "grad_norm": 0.6339690089225769, "learning_rate": 0.00014621279460011515, "loss": 1.0451, "step": 7811 }, { "epoch": 1.3908475783475782, "grad_norm": 0.8568736910820007, "learning_rate": 0.00014620038100348102, "loss": 1.009, "step": 7812 }, { "epoch": 1.391025641025641, "grad_norm": 0.7126797437667847, "learning_rate": 0.00014618796650164642, "loss": 0.9592, "step": 7813 }, { "epoch": 1.3912037037037037, "grad_norm": 0.6768994331359863, "learning_rate": 0.00014617555109485453, "loss": 1.09, "step": 7814 }, { "epoch": 1.3913817663817665, "grad_norm": 0.7609471678733826, "learning_rate": 0.00014616313478334864, "loss": 0.9781, "step": 7815 }, { "epoch": 1.391559829059829, "grad_norm": 0.7107006907463074, "learning_rate": 0.00014615071756737203, "loss": 0.9769, "step": 7816 }, { "epoch": 1.3917378917378918, "grad_norm": 0.6324763894081116, "learning_rate": 0.00014613829944716802, "loss": 1.089, "step": 7817 }, { "epoch": 1.3919159544159543, "grad_norm": 0.6617186069488525, "learning_rate": 0.00014612588042297984, "loss": 1.0466, "step": 7818 }, { "epoch": 1.392094017094017, "grad_norm": 0.7881436944007874, "learning_rate": 0.00014611346049505083, "loss": 1.003, "step": 7819 }, { "epoch": 1.3922720797720798, "grad_norm": 0.7391049861907959, "learning_rate": 0.00014610103966362437, "loss": 1.0531, "step": 7820 }, { "epoch": 1.3924501424501425, "grad_norm": 0.6299472451210022, "learning_rate": 0.00014608861792894383, "loss": 0.8433, "step": 7821 }, { "epoch": 1.392628205128205, "grad_norm": 0.6053452491760254, "learning_rate": 0.00014607619529125255, "loss": 0.7945, "step": 7822 }, { "epoch": 1.3928062678062678, "grad_norm": 0.7160114645957947, "learning_rate": 0.0001460637717507939, "loss": 1.1604, "step": 7823 }, { "epoch": 1.3929843304843303, "grad_norm": 0.6308854222297668, "learning_rate": 0.00014605134730781135, "loss": 1.0918, "step": 7824 }, { "epoch": 1.393162393162393, "grad_norm": 
0.7187000513076782, "learning_rate": 0.00014603892196254833, "loss": 1.0594, "step": 7825 }, { "epoch": 1.3933404558404558, "grad_norm": 0.7516581416130066, "learning_rate": 0.00014602649571524826, "loss": 0.9222, "step": 7826 }, { "epoch": 1.3935185185185186, "grad_norm": 0.6340481638908386, "learning_rate": 0.00014601406856615463, "loss": 0.8131, "step": 7827 }, { "epoch": 1.3936965811965811, "grad_norm": 0.8161744475364685, "learning_rate": 0.0001460016405155109, "loss": 0.8695, "step": 7828 }, { "epoch": 1.3938746438746439, "grad_norm": 0.6926971077919006, "learning_rate": 0.0001459892115635606, "loss": 0.9548, "step": 7829 }, { "epoch": 1.3940527065527066, "grad_norm": 0.6669796109199524, "learning_rate": 0.0001459767817105472, "loss": 0.9255, "step": 7830 }, { "epoch": 1.3942307692307692, "grad_norm": 0.6626184582710266, "learning_rate": 0.00014596435095671432, "loss": 1.1141, "step": 7831 }, { "epoch": 1.394408831908832, "grad_norm": 0.6755738854408264, "learning_rate": 0.00014595191930230546, "loss": 0.9596, "step": 7832 }, { "epoch": 1.3945868945868947, "grad_norm": 0.6034863591194153, "learning_rate": 0.00014593948674756417, "loss": 0.8088, "step": 7833 }, { "epoch": 1.3947649572649572, "grad_norm": 0.5638226866722107, "learning_rate": 0.00014592705329273406, "loss": 0.5828, "step": 7834 }, { "epoch": 1.39494301994302, "grad_norm": 0.6902222633361816, "learning_rate": 0.0001459146189380588, "loss": 0.7954, "step": 7835 }, { "epoch": 1.3951210826210827, "grad_norm": 0.7579947710037231, "learning_rate": 0.0001459021836837819, "loss": 1.1301, "step": 7836 }, { "epoch": 1.3952991452991452, "grad_norm": 0.6894911527633667, "learning_rate": 0.00014588974753014712, "loss": 1.082, "step": 7837 }, { "epoch": 1.395477207977208, "grad_norm": 0.6330230832099915, "learning_rate": 0.000145877310477398, "loss": 0.7614, "step": 7838 }, { "epoch": 1.3956552706552707, "grad_norm": 0.6164960265159607, "learning_rate": 0.00014586487252577832, "loss": 0.8981, "step": 7839 }, 
{ "epoch": 1.3958333333333333, "grad_norm": 0.6575061678886414, "learning_rate": 0.0001458524336755317, "loss": 0.9735, "step": 7840 }, { "epoch": 1.396011396011396, "grad_norm": 0.687921941280365, "learning_rate": 0.00014583999392690195, "loss": 0.9207, "step": 7841 }, { "epoch": 1.3961894586894588, "grad_norm": 0.6175212860107422, "learning_rate": 0.00014582755328013274, "loss": 1.0444, "step": 7842 }, { "epoch": 1.3963675213675213, "grad_norm": 0.6351733207702637, "learning_rate": 0.00014581511173546781, "loss": 1.0143, "step": 7843 }, { "epoch": 1.396545584045584, "grad_norm": 0.7235051989555359, "learning_rate": 0.00014580266929315093, "loss": 0.9108, "step": 7844 }, { "epoch": 1.3967236467236468, "grad_norm": 0.6432043313980103, "learning_rate": 0.00014579022595342586, "loss": 0.8674, "step": 7845 }, { "epoch": 1.3969017094017093, "grad_norm": 0.7775412797927856, "learning_rate": 0.00014577778171653648, "loss": 1.0637, "step": 7846 }, { "epoch": 1.397079772079772, "grad_norm": 0.6748763918876648, "learning_rate": 0.00014576533658272655, "loss": 1.0356, "step": 7847 }, { "epoch": 1.3972578347578348, "grad_norm": 0.6940401196479797, "learning_rate": 0.00014575289055223994, "loss": 0.9937, "step": 7848 }, { "epoch": 1.3974358974358974, "grad_norm": 0.6971304416656494, "learning_rate": 0.00014574044362532045, "loss": 0.9753, "step": 7849 }, { "epoch": 1.39761396011396, "grad_norm": 0.6576017141342163, "learning_rate": 0.00014572799580221197, "loss": 1.1233, "step": 7850 }, { "epoch": 1.3977920227920229, "grad_norm": 0.6270702481269836, "learning_rate": 0.00014571554708315843, "loss": 0.9771, "step": 7851 }, { "epoch": 1.3979700854700854, "grad_norm": 0.6898425817489624, "learning_rate": 0.00014570309746840372, "loss": 0.9235, "step": 7852 }, { "epoch": 1.3981481481481481, "grad_norm": 0.7017102241516113, "learning_rate": 0.00014569064695819174, "loss": 1.1056, "step": 7853 }, { "epoch": 1.398326210826211, "grad_norm": 0.6298288702964783, "learning_rate": 
0.00014567819555276647, "loss": 0.8635, "step": 7854 }, { "epoch": 1.3985042735042734, "grad_norm": 0.7173134684562683, "learning_rate": 0.00014566574325237182, "loss": 1.0893, "step": 7855 }, { "epoch": 1.3986823361823362, "grad_norm": 0.7541036605834961, "learning_rate": 0.0001456532900572518, "loss": 1.0996, "step": 7856 }, { "epoch": 1.398860398860399, "grad_norm": 0.6204771399497986, "learning_rate": 0.0001456408359676504, "loss": 0.7601, "step": 7857 }, { "epoch": 1.3990384615384617, "grad_norm": 0.629557192325592, "learning_rate": 0.00014562838098381163, "loss": 0.9239, "step": 7858 }, { "epoch": 1.3992165242165242, "grad_norm": 0.6878390908241272, "learning_rate": 0.00014561592510597954, "loss": 0.9641, "step": 7859 }, { "epoch": 1.399394586894587, "grad_norm": 0.7490049004554749, "learning_rate": 0.00014560346833439813, "loss": 1.0198, "step": 7860 }, { "epoch": 1.3995726495726495, "grad_norm": 0.6337960958480835, "learning_rate": 0.0001455910106693115, "loss": 0.8709, "step": 7861 }, { "epoch": 1.3997507122507122, "grad_norm": 0.6210524439811707, "learning_rate": 0.0001455785521109637, "loss": 1.1049, "step": 7862 }, { "epoch": 1.399928774928775, "grad_norm": 0.7894936203956604, "learning_rate": 0.00014556609265959887, "loss": 0.8933, "step": 7863 }, { "epoch": 1.4001068376068377, "grad_norm": 0.6888098120689392, "learning_rate": 0.00014555363231546112, "loss": 0.9738, "step": 7864 }, { "epoch": 1.4002849002849003, "grad_norm": 0.608799934387207, "learning_rate": 0.00014554117107879456, "loss": 0.9103, "step": 7865 }, { "epoch": 1.400462962962963, "grad_norm": 0.7390474081039429, "learning_rate": 0.00014552870894984335, "loss": 1.2484, "step": 7866 }, { "epoch": 1.4006410256410255, "grad_norm": 0.6513381600379944, "learning_rate": 0.00014551624592885169, "loss": 0.8523, "step": 7867 }, { "epoch": 1.4008190883190883, "grad_norm": 0.6357464790344238, "learning_rate": 0.00014550378201606373, "loss": 0.9594, "step": 7868 }, { "epoch": 1.400997150997151, 
"grad_norm": 0.6893286108970642, "learning_rate": 0.0001454913172117237, "loss": 0.9798, "step": 7869 }, { "epoch": 1.4011752136752138, "grad_norm": 0.6566550731658936, "learning_rate": 0.0001454788515160758, "loss": 1.0532, "step": 7870 }, { "epoch": 1.4013532763532763, "grad_norm": 0.6442158222198486, "learning_rate": 0.00014546638492936425, "loss": 1.0789, "step": 7871 }, { "epoch": 1.401531339031339, "grad_norm": 0.7570971846580505, "learning_rate": 0.0001454539174518334, "loss": 0.9806, "step": 7872 }, { "epoch": 1.4017094017094016, "grad_norm": 0.6180047392845154, "learning_rate": 0.0001454414490837274, "loss": 0.857, "step": 7873 }, { "epoch": 1.4018874643874644, "grad_norm": 0.7143170237541199, "learning_rate": 0.0001454289798252906, "loss": 0.8815, "step": 7874 }, { "epoch": 1.4020655270655271, "grad_norm": 0.6388922929763794, "learning_rate": 0.00014541650967676736, "loss": 0.95, "step": 7875 }, { "epoch": 1.4022435897435899, "grad_norm": 0.7137351632118225, "learning_rate": 0.00014540403863840193, "loss": 0.8973, "step": 7876 }, { "epoch": 1.4024216524216524, "grad_norm": 0.656315267086029, "learning_rate": 0.0001453915667104387, "loss": 1.149, "step": 7877 }, { "epoch": 1.4025997150997151, "grad_norm": 0.7234711647033691, "learning_rate": 0.000145379093893122, "loss": 0.9798, "step": 7878 }, { "epoch": 1.4027777777777777, "grad_norm": 0.6595289707183838, "learning_rate": 0.00014536662018669623, "loss": 1.2704, "step": 7879 }, { "epoch": 1.4029558404558404, "grad_norm": 0.6760551333427429, "learning_rate": 0.00014535414559140576, "loss": 0.8672, "step": 7880 }, { "epoch": 1.4031339031339032, "grad_norm": 0.5916706919670105, "learning_rate": 0.000145341670107495, "loss": 0.888, "step": 7881 }, { "epoch": 1.403311965811966, "grad_norm": 0.7272133231163025, "learning_rate": 0.00014532919373520846, "loss": 1.0466, "step": 7882 }, { "epoch": 1.4034900284900285, "grad_norm": 0.8512467741966248, "learning_rate": 0.00014531671647479048, "loss": 1.2482, "step": 
7883 }, { "epoch": 1.4036680911680912, "grad_norm": 0.5536492466926575, "learning_rate": 0.0001453042383264856, "loss": 0.7823, "step": 7884 }, { "epoch": 1.4038461538461537, "grad_norm": 0.7262215614318848, "learning_rate": 0.0001452917592905383, "loss": 0.9713, "step": 7885 }, { "epoch": 1.4040242165242165, "grad_norm": 0.7146059274673462, "learning_rate": 0.00014527927936719304, "loss": 1.1064, "step": 7886 }, { "epoch": 1.4042022792022792, "grad_norm": 0.5915318131446838, "learning_rate": 0.00014526679855669436, "loss": 0.8567, "step": 7887 }, { "epoch": 1.404380341880342, "grad_norm": 0.6548298001289368, "learning_rate": 0.00014525431685928682, "loss": 1.1359, "step": 7888 }, { "epoch": 1.4045584045584045, "grad_norm": 0.7482563853263855, "learning_rate": 0.0001452418342752149, "loss": 0.9095, "step": 7889 }, { "epoch": 1.4047364672364673, "grad_norm": 0.6660130023956299, "learning_rate": 0.0001452293508047233, "loss": 1.2343, "step": 7890 }, { "epoch": 1.4049145299145298, "grad_norm": 0.7457148432731628, "learning_rate": 0.00014521686644805644, "loss": 1.2086, "step": 7891 }, { "epoch": 1.4050925925925926, "grad_norm": 0.5957929491996765, "learning_rate": 0.00014520438120545906, "loss": 0.9724, "step": 7892 }, { "epoch": 1.4052706552706553, "grad_norm": 0.6832270622253418, "learning_rate": 0.00014519189507717573, "loss": 0.9903, "step": 7893 }, { "epoch": 1.405448717948718, "grad_norm": 0.6202489733695984, "learning_rate": 0.00014517940806345109, "loss": 0.962, "step": 7894 }, { "epoch": 1.4056267806267806, "grad_norm": 0.6419472694396973, "learning_rate": 0.0001451669201645298, "loss": 0.8147, "step": 7895 }, { "epoch": 1.4058048433048433, "grad_norm": 0.61143958568573, "learning_rate": 0.00014515443138065652, "loss": 0.8674, "step": 7896 }, { "epoch": 1.4059829059829059, "grad_norm": 0.7527356743812561, "learning_rate": 0.00014514194171207597, "loss": 1.0581, "step": 7897 }, { "epoch": 1.4061609686609686, "grad_norm": 0.7195194363594055, "learning_rate": 
0.00014512945115903285, "loss": 1.0268, "step": 7898 }, { "epoch": 1.4063390313390314, "grad_norm": 0.7919661998748779, "learning_rate": 0.00014511695972177187, "loss": 1.0259, "step": 7899 }, { "epoch": 1.4065170940170941, "grad_norm": 0.6774758696556091, "learning_rate": 0.00014510446740053783, "loss": 1.1214, "step": 7900 }, { "epoch": 1.4066951566951567, "grad_norm": 0.6102406978607178, "learning_rate": 0.0001450919741955754, "loss": 1.1846, "step": 7901 }, { "epoch": 1.4068732193732194, "grad_norm": 0.7189443707466125, "learning_rate": 0.00014507948010712942, "loss": 0.7758, "step": 7902 }, { "epoch": 1.407051282051282, "grad_norm": 0.654153048992157, "learning_rate": 0.00014506698513544467, "loss": 0.899, "step": 7903 }, { "epoch": 1.4072293447293447, "grad_norm": 0.637934684753418, "learning_rate": 0.00014505448928076598, "loss": 0.8301, "step": 7904 }, { "epoch": 1.4074074074074074, "grad_norm": 0.7504615783691406, "learning_rate": 0.00014504199254333812, "loss": 0.9883, "step": 7905 }, { "epoch": 1.4075854700854702, "grad_norm": 0.7902522683143616, "learning_rate": 0.00014502949492340602, "loss": 0.9615, "step": 7906 }, { "epoch": 1.4077635327635327, "grad_norm": 0.5832732319831848, "learning_rate": 0.0001450169964212145, "loss": 0.7136, "step": 7907 }, { "epoch": 1.4079415954415955, "grad_norm": 0.6025400757789612, "learning_rate": 0.00014500449703700846, "loss": 0.8812, "step": 7908 }, { "epoch": 1.408119658119658, "grad_norm": 0.6412411332130432, "learning_rate": 0.0001449919967710328, "loss": 0.9346, "step": 7909 }, { "epoch": 1.4082977207977208, "grad_norm": 0.7546970844268799, "learning_rate": 0.00014497949562353242, "loss": 1.0794, "step": 7910 }, { "epoch": 1.4084757834757835, "grad_norm": 0.6175593733787537, "learning_rate": 0.00014496699359475222, "loss": 0.8939, "step": 7911 }, { "epoch": 1.4086538461538463, "grad_norm": 0.6571716666221619, "learning_rate": 0.00014495449068493722, "loss": 1.1003, "step": 7912 }, { "epoch": 1.4088319088319088, 
"grad_norm": 0.7038990259170532, "learning_rate": 0.00014494198689433236, "loss": 0.8844, "step": 7913 }, { "epoch": 1.4090099715099715, "grad_norm": 0.7007337212562561, "learning_rate": 0.00014492948222318263, "loss": 1.2038, "step": 7914 }, { "epoch": 1.409188034188034, "grad_norm": 0.7318591475486755, "learning_rate": 0.00014491697667173302, "loss": 1.0388, "step": 7915 }, { "epoch": 1.4093660968660968, "grad_norm": 0.7010329961776733, "learning_rate": 0.00014490447024022855, "loss": 1.1485, "step": 7916 }, { "epoch": 1.4095441595441596, "grad_norm": 0.7844831347465515, "learning_rate": 0.0001448919629289143, "loss": 1.1417, "step": 7917 }, { "epoch": 1.4097222222222223, "grad_norm": 0.6953392624855042, "learning_rate": 0.00014487945473803525, "loss": 0.9546, "step": 7918 }, { "epoch": 1.4099002849002849, "grad_norm": 0.6307587623596191, "learning_rate": 0.00014486694566783655, "loss": 0.9912, "step": 7919 }, { "epoch": 1.4100783475783476, "grad_norm": 0.6200215816497803, "learning_rate": 0.00014485443571856326, "loss": 1.0998, "step": 7920 }, { "epoch": 1.4102564102564101, "grad_norm": 0.7096502184867859, "learning_rate": 0.00014484192489046043, "loss": 0.9587, "step": 7921 }, { "epoch": 1.4104344729344729, "grad_norm": 0.6965526342391968, "learning_rate": 0.00014482941318377327, "loss": 0.8791, "step": 7922 }, { "epoch": 1.4106125356125356, "grad_norm": 0.7303466200828552, "learning_rate": 0.00014481690059874687, "loss": 1.084, "step": 7923 }, { "epoch": 1.4107905982905984, "grad_norm": 0.6144066452980042, "learning_rate": 0.00014480438713562638, "loss": 0.9646, "step": 7924 }, { "epoch": 1.410968660968661, "grad_norm": 0.645222008228302, "learning_rate": 0.00014479187279465704, "loss": 0.728, "step": 7925 }, { "epoch": 1.4111467236467237, "grad_norm": 0.6069912314414978, "learning_rate": 0.000144779357576084, "loss": 0.842, "step": 7926 }, { "epoch": 1.4113247863247862, "grad_norm": 0.6212135553359985, "learning_rate": 0.00014476684148015243, "loss": 0.9817, 
"step": 7927 }, { "epoch": 1.411502849002849, "grad_norm": 0.6893343329429626, "learning_rate": 0.00014475432450710763, "loss": 1.0265, "step": 7928 }, { "epoch": 1.4116809116809117, "grad_norm": 0.6842793822288513, "learning_rate": 0.00014474180665719478, "loss": 1.0593, "step": 7929 }, { "epoch": 1.4118589743589745, "grad_norm": 0.74690842628479, "learning_rate": 0.0001447292879306592, "loss": 0.9096, "step": 7930 }, { "epoch": 1.412037037037037, "grad_norm": 0.6624761819839478, "learning_rate": 0.00014471676832774613, "loss": 1.2244, "step": 7931 }, { "epoch": 1.4122150997150997, "grad_norm": 0.6205778121948242, "learning_rate": 0.00014470424784870088, "loss": 1.1, "step": 7932 }, { "epoch": 1.4123931623931623, "grad_norm": 0.7592337131500244, "learning_rate": 0.00014469172649376875, "loss": 0.963, "step": 7933 }, { "epoch": 1.412571225071225, "grad_norm": 0.673328697681427, "learning_rate": 0.00014467920426319508, "loss": 0.8923, "step": 7934 }, { "epoch": 1.4127492877492878, "grad_norm": 0.6064394116401672, "learning_rate": 0.00014466668115722522, "loss": 0.9679, "step": 7935 }, { "epoch": 1.4129273504273505, "grad_norm": 0.7738677859306335, "learning_rate": 0.00014465415717610454, "loss": 1.0678, "step": 7936 }, { "epoch": 1.413105413105413, "grad_norm": 0.7013397812843323, "learning_rate": 0.00014464163232007836, "loss": 0.9017, "step": 7937 }, { "epoch": 1.4132834757834758, "grad_norm": 0.713291347026825, "learning_rate": 0.0001446291065893922, "loss": 1.1953, "step": 7938 }, { "epoch": 1.4134615384615383, "grad_norm": 0.7538655996322632, "learning_rate": 0.00014461657998429136, "loss": 1.0571, "step": 7939 }, { "epoch": 1.413639601139601, "grad_norm": 0.6358973383903503, "learning_rate": 0.00014460405250502133, "loss": 0.8552, "step": 7940 }, { "epoch": 1.4138176638176638, "grad_norm": 0.67508864402771, "learning_rate": 0.00014459152415182756, "loss": 1.0293, "step": 7941 }, { "epoch": 1.4139957264957266, "grad_norm": 0.7074598670005798, "learning_rate": 
0.00014457899492495546, "loss": 1.2102, "step": 7942 }, { "epoch": 1.414173789173789, "grad_norm": 0.7157037854194641, "learning_rate": 0.00014456646482465058, "loss": 1.0566, "step": 7943 }, { "epoch": 1.4143518518518519, "grad_norm": 0.7918477058410645, "learning_rate": 0.00014455393385115844, "loss": 1.3727, "step": 7944 }, { "epoch": 1.4145299145299146, "grad_norm": 0.569144606590271, "learning_rate": 0.0001445414020047245, "loss": 0.7251, "step": 7945 }, { "epoch": 1.4147079772079771, "grad_norm": 0.7589054107666016, "learning_rate": 0.0001445288692855943, "loss": 1.0155, "step": 7946 }, { "epoch": 1.41488603988604, "grad_norm": 0.7531685829162598, "learning_rate": 0.0001445163356940134, "loss": 0.8404, "step": 7947 }, { "epoch": 1.4150641025641026, "grad_norm": 0.5730917453765869, "learning_rate": 0.0001445038012302274, "loss": 0.8215, "step": 7948 }, { "epoch": 1.4152421652421652, "grad_norm": 0.6960710883140564, "learning_rate": 0.00014449126589448187, "loss": 0.7902, "step": 7949 }, { "epoch": 1.415420227920228, "grad_norm": 0.8207054138183594, "learning_rate": 0.0001444787296870224, "loss": 1.493, "step": 7950 }, { "epoch": 1.4155982905982907, "grad_norm": 0.5854668617248535, "learning_rate": 0.00014446619260809462, "loss": 0.9262, "step": 7951 }, { "epoch": 1.4157763532763532, "grad_norm": 0.5458414554595947, "learning_rate": 0.00014445365465794413, "loss": 0.8431, "step": 7952 }, { "epoch": 1.415954415954416, "grad_norm": 0.6880569458007812, "learning_rate": 0.00014444111583681666, "loss": 1.0184, "step": 7953 }, { "epoch": 1.4161324786324787, "grad_norm": 0.6391083598136902, "learning_rate": 0.00014442857614495783, "loss": 0.88, "step": 7954 }, { "epoch": 1.4163105413105412, "grad_norm": 0.6246135234832764, "learning_rate": 0.00014441603558261335, "loss": 0.776, "step": 7955 }, { "epoch": 1.416488603988604, "grad_norm": 0.6263493895530701, "learning_rate": 0.00014440349415002893, "loss": 0.9069, "step": 7956 }, { "epoch": 1.4166666666666667, 
"grad_norm": 0.7123475670814514, "learning_rate": 0.00014439095184745024, "loss": 0.8339, "step": 7957 }, { "epoch": 1.4168447293447293, "grad_norm": 0.7171050906181335, "learning_rate": 0.00014437840867512309, "loss": 1.0633, "step": 7958 }, { "epoch": 1.417022792022792, "grad_norm": 0.7097769975662231, "learning_rate": 0.00014436586463329322, "loss": 1.0852, "step": 7959 }, { "epoch": 1.4172008547008548, "grad_norm": 0.6889223456382751, "learning_rate": 0.00014435331972220637, "loss": 0.916, "step": 7960 }, { "epoch": 1.4173789173789173, "grad_norm": 0.6674435138702393, "learning_rate": 0.0001443407739421084, "loss": 0.9307, "step": 7961 }, { "epoch": 1.41755698005698, "grad_norm": 0.6578894853591919, "learning_rate": 0.00014432822729324503, "loss": 0.8767, "step": 7962 }, { "epoch": 1.4177350427350428, "grad_norm": 0.7145379781723022, "learning_rate": 0.00014431567977586212, "loss": 0.9962, "step": 7963 }, { "epoch": 1.4179131054131053, "grad_norm": 0.6916680335998535, "learning_rate": 0.00014430313139020555, "loss": 1.0464, "step": 7964 }, { "epoch": 1.418091168091168, "grad_norm": 0.6296181678771973, "learning_rate": 0.00014429058213652116, "loss": 1.0699, "step": 7965 }, { "epoch": 1.4182692307692308, "grad_norm": 0.5640227198600769, "learning_rate": 0.00014427803201505482, "loss": 0.7006, "step": 7966 }, { "epoch": 1.4184472934472934, "grad_norm": 0.7181212306022644, "learning_rate": 0.0001442654810260524, "loss": 1.1648, "step": 7967 }, { "epoch": 1.4186253561253561, "grad_norm": 0.6830772757530212, "learning_rate": 0.00014425292916975984, "loss": 1.0641, "step": 7968 }, { "epoch": 1.4188034188034189, "grad_norm": 0.665716290473938, "learning_rate": 0.00014424037644642307, "loss": 0.8769, "step": 7969 }, { "epoch": 1.4189814814814814, "grad_norm": 0.8088666796684265, "learning_rate": 0.00014422782285628802, "loss": 1.1496, "step": 7970 }, { "epoch": 1.4191595441595442, "grad_norm": 0.7186072468757629, "learning_rate": 0.00014421526839960064, "loss": 0.7421, 
"step": 7971 }, { "epoch": 1.419337606837607, "grad_norm": 0.6405926942825317, "learning_rate": 0.00014420271307660694, "loss": 1.0139, "step": 7972 }, { "epoch": 1.4195156695156697, "grad_norm": 0.7097104787826538, "learning_rate": 0.0001441901568875529, "loss": 1.1582, "step": 7973 }, { "epoch": 1.4196937321937322, "grad_norm": 0.7347947359085083, "learning_rate": 0.00014417759983268452, "loss": 0.9751, "step": 7974 }, { "epoch": 1.419871794871795, "grad_norm": 0.6999621987342834, "learning_rate": 0.00014416504191224787, "loss": 0.9419, "step": 7975 }, { "epoch": 1.4200498575498575, "grad_norm": 0.6500616073608398, "learning_rate": 0.00014415248312648897, "loss": 0.9407, "step": 7976 }, { "epoch": 1.4202279202279202, "grad_norm": 0.6368781328201294, "learning_rate": 0.00014413992347565383, "loss": 1.1224, "step": 7977 }, { "epoch": 1.420405982905983, "grad_norm": 0.6422648429870605, "learning_rate": 0.00014412736295998864, "loss": 0.9573, "step": 7978 }, { "epoch": 1.4205840455840457, "grad_norm": 0.744057297706604, "learning_rate": 0.00014411480157973942, "loss": 1.1384, "step": 7979 }, { "epoch": 1.4207621082621082, "grad_norm": 0.5905839204788208, "learning_rate": 0.00014410223933515232, "loss": 0.8212, "step": 7980 }, { "epoch": 1.420940170940171, "grad_norm": 0.5905438661575317, "learning_rate": 0.0001440896762264734, "loss": 0.8281, "step": 7981 }, { "epoch": 1.4211182336182335, "grad_norm": 0.7087140679359436, "learning_rate": 0.00014407711225394892, "loss": 1.0165, "step": 7982 }, { "epoch": 1.4212962962962963, "grad_norm": 0.6173902153968811, "learning_rate": 0.00014406454741782495, "loss": 0.8823, "step": 7983 }, { "epoch": 1.421474358974359, "grad_norm": 0.6649761199951172, "learning_rate": 0.00014405198171834772, "loss": 0.9489, "step": 7984 }, { "epoch": 1.4216524216524218, "grad_norm": 0.619286835193634, "learning_rate": 0.00014403941515576344, "loss": 0.8149, "step": 7985 }, { "epoch": 1.4218304843304843, "grad_norm": 0.6358469724655151, 
"learning_rate": 0.0001440268477303183, "loss": 1.0558, "step": 7986 }, { "epoch": 1.422008547008547, "grad_norm": 0.7239769697189331, "learning_rate": 0.0001440142794422585, "loss": 1.0528, "step": 7987 }, { "epoch": 1.4221866096866096, "grad_norm": 0.681168794631958, "learning_rate": 0.00014400171029183036, "loss": 1.0867, "step": 7988 }, { "epoch": 1.4223646723646723, "grad_norm": 0.6741157174110413, "learning_rate": 0.0001439891402792801, "loss": 0.9153, "step": 7989 }, { "epoch": 1.422542735042735, "grad_norm": 0.5881659984588623, "learning_rate": 0.00014397656940485403, "loss": 0.92, "step": 7990 }, { "epoch": 1.4227207977207978, "grad_norm": 0.637093722820282, "learning_rate": 0.00014396399766879842, "loss": 0.921, "step": 7991 }, { "epoch": 1.4228988603988604, "grad_norm": 0.7760605216026306, "learning_rate": 0.0001439514250713596, "loss": 1.1451, "step": 7992 }, { "epoch": 1.4230769230769231, "grad_norm": 0.6619600653648376, "learning_rate": 0.00014393885161278393, "loss": 1.0365, "step": 7993 }, { "epoch": 1.4232549857549857, "grad_norm": 0.5354374051094055, "learning_rate": 0.0001439262772933177, "loss": 0.8718, "step": 7994 }, { "epoch": 1.4234330484330484, "grad_norm": 0.7063560485839844, "learning_rate": 0.00014391370211320735, "loss": 0.8258, "step": 7995 }, { "epoch": 1.4236111111111112, "grad_norm": 0.6876368522644043, "learning_rate": 0.00014390112607269923, "loss": 0.9579, "step": 7996 }, { "epoch": 1.423789173789174, "grad_norm": 0.6976612210273743, "learning_rate": 0.00014388854917203974, "loss": 1.0376, "step": 7997 }, { "epoch": 1.4239672364672364, "grad_norm": 0.6157355308532715, "learning_rate": 0.00014387597141147525, "loss": 0.8743, "step": 7998 }, { "epoch": 1.4241452991452992, "grad_norm": 0.7273156046867371, "learning_rate": 0.0001438633927912523, "loss": 1.101, "step": 7999 }, { "epoch": 1.4243233618233617, "grad_norm": 0.918380618095398, "learning_rate": 0.0001438508133116173, "loss": 0.9625, "step": 8000 }, { "epoch": 
1.4245014245014245, "grad_norm": 0.626040518283844, "learning_rate": 0.00014383823297281666, "loss": 0.9552, "step": 8001 }, { "epoch": 1.4246794871794872, "grad_norm": 0.7320386171340942, "learning_rate": 0.00014382565177509693, "loss": 1.0719, "step": 8002 }, { "epoch": 1.42485754985755, "grad_norm": 0.7283148169517517, "learning_rate": 0.0001438130697187046, "loss": 1.0455, "step": 8003 }, { "epoch": 1.4250356125356125, "grad_norm": 0.6614177823066711, "learning_rate": 0.00014380048680388613, "loss": 0.9876, "step": 8004 }, { "epoch": 1.4252136752136753, "grad_norm": 0.6726453900337219, "learning_rate": 0.00014378790303088817, "loss": 0.9861, "step": 8005 }, { "epoch": 1.4253917378917378, "grad_norm": 0.7968725562095642, "learning_rate": 0.00014377531839995718, "loss": 1.1662, "step": 8006 }, { "epoch": 1.4255698005698005, "grad_norm": 0.6510586738586426, "learning_rate": 0.0001437627329113398, "loss": 0.9452, "step": 8007 }, { "epoch": 1.4257478632478633, "grad_norm": 0.6933155655860901, "learning_rate": 0.00014375014656528253, "loss": 1.0149, "step": 8008 }, { "epoch": 1.425925925925926, "grad_norm": 0.7141832113265991, "learning_rate": 0.00014373755936203204, "loss": 1.0667, "step": 8009 }, { "epoch": 1.4261039886039886, "grad_norm": 0.6352181434631348, "learning_rate": 0.00014372497130183494, "loss": 0.8652, "step": 8010 }, { "epoch": 1.4262820512820513, "grad_norm": 0.7494860291481018, "learning_rate": 0.00014371238238493786, "loss": 0.9592, "step": 8011 }, { "epoch": 1.4264601139601139, "grad_norm": 0.610556423664093, "learning_rate": 0.00014369979261158746, "loss": 0.7015, "step": 8012 }, { "epoch": 1.4266381766381766, "grad_norm": 0.7305756211280823, "learning_rate": 0.00014368720198203037, "loss": 0.9681, "step": 8013 }, { "epoch": 1.4268162393162394, "grad_norm": 0.6964020133018494, "learning_rate": 0.0001436746104965133, "loss": 1.1166, "step": 8014 }, { "epoch": 1.426994301994302, "grad_norm": 0.7449237108230591, "learning_rate": 
0.00014366201815528302, "loss": 1.1331, "step": 8015 }, { "epoch": 1.4271723646723646, "grad_norm": 0.625834047794342, "learning_rate": 0.00014364942495858615, "loss": 0.8796, "step": 8016 }, { "epoch": 1.4273504273504274, "grad_norm": 0.664559006690979, "learning_rate": 0.0001436368309066695, "loss": 1.0263, "step": 8017 }, { "epoch": 1.42752849002849, "grad_norm": null, "learning_rate": 0.0001436368309066695, "loss": 1.0731, "step": 8018 }, { "epoch": 1.4277065527065527, "grad_norm": 0.6714464426040649, "learning_rate": 0.00014362423599977977, "loss": 0.9345, "step": 8019 }, { "epoch": 1.4278846153846154, "grad_norm": 0.7595751285552979, "learning_rate": 0.00014361164023816376, "loss": 0.9646, "step": 8020 }, { "epoch": 1.4280626780626782, "grad_norm": 0.6413954496383667, "learning_rate": 0.00014359904362206828, "loss": 1.0471, "step": 8021 }, { "epoch": 1.4282407407407407, "grad_norm": 0.7298843264579773, "learning_rate": 0.00014358644615174008, "loss": 0.8932, "step": 8022 }, { "epoch": 1.4284188034188035, "grad_norm": 0.8022156953811646, "learning_rate": 0.00014357384782742602, "loss": 1.0437, "step": 8023 }, { "epoch": 1.428596866096866, "grad_norm": 0.7264443635940552, "learning_rate": 0.00014356124864937296, "loss": 0.9368, "step": 8024 }, { "epoch": 1.4287749287749287, "grad_norm": 0.6819384098052979, "learning_rate": 0.00014354864861782768, "loss": 1.0, "step": 8025 }, { "epoch": 1.4289529914529915, "grad_norm": 0.5945104956626892, "learning_rate": 0.0001435360477330371, "loss": 0.8108, "step": 8026 }, { "epoch": 1.4291310541310542, "grad_norm": 0.6497398018836975, "learning_rate": 0.0001435234459952481, "loss": 0.8712, "step": 8027 }, { "epoch": 1.4293091168091168, "grad_norm": 0.6424077749252319, "learning_rate": 0.0001435108434047076, "loss": 0.9172, "step": 8028 }, { "epoch": 1.4294871794871795, "grad_norm": 0.6806963086128235, "learning_rate": 0.00014349823996166253, "loss": 1.1648, "step": 8029 }, { "epoch": 1.429665242165242, "grad_norm": 
0.6601083874702454, "learning_rate": 0.00014348563566635977, "loss": 0.9453, "step": 8030 }, { "epoch": 1.4298433048433048, "grad_norm": 0.7024385929107666, "learning_rate": 0.00014347303051904636, "loss": 1.074, "step": 8031 }, { "epoch": 1.4300213675213675, "grad_norm": 0.7094005942344666, "learning_rate": 0.00014346042451996918, "loss": 0.9976, "step": 8032 }, { "epoch": 1.4301994301994303, "grad_norm": 0.6775936484336853, "learning_rate": 0.0001434478176693753, "loss": 0.9039, "step": 8033 }, { "epoch": 1.4303774928774928, "grad_norm": 0.6920986771583557, "learning_rate": 0.00014343520996751166, "loss": 0.9122, "step": 8034 }, { "epoch": 1.4305555555555556, "grad_norm": 0.720690906047821, "learning_rate": 0.00014342260141462528, "loss": 1.1028, "step": 8035 }, { "epoch": 1.430733618233618, "grad_norm": 0.624546229839325, "learning_rate": 0.00014340999201096328, "loss": 0.9083, "step": 8036 }, { "epoch": 1.4309116809116809, "grad_norm": 0.6560490727424622, "learning_rate": 0.00014339738175677265, "loss": 0.8029, "step": 8037 }, { "epoch": 1.4310897435897436, "grad_norm": 0.8266100883483887, "learning_rate": 0.00014338477065230047, "loss": 0.9655, "step": 8038 }, { "epoch": 1.4312678062678064, "grad_norm": 0.6593570113182068, "learning_rate": 0.00014337215869779385, "loss": 1.0299, "step": 8039 }, { "epoch": 1.431445868945869, "grad_norm": 0.6321794390678406, "learning_rate": 0.00014335954589349986, "loss": 0.8755, "step": 8040 }, { "epoch": 1.4316239316239316, "grad_norm": 0.7030870318412781, "learning_rate": 0.00014334693223966562, "loss": 1.1226, "step": 8041 }, { "epoch": 1.4318019943019942, "grad_norm": 0.7794312238693237, "learning_rate": 0.0001433343177365383, "loss": 1.1252, "step": 8042 }, { "epoch": 1.431980056980057, "grad_norm": 0.6115018129348755, "learning_rate": 0.00014332170238436507, "loss": 0.8753, "step": 8043 }, { "epoch": 1.4321581196581197, "grad_norm": 0.8525674939155579, "learning_rate": 0.00014330908618339304, "loss": 0.9135, "step": 8044 
}, { "epoch": 1.4323361823361824, "grad_norm": 0.6869912147521973, "learning_rate": 0.00014329646913386948, "loss": 0.868, "step": 8045 }, { "epoch": 1.432514245014245, "grad_norm": 0.5877542495727539, "learning_rate": 0.0001432838512360415, "loss": 0.9051, "step": 8046 }, { "epoch": 1.4326923076923077, "grad_norm": 0.6609327793121338, "learning_rate": 0.0001432712324901564, "loss": 0.9084, "step": 8047 }, { "epoch": 1.4328703703703702, "grad_norm": 0.6318345069885254, "learning_rate": 0.0001432586128964614, "loss": 0.8291, "step": 8048 }, { "epoch": 1.433048433048433, "grad_norm": 0.6973567008972168, "learning_rate": 0.0001432459924552037, "loss": 0.97, "step": 8049 }, { "epoch": 1.4332264957264957, "grad_norm": 0.6838201284408569, "learning_rate": 0.00014323337116663062, "loss": 1.0957, "step": 8050 }, { "epoch": 1.4334045584045585, "grad_norm": 0.7472857236862183, "learning_rate": 0.00014322074903098944, "loss": 1.0981, "step": 8051 }, { "epoch": 1.433582621082621, "grad_norm": 0.7723061442375183, "learning_rate": 0.0001432081260485275, "loss": 1.2231, "step": 8052 }, { "epoch": 1.4337606837606838, "grad_norm": 0.681834876537323, "learning_rate": 0.00014319550221949208, "loss": 1.073, "step": 8053 }, { "epoch": 1.4339387464387463, "grad_norm": 0.6566045880317688, "learning_rate": 0.00014318287754413051, "loss": 1.1298, "step": 8054 }, { "epoch": 1.434116809116809, "grad_norm": 0.6792440414428711, "learning_rate": 0.00014317025202269015, "loss": 1.2224, "step": 8055 }, { "epoch": 1.4342948717948718, "grad_norm": 0.7946709394454956, "learning_rate": 0.00014315762565541838, "loss": 1.0728, "step": 8056 }, { "epoch": 1.4344729344729346, "grad_norm": 0.633466899394989, "learning_rate": 0.00014314499844256262, "loss": 0.944, "step": 8057 }, { "epoch": 1.434650997150997, "grad_norm": 0.7308502197265625, "learning_rate": 0.00014313237038437023, "loss": 1.0684, "step": 8058 }, { "epoch": 1.4348290598290598, "grad_norm": 0.6483737230300903, "learning_rate": 
0.00014311974148108862, "loss": 1.0843, "step": 8059 }, { "epoch": 1.4350071225071226, "grad_norm": 0.6301209926605225, "learning_rate": 0.00014310711173296526, "loss": 1.0083, "step": 8060 }, { "epoch": 1.4351851851851851, "grad_norm": 0.6674302816390991, "learning_rate": 0.00014309448114024757, "loss": 0.9877, "step": 8061 }, { "epoch": 1.4353632478632479, "grad_norm": 0.6888732314109802, "learning_rate": 0.00014308184970318307, "loss": 0.9937, "step": 8062 }, { "epoch": 1.4355413105413106, "grad_norm": 0.6922950148582458, "learning_rate": 0.00014306921742201923, "loss": 1.0149, "step": 8063 }, { "epoch": 1.4357193732193732, "grad_norm": 0.6050686240196228, "learning_rate": 0.00014305658429700352, "loss": 0.7882, "step": 8064 }, { "epoch": 1.435897435897436, "grad_norm": 0.5080767869949341, "learning_rate": 0.00014304395032838348, "loss": 0.7796, "step": 8065 }, { "epoch": 1.4360754985754987, "grad_norm": 0.6382707953453064, "learning_rate": 0.00014303131551640668, "loss": 0.965, "step": 8066 }, { "epoch": 1.4362535612535612, "grad_norm": 0.7153477668762207, "learning_rate": 0.00014301867986132063, "loss": 1.1277, "step": 8067 }, { "epoch": 1.436431623931624, "grad_norm": 0.6208404898643494, "learning_rate": 0.00014300604336337292, "loss": 0.8246, "step": 8068 }, { "epoch": 1.4366096866096867, "grad_norm": 0.719695508480072, "learning_rate": 0.0001429934060228111, "loss": 0.7681, "step": 8069 }, { "epoch": 1.4367877492877492, "grad_norm": 0.6219030618667603, "learning_rate": 0.0001429807678398828, "loss": 1.0425, "step": 8070 }, { "epoch": 1.436965811965812, "grad_norm": 0.6080238819122314, "learning_rate": 0.00014296812881483566, "loss": 0.8762, "step": 8071 }, { "epoch": 1.4371438746438747, "grad_norm": 0.6264194846153259, "learning_rate": 0.00014295548894791729, "loss": 1.087, "step": 8072 }, { "epoch": 1.4373219373219372, "grad_norm": 0.6503600478172302, "learning_rate": 0.00014294284823937535, "loss": 1.0583, "step": 8073 }, { "epoch": 1.4375, "grad_norm": 
0.7623817324638367, "learning_rate": 0.0001429302066894575, "loss": 1.2372, "step": 8074 }, { "epoch": 1.4376780626780628, "grad_norm": 0.7020344138145447, "learning_rate": 0.00014291756429841144, "loss": 1.2163, "step": 8075 }, { "epoch": 1.4378561253561253, "grad_norm": 0.7070338129997253, "learning_rate": 0.00014290492106648484, "loss": 0.986, "step": 8076 }, { "epoch": 1.438034188034188, "grad_norm": 0.6407621502876282, "learning_rate": 0.00014289227699392545, "loss": 0.9329, "step": 8077 }, { "epoch": 1.4382122507122508, "grad_norm": 0.6836710572242737, "learning_rate": 0.00014287963208098098, "loss": 0.9252, "step": 8078 }, { "epoch": 1.4383903133903133, "grad_norm": 0.648642897605896, "learning_rate": 0.00014286698632789922, "loss": 1.0457, "step": 8079 }, { "epoch": 1.438568376068376, "grad_norm": 0.7015881538391113, "learning_rate": 0.0001428543397349279, "loss": 1.0516, "step": 8080 }, { "epoch": 1.4387464387464388, "grad_norm": 0.6031532883644104, "learning_rate": 0.0001428416923023148, "loss": 0.9423, "step": 8081 }, { "epoch": 1.4389245014245013, "grad_norm": 0.8235578536987305, "learning_rate": 0.00014282904403030772, "loss": 1.3433, "step": 8082 }, { "epoch": 1.439102564102564, "grad_norm": 0.7355761528015137, "learning_rate": 0.00014281639491915452, "loss": 1.0128, "step": 8083 }, { "epoch": 1.4392806267806268, "grad_norm": 0.7429629564285278, "learning_rate": 0.00014280374496910303, "loss": 0.8546, "step": 8084 }, { "epoch": 1.4394586894586894, "grad_norm": 0.5831776857376099, "learning_rate": 0.00014279109418040105, "loss": 0.9021, "step": 8085 }, { "epoch": 1.4396367521367521, "grad_norm": 0.6585184931755066, "learning_rate": 0.00014277844255329645, "loss": 0.9256, "step": 8086 }, { "epoch": 1.4398148148148149, "grad_norm": 0.6412501931190491, "learning_rate": 0.00014276579008803717, "loss": 0.9305, "step": 8087 }, { "epoch": 1.4399928774928774, "grad_norm": 0.6305423378944397, "learning_rate": 0.00014275313678487102, "loss": 0.9471, "step": 8088 
}, { "epoch": 1.4401709401709402, "grad_norm": 0.7160914540290833, "learning_rate": 0.00014274048264404602, "loss": 0.8798, "step": 8089 }, { "epoch": 1.440349002849003, "grad_norm": 0.6740858554840088, "learning_rate": 0.00014272782766581004, "loss": 0.9022, "step": 8090 }, { "epoch": 1.4405270655270654, "grad_norm": 0.7554821968078613, "learning_rate": 0.000142715171850411, "loss": 1.0924, "step": 8091 }, { "epoch": 1.4407051282051282, "grad_norm": 0.7361162304878235, "learning_rate": 0.00014270251519809694, "loss": 0.9907, "step": 8092 }, { "epoch": 1.440883190883191, "grad_norm": 0.731813371181488, "learning_rate": 0.0001426898577091158, "loss": 1.1765, "step": 8093 }, { "epoch": 1.4410612535612537, "grad_norm": 0.6877756714820862, "learning_rate": 0.00014267719938371558, "loss": 1.0536, "step": 8094 }, { "epoch": 1.4412393162393162, "grad_norm": 0.6724407076835632, "learning_rate": 0.00014266454022214426, "loss": 1.1895, "step": 8095 }, { "epoch": 1.441417378917379, "grad_norm": 0.6946671605110168, "learning_rate": 0.0001426518802246499, "loss": 1.0437, "step": 8096 }, { "epoch": 1.4415954415954415, "grad_norm": 0.7032839059829712, "learning_rate": 0.00014263921939148058, "loss": 1.1363, "step": 8097 }, { "epoch": 1.4417735042735043, "grad_norm": 0.6942192316055298, "learning_rate": 0.00014262655772288434, "loss": 1.315, "step": 8098 }, { "epoch": 1.441951566951567, "grad_norm": 0.7002301812171936, "learning_rate": 0.00014261389521910922, "loss": 1.0546, "step": 8099 }, { "epoch": 1.4421296296296298, "grad_norm": 0.7260788083076477, "learning_rate": 0.00014260123188040335, "loss": 0.9374, "step": 8100 }, { "epoch": 1.4423076923076923, "grad_norm": 0.6629201173782349, "learning_rate": 0.00014258856770701486, "loss": 0.8632, "step": 8101 }, { "epoch": 1.442485754985755, "grad_norm": 0.6570318937301636, "learning_rate": 0.0001425759026991918, "loss": 1.0102, "step": 8102 }, { "epoch": 1.4426638176638176, "grad_norm": 0.7696560621261597, "learning_rate": 
0.00014256323685718242, "loss": 0.9703, "step": 8103 }, { "epoch": 1.4428418803418803, "grad_norm": 0.7206611633300781, "learning_rate": 0.00014255057018123482, "loss": 1.1728, "step": 8104 }, { "epoch": 1.443019943019943, "grad_norm": 0.6871611475944519, "learning_rate": 0.0001425379026715972, "loss": 0.9377, "step": 8105 }, { "epoch": 1.4431980056980058, "grad_norm": 0.6027442812919617, "learning_rate": 0.00014252523432851775, "loss": 0.9212, "step": 8106 }, { "epoch": 1.4433760683760684, "grad_norm": 0.7149752378463745, "learning_rate": 0.00014251256515224463, "loss": 0.9654, "step": 8107 }, { "epoch": 1.443554131054131, "grad_norm": 0.5949522256851196, "learning_rate": 0.00014249989514302614, "loss": 1.0646, "step": 8108 }, { "epoch": 1.4437321937321936, "grad_norm": 0.7345452904701233, "learning_rate": 0.0001424872243011105, "loss": 0.9801, "step": 8109 }, { "epoch": 1.4439102564102564, "grad_norm": 0.8045009970664978, "learning_rate": 0.00014247455262674592, "loss": 1.3529, "step": 8110 }, { "epoch": 1.4440883190883191, "grad_norm": 0.6712123155593872, "learning_rate": 0.00014246188012018073, "loss": 1.0416, "step": 8111 }, { "epoch": 1.444266381766382, "grad_norm": 0.7811154127120972, "learning_rate": 0.00014244920678166322, "loss": 1.2019, "step": 8112 }, { "epoch": 1.4444444444444444, "grad_norm": 0.6834486126899719, "learning_rate": 0.00014243653261144167, "loss": 0.986, "step": 8113 }, { "epoch": 1.4446225071225072, "grad_norm": 0.6901041269302368, "learning_rate": 0.00014242385760976443, "loss": 1.0988, "step": 8114 }, { "epoch": 1.4448005698005697, "grad_norm": 0.6233634948730469, "learning_rate": 0.00014241118177687982, "loss": 0.7748, "step": 8115 }, { "epoch": 1.4449786324786325, "grad_norm": 0.6899837851524353, "learning_rate": 0.00014239850511303624, "loss": 0.9734, "step": 8116 }, { "epoch": 1.4451566951566952, "grad_norm": 0.6316244006156921, "learning_rate": 0.00014238582761848197, "loss": 0.7888, "step": 8117 }, { "epoch": 1.445334757834758, 
"grad_norm": 0.6074259877204895, "learning_rate": 0.00014237314929346545, "loss": 0.8843, "step": 8118 }, { "epoch": 1.4455128205128205, "grad_norm": 0.6112192273139954, "learning_rate": 0.00014236047013823516, "loss": 0.8529, "step": 8119 }, { "epoch": 1.4456908831908832, "grad_norm": 0.6883894801139832, "learning_rate": 0.0001423477901530394, "loss": 0.9506, "step": 8120 }, { "epoch": 1.4458689458689458, "grad_norm": 0.7248309254646301, "learning_rate": 0.00014233510933812666, "loss": 0.9573, "step": 8121 }, { "epoch": 1.4460470085470085, "grad_norm": 0.6853367686271667, "learning_rate": 0.00014232242769374542, "loss": 0.9903, "step": 8122 }, { "epoch": 1.4462250712250713, "grad_norm": 0.7179274559020996, "learning_rate": 0.0001423097452201441, "loss": 0.9157, "step": 8123 }, { "epoch": 1.446403133903134, "grad_norm": 0.6704817414283752, "learning_rate": 0.00014229706191757127, "loss": 1.1361, "step": 8124 }, { "epoch": 1.4465811965811965, "grad_norm": 0.6380739212036133, "learning_rate": 0.00014228437778627533, "loss": 0.9336, "step": 8125 }, { "epoch": 1.4467592592592593, "grad_norm": 0.6275362372398376, "learning_rate": 0.00014227169282650487, "loss": 0.9617, "step": 8126 }, { "epoch": 1.4469373219373218, "grad_norm": 0.5644828677177429, "learning_rate": 0.00014225900703850836, "loss": 0.7384, "step": 8127 }, { "epoch": 1.4471153846153846, "grad_norm": 0.6522284150123596, "learning_rate": 0.00014224632042253443, "loss": 1.1098, "step": 8128 }, { "epoch": 1.4472934472934473, "grad_norm": 0.6228049993515015, "learning_rate": 0.0001422336329788316, "loss": 1.1061, "step": 8129 }, { "epoch": 1.44747150997151, "grad_norm": 0.6092000603675842, "learning_rate": 0.00014222094470764848, "loss": 0.808, "step": 8130 }, { "epoch": 1.4476495726495726, "grad_norm": 0.667435348033905, "learning_rate": 0.00014220825560923363, "loss": 1.1223, "step": 8131 }, { "epoch": 1.4478276353276354, "grad_norm": 0.6080766320228577, "learning_rate": 0.0001421955656838357, "loss": 1.0099, 
"step": 8132 }, { "epoch": 1.448005698005698, "grad_norm": 0.7597638368606567, "learning_rate": 0.00014218287493170332, "loss": 0.9718, "step": 8133 }, { "epoch": 1.4481837606837606, "grad_norm": 0.574130654335022, "learning_rate": 0.0001421701833530851, "loss": 0.7745, "step": 8134 }, { "epoch": 1.4483618233618234, "grad_norm": 0.6372822523117065, "learning_rate": 0.0001421574909482298, "loss": 1.0088, "step": 8135 }, { "epoch": 1.4485398860398861, "grad_norm": 0.6759644746780396, "learning_rate": 0.000142144797717386, "loss": 0.9684, "step": 8136 }, { "epoch": 1.4487179487179487, "grad_norm": 0.706351637840271, "learning_rate": 0.00014213210366080244, "loss": 1.021, "step": 8137 }, { "epoch": 1.4488960113960114, "grad_norm": 0.6976894736289978, "learning_rate": 0.0001421194087787278, "loss": 1.1038, "step": 8138 }, { "epoch": 1.449074074074074, "grad_norm": 0.7322551012039185, "learning_rate": 0.00014210671307141092, "loss": 1.0213, "step": 8139 }, { "epoch": 1.4492521367521367, "grad_norm": 0.5885626077651978, "learning_rate": 0.0001420940165391004, "loss": 0.821, "step": 8140 }, { "epoch": 1.4494301994301995, "grad_norm": 0.7009791135787964, "learning_rate": 0.0001420813191820451, "loss": 0.8647, "step": 8141 }, { "epoch": 1.4496082621082622, "grad_norm": 0.5715423822402954, "learning_rate": 0.00014206862100049375, "loss": 0.873, "step": 8142 }, { "epoch": 1.4497863247863247, "grad_norm": 1.1452178955078125, "learning_rate": 0.00014205592199469514, "loss": 1.2523, "step": 8143 }, { "epoch": 1.4499643874643875, "grad_norm": 0.8076814413070679, "learning_rate": 0.00014204322216489814, "loss": 1.1071, "step": 8144 }, { "epoch": 1.45014245014245, "grad_norm": 0.7325751185417175, "learning_rate": 0.00014203052151135154, "loss": 0.9846, "step": 8145 }, { "epoch": 1.4503205128205128, "grad_norm": 0.7009061574935913, "learning_rate": 0.00014201782003430417, "loss": 0.8153, "step": 8146 }, { "epoch": 1.4504985754985755, "grad_norm": 0.6502353549003601, "learning_rate": 
0.0001420051177340049, "loss": 0.8959, "step": 8147 }, { "epoch": 1.4506766381766383, "grad_norm": 0.6134430170059204, "learning_rate": 0.00014199241461070261, "loss": 0.9683, "step": 8148 }, { "epoch": 1.4508547008547008, "grad_norm": 0.720160722732544, "learning_rate": 0.0001419797106646462, "loss": 0.9579, "step": 8149 }, { "epoch": 1.4510327635327636, "grad_norm": 0.6141422986984253, "learning_rate": 0.00014196700589608454, "loss": 0.9427, "step": 8150 }, { "epoch": 1.451210826210826, "grad_norm": 0.6835139393806458, "learning_rate": 0.00014195430030526656, "loss": 1.0374, "step": 8151 }, { "epoch": 1.4513888888888888, "grad_norm": 0.6829691529273987, "learning_rate": 0.00014194159389244128, "loss": 0.9418, "step": 8152 }, { "epoch": 1.4515669515669516, "grad_norm": 0.7142195701599121, "learning_rate": 0.00014192888665785755, "loss": 1.1876, "step": 8153 }, { "epoch": 1.4517450142450143, "grad_norm": 0.6719943284988403, "learning_rate": 0.0001419161786017644, "loss": 1.1417, "step": 8154 }, { "epoch": 1.4519230769230769, "grad_norm": 0.6478939652442932, "learning_rate": 0.0001419034697244108, "loss": 0.943, "step": 8155 }, { "epoch": 1.4521011396011396, "grad_norm": 0.6308888792991638, "learning_rate": 0.00014189076002604575, "loss": 0.9842, "step": 8156 }, { "epoch": 1.4522792022792022, "grad_norm": 0.673559844493866, "learning_rate": 0.00014187804950691827, "loss": 0.8108, "step": 8157 }, { "epoch": 1.452457264957265, "grad_norm": 0.5895359516143799, "learning_rate": 0.00014186533816727744, "loss": 0.8187, "step": 8158 }, { "epoch": 1.4526353276353277, "grad_norm": 0.6703287363052368, "learning_rate": 0.00014185262600737225, "loss": 0.9012, "step": 8159 }, { "epoch": 1.4528133903133904, "grad_norm": 0.697728157043457, "learning_rate": 0.00014183991302745182, "loss": 1.2572, "step": 8160 }, { "epoch": 1.452991452991453, "grad_norm": 0.599371075630188, "learning_rate": 0.00014182719922776514, "loss": 1.078, "step": 8161 }, { "epoch": 1.4531695156695157, 
"grad_norm": 0.6774863600730896, "learning_rate": 0.00014181448460856143, "loss": 1.0607, "step": 8162 }, { "epoch": 1.4533475783475782, "grad_norm": 0.6872009038925171, "learning_rate": 0.00014180176917008976, "loss": 1.0713, "step": 8163 }, { "epoch": 1.453525641025641, "grad_norm": 0.7949981093406677, "learning_rate": 0.00014178905291259926, "loss": 1.0471, "step": 8164 }, { "epoch": 1.4537037037037037, "grad_norm": 0.6592127084732056, "learning_rate": 0.00014177633583633908, "loss": 0.8409, "step": 8165 }, { "epoch": 1.4538817663817665, "grad_norm": 0.6745635867118835, "learning_rate": 0.00014176361794155837, "loss": 1.0859, "step": 8166 }, { "epoch": 1.454059829059829, "grad_norm": 0.6661605834960938, "learning_rate": 0.00014175089922850633, "loss": 1.0587, "step": 8167 }, { "epoch": 1.4542378917378918, "grad_norm": 0.6697571873664856, "learning_rate": 0.00014173817969743212, "loss": 0.8876, "step": 8168 }, { "epoch": 1.4544159544159543, "grad_norm": 0.6162588000297546, "learning_rate": 0.000141725459348585, "loss": 0.9575, "step": 8169 }, { "epoch": 1.454594017094017, "grad_norm": 0.6235088109970093, "learning_rate": 0.00014171273818221422, "loss": 0.9209, "step": 8170 }, { "epoch": 1.4547720797720798, "grad_norm": 0.6744212508201599, "learning_rate": 0.00014170001619856896, "loss": 0.9704, "step": 8171 }, { "epoch": 1.4549501424501425, "grad_norm": 0.6781345009803772, "learning_rate": 0.0001416872933978985, "loss": 1.1507, "step": 8172 }, { "epoch": 1.455128205128205, "grad_norm": 0.7160060405731201, "learning_rate": 0.0001416745697804521, "loss": 1.2529, "step": 8173 }, { "epoch": 1.4553062678062678, "grad_norm": 0.6742389798164368, "learning_rate": 0.00014166184534647913, "loss": 1.0168, "step": 8174 }, { "epoch": 1.4554843304843303, "grad_norm": 0.6685828566551208, "learning_rate": 0.0001416491200962288, "loss": 1.0807, "step": 8175 }, { "epoch": 1.455662393162393, "grad_norm": 0.6998327374458313, "learning_rate": 0.0001416363940299505, "loss": 1.1711, 
"step": 8176 }, { "epoch": 1.4558404558404558, "grad_norm": 0.7132518291473389, "learning_rate": 0.00014162366714789358, "loss": 1.1392, "step": 8177 }, { "epoch": 1.4560185185185186, "grad_norm": 0.6995887160301208, "learning_rate": 0.0001416109394503073, "loss": 1.3335, "step": 8178 }, { "epoch": 1.4561965811965811, "grad_norm": 0.7161234021186829, "learning_rate": 0.00014159821093744115, "loss": 0.9725, "step": 8179 }, { "epoch": 1.4563746438746439, "grad_norm": 0.7678874135017395, "learning_rate": 0.00014158548160954446, "loss": 1.1578, "step": 8180 }, { "epoch": 1.4565527065527066, "grad_norm": 0.67372065782547, "learning_rate": 0.00014157275146686662, "loss": 1.0867, "step": 8181 }, { "epoch": 1.4567307692307692, "grad_norm": 0.7757831811904907, "learning_rate": 0.00014156002050965712, "loss": 0.9768, "step": 8182 }, { "epoch": 1.456908831908832, "grad_norm": 0.7174801230430603, "learning_rate": 0.00014154728873816533, "loss": 1.1712, "step": 8183 }, { "epoch": 1.4570868945868947, "grad_norm": 0.5972673892974854, "learning_rate": 0.0001415345561526407, "loss": 0.9571, "step": 8184 }, { "epoch": 1.4572649572649572, "grad_norm": 0.7999650835990906, "learning_rate": 0.00014152182275333275, "loss": 1.0583, "step": 8185 }, { "epoch": 1.45744301994302, "grad_norm": 0.6737848520278931, "learning_rate": 0.00014150908854049091, "loss": 1.0562, "step": 8186 }, { "epoch": 1.4576210826210827, "grad_norm": 0.7756418585777283, "learning_rate": 0.00014149635351436474, "loss": 1.2301, "step": 8187 }, { "epoch": 1.4577991452991452, "grad_norm": 0.5633914470672607, "learning_rate": 0.00014148361767520374, "loss": 0.8847, "step": 8188 }, { "epoch": 1.457977207977208, "grad_norm": 0.8462759256362915, "learning_rate": 0.00014147088102325737, "loss": 0.8046, "step": 8189 }, { "epoch": 1.4581552706552707, "grad_norm": 0.7081632614135742, "learning_rate": 0.00014145814355877526, "loss": 1.0764, "step": 8190 }, { "epoch": 1.4583333333333333, "grad_norm": 0.7357106804847717, 
"learning_rate": 0.00014144540528200698, "loss": 1.0202, "step": 8191 }, { "epoch": 1.458511396011396, "grad_norm": 0.603566586971283, "learning_rate": 0.00014143266619320204, "loss": 0.8214, "step": 8192 }, { "epoch": 1.4586894586894588, "grad_norm": 0.6829110383987427, "learning_rate": 0.00014141992629261007, "loss": 0.9479, "step": 8193 }, { "epoch": 1.4588675213675213, "grad_norm": 0.6822739839553833, "learning_rate": 0.00014140718558048072, "loss": 0.9117, "step": 8194 }, { "epoch": 1.459045584045584, "grad_norm": 0.7383607029914856, "learning_rate": 0.00014139444405706356, "loss": 0.9819, "step": 8195 }, { "epoch": 1.4592236467236468, "grad_norm": 0.6319897770881653, "learning_rate": 0.00014138170172260826, "loss": 1.0508, "step": 8196 }, { "epoch": 1.4594017094017093, "grad_norm": 0.6804461479187012, "learning_rate": 0.0001413689585773645, "loss": 0.992, "step": 8197 }, { "epoch": 1.459579772079772, "grad_norm": 0.6198720335960388, "learning_rate": 0.0001413562146215819, "loss": 1.0113, "step": 8198 }, { "epoch": 1.4597578347578348, "grad_norm": 0.5968540906906128, "learning_rate": 0.0001413434698555102, "loss": 0.7562, "step": 8199 }, { "epoch": 1.4599358974358974, "grad_norm": 0.5370334982872009, "learning_rate": 0.00014133072427939913, "loss": 0.9238, "step": 8200 }, { "epoch": 1.46011396011396, "grad_norm": 0.6652548909187317, "learning_rate": 0.00014131797789349832, "loss": 0.9464, "step": 8201 }, { "epoch": 1.4602920227920229, "grad_norm": 0.637852668762207, "learning_rate": 0.00014130523069805757, "loss": 1.0395, "step": 8202 }, { "epoch": 1.4604700854700854, "grad_norm": 0.8186550140380859, "learning_rate": 0.00014129248269332664, "loss": 1.2116, "step": 8203 }, { "epoch": 1.4606481481481481, "grad_norm": 0.5290196537971497, "learning_rate": 0.00014127973387955528, "loss": 0.7331, "step": 8204 }, { "epoch": 1.460826210826211, "grad_norm": 0.6516342163085938, "learning_rate": 0.00014126698425699332, "loss": 0.9275, "step": 8205 }, { "epoch": 
1.4610042735042734, "grad_norm": 0.767254114151001, "learning_rate": 0.00014125423382589048, "loss": 0.9355, "step": 8206 }, { "epoch": 1.4611823361823362, "grad_norm": 0.6476777195930481, "learning_rate": 0.00014124148258649668, "loss": 0.9263, "step": 8207 }, { "epoch": 1.461360398860399, "grad_norm": 0.6737871766090393, "learning_rate": 0.00014122873053906167, "loss": 0.9815, "step": 8208 }, { "epoch": 1.4615384615384617, "grad_norm": 0.6311159729957581, "learning_rate": 0.00014121597768383532, "loss": 0.9607, "step": 8209 }, { "epoch": 1.4617165242165242, "grad_norm": 0.6061250567436218, "learning_rate": 0.00014120322402106752, "loss": 0.7428, "step": 8210 }, { "epoch": 1.461894586894587, "grad_norm": 0.6916252970695496, "learning_rate": 0.00014119046955100815, "loss": 0.9664, "step": 8211 }, { "epoch": 1.4620726495726495, "grad_norm": 0.6583660840988159, "learning_rate": 0.00014117771427390706, "loss": 1.0645, "step": 8212 }, { "epoch": 1.4622507122507122, "grad_norm": 0.7034604549407959, "learning_rate": 0.00014116495819001425, "loss": 0.9223, "step": 8213 }, { "epoch": 1.462428774928775, "grad_norm": 0.6378605961799622, "learning_rate": 0.00014115220129957954, "loss": 0.7963, "step": 8214 }, { "epoch": 1.4626068376068377, "grad_norm": 0.6251596212387085, "learning_rate": 0.00014113944360285297, "loss": 0.9852, "step": 8215 }, { "epoch": 1.4627849002849003, "grad_norm": 0.7055560946464539, "learning_rate": 0.00014112668510008446, "loss": 0.9342, "step": 8216 }, { "epoch": 1.462962962962963, "grad_norm": 0.6250377893447876, "learning_rate": 0.00014111392579152396, "loss": 0.9886, "step": 8217 }, { "epoch": 1.4631410256410255, "grad_norm": 0.6011185050010681, "learning_rate": 0.00014110116567742152, "loss": 0.8465, "step": 8218 }, { "epoch": 1.4633190883190883, "grad_norm": 0.6632489562034607, "learning_rate": 0.0001410884047580271, "loss": 0.8619, "step": 8219 }, { "epoch": 1.463497150997151, "grad_norm": 0.7194828987121582, "learning_rate": 
0.00014107564303359076, "loss": 1.1231, "step": 8220 }, { "epoch": 1.4636752136752138, "grad_norm": 0.7640393376350403, "learning_rate": 0.0001410628805043625, "loss": 1.1955, "step": 8221 }, { "epoch": 1.4638532763532763, "grad_norm": 0.9118906259536743, "learning_rate": 0.0001410501171705924, "loss": 1.0555, "step": 8222 }, { "epoch": 1.464031339031339, "grad_norm": 0.7545066475868225, "learning_rate": 0.00014103735303253053, "loss": 0.9425, "step": 8223 }, { "epoch": 1.4642094017094016, "grad_norm": 0.6848801970481873, "learning_rate": 0.000141024588090427, "loss": 1.0418, "step": 8224 }, { "epoch": 1.4643874643874644, "grad_norm": 0.6825160384178162, "learning_rate": 0.00014101182234453185, "loss": 0.9615, "step": 8225 }, { "epoch": 1.4645655270655271, "grad_norm": 0.8258556723594666, "learning_rate": 0.00014099905579509527, "loss": 1.1237, "step": 8226 }, { "epoch": 1.4647435897435899, "grad_norm": 0.6427522897720337, "learning_rate": 0.00014098628844236733, "loss": 1.0853, "step": 8227 }, { "epoch": 1.4649216524216524, "grad_norm": 0.6476351022720337, "learning_rate": 0.00014097352028659825, "loss": 1.1286, "step": 8228 }, { "epoch": 1.4650997150997151, "grad_norm": 0.7621034383773804, "learning_rate": 0.00014096075132803812, "loss": 1.1402, "step": 8229 }, { "epoch": 1.4652777777777777, "grad_norm": 0.6629892587661743, "learning_rate": 0.00014094798156693718, "loss": 0.7108, "step": 8230 }, { "epoch": 1.4654558404558404, "grad_norm": 0.6902043223381042, "learning_rate": 0.00014093521100354557, "loss": 1.1761, "step": 8231 }, { "epoch": 1.4656339031339032, "grad_norm": 0.7422910928726196, "learning_rate": 0.00014092243963811357, "loss": 0.867, "step": 8232 }, { "epoch": 1.465811965811966, "grad_norm": 0.7424963712692261, "learning_rate": 0.00014090966747089137, "loss": 1.015, "step": 8233 }, { "epoch": 1.4659900284900285, "grad_norm": 0.6855891942977905, "learning_rate": 0.0001408968945021292, "loss": 0.9624, "step": 8234 }, { "epoch": 1.4661680911680912, 
"grad_norm": 0.5968918204307556, "learning_rate": 0.00014088412073207736, "loss": 0.9243, "step": 8235 }, { "epoch": 1.4663461538461537, "grad_norm": 0.6153344511985779, "learning_rate": 0.0001408713461609861, "loss": 1.0305, "step": 8236 }, { "epoch": 1.4665242165242165, "grad_norm": 0.6627458333969116, "learning_rate": 0.0001408585707891057, "loss": 1.1102, "step": 8237 }, { "epoch": 1.4667022792022792, "grad_norm": 0.6475233435630798, "learning_rate": 0.0001408457946166865, "loss": 1.0045, "step": 8238 }, { "epoch": 1.466880341880342, "grad_norm": 0.6792858839035034, "learning_rate": 0.00014083301764397876, "loss": 1.0092, "step": 8239 }, { "epoch": 1.4670584045584045, "grad_norm": 0.6916255354881287, "learning_rate": 0.00014082023987123293, "loss": 1.0761, "step": 8240 }, { "epoch": 1.4672364672364673, "grad_norm": 0.7901251912117004, "learning_rate": 0.00014080746129869923, "loss": 0.8002, "step": 8241 }, { "epoch": 1.4674145299145298, "grad_norm": 0.8078263401985168, "learning_rate": 0.00014079468192662812, "loss": 0.9738, "step": 8242 }, { "epoch": 1.4675925925925926, "grad_norm": 0.6370784640312195, "learning_rate": 0.00014078190175526996, "loss": 1.0256, "step": 8243 }, { "epoch": 1.4677706552706553, "grad_norm": 0.6087532639503479, "learning_rate": 0.0001407691207848752, "loss": 0.9747, "step": 8244 }, { "epoch": 1.467948717948718, "grad_norm": 0.6333357691764832, "learning_rate": 0.00014075633901569414, "loss": 1.0135, "step": 8245 }, { "epoch": 1.4681267806267806, "grad_norm": 0.6914255619049072, "learning_rate": 0.00014074355644797733, "loss": 1.0261, "step": 8246 }, { "epoch": 1.4683048433048433, "grad_norm": 0.6374734044075012, "learning_rate": 0.00014073077308197513, "loss": 0.9197, "step": 8247 }, { "epoch": 1.4684829059829059, "grad_norm": 0.8023789525032043, "learning_rate": 0.00014071798891793807, "loss": 1.1085, "step": 8248 }, { "epoch": 1.4686609686609686, "grad_norm": 0.7722933888435364, "learning_rate": 0.0001407052039561166, "loss": 
1.2018, "step": 8249 }, { "epoch": 1.4688390313390314, "grad_norm": 0.6823393106460571, "learning_rate": 0.0001406924181967612, "loss": 1.088, "step": 8250 }, { "epoch": 1.4690170940170941, "grad_norm": 0.7037357687950134, "learning_rate": 0.00014067963164012242, "loss": 1.0324, "step": 8251 }, { "epoch": 1.4691951566951567, "grad_norm": 0.6549737453460693, "learning_rate": 0.00014066684428645074, "loss": 1.152, "step": 8252 }, { "epoch": 1.4693732193732194, "grad_norm": 0.5349790453910828, "learning_rate": 0.00014065405613599674, "loss": 0.6996, "step": 8253 }, { "epoch": 1.469551282051282, "grad_norm": 0.6760679483413696, "learning_rate": 0.00014064126718901096, "loss": 0.9856, "step": 8254 }, { "epoch": 1.4697293447293447, "grad_norm": 0.5912436842918396, "learning_rate": 0.00014062847744574395, "loss": 1.0076, "step": 8255 }, { "epoch": 1.4699074074074074, "grad_norm": 0.75101637840271, "learning_rate": 0.00014061568690644632, "loss": 1.0033, "step": 8256 }, { "epoch": 1.4700854700854702, "grad_norm": 0.6233504414558411, "learning_rate": 0.00014060289557136873, "loss": 0.8525, "step": 8257 }, { "epoch": 1.4702635327635327, "grad_norm": 0.659570038318634, "learning_rate": 0.00014059010344076171, "loss": 0.855, "step": 8258 }, { "epoch": 1.4704415954415955, "grad_norm": 0.8096539974212646, "learning_rate": 0.00014057731051487593, "loss": 0.9905, "step": 8259 }, { "epoch": 1.470619658119658, "grad_norm": 0.5829728245735168, "learning_rate": 0.00014056451679396204, "loss": 0.7974, "step": 8260 }, { "epoch": 1.4707977207977208, "grad_norm": 0.6176979541778564, "learning_rate": 0.0001405517222782707, "loss": 0.9556, "step": 8261 }, { "epoch": 1.4709757834757835, "grad_norm": 0.6322479248046875, "learning_rate": 0.00014053892696805264, "loss": 0.8837, "step": 8262 }, { "epoch": 1.4711538461538463, "grad_norm": 0.6886917948722839, "learning_rate": 0.0001405261308635585, "loss": 0.9242, "step": 8263 }, { "epoch": 1.4713319088319088, "grad_norm": 0.7474521994590759, 
"learning_rate": 0.00014051333396503901, "loss": 0.9906, "step": 8264 }, { "epoch": 1.4715099715099715, "grad_norm": 0.7120978832244873, "learning_rate": 0.00014050053627274488, "loss": 1.1074, "step": 8265 }, { "epoch": 1.471688034188034, "grad_norm": 0.6778998374938965, "learning_rate": 0.0001404877377869269, "loss": 1.0027, "step": 8266 }, { "epoch": 1.4718660968660968, "grad_norm": 0.6832901239395142, "learning_rate": 0.0001404749385078358, "loss": 0.9399, "step": 8267 }, { "epoch": 1.4720441595441596, "grad_norm": 0.7428423762321472, "learning_rate": 0.00014046213843572236, "loss": 1.0591, "step": 8268 }, { "epoch": 1.4722222222222223, "grad_norm": 0.7522720098495483, "learning_rate": 0.00014044933757083737, "loss": 1.1184, "step": 8269 }, { "epoch": 1.4724002849002849, "grad_norm": 0.7714734673500061, "learning_rate": 0.00014043653591343163, "loss": 1.0783, "step": 8270 }, { "epoch": 1.4725783475783476, "grad_norm": 0.5860890746116638, "learning_rate": 0.00014042373346375597, "loss": 0.8394, "step": 8271 }, { "epoch": 1.4727564102564101, "grad_norm": 0.6400395035743713, "learning_rate": 0.0001404109302220612, "loss": 0.9153, "step": 8272 }, { "epoch": 1.4729344729344729, "grad_norm": 0.7441139817237854, "learning_rate": 0.00014039812618859827, "loss": 0.9224, "step": 8273 }, { "epoch": 1.4731125356125356, "grad_norm": 0.6030932664871216, "learning_rate": 0.00014038532136361793, "loss": 1.0783, "step": 8274 }, { "epoch": 1.4732905982905984, "grad_norm": 0.7243345975875854, "learning_rate": 0.0001403725157473711, "loss": 0.9894, "step": 8275 }, { "epoch": 1.473468660968661, "grad_norm": 0.6880641579627991, "learning_rate": 0.0001403597093401087, "loss": 0.9459, "step": 8276 }, { "epoch": 1.4736467236467237, "grad_norm": 0.6263882517814636, "learning_rate": 0.00014034690214208165, "loss": 0.8781, "step": 8277 }, { "epoch": 1.4738247863247862, "grad_norm": 0.7159495949745178, "learning_rate": 0.00014033409415354085, "loss": 1.0511, "step": 8278 }, { "epoch": 
1.474002849002849, "grad_norm": 0.7182226181030273, "learning_rate": 0.00014032128537473727, "loss": 1.1196, "step": 8279 }, { "epoch": 1.4741809116809117, "grad_norm": 0.744478166103363, "learning_rate": 0.00014030847580592186, "loss": 1.0747, "step": 8280 }, { "epoch": 1.4743589743589745, "grad_norm": 0.6806797385215759, "learning_rate": 0.00014029566544734558, "loss": 1.1519, "step": 8281 }, { "epoch": 1.474537037037037, "grad_norm": 0.6813502311706543, "learning_rate": 0.00014028285429925946, "loss": 0.968, "step": 8282 }, { "epoch": 1.4747150997150997, "grad_norm": 0.639784574508667, "learning_rate": 0.00014027004236191452, "loss": 1.0685, "step": 8283 }, { "epoch": 1.4748931623931623, "grad_norm": 0.6325878500938416, "learning_rate": 0.00014025722963556173, "loss": 1.0358, "step": 8284 }, { "epoch": 1.475071225071225, "grad_norm": 0.7012955546379089, "learning_rate": 0.00014024441612045215, "loss": 1.1059, "step": 8285 }, { "epoch": 1.4752492877492878, "grad_norm": 0.690380334854126, "learning_rate": 0.00014023160181683684, "loss": 0.9628, "step": 8286 }, { "epoch": 1.4754273504273505, "grad_norm": 0.7178516983985901, "learning_rate": 0.00014021878672496686, "loss": 0.963, "step": 8287 }, { "epoch": 1.475605413105413, "grad_norm": 0.7049064636230469, "learning_rate": 0.0001402059708450933, "loss": 0.8996, "step": 8288 }, { "epoch": 1.4757834757834758, "grad_norm": 0.6777819395065308, "learning_rate": 0.00014019315417746728, "loss": 1.0696, "step": 8289 }, { "epoch": 1.4759615384615383, "grad_norm": 0.5948763489723206, "learning_rate": 0.00014018033672233987, "loss": 0.928, "step": 8290 }, { "epoch": 1.476139601139601, "grad_norm": 0.7183942198753357, "learning_rate": 0.00014016751847996224, "loss": 1.1053, "step": 8291 }, { "epoch": 1.4763176638176638, "grad_norm": 0.7426177263259888, "learning_rate": 0.00014015469945058556, "loss": 0.9504, "step": 8292 }, { "epoch": 1.4764957264957266, "grad_norm": 0.6508159041404724, "learning_rate": 0.0001401418796344609, 
"loss": 1.1176, "step": 8293 }, { "epoch": 1.476673789173789, "grad_norm": 0.6954567432403564, "learning_rate": 0.00014012905903183954, "loss": 0.9238, "step": 8294 }, { "epoch": 1.4768518518518519, "grad_norm": 0.7023960947990417, "learning_rate": 0.0001401162376429726, "loss": 1.2032, "step": 8295 }, { "epoch": 1.4770299145299146, "grad_norm": 0.7174739837646484, "learning_rate": 0.00014010341546811134, "loss": 0.9385, "step": 8296 }, { "epoch": 1.4772079772079771, "grad_norm": 0.611980140209198, "learning_rate": 0.00014009059250750695, "loss": 0.9469, "step": 8297 }, { "epoch": 1.47738603988604, "grad_norm": 0.6362917423248291, "learning_rate": 0.0001400777687614107, "loss": 1.1406, "step": 8298 }, { "epoch": 1.4775641025641026, "grad_norm": 0.6884697675704956, "learning_rate": 0.00014006494423007381, "loss": 0.7915, "step": 8299 }, { "epoch": 1.4777421652421652, "grad_norm": 0.6266025304794312, "learning_rate": 0.00014005211891374755, "loss": 0.94, "step": 8300 }, { "epoch": 1.477920227920228, "grad_norm": 0.6130280494689941, "learning_rate": 0.00014003929281268323, "loss": 0.9369, "step": 8301 }, { "epoch": 1.4780982905982907, "grad_norm": 0.7244207859039307, "learning_rate": 0.00014002646592713215, "loss": 1.1449, "step": 8302 }, { "epoch": 1.4782763532763532, "grad_norm": 0.6527345776557922, "learning_rate": 0.0001400136382573456, "loss": 0.7792, "step": 8303 }, { "epoch": 1.478454415954416, "grad_norm": 0.7102689743041992, "learning_rate": 0.00014000080980357496, "loss": 0.9577, "step": 8304 }, { "epoch": 1.4786324786324787, "grad_norm": 0.6179325580596924, "learning_rate": 0.00013998798056607154, "loss": 0.827, "step": 8305 }, { "epoch": 1.4788105413105412, "grad_norm": 0.761234700679779, "learning_rate": 0.00013997515054508668, "loss": 1.0576, "step": 8306 }, { "epoch": 1.478988603988604, "grad_norm": 0.6200914978981018, "learning_rate": 0.0001399623197408718, "loss": 1.0514, "step": 8307 }, { "epoch": 1.4791666666666667, "grad_norm": 0.5961193442344666, 
"learning_rate": 0.0001399494881536783, "loss": 0.7846, "step": 8308 }, { "epoch": 1.4793447293447293, "grad_norm": 0.645984411239624, "learning_rate": 0.00013993665578375758, "loss": 0.9927, "step": 8309 }, { "epoch": 1.479522792022792, "grad_norm": 0.7258989810943604, "learning_rate": 0.000139923822631361, "loss": 0.7567, "step": 8310 }, { "epoch": 1.4797008547008548, "grad_norm": 0.708882212638855, "learning_rate": 0.00013991098869674007, "loss": 1.1147, "step": 8311 }, { "epoch": 1.4798789173789173, "grad_norm": 0.669262707233429, "learning_rate": 0.00013989815398014624, "loss": 0.7142, "step": 8312 }, { "epoch": 1.48005698005698, "grad_norm": 0.7398767471313477, "learning_rate": 0.00013988531848183096, "loss": 1.043, "step": 8313 }, { "epoch": 1.4802350427350428, "grad_norm": 0.753197193145752, "learning_rate": 0.0001398724822020457, "loss": 1.058, "step": 8314 }, { "epoch": 1.4804131054131053, "grad_norm": 0.663526177406311, "learning_rate": 0.000139859645141042, "loss": 1.1272, "step": 8315 }, { "epoch": 1.480591168091168, "grad_norm": 0.6537514925003052, "learning_rate": 0.00013984680729907135, "loss": 1.011, "step": 8316 }, { "epoch": 1.4807692307692308, "grad_norm": 0.707554817199707, "learning_rate": 0.00013983396867638527, "loss": 1.0593, "step": 8317 }, { "epoch": 1.4809472934472934, "grad_norm": 0.6261475086212158, "learning_rate": 0.00013982112927323533, "loss": 1.0731, "step": 8318 }, { "epoch": 1.4811253561253561, "grad_norm": 0.6694258451461792, "learning_rate": 0.00013980828908987308, "loss": 1.0703, "step": 8319 }, { "epoch": 1.4813034188034189, "grad_norm": 0.7793164253234863, "learning_rate": 0.00013979544812655012, "loss": 1.0447, "step": 8320 }, { "epoch": 1.4814814814814814, "grad_norm": 0.6496448516845703, "learning_rate": 0.00013978260638351802, "loss": 1.0208, "step": 8321 }, { "epoch": 1.4816595441595442, "grad_norm": 0.5992059111595154, "learning_rate": 0.00013976976386102834, "loss": 0.9717, "step": 8322 }, { "epoch": 
1.481837606837607, "grad_norm": 0.7473567128181458, "learning_rate": 0.0001397569205593328, "loss": 0.9612, "step": 8323 }, { "epoch": 1.4820156695156697, "grad_norm": 0.657558798789978, "learning_rate": 0.00013974407647868297, "loss": 1.2137, "step": 8324 }, { "epoch": 1.4821937321937322, "grad_norm": 0.7040614485740662, "learning_rate": 0.00013973123161933055, "loss": 1.007, "step": 8325 }, { "epoch": 1.482371794871795, "grad_norm": 0.6098681092262268, "learning_rate": 0.00013971838598152717, "loss": 1.0595, "step": 8326 }, { "epoch": 1.4825498575498575, "grad_norm": 0.7194869518280029, "learning_rate": 0.0001397055395655245, "loss": 0.9632, "step": 8327 }, { "epoch": 1.4827279202279202, "grad_norm": 0.645972728729248, "learning_rate": 0.00013969269237157426, "loss": 1.0712, "step": 8328 }, { "epoch": 1.482905982905983, "grad_norm": 0.6580560207366943, "learning_rate": 0.0001396798443999282, "loss": 1.2117, "step": 8329 }, { "epoch": 1.4830840455840457, "grad_norm": 0.6624418497085571, "learning_rate": 0.00013966699565083802, "loss": 0.8529, "step": 8330 }, { "epoch": 1.4832621082621082, "grad_norm": 0.659896731376648, "learning_rate": 0.00013965414612455545, "loss": 0.9359, "step": 8331 }, { "epoch": 1.483440170940171, "grad_norm": 0.6690883636474609, "learning_rate": 0.00013964129582133222, "loss": 0.971, "step": 8332 }, { "epoch": 1.4836182336182335, "grad_norm": 0.6767334938049316, "learning_rate": 0.00013962844474142022, "loss": 1.0137, "step": 8333 }, { "epoch": 1.4837962962962963, "grad_norm": 0.6412752270698547, "learning_rate": 0.0001396155928850711, "loss": 1.2812, "step": 8334 }, { "epoch": 1.483974358974359, "grad_norm": 0.6731469035148621, "learning_rate": 0.0001396027402525368, "loss": 0.8723, "step": 8335 }, { "epoch": 1.4841524216524218, "grad_norm": 0.7327923774719238, "learning_rate": 0.000139589886844069, "loss": 0.9606, "step": 8336 }, { "epoch": 1.4843304843304843, "grad_norm": 0.6194515824317932, "learning_rate": 0.00013957703265991963, 
"loss": 0.8514, "step": 8337 }, { "epoch": 1.484508547008547, "grad_norm": 0.7250012755393982, "learning_rate": 0.00013956417770034053, "loss": 0.9755, "step": 8338 }, { "epoch": 1.4846866096866096, "grad_norm": 0.7484263181686401, "learning_rate": 0.00013955132196558358, "loss": 1.0376, "step": 8339 }, { "epoch": 1.4848646723646723, "grad_norm": 0.7593362331390381, "learning_rate": 0.00013953846545590058, "loss": 1.3011, "step": 8340 }, { "epoch": 1.485042735042735, "grad_norm": 0.6670466065406799, "learning_rate": 0.00013952560817154352, "loss": 0.9726, "step": 8341 }, { "epoch": 1.4852207977207978, "grad_norm": 0.8001134395599365, "learning_rate": 0.00013951275011276425, "loss": 1.1447, "step": 8342 }, { "epoch": 1.4853988603988604, "grad_norm": 0.741450309753418, "learning_rate": 0.00013949989127981475, "loss": 1.1101, "step": 8343 }, { "epoch": 1.4855769230769231, "grad_norm": 0.6594467163085938, "learning_rate": 0.00013948703167294694, "loss": 1.0205, "step": 8344 }, { "epoch": 1.4857549857549857, "grad_norm": 0.6303030252456665, "learning_rate": 0.00013947417129241276, "loss": 0.9179, "step": 8345 }, { "epoch": 1.4859330484330484, "grad_norm": 0.6352720856666565, "learning_rate": 0.00013946131013846418, "loss": 1.158, "step": 8346 }, { "epoch": 1.4861111111111112, "grad_norm": 0.6720923781394958, "learning_rate": 0.0001394484482113532, "loss": 0.8805, "step": 8347 }, { "epoch": 1.486289173789174, "grad_norm": 0.7186421751976013, "learning_rate": 0.00013943558551133186, "loss": 0.8951, "step": 8348 }, { "epoch": 1.4864672364672364, "grad_norm": 0.6038698554039001, "learning_rate": 0.00013942272203865214, "loss": 1.0079, "step": 8349 }, { "epoch": 1.4866452991452992, "grad_norm": 0.665790319442749, "learning_rate": 0.00013940985779356606, "loss": 0.8853, "step": 8350 }, { "epoch": 1.4868233618233617, "grad_norm": 0.6941595673561096, "learning_rate": 0.00013939699277632568, "loss": 1.1404, "step": 8351 }, { "epoch": 1.4870014245014245, "grad_norm": 
0.7943871021270752, "learning_rate": 0.00013938412698718305, "loss": 0.9961, "step": 8352 }, { "epoch": 1.4871794871794872, "grad_norm": 0.6363818645477295, "learning_rate": 0.00013937126042639028, "loss": 0.8621, "step": 8353 }, { "epoch": 1.48735754985755, "grad_norm": 0.7986421585083008, "learning_rate": 0.00013935839309419943, "loss": 1.0547, "step": 8354 }, { "epoch": 1.4875356125356125, "grad_norm": 0.5890130400657654, "learning_rate": 0.00013934552499086266, "loss": 0.9863, "step": 8355 }, { "epoch": 1.4877136752136753, "grad_norm": 0.7915370464324951, "learning_rate": 0.00013933265611663207, "loss": 1.0385, "step": 8356 }, { "epoch": 1.4878917378917378, "grad_norm": 0.7062503695487976, "learning_rate": 0.00013931978647175973, "loss": 1.0984, "step": 8357 }, { "epoch": 1.4880698005698005, "grad_norm": 0.6496769785881042, "learning_rate": 0.00013930691605649792, "loss": 1.0884, "step": 8358 }, { "epoch": 1.4882478632478633, "grad_norm": 0.6527266502380371, "learning_rate": 0.0001392940448710987, "loss": 1.0366, "step": 8359 }, { "epoch": 1.488425925925926, "grad_norm": 0.6269870400428772, "learning_rate": 0.00013928117291581431, "loss": 0.9097, "step": 8360 }, { "epoch": 1.4886039886039886, "grad_norm": 0.6581160426139832, "learning_rate": 0.00013926830019089694, "loss": 0.8694, "step": 8361 }, { "epoch": 1.4887820512820513, "grad_norm": 0.6196219325065613, "learning_rate": 0.0001392554266965988, "loss": 0.8054, "step": 8362 }, { "epoch": 1.4889601139601139, "grad_norm": 0.6246176362037659, "learning_rate": 0.0001392425524331721, "loss": 0.9309, "step": 8363 }, { "epoch": 1.4891381766381766, "grad_norm": 0.7293874025344849, "learning_rate": 0.00013922967740086914, "loss": 1.051, "step": 8364 }, { "epoch": 1.4893162393162394, "grad_norm": 0.6581604480743408, "learning_rate": 0.00013921680159994213, "loss": 0.8475, "step": 8365 }, { "epoch": 1.489494301994302, "grad_norm": 0.6294612288475037, "learning_rate": 0.00013920392503064335, "loss": 0.6946, "step": 8366 
}, { "epoch": 1.4896723646723646, "grad_norm": 0.5725370645523071, "learning_rate": 0.00013919104769322512, "loss": 0.7838, "step": 8367 }, { "epoch": 1.4898504273504274, "grad_norm": 0.681520402431488, "learning_rate": 0.00013917816958793967, "loss": 0.99, "step": 8368 }, { "epoch": 1.49002849002849, "grad_norm": 0.6660219430923462, "learning_rate": 0.00013916529071503943, "loss": 0.9113, "step": 8369 }, { "epoch": 1.4902065527065527, "grad_norm": 0.7567862272262573, "learning_rate": 0.00013915241107477665, "loss": 1.2498, "step": 8370 }, { "epoch": 1.4903846153846154, "grad_norm": 0.7366036176681519, "learning_rate": 0.00013913953066740372, "loss": 1.115, "step": 8371 }, { "epoch": 1.4905626780626782, "grad_norm": 0.6201434135437012, "learning_rate": 0.00013912664949317297, "loss": 0.8447, "step": 8372 }, { "epoch": 1.4907407407407407, "grad_norm": 0.7618655562400818, "learning_rate": 0.00013911376755233683, "loss": 0.9696, "step": 8373 }, { "epoch": 1.4909188034188035, "grad_norm": 0.6716726422309875, "learning_rate": 0.00013910088484514764, "loss": 0.9753, "step": 8374 }, { "epoch": 1.491096866096866, "grad_norm": 0.6745659112930298, "learning_rate": 0.0001390880013718579, "loss": 1.134, "step": 8375 }, { "epoch": 1.4912749287749287, "grad_norm": 0.7524410486221313, "learning_rate": 0.0001390751171327199, "loss": 1.0235, "step": 8376 }, { "epoch": 1.4914529914529915, "grad_norm": 0.7409411072731018, "learning_rate": 0.00013906223212798615, "loss": 0.752, "step": 8377 }, { "epoch": 1.4916310541310542, "grad_norm": 0.7016384601593018, "learning_rate": 0.00013904934635790913, "loss": 1.1712, "step": 8378 }, { "epoch": 1.4918091168091168, "grad_norm": 0.6537824869155884, "learning_rate": 0.00013903645982274129, "loss": 1.1162, "step": 8379 }, { "epoch": 1.4919871794871795, "grad_norm": 0.6460806727409363, "learning_rate": 0.0001390235725227351, "loss": 0.9389, "step": 8380 }, { "epoch": 1.492165242165242, "grad_norm": 0.6405501365661621, "learning_rate": 
0.0001390106844581431, "loss": 1.0508, "step": 8381 }, { "epoch": 1.4923433048433048, "grad_norm": 0.6672594547271729, "learning_rate": 0.00013899779562921775, "loss": 1.0018, "step": 8382 }, { "epoch": 1.4925213675213675, "grad_norm": 0.6303185820579529, "learning_rate": 0.0001389849060362116, "loss": 0.9964, "step": 8383 }, { "epoch": 1.4926994301994303, "grad_norm": 0.6981508731842041, "learning_rate": 0.00013897201567937719, "loss": 1.174, "step": 8384 }, { "epoch": 1.4928774928774928, "grad_norm": 0.6195989847183228, "learning_rate": 0.0001389591245589671, "loss": 0.9254, "step": 8385 }, { "epoch": 1.4930555555555556, "grad_norm": 0.6232163310050964, "learning_rate": 0.00013894623267523393, "loss": 0.7151, "step": 8386 }, { "epoch": 1.493233618233618, "grad_norm": 0.673067033290863, "learning_rate": 0.0001389333400284302, "loss": 1.0156, "step": 8387 }, { "epoch": 1.4934116809116809, "grad_norm": 0.706266462802887, "learning_rate": 0.00013892044661880856, "loss": 0.9387, "step": 8388 }, { "epoch": 1.4935897435897436, "grad_norm": 0.742640495300293, "learning_rate": 0.00013890755244662161, "loss": 1.1597, "step": 8389 }, { "epoch": 1.4937678062678064, "grad_norm": 0.6856846809387207, "learning_rate": 0.000138894657512122, "loss": 0.9998, "step": 8390 }, { "epoch": 1.493945868945869, "grad_norm": 0.7214110493659973, "learning_rate": 0.0001388817618155624, "loss": 1.1867, "step": 8391 }, { "epoch": 1.4941239316239316, "grad_norm": 0.7346787452697754, "learning_rate": 0.0001388688653571954, "loss": 0.9071, "step": 8392 }, { "epoch": 1.4943019943019942, "grad_norm": 0.7019181847572327, "learning_rate": 0.00013885596813727373, "loss": 1.0472, "step": 8393 }, { "epoch": 1.494480056980057, "grad_norm": 0.6780814528465271, "learning_rate": 0.00013884307015605012, "loss": 1.0031, "step": 8394 }, { "epoch": 1.4946581196581197, "grad_norm": 0.6722873449325562, "learning_rate": 0.0001388301714137772, "loss": 0.8889, "step": 8395 }, { "epoch": 1.4948361823361824, 
"grad_norm": 0.6736134886741638, "learning_rate": 0.00013881727191070777, "loss": 0.8695, "step": 8396 }, { "epoch": 1.495014245014245, "grad_norm": 0.632648766040802, "learning_rate": 0.00013880437164709452, "loss": 0.9391, "step": 8397 }, { "epoch": 1.4951923076923077, "grad_norm": 0.7004299163818359, "learning_rate": 0.0001387914706231902, "loss": 1.1423, "step": 8398 }, { "epoch": 1.4953703703703702, "grad_norm": 0.5787134766578674, "learning_rate": 0.0001387785688392476, "loss": 0.9953, "step": 8399 }, { "epoch": 1.495548433048433, "grad_norm": 0.6671785712242126, "learning_rate": 0.0001387656662955195, "loss": 0.9356, "step": 8400 }, { "epoch": 1.4957264957264957, "grad_norm": 0.7216096520423889, "learning_rate": 0.0001387527629922587, "loss": 0.9065, "step": 8401 }, { "epoch": 1.4959045584045585, "grad_norm": 0.6469849348068237, "learning_rate": 0.00013873985892971801, "loss": 1.0664, "step": 8402 }, { "epoch": 1.496082621082621, "grad_norm": 0.5598217248916626, "learning_rate": 0.00013872695410815027, "loss": 0.8834, "step": 8403 }, { "epoch": 1.4962606837606838, "grad_norm": 0.6860302686691284, "learning_rate": 0.00013871404852780828, "loss": 0.9061, "step": 8404 }, { "epoch": 1.4964387464387463, "grad_norm": 0.7101688385009766, "learning_rate": 0.00013870114218894497, "loss": 1.0236, "step": 8405 }, { "epoch": 1.496616809116809, "grad_norm": 0.6494225859642029, "learning_rate": 0.00013868823509181313, "loss": 0.9631, "step": 8406 }, { "epoch": 1.4967948717948718, "grad_norm": 0.6804189085960388, "learning_rate": 0.00013867532723666574, "loss": 0.9341, "step": 8407 }, { "epoch": 1.4969729344729346, "grad_norm": 0.8493942022323608, "learning_rate": 0.00013866241862375562, "loss": 1.1451, "step": 8408 }, { "epoch": 1.497150997150997, "grad_norm": 0.6248497366905212, "learning_rate": 0.00013864950925333576, "loss": 0.8584, "step": 8409 }, { "epoch": 1.4973290598290598, "grad_norm": 0.6238769292831421, "learning_rate": 0.00013863659912565903, "loss": 1.1612, 
"step": 8410 }, { "epoch": 1.4975071225071226, "grad_norm": 0.8538609147071838, "learning_rate": 0.0001386236882409784, "loss": 1.0817, "step": 8411 }, { "epoch": 1.4976851851851851, "grad_norm": 0.7301406264305115, "learning_rate": 0.00013861077659954683, "loss": 0.943, "step": 8412 }, { "epoch": 1.4978632478632479, "grad_norm": 0.6573456525802612, "learning_rate": 0.0001385978642016173, "loss": 1.0154, "step": 8413 }, { "epoch": 1.4980413105413106, "grad_norm": 0.7634185552597046, "learning_rate": 0.0001385849510474428, "loss": 1.0432, "step": 8414 }, { "epoch": 1.4982193732193732, "grad_norm": 0.6156686544418335, "learning_rate": 0.00013857203713727633, "loss": 1.0442, "step": 8415 }, { "epoch": 1.498397435897436, "grad_norm": 0.5386871695518494, "learning_rate": 0.00013855912247137092, "loss": 0.9055, "step": 8416 }, { "epoch": 1.4985754985754987, "grad_norm": 0.7108574509620667, "learning_rate": 0.00013854620704997962, "loss": 0.9705, "step": 8417 }, { "epoch": 1.4987535612535612, "grad_norm": 0.7313347458839417, "learning_rate": 0.00013853329087335547, "loss": 0.7541, "step": 8418 }, { "epoch": 1.498931623931624, "grad_norm": 0.8369119167327881, "learning_rate": 0.0001385203739417515, "loss": 1.1317, "step": 8419 }, { "epoch": 1.4991096866096867, "grad_norm": 0.6763789057731628, "learning_rate": 0.00013850745625542085, "loss": 0.7909, "step": 8420 }, { "epoch": 1.4992877492877492, "grad_norm": 0.7369635105133057, "learning_rate": 0.00013849453781461656, "loss": 1.1454, "step": 8421 }, { "epoch": 1.499465811965812, "grad_norm": 0.7165971398353577, "learning_rate": 0.0001384816186195918, "loss": 1.1927, "step": 8422 }, { "epoch": 1.4996438746438747, "grad_norm": 0.7502337694168091, "learning_rate": 0.00013846869867059966, "loss": 1.0592, "step": 8423 }, { "epoch": 1.4998219373219372, "grad_norm": 0.7207813858985901, "learning_rate": 0.00013845577796789326, "loss": 1.1133, "step": 8424 }, { "epoch": 1.4998219373219372, "eval_loss": 1.1057652235031128, 
"eval_runtime": 24.7975, "eval_samples_per_second": 41.98, "eval_steps_per_second": 21.01, "step": 8424 }, { "epoch": 1.5, "grad_norm": 0.6962727308273315, "learning_rate": 0.00013844285651172576, "loss": 1.0711, "step": 8425 }, { "epoch": 1.5001780626780628, "grad_norm": 0.6585133075714111, "learning_rate": 0.00013842993430235038, "loss": 0.9793, "step": 8426 }, { "epoch": 1.5003561253561255, "grad_norm": 0.7045056819915771, "learning_rate": 0.00013841701134002029, "loss": 1.0046, "step": 8427 }, { "epoch": 1.500534188034188, "grad_norm": 0.6788702011108398, "learning_rate": 0.00013840408762498863, "loss": 0.9539, "step": 8428 }, { "epoch": 1.5007122507122506, "grad_norm": 0.7253114581108093, "learning_rate": 0.00013839116315750863, "loss": 0.9446, "step": 8429 }, { "epoch": 1.5008903133903133, "grad_norm": 0.6103765368461609, "learning_rate": 0.0001383782379378336, "loss": 0.7862, "step": 8430 }, { "epoch": 1.501068376068376, "grad_norm": 0.6662353873252869, "learning_rate": 0.00013836531196621666, "loss": 1.2178, "step": 8431 }, { "epoch": 1.5012464387464388, "grad_norm": 0.6871803998947144, "learning_rate": 0.00013835238524291117, "loss": 0.9263, "step": 8432 }, { "epoch": 1.5014245014245016, "grad_norm": 0.62713223695755, "learning_rate": 0.00013833945776817034, "loss": 0.8879, "step": 8433 }, { "epoch": 1.501602564102564, "grad_norm": 0.6698164343833923, "learning_rate": 0.00013832652954224748, "loss": 0.9847, "step": 8434 }, { "epoch": 1.5017806267806266, "grad_norm": 0.6855883002281189, "learning_rate": 0.0001383136005653959, "loss": 0.8614, "step": 8435 }, { "epoch": 1.5019586894586894, "grad_norm": 0.7028802037239075, "learning_rate": 0.0001383006708378689, "loss": 1.0153, "step": 8436 }, { "epoch": 1.5021367521367521, "grad_norm": 0.6710380911827087, "learning_rate": 0.00013828774035991981, "loss": 1.0163, "step": 8437 }, { "epoch": 1.5023148148148149, "grad_norm": 0.618984580039978, "learning_rate": 0.000138274809131802, "loss": 1.0015, "step": 8438 }, 
{ "epoch": 1.5024928774928776, "grad_norm": 0.6881645321846008, "learning_rate": 0.00013826187715376882, "loss": 0.9776, "step": 8439 }, { "epoch": 1.5026709401709402, "grad_norm": 0.6715859770774841, "learning_rate": 0.00013824894442607358, "loss": 0.9129, "step": 8440 }, { "epoch": 1.5028490028490027, "grad_norm": 0.5940943360328674, "learning_rate": 0.0001382360109489698, "loss": 1.0724, "step": 8441 }, { "epoch": 1.5030270655270654, "grad_norm": 0.6536458134651184, "learning_rate": 0.0001382230767227108, "loss": 1.0162, "step": 8442 }, { "epoch": 1.5032051282051282, "grad_norm": 0.6163156628608704, "learning_rate": 0.00013821014174755, "loss": 1.0521, "step": 8443 }, { "epoch": 1.503383190883191, "grad_norm": 0.7592282891273499, "learning_rate": 0.00013819720602374082, "loss": 0.9525, "step": 8444 }, { "epoch": 1.5035612535612537, "grad_norm": 0.6672595143318176, "learning_rate": 0.0001381842695515368, "loss": 0.9359, "step": 8445 }, { "epoch": 1.5037393162393162, "grad_norm": 0.6395034193992615, "learning_rate": 0.0001381713323311913, "loss": 1.166, "step": 8446 }, { "epoch": 1.5039173789173788, "grad_norm": 0.5958148837089539, "learning_rate": 0.00013815839436295783, "loss": 0.9885, "step": 8447 }, { "epoch": 1.5040954415954415, "grad_norm": 0.676555871963501, "learning_rate": 0.0001381454556470899, "loss": 1.0637, "step": 8448 }, { "epoch": 1.5042735042735043, "grad_norm": 0.642428994178772, "learning_rate": 0.00013813251618384102, "loss": 0.9288, "step": 8449 }, { "epoch": 1.504451566951567, "grad_norm": 0.6730920076370239, "learning_rate": 0.00013811957597346467, "loss": 1.1345, "step": 8450 }, { "epoch": 1.5046296296296298, "grad_norm": 0.7824259996414185, "learning_rate": 0.00013810663501621443, "loss": 0.7532, "step": 8451 }, { "epoch": 1.5048076923076923, "grad_norm": 0.8184825778007507, "learning_rate": 0.00013809369331234386, "loss": 1.2674, "step": 8452 }, { "epoch": 1.5049857549857548, "grad_norm": 0.7369286417961121, "learning_rate": 
0.00013808075086210647, "loss": 1.0978, "step": 8453 }, { "epoch": 1.5051638176638176, "grad_norm": 0.6336679458618164, "learning_rate": 0.00013806780766575588, "loss": 1.0922, "step": 8454 }, { "epoch": 1.5053418803418803, "grad_norm": 0.700219452381134, "learning_rate": 0.0001380548637235457, "loss": 1.0908, "step": 8455 }, { "epoch": 1.505519943019943, "grad_norm": 0.6346127986907959, "learning_rate": 0.0001380419190357295, "loss": 1.1265, "step": 8456 }, { "epoch": 1.5056980056980058, "grad_norm": 0.8653196096420288, "learning_rate": 0.00013802897360256093, "loss": 1.0466, "step": 8457 }, { "epoch": 1.5058760683760684, "grad_norm": 0.6589069962501526, "learning_rate": 0.0001380160274242936, "loss": 1.245, "step": 8458 }, { "epoch": 1.506054131054131, "grad_norm": 0.6527602076530457, "learning_rate": 0.00013800308050118117, "loss": 1.1539, "step": 8459 }, { "epoch": 1.5062321937321936, "grad_norm": 0.6005436182022095, "learning_rate": 0.00013799013283347734, "loss": 0.899, "step": 8460 }, { "epoch": 1.5064102564102564, "grad_norm": 0.6954274773597717, "learning_rate": 0.0001379771844214358, "loss": 1.1245, "step": 8461 }, { "epoch": 1.5065883190883191, "grad_norm": 0.658764660358429, "learning_rate": 0.00013796423526531019, "loss": 0.9884, "step": 8462 }, { "epoch": 1.506766381766382, "grad_norm": 0.652214527130127, "learning_rate": 0.0001379512853653543, "loss": 0.9711, "step": 8463 }, { "epoch": 1.5069444444444444, "grad_norm": 0.5680044889450073, "learning_rate": 0.00013793833472182176, "loss": 0.9055, "step": 8464 }, { "epoch": 1.5071225071225072, "grad_norm": 0.7524166703224182, "learning_rate": 0.0001379253833349664, "loss": 1.1163, "step": 8465 }, { "epoch": 1.5073005698005697, "grad_norm": 0.692936897277832, "learning_rate": 0.0001379124312050419, "loss": 0.899, "step": 8466 }, { "epoch": 1.5074786324786325, "grad_norm": 0.6871617436408997, "learning_rate": 0.00013789947833230207, "loss": 0.9416, "step": 8467 }, { "epoch": 1.5076566951566952, 
"grad_norm": 0.5983462333679199, "learning_rate": 0.0001378865247170007, "loss": 0.9776, "step": 8468 }, { "epoch": 1.507834757834758, "grad_norm": 0.6486790180206299, "learning_rate": 0.0001378735703593916, "loss": 0.9346, "step": 8469 }, { "epoch": 1.5080128205128205, "grad_norm": 0.6843809485435486, "learning_rate": 0.00013786061525972857, "loss": 1.1276, "step": 8470 }, { "epoch": 1.5081908831908832, "grad_norm": 0.5734516382217407, "learning_rate": 0.00013784765941826538, "loss": 0.6939, "step": 8471 }, { "epoch": 1.5083689458689458, "grad_norm": 0.6126381754875183, "learning_rate": 0.00013783470283525596, "loss": 0.8609, "step": 8472 }, { "epoch": 1.5085470085470085, "grad_norm": 0.7570928335189819, "learning_rate": 0.00013782174551095415, "loss": 0.8809, "step": 8473 }, { "epoch": 1.5087250712250713, "grad_norm": 0.6911360025405884, "learning_rate": 0.00013780878744561377, "loss": 0.9916, "step": 8474 }, { "epoch": 1.508903133903134, "grad_norm": 0.6651954650878906, "learning_rate": 0.00013779582863948878, "loss": 1.0012, "step": 8475 }, { "epoch": 1.5090811965811965, "grad_norm": 0.845396876335144, "learning_rate": 0.000137782869092833, "loss": 0.8455, "step": 8476 }, { "epoch": 1.5092592592592593, "grad_norm": 0.6958050727844238, "learning_rate": 0.00013776990880590042, "loss": 1.0264, "step": 8477 }, { "epoch": 1.5094373219373218, "grad_norm": 0.6950124502182007, "learning_rate": 0.00013775694777894493, "loss": 1.0547, "step": 8478 }, { "epoch": 1.5096153846153846, "grad_norm": 0.7243088483810425, "learning_rate": 0.00013774398601222045, "loss": 1.0999, "step": 8479 }, { "epoch": 1.5097934472934473, "grad_norm": 0.6820448040962219, "learning_rate": 0.00013773102350598097, "loss": 0.823, "step": 8480 }, { "epoch": 1.50997150997151, "grad_norm": 0.689996063709259, "learning_rate": 0.0001377180602604805, "loss": 1.049, "step": 8481 }, { "epoch": 1.5101495726495726, "grad_norm": 0.6763314604759216, "learning_rate": 0.000137705096275973, "loss": 0.9633, 
"step": 8482 }, { "epoch": 1.5103276353276354, "grad_norm": 0.6760517358779907, "learning_rate": 0.00013769213155271243, "loss": 1.0326, "step": 8483 }, { "epoch": 1.510505698005698, "grad_norm": 0.7181188464164734, "learning_rate": 0.00013767916609095285, "loss": 0.9629, "step": 8484 }, { "epoch": 1.5106837606837606, "grad_norm": 0.7102212905883789, "learning_rate": 0.0001376661998909483, "loss": 1.2714, "step": 8485 }, { "epoch": 1.5108618233618234, "grad_norm": 0.6719805598258972, "learning_rate": 0.00013765323295295278, "loss": 0.7848, "step": 8486 }, { "epoch": 1.5110398860398861, "grad_norm": 0.6592095494270325, "learning_rate": 0.0001376402652772204, "loss": 0.882, "step": 8487 }, { "epoch": 1.5112179487179487, "grad_norm": 0.6858693361282349, "learning_rate": 0.00013762729686400522, "loss": 0.9418, "step": 8488 }, { "epoch": 1.5113960113960114, "grad_norm": 0.7183199524879456, "learning_rate": 0.0001376143277135613, "loss": 1.0611, "step": 8489 }, { "epoch": 1.511574074074074, "grad_norm": 0.6294263005256653, "learning_rate": 0.00013760135782614277, "loss": 0.864, "step": 8490 }, { "epoch": 1.5117521367521367, "grad_norm": 0.6762619614601135, "learning_rate": 0.00013758838720200376, "loss": 1.0295, "step": 8491 }, { "epoch": 1.5119301994301995, "grad_norm": 0.6919726133346558, "learning_rate": 0.00013757541584139834, "loss": 1.0803, "step": 8492 }, { "epoch": 1.5121082621082622, "grad_norm": 0.6801241040229797, "learning_rate": 0.00013756244374458075, "loss": 1.1394, "step": 8493 }, { "epoch": 1.5122863247863247, "grad_norm": 0.6758754253387451, "learning_rate": 0.0001375494709118051, "loss": 1.0053, "step": 8494 }, { "epoch": 1.5124643874643875, "grad_norm": 0.6727001070976257, "learning_rate": 0.00013753649734332555, "loss": 1.1407, "step": 8495 }, { "epoch": 1.51264245014245, "grad_norm": 0.693913459777832, "learning_rate": 0.00013752352303939632, "loss": 1.1804, "step": 8496 }, { "epoch": 1.5128205128205128, "grad_norm": 0.6122510433197021, 
"learning_rate": 0.0001375105480002716, "loss": 0.917, "step": 8497 }, { "epoch": 1.5129985754985755, "grad_norm": 0.6305009722709656, "learning_rate": 0.00013749757222620562, "loss": 1.1075, "step": 8498 }, { "epoch": 1.5131766381766383, "grad_norm": 0.7249642610549927, "learning_rate": 0.0001374845957174526, "loss": 0.9107, "step": 8499 }, { "epoch": 1.5133547008547008, "grad_norm": 0.6922136545181274, "learning_rate": 0.0001374716184742668, "loss": 0.9974, "step": 8500 }, { "epoch": 1.5135327635327636, "grad_norm": 0.6989904046058655, "learning_rate": 0.00013745864049690245, "loss": 0.9866, "step": 8501 }, { "epoch": 1.513710826210826, "grad_norm": 0.6284058094024658, "learning_rate": 0.0001374456617856139, "loss": 0.8658, "step": 8502 }, { "epoch": 1.5138888888888888, "grad_norm": 0.615388810634613, "learning_rate": 0.00013743268234065535, "loss": 0.7876, "step": 8503 }, { "epoch": 1.5140669515669516, "grad_norm": 0.6212600469589233, "learning_rate": 0.0001374197021622812, "loss": 0.855, "step": 8504 }, { "epoch": 1.5142450142450143, "grad_norm": 0.6312419772148132, "learning_rate": 0.00013740672125074567, "loss": 0.9252, "step": 8505 }, { "epoch": 1.5144230769230769, "grad_norm": 0.7094576954841614, "learning_rate": 0.00013739373960630315, "loss": 0.7655, "step": 8506 }, { "epoch": 1.5146011396011396, "grad_norm": 0.5583470463752747, "learning_rate": 0.000137380757229208, "loss": 0.7855, "step": 8507 }, { "epoch": 1.5147792022792022, "grad_norm": 0.6798399686813354, "learning_rate": 0.00013736777411971457, "loss": 0.9935, "step": 8508 }, { "epoch": 1.514957264957265, "grad_norm": 0.7835991978645325, "learning_rate": 0.00013735479027807723, "loss": 1.1603, "step": 8509 }, { "epoch": 1.5151353276353277, "grad_norm": 0.6230790615081787, "learning_rate": 0.00013734180570455033, "loss": 1.1463, "step": 8510 }, { "epoch": 1.5153133903133904, "grad_norm": 0.646603524684906, "learning_rate": 0.00013732882039938835, "loss": 0.9564, "step": 8511 }, { "epoch": 
1.515491452991453, "grad_norm": 0.6619647145271301, "learning_rate": 0.0001373158343628457, "loss": 0.8492, "step": 8512 }, { "epoch": 1.5156695156695157, "grad_norm": 0.6458454132080078, "learning_rate": 0.00013730284759517675, "loss": 1.0049, "step": 8513 }, { "epoch": 1.5158475783475782, "grad_norm": 0.7415743470191956, "learning_rate": 0.00013728986009663602, "loss": 0.872, "step": 8514 }, { "epoch": 1.516025641025641, "grad_norm": 0.6198840141296387, "learning_rate": 0.00013727687186747793, "loss": 0.8645, "step": 8515 }, { "epoch": 1.5162037037037037, "grad_norm": 0.7160853147506714, "learning_rate": 0.00013726388290795697, "loss": 1.0144, "step": 8516 }, { "epoch": 1.5163817663817665, "grad_norm": 0.6604135632514954, "learning_rate": 0.00013725089321832765, "loss": 0.9827, "step": 8517 }, { "epoch": 1.5165598290598292, "grad_norm": 0.6480790972709656, "learning_rate": 0.00013723790279884443, "loss": 1.0357, "step": 8518 }, { "epoch": 1.5167378917378918, "grad_norm": 0.6207128167152405, "learning_rate": 0.00013722491164976187, "loss": 0.9467, "step": 8519 }, { "epoch": 1.5169159544159543, "grad_norm": 0.6024298667907715, "learning_rate": 0.00013721191977133452, "loss": 0.8821, "step": 8520 }, { "epoch": 1.517094017094017, "grad_norm": 0.684898316860199, "learning_rate": 0.00013719892716381688, "loss": 0.9823, "step": 8521 }, { "epoch": 1.5172720797720798, "grad_norm": 0.7460635304450989, "learning_rate": 0.00013718593382746355, "loss": 1.2573, "step": 8522 }, { "epoch": 1.5174501424501425, "grad_norm": 0.7193243503570557, "learning_rate": 0.00013717293976252907, "loss": 1.0162, "step": 8523 }, { "epoch": 1.5176282051282053, "grad_norm": 0.6328752040863037, "learning_rate": 0.0001371599449692681, "loss": 0.8183, "step": 8524 }, { "epoch": 1.5178062678062678, "grad_norm": 0.658784806728363, "learning_rate": 0.00013714694944793517, "loss": 0.9315, "step": 8525 }, { "epoch": 1.5179843304843303, "grad_norm": 0.7875827550888062, "learning_rate": 
0.00013713395319878493, "loss": 1.0889, "step": 8526 }, { "epoch": 1.518162393162393, "grad_norm": 0.6580079793930054, "learning_rate": 0.00013712095622207203, "loss": 1.0276, "step": 8527 }, { "epoch": 1.5183404558404558, "grad_norm": 0.6214027404785156, "learning_rate": 0.00013710795851805106, "loss": 0.9692, "step": 8528 }, { "epoch": 1.5185185185185186, "grad_norm": 0.7839403748512268, "learning_rate": 0.0001370949600869768, "loss": 0.7378, "step": 8529 }, { "epoch": 1.5186965811965814, "grad_norm": 0.6632764339447021, "learning_rate": 0.0001370819609291038, "loss": 0.9431, "step": 8530 }, { "epoch": 1.5188746438746439, "grad_norm": 0.7071712017059326, "learning_rate": 0.00013706896104468682, "loss": 0.7684, "step": 8531 }, { "epoch": 1.5190527065527064, "grad_norm": 0.7494829297065735, "learning_rate": 0.00013705596043398058, "loss": 0.9709, "step": 8532 }, { "epoch": 1.5192307692307692, "grad_norm": 0.6408106088638306, "learning_rate": 0.00013704295909723973, "loss": 0.8494, "step": 8533 }, { "epoch": 1.519408831908832, "grad_norm": 0.6043150424957275, "learning_rate": 0.0001370299570347191, "loss": 0.7485, "step": 8534 }, { "epoch": 1.5195868945868947, "grad_norm": 0.6944992542266846, "learning_rate": 0.00013701695424667336, "loss": 0.8403, "step": 8535 }, { "epoch": 1.5197649572649574, "grad_norm": 0.7730217576026917, "learning_rate": 0.00013700395073335726, "loss": 0.9122, "step": 8536 }, { "epoch": 1.51994301994302, "grad_norm": 0.6300255060195923, "learning_rate": 0.00013699094649502564, "loss": 0.9185, "step": 8537 }, { "epoch": 1.5201210826210825, "grad_norm": 0.648676335811615, "learning_rate": 0.00013697794153193327, "loss": 0.9897, "step": 8538 }, { "epoch": 1.5202991452991452, "grad_norm": 0.7365788817405701, "learning_rate": 0.00013696493584433494, "loss": 0.958, "step": 8539 }, { "epoch": 1.520477207977208, "grad_norm": 0.6634557247161865, "learning_rate": 0.00013695192943248552, "loss": 0.9389, "step": 8540 }, { "epoch": 1.5206552706552707, 
"grad_norm": 0.6110827922821045, "learning_rate": 0.00013693892229663977, "loss": 0.9341, "step": 8541 }, { "epoch": 1.5208333333333335, "grad_norm": 0.7207275032997131, "learning_rate": 0.00013692591443705256, "loss": 0.9526, "step": 8542 }, { "epoch": 1.521011396011396, "grad_norm": 0.7071022391319275, "learning_rate": 0.0001369129058539788, "loss": 0.9572, "step": 8543 }, { "epoch": 1.5211894586894585, "grad_norm": 0.5898227691650391, "learning_rate": 0.0001368998965476733, "loss": 0.921, "step": 8544 }, { "epoch": 1.5213675213675213, "grad_norm": 0.7542559504508972, "learning_rate": 0.000136886886518391, "loss": 0.7799, "step": 8545 }, { "epoch": 1.521545584045584, "grad_norm": 0.6904959678649902, "learning_rate": 0.00013687387576638674, "loss": 0.9601, "step": 8546 }, { "epoch": 1.5217236467236468, "grad_norm": 0.763414204120636, "learning_rate": 0.00013686086429191553, "loss": 1.0046, "step": 8547 }, { "epoch": 1.5219017094017095, "grad_norm": 0.6879960298538208, "learning_rate": 0.00013684785209523224, "loss": 0.9615, "step": 8548 }, { "epoch": 1.522079772079772, "grad_norm": 0.7166057229042053, "learning_rate": 0.00013683483917659186, "loss": 0.9481, "step": 8549 }, { "epoch": 1.5222578347578346, "grad_norm": 0.6384348273277283, "learning_rate": 0.0001368218255362493, "loss": 1.1037, "step": 8550 }, { "epoch": 1.5224358974358974, "grad_norm": 0.6564528346061707, "learning_rate": 0.00013680881117445953, "loss": 0.951, "step": 8551 }, { "epoch": 1.52261396011396, "grad_norm": 0.749301016330719, "learning_rate": 0.00013679579609147762, "loss": 0.9324, "step": 8552 }, { "epoch": 1.5227920227920229, "grad_norm": 0.8130472898483276, "learning_rate": 0.00013678278028755848, "loss": 1.0178, "step": 8553 }, { "epoch": 1.5229700854700856, "grad_norm": 0.6763297319412231, "learning_rate": 0.0001367697637629572, "loss": 0.9224, "step": 8554 }, { "epoch": 1.5231481481481481, "grad_norm": 0.6630885601043701, "learning_rate": 0.00013675674651792878, "loss": 1.0254, 
"step": 8555 }, { "epoch": 1.5233262108262107, "grad_norm": 0.7377206087112427, "learning_rate": 0.00013674372855272825, "loss": 1.0413, "step": 8556 }, { "epoch": 1.5235042735042734, "grad_norm": 0.5270320177078247, "learning_rate": 0.00013673070986761068, "loss": 0.7124, "step": 8557 }, { "epoch": 1.5236823361823362, "grad_norm": 0.5941976308822632, "learning_rate": 0.00013671769046283116, "loss": 1.0281, "step": 8558 }, { "epoch": 1.523860398860399, "grad_norm": 0.6131376028060913, "learning_rate": 0.0001367046703386448, "loss": 0.7593, "step": 8559 }, { "epoch": 1.5240384615384617, "grad_norm": 0.7381763458251953, "learning_rate": 0.00013669164949530664, "loss": 1.148, "step": 8560 }, { "epoch": 1.5242165242165242, "grad_norm": 0.683274507522583, "learning_rate": 0.00013667862793307185, "loss": 0.8354, "step": 8561 }, { "epoch": 1.5243945868945867, "grad_norm": 0.6912649273872375, "learning_rate": 0.0001366656056521955, "loss": 0.9043, "step": 8562 }, { "epoch": 1.5245726495726495, "grad_norm": 0.5999594330787659, "learning_rate": 0.0001366525826529328, "loss": 0.6138, "step": 8563 }, { "epoch": 1.5247507122507122, "grad_norm": 0.7185927629470825, "learning_rate": 0.00013663955893553892, "loss": 0.895, "step": 8564 }, { "epoch": 1.524928774928775, "grad_norm": 0.5967002511024475, "learning_rate": 0.00013662653450026893, "loss": 0.9636, "step": 8565 }, { "epoch": 1.5251068376068377, "grad_norm": 0.7122953534126282, "learning_rate": 0.00013661350934737813, "loss": 0.9465, "step": 8566 }, { "epoch": 1.5252849002849003, "grad_norm": 0.705326497554779, "learning_rate": 0.00013660048347712163, "loss": 1.121, "step": 8567 }, { "epoch": 1.5254629629629628, "grad_norm": 0.6023733019828796, "learning_rate": 0.0001365874568897547, "loss": 0.9881, "step": 8568 }, { "epoch": 1.5256410256410255, "grad_norm": 0.6883122324943542, "learning_rate": 0.0001365744295855326, "loss": 1.2372, "step": 8569 }, { "epoch": 1.5258190883190883, "grad_norm": 0.718126654624939, 
"learning_rate": 0.0001365614015647105, "loss": 1.0888, "step": 8570 }, { "epoch": 1.525997150997151, "grad_norm": 0.6649243831634521, "learning_rate": 0.00013654837282754367, "loss": 1.0458, "step": 8571 }, { "epoch": 1.5261752136752138, "grad_norm": 0.6959797143936157, "learning_rate": 0.00013653534337428738, "loss": 0.9282, "step": 8572 }, { "epoch": 1.5263532763532763, "grad_norm": 0.6069976687431335, "learning_rate": 0.00013652231320519697, "loss": 0.9706, "step": 8573 }, { "epoch": 1.526531339031339, "grad_norm": 0.7085374593734741, "learning_rate": 0.0001365092823205277, "loss": 1.1241, "step": 8574 }, { "epoch": 1.5267094017094016, "grad_norm": 0.575106143951416, "learning_rate": 0.00013649625072053488, "loss": 0.9814, "step": 8575 }, { "epoch": 1.5268874643874644, "grad_norm": 0.6541273593902588, "learning_rate": 0.00013648321840547384, "loss": 1.0155, "step": 8576 }, { "epoch": 1.5270655270655271, "grad_norm": 0.6754382848739624, "learning_rate": 0.0001364701853755999, "loss": 1.0284, "step": 8577 }, { "epoch": 1.5272435897435899, "grad_norm": 0.6219634413719177, "learning_rate": 0.00013645715163116846, "loss": 1.1539, "step": 8578 }, { "epoch": 1.5274216524216524, "grad_norm": 0.7625157833099365, "learning_rate": 0.00013644411717243486, "loss": 1.1157, "step": 8579 }, { "epoch": 1.5275997150997151, "grad_norm": 0.6944296956062317, "learning_rate": 0.0001364310819996545, "loss": 0.8309, "step": 8580 }, { "epoch": 1.5277777777777777, "grad_norm": 0.7198494672775269, "learning_rate": 0.00013641804611308277, "loss": 1.0883, "step": 8581 }, { "epoch": 1.5279558404558404, "grad_norm": 0.6398822069168091, "learning_rate": 0.00013640500951297508, "loss": 1.0173, "step": 8582 }, { "epoch": 1.5281339031339032, "grad_norm": 0.7306683659553528, "learning_rate": 0.00013639197219958682, "loss": 0.9979, "step": 8583 }, { "epoch": 1.528311965811966, "grad_norm": 0.6873512268066406, "learning_rate": 0.00013637893417317348, "loss": 0.7883, "step": 8584 }, { "epoch": 
1.5284900284900285, "grad_norm": 0.6482085585594177, "learning_rate": 0.00013636589543399052, "loss": 0.9367, "step": 8585 }, { "epoch": 1.5286680911680912, "grad_norm": 0.8161232471466064, "learning_rate": 0.00013635285598229336, "loss": 1.0582, "step": 8586 }, { "epoch": 1.5288461538461537, "grad_norm": 0.6722155809402466, "learning_rate": 0.0001363398158183375, "loss": 0.9805, "step": 8587 }, { "epoch": 1.5290242165242165, "grad_norm": 0.7175397872924805, "learning_rate": 0.00013632677494237845, "loss": 1.0747, "step": 8588 }, { "epoch": 1.5292022792022792, "grad_norm": 0.6665592789649963, "learning_rate": 0.00013631373335467172, "loss": 1.006, "step": 8589 }, { "epoch": 1.529380341880342, "grad_norm": 0.7002299427986145, "learning_rate": 0.0001363006910554728, "loss": 1.0702, "step": 8590 }, { "epoch": 1.5295584045584045, "grad_norm": 0.7712168097496033, "learning_rate": 0.00013628764804503725, "loss": 1.0628, "step": 8591 }, { "epoch": 1.5297364672364673, "grad_norm": 0.6620795130729675, "learning_rate": 0.0001362746043236206, "loss": 1.01, "step": 8592 }, { "epoch": 1.5299145299145298, "grad_norm": 0.6374393701553345, "learning_rate": 0.00013626155989147846, "loss": 0.9106, "step": 8593 }, { "epoch": 1.5300925925925926, "grad_norm": 0.6531631946563721, "learning_rate": 0.00013624851474886636, "loss": 1.0488, "step": 8594 }, { "epoch": 1.5302706552706553, "grad_norm": 0.6843775510787964, "learning_rate": 0.00013623546889603993, "loss": 0.8599, "step": 8595 }, { "epoch": 1.530448717948718, "grad_norm": 0.7232706546783447, "learning_rate": 0.00013622242233325476, "loss": 1.0875, "step": 8596 }, { "epoch": 1.5306267806267806, "grad_norm": 0.695691704750061, "learning_rate": 0.00013620937506076644, "loss": 0.9835, "step": 8597 }, { "epoch": 1.5308048433048433, "grad_norm": 0.6321248412132263, "learning_rate": 0.00013619632707883065, "loss": 0.9778, "step": 8598 }, { "epoch": 1.5309829059829059, "grad_norm": 0.6469168663024902, "learning_rate": 
0.00013618327838770303, "loss": 0.9968, "step": 8599 }, { "epoch": 1.5311609686609686, "grad_norm": 0.6798683404922485, "learning_rate": 0.00013617022898763925, "loss": 0.78, "step": 8600 }, { "epoch": 1.5313390313390314, "grad_norm": 0.6932336091995239, "learning_rate": 0.00013615717887889496, "loss": 0.9473, "step": 8601 }, { "epoch": 1.5315170940170941, "grad_norm": 0.7304185628890991, "learning_rate": 0.00013614412806172585, "loss": 1.0478, "step": 8602 }, { "epoch": 1.5316951566951567, "grad_norm": 0.6585272550582886, "learning_rate": 0.00013613107653638763, "loss": 0.8563, "step": 8603 }, { "epoch": 1.5318732193732194, "grad_norm": 0.6804470419883728, "learning_rate": 0.00013611802430313604, "loss": 0.9839, "step": 8604 }, { "epoch": 1.532051282051282, "grad_norm": 0.7271378040313721, "learning_rate": 0.0001361049713622268, "loss": 1.0906, "step": 8605 }, { "epoch": 1.5322293447293447, "grad_norm": 0.7731603980064392, "learning_rate": 0.00013609191771391562, "loss": 1.1318, "step": 8606 }, { "epoch": 1.5324074074074074, "grad_norm": 0.6143709421157837, "learning_rate": 0.0001360788633584583, "loss": 0.8726, "step": 8607 }, { "epoch": 1.5325854700854702, "grad_norm": 0.6847203373908997, "learning_rate": 0.00013606580829611056, "loss": 0.9963, "step": 8608 }, { "epoch": 1.5327635327635327, "grad_norm": 0.7561219334602356, "learning_rate": 0.0001360527525271283, "loss": 0.8873, "step": 8609 }, { "epoch": 1.5329415954415955, "grad_norm": 0.7997925281524658, "learning_rate": 0.0001360396960517672, "loss": 0.7675, "step": 8610 }, { "epoch": 1.533119658119658, "grad_norm": 0.7206357717514038, "learning_rate": 0.00013602663887028315, "loss": 1.0084, "step": 8611 }, { "epoch": 1.5332977207977208, "grad_norm": 0.6454238891601562, "learning_rate": 0.00013601358098293194, "loss": 0.8194, "step": 8612 }, { "epoch": 1.5334757834757835, "grad_norm": 0.5531884431838989, "learning_rate": 0.0001360005223899694, "loss": 0.8596, "step": 8613 }, { "epoch": 1.5336538461538463, 
"grad_norm": 0.659161388874054, "learning_rate": 0.00013598746309165144, "loss": 1.0363, "step": 8614 }, { "epoch": 1.5338319088319088, "grad_norm": 0.6958948373794556, "learning_rate": 0.00013597440308823385, "loss": 0.9852, "step": 8615 }, { "epoch": 1.5340099715099715, "grad_norm": 0.7147171497344971, "learning_rate": 0.0001359613423799726, "loss": 1.0506, "step": 8616 }, { "epoch": 1.534188034188034, "grad_norm": 0.604450523853302, "learning_rate": 0.00013594828096712353, "loss": 0.9344, "step": 8617 }, { "epoch": 1.5343660968660968, "grad_norm": 0.714547336101532, "learning_rate": 0.00013593521884994257, "loss": 1.1583, "step": 8618 }, { "epoch": 1.5345441595441596, "grad_norm": 0.6864442825317383, "learning_rate": 0.00013592215602868565, "loss": 0.991, "step": 8619 }, { "epoch": 1.5347222222222223, "grad_norm": 0.6384446620941162, "learning_rate": 0.00013590909250360873, "loss": 0.8799, "step": 8620 }, { "epoch": 1.5349002849002849, "grad_norm": 0.7307949662208557, "learning_rate": 0.00013589602827496772, "loss": 1.0276, "step": 8621 }, { "epoch": 1.5350783475783476, "grad_norm": 0.6620129942893982, "learning_rate": 0.00013588296334301862, "loss": 0.9378, "step": 8622 }, { "epoch": 1.5352564102564101, "grad_norm": 0.7216851711273193, "learning_rate": 0.00013586989770801735, "loss": 0.8984, "step": 8623 }, { "epoch": 1.5354344729344729, "grad_norm": 0.7319885492324829, "learning_rate": 0.00013585683137022, "loss": 1.0357, "step": 8624 }, { "epoch": 1.5356125356125356, "grad_norm": 0.7455703616142273, "learning_rate": 0.00013584376432988247, "loss": 0.9727, "step": 8625 }, { "epoch": 1.5357905982905984, "grad_norm": 0.7285277247428894, "learning_rate": 0.0001358306965872609, "loss": 1.1132, "step": 8626 }, { "epoch": 1.535968660968661, "grad_norm": 0.6250096559524536, "learning_rate": 0.00013581762814261124, "loss": 0.8538, "step": 8627 }, { "epoch": 1.5361467236467237, "grad_norm": 0.6252279281616211, "learning_rate": 0.0001358045589961895, "loss": 0.822, 
"step": 8628 }, { "epoch": 1.5363247863247862, "grad_norm": 0.7723368406295776, "learning_rate": 0.0001357914891482519, "loss": 0.9841, "step": 8629 }, { "epoch": 1.536502849002849, "grad_norm": 0.6855236887931824, "learning_rate": 0.00013577841859905435, "loss": 0.9512, "step": 8630 }, { "epoch": 1.5366809116809117, "grad_norm": 0.8320944309234619, "learning_rate": 0.00013576534734885303, "loss": 1.0324, "step": 8631 }, { "epoch": 1.5368589743589745, "grad_norm": 0.6970052123069763, "learning_rate": 0.00013575227539790405, "loss": 0.9874, "step": 8632 }, { "epoch": 1.5370370370370372, "grad_norm": 0.7774853110313416, "learning_rate": 0.00013573920274646345, "loss": 0.962, "step": 8633 }, { "epoch": 1.5372150997150997, "grad_norm": 0.6479182839393616, "learning_rate": 0.0001357261293947875, "loss": 0.9438, "step": 8634 }, { "epoch": 1.5373931623931623, "grad_norm": 0.6855679750442505, "learning_rate": 0.00013571305534313218, "loss": 1.0898, "step": 8635 }, { "epoch": 1.537571225071225, "grad_norm": 0.6527835726737976, "learning_rate": 0.00013569998059175377, "loss": 0.954, "step": 8636 }, { "epoch": 1.5377492877492878, "grad_norm": 0.6601176857948303, "learning_rate": 0.00013568690514090837, "loss": 1.0183, "step": 8637 }, { "epoch": 1.5379273504273505, "grad_norm": 0.6628120541572571, "learning_rate": 0.0001356738289908522, "loss": 1.0651, "step": 8638 }, { "epoch": 1.5381054131054133, "grad_norm": 0.7492203712463379, "learning_rate": 0.00013566075214184147, "loss": 1.2438, "step": 8639 }, { "epoch": 1.5382834757834758, "grad_norm": 0.6781343817710876, "learning_rate": 0.00013564767459413237, "loss": 0.9413, "step": 8640 }, { "epoch": 1.5384615384615383, "grad_norm": 0.6890891790390015, "learning_rate": 0.00013563459634798115, "loss": 0.9912, "step": 8641 }, { "epoch": 1.538639601139601, "grad_norm": 0.722820520401001, "learning_rate": 0.00013562151740364404, "loss": 1.1799, "step": 8642 }, { "epoch": 1.5388176638176638, "grad_norm": 0.738369882106781, 
"learning_rate": 0.0001356084377613773, "loss": 1.1313, "step": 8643 }, { "epoch": 1.5389957264957266, "grad_norm": 0.6232718229293823, "learning_rate": 0.00013559535742143717, "loss": 0.9035, "step": 8644 }, { "epoch": 1.5391737891737893, "grad_norm": 0.7371624708175659, "learning_rate": 0.00013558227638407996, "loss": 1.3377, "step": 8645 }, { "epoch": 1.5393518518518519, "grad_norm": 0.658353865146637, "learning_rate": 0.00013556919464956197, "loss": 0.9591, "step": 8646 }, { "epoch": 1.5395299145299144, "grad_norm": 0.6205827593803406, "learning_rate": 0.0001355561122181395, "loss": 0.9217, "step": 8647 }, { "epoch": 1.5397079772079771, "grad_norm": 0.5892502069473267, "learning_rate": 0.00013554302909006888, "loss": 0.8893, "step": 8648 }, { "epoch": 1.53988603988604, "grad_norm": 1.224568486213684, "learning_rate": 0.0001355299452656064, "loss": 0.8237, "step": 8649 }, { "epoch": 1.5400641025641026, "grad_norm": 0.7732635736465454, "learning_rate": 0.0001355168607450085, "loss": 1.1043, "step": 8650 }, { "epoch": 1.5402421652421654, "grad_norm": 0.6365402340888977, "learning_rate": 0.00013550377552853146, "loss": 1.0345, "step": 8651 }, { "epoch": 1.540420227920228, "grad_norm": 0.7046400904655457, "learning_rate": 0.00013549068961643171, "loss": 1.0361, "step": 8652 }, { "epoch": 1.5405982905982905, "grad_norm": 0.6760256886482239, "learning_rate": 0.0001354776030089656, "loss": 0.9437, "step": 8653 }, { "epoch": 1.5407763532763532, "grad_norm": 0.6180984973907471, "learning_rate": 0.00013546451570638958, "loss": 0.9737, "step": 8654 }, { "epoch": 1.540954415954416, "grad_norm": 0.6221960186958313, "learning_rate": 0.00013545142770896005, "loss": 0.9313, "step": 8655 }, { "epoch": 1.5411324786324787, "grad_norm": 0.6887816786766052, "learning_rate": 0.0001354383390169334, "loss": 1.1736, "step": 8656 }, { "epoch": 1.5413105413105415, "grad_norm": 0.5840606093406677, "learning_rate": 0.00013542524963056614, "loss": 0.9269, "step": 8657 }, { "epoch": 
1.541488603988604, "grad_norm": 0.7396654486656189, "learning_rate": 0.00013541215955011472, "loss": 1.1189, "step": 8658 }, { "epoch": 1.5416666666666665, "grad_norm": 0.780616819858551, "learning_rate": 0.00013539906877583555, "loss": 1.1251, "step": 8659 }, { "epoch": 1.5418447293447293, "grad_norm": 0.6975206732749939, "learning_rate": 0.0001353859773079852, "loss": 1.2134, "step": 8660 }, { "epoch": 1.542022792022792, "grad_norm": 0.7572869658470154, "learning_rate": 0.00013537288514682013, "loss": 0.9396, "step": 8661 }, { "epoch": 1.5422008547008548, "grad_norm": 0.6252159476280212, "learning_rate": 0.00013535979229259686, "loss": 0.8449, "step": 8662 }, { "epoch": 1.5423789173789175, "grad_norm": 0.7321650981903076, "learning_rate": 0.0001353466987455719, "loss": 1.3263, "step": 8663 }, { "epoch": 1.54255698005698, "grad_norm": 0.7168700695037842, "learning_rate": 0.00013533360450600177, "loss": 0.8923, "step": 8664 }, { "epoch": 1.5427350427350426, "grad_norm": 0.5931934714317322, "learning_rate": 0.00013532050957414313, "loss": 0.8448, "step": 8665 }, { "epoch": 1.5429131054131053, "grad_norm": 0.6621279120445251, "learning_rate": 0.00013530741395025245, "loss": 1.1023, "step": 8666 }, { "epoch": 1.543091168091168, "grad_norm": 0.7133732438087463, "learning_rate": 0.00013529431763458633, "loss": 0.9986, "step": 8667 }, { "epoch": 1.5432692307692308, "grad_norm": 0.7589015960693359, "learning_rate": 0.0001352812206274014, "loss": 1.0111, "step": 8668 }, { "epoch": 1.5434472934472936, "grad_norm": 0.6958192586898804, "learning_rate": 0.0001352681229289542, "loss": 0.9466, "step": 8669 }, { "epoch": 1.5436253561253561, "grad_norm": 0.7539750337600708, "learning_rate": 0.0001352550245395014, "loss": 1.0974, "step": 8670 }, { "epoch": 1.5438034188034186, "grad_norm": 0.7003816366195679, "learning_rate": 0.00013524192545929964, "loss": 1.0354, "step": 8671 }, { "epoch": 1.5439814814814814, "grad_norm": 0.6503025889396667, "learning_rate": 
0.00013522882568860558, "loss": 1.0476, "step": 8672 }, { "epoch": 1.5441595441595442, "grad_norm": 0.6757345199584961, "learning_rate": 0.00013521572522767584, "loss": 0.864, "step": 8673 }, { "epoch": 1.544337606837607, "grad_norm": 0.6857611536979675, "learning_rate": 0.0001352026240767671, "loss": 1.1627, "step": 8674 }, { "epoch": 1.5445156695156697, "grad_norm": 0.5775430798530579, "learning_rate": 0.0001351895222361361, "loss": 0.7444, "step": 8675 }, { "epoch": 1.5446937321937322, "grad_norm": 0.7511499524116516, "learning_rate": 0.00013517641970603952, "loss": 1.1547, "step": 8676 }, { "epoch": 1.5448717948717947, "grad_norm": 0.6727504730224609, "learning_rate": 0.00013516331648673403, "loss": 1.0829, "step": 8677 }, { "epoch": 1.5450498575498575, "grad_norm": 0.6128812432289124, "learning_rate": 0.00013515021257847642, "loss": 0.9318, "step": 8678 }, { "epoch": 1.5452279202279202, "grad_norm": 0.7309781312942505, "learning_rate": 0.00013513710798152343, "loss": 1.0844, "step": 8679 }, { "epoch": 1.545405982905983, "grad_norm": 0.695655882358551, "learning_rate": 0.00013512400269613176, "loss": 1.113, "step": 8680 }, { "epoch": 1.5455840455840457, "grad_norm": 0.696441650390625, "learning_rate": 0.00013511089672255824, "loss": 1.0499, "step": 8681 }, { "epoch": 1.5457621082621082, "grad_norm": 0.6309961080551147, "learning_rate": 0.00013509779006105964, "loss": 0.8759, "step": 8682 }, { "epoch": 1.5459401709401708, "grad_norm": 0.6155984401702881, "learning_rate": 0.00013508468271189277, "loss": 0.8967, "step": 8683 }, { "epoch": 1.5461182336182335, "grad_norm": 0.6786884665489197, "learning_rate": 0.00013507157467531442, "loss": 1.0806, "step": 8684 }, { "epoch": 1.5462962962962963, "grad_norm": 0.6494075059890747, "learning_rate": 0.00013505846595158138, "loss": 1.0196, "step": 8685 }, { "epoch": 1.546474358974359, "grad_norm": 0.7599824070930481, "learning_rate": 0.00013504535654095055, "loss": 0.8662, "step": 8686 }, { "epoch": 1.5466524216524218, 
"grad_norm": 0.6017210483551025, "learning_rate": 0.00013503224644367877, "loss": 0.872, "step": 8687 }, { "epoch": 1.5468304843304843, "grad_norm": 0.7972410321235657, "learning_rate": 0.00013501913566002288, "loss": 1.0958, "step": 8688 }, { "epoch": 1.547008547008547, "grad_norm": 0.7572960257530212, "learning_rate": 0.00013500602419023978, "loss": 1.0219, "step": 8689 }, { "epoch": 1.5471866096866096, "grad_norm": 0.6329224109649658, "learning_rate": 0.00013499291203458635, "loss": 0.8636, "step": 8690 }, { "epoch": 1.5473646723646723, "grad_norm": 0.6777113080024719, "learning_rate": 0.0001349797991933195, "loss": 1.0297, "step": 8691 }, { "epoch": 1.547542735042735, "grad_norm": 0.6449527144432068, "learning_rate": 0.00013496668566669617, "loss": 1.0296, "step": 8692 }, { "epoch": 1.5477207977207978, "grad_norm": 0.8236973881721497, "learning_rate": 0.00013495357145497326, "loss": 0.8569, "step": 8693 }, { "epoch": 1.5478988603988604, "grad_norm": 0.6753743290901184, "learning_rate": 0.0001349404565584077, "loss": 1.0733, "step": 8694 }, { "epoch": 1.5480769230769231, "grad_norm": 0.6642967462539673, "learning_rate": 0.0001349273409772565, "loss": 0.9437, "step": 8695 }, { "epoch": 1.5482549857549857, "grad_norm": 0.6470823884010315, "learning_rate": 0.00013491422471177661, "loss": 0.999, "step": 8696 }, { "epoch": 1.5484330484330484, "grad_norm": 0.7287036776542664, "learning_rate": 0.000134901107762225, "loss": 0.9396, "step": 8697 }, { "epoch": 1.5486111111111112, "grad_norm": 0.6258324980735779, "learning_rate": 0.00013488799012885872, "loss": 1.045, "step": 8698 }, { "epoch": 1.548789173789174, "grad_norm": 0.6540539860725403, "learning_rate": 0.00013487487181193473, "loss": 0.9939, "step": 8699 }, { "epoch": 1.5489672364672364, "grad_norm": 0.7129563093185425, "learning_rate": 0.00013486175281171003, "loss": 1.2079, "step": 8700 }, { "epoch": 1.5491452991452992, "grad_norm": 0.6383145451545715, "learning_rate": 0.00013484863312844173, "loss": 0.9999, 
"step": 8701 }, { "epoch": 1.5493233618233617, "grad_norm": 0.6310200691223145, "learning_rate": 0.0001348355127623869, "loss": 1.1193, "step": 8702 }, { "epoch": 1.5495014245014245, "grad_norm": 0.6370054483413696, "learning_rate": 0.0001348223917138025, "loss": 1.0213, "step": 8703 }, { "epoch": 1.5496794871794872, "grad_norm": 0.7052688598632812, "learning_rate": 0.00013480926998294573, "loss": 0.8773, "step": 8704 }, { "epoch": 1.54985754985755, "grad_norm": 0.6369579434394836, "learning_rate": 0.00013479614757007355, "loss": 1.0072, "step": 8705 }, { "epoch": 1.5500356125356125, "grad_norm": 0.7152075171470642, "learning_rate": 0.0001347830244754432, "loss": 1.0409, "step": 8706 }, { "epoch": 1.5502136752136753, "grad_norm": 0.654183566570282, "learning_rate": 0.00013476990069931173, "loss": 0.9363, "step": 8707 }, { "epoch": 1.5503917378917378, "grad_norm": 0.6700537204742432, "learning_rate": 0.00013475677624193627, "loss": 0.985, "step": 8708 }, { "epoch": 1.5505698005698005, "grad_norm": 0.7195445895195007, "learning_rate": 0.00013474365110357402, "loss": 0.988, "step": 8709 }, { "epoch": 1.5507478632478633, "grad_norm": 0.6019890904426575, "learning_rate": 0.00013473052528448201, "loss": 0.9915, "step": 8710 }, { "epoch": 1.550925925925926, "grad_norm": 0.7787565588951111, "learning_rate": 0.0001347173987849176, "loss": 0.9676, "step": 8711 }, { "epoch": 1.5511039886039886, "grad_norm": 0.6997103691101074, "learning_rate": 0.00013470427160513782, "loss": 1.1158, "step": 8712 }, { "epoch": 1.5512820512820513, "grad_norm": 0.6259464025497437, "learning_rate": 0.00013469114374539998, "loss": 0.8784, "step": 8713 }, { "epoch": 1.5514601139601139, "grad_norm": 0.6159056425094604, "learning_rate": 0.00013467801520596122, "loss": 0.9184, "step": 8714 }, { "epoch": 1.5516381766381766, "grad_norm": 0.6823606491088867, "learning_rate": 0.00013466488598707876, "loss": 0.9542, "step": 8715 }, { "epoch": 1.5518162393162394, "grad_norm": 0.6781585812568665, 
"learning_rate": 0.0001346517560890099, "loss": 1.1761, "step": 8716 }, { "epoch": 1.551994301994302, "grad_norm": 0.6313831806182861, "learning_rate": 0.00013463862551201184, "loss": 0.8935, "step": 8717 }, { "epoch": 1.5521723646723646, "grad_norm": 0.7466186881065369, "learning_rate": 0.0001346254942563419, "loss": 1.0583, "step": 8718 }, { "epoch": 1.5523504273504274, "grad_norm": 0.7073680758476257, "learning_rate": 0.0001346123623222573, "loss": 0.9863, "step": 8719 }, { "epoch": 1.55252849002849, "grad_norm": 0.6286870241165161, "learning_rate": 0.00013459922971001536, "loss": 0.9921, "step": 8720 }, { "epoch": 1.5527065527065527, "grad_norm": 0.6047035455703735, "learning_rate": 0.0001345860964198734, "loss": 0.9155, "step": 8721 }, { "epoch": 1.5528846153846154, "grad_norm": 0.5909964442253113, "learning_rate": 0.00013457296245208874, "loss": 0.9593, "step": 8722 }, { "epoch": 1.5530626780626782, "grad_norm": 0.7838597893714905, "learning_rate": 0.00013455982780691869, "loss": 0.8872, "step": 8723 }, { "epoch": 1.5532407407407407, "grad_norm": 0.6914706230163574, "learning_rate": 0.00013454669248462063, "loss": 0.9104, "step": 8724 }, { "epoch": 1.5534188034188035, "grad_norm": 0.6777952909469604, "learning_rate": 0.00013453355648545182, "loss": 0.9839, "step": 8725 }, { "epoch": 1.553596866096866, "grad_norm": 0.7482799291610718, "learning_rate": 0.00013452041980966978, "loss": 1.1164, "step": 8726 }, { "epoch": 1.5537749287749287, "grad_norm": 0.6616327166557312, "learning_rate": 0.0001345072824575318, "loss": 0.9574, "step": 8727 }, { "epoch": 1.5539529914529915, "grad_norm": 0.7193203568458557, "learning_rate": 0.00013449414442929532, "loss": 1.0609, "step": 8728 }, { "epoch": 1.5541310541310542, "grad_norm": 0.6599446535110474, "learning_rate": 0.0001344810057252177, "loss": 0.9574, "step": 8729 }, { "epoch": 1.5543091168091168, "grad_norm": 0.7221707105636597, "learning_rate": 0.00013446786634555642, "loss": 0.9819, "step": 8730 }, { "epoch": 
1.5544871794871795, "grad_norm": 0.6531312465667725, "learning_rate": 0.0001344547262905689, "loss": 0.9986, "step": 8731 }, { "epoch": 1.554665242165242, "grad_norm": 0.6879804730415344, "learning_rate": 0.0001344415855605126, "loss": 1.1078, "step": 8732 }, { "epoch": 1.5548433048433048, "grad_norm": 0.708907425403595, "learning_rate": 0.00013442844415564498, "loss": 1.0221, "step": 8733 }, { "epoch": 1.5550213675213675, "grad_norm": 0.7957375645637512, "learning_rate": 0.0001344153020762235, "loss": 1.3101, "step": 8734 }, { "epoch": 1.5551994301994303, "grad_norm": 0.7068197727203369, "learning_rate": 0.00013440215932250567, "loss": 0.8995, "step": 8735 }, { "epoch": 1.5553774928774928, "grad_norm": 0.6455841064453125, "learning_rate": 0.00013438901589474898, "loss": 0.7244, "step": 8736 }, { "epoch": 1.5555555555555556, "grad_norm": 0.7500516772270203, "learning_rate": 0.00013437587179321097, "loss": 1.0161, "step": 8737 }, { "epoch": 1.555733618233618, "grad_norm": 0.5983143448829651, "learning_rate": 0.00013436272701814917, "loss": 0.9922, "step": 8738 }, { "epoch": 1.5559116809116809, "grad_norm": 0.8761729598045349, "learning_rate": 0.0001343495815698211, "loss": 1.022, "step": 8739 }, { "epoch": 1.5560897435897436, "grad_norm": 0.6901857852935791, "learning_rate": 0.00013433643544848438, "loss": 1.0668, "step": 8740 }, { "epoch": 1.5562678062678064, "grad_norm": 0.6770836114883423, "learning_rate": 0.00013432328865439647, "loss": 0.9516, "step": 8741 }, { "epoch": 1.556445868945869, "grad_norm": 0.6138805150985718, "learning_rate": 0.00013431014118781505, "loss": 0.8682, "step": 8742 }, { "epoch": 1.5566239316239316, "grad_norm": 0.6796693801879883, "learning_rate": 0.00013429699304899772, "loss": 1.1132, "step": 8743 }, { "epoch": 1.5568019943019942, "grad_norm": 0.6626394987106323, "learning_rate": 0.000134283844238202, "loss": 0.9273, "step": 8744 }, { "epoch": 1.556980056980057, "grad_norm": 0.7088519334793091, "learning_rate": 0.00013427069475568563, 
"loss": 0.8915, "step": 8745 }, { "epoch": 1.5571581196581197, "grad_norm": 0.6244857311248779, "learning_rate": 0.0001342575446017061, "loss": 0.9466, "step": 8746 }, { "epoch": 1.5573361823361824, "grad_norm": 0.6969038248062134, "learning_rate": 0.00013424439377652123, "loss": 1.2307, "step": 8747 }, { "epoch": 1.5575142450142452, "grad_norm": 0.6636740565299988, "learning_rate": 0.0001342312422803886, "loss": 0.9456, "step": 8748 }, { "epoch": 1.5576923076923077, "grad_norm": 0.7863389253616333, "learning_rate": 0.00013421809011356586, "loss": 1.1888, "step": 8749 }, { "epoch": 1.5578703703703702, "grad_norm": 0.7504058480262756, "learning_rate": 0.00013420493727631073, "loss": 1.2602, "step": 8750 }, { "epoch": 1.558048433048433, "grad_norm": 0.7173139452934265, "learning_rate": 0.00013419178376888085, "loss": 1.0726, "step": 8751 }, { "epoch": 1.5582264957264957, "grad_norm": 0.6517474055290222, "learning_rate": 0.00013417862959153406, "loss": 1.1299, "step": 8752 }, { "epoch": 1.5584045584045585, "grad_norm": 0.8911739587783813, "learning_rate": 0.00013416547474452803, "loss": 1.105, "step": 8753 }, { "epoch": 1.5585826210826212, "grad_norm": 0.7116649150848389, "learning_rate": 0.00013415231922812049, "loss": 0.8037, "step": 8754 }, { "epoch": 1.5587606837606838, "grad_norm": 0.6935904026031494, "learning_rate": 0.00013413916304256916, "loss": 1.2778, "step": 8755 }, { "epoch": 1.5589387464387463, "grad_norm": 0.652763843536377, "learning_rate": 0.00013412600618813186, "loss": 0.9188, "step": 8756 }, { "epoch": 1.559116809116809, "grad_norm": 0.6545276641845703, "learning_rate": 0.00013411284866506637, "loss": 1.0116, "step": 8757 }, { "epoch": 1.5592948717948718, "grad_norm": 0.632165253162384, "learning_rate": 0.0001340996904736305, "loss": 0.8538, "step": 8758 }, { "epoch": 1.5594729344729346, "grad_norm": 0.6719664931297302, "learning_rate": 0.000134086531614082, "loss": 1.1877, "step": 8759 }, { "epoch": 1.5596509971509973, "grad_norm": 
0.6691158413887024, "learning_rate": 0.00013407337208667873, "loss": 1.0411, "step": 8760 }, { "epoch": 1.5598290598290598, "grad_norm": 0.7711479067802429, "learning_rate": 0.0001340602118916785, "loss": 0.9995, "step": 8761 }, { "epoch": 1.5600071225071224, "grad_norm": 0.7229881286621094, "learning_rate": 0.0001340470510293392, "loss": 1.1751, "step": 8762 }, { "epoch": 1.5601851851851851, "grad_norm": 0.7183271646499634, "learning_rate": 0.00013403388949991864, "loss": 0.9371, "step": 8763 }, { "epoch": 1.5603632478632479, "grad_norm": 0.8142383098602295, "learning_rate": 0.00013402072730367475, "loss": 1.0199, "step": 8764 }, { "epoch": 1.5605413105413106, "grad_norm": 0.6349362134933472, "learning_rate": 0.00013400756444086534, "loss": 0.8453, "step": 8765 }, { "epoch": 1.5607193732193734, "grad_norm": 0.651900589466095, "learning_rate": 0.00013399440091174834, "loss": 0.8952, "step": 8766 }, { "epoch": 1.560897435897436, "grad_norm": 0.6873346567153931, "learning_rate": 0.00013398123671658172, "loss": 0.9438, "step": 8767 }, { "epoch": 1.5610754985754984, "grad_norm": 0.7404754757881165, "learning_rate": 0.00013396807185562333, "loss": 1.123, "step": 8768 }, { "epoch": 1.5612535612535612, "grad_norm": 0.7449641227722168, "learning_rate": 0.00013395490632913111, "loss": 0.9407, "step": 8769 }, { "epoch": 1.561431623931624, "grad_norm": 0.7393384575843811, "learning_rate": 0.0001339417401373631, "loss": 1.0209, "step": 8770 }, { "epoch": 1.5616096866096867, "grad_norm": 0.6787426471710205, "learning_rate": 0.00013392857328057713, "loss": 0.9768, "step": 8771 }, { "epoch": 1.5617877492877494, "grad_norm": 0.6295693516731262, "learning_rate": 0.00013391540575903127, "loss": 0.9011, "step": 8772 }, { "epoch": 1.561965811965812, "grad_norm": 0.7114503979682922, "learning_rate": 0.00013390223757298354, "loss": 1.0696, "step": 8773 }, { "epoch": 1.5621438746438745, "grad_norm": 0.7540110349655151, "learning_rate": 0.00013388906872269184, "loss": 1.0071, "step": 8774 
}, { "epoch": 1.5623219373219372, "grad_norm": 0.6472305059432983, "learning_rate": 0.00013387589920841423, "loss": 1.105, "step": 8775 }, { "epoch": 1.5625, "grad_norm": 0.6936793327331543, "learning_rate": 0.00013386272903040874, "loss": 0.885, "step": 8776 }, { "epoch": 1.5626780626780628, "grad_norm": 0.7487989068031311, "learning_rate": 0.00013384955818893343, "loss": 0.7842, "step": 8777 }, { "epoch": 1.5628561253561255, "grad_norm": 0.6109505891799927, "learning_rate": 0.00013383638668424633, "loss": 0.9461, "step": 8778 }, { "epoch": 1.563034188034188, "grad_norm": 0.6650055646896362, "learning_rate": 0.00013382321451660558, "loss": 1.0463, "step": 8779 }, { "epoch": 1.5632122507122506, "grad_norm": 0.7147329449653625, "learning_rate": 0.00013381004168626915, "loss": 0.946, "step": 8780 }, { "epoch": 1.5633903133903133, "grad_norm": 0.6919382810592651, "learning_rate": 0.00013379686819349522, "loss": 0.8946, "step": 8781 }, { "epoch": 1.563568376068376, "grad_norm": 0.7339401245117188, "learning_rate": 0.00013378369403854184, "loss": 0.9625, "step": 8782 }, { "epoch": 1.5637464387464388, "grad_norm": 0.6337129473686218, "learning_rate": 0.00013377051922166717, "loss": 1.0854, "step": 8783 }, { "epoch": 1.5639245014245016, "grad_norm": 0.7301266193389893, "learning_rate": 0.0001337573437431293, "loss": 1.017, "step": 8784 }, { "epoch": 1.564102564102564, "grad_norm": 0.689540684223175, "learning_rate": 0.00013374416760318644, "loss": 0.8734, "step": 8785 }, { "epoch": 1.5642806267806266, "grad_norm": 0.7121307849884033, "learning_rate": 0.0001337309908020967, "loss": 1.0827, "step": 8786 }, { "epoch": 1.5644586894586894, "grad_norm": 0.6715386509895325, "learning_rate": 0.00013371781334011826, "loss": 0.946, "step": 8787 }, { "epoch": 1.5646367521367521, "grad_norm": 0.6895501613616943, "learning_rate": 0.00013370463521750932, "loss": 1.1113, "step": 8788 }, { "epoch": 1.5648148148148149, "grad_norm": 0.6592531204223633, "learning_rate": 
0.00013369145643452805, "loss": 0.9952, "step": 8789 }, { "epoch": 1.5649928774928776, "grad_norm": 0.7495190501213074, "learning_rate": 0.0001336782769914327, "loss": 1.0936, "step": 8790 }, { "epoch": 1.5651709401709402, "grad_norm": 0.7273977398872375, "learning_rate": 0.00013366509688848147, "loss": 1.1749, "step": 8791 }, { "epoch": 1.5653490028490027, "grad_norm": 0.6447354555130005, "learning_rate": 0.0001336519161259326, "loss": 0.8638, "step": 8792 }, { "epoch": 1.5655270655270654, "grad_norm": 0.6572020053863525, "learning_rate": 0.00013363873470404432, "loss": 0.8005, "step": 8793 }, { "epoch": 1.5657051282051282, "grad_norm": 0.676418662071228, "learning_rate": 0.00013362555262307491, "loss": 0.7651, "step": 8794 }, { "epoch": 1.565883190883191, "grad_norm": 0.6886745095252991, "learning_rate": 0.0001336123698832827, "loss": 1.0765, "step": 8795 }, { "epoch": 1.5660612535612537, "grad_norm": 0.8134182095527649, "learning_rate": 0.00013359918648492584, "loss": 1.2228, "step": 8796 }, { "epoch": 1.5662393162393162, "grad_norm": 0.7210384011268616, "learning_rate": 0.00013358600242826277, "loss": 0.8247, "step": 8797 }, { "epoch": 1.5664173789173788, "grad_norm": 0.7086136341094971, "learning_rate": 0.00013357281771355175, "loss": 1.0323, "step": 8798 }, { "epoch": 1.5665954415954415, "grad_norm": 0.7419785857200623, "learning_rate": 0.0001335596323410511, "loss": 1.213, "step": 8799 }, { "epoch": 1.5667735042735043, "grad_norm": 0.6390291452407837, "learning_rate": 0.0001335464463110192, "loss": 1.0403, "step": 8800 }, { "epoch": 1.566951566951567, "grad_norm": 0.6111941337585449, "learning_rate": 0.00013353325962371434, "loss": 0.9747, "step": 8801 }, { "epoch": 1.5671296296296298, "grad_norm": 0.6792671084403992, "learning_rate": 0.00013352007227939488, "loss": 1.1179, "step": 8802 }, { "epoch": 1.5673076923076923, "grad_norm": 0.6656535863876343, "learning_rate": 0.0001335068842783193, "loss": 0.9214, "step": 8803 }, { "epoch": 1.5674857549857548, 
"grad_norm": 0.6910907626152039, "learning_rate": 0.0001334936956207459, "loss": 1.0609, "step": 8804 }, { "epoch": 1.5676638176638176, "grad_norm": 0.65049147605896, "learning_rate": 0.00013348050630693315, "loss": 0.7189, "step": 8805 }, { "epoch": 1.5678418803418803, "grad_norm": 0.6258065104484558, "learning_rate": 0.0001334673163371394, "loss": 1.0683, "step": 8806 }, { "epoch": 1.568019943019943, "grad_norm": 0.7518934607505798, "learning_rate": 0.00013345412571162305, "loss": 1.2415, "step": 8807 }, { "epoch": 1.5681980056980058, "grad_norm": 0.7395275235176086, "learning_rate": 0.00013344093443064267, "loss": 0.9153, "step": 8808 }, { "epoch": 1.5683760683760684, "grad_norm": 0.6789839267730713, "learning_rate": 0.00013342774249445663, "loss": 0.8051, "step": 8809 }, { "epoch": 1.568554131054131, "grad_norm": 0.786247193813324, "learning_rate": 0.00013341454990332342, "loss": 1.203, "step": 8810 }, { "epoch": 1.5687321937321936, "grad_norm": 0.6858161687850952, "learning_rate": 0.00013340135665750153, "loss": 0.9494, "step": 8811 }, { "epoch": 1.5689102564102564, "grad_norm": 0.7245797514915466, "learning_rate": 0.0001333881627572494, "loss": 1.0544, "step": 8812 }, { "epoch": 1.5690883190883191, "grad_norm": 0.6176164150238037, "learning_rate": 0.00013337496820282563, "loss": 0.9084, "step": 8813 }, { "epoch": 1.569266381766382, "grad_norm": 0.7342953681945801, "learning_rate": 0.00013336177299448868, "loss": 1.0006, "step": 8814 }, { "epoch": 1.5694444444444444, "grad_norm": 0.5183523297309875, "learning_rate": 0.00013334857713249708, "loss": 0.6295, "step": 8815 }, { "epoch": 1.5696225071225072, "grad_norm": 0.6664513349533081, "learning_rate": 0.00013333538061710936, "loss": 0.7569, "step": 8816 }, { "epoch": 1.5698005698005697, "grad_norm": 0.7051160931587219, "learning_rate": 0.0001333221834485841, "loss": 0.9917, "step": 8817 }, { "epoch": 1.5699786324786325, "grad_norm": 0.7888057231903076, "learning_rate": 0.0001333089856271799, "loss": 1.0337, 
"step": 8818 }, { "epoch": 1.5701566951566952, "grad_norm": 0.6796144247055054, "learning_rate": 0.00013329578715315534, "loss": 1.0915, "step": 8819 }, { "epoch": 1.570334757834758, "grad_norm": 0.7442883849143982, "learning_rate": 0.000133282588026769, "loss": 1.1695, "step": 8820 }, { "epoch": 1.5705128205128205, "grad_norm": 0.6164735555648804, "learning_rate": 0.00013326938824827946, "loss": 1.0143, "step": 8821 }, { "epoch": 1.5706908831908832, "grad_norm": 0.6526502966880798, "learning_rate": 0.00013325618781794539, "loss": 0.8402, "step": 8822 }, { "epoch": 1.5708689458689458, "grad_norm": 0.6376087069511414, "learning_rate": 0.00013324298673602535, "loss": 0.7582, "step": 8823 }, { "epoch": 1.5710470085470085, "grad_norm": 0.6888708472251892, "learning_rate": 0.00013322978500277807, "loss": 0.997, "step": 8824 }, { "epoch": 1.5712250712250713, "grad_norm": 0.553656280040741, "learning_rate": 0.0001332165826184622, "loss": 0.6917, "step": 8825 }, { "epoch": 1.571403133903134, "grad_norm": 0.643285870552063, "learning_rate": 0.0001332033795833364, "loss": 0.8689, "step": 8826 }, { "epoch": 1.5715811965811965, "grad_norm": 0.6210280060768127, "learning_rate": 0.00013319017589765933, "loss": 0.9047, "step": 8827 }, { "epoch": 1.5717592592592593, "grad_norm": 0.7612366676330566, "learning_rate": 0.0001331769715616897, "loss": 0.9818, "step": 8828 }, { "epoch": 1.5719373219373218, "grad_norm": 0.5970702171325684, "learning_rate": 0.00013316376657568628, "loss": 0.82, "step": 8829 }, { "epoch": 1.5721153846153846, "grad_norm": 0.7182583808898926, "learning_rate": 0.0001331505609399077, "loss": 1.0633, "step": 8830 }, { "epoch": 1.5722934472934473, "grad_norm": 0.7230739593505859, "learning_rate": 0.00013313735465461278, "loss": 0.977, "step": 8831 }, { "epoch": 1.57247150997151, "grad_norm": 0.6752985119819641, "learning_rate": 0.00013312414772006018, "loss": 0.9666, "step": 8832 }, { "epoch": 1.5726495726495726, "grad_norm": 0.7724275588989258, "learning_rate": 
0.00013311094013650877, "loss": 1.148, "step": 8833 }, { "epoch": 1.5728276353276354, "grad_norm": 0.7216386198997498, "learning_rate": 0.00013309773190421724, "loss": 0.9935, "step": 8834 }, { "epoch": 1.573005698005698, "grad_norm": 0.6422320008277893, "learning_rate": 0.0001330845230234444, "loss": 0.9383, "step": 8835 }, { "epoch": 1.5731837606837606, "grad_norm": 0.669538140296936, "learning_rate": 0.00013307131349444906, "loss": 1.0866, "step": 8836 }, { "epoch": 1.5733618233618234, "grad_norm": 0.6994584798812866, "learning_rate": 0.00013305810331749003, "loss": 0.7882, "step": 8837 }, { "epoch": 1.5735398860398861, "grad_norm": 0.8094269633293152, "learning_rate": 0.00013304489249282617, "loss": 1.2316, "step": 8838 }, { "epoch": 1.5737179487179487, "grad_norm": 0.7180120348930359, "learning_rate": 0.00013303168102071625, "loss": 0.9795, "step": 8839 }, { "epoch": 1.5738960113960114, "grad_norm": 0.6191438436508179, "learning_rate": 0.00013301846890141918, "loss": 0.8957, "step": 8840 }, { "epoch": 1.574074074074074, "grad_norm": 0.671094536781311, "learning_rate": 0.00013300525613519382, "loss": 1.059, "step": 8841 }, { "epoch": 1.5742521367521367, "grad_norm": 0.8062624931335449, "learning_rate": 0.000132992042722299, "loss": 0.9782, "step": 8842 }, { "epoch": 1.5744301994301995, "grad_norm": 0.6674807667732239, "learning_rate": 0.00013297882866299362, "loss": 0.7765, "step": 8843 }, { "epoch": 1.5746082621082622, "grad_norm": 0.6369131803512573, "learning_rate": 0.00013296561395753664, "loss": 0.97, "step": 8844 }, { "epoch": 1.5747863247863247, "grad_norm": 0.7913636565208435, "learning_rate": 0.00013295239860618691, "loss": 1.0458, "step": 8845 }, { "epoch": 1.5749643874643875, "grad_norm": 0.6722261905670166, "learning_rate": 0.0001329391826092034, "loss": 1.1118, "step": 8846 }, { "epoch": 1.57514245014245, "grad_norm": 0.6936299800872803, "learning_rate": 0.00013292596596684502, "loss": 1.009, "step": 8847 }, { "epoch": 1.5753205128205128, 
"grad_norm": 0.7009961009025574, "learning_rate": 0.00013291274867937073, "loss": 0.9904, "step": 8848 }, { "epoch": 1.5754985754985755, "grad_norm": 0.6900732517242432, "learning_rate": 0.0001328995307470395, "loss": 1.0488, "step": 8849 }, { "epoch": 1.5756766381766383, "grad_norm": 0.6389018297195435, "learning_rate": 0.00013288631217011032, "loss": 0.9444, "step": 8850 }, { "epoch": 1.5758547008547008, "grad_norm": 0.6370900869369507, "learning_rate": 0.00013287309294884216, "loss": 0.7465, "step": 8851 }, { "epoch": 1.5760327635327636, "grad_norm": 0.6463848948478699, "learning_rate": 0.00013285987308349405, "loss": 0.896, "step": 8852 }, { "epoch": 1.576210826210826, "grad_norm": 0.6022449731826782, "learning_rate": 0.00013284665257432495, "loss": 0.8822, "step": 8853 }, { "epoch": 1.5763888888888888, "grad_norm": 0.768189013004303, "learning_rate": 0.00013283343142159396, "loss": 0.9862, "step": 8854 }, { "epoch": 1.5765669515669516, "grad_norm": 0.6642358303070068, "learning_rate": 0.00013282020962556007, "loss": 1.0713, "step": 8855 }, { "epoch": 1.5767450142450143, "grad_norm": 0.6883034706115723, "learning_rate": 0.00013280698718648234, "loss": 1.0351, "step": 8856 }, { "epoch": 1.5769230769230769, "grad_norm": 0.602808952331543, "learning_rate": 0.00013279376410461988, "loss": 0.7615, "step": 8857 }, { "epoch": 1.5771011396011396, "grad_norm": 0.5968614220619202, "learning_rate": 0.0001327805403802317, "loss": 0.9443, "step": 8858 }, { "epoch": 1.5772792022792022, "grad_norm": 0.7314837574958801, "learning_rate": 0.00013276731601357696, "loss": 0.8784, "step": 8859 }, { "epoch": 1.577457264957265, "grad_norm": 0.619754433631897, "learning_rate": 0.0001327540910049147, "loss": 0.954, "step": 8860 }, { "epoch": 1.5776353276353277, "grad_norm": 0.7195139527320862, "learning_rate": 0.0001327408653545041, "loss": 1.0227, "step": 8861 }, { "epoch": 1.5778133903133904, "grad_norm": 0.6796214580535889, "learning_rate": 0.0001327276390626042, "loss": 1.0593, 
"step": 8862 }, { "epoch": 1.577991452991453, "grad_norm": 0.6576255559921265, "learning_rate": 0.00013271441212947427, "loss": 0.7921, "step": 8863 }, { "epoch": 1.5781695156695157, "grad_norm": 0.7222092151641846, "learning_rate": 0.00013270118455537336, "loss": 1.0545, "step": 8864 }, { "epoch": 1.5783475783475782, "grad_norm": 0.7159737348556519, "learning_rate": 0.00013268795634056066, "loss": 0.9664, "step": 8865 }, { "epoch": 1.578525641025641, "grad_norm": 0.7120481133460999, "learning_rate": 0.00013267472748529536, "loss": 1.0148, "step": 8866 }, { "epoch": 1.5787037037037037, "grad_norm": 0.7353253364562988, "learning_rate": 0.00013266149798983666, "loss": 0.9288, "step": 8867 }, { "epoch": 1.5788817663817665, "grad_norm": 0.6652441620826721, "learning_rate": 0.00013264826785444375, "loss": 0.8246, "step": 8868 }, { "epoch": 1.5790598290598292, "grad_norm": 0.7254189252853394, "learning_rate": 0.00013263503707937584, "loss": 0.9892, "step": 8869 }, { "epoch": 1.5792378917378918, "grad_norm": 0.6305747032165527, "learning_rate": 0.00013262180566489223, "loss": 0.8931, "step": 8870 }, { "epoch": 1.5794159544159543, "grad_norm": 0.6560617089271545, "learning_rate": 0.00013260857361125205, "loss": 0.9245, "step": 8871 }, { "epoch": 1.579594017094017, "grad_norm": 0.7304151654243469, "learning_rate": 0.00013259534091871462, "loss": 1.009, "step": 8872 }, { "epoch": 1.5797720797720798, "grad_norm": 0.782636821269989, "learning_rate": 0.00013258210758753918, "loss": 1.1123, "step": 8873 }, { "epoch": 1.5799501424501425, "grad_norm": 0.6992011070251465, "learning_rate": 0.00013256887361798504, "loss": 1.099, "step": 8874 }, { "epoch": 1.5801282051282053, "grad_norm": 0.7159731984138489, "learning_rate": 0.00013255563901031148, "loss": 1.0257, "step": 8875 }, { "epoch": 1.5803062678062678, "grad_norm": 0.6055454611778259, "learning_rate": 0.0001325424037647778, "loss": 0.9199, "step": 8876 }, { "epoch": 1.5804843304843303, "grad_norm": 0.6838310360908508, 
"learning_rate": 0.00013252916788164334, "loss": 0.8644, "step": 8877 }, { "epoch": 1.580662393162393, "grad_norm": 0.7067445516586304, "learning_rate": 0.00013251593136116738, "loss": 1.0285, "step": 8878 }, { "epoch": 1.5808404558404558, "grad_norm": 0.7021774649620056, "learning_rate": 0.00013250269420360928, "loss": 1.1263, "step": 8879 }, { "epoch": 1.5810185185185186, "grad_norm": 0.6586757302284241, "learning_rate": 0.00013248945640922843, "loss": 0.906, "step": 8880 }, { "epoch": 1.5811965811965814, "grad_norm": 0.6673910021781921, "learning_rate": 0.00013247621797828418, "loss": 1.0652, "step": 8881 }, { "epoch": 1.5813746438746439, "grad_norm": 0.6763964295387268, "learning_rate": 0.00013246297891103588, "loss": 1.0227, "step": 8882 }, { "epoch": 1.5815527065527064, "grad_norm": 0.6536892056465149, "learning_rate": 0.00013244973920774298, "loss": 0.9026, "step": 8883 }, { "epoch": 1.5817307692307692, "grad_norm": 0.8010411858558655, "learning_rate": 0.0001324364988686648, "loss": 1.1167, "step": 8884 }, { "epoch": 1.581908831908832, "grad_norm": 0.8159251809120178, "learning_rate": 0.00013242325789406082, "loss": 1.233, "step": 8885 }, { "epoch": 1.5820868945868947, "grad_norm": 0.6487745046615601, "learning_rate": 0.00013241001628419048, "loss": 0.9888, "step": 8886 }, { "epoch": 1.5822649572649574, "grad_norm": 0.6750285029411316, "learning_rate": 0.00013239677403931318, "loss": 0.8874, "step": 8887 }, { "epoch": 1.58244301994302, "grad_norm": 0.7164602875709534, "learning_rate": 0.0001323835311596884, "loss": 1.2029, "step": 8888 }, { "epoch": 1.5826210826210825, "grad_norm": 0.6081351041793823, "learning_rate": 0.00013237028764557558, "loss": 0.9593, "step": 8889 }, { "epoch": 1.5827991452991452, "grad_norm": 0.7235409021377563, "learning_rate": 0.00013235704349723424, "loss": 1.5324, "step": 8890 }, { "epoch": 1.582977207977208, "grad_norm": 0.6658480763435364, "learning_rate": 0.0001323437987149238, "loss": 0.9756, "step": 8891 }, { "epoch": 
1.5831552706552707, "grad_norm": 0.7924265265464783, "learning_rate": 0.00013233055329890387, "loss": 0.9329, "step": 8892 }, { "epoch": 1.5833333333333335, "grad_norm": 0.6262093186378479, "learning_rate": 0.0001323173072494339, "loss": 0.8288, "step": 8893 }, { "epoch": 1.583511396011396, "grad_norm": 0.6851989030838013, "learning_rate": 0.0001323040605667734, "loss": 0.9822, "step": 8894 }, { "epoch": 1.5836894586894585, "grad_norm": 0.6963728666305542, "learning_rate": 0.00013229081325118194, "loss": 1.0416, "step": 8895 }, { "epoch": 1.5838675213675213, "grad_norm": 0.6017457842826843, "learning_rate": 0.0001322775653029191, "loss": 0.8123, "step": 8896 }, { "epoch": 1.584045584045584, "grad_norm": 0.7396472096443176, "learning_rate": 0.0001322643167222444, "loss": 1.0339, "step": 8897 }, { "epoch": 1.5842236467236468, "grad_norm": 0.6360299587249756, "learning_rate": 0.00013225106750941744, "loss": 0.9463, "step": 8898 }, { "epoch": 1.5844017094017095, "grad_norm": 0.6297624111175537, "learning_rate": 0.00013223781766469783, "loss": 0.9921, "step": 8899 }, { "epoch": 1.584579772079772, "grad_norm": 0.7722037434577942, "learning_rate": 0.0001322245671883451, "loss": 0.8394, "step": 8900 }, { "epoch": 1.5847578347578346, "grad_norm": 0.677364706993103, "learning_rate": 0.00013221131608061895, "loss": 1.0954, "step": 8901 }, { "epoch": 1.5849358974358974, "grad_norm": 0.6954908967018127, "learning_rate": 0.00013219806434177899, "loss": 1.0637, "step": 8902 }, { "epoch": 1.58511396011396, "grad_norm": 0.7079192996025085, "learning_rate": 0.00013218481197208484, "loss": 1.039, "step": 8903 }, { "epoch": 1.5852920227920229, "grad_norm": 0.7070451378822327, "learning_rate": 0.00013217155897179611, "loss": 1.0025, "step": 8904 }, { "epoch": 1.5854700854700856, "grad_norm": 0.6940776705741882, "learning_rate": 0.00013215830534117257, "loss": 0.8039, "step": 8905 }, { "epoch": 1.5856481481481481, "grad_norm": 0.6545892953872681, "learning_rate": 0.00013214505108047382, 
"loss": 0.9347, "step": 8906 }, { "epoch": 1.5858262108262107, "grad_norm": 0.6769635081291199, "learning_rate": 0.00013213179618995957, "loss": 1.0321, "step": 8907 }, { "epoch": 1.5860042735042734, "grad_norm": 0.6505448222160339, "learning_rate": 0.00013211854066988953, "loss": 1.0558, "step": 8908 }, { "epoch": 1.5861823361823362, "grad_norm": 0.6764090061187744, "learning_rate": 0.00013210528452052336, "loss": 0.8407, "step": 8909 }, { "epoch": 1.586360398860399, "grad_norm": 0.6454851627349854, "learning_rate": 0.00013209202774212088, "loss": 0.7439, "step": 8910 }, { "epoch": 1.5865384615384617, "grad_norm": 0.6911695599555969, "learning_rate": 0.00013207877033494177, "loss": 0.9625, "step": 8911 }, { "epoch": 1.5867165242165242, "grad_norm": 0.7405226826667786, "learning_rate": 0.0001320655122992458, "loss": 1.054, "step": 8912 }, { "epoch": 1.5868945868945867, "grad_norm": 0.7362869381904602, "learning_rate": 0.00013205225363529274, "loss": 1.0516, "step": 8913 }, { "epoch": 1.5870726495726495, "grad_norm": 0.6923766136169434, "learning_rate": 0.0001320389943433423, "loss": 1.2323, "step": 8914 }, { "epoch": 1.5872507122507122, "grad_norm": 0.7980395555496216, "learning_rate": 0.00013202573442365435, "loss": 1.0229, "step": 8915 }, { "epoch": 1.587428774928775, "grad_norm": 0.7211610078811646, "learning_rate": 0.00013201247387648868, "loss": 1.0666, "step": 8916 }, { "epoch": 1.5876068376068377, "grad_norm": 0.6728795766830444, "learning_rate": 0.00013199921270210506, "loss": 1.0322, "step": 8917 }, { "epoch": 1.5877849002849003, "grad_norm": 0.6226436495780945, "learning_rate": 0.00013198595090076337, "loss": 1.0517, "step": 8918 }, { "epoch": 1.5879629629629628, "grad_norm": 0.6396511197090149, "learning_rate": 0.0001319726884727234, "loss": 0.8662, "step": 8919 }, { "epoch": 1.5881410256410255, "grad_norm": 0.5664374828338623, "learning_rate": 0.00013195942541824497, "loss": 0.6601, "step": 8920 }, { "epoch": 1.5883190883190883, "grad_norm": 
0.6556946039199829, "learning_rate": 0.00013194616173758806, "loss": 0.9662, "step": 8921 }, { "epoch": 1.588497150997151, "grad_norm": 0.7332060933113098, "learning_rate": 0.00013193289743101245, "loss": 0.7687, "step": 8922 }, { "epoch": 1.5886752136752138, "grad_norm": 0.6103306412696838, "learning_rate": 0.00013191963249877805, "loss": 0.8329, "step": 8923 }, { "epoch": 1.5888532763532763, "grad_norm": 0.63165283203125, "learning_rate": 0.00013190636694114475, "loss": 0.8336, "step": 8924 }, { "epoch": 1.589031339031339, "grad_norm": 0.6955820322036743, "learning_rate": 0.00013189310075837246, "loss": 1.0457, "step": 8925 }, { "epoch": 1.5892094017094016, "grad_norm": 0.6911605596542358, "learning_rate": 0.00013187983395072114, "loss": 0.9389, "step": 8926 }, { "epoch": 1.5893874643874644, "grad_norm": 0.6493414640426636, "learning_rate": 0.00013186656651845068, "loss": 0.9821, "step": 8927 }, { "epoch": 1.5895655270655271, "grad_norm": 0.6168226599693298, "learning_rate": 0.00013185329846182107, "loss": 1.0259, "step": 8928 }, { "epoch": 1.5897435897435899, "grad_norm": 0.6460188627243042, "learning_rate": 0.0001318400297810922, "loss": 0.9836, "step": 8929 }, { "epoch": 1.5899216524216524, "grad_norm": 0.6630695462226868, "learning_rate": 0.0001318267604765241, "loss": 0.8936, "step": 8930 }, { "epoch": 1.5900997150997151, "grad_norm": 0.6308651566505432, "learning_rate": 0.00013181349054837676, "loss": 0.9583, "step": 8931 }, { "epoch": 1.5902777777777777, "grad_norm": 0.6508499979972839, "learning_rate": 0.00013180021999691018, "loss": 0.7647, "step": 8932 }, { "epoch": 1.5904558404558404, "grad_norm": 0.6625795960426331, "learning_rate": 0.00013178694882238432, "loss": 1.0329, "step": 8933 }, { "epoch": 1.5906339031339032, "grad_norm": 0.6721987128257751, "learning_rate": 0.00013177367702505924, "loss": 0.9377, "step": 8934 }, { "epoch": 1.590811965811966, "grad_norm": 0.7295519709587097, "learning_rate": 0.00013176040460519497, "loss": 0.9396, "step": 
8935 }, { "epoch": 1.5909900284900285, "grad_norm": 0.6673944592475891, "learning_rate": 0.0001317471315630515, "loss": 1.0284, "step": 8936 }, { "epoch": 1.5911680911680912, "grad_norm": 0.6858960390090942, "learning_rate": 0.00013173385789888898, "loss": 1.2022, "step": 8937 }, { "epoch": 1.5913461538461537, "grad_norm": 0.5836796164512634, "learning_rate": 0.00013172058361296743, "loss": 1.0078, "step": 8938 }, { "epoch": 1.5915242165242165, "grad_norm": 0.7732513546943665, "learning_rate": 0.00013170730870554694, "loss": 1.0912, "step": 8939 }, { "epoch": 1.5917022792022792, "grad_norm": 0.7095892429351807, "learning_rate": 0.0001316940331768876, "loss": 1.0506, "step": 8940 }, { "epoch": 1.591880341880342, "grad_norm": 0.757534384727478, "learning_rate": 0.00013168075702724952, "loss": 1.036, "step": 8941 }, { "epoch": 1.5920584045584045, "grad_norm": 0.6719361543655396, "learning_rate": 0.00013166748025689282, "loss": 0.9406, "step": 8942 }, { "epoch": 1.5922364672364673, "grad_norm": 0.6955735087394714, "learning_rate": 0.00013165420286607763, "loss": 0.9325, "step": 8943 }, { "epoch": 1.5924145299145298, "grad_norm": 0.6810322999954224, "learning_rate": 0.00013164092485506407, "loss": 1.0402, "step": 8944 }, { "epoch": 1.5925925925925926, "grad_norm": 0.6346224546432495, "learning_rate": 0.00013162764622411233, "loss": 0.9725, "step": 8945 }, { "epoch": 1.5927706552706553, "grad_norm": 0.728705883026123, "learning_rate": 0.00013161436697348258, "loss": 0.9665, "step": 8946 }, { "epoch": 1.592948717948718, "grad_norm": 0.6838595271110535, "learning_rate": 0.00013160108710343494, "loss": 0.9771, "step": 8947 }, { "epoch": 1.5931267806267806, "grad_norm": 0.7052602767944336, "learning_rate": 0.00013158780661422966, "loss": 0.8819, "step": 8948 }, { "epoch": 1.5933048433048433, "grad_norm": 0.7237630486488342, "learning_rate": 0.00013157452550612697, "loss": 1.0609, "step": 8949 }, { "epoch": 1.5934829059829059, "grad_norm": 0.6554936766624451, "learning_rate": 
0.00013156124377938699, "loss": 0.8592, "step": 8950 }, { "epoch": 1.5936609686609686, "grad_norm": 0.6125665307044983, "learning_rate": 0.00013154796143427, "loss": 0.8399, "step": 8951 }, { "epoch": 1.5938390313390314, "grad_norm": 0.6930897235870361, "learning_rate": 0.0001315346784710363, "loss": 0.9965, "step": 8952 }, { "epoch": 1.5940170940170941, "grad_norm": 0.7808064818382263, "learning_rate": 0.00013152139488994605, "loss": 1.0527, "step": 8953 }, { "epoch": 1.5941951566951567, "grad_norm": 0.6125522255897522, "learning_rate": 0.0001315081106912595, "loss": 1.1159, "step": 8954 }, { "epoch": 1.5943732193732194, "grad_norm": 0.5863428711891174, "learning_rate": 0.00013149482587523703, "loss": 0.84, "step": 8955 }, { "epoch": 1.594551282051282, "grad_norm": 0.7170202732086182, "learning_rate": 0.00013148154044213882, "loss": 1.0821, "step": 8956 }, { "epoch": 1.5947293447293447, "grad_norm": 0.6409463882446289, "learning_rate": 0.00013146825439222528, "loss": 1.0097, "step": 8957 }, { "epoch": 1.5949074074074074, "grad_norm": 0.7037690281867981, "learning_rate": 0.00013145496772575666, "loss": 1.1511, "step": 8958 }, { "epoch": 1.5950854700854702, "grad_norm": 0.6400953531265259, "learning_rate": 0.00013144168044299326, "loss": 1.0809, "step": 8959 }, { "epoch": 1.5952635327635327, "grad_norm": 0.6129940152168274, "learning_rate": 0.00013142839254419545, "loss": 0.8481, "step": 8960 }, { "epoch": 1.5954415954415955, "grad_norm": 0.7452271580696106, "learning_rate": 0.00013141510402962358, "loss": 1.0649, "step": 8961 }, { "epoch": 1.595619658119658, "grad_norm": 0.7407623529434204, "learning_rate": 0.000131401814899538, "loss": 0.9084, "step": 8962 }, { "epoch": 1.5957977207977208, "grad_norm": 0.7103050947189331, "learning_rate": 0.0001313885251541991, "loss": 0.946, "step": 8963 }, { "epoch": 1.5959757834757835, "grad_norm": 0.5566636323928833, "learning_rate": 0.00013137523479386727, "loss": 0.6781, "step": 8964 }, { "epoch": 1.5961538461538463, 
"grad_norm": 0.8137457966804504, "learning_rate": 0.00013136194381880288, "loss": 0.9273, "step": 8965 }, { "epoch": 1.5963319088319088, "grad_norm": 0.779330849647522, "learning_rate": 0.0001313486522292663, "loss": 1.1105, "step": 8966 }, { "epoch": 1.5965099715099715, "grad_norm": 0.6807126998901367, "learning_rate": 0.00013133536002551808, "loss": 1.0728, "step": 8967 }, { "epoch": 1.596688034188034, "grad_norm": 0.7371507287025452, "learning_rate": 0.00013132206720781853, "loss": 0.979, "step": 8968 }, { "epoch": 1.5968660968660968, "grad_norm": 0.6811465620994568, "learning_rate": 0.00013130877377642814, "loss": 0.9821, "step": 8969 }, { "epoch": 1.5970441595441596, "grad_norm": 0.6732743978500366, "learning_rate": 0.00013129547973160738, "loss": 0.8511, "step": 8970 }, { "epoch": 1.5972222222222223, "grad_norm": 0.594901978969574, "learning_rate": 0.0001312821850736167, "loss": 0.9674, "step": 8971 }, { "epoch": 1.5974002849002849, "grad_norm": 0.6743764281272888, "learning_rate": 0.00013126888980271657, "loss": 0.9268, "step": 8972 }, { "epoch": 1.5975783475783476, "grad_norm": 0.7532161474227905, "learning_rate": 0.00013125559391916752, "loss": 1.0474, "step": 8973 }, { "epoch": 1.5977564102564101, "grad_norm": 0.6331499814987183, "learning_rate": 0.00013124229742323, "loss": 1.05, "step": 8974 }, { "epoch": 1.5979344729344729, "grad_norm": 0.7418690323829651, "learning_rate": 0.0001312290003151646, "loss": 0.9475, "step": 8975 }, { "epoch": 1.5981125356125356, "grad_norm": 0.6511179804801941, "learning_rate": 0.0001312157025952318, "loss": 0.9206, "step": 8976 }, { "epoch": 1.5982905982905984, "grad_norm": 0.6380775570869446, "learning_rate": 0.00013120240426369215, "loss": 0.9953, "step": 8977 }, { "epoch": 1.598468660968661, "grad_norm": 0.8483675122261047, "learning_rate": 0.00013118910532080623, "loss": 0.9454, "step": 8978 }, { "epoch": 1.5986467236467237, "grad_norm": 0.6700518727302551, "learning_rate": 0.00013117580576683455, "loss": 1.0413, 
"step": 8979 }, { "epoch": 1.5988247863247862, "grad_norm": 0.7750083208084106, "learning_rate": 0.00013116250560203774, "loss": 1.1868, "step": 8980 }, { "epoch": 1.599002849002849, "grad_norm": 0.7474972009658813, "learning_rate": 0.00013114920482667635, "loss": 1.0876, "step": 8981 }, { "epoch": 1.5991809116809117, "grad_norm": 0.6920070052146912, "learning_rate": 0.000131135903441011, "loss": 1.0787, "step": 8982 }, { "epoch": 1.5993589743589745, "grad_norm": 0.7572436928749084, "learning_rate": 0.00013112260144530232, "loss": 0.9798, "step": 8983 }, { "epoch": 1.5995370370370372, "grad_norm": 0.6983019709587097, "learning_rate": 0.00013110929883981088, "loss": 1.1115, "step": 8984 }, { "epoch": 1.5997150997150997, "grad_norm": 0.6352120041847229, "learning_rate": 0.0001310959956247974, "loss": 0.9962, "step": 8985 }, { "epoch": 1.5998931623931623, "grad_norm": 0.596858561038971, "learning_rate": 0.00013108269180052244, "loss": 0.8686, "step": 8986 }, { "epoch": 1.600071225071225, "grad_norm": 0.6237605214118958, "learning_rate": 0.00013106938736724672, "loss": 0.9166, "step": 8987 }, { "epoch": 1.6002492877492878, "grad_norm": 0.6818585395812988, "learning_rate": 0.0001310560823252309, "loss": 0.9993, "step": 8988 }, { "epoch": 1.6004273504273505, "grad_norm": 0.6372287273406982, "learning_rate": 0.00013104277667473564, "loss": 0.8589, "step": 8989 }, { "epoch": 1.6006054131054133, "grad_norm": 0.6057302355766296, "learning_rate": 0.0001310294704160217, "loss": 0.9325, "step": 8990 }, { "epoch": 1.6007834757834758, "grad_norm": 0.6999384164810181, "learning_rate": 0.0001310161635493497, "loss": 0.8691, "step": 8991 }, { "epoch": 1.6009615384615383, "grad_norm": 0.6182113289833069, "learning_rate": 0.00013100285607498045, "loss": 1.0271, "step": 8992 }, { "epoch": 1.601139601139601, "grad_norm": 0.6681149005889893, "learning_rate": 0.0001309895479931746, "loss": 0.989, "step": 8993 }, { "epoch": 1.6013176638176638, "grad_norm": 0.6187826991081238, 
"learning_rate": 0.00013097623930419293, "loss": 0.8051, "step": 8994 }, { "epoch": 1.6014957264957266, "grad_norm": 0.698793888092041, "learning_rate": 0.00013096293000829621, "loss": 1.0762, "step": 8995 }, { "epoch": 1.6016737891737893, "grad_norm": 0.693149745464325, "learning_rate": 0.0001309496201057452, "loss": 1.0894, "step": 8996 }, { "epoch": 1.6018518518518519, "grad_norm": 0.6664052605628967, "learning_rate": 0.00013093630959680068, "loss": 0.9835, "step": 8997 }, { "epoch": 1.6020299145299144, "grad_norm": 0.6919469833374023, "learning_rate": 0.0001309229984817234, "loss": 0.9062, "step": 8998 }, { "epoch": 1.6022079772079771, "grad_norm": 0.704781174659729, "learning_rate": 0.00013090968676077427, "loss": 0.8582, "step": 8999 }, { "epoch": 1.60238603988604, "grad_norm": 0.8055264949798584, "learning_rate": 0.000130896374434214, "loss": 0.9813, "step": 9000 }, { "epoch": 1.6025641025641026, "grad_norm": 0.6301952004432678, "learning_rate": 0.00013088306150230348, "loss": 0.7056, "step": 9001 }, { "epoch": 1.6027421652421654, "grad_norm": 0.698544442653656, "learning_rate": 0.00013086974796530347, "loss": 0.9806, "step": 9002 }, { "epoch": 1.602920227920228, "grad_norm": 0.669548511505127, "learning_rate": 0.00013085643382347491, "loss": 1.0317, "step": 9003 }, { "epoch": 1.6030982905982905, "grad_norm": 0.6404716372489929, "learning_rate": 0.00013084311907707864, "loss": 0.8885, "step": 9004 }, { "epoch": 1.6032763532763532, "grad_norm": 0.6968616843223572, "learning_rate": 0.0001308298037263755, "loss": 1.0665, "step": 9005 }, { "epoch": 1.603454415954416, "grad_norm": 0.849311113357544, "learning_rate": 0.00013081648777162644, "loss": 1.1404, "step": 9006 }, { "epoch": 1.6036324786324787, "grad_norm": 0.6603094935417175, "learning_rate": 0.00013080317121309223, "loss": 0.8341, "step": 9007 }, { "epoch": 1.6038105413105415, "grad_norm": 0.6777810454368591, "learning_rate": 0.00013078985405103394, "loss": 1.044, "step": 9008 }, { "epoch": 
1.603988603988604, "grad_norm": 0.6783546209335327, "learning_rate": 0.0001307765362857124, "loss": 1.042, "step": 9009 }, { "epoch": 1.6041666666666665, "grad_norm": 0.7251788377761841, "learning_rate": 0.00013076321791738858, "loss": 0.9004, "step": 9010 }, { "epoch": 1.6043447293447293, "grad_norm": 0.7885342240333557, "learning_rate": 0.00013074989894632338, "loss": 1.1966, "step": 9011 }, { "epoch": 1.604522792022792, "grad_norm": 0.7171013355255127, "learning_rate": 0.0001307365793727778, "loss": 1.2242, "step": 9012 }, { "epoch": 1.6047008547008548, "grad_norm": 0.6027249693870544, "learning_rate": 0.00013072325919701283, "loss": 0.917, "step": 9013 }, { "epoch": 1.6048789173789175, "grad_norm": 0.5957151055335999, "learning_rate": 0.00013070993841928936, "loss": 0.9154, "step": 9014 }, { "epoch": 1.60505698005698, "grad_norm": 0.6190659403800964, "learning_rate": 0.00013069661703986847, "loss": 0.7071, "step": 9015 }, { "epoch": 1.6052350427350426, "grad_norm": 0.6454868316650391, "learning_rate": 0.00013068329505901117, "loss": 0.8381, "step": 9016 }, { "epoch": 1.6054131054131053, "grad_norm": 0.6255491375923157, "learning_rate": 0.00013066997247697837, "loss": 0.7515, "step": 9017 }, { "epoch": 1.605591168091168, "grad_norm": 0.6214072108268738, "learning_rate": 0.0001306566492940312, "loss": 1.0101, "step": 9018 }, { "epoch": 1.6057692307692308, "grad_norm": 0.7244150638580322, "learning_rate": 0.0001306433255104307, "loss": 1.2558, "step": 9019 }, { "epoch": 1.6059472934472936, "grad_norm": 0.6162270903587341, "learning_rate": 0.00013063000112643785, "loss": 1.1009, "step": 9020 }, { "epoch": 1.6061253561253561, "grad_norm": 0.7309414744377136, "learning_rate": 0.0001306166761423138, "loss": 1.1973, "step": 9021 }, { "epoch": 1.6063034188034186, "grad_norm": 0.7150956392288208, "learning_rate": 0.00013060335055831957, "loss": 0.9136, "step": 9022 }, { "epoch": 1.6064814814814814, "grad_norm": 0.8187742829322815, "learning_rate": 0.00013059002437471623, 
"loss": 1.0524, "step": 9023 }, { "epoch": 1.6066595441595442, "grad_norm": 0.7928692698478699, "learning_rate": 0.00013057669759176493, "loss": 1.0249, "step": 9024 }, { "epoch": 1.606837606837607, "grad_norm": 0.6929279565811157, "learning_rate": 0.00013056337020972677, "loss": 1.1804, "step": 9025 }, { "epoch": 1.6070156695156697, "grad_norm": 0.6771654486656189, "learning_rate": 0.00013055004222886285, "loss": 1.0284, "step": 9026 }, { "epoch": 1.6071937321937322, "grad_norm": 0.6689024567604065, "learning_rate": 0.0001305367136494343, "loss": 1.0431, "step": 9027 }, { "epoch": 1.6073717948717947, "grad_norm": 0.71135413646698, "learning_rate": 0.0001305233844717023, "loss": 0.9692, "step": 9028 }, { "epoch": 1.6075498575498575, "grad_norm": 0.5459749698638916, "learning_rate": 0.00013051005469592796, "loss": 0.5643, "step": 9029 }, { "epoch": 1.6077279202279202, "grad_norm": 0.7225865125656128, "learning_rate": 0.00013049672432237253, "loss": 1.0954, "step": 9030 }, { "epoch": 1.607905982905983, "grad_norm": 0.6878093481063843, "learning_rate": 0.0001304833933512971, "loss": 0.894, "step": 9031 }, { "epoch": 1.6080840455840457, "grad_norm": 0.6967248320579529, "learning_rate": 0.00013047006178296288, "loss": 1.0356, "step": 9032 }, { "epoch": 1.6082621082621082, "grad_norm": 0.6404993534088135, "learning_rate": 0.00013045672961763114, "loss": 0.8528, "step": 9033 }, { "epoch": 1.6084401709401708, "grad_norm": 0.5919156074523926, "learning_rate": 0.000130443396855563, "loss": 0.7196, "step": 9034 }, { "epoch": 1.6086182336182335, "grad_norm": 0.6792302131652832, "learning_rate": 0.00013043006349701977, "loss": 0.9519, "step": 9035 }, { "epoch": 1.6087962962962963, "grad_norm": 0.6263542175292969, "learning_rate": 0.00013041672954226268, "loss": 1.0483, "step": 9036 }, { "epoch": 1.608974358974359, "grad_norm": 0.5865579843521118, "learning_rate": 0.00013040339499155294, "loss": 0.8794, "step": 9037 }, { "epoch": 1.6091524216524218, "grad_norm": 
0.8383142948150635, "learning_rate": 0.00013039005984515181, "loss": 0.8929, "step": 9038 }, { "epoch": 1.6093304843304843, "grad_norm": 0.6438691020011902, "learning_rate": 0.00013037672410332063, "loss": 0.9957, "step": 9039 }, { "epoch": 1.609508547008547, "grad_norm": 0.74748694896698, "learning_rate": 0.0001303633877663206, "loss": 0.9809, "step": 9040 }, { "epoch": 1.6096866096866096, "grad_norm": 0.6697205901145935, "learning_rate": 0.00013035005083441312, "loss": 0.9556, "step": 9041 }, { "epoch": 1.6098646723646723, "grad_norm": 0.6577828526496887, "learning_rate": 0.00013033671330785941, "loss": 0.8956, "step": 9042 }, { "epoch": 1.610042735042735, "grad_norm": 0.6423429846763611, "learning_rate": 0.0001303233751869208, "loss": 0.8467, "step": 9043 }, { "epoch": 1.6102207977207978, "grad_norm": 0.6552175879478455, "learning_rate": 0.00013031003647185867, "loss": 0.8656, "step": 9044 }, { "epoch": 1.6103988603988604, "grad_norm": 0.6755174398422241, "learning_rate": 0.00013029669716293433, "loss": 0.7836, "step": 9045 }, { "epoch": 1.6105769230769231, "grad_norm": 0.6832906007766724, "learning_rate": 0.00013028335726040914, "loss": 1.1531, "step": 9046 }, { "epoch": 1.6107549857549857, "grad_norm": 0.6498637795448303, "learning_rate": 0.00013027001676454446, "loss": 0.8637, "step": 9047 }, { "epoch": 1.6109330484330484, "grad_norm": 0.6792944073677063, "learning_rate": 0.0001302566756756017, "loss": 1.0865, "step": 9048 }, { "epoch": 1.6111111111111112, "grad_norm": 0.6801337003707886, "learning_rate": 0.00013024333399384226, "loss": 1.0738, "step": 9049 }, { "epoch": 1.611289173789174, "grad_norm": 0.675216794013977, "learning_rate": 0.0001302299917195275, "loss": 1.1074, "step": 9050 }, { "epoch": 1.6114672364672364, "grad_norm": 0.6418983340263367, "learning_rate": 0.00013021664885291885, "loss": 1.0025, "step": 9051 }, { "epoch": 1.6116452991452992, "grad_norm": 0.7778789401054382, "learning_rate": 0.0001302033053942777, "loss": 1.0847, "step": 9052 }, 
{ "epoch": 1.6118233618233617, "grad_norm": 0.7672827243804932, "learning_rate": 0.00013018996134386555, "loss": 1.0565, "step": 9053 }, { "epoch": 1.6120014245014245, "grad_norm": 0.6770617961883545, "learning_rate": 0.00013017661670194382, "loss": 0.9069, "step": 9054 }, { "epoch": 1.6121794871794872, "grad_norm": 0.7161242961883545, "learning_rate": 0.00013016327146877393, "loss": 1.1301, "step": 9055 }, { "epoch": 1.61235754985755, "grad_norm": 0.6923251152038574, "learning_rate": 0.00013014992564461746, "loss": 0.9546, "step": 9056 }, { "epoch": 1.6125356125356125, "grad_norm": 0.622953474521637, "learning_rate": 0.0001301365792297358, "loss": 0.8152, "step": 9057 }, { "epoch": 1.6127136752136753, "grad_norm": 0.7477008104324341, "learning_rate": 0.00013012323222439046, "loss": 0.8428, "step": 9058 }, { "epoch": 1.6128917378917378, "grad_norm": 0.6612883806228638, "learning_rate": 0.000130109884628843, "loss": 1.0678, "step": 9059 }, { "epoch": 1.6130698005698005, "grad_norm": 0.6406781077384949, "learning_rate": 0.00013009653644335486, "loss": 0.6792, "step": 9060 }, { "epoch": 1.6132478632478633, "grad_norm": 0.6279141902923584, "learning_rate": 0.00013008318766818763, "loss": 0.9826, "step": 9061 }, { "epoch": 1.613425925925926, "grad_norm": 0.6616412401199341, "learning_rate": 0.00013006983830360285, "loss": 1.0691, "step": 9062 }, { "epoch": 1.6136039886039886, "grad_norm": 0.6520406603813171, "learning_rate": 0.000130056488349862, "loss": 0.9487, "step": 9063 }, { "epoch": 1.6137820512820513, "grad_norm": 0.6378647089004517, "learning_rate": 0.00013004313780722672, "loss": 0.8557, "step": 9064 }, { "epoch": 1.6139601139601139, "grad_norm": 0.6547569036483765, "learning_rate": 0.00013002978667595857, "loss": 0.879, "step": 9065 }, { "epoch": 1.6141381766381766, "grad_norm": 0.7347842454910278, "learning_rate": 0.00013001643495631914, "loss": 1.0757, "step": 9066 }, { "epoch": 1.6143162393162394, "grad_norm": 0.5988406538963318, "learning_rate": 
0.00013000308264857002, "loss": 0.6754, "step": 9067 }, { "epoch": 1.614494301994302, "grad_norm": 0.6949366331100464, "learning_rate": 0.00012998972975297282, "loss": 1.1236, "step": 9068 }, { "epoch": 1.6146723646723646, "grad_norm": 0.7095484137535095, "learning_rate": 0.00012997637626978913, "loss": 1.0124, "step": 9069 }, { "epoch": 1.6148504273504274, "grad_norm": 0.6634095311164856, "learning_rate": 0.00012996302219928064, "loss": 1.2018, "step": 9070 }, { "epoch": 1.61502849002849, "grad_norm": 0.6894524693489075, "learning_rate": 0.000129949667541709, "loss": 0.9959, "step": 9071 }, { "epoch": 1.6152065527065527, "grad_norm": 0.672334611415863, "learning_rate": 0.00012993631229733582, "loss": 1.0369, "step": 9072 }, { "epoch": 1.6153846153846154, "grad_norm": 0.725759744644165, "learning_rate": 0.00012992295646642278, "loss": 1.0079, "step": 9073 }, { "epoch": 1.6155626780626782, "grad_norm": 0.7941585779190063, "learning_rate": 0.00012990960004923154, "loss": 0.9468, "step": 9074 }, { "epoch": 1.6157407407407407, "grad_norm": 0.6556950807571411, "learning_rate": 0.00012989624304602385, "loss": 0.9915, "step": 9075 }, { "epoch": 1.6159188034188035, "grad_norm": 0.7515892386436462, "learning_rate": 0.0001298828854570614, "loss": 1.0924, "step": 9076 }, { "epoch": 1.616096866096866, "grad_norm": 0.6944101452827454, "learning_rate": 0.00012986952728260586, "loss": 0.9632, "step": 9077 }, { "epoch": 1.6162749287749287, "grad_norm": 0.6286170482635498, "learning_rate": 0.000129856168522919, "loss": 1.0311, "step": 9078 }, { "epoch": 1.6164529914529915, "grad_norm": 0.8362757563591003, "learning_rate": 0.0001298428091782625, "loss": 1.1232, "step": 9079 }, { "epoch": 1.6166310541310542, "grad_norm": 0.6199851632118225, "learning_rate": 0.0001298294492488982, "loss": 0.9454, "step": 9080 }, { "epoch": 1.6168091168091168, "grad_norm": 0.7541791796684265, "learning_rate": 0.0001298160887350878, "loss": 0.9759, "step": 9081 }, { "epoch": 1.6169871794871795, 
"grad_norm": 0.6940878033638, "learning_rate": 0.00012980272763709304, "loss": 0.9258, "step": 9082 }, { "epoch": 1.617165242165242, "grad_norm": 0.6934045553207397, "learning_rate": 0.00012978936595517575, "loss": 1.0142, "step": 9083 }, { "epoch": 1.6173433048433048, "grad_norm": 0.8147503733634949, "learning_rate": 0.00012977600368959774, "loss": 0.964, "step": 9084 }, { "epoch": 1.6175213675213675, "grad_norm": 0.6583107709884644, "learning_rate": 0.00012976264084062079, "loss": 1.0315, "step": 9085 }, { "epoch": 1.6176994301994303, "grad_norm": 0.7192013263702393, "learning_rate": 0.0001297492774085067, "loss": 0.9528, "step": 9086 }, { "epoch": 1.6178774928774928, "grad_norm": 0.665888786315918, "learning_rate": 0.00012973591339351733, "loss": 1.0188, "step": 9087 }, { "epoch": 1.6180555555555556, "grad_norm": 0.7170987725257874, "learning_rate": 0.0001297225487959145, "loss": 0.8969, "step": 9088 }, { "epoch": 1.618233618233618, "grad_norm": 0.6768732070922852, "learning_rate": 0.00012970918361596007, "loss": 1.1951, "step": 9089 }, { "epoch": 1.6184116809116809, "grad_norm": 0.6640290021896362, "learning_rate": 0.00012969581785391592, "loss": 0.9649, "step": 9090 }, { "epoch": 1.6185897435897436, "grad_norm": 0.6200813055038452, "learning_rate": 0.00012968245151004392, "loss": 0.9446, "step": 9091 }, { "epoch": 1.6187678062678064, "grad_norm": 0.6815837621688843, "learning_rate": 0.0001296690845846059, "loss": 1.0506, "step": 9092 }, { "epoch": 1.618945868945869, "grad_norm": 0.7252637147903442, "learning_rate": 0.0001296557170778638, "loss": 1.1977, "step": 9093 }, { "epoch": 1.6191239316239316, "grad_norm": 0.5609107613563538, "learning_rate": 0.00012964234899007955, "loss": 0.8009, "step": 9094 }, { "epoch": 1.6193019943019942, "grad_norm": 0.6539437770843506, "learning_rate": 0.00012962898032151506, "loss": 0.8482, "step": 9095 }, { "epoch": 1.619480056980057, "grad_norm": 0.6993300914764404, "learning_rate": 0.0001296156110724322, "loss": 1.0725, 
"step": 9096 }, { "epoch": 1.6196581196581197, "grad_norm": 0.6768273711204529, "learning_rate": 0.000129602241243093, "loss": 0.9247, "step": 9097 }, { "epoch": 1.6198361823361824, "grad_norm": 0.6896265745162964, "learning_rate": 0.00012958887083375939, "loss": 0.9526, "step": 9098 }, { "epoch": 1.6200142450142452, "grad_norm": 0.7475146651268005, "learning_rate": 0.00012957549984469327, "loss": 0.8302, "step": 9099 }, { "epoch": 1.6201923076923077, "grad_norm": 0.6622769236564636, "learning_rate": 0.00012956212827615674, "loss": 0.9505, "step": 9100 }, { "epoch": 1.6203703703703702, "grad_norm": 0.6938058137893677, "learning_rate": 0.00012954875612841167, "loss": 0.9757, "step": 9101 }, { "epoch": 1.620548433048433, "grad_norm": 0.7453510761260986, "learning_rate": 0.0001295353834017201, "loss": 1.0919, "step": 9102 }, { "epoch": 1.6207264957264957, "grad_norm": 0.7868932485580444, "learning_rate": 0.0001295220100963441, "loss": 0.9265, "step": 9103 }, { "epoch": 1.6209045584045585, "grad_norm": 0.6779825091362, "learning_rate": 0.00012950863621254558, "loss": 0.98, "step": 9104 }, { "epoch": 1.6210826210826212, "grad_norm": 0.6825897097587585, "learning_rate": 0.00012949526175058662, "loss": 0.9218, "step": 9105 }, { "epoch": 1.6212606837606838, "grad_norm": 0.6686047911643982, "learning_rate": 0.00012948188671072934, "loss": 0.9546, "step": 9106 }, { "epoch": 1.6214387464387463, "grad_norm": 0.7456090450286865, "learning_rate": 0.0001294685110932357, "loss": 1.0819, "step": 9107 }, { "epoch": 1.621616809116809, "grad_norm": 0.7111441493034363, "learning_rate": 0.0001294551348983678, "loss": 0.9916, "step": 9108 }, { "epoch": 1.6217948717948718, "grad_norm": 0.6534699201583862, "learning_rate": 0.00012944175812638773, "loss": 1.0374, "step": 9109 }, { "epoch": 1.6219729344729346, "grad_norm": 0.6046397089958191, "learning_rate": 0.00012942838077755758, "loss": 0.7922, "step": 9110 }, { "epoch": 1.6221509971509973, "grad_norm": 0.7736679911613464, 
"learning_rate": 0.00012941500285213942, "loss": 1.0056, "step": 9111 }, { "epoch": 1.6223290598290598, "grad_norm": 0.6850929260253906, "learning_rate": 0.00012940162435039538, "loss": 0.9538, "step": 9112 }, { "epoch": 1.6225071225071224, "grad_norm": 0.6305751800537109, "learning_rate": 0.00012938824527258756, "loss": 0.9341, "step": 9113 }, { "epoch": 1.6226851851851851, "grad_norm": 0.6740923523902893, "learning_rate": 0.0001293748656189782, "loss": 1.0037, "step": 9114 }, { "epoch": 1.6228632478632479, "grad_norm": 0.6579762101173401, "learning_rate": 0.00012936148538982928, "loss": 1.0022, "step": 9115 }, { "epoch": 1.6230413105413106, "grad_norm": 0.6500434279441833, "learning_rate": 0.0001293481045854031, "loss": 0.8589, "step": 9116 }, { "epoch": 1.6232193732193734, "grad_norm": 0.7825912237167358, "learning_rate": 0.00012933472320596177, "loss": 1.0345, "step": 9117 }, { "epoch": 1.623397435897436, "grad_norm": 0.8341414332389832, "learning_rate": 0.0001293213412517675, "loss": 1.0314, "step": 9118 }, { "epoch": 1.6235754985754984, "grad_norm": 0.63664311170578, "learning_rate": 0.00012930795872308242, "loss": 0.819, "step": 9119 }, { "epoch": 1.6237535612535612, "grad_norm": 0.6800840497016907, "learning_rate": 0.00012929457562016878, "loss": 0.95, "step": 9120 }, { "epoch": 1.623931623931624, "grad_norm": 0.754165530204773, "learning_rate": 0.0001292811919432888, "loss": 1.1193, "step": 9121 }, { "epoch": 1.6241096866096867, "grad_norm": 0.678871750831604, "learning_rate": 0.00012926780769270465, "loss": 0.9015, "step": 9122 }, { "epoch": 1.6242877492877494, "grad_norm": 0.6642945408821106, "learning_rate": 0.00012925442286867866, "loss": 0.9095, "step": 9123 }, { "epoch": 1.624465811965812, "grad_norm": 0.6089697480201721, "learning_rate": 0.000129241037471473, "loss": 0.8994, "step": 9124 }, { "epoch": 1.6246438746438745, "grad_norm": 0.7320881485939026, "learning_rate": 0.00012922765150134995, "loss": 1.0518, "step": 9125 }, { "epoch": 
1.6248219373219372, "grad_norm": 0.7308032512664795, "learning_rate": 0.0001292142649585718, "loss": 1.0557, "step": 9126 }, { "epoch": 1.625, "grad_norm": 0.6896602511405945, "learning_rate": 0.0001292008778434008, "loss": 1.145, "step": 9127 }, { "epoch": 1.6251780626780628, "grad_norm": 0.6112532615661621, "learning_rate": 0.00012918749015609926, "loss": 0.9611, "step": 9128 }, { "epoch": 1.6253561253561255, "grad_norm": 0.6856057643890381, "learning_rate": 0.00012917410189692947, "loss": 1.0124, "step": 9129 }, { "epoch": 1.625534188034188, "grad_norm": 0.699252188205719, "learning_rate": 0.00012916071306615378, "loss": 0.8854, "step": 9130 }, { "epoch": 1.6257122507122506, "grad_norm": 0.6306683421134949, "learning_rate": 0.0001291473236640345, "loss": 1.0722, "step": 9131 }, { "epoch": 1.6258903133903133, "grad_norm": 0.6358118653297424, "learning_rate": 0.00012913393369083393, "loss": 0.889, "step": 9132 }, { "epoch": 1.626068376068376, "grad_norm": 0.6953601837158203, "learning_rate": 0.00012912054314681445, "loss": 1.0168, "step": 9133 }, { "epoch": 1.6262464387464388, "grad_norm": 0.6742331385612488, "learning_rate": 0.00012910715203223844, "loss": 0.8152, "step": 9134 }, { "epoch": 1.6264245014245016, "grad_norm": 0.5872861742973328, "learning_rate": 0.00012909376034736823, "loss": 0.8702, "step": 9135 }, { "epoch": 1.626602564102564, "grad_norm": 0.7580631971359253, "learning_rate": 0.00012908036809246623, "loss": 0.994, "step": 9136 }, { "epoch": 1.6267806267806266, "grad_norm": 0.7544930577278137, "learning_rate": 0.00012906697526779488, "loss": 0.7475, "step": 9137 }, { "epoch": 1.6269586894586894, "grad_norm": 0.6850766539573669, "learning_rate": 0.00012905358187361647, "loss": 1.0943, "step": 9138 }, { "epoch": 1.6271367521367521, "grad_norm": 0.6821565628051758, "learning_rate": 0.0001290401879101935, "loss": 1.2928, "step": 9139 }, { "epoch": 1.6273148148148149, "grad_norm": 0.6961034536361694, "learning_rate": 0.00012902679337778835, "loss": 
0.8694, "step": 9140 }, { "epoch": 1.6274928774928776, "grad_norm": 0.7159550786018372, "learning_rate": 0.00012901339827666353, "loss": 0.8827, "step": 9141 }, { "epoch": 1.6276709401709402, "grad_norm": 0.7491081953048706, "learning_rate": 0.0001290000026070814, "loss": 0.8159, "step": 9142 }, { "epoch": 1.6278490028490027, "grad_norm": 0.7107849717140198, "learning_rate": 0.00012898660636930447, "loss": 1.0625, "step": 9143 }, { "epoch": 1.6280270655270654, "grad_norm": 0.7227210998535156, "learning_rate": 0.0001289732095635952, "loss": 0.9744, "step": 9144 }, { "epoch": 1.6282051282051282, "grad_norm": 0.7141995429992676, "learning_rate": 0.00012895981219021607, "loss": 0.9836, "step": 9145 }, { "epoch": 1.628383190883191, "grad_norm": 0.6445552706718445, "learning_rate": 0.00012894641424942958, "loss": 1.0183, "step": 9146 }, { "epoch": 1.6285612535612537, "grad_norm": 0.698783278465271, "learning_rate": 0.00012893301574149824, "loss": 0.8392, "step": 9147 }, { "epoch": 1.6287393162393162, "grad_norm": 0.6529116034507751, "learning_rate": 0.00012891961666668458, "loss": 0.9317, "step": 9148 }, { "epoch": 1.6289173789173788, "grad_norm": 0.7780548930168152, "learning_rate": 0.0001289062170252511, "loss": 1.2406, "step": 9149 }, { "epoch": 1.6290954415954415, "grad_norm": 0.6500990986824036, "learning_rate": 0.0001288928168174603, "loss": 1.0381, "step": 9150 }, { "epoch": 1.6292735042735043, "grad_norm": 0.7098208665847778, "learning_rate": 0.00012887941604357482, "loss": 1.2126, "step": 9151 }, { "epoch": 1.629451566951567, "grad_norm": 0.730648398399353, "learning_rate": 0.0001288660147038572, "loss": 0.8351, "step": 9152 }, { "epoch": 1.6296296296296298, "grad_norm": 0.5520278215408325, "learning_rate": 0.0001288526127985699, "loss": 0.5877, "step": 9153 }, { "epoch": 1.6298076923076923, "grad_norm": 0.7611770033836365, "learning_rate": 0.00012883921032797563, "loss": 1.2227, "step": 9154 }, { "epoch": 1.6299857549857548, "grad_norm": 0.636820375919342, 
"learning_rate": 0.00012882580729233696, "loss": 0.8305, "step": 9155 }, { "epoch": 1.6301638176638176, "grad_norm": 0.694492518901825, "learning_rate": 0.00012881240369191644, "loss": 1.0452, "step": 9156 }, { "epoch": 1.6303418803418803, "grad_norm": 0.67826908826828, "learning_rate": 0.00012879899952697677, "loss": 0.8345, "step": 9157 }, { "epoch": 1.630519943019943, "grad_norm": 0.5891323685646057, "learning_rate": 0.00012878559479778052, "loss": 0.8367, "step": 9158 }, { "epoch": 1.6306980056980058, "grad_norm": 0.6766192317008972, "learning_rate": 0.0001287721895045903, "loss": 0.8319, "step": 9159 }, { "epoch": 1.6308760683760684, "grad_norm": 0.5306392908096313, "learning_rate": 0.0001287587836476688, "loss": 0.7945, "step": 9160 }, { "epoch": 1.631054131054131, "grad_norm": 0.6677970290184021, "learning_rate": 0.0001287453772272787, "loss": 1.1228, "step": 9161 }, { "epoch": 1.6312321937321936, "grad_norm": 0.810052752494812, "learning_rate": 0.00012873197024368266, "loss": 0.8395, "step": 9162 }, { "epoch": 1.6314102564102564, "grad_norm": 0.7619220018386841, "learning_rate": 0.00012871856269714333, "loss": 1.3713, "step": 9163 }, { "epoch": 1.6315883190883191, "grad_norm": 0.6564521193504333, "learning_rate": 0.00012870515458792342, "loss": 1.0513, "step": 9164 }, { "epoch": 1.631766381766382, "grad_norm": 0.6874445676803589, "learning_rate": 0.00012869174591628564, "loss": 1.0255, "step": 9165 }, { "epoch": 1.6319444444444444, "grad_norm": 0.6958737373352051, "learning_rate": 0.0001286783366824927, "loss": 0.9361, "step": 9166 }, { "epoch": 1.6321225071225072, "grad_norm": 0.6909199357032776, "learning_rate": 0.0001286649268868073, "loss": 0.9855, "step": 9167 }, { "epoch": 1.6323005698005697, "grad_norm": 0.7671375274658203, "learning_rate": 0.00012865151652949225, "loss": 1.084, "step": 9168 }, { "epoch": 1.6324786324786325, "grad_norm": 0.750200092792511, "learning_rate": 0.00012863810561081023, "loss": 0.9341, "step": 9169 }, { "epoch": 
1.6326566951566952, "grad_norm": 0.6595860123634338, "learning_rate": 0.00012862469413102402, "loss": 0.9386, "step": 9170 }, { "epoch": 1.632834757834758, "grad_norm": 0.622373640537262, "learning_rate": 0.0001286112820903964, "loss": 0.7697, "step": 9171 }, { "epoch": 1.6330128205128205, "grad_norm": 0.9628498554229736, "learning_rate": 0.00012859786948919014, "loss": 1.2629, "step": 9172 }, { "epoch": 1.6331908831908832, "grad_norm": 0.7610561847686768, "learning_rate": 0.000128584456327668, "loss": 0.9748, "step": 9173 }, { "epoch": 1.6333689458689458, "grad_norm": 0.6585374474525452, "learning_rate": 0.00012857104260609285, "loss": 0.9049, "step": 9174 }, { "epoch": 1.6335470085470085, "grad_norm": 0.6996221542358398, "learning_rate": 0.00012855762832472746, "loss": 0.8893, "step": 9175 }, { "epoch": 1.6337250712250713, "grad_norm": 0.6226270198822021, "learning_rate": 0.00012854421348383466, "loss": 0.8913, "step": 9176 }, { "epoch": 1.633903133903134, "grad_norm": 0.6570866107940674, "learning_rate": 0.00012853079808367731, "loss": 0.8632, "step": 9177 }, { "epoch": 1.6340811965811965, "grad_norm": 0.6899664402008057, "learning_rate": 0.00012851738212451826, "loss": 0.8177, "step": 9178 }, { "epoch": 1.6342592592592593, "grad_norm": 0.75257807970047, "learning_rate": 0.0001285039656066203, "loss": 0.9096, "step": 9179 }, { "epoch": 1.6344373219373218, "grad_norm": 0.6614963412284851, "learning_rate": 0.00012849054853024638, "loss": 0.9255, "step": 9180 }, { "epoch": 1.6346153846153846, "grad_norm": 0.7245957851409912, "learning_rate": 0.00012847713089565933, "loss": 1.0122, "step": 9181 }, { "epoch": 1.6347934472934473, "grad_norm": 0.7332839369773865, "learning_rate": 0.00012846371270312204, "loss": 0.8484, "step": 9182 }, { "epoch": 1.63497150997151, "grad_norm": 0.628089189529419, "learning_rate": 0.00012845029395289748, "loss": 1.0171, "step": 9183 }, { "epoch": 1.6351495726495726, "grad_norm": 0.7493528723716736, "learning_rate": 0.00012843687464524848, 
"loss": 1.1635, "step": 9184 }, { "epoch": 1.6353276353276354, "grad_norm": 0.6328163146972656, "learning_rate": 0.00012842345478043799, "loss": 1.1254, "step": 9185 }, { "epoch": 1.635505698005698, "grad_norm": 0.6720291376113892, "learning_rate": 0.00012841003435872894, "loss": 0.9729, "step": 9186 }, { "epoch": 1.6356837606837606, "grad_norm": 0.6657332181930542, "learning_rate": 0.00012839661338038427, "loss": 1.1047, "step": 9187 }, { "epoch": 1.6358618233618234, "grad_norm": 0.7416180968284607, "learning_rate": 0.000128383191845667, "loss": 0.9505, "step": 9188 }, { "epoch": 1.6360398860398861, "grad_norm": 0.8737816214561462, "learning_rate": 0.00012836976975484, "loss": 1.0518, "step": 9189 }, { "epoch": 1.6362179487179487, "grad_norm": 0.7351877093315125, "learning_rate": 0.0001283563471081663, "loss": 1.1152, "step": 9190 }, { "epoch": 1.6363960113960114, "grad_norm": 0.6442788243293762, "learning_rate": 0.00012834292390590893, "loss": 0.9432, "step": 9191 }, { "epoch": 1.636574074074074, "grad_norm": 0.6848029494285583, "learning_rate": 0.0001283295001483308, "loss": 0.8528, "step": 9192 }, { "epoch": 1.6367521367521367, "grad_norm": 0.6627060174942017, "learning_rate": 0.00012831607583569497, "loss": 1.0222, "step": 9193 }, { "epoch": 1.6369301994301995, "grad_norm": 0.7319555878639221, "learning_rate": 0.00012830265096826446, "loss": 0.9392, "step": 9194 }, { "epoch": 1.6371082621082622, "grad_norm": 0.6986424326896667, "learning_rate": 0.0001282892255463023, "loss": 1.2095, "step": 9195 }, { "epoch": 1.6372863247863247, "grad_norm": 0.6649929881095886, "learning_rate": 0.0001282757995700715, "loss": 0.9426, "step": 9196 }, { "epoch": 1.6374643874643875, "grad_norm": 0.6789031624794006, "learning_rate": 0.0001282623730398352, "loss": 0.9705, "step": 9197 }, { "epoch": 1.63764245014245, "grad_norm": 0.6388779878616333, "learning_rate": 0.00012824894595585637, "loss": 1.0698, "step": 9198 }, { "epoch": 1.6378205128205128, "grad_norm": 0.636832594871521, 
"learning_rate": 0.00012823551831839814, "loss": 0.9445, "step": 9199 }, { "epoch": 1.6379985754985755, "grad_norm": 0.670190691947937, "learning_rate": 0.0001282220901277236, "loss": 0.9847, "step": 9200 }, { "epoch": 1.6381766381766383, "grad_norm": 0.6020209193229675, "learning_rate": 0.0001282086613840958, "loss": 1.0047, "step": 9201 }, { "epoch": 1.6383547008547008, "grad_norm": 0.6648211479187012, "learning_rate": 0.0001281952320877779, "loss": 0.8717, "step": 9202 }, { "epoch": 1.6385327635327636, "grad_norm": 0.7207710146903992, "learning_rate": 0.000128181802239033, "loss": 1.1232, "step": 9203 }, { "epoch": 1.638710826210826, "grad_norm": 0.800992488861084, "learning_rate": 0.0001281683718381242, "loss": 1.0688, "step": 9204 }, { "epoch": 1.6388888888888888, "grad_norm": 0.789398193359375, "learning_rate": 0.0001281549408853147, "loss": 1.1772, "step": 9205 }, { "epoch": 1.6390669515669516, "grad_norm": 0.6514480710029602, "learning_rate": 0.0001281415093808676, "loss": 1.1685, "step": 9206 }, { "epoch": 1.6392450142450143, "grad_norm": 0.6914686560630798, "learning_rate": 0.00012812807732504608, "loss": 1.1307, "step": 9207 }, { "epoch": 1.6394230769230769, "grad_norm": 0.6788144111633301, "learning_rate": 0.00012811464471811334, "loss": 1.1735, "step": 9208 }, { "epoch": 1.6396011396011396, "grad_norm": 0.7049870491027832, "learning_rate": 0.00012810121156033252, "loss": 1.0128, "step": 9209 }, { "epoch": 1.6397792022792022, "grad_norm": 0.7156766057014465, "learning_rate": 0.00012808777785196687, "loss": 0.9503, "step": 9210 }, { "epoch": 1.639957264957265, "grad_norm": 0.651716411113739, "learning_rate": 0.0001280743435932795, "loss": 1.1227, "step": 9211 }, { "epoch": 1.6401353276353277, "grad_norm": 0.7276262044906616, "learning_rate": 0.0001280609087845337, "loss": 1.06, "step": 9212 }, { "epoch": 1.6403133903133904, "grad_norm": 0.6591095924377441, "learning_rate": 0.0001280474734259927, "loss": 1.0861, "step": 9213 }, { "epoch": 
1.640491452991453, "grad_norm": 0.6675926446914673, "learning_rate": 0.00012803403751791975, "loss": 0.9815, "step": 9214 }, { "epoch": 1.6406695156695157, "grad_norm": 0.6391474008560181, "learning_rate": 0.00012802060106057803, "loss": 0.8027, "step": 9215 }, { "epoch": 1.6408475783475782, "grad_norm": 0.6384556293487549, "learning_rate": 0.00012800716405423086, "loss": 0.7877, "step": 9216 }, { "epoch": 1.641025641025641, "grad_norm": 0.661191463470459, "learning_rate": 0.00012799372649914146, "loss": 0.9725, "step": 9217 }, { "epoch": 1.6412037037037037, "grad_norm": 0.7418332695960999, "learning_rate": 0.0001279802883955732, "loss": 1.1756, "step": 9218 }, { "epoch": 1.6413817663817665, "grad_norm": 0.6588954329490662, "learning_rate": 0.00012796684974378928, "loss": 1.0428, "step": 9219 }, { "epoch": 1.6415598290598292, "grad_norm": 0.7566093802452087, "learning_rate": 0.000127953410544053, "loss": 1.1254, "step": 9220 }, { "epoch": 1.6417378917378918, "grad_norm": 0.6801039576530457, "learning_rate": 0.00012793997079662777, "loss": 1.0854, "step": 9221 }, { "epoch": 1.6419159544159543, "grad_norm": 0.7262716889381409, "learning_rate": 0.0001279265305017768, "loss": 0.9343, "step": 9222 }, { "epoch": 1.642094017094017, "grad_norm": 0.628625750541687, "learning_rate": 0.0001279130896597635, "loss": 0.8942, "step": 9223 }, { "epoch": 1.6422720797720798, "grad_norm": 0.6183576583862305, "learning_rate": 0.0001278996482708512, "loss": 0.9284, "step": 9224 }, { "epoch": 1.6424501424501425, "grad_norm": 0.7912000417709351, "learning_rate": 0.00012788620633530327, "loss": 1.3043, "step": 9225 }, { "epoch": 1.6426282051282053, "grad_norm": 0.6982026100158691, "learning_rate": 0.00012787276385338298, "loss": 1.0224, "step": 9226 }, { "epoch": 1.6428062678062678, "grad_norm": 0.6734985709190369, "learning_rate": 0.00012785932082535386, "loss": 0.8781, "step": 9227 }, { "epoch": 1.6429843304843303, "grad_norm": 0.8799532055854797, "learning_rate": 0.0001278458772514792, 
"loss": 1.1482, "step": 9228 }, { "epoch": 1.643162393162393, "grad_norm": 0.590295672416687, "learning_rate": 0.0001278324331320224, "loss": 0.9502, "step": 9229 }, { "epoch": 1.6433404558404558, "grad_norm": 0.6562125086784363, "learning_rate": 0.0001278189884672469, "loss": 0.9834, "step": 9230 }, { "epoch": 1.6435185185185186, "grad_norm": 0.6848936676979065, "learning_rate": 0.00012780554325741612, "loss": 1.0414, "step": 9231 }, { "epoch": 1.6436965811965814, "grad_norm": 0.5985032320022583, "learning_rate": 0.00012779209750279344, "loss": 0.9469, "step": 9232 }, { "epoch": 1.6438746438746439, "grad_norm": 0.7500917911529541, "learning_rate": 0.00012777865120364238, "loss": 0.9626, "step": 9233 }, { "epoch": 1.6440527065527064, "grad_norm": 0.6565709114074707, "learning_rate": 0.00012776520436022634, "loss": 1.0594, "step": 9234 }, { "epoch": 1.6442307692307692, "grad_norm": 0.8005441427230835, "learning_rate": 0.00012775175697280882, "loss": 1.2379, "step": 9235 }, { "epoch": 1.644408831908832, "grad_norm": 0.6734150648117065, "learning_rate": 0.00012773830904165326, "loss": 0.9171, "step": 9236 }, { "epoch": 1.6445868945868947, "grad_norm": 0.6950868368148804, "learning_rate": 0.00012772486056702314, "loss": 1.1782, "step": 9237 }, { "epoch": 1.6447649572649574, "grad_norm": 0.8009599447250366, "learning_rate": 0.000127711411549182, "loss": 1.0288, "step": 9238 }, { "epoch": 1.64494301994302, "grad_norm": 0.6227970719337463, "learning_rate": 0.0001276979619883933, "loss": 0.9327, "step": 9239 }, { "epoch": 1.6451210826210825, "grad_norm": 0.6828190088272095, "learning_rate": 0.00012768451188492058, "loss": 0.9816, "step": 9240 }, { "epoch": 1.6452991452991452, "grad_norm": 0.9689767360687256, "learning_rate": 0.00012767106123902738, "loss": 0.9049, "step": 9241 }, { "epoch": 1.645477207977208, "grad_norm": 0.677061140537262, "learning_rate": 0.00012765761005097717, "loss": 0.9472, "step": 9242 }, { "epoch": 1.6456552706552707, "grad_norm": 
0.7227110862731934, "learning_rate": 0.00012764415832103356, "loss": 1.0384, "step": 9243 }, { "epoch": 1.6458333333333335, "grad_norm": 0.6540094614028931, "learning_rate": 0.0001276307060494601, "loss": 0.8166, "step": 9244 }, { "epoch": 1.646011396011396, "grad_norm": 0.6921904683113098, "learning_rate": 0.00012761725323652033, "loss": 0.9746, "step": 9245 }, { "epoch": 1.6461894586894585, "grad_norm": 0.6742660999298096, "learning_rate": 0.0001276037998824779, "loss": 0.8441, "step": 9246 }, { "epoch": 1.6463675213675213, "grad_norm": 0.6611103415489197, "learning_rate": 0.0001275903459875963, "loss": 1.087, "step": 9247 }, { "epoch": 1.646545584045584, "grad_norm": 0.6805498003959656, "learning_rate": 0.00012757689155213923, "loss": 0.923, "step": 9248 }, { "epoch": 1.6467236467236468, "grad_norm": 0.6598179340362549, "learning_rate": 0.00012756343657637024, "loss": 0.9371, "step": 9249 }, { "epoch": 1.6469017094017095, "grad_norm": 0.7147273421287537, "learning_rate": 0.00012754998106055297, "loss": 1.053, "step": 9250 }, { "epoch": 1.647079772079772, "grad_norm": 0.72414630651474, "learning_rate": 0.00012753652500495103, "loss": 1.0547, "step": 9251 }, { "epoch": 1.6472578347578346, "grad_norm": 0.7784913182258606, "learning_rate": 0.00012752306840982811, "loss": 0.9012, "step": 9252 }, { "epoch": 1.6474358974358974, "grad_norm": 0.644026517868042, "learning_rate": 0.0001275096112754478, "loss": 1.0911, "step": 9253 }, { "epoch": 1.64761396011396, "grad_norm": 0.691124677658081, "learning_rate": 0.00012749615360207382, "loss": 0.9918, "step": 9254 }, { "epoch": 1.6477920227920229, "grad_norm": 0.6632972359657288, "learning_rate": 0.00012748269538996986, "loss": 0.9438, "step": 9255 }, { "epoch": 1.6479700854700856, "grad_norm": 0.6548733115196228, "learning_rate": 0.00012746923663939955, "loss": 1.1082, "step": 9256 }, { "epoch": 1.6481481481481481, "grad_norm": 0.6737542748451233, "learning_rate": 0.00012745577735062664, "loss": 0.9255, "step": 9257 }, { 
"epoch": 1.6483262108262107, "grad_norm": 0.686862051486969, "learning_rate": 0.00012744231752391479, "loss": 0.9493, "step": 9258 }, { "epoch": 1.6485042735042734, "grad_norm": 0.6096474528312683, "learning_rate": 0.00012742885715952772, "loss": 0.6849, "step": 9259 }, { "epoch": 1.6486823361823362, "grad_norm": 0.702751636505127, "learning_rate": 0.00012741539625772918, "loss": 1.0335, "step": 9260 }, { "epoch": 1.648860398860399, "grad_norm": 0.7470958232879639, "learning_rate": 0.0001274019348187829, "loss": 1.105, "step": 9261 }, { "epoch": 1.6490384615384617, "grad_norm": 0.6642739176750183, "learning_rate": 0.0001273884728429526, "loss": 1.01, "step": 9262 }, { "epoch": 1.6492165242165242, "grad_norm": 0.6470904350280762, "learning_rate": 0.00012737501033050213, "loss": 0.9009, "step": 9263 }, { "epoch": 1.6493945868945867, "grad_norm": 0.7487246990203857, "learning_rate": 0.00012736154728169518, "loss": 0.9832, "step": 9264 }, { "epoch": 1.6495726495726495, "grad_norm": 0.7370779514312744, "learning_rate": 0.00012734808369679553, "loss": 1.0464, "step": 9265 }, { "epoch": 1.6497507122507122, "grad_norm": 0.7942814826965332, "learning_rate": 0.00012733461957606702, "loss": 1.102, "step": 9266 }, { "epoch": 1.649928774928775, "grad_norm": 0.6535606980323792, "learning_rate": 0.00012732115491977336, "loss": 1.0655, "step": 9267 }, { "epoch": 1.6501068376068377, "grad_norm": 0.601716935634613, "learning_rate": 0.00012730768972817847, "loss": 0.8236, "step": 9268 }, { "epoch": 1.6502849002849003, "grad_norm": 0.7375118732452393, "learning_rate": 0.00012729422400154614, "loss": 0.9313, "step": 9269 }, { "epoch": 1.6504629629629628, "grad_norm": 0.7360411882400513, "learning_rate": 0.00012728075774014018, "loss": 0.9254, "step": 9270 }, { "epoch": 1.6506410256410255, "grad_norm": 0.8453929424285889, "learning_rate": 0.00012726729094422444, "loss": 1.0975, "step": 9271 }, { "epoch": 1.6508190883190883, "grad_norm": 0.5615501999855042, "learning_rate": 
0.00012725382361406274, "loss": 0.8243, "step": 9272 }, { "epoch": 1.650997150997151, "grad_norm": 0.6494898796081543, "learning_rate": 0.000127240355749919, "loss": 0.9766, "step": 9273 }, { "epoch": 1.6511752136752138, "grad_norm": 0.6544778347015381, "learning_rate": 0.0001272268873520571, "loss": 0.9969, "step": 9274 }, { "epoch": 1.6513532763532763, "grad_norm": 0.6937400698661804, "learning_rate": 0.00012721341842074092, "loss": 1.0626, "step": 9275 }, { "epoch": 1.651531339031339, "grad_norm": 0.7068421244621277, "learning_rate": 0.0001271999489562343, "loss": 1.0068, "step": 9276 }, { "epoch": 1.6517094017094016, "grad_norm": 0.6425052285194397, "learning_rate": 0.0001271864789588012, "loss": 0.8716, "step": 9277 }, { "epoch": 1.6518874643874644, "grad_norm": 0.6895090341567993, "learning_rate": 0.0001271730084287055, "loss": 1.081, "step": 9278 }, { "epoch": 1.6520655270655271, "grad_norm": 0.6773712038993835, "learning_rate": 0.00012715953736621116, "loss": 0.7586, "step": 9279 }, { "epoch": 1.6522435897435899, "grad_norm": 0.6085716485977173, "learning_rate": 0.0001271460657715821, "loss": 0.8627, "step": 9280 }, { "epoch": 1.6524216524216524, "grad_norm": 0.6415461897850037, "learning_rate": 0.00012713259364508227, "loss": 0.9751, "step": 9281 }, { "epoch": 1.6525997150997151, "grad_norm": 0.6460939645767212, "learning_rate": 0.00012711912098697565, "loss": 0.9578, "step": 9282 }, { "epoch": 1.6527777777777777, "grad_norm": 0.6076797246932983, "learning_rate": 0.00012710564779752615, "loss": 0.9627, "step": 9283 }, { "epoch": 1.6529558404558404, "grad_norm": 0.710782527923584, "learning_rate": 0.00012709217407699783, "loss": 0.8725, "step": 9284 }, { "epoch": 1.6531339031339032, "grad_norm": 0.6793623566627502, "learning_rate": 0.00012707869982565463, "loss": 0.908, "step": 9285 }, { "epoch": 1.653311965811966, "grad_norm": 0.6841681003570557, "learning_rate": 0.00012706522504376055, "loss": 0.8546, "step": 9286 }, { "epoch": 1.6534900284900285, 
"grad_norm": 0.7908675670623779, "learning_rate": 0.0001270517497315796, "loss": 0.9409, "step": 9287 }, { "epoch": 1.6536680911680912, "grad_norm": 0.6918683648109436, "learning_rate": 0.0001270382738893758, "loss": 1.0493, "step": 9288 }, { "epoch": 1.6538461538461537, "grad_norm": 0.6891819834709167, "learning_rate": 0.00012702479751741322, "loss": 1.0675, "step": 9289 }, { "epoch": 1.6540242165242165, "grad_norm": 0.6965166926383972, "learning_rate": 0.00012701132061595586, "loss": 0.8563, "step": 9290 }, { "epoch": 1.6542022792022792, "grad_norm": 0.7549001574516296, "learning_rate": 0.00012699784318526779, "loss": 1.1572, "step": 9291 }, { "epoch": 1.654380341880342, "grad_norm": 0.6100513339042664, "learning_rate": 0.00012698436522561303, "loss": 0.897, "step": 9292 }, { "epoch": 1.6545584045584045, "grad_norm": 0.6477037668228149, "learning_rate": 0.00012697088673725574, "loss": 0.7961, "step": 9293 }, { "epoch": 1.6547364672364673, "grad_norm": 0.7402619123458862, "learning_rate": 0.0001269574077204599, "loss": 1.2001, "step": 9294 }, { "epoch": 1.6549145299145298, "grad_norm": 0.7162346243858337, "learning_rate": 0.0001269439281754897, "loss": 0.9963, "step": 9295 }, { "epoch": 1.6550925925925926, "grad_norm": 0.6757413744926453, "learning_rate": 0.0001269304481026092, "loss": 1.0476, "step": 9296 }, { "epoch": 1.6552706552706553, "grad_norm": 0.6455655097961426, "learning_rate": 0.0001269169675020825, "loss": 0.9716, "step": 9297 }, { "epoch": 1.655448717948718, "grad_norm": 0.7705031037330627, "learning_rate": 0.0001269034863741737, "loss": 0.9886, "step": 9298 }, { "epoch": 1.6556267806267806, "grad_norm": 0.6084272861480713, "learning_rate": 0.000126890004719147, "loss": 0.8231, "step": 9299 }, { "epoch": 1.6558048433048433, "grad_norm": 0.7051045298576355, "learning_rate": 0.00012687652253726652, "loss": 0.8673, "step": 9300 }, { "epoch": 1.6559829059829059, "grad_norm": 0.731675386428833, "learning_rate": 0.0001268630398287964, "loss": 0.8609, 
"step": 9301 }, { "epoch": 1.6561609686609686, "grad_norm": 0.6796799302101135, "learning_rate": 0.00012684955659400087, "loss": 1.0157, "step": 9302 }, { "epoch": 1.6563390313390314, "grad_norm": 0.6270264983177185, "learning_rate": 0.000126836072833144, "loss": 0.8924, "step": 9303 }, { "epoch": 1.6565170940170941, "grad_norm": 0.7235464453697205, "learning_rate": 0.00012682258854649004, "loss": 0.8904, "step": 9304 }, { "epoch": 1.6566951566951567, "grad_norm": 0.7644724249839783, "learning_rate": 0.00012680910373430318, "loss": 0.9119, "step": 9305 }, { "epoch": 1.6568732193732194, "grad_norm": 0.661411702632904, "learning_rate": 0.00012679561839684764, "loss": 1.0066, "step": 9306 }, { "epoch": 1.657051282051282, "grad_norm": 0.6981723308563232, "learning_rate": 0.0001267821325343876, "loss": 1.2579, "step": 9307 }, { "epoch": 1.6572293447293447, "grad_norm": 0.6469807028770447, "learning_rate": 0.0001267686461471873, "loss": 0.8678, "step": 9308 }, { "epoch": 1.6574074074074074, "grad_norm": 0.8255495429039001, "learning_rate": 0.000126755159235511, "loss": 0.9053, "step": 9309 }, { "epoch": 1.6575854700854702, "grad_norm": 0.6882261037826538, "learning_rate": 0.00012674167179962294, "loss": 0.8364, "step": 9310 }, { "epoch": 1.6577635327635327, "grad_norm": 0.6816701889038086, "learning_rate": 0.00012672818383978733, "loss": 0.9627, "step": 9311 }, { "epoch": 1.6579415954415955, "grad_norm": 0.6993424892425537, "learning_rate": 0.00012671469535626852, "loss": 0.8337, "step": 9312 }, { "epoch": 1.658119658119658, "grad_norm": 0.6271458864212036, "learning_rate": 0.00012670120634933075, "loss": 0.8322, "step": 9313 }, { "epoch": 1.6582977207977208, "grad_norm": 0.7012003660202026, "learning_rate": 0.00012668771681923827, "loss": 0.8895, "step": 9314 }, { "epoch": 1.6584757834757835, "grad_norm": 0.6704670190811157, "learning_rate": 0.00012667422676625547, "loss": 1.0544, "step": 9315 }, { "epoch": 1.6586538461538463, "grad_norm": 0.6189491748809814, 
"learning_rate": 0.0001266607361906466, "loss": 0.9623, "step": 9316 }, { "epoch": 1.6588319088319088, "grad_norm": 0.7065694332122803, "learning_rate": 0.000126647245092676, "loss": 0.8874, "step": 9317 }, { "epoch": 1.6590099715099715, "grad_norm": 0.7473452687263489, "learning_rate": 0.00012663375347260795, "loss": 1.0576, "step": 9318 }, { "epoch": 1.659188034188034, "grad_norm": 0.6839408874511719, "learning_rate": 0.0001266202613307068, "loss": 0.9127, "step": 9319 }, { "epoch": 1.6593660968660968, "grad_norm": 0.7154020071029663, "learning_rate": 0.00012660676866723699, "loss": 1.1174, "step": 9320 }, { "epoch": 1.6595441595441596, "grad_norm": 0.7123729586601257, "learning_rate": 0.0001265932754824628, "loss": 0.9617, "step": 9321 }, { "epoch": 1.6597222222222223, "grad_norm": 0.7537810802459717, "learning_rate": 0.0001265797817766486, "loss": 1.0333, "step": 9322 }, { "epoch": 1.6599002849002849, "grad_norm": 0.706551730632782, "learning_rate": 0.00012656628755005884, "loss": 1.0838, "step": 9323 }, { "epoch": 1.6600783475783476, "grad_norm": 0.8104004859924316, "learning_rate": 0.0001265527928029578, "loss": 0.9807, "step": 9324 }, { "epoch": 1.6602564102564101, "grad_norm": 0.6892881989479065, "learning_rate": 0.00012653929753560998, "loss": 0.9941, "step": 9325 }, { "epoch": 1.6604344729344729, "grad_norm": 0.5919203758239746, "learning_rate": 0.00012652580174827974, "loss": 0.9268, "step": 9326 }, { "epoch": 1.6606125356125356, "grad_norm": 0.6715863347053528, "learning_rate": 0.00012651230544123154, "loss": 1.0912, "step": 9327 }, { "epoch": 1.6607905982905984, "grad_norm": 0.6765137314796448, "learning_rate": 0.0001264988086147298, "loss": 1.1576, "step": 9328 }, { "epoch": 1.660968660968661, "grad_norm": 0.6781638860702515, "learning_rate": 0.00012648531126903888, "loss": 1.1162, "step": 9329 }, { "epoch": 1.6611467236467237, "grad_norm": 0.715871274471283, "learning_rate": 0.00012647181340442337, "loss": 0.714, "step": 9330 }, { "epoch": 
1.6613247863247862, "grad_norm": 0.6237258315086365, "learning_rate": 0.00012645831502114762, "loss": 0.8512, "step": 9331 }, { "epoch": 1.661502849002849, "grad_norm": 0.6668339967727661, "learning_rate": 0.0001264448161194762, "loss": 1.0384, "step": 9332 }, { "epoch": 1.6616809116809117, "grad_norm": 0.8316730260848999, "learning_rate": 0.00012643131669967352, "loss": 0.8931, "step": 9333 }, { "epoch": 1.6618589743589745, "grad_norm": 0.7013183832168579, "learning_rate": 0.00012641781676200406, "loss": 1.0548, "step": 9334 }, { "epoch": 1.6620370370370372, "grad_norm": 0.6980466842651367, "learning_rate": 0.00012640431630673243, "loss": 0.8988, "step": 9335 }, { "epoch": 1.6622150997150997, "grad_norm": 0.7045995593070984, "learning_rate": 0.000126390815334123, "loss": 1.107, "step": 9336 }, { "epoch": 1.6623931623931623, "grad_norm": 0.6699773669242859, "learning_rate": 0.00012637731384444043, "loss": 1.1757, "step": 9337 }, { "epoch": 1.662571225071225, "grad_norm": 0.6489999294281006, "learning_rate": 0.00012636381183794916, "loss": 0.9282, "step": 9338 }, { "epoch": 1.6627492877492878, "grad_norm": 0.7085952758789062, "learning_rate": 0.00012635030931491375, "loss": 1.0221, "step": 9339 }, { "epoch": 1.6629273504273505, "grad_norm": 0.6893135905265808, "learning_rate": 0.00012633680627559878, "loss": 1.0517, "step": 9340 }, { "epoch": 1.6631054131054133, "grad_norm": 0.5659682154655457, "learning_rate": 0.00012632330272026882, "loss": 0.6294, "step": 9341 }, { "epoch": 1.6632834757834758, "grad_norm": 0.6889018416404724, "learning_rate": 0.00012630979864918838, "loss": 1.0735, "step": 9342 }, { "epoch": 1.6634615384615383, "grad_norm": 0.7333424687385559, "learning_rate": 0.00012629629406262212, "loss": 0.9079, "step": 9343 }, { "epoch": 1.663639601139601, "grad_norm": 0.6340580582618713, "learning_rate": 0.00012628278896083462, "loss": 0.9738, "step": 9344 }, { "epoch": 1.6638176638176638, "grad_norm": 0.7042564749717712, "learning_rate": 
0.00012626928334409044, "loss": 0.959, "step": 9345 }, { "epoch": 1.6639957264957266, "grad_norm": 0.711757242679596, "learning_rate": 0.00012625577721265424, "loss": 0.8113, "step": 9346 }, { "epoch": 1.6641737891737893, "grad_norm": 0.7723299264907837, "learning_rate": 0.0001262422705667906, "loss": 1.1724, "step": 9347 }, { "epoch": 1.6643518518518519, "grad_norm": 0.711334228515625, "learning_rate": 0.00012622876340676422, "loss": 1.0121, "step": 9348 }, { "epoch": 1.6645299145299144, "grad_norm": 0.6954590678215027, "learning_rate": 0.0001262152557328397, "loss": 1.2093, "step": 9349 }, { "epoch": 1.6647079772079771, "grad_norm": 0.6341620087623596, "learning_rate": 0.00012620174754528166, "loss": 1.0535, "step": 9350 }, { "epoch": 1.66488603988604, "grad_norm": 0.6434268355369568, "learning_rate": 0.00012618823884435484, "loss": 0.8964, "step": 9351 }, { "epoch": 1.6650641025641026, "grad_norm": 0.7685084939002991, "learning_rate": 0.00012617472963032385, "loss": 1.0639, "step": 9352 }, { "epoch": 1.6652421652421654, "grad_norm": 0.6347958445549011, "learning_rate": 0.00012616121990345345, "loss": 1.0252, "step": 9353 }, { "epoch": 1.665420227920228, "grad_norm": 0.647722601890564, "learning_rate": 0.0001261477096640083, "loss": 0.9527, "step": 9354 }, { "epoch": 1.6655982905982905, "grad_norm": 0.5942047834396362, "learning_rate": 0.000126134198912253, "loss": 1.0062, "step": 9355 }, { "epoch": 1.6657763532763532, "grad_norm": 0.683555006980896, "learning_rate": 0.00012612068764845247, "loss": 0.8101, "step": 9356 }, { "epoch": 1.665954415954416, "grad_norm": 0.6832289099693298, "learning_rate": 0.00012610717587287128, "loss": 1.1436, "step": 9357 }, { "epoch": 1.6661324786324787, "grad_norm": 0.7035253047943115, "learning_rate": 0.00012609366358577422, "loss": 0.9724, "step": 9358 }, { "epoch": 1.6663105413105415, "grad_norm": 0.6471409797668457, "learning_rate": 0.00012608015078742604, "loss": 0.776, "step": 9359 }, { "epoch": 1.666488603988604, 
"grad_norm": 0.7069687247276306, "learning_rate": 0.00012606663747809145, "loss": 0.9667, "step": 9360 }, { "epoch": 1.6666666666666665, "grad_norm": 0.6744135618209839, "learning_rate": 0.00012605312365803525, "loss": 1.1152, "step": 9361 }, { "epoch": 1.6668447293447293, "grad_norm": 0.7212334275245667, "learning_rate": 0.00012603960932752227, "loss": 1.1543, "step": 9362 }, { "epoch": 1.667022792022792, "grad_norm": 0.6501669883728027, "learning_rate": 0.0001260260944868172, "loss": 0.8595, "step": 9363 }, { "epoch": 1.6672008547008548, "grad_norm": 0.6970864534378052, "learning_rate": 0.00012601257913618486, "loss": 0.9364, "step": 9364 }, { "epoch": 1.6673789173789175, "grad_norm": 0.6802223324775696, "learning_rate": 0.00012599906327589007, "loss": 0.8429, "step": 9365 }, { "epoch": 1.66755698005698, "grad_norm": 0.6842933893203735, "learning_rate": 0.00012598554690619764, "loss": 1.1255, "step": 9366 }, { "epoch": 1.6677350427350426, "grad_norm": 0.6547088623046875, "learning_rate": 0.0001259720300273724, "loss": 0.983, "step": 9367 }, { "epoch": 1.6679131054131053, "grad_norm": 0.620424211025238, "learning_rate": 0.0001259585126396792, "loss": 0.918, "step": 9368 }, { "epoch": 1.668091168091168, "grad_norm": 0.5659816861152649, "learning_rate": 0.00012594499474338287, "loss": 0.7788, "step": 9369 }, { "epoch": 1.6682692307692308, "grad_norm": 0.5904595255851746, "learning_rate": 0.00012593147633874826, "loss": 0.801, "step": 9370 }, { "epoch": 1.6684472934472936, "grad_norm": 0.6444024443626404, "learning_rate": 0.0001259179574260402, "loss": 1.0997, "step": 9371 }, { "epoch": 1.6686253561253561, "grad_norm": 0.6408827304840088, "learning_rate": 0.00012590443800552365, "loss": 0.9839, "step": 9372 }, { "epoch": 1.6688034188034186, "grad_norm": 0.752391517162323, "learning_rate": 0.00012589091807746345, "loss": 1.0249, "step": 9373 }, { "epoch": 1.6689814814814814, "grad_norm": 0.8256397247314453, "learning_rate": 0.00012587739764212448, "loss": 0.9541, 
"step": 9374 }, { "epoch": 1.6691595441595442, "grad_norm": 0.7878768444061279, "learning_rate": 0.00012586387669977166, "loss": 1.0071, "step": 9375 }, { "epoch": 1.669337606837607, "grad_norm": 0.6179735660552979, "learning_rate": 0.0001258503552506699, "loss": 0.8495, "step": 9376 }, { "epoch": 1.6695156695156697, "grad_norm": 0.6699580550193787, "learning_rate": 0.00012583683329508413, "loss": 0.8999, "step": 9377 }, { "epoch": 1.6696937321937322, "grad_norm": 0.6542006731033325, "learning_rate": 0.00012582331083327929, "loss": 1.0357, "step": 9378 }, { "epoch": 1.6698717948717947, "grad_norm": 0.7275210618972778, "learning_rate": 0.0001258097878655203, "loss": 1.0259, "step": 9379 }, { "epoch": 1.6700498575498575, "grad_norm": 0.6836326122283936, "learning_rate": 0.00012579626439207216, "loss": 1.0428, "step": 9380 }, { "epoch": 1.6702279202279202, "grad_norm": 0.760123610496521, "learning_rate": 0.00012578274041319978, "loss": 0.9716, "step": 9381 }, { "epoch": 1.670405982905983, "grad_norm": 0.5525194406509399, "learning_rate": 0.00012576921592916818, "loss": 0.8253, "step": 9382 }, { "epoch": 1.6705840455840457, "grad_norm": 0.6881270408630371, "learning_rate": 0.00012575569094024232, "loss": 1.0571, "step": 9383 }, { "epoch": 1.6707621082621082, "grad_norm": 0.6776245832443237, "learning_rate": 0.0001257421654466872, "loss": 0.9119, "step": 9384 }, { "epoch": 1.6709401709401708, "grad_norm": 0.7903014421463013, "learning_rate": 0.0001257286394487678, "loss": 1.0626, "step": 9385 }, { "epoch": 1.6711182336182335, "grad_norm": 0.61158287525177, "learning_rate": 0.0001257151129467492, "loss": 0.9378, "step": 9386 }, { "epoch": 1.6712962962962963, "grad_norm": 0.655189573764801, "learning_rate": 0.00012570158594089637, "loss": 0.9334, "step": 9387 }, { "epoch": 1.671474358974359, "grad_norm": 0.6707320809364319, "learning_rate": 0.0001256880584314743, "loss": 1.1802, "step": 9388 }, { "epoch": 1.6716524216524218, "grad_norm": 0.847341775894165, 
"learning_rate": 0.00012567453041874814, "loss": 1.1169, "step": 9389 }, { "epoch": 1.6718304843304843, "grad_norm": 0.6136410236358643, "learning_rate": 0.00012566100190298287, "loss": 0.8959, "step": 9390 }, { "epoch": 1.672008547008547, "grad_norm": 0.7203437089920044, "learning_rate": 0.00012564747288444357, "loss": 0.9803, "step": 9391 }, { "epoch": 1.6721866096866096, "grad_norm": 0.7832576632499695, "learning_rate": 0.00012563394336339534, "loss": 0.8696, "step": 9392 }, { "epoch": 1.6723646723646723, "grad_norm": 0.6940804719924927, "learning_rate": 0.00012562041334010323, "loss": 1.0571, "step": 9393 }, { "epoch": 1.672542735042735, "grad_norm": 0.6042298674583435, "learning_rate": 0.00012560688281483234, "loss": 0.8835, "step": 9394 }, { "epoch": 1.6727207977207978, "grad_norm": 0.7870675921440125, "learning_rate": 0.00012559335178784776, "loss": 1.1585, "step": 9395 }, { "epoch": 1.6728988603988604, "grad_norm": 0.7448568940162659, "learning_rate": 0.00012557982025941463, "loss": 0.9699, "step": 9396 }, { "epoch": 1.6730769230769231, "grad_norm": 0.7226544618606567, "learning_rate": 0.00012556628822979807, "loss": 0.7817, "step": 9397 }, { "epoch": 1.6732549857549857, "grad_norm": 0.5652043223381042, "learning_rate": 0.0001255527556992632, "loss": 0.8077, "step": 9398 }, { "epoch": 1.6734330484330484, "grad_norm": 0.6459930539131165, "learning_rate": 0.00012553922266807517, "loss": 1.22, "step": 9399 }, { "epoch": 1.6736111111111112, "grad_norm": 0.7568991780281067, "learning_rate": 0.00012552568913649912, "loss": 1.1559, "step": 9400 }, { "epoch": 1.673789173789174, "grad_norm": 0.7462680339813232, "learning_rate": 0.0001255121551048002, "loss": 1.1438, "step": 9401 }, { "epoch": 1.6739672364672364, "grad_norm": 0.6653871536254883, "learning_rate": 0.0001254986205732436, "loss": 0.9468, "step": 9402 }, { "epoch": 1.6741452991452992, "grad_norm": 0.6261825561523438, "learning_rate": 0.0001254850855420945, "loss": 0.8558, "step": 9403 }, { "epoch": 
1.6743233618233617, "grad_norm": 0.6442354321479797, "learning_rate": 0.0001254715500116181, "loss": 0.8605, "step": 9404 }, { "epoch": 1.6745014245014245, "grad_norm": 0.7483665943145752, "learning_rate": 0.00012545801398207958, "loss": 0.9089, "step": 9405 }, { "epoch": 1.6746794871794872, "grad_norm": 0.7319819927215576, "learning_rate": 0.00012544447745374416, "loss": 0.9937, "step": 9406 }, { "epoch": 1.67485754985755, "grad_norm": 0.703014075756073, "learning_rate": 0.00012543094042687708, "loss": 0.9597, "step": 9407 }, { "epoch": 1.6750356125356125, "grad_norm": 0.6593887209892273, "learning_rate": 0.00012541740290174353, "loss": 0.844, "step": 9408 }, { "epoch": 1.6752136752136753, "grad_norm": 0.6567463874816895, "learning_rate": 0.00012540386487860879, "loss": 1.0744, "step": 9409 }, { "epoch": 1.6753917378917378, "grad_norm": 0.7784611582756042, "learning_rate": 0.00012539032635773805, "loss": 0.974, "step": 9410 }, { "epoch": 1.6755698005698005, "grad_norm": 0.6760087609291077, "learning_rate": 0.00012537678733939663, "loss": 0.8948, "step": 9411 }, { "epoch": 1.6757478632478633, "grad_norm": 0.825965940952301, "learning_rate": 0.0001253632478238498, "loss": 1.1196, "step": 9412 }, { "epoch": 1.675925925925926, "grad_norm": 0.7215564250946045, "learning_rate": 0.00012534970781136277, "loss": 1.1774, "step": 9413 }, { "epoch": 1.6761039886039886, "grad_norm": 0.6548578143119812, "learning_rate": 0.00012533616730220094, "loss": 0.8671, "step": 9414 }, { "epoch": 1.6762820512820513, "grad_norm": 0.7257684469223022, "learning_rate": 0.00012532262629662947, "loss": 1.105, "step": 9415 }, { "epoch": 1.6764601139601139, "grad_norm": 0.6695847511291504, "learning_rate": 0.00012530908479491378, "loss": 0.9189, "step": 9416 }, { "epoch": 1.6766381766381766, "grad_norm": 0.684695303440094, "learning_rate": 0.00012529554279731915, "loss": 1.066, "step": 9417 }, { "epoch": 1.6768162393162394, "grad_norm": 0.7107276320457458, "learning_rate": 0.0001252820003041109, 
"loss": 0.9311, "step": 9418 }, { "epoch": 1.676994301994302, "grad_norm": 0.6755440831184387, "learning_rate": 0.0001252684573155544, "loss": 1.1036, "step": 9419 }, { "epoch": 1.6771723646723646, "grad_norm": 0.7571110725402832, "learning_rate": 0.00012525491383191491, "loss": 1.0244, "step": 9420 }, { "epoch": 1.6773504273504274, "grad_norm": 0.6960614323616028, "learning_rate": 0.0001252413698534579, "loss": 0.9077, "step": 9421 }, { "epoch": 1.67752849002849, "grad_norm": 0.6675550937652588, "learning_rate": 0.00012522782538044867, "loss": 1.0543, "step": 9422 }, { "epoch": 1.6777065527065527, "grad_norm": 0.6637391448020935, "learning_rate": 0.0001252142804131526, "loss": 0.9471, "step": 9423 }, { "epoch": 1.6778846153846154, "grad_norm": 0.6382880210876465, "learning_rate": 0.00012520073495183508, "loss": 0.9729, "step": 9424 }, { "epoch": 1.6780626780626782, "grad_norm": 0.731922447681427, "learning_rate": 0.0001251871889967615, "loss": 1.0385, "step": 9425 }, { "epoch": 1.6782407407407407, "grad_norm": 0.5868890285491943, "learning_rate": 0.00012517364254819728, "loss": 0.8466, "step": 9426 }, { "epoch": 1.6784188034188035, "grad_norm": 0.8535677790641785, "learning_rate": 0.00012516009560640786, "loss": 1.1009, "step": 9427 }, { "epoch": 1.678596866096866, "grad_norm": 0.7044199705123901, "learning_rate": 0.0001251465481716586, "loss": 1.0862, "step": 9428 }, { "epoch": 1.6787749287749287, "grad_norm": 0.7207323312759399, "learning_rate": 0.00012513300024421498, "loss": 1.064, "step": 9429 }, { "epoch": 1.6789529914529915, "grad_norm": 0.7739703059196472, "learning_rate": 0.0001251194518243424, "loss": 1.1738, "step": 9430 }, { "epoch": 1.6791310541310542, "grad_norm": 0.6829344630241394, "learning_rate": 0.00012510590291230637, "loss": 1.0555, "step": 9431 }, { "epoch": 1.6793091168091168, "grad_norm": 0.6760238409042358, "learning_rate": 0.0001250923535083723, "loss": 1.2177, "step": 9432 }, { "epoch": 1.6794871794871795, "grad_norm": 
0.6666911840438843, "learning_rate": 0.0001250788036128057, "loss": 0.8957, "step": 9433 }, { "epoch": 1.679665242165242, "grad_norm": 0.747797429561615, "learning_rate": 0.00012506525322587207, "loss": 0.9793, "step": 9434 }, { "epoch": 1.6798433048433048, "grad_norm": 0.6261107325553894, "learning_rate": 0.00012505170234783686, "loss": 0.7781, "step": 9435 }, { "epoch": 1.6800213675213675, "grad_norm": 0.7055163979530334, "learning_rate": 0.00012503815097896555, "loss": 1.0617, "step": 9436 }, { "epoch": 1.6801994301994303, "grad_norm": 0.5567409992218018, "learning_rate": 0.00012502459911952371, "loss": 0.7911, "step": 9437 }, { "epoch": 1.6803774928774928, "grad_norm": 0.7410423159599304, "learning_rate": 0.0001250110467697768, "loss": 1.1041, "step": 9438 }, { "epoch": 1.6805555555555556, "grad_norm": 0.6185283064842224, "learning_rate": 0.00012499749392999045, "loss": 0.8101, "step": 9439 }, { "epoch": 1.680733618233618, "grad_norm": 0.6988311409950256, "learning_rate": 0.0001249839406004301, "loss": 0.8579, "step": 9440 }, { "epoch": 1.6809116809116809, "grad_norm": 0.5588746070861816, "learning_rate": 0.00012497038678136132, "loss": 0.8035, "step": 9441 }, { "epoch": 1.6810897435897436, "grad_norm": 0.6568905711174011, "learning_rate": 0.0001249568324730497, "loss": 0.7455, "step": 9442 }, { "epoch": 1.6812678062678064, "grad_norm": 0.6924821138381958, "learning_rate": 0.00012494327767576078, "loss": 1.134, "step": 9443 }, { "epoch": 1.681445868945869, "grad_norm": 0.6940170526504517, "learning_rate": 0.00012492972238976018, "loss": 0.9719, "step": 9444 }, { "epoch": 1.6816239316239316, "grad_norm": 0.667465090751648, "learning_rate": 0.00012491616661531343, "loss": 0.953, "step": 9445 }, { "epoch": 1.6818019943019942, "grad_norm": 0.7693275809288025, "learning_rate": 0.00012490261035268612, "loss": 1.1342, "step": 9446 }, { "epoch": 1.681980056980057, "grad_norm": 0.7243115305900574, "learning_rate": 0.00012488905360214393, "loss": 1.1847, "step": 9447 }, 
{ "epoch": 1.6821581196581197, "grad_norm": 0.657357931137085, "learning_rate": 0.00012487549636395245, "loss": 0.8747, "step": 9448 }, { "epoch": 1.6823361823361824, "grad_norm": 0.7471592426300049, "learning_rate": 0.00012486193863837727, "loss": 1.0472, "step": 9449 }, { "epoch": 1.6825142450142452, "grad_norm": 0.7476530075073242, "learning_rate": 0.00012484838042568406, "loss": 1.0708, "step": 9450 }, { "epoch": 1.6826923076923077, "grad_norm": 0.6031121611595154, "learning_rate": 0.00012483482172613846, "loss": 0.8243, "step": 9451 }, { "epoch": 1.6828703703703702, "grad_norm": 0.6733492016792297, "learning_rate": 0.00012482126254000607, "loss": 0.7808, "step": 9452 }, { "epoch": 1.683048433048433, "grad_norm": 0.5865318179130554, "learning_rate": 0.00012480770286755265, "loss": 0.829, "step": 9453 }, { "epoch": 1.6832264957264957, "grad_norm": 0.6805713772773743, "learning_rate": 0.0001247941427090438, "loss": 0.7206, "step": 9454 }, { "epoch": 1.6834045584045585, "grad_norm": 0.6514836549758911, "learning_rate": 0.0001247805820647453, "loss": 0.9499, "step": 9455 }, { "epoch": 1.6835826210826212, "grad_norm": 0.7432990074157715, "learning_rate": 0.0001247670209349227, "loss": 1.1324, "step": 9456 }, { "epoch": 1.6837606837606838, "grad_norm": 0.6348414421081543, "learning_rate": 0.00012475345931984178, "loss": 0.8246, "step": 9457 }, { "epoch": 1.6839387464387463, "grad_norm": 0.7194374203681946, "learning_rate": 0.00012473989721976825, "loss": 0.9634, "step": 9458 }, { "epoch": 1.684116809116809, "grad_norm": 0.7869647741317749, "learning_rate": 0.00012472633463496785, "loss": 1.2115, "step": 9459 }, { "epoch": 1.6842948717948718, "grad_norm": 0.6672070026397705, "learning_rate": 0.00012471277156570623, "loss": 0.9842, "step": 9460 }, { "epoch": 1.6844729344729346, "grad_norm": 0.6611466407775879, "learning_rate": 0.00012469920801224925, "loss": 0.9343, "step": 9461 }, { "epoch": 1.6846509971509973, "grad_norm": 0.6715068221092224, "learning_rate": 
0.0001246856439748626, "loss": 0.6852, "step": 9462 }, { "epoch": 1.6848290598290598, "grad_norm": 0.641942024230957, "learning_rate": 0.00012467207945381198, "loss": 0.8863, "step": 9463 }, { "epoch": 1.6850071225071224, "grad_norm": 0.8414762616157532, "learning_rate": 0.00012465851444936325, "loss": 1.3404, "step": 9464 }, { "epoch": 1.6851851851851851, "grad_norm": 0.715752363204956, "learning_rate": 0.00012464494896178216, "loss": 1.123, "step": 9465 }, { "epoch": 1.6853632478632479, "grad_norm": 0.6913973093032837, "learning_rate": 0.00012463138299133447, "loss": 1.0659, "step": 9466 }, { "epoch": 1.6855413105413106, "grad_norm": 0.6998484134674072, "learning_rate": 0.000124617816538286, "loss": 1.0555, "step": 9467 }, { "epoch": 1.6857193732193734, "grad_norm": 0.7313308119773865, "learning_rate": 0.00012460424960290256, "loss": 1.0915, "step": 9468 }, { "epoch": 1.685897435897436, "grad_norm": 0.6790569424629211, "learning_rate": 0.00012459068218544995, "loss": 1.0214, "step": 9469 }, { "epoch": 1.6860754985754984, "grad_norm": 0.6494466662406921, "learning_rate": 0.00012457711428619402, "loss": 0.9476, "step": 9470 }, { "epoch": 1.6862535612535612, "grad_norm": 0.8048526048660278, "learning_rate": 0.0001245635459054006, "loss": 1.1852, "step": 9471 }, { "epoch": 1.686431623931624, "grad_norm": 0.6237879395484924, "learning_rate": 0.0001245499770433355, "loss": 1.0106, "step": 9472 }, { "epoch": 1.6866096866096867, "grad_norm": 0.6282906532287598, "learning_rate": 0.0001245364077002646, "loss": 0.9858, "step": 9473 }, { "epoch": 1.6867877492877494, "grad_norm": 0.7239370346069336, "learning_rate": 0.00012452283787645375, "loss": 0.9586, "step": 9474 }, { "epoch": 1.686965811965812, "grad_norm": 0.6438776850700378, "learning_rate": 0.00012450926757216887, "loss": 0.9198, "step": 9475 }, { "epoch": 1.6871438746438745, "grad_norm": 0.6451360583305359, "learning_rate": 0.00012449569678767578, "loss": 1.0183, "step": 9476 }, { "epoch": 1.6873219373219372, 
"grad_norm": 0.6950216293334961, "learning_rate": 0.0001244821255232404, "loss": 0.9048, "step": 9477 }, { "epoch": 1.6875, "grad_norm": 0.710489809513092, "learning_rate": 0.00012446855377912865, "loss": 1.1596, "step": 9478 }, { "epoch": 1.6876780626780628, "grad_norm": 0.6819305419921875, "learning_rate": 0.0001244549815556064, "loss": 0.8486, "step": 9479 }, { "epoch": 1.6878561253561255, "grad_norm": 0.7185879945755005, "learning_rate": 0.00012444140885293958, "loss": 0.9539, "step": 9480 }, { "epoch": 1.688034188034188, "grad_norm": 0.8181464672088623, "learning_rate": 0.00012442783567139415, "loss": 1.0038, "step": 9481 }, { "epoch": 1.6882122507122506, "grad_norm": 0.47161349654197693, "learning_rate": 0.000124414262011236, "loss": 0.67, "step": 9482 }, { "epoch": 1.6883903133903133, "grad_norm": 0.7752482295036316, "learning_rate": 0.00012440068787273112, "loss": 0.9944, "step": 9483 }, { "epoch": 1.688568376068376, "grad_norm": 0.7119397521018982, "learning_rate": 0.00012438711325614543, "loss": 0.9098, "step": 9484 }, { "epoch": 1.6887464387464388, "grad_norm": 0.7161153554916382, "learning_rate": 0.00012437353816174493, "loss": 1.0003, "step": 9485 }, { "epoch": 1.6889245014245016, "grad_norm": 0.5989507436752319, "learning_rate": 0.0001243599625897956, "loss": 1.0301, "step": 9486 }, { "epoch": 1.689102564102564, "grad_norm": 0.7906841039657593, "learning_rate": 0.00012434638654056334, "loss": 1.0388, "step": 9487 }, { "epoch": 1.6892806267806266, "grad_norm": 0.6679551601409912, "learning_rate": 0.00012433281001431428, "loss": 0.9505, "step": 9488 }, { "epoch": 1.6894586894586894, "grad_norm": 0.7090578675270081, "learning_rate": 0.0001243192330113143, "loss": 0.8616, "step": 9489 }, { "epoch": 1.6896367521367521, "grad_norm": 0.6401308178901672, "learning_rate": 0.00012430565553182949, "loss": 0.9099, "step": 9490 }, { "epoch": 1.6898148148148149, "grad_norm": 0.7360149621963501, "learning_rate": 0.00012429207757612586, "loss": 1.0233, "step": 9491 
}, { "epoch": 1.6899928774928776, "grad_norm": 0.6736137270927429, "learning_rate": 0.00012427849914446946, "loss": 0.9803, "step": 9492 }, { "epoch": 1.6901709401709402, "grad_norm": 0.7728668451309204, "learning_rate": 0.00012426492023712623, "loss": 1.2316, "step": 9493 }, { "epoch": 1.6903490028490027, "grad_norm": 0.789718508720398, "learning_rate": 0.00012425134085436234, "loss": 1.1218, "step": 9494 }, { "epoch": 1.6905270655270654, "grad_norm": 0.7314121723175049, "learning_rate": 0.0001242377609964438, "loss": 1.1294, "step": 9495 }, { "epoch": 1.6907051282051282, "grad_norm": 0.7222046256065369, "learning_rate": 0.0001242241806636367, "loss": 1.0288, "step": 9496 }, { "epoch": 1.690883190883191, "grad_norm": 0.7546363472938538, "learning_rate": 0.00012421059985620708, "loss": 0.8781, "step": 9497 }, { "epoch": 1.6910612535612537, "grad_norm": 0.7502550482749939, "learning_rate": 0.00012419701857442104, "loss": 0.927, "step": 9498 }, { "epoch": 1.6912393162393162, "grad_norm": 0.6244059205055237, "learning_rate": 0.00012418343681854473, "loss": 0.9689, "step": 9499 }, { "epoch": 1.6914173789173788, "grad_norm": 0.7214263677597046, "learning_rate": 0.00012416985458884417, "loss": 1.0842, "step": 9500 }, { "epoch": 1.6915954415954415, "grad_norm": 0.6960242390632629, "learning_rate": 0.00012415627188558555, "loss": 0.9766, "step": 9501 }, { "epoch": 1.6917735042735043, "grad_norm": 0.6687830686569214, "learning_rate": 0.00012414268870903494, "loss": 1.0222, "step": 9502 }, { "epoch": 1.691951566951567, "grad_norm": 0.8611155152320862, "learning_rate": 0.00012412910505945848, "loss": 1.1792, "step": 9503 }, { "epoch": 1.6921296296296298, "grad_norm": 0.6655587553977966, "learning_rate": 0.00012411552093712235, "loss": 0.8763, "step": 9504 }, { "epoch": 1.6923076923076923, "grad_norm": 0.7829837799072266, "learning_rate": 0.00012410193634229268, "loss": 1.0803, "step": 9505 }, { "epoch": 1.6924857549857548, "grad_norm": 0.7951042652130127, "learning_rate": 
0.00012408835127523566, "loss": 1.0925, "step": 9506 }, { "epoch": 1.6926638176638176, "grad_norm": 0.715495228767395, "learning_rate": 0.0001240747657362174, "loss": 1.2411, "step": 9507 }, { "epoch": 1.6928418803418803, "grad_norm": 0.6779513359069824, "learning_rate": 0.00012406117972550414, "loss": 0.8886, "step": 9508 }, { "epoch": 1.693019943019943, "grad_norm": 0.647588312625885, "learning_rate": 0.00012404759324336203, "loss": 1.107, "step": 9509 }, { "epoch": 1.6931980056980058, "grad_norm": 0.7398989796638489, "learning_rate": 0.00012403400629005726, "loss": 1.0256, "step": 9510 }, { "epoch": 1.6933760683760684, "grad_norm": 0.7572638392448425, "learning_rate": 0.0001240204188658561, "loss": 0.9662, "step": 9511 }, { "epoch": 1.693554131054131, "grad_norm": 0.7044163346290588, "learning_rate": 0.00012400683097102473, "loss": 1.1388, "step": 9512 }, { "epoch": 1.6937321937321936, "grad_norm": 0.7889094948768616, "learning_rate": 0.00012399324260582936, "loss": 1.0453, "step": 9513 }, { "epoch": 1.6939102564102564, "grad_norm": 0.7977854609489441, "learning_rate": 0.00012397965377053627, "loss": 1.015, "step": 9514 }, { "epoch": 1.6940883190883191, "grad_norm": 0.6223814487457275, "learning_rate": 0.00012396606446541165, "loss": 0.7985, "step": 9515 }, { "epoch": 1.694266381766382, "grad_norm": 0.8307462334632874, "learning_rate": 0.0001239524746907218, "loss": 0.8899, "step": 9516 }, { "epoch": 1.6944444444444444, "grad_norm": 0.7780544757843018, "learning_rate": 0.00012393888444673295, "loss": 0.9406, "step": 9517 }, { "epoch": 1.6946225071225072, "grad_norm": 0.6894499659538269, "learning_rate": 0.0001239252937337114, "loss": 0.9412, "step": 9518 }, { "epoch": 1.6948005698005697, "grad_norm": 0.7000680565834045, "learning_rate": 0.00012391170255192342, "loss": 1.0314, "step": 9519 }, { "epoch": 1.6949786324786325, "grad_norm": 0.6772416830062866, "learning_rate": 0.0001238981109016353, "loss": 0.9153, "step": 9520 }, { "epoch": 1.6951566951566952, 
"grad_norm": 0.7069609761238098, "learning_rate": 0.00012388451878311333, "loss": 1.1777, "step": 9521 }, { "epoch": 1.695334757834758, "grad_norm": 0.6138432621955872, "learning_rate": 0.00012387092619662386, "loss": 0.8085, "step": 9522 }, { "epoch": 1.6955128205128205, "grad_norm": 0.6122859716415405, "learning_rate": 0.00012385733314243313, "loss": 0.8534, "step": 9523 }, { "epoch": 1.6956908831908832, "grad_norm": 0.7499903440475464, "learning_rate": 0.00012384373962080755, "loss": 0.9329, "step": 9524 }, { "epoch": 1.6958689458689458, "grad_norm": 0.6413441896438599, "learning_rate": 0.00012383014563201343, "loss": 0.9609, "step": 9525 }, { "epoch": 1.6960470085470085, "grad_norm": 0.7467969059944153, "learning_rate": 0.0001238165511763171, "loss": 0.9142, "step": 9526 }, { "epoch": 1.6962250712250713, "grad_norm": 0.6540884375572205, "learning_rate": 0.00012380295625398494, "loss": 0.9503, "step": 9527 }, { "epoch": 1.696403133903134, "grad_norm": 0.6298567652702332, "learning_rate": 0.00012378936086528326, "loss": 0.8853, "step": 9528 }, { "epoch": 1.6965811965811965, "grad_norm": 0.8003417253494263, "learning_rate": 0.00012377576501047845, "loss": 0.969, "step": 9529 }, { "epoch": 1.6967592592592593, "grad_norm": 0.8318493962287903, "learning_rate": 0.00012376216868983697, "loss": 1.1413, "step": 9530 }, { "epoch": 1.6969373219373218, "grad_norm": 0.8294426202774048, "learning_rate": 0.00012374857190362515, "loss": 1.1885, "step": 9531 }, { "epoch": 1.6971153846153846, "grad_norm": 0.7502955198287964, "learning_rate": 0.0001237349746521094, "loss": 1.233, "step": 9532 }, { "epoch": 1.6972934472934473, "grad_norm": 0.6306588649749756, "learning_rate": 0.00012372137693555612, "loss": 1.2255, "step": 9533 }, { "epoch": 1.69747150997151, "grad_norm": 0.7802746891975403, "learning_rate": 0.0001237077787542317, "loss": 1.2054, "step": 9534 }, { "epoch": 1.6976495726495726, "grad_norm": 0.685114860534668, "learning_rate": 0.00012369418010840265, "loss": 0.9865, 
"step": 9535 }, { "epoch": 1.6978276353276354, "grad_norm": 0.6656857132911682, "learning_rate": 0.00012368058099833536, "loss": 1.1579, "step": 9536 }, { "epoch": 1.698005698005698, "grad_norm": 0.6596674919128418, "learning_rate": 0.00012366698142429625, "loss": 0.9104, "step": 9537 }, { "epoch": 1.6981837606837606, "grad_norm": 0.6025584936141968, "learning_rate": 0.00012365338138655183, "loss": 1.117, "step": 9538 }, { "epoch": 1.6983618233618234, "grad_norm": 0.671585202217102, "learning_rate": 0.0001236397808853685, "loss": 1.0271, "step": 9539 }, { "epoch": 1.6985398860398861, "grad_norm": 0.7467984557151794, "learning_rate": 0.0001236261799210128, "loss": 1.0411, "step": 9540 }, { "epoch": 1.6987179487179487, "grad_norm": 0.6251640915870667, "learning_rate": 0.0001236125784937512, "loss": 0.7154, "step": 9541 }, { "epoch": 1.6988960113960114, "grad_norm": 0.7560956478118896, "learning_rate": 0.00012359897660385016, "loss": 1.0048, "step": 9542 }, { "epoch": 1.699074074074074, "grad_norm": 0.6144903302192688, "learning_rate": 0.00012358537425157618, "loss": 1.1294, "step": 9543 }, { "epoch": 1.6992521367521367, "grad_norm": 0.7839425206184387, "learning_rate": 0.00012357177143719578, "loss": 1.0725, "step": 9544 }, { "epoch": 1.6994301994301995, "grad_norm": 0.6488651037216187, "learning_rate": 0.00012355816816097553, "loss": 0.9267, "step": 9545 }, { "epoch": 1.6996082621082622, "grad_norm": 0.6848782896995544, "learning_rate": 0.00012354456442318187, "loss": 1.0426, "step": 9546 }, { "epoch": 1.6997863247863247, "grad_norm": 0.7164611220359802, "learning_rate": 0.0001235309602240814, "loss": 0.8208, "step": 9547 }, { "epoch": 1.6999643874643875, "grad_norm": 0.6725530624389648, "learning_rate": 0.0001235173555639406, "loss": 0.9366, "step": 9548 }, { "epoch": 1.70014245014245, "grad_norm": 0.6958004236221313, "learning_rate": 0.00012350375044302612, "loss": 1.0185, "step": 9549 }, { "epoch": 1.7003205128205128, "grad_norm": 0.8035947680473328, 
"learning_rate": 0.00012349014486160445, "loss": 1.065, "step": 9550 }, { "epoch": 1.7004985754985755, "grad_norm": 0.6705633997917175, "learning_rate": 0.00012347653881994222, "loss": 0.8381, "step": 9551 }, { "epoch": 1.7006766381766383, "grad_norm": 0.6652300357818604, "learning_rate": 0.00012346293231830596, "loss": 1.1428, "step": 9552 }, { "epoch": 1.7008547008547008, "grad_norm": 0.6719335913658142, "learning_rate": 0.0001234493253569623, "loss": 1.0138, "step": 9553 }, { "epoch": 1.7010327635327636, "grad_norm": 0.746981680393219, "learning_rate": 0.0001234357179361778, "loss": 1.1169, "step": 9554 }, { "epoch": 1.701210826210826, "grad_norm": 0.6768170595169067, "learning_rate": 0.0001234221100562191, "loss": 0.9065, "step": 9555 }, { "epoch": 1.7013888888888888, "grad_norm": 0.7127171754837036, "learning_rate": 0.00012340850171735278, "loss": 0.9467, "step": 9556 }, { "epoch": 1.7015669515669516, "grad_norm": 0.6802694797515869, "learning_rate": 0.00012339489291984554, "loss": 0.8938, "step": 9557 }, { "epoch": 1.7017450142450143, "grad_norm": 0.7101455926895142, "learning_rate": 0.00012338128366396394, "loss": 1.1939, "step": 9558 }, { "epoch": 1.7019230769230769, "grad_norm": 0.621223509311676, "learning_rate": 0.00012336767394997467, "loss": 0.7583, "step": 9559 }, { "epoch": 1.7021011396011396, "grad_norm": 0.7130763530731201, "learning_rate": 0.00012335406377814439, "loss": 0.8684, "step": 9560 }, { "epoch": 1.7022792022792022, "grad_norm": 0.6761086583137512, "learning_rate": 0.00012334045314873972, "loss": 1.0197, "step": 9561 }, { "epoch": 1.702457264957265, "grad_norm": 0.7030459642410278, "learning_rate": 0.00012332684206202736, "loss": 0.8627, "step": 9562 }, { "epoch": 1.7026353276353277, "grad_norm": 0.6278037428855896, "learning_rate": 0.000123313230518274, "loss": 0.8953, "step": 9563 }, { "epoch": 1.7028133903133904, "grad_norm": 0.6450623869895935, "learning_rate": 0.00012329961851774627, "loss": 0.8826, "step": 9564 }, { "epoch": 
1.702991452991453, "grad_norm": 0.7324244976043701, "learning_rate": 0.00012328600606071097, "loss": 1.0133, "step": 9565 }, { "epoch": 1.7031695156695157, "grad_norm": 0.6560033559799194, "learning_rate": 0.00012327239314743473, "loss": 0.9601, "step": 9566 }, { "epoch": 1.7033475783475782, "grad_norm": 0.6693514585494995, "learning_rate": 0.0001232587797781843, "loss": 0.9447, "step": 9567 }, { "epoch": 1.703525641025641, "grad_norm": 0.6403199434280396, "learning_rate": 0.00012324516595322638, "loss": 0.8554, "step": 9568 }, { "epoch": 1.7037037037037037, "grad_norm": 0.8290280103683472, "learning_rate": 0.00012323155167282774, "loss": 1.1877, "step": 9569 }, { "epoch": 1.7038817663817665, "grad_norm": 0.7207778692245483, "learning_rate": 0.00012321793693725509, "loss": 1.0978, "step": 9570 }, { "epoch": 1.7040598290598292, "grad_norm": 0.8794265985488892, "learning_rate": 0.00012320432174677519, "loss": 0.9387, "step": 9571 }, { "epoch": 1.7042378917378918, "grad_norm": 0.6683359146118164, "learning_rate": 0.00012319070610165484, "loss": 0.9227, "step": 9572 }, { "epoch": 1.7044159544159543, "grad_norm": 0.7342001795768738, "learning_rate": 0.00012317709000216076, "loss": 0.9453, "step": 9573 }, { "epoch": 1.704594017094017, "grad_norm": 0.6315770149230957, "learning_rate": 0.00012316347344855973, "loss": 0.8263, "step": 9574 }, { "epoch": 1.7047720797720798, "grad_norm": 0.7697155475616455, "learning_rate": 0.00012314985644111857, "loss": 1.0238, "step": 9575 }, { "epoch": 1.7049501424501425, "grad_norm": 0.6674068570137024, "learning_rate": 0.00012313623898010408, "loss": 1.0823, "step": 9576 }, { "epoch": 1.7051282051282053, "grad_norm": 0.6995484232902527, "learning_rate": 0.00012312262106578304, "loss": 1.2001, "step": 9577 }, { "epoch": 1.7053062678062678, "grad_norm": 0.7639257907867432, "learning_rate": 0.00012310900269842226, "loss": 1.3438, "step": 9578 }, { "epoch": 1.7054843304843303, "grad_norm": 0.6486390233039856, "learning_rate": 
0.00012309538387828857, "loss": 0.9924, "step": 9579 }, { "epoch": 1.705662393162393, "grad_norm": 0.6737813949584961, "learning_rate": 0.00012308176460564885, "loss": 0.8722, "step": 9580 }, { "epoch": 1.7058404558404558, "grad_norm": 0.6462090611457825, "learning_rate": 0.00012306814488076987, "loss": 1.1013, "step": 9581 }, { "epoch": 1.7060185185185186, "grad_norm": 0.7887832522392273, "learning_rate": 0.00012305452470391852, "loss": 0.9998, "step": 9582 }, { "epoch": 1.7061965811965814, "grad_norm": 0.6345070004463196, "learning_rate": 0.00012304090407536165, "loss": 1.0305, "step": 9583 }, { "epoch": 1.7063746438746439, "grad_norm": 0.6398460268974304, "learning_rate": 0.0001230272829953661, "loss": 1.2243, "step": 9584 }, { "epoch": 1.7065527065527064, "grad_norm": 0.6501944065093994, "learning_rate": 0.00012301366146419879, "loss": 0.9425, "step": 9585 }, { "epoch": 1.7067307692307692, "grad_norm": 0.6406761407852173, "learning_rate": 0.00012300003948212661, "loss": 0.948, "step": 9586 }, { "epoch": 1.706908831908832, "grad_norm": 0.7114266157150269, "learning_rate": 0.00012298641704941644, "loss": 1.1291, "step": 9587 }, { "epoch": 1.7070868945868947, "grad_norm": 0.6653099656105042, "learning_rate": 0.00012297279416633515, "loss": 1.0156, "step": 9588 }, { "epoch": 1.7072649572649574, "grad_norm": 0.5970917344093323, "learning_rate": 0.0001229591708331497, "loss": 0.9424, "step": 9589 }, { "epoch": 1.70744301994302, "grad_norm": 0.6861461400985718, "learning_rate": 0.00012294554705012694, "loss": 0.7581, "step": 9590 }, { "epoch": 1.7076210826210825, "grad_norm": 0.6930568218231201, "learning_rate": 0.00012293192281753393, "loss": 1.0544, "step": 9591 }, { "epoch": 1.7077991452991452, "grad_norm": 0.7420656085014343, "learning_rate": 0.00012291829813563748, "loss": 0.7092, "step": 9592 }, { "epoch": 1.707977207977208, "grad_norm": 0.6607801914215088, "learning_rate": 0.0001229046730047046, "loss": 0.5544, "step": 9593 }, { "epoch": 1.7081552706552707, 
"grad_norm": 0.8419139385223389, "learning_rate": 0.00012289104742500224, "loss": 1.0443, "step": 9594 }, { "epoch": 1.7083333333333335, "grad_norm": 0.6774617433547974, "learning_rate": 0.00012287742139679734, "loss": 1.0098, "step": 9595 }, { "epoch": 1.708511396011396, "grad_norm": 0.7517698407173157, "learning_rate": 0.0001228637949203569, "loss": 1.1145, "step": 9596 }, { "epoch": 1.7086894586894585, "grad_norm": 0.6048635840415955, "learning_rate": 0.00012285016799594791, "loss": 0.7398, "step": 9597 }, { "epoch": 1.7088675213675213, "grad_norm": 0.8054425716400146, "learning_rate": 0.00012283654062383734, "loss": 1.0893, "step": 9598 }, { "epoch": 1.709045584045584, "grad_norm": 0.8694897294044495, "learning_rate": 0.0001228229128042922, "loss": 1.2366, "step": 9599 }, { "epoch": 1.7092236467236468, "grad_norm": 0.7460638880729675, "learning_rate": 0.00012280928453757946, "loss": 1.1753, "step": 9600 }, { "epoch": 1.7094017094017095, "grad_norm": 0.6714958548545837, "learning_rate": 0.00012279565582396618, "loss": 1.0473, "step": 9601 }, { "epoch": 1.709579772079772, "grad_norm": 0.6893340945243835, "learning_rate": 0.00012278202666371937, "loss": 1.2761, "step": 9602 }, { "epoch": 1.7097578347578346, "grad_norm": 0.6816153526306152, "learning_rate": 0.00012276839705710612, "loss": 0.991, "step": 9603 }, { "epoch": 1.7099358974358974, "grad_norm": 0.6961633563041687, "learning_rate": 0.0001227547670043934, "loss": 1.0634, "step": 9604 }, { "epoch": 1.71011396011396, "grad_norm": 0.643734872341156, "learning_rate": 0.0001227411365058483, "loss": 0.8672, "step": 9605 }, { "epoch": 1.7102920227920229, "grad_norm": 0.7313315272331238, "learning_rate": 0.00012272750556173784, "loss": 1.1152, "step": 9606 }, { "epoch": 1.7104700854700856, "grad_norm": 0.6464954614639282, "learning_rate": 0.00012271387417232916, "loss": 0.8798, "step": 9607 }, { "epoch": 1.7106481481481481, "grad_norm": 0.8365204334259033, "learning_rate": 0.00012270024233788929, "loss": 1.213, 
"step": 9608 }, { "epoch": 1.7108262108262107, "grad_norm": 0.6460705995559692, "learning_rate": 0.0001226866100586853, "loss": 0.9232, "step": 9609 }, { "epoch": 1.7110042735042734, "grad_norm": 0.6446022987365723, "learning_rate": 0.00012267297733498434, "loss": 0.8295, "step": 9610 }, { "epoch": 1.7111823361823362, "grad_norm": 0.7692012190818787, "learning_rate": 0.00012265934416705345, "loss": 1.0715, "step": 9611 }, { "epoch": 1.711360398860399, "grad_norm": 0.671154260635376, "learning_rate": 0.0001226457105551598, "loss": 0.9752, "step": 9612 }, { "epoch": 1.7115384615384617, "grad_norm": 0.6525935530662537, "learning_rate": 0.00012263207649957053, "loss": 1.09, "step": 9613 }, { "epoch": 1.7117165242165242, "grad_norm": 0.6984749436378479, "learning_rate": 0.0001226184420005527, "loss": 0.9956, "step": 9614 }, { "epoch": 1.7118945868945867, "grad_norm": 0.6769809126853943, "learning_rate": 0.0001226048070583735, "loss": 1.0151, "step": 9615 }, { "epoch": 1.7120726495726495, "grad_norm": 0.6085978746414185, "learning_rate": 0.00012259117167330005, "loss": 0.8706, "step": 9616 }, { "epoch": 1.7122507122507122, "grad_norm": 0.7335749268531799, "learning_rate": 0.00012257753584559952, "loss": 1.0575, "step": 9617 }, { "epoch": 1.712428774928775, "grad_norm": 0.7392038106918335, "learning_rate": 0.0001225638995755391, "loss": 0.8763, "step": 9618 }, { "epoch": 1.7126068376068377, "grad_norm": 0.6708608865737915, "learning_rate": 0.00012255026286338592, "loss": 1.131, "step": 9619 }, { "epoch": 1.7127849002849003, "grad_norm": 0.726657509803772, "learning_rate": 0.0001225366257094072, "loss": 1.0569, "step": 9620 }, { "epoch": 1.7129629629629628, "grad_norm": 0.749098002910614, "learning_rate": 0.0001225229881138701, "loss": 0.9196, "step": 9621 }, { "epoch": 1.7131410256410255, "grad_norm": 0.6550580263137817, "learning_rate": 0.00012250935007704182, "loss": 1.0244, "step": 9622 }, { "epoch": 1.7133190883190883, "grad_norm": 0.7714282274246216, "learning_rate": 
0.00012249571159918962, "loss": 1.1025, "step": 9623 }, { "epoch": 1.713497150997151, "grad_norm": 0.7869850397109985, "learning_rate": 0.00012248207268058064, "loss": 0.9238, "step": 9624 }, { "epoch": 1.7136752136752138, "grad_norm": 0.7187856435775757, "learning_rate": 0.00012246843332148216, "loss": 1.081, "step": 9625 }, { "epoch": 1.7138532763532763, "grad_norm": 0.6634210348129272, "learning_rate": 0.00012245479352216142, "loss": 1.1944, "step": 9626 }, { "epoch": 1.714031339031339, "grad_norm": 0.6609212160110474, "learning_rate": 0.00012244115328288567, "loss": 0.9613, "step": 9627 }, { "epoch": 1.7142094017094016, "grad_norm": 0.7906867861747742, "learning_rate": 0.0001224275126039221, "loss": 1.2692, "step": 9628 }, { "epoch": 1.7143874643874644, "grad_norm": 0.8037096858024597, "learning_rate": 0.000122413871485538, "loss": 0.9823, "step": 9629 }, { "epoch": 1.7145655270655271, "grad_norm": 0.7740145921707153, "learning_rate": 0.00012240022992800068, "loss": 1.1937, "step": 9630 }, { "epoch": 1.7147435897435899, "grad_norm": 0.595372200012207, "learning_rate": 0.00012238658793157738, "loss": 0.9153, "step": 9631 }, { "epoch": 1.7149216524216524, "grad_norm": 0.6671900749206543, "learning_rate": 0.0001223729454965354, "loss": 1.0895, "step": 9632 }, { "epoch": 1.7150997150997151, "grad_norm": 0.5805774927139282, "learning_rate": 0.000122359302623142, "loss": 1.0001, "step": 9633 }, { "epoch": 1.7152777777777777, "grad_norm": 0.8851602673530579, "learning_rate": 0.00012234565931166456, "loss": 1.2828, "step": 9634 }, { "epoch": 1.7154558404558404, "grad_norm": 0.6960011720657349, "learning_rate": 0.0001223320155623703, "loss": 1.0622, "step": 9635 }, { "epoch": 1.7156339031339032, "grad_norm": 0.5587009191513062, "learning_rate": 0.0001223183713755266, "loss": 0.83, "step": 9636 }, { "epoch": 1.715811965811966, "grad_norm": 0.6892730593681335, "learning_rate": 0.00012230472675140076, "loss": 0.9214, "step": 9637 }, { "epoch": 1.7159900284900285, 
"grad_norm": 0.6545090079307556, "learning_rate": 0.00012229108169026017, "loss": 0.829, "step": 9638 }, { "epoch": 1.7161680911680912, "grad_norm": 0.6539101600646973, "learning_rate": 0.00012227743619237213, "loss": 1.0686, "step": 9639 }, { "epoch": 1.7163461538461537, "grad_norm": 0.5887274146080017, "learning_rate": 0.000122263790258004, "loss": 0.9285, "step": 9640 }, { "epoch": 1.7165242165242165, "grad_norm": 0.6328918933868408, "learning_rate": 0.00012225014388742313, "loss": 0.9684, "step": 9641 }, { "epoch": 1.7167022792022792, "grad_norm": 0.6377436518669128, "learning_rate": 0.00012223649708089694, "loss": 0.9425, "step": 9642 }, { "epoch": 1.716880341880342, "grad_norm": 0.6967392563819885, "learning_rate": 0.00012222284983869275, "loss": 0.9342, "step": 9643 }, { "epoch": 1.7170584045584045, "grad_norm": 0.7051317691802979, "learning_rate": 0.00012220920216107802, "loss": 1.1843, "step": 9644 }, { "epoch": 1.7172364672364673, "grad_norm": 0.6864503622055054, "learning_rate": 0.00012219555404832007, "loss": 1.0371, "step": 9645 }, { "epoch": 1.7174145299145298, "grad_norm": 0.583454430103302, "learning_rate": 0.00012218190550068638, "loss": 0.6774, "step": 9646 }, { "epoch": 1.7175925925925926, "grad_norm": 0.6755677461624146, "learning_rate": 0.0001221682565184443, "loss": 0.9517, "step": 9647 }, { "epoch": 1.7177706552706553, "grad_norm": 0.7230031490325928, "learning_rate": 0.0001221546071018613, "loss": 1.0385, "step": 9648 }, { "epoch": 1.717948717948718, "grad_norm": 0.7381200194358826, "learning_rate": 0.0001221409572512048, "loss": 0.9893, "step": 9649 }, { "epoch": 1.7181267806267806, "grad_norm": 0.7079094648361206, "learning_rate": 0.0001221273069667422, "loss": 0.7793, "step": 9650 }, { "epoch": 1.7183048433048433, "grad_norm": 0.6666881442070007, "learning_rate": 0.00012211365624874106, "loss": 0.9752, "step": 9651 }, { "epoch": 1.7184829059829059, "grad_norm": 0.6196922659873962, "learning_rate": 0.00012210000509746868, "loss": 0.922, 
"step": 9652 }, { "epoch": 1.7186609686609686, "grad_norm": 0.657879650592804, "learning_rate": 0.00012208635351319266, "loss": 1.2583, "step": 9653 }, { "epoch": 1.7188390313390314, "grad_norm": 0.7240459322929382, "learning_rate": 0.00012207270149618043, "loss": 0.8479, "step": 9654 }, { "epoch": 1.7190170940170941, "grad_norm": 0.8293825387954712, "learning_rate": 0.00012205904904669945, "loss": 0.9092, "step": 9655 }, { "epoch": 1.7191951566951567, "grad_norm": 0.6907553672790527, "learning_rate": 0.0001220453961650172, "loss": 1.0543, "step": 9656 }, { "epoch": 1.7193732193732194, "grad_norm": 0.7178300023078918, "learning_rate": 0.00012203174285140124, "loss": 0.9147, "step": 9657 }, { "epoch": 1.719551282051282, "grad_norm": 0.7037166357040405, "learning_rate": 0.00012201808910611905, "loss": 0.8685, "step": 9658 }, { "epoch": 1.7197293447293447, "grad_norm": 0.5850751996040344, "learning_rate": 0.00012200443492943813, "loss": 0.72, "step": 9659 }, { "epoch": 1.7199074074074074, "grad_norm": 0.744239330291748, "learning_rate": 0.00012199078032162603, "loss": 0.9717, "step": 9660 }, { "epoch": 1.7200854700854702, "grad_norm": 0.6509126424789429, "learning_rate": 0.00012197712528295025, "loss": 0.9768, "step": 9661 }, { "epoch": 1.7202635327635327, "grad_norm": 0.623220682144165, "learning_rate": 0.00012196346981367837, "loss": 0.9824, "step": 9662 }, { "epoch": 1.7204415954415955, "grad_norm": 0.6376451849937439, "learning_rate": 0.00012194981391407792, "loss": 0.8228, "step": 9663 }, { "epoch": 1.720619658119658, "grad_norm": 0.794830322265625, "learning_rate": 0.00012193615758441648, "loss": 0.9168, "step": 9664 }, { "epoch": 1.7207977207977208, "grad_norm": 0.7812975645065308, "learning_rate": 0.0001219225008249616, "loss": 0.8625, "step": 9665 }, { "epoch": 1.7209757834757835, "grad_norm": 0.6843218207359314, "learning_rate": 0.0001219088436359808, "loss": 1.0176, "step": 9666 }, { "epoch": 1.7211538461538463, "grad_norm": 0.6924905180931091, 
"learning_rate": 0.00012189518601774178, "loss": 0.855, "step": 9667 }, { "epoch": 1.7213319088319088, "grad_norm": 0.6348826289176941, "learning_rate": 0.00012188152797051202, "loss": 1.1596, "step": 9668 }, { "epoch": 1.7215099715099715, "grad_norm": 0.7170482873916626, "learning_rate": 0.00012186786949455922, "loss": 0.9811, "step": 9669 }, { "epoch": 1.721688034188034, "grad_norm": 0.7471763491630554, "learning_rate": 0.00012185421059015094, "loss": 1.0925, "step": 9670 }, { "epoch": 1.7218660968660968, "grad_norm": 0.6771119236946106, "learning_rate": 0.00012184055125755481, "loss": 0.9403, "step": 9671 }, { "epoch": 1.7220441595441596, "grad_norm": 0.4335343539714813, "learning_rate": 0.0001218268914970384, "loss": 0.4925, "step": 9672 }, { "epoch": 1.7222222222222223, "grad_norm": 0.6652585864067078, "learning_rate": 0.00012181323130886943, "loss": 0.7684, "step": 9673 }, { "epoch": 1.7224002849002849, "grad_norm": 0.6465467810630798, "learning_rate": 0.00012179957069331548, "loss": 0.9011, "step": 9674 }, { "epoch": 1.7225783475783476, "grad_norm": 0.6725688576698303, "learning_rate": 0.00012178590965064427, "loss": 0.9563, "step": 9675 }, { "epoch": 1.7227564102564101, "grad_norm": 0.6223418712615967, "learning_rate": 0.00012177224818112341, "loss": 0.9099, "step": 9676 }, { "epoch": 1.7229344729344729, "grad_norm": 0.79325270652771, "learning_rate": 0.00012175858628502053, "loss": 1.0318, "step": 9677 }, { "epoch": 1.7231125356125356, "grad_norm": 0.6735602617263794, "learning_rate": 0.0001217449239626034, "loss": 1.0797, "step": 9678 }, { "epoch": 1.7232905982905984, "grad_norm": 0.7082492113113403, "learning_rate": 0.00012173126121413962, "loss": 1.1341, "step": 9679 }, { "epoch": 1.723468660968661, "grad_norm": 0.6563859581947327, "learning_rate": 0.00012171759803989696, "loss": 0.8778, "step": 9680 }, { "epoch": 1.7236467236467237, "grad_norm": 0.6867792010307312, "learning_rate": 0.00012170393444014306, "loss": 0.8301, "step": 9681 }, { "epoch": 
1.7238247863247862, "grad_norm": 0.7870511412620544, "learning_rate": 0.00012169027041514562, "loss": 0.9165, "step": 9682 }, { "epoch": 1.724002849002849, "grad_norm": 0.8006493449211121, "learning_rate": 0.00012167660596517241, "loss": 1.0395, "step": 9683 }, { "epoch": 1.7241809116809117, "grad_norm": 0.6936125159263611, "learning_rate": 0.00012166294109049114, "loss": 1.1037, "step": 9684 }, { "epoch": 1.7243589743589745, "grad_norm": 0.8176514506340027, "learning_rate": 0.00012164927579136956, "loss": 0.8791, "step": 9685 }, { "epoch": 1.7245370370370372, "grad_norm": 0.6948300004005432, "learning_rate": 0.00012163561006807537, "loss": 0.9292, "step": 9686 }, { "epoch": 1.7247150997150997, "grad_norm": 0.6237453818321228, "learning_rate": 0.00012162194392087634, "loss": 0.8553, "step": 9687 }, { "epoch": 1.7248931623931623, "grad_norm": 0.6198007464408875, "learning_rate": 0.00012160827735004021, "loss": 0.9599, "step": 9688 }, { "epoch": 1.725071225071225, "grad_norm": 0.639838695526123, "learning_rate": 0.00012159461035583482, "loss": 0.9328, "step": 9689 }, { "epoch": 1.7252492877492878, "grad_norm": 0.7264436483383179, "learning_rate": 0.00012158094293852789, "loss": 1.0247, "step": 9690 }, { "epoch": 1.7254273504273505, "grad_norm": 0.6320534348487854, "learning_rate": 0.00012156727509838721, "loss": 1.1222, "step": 9691 }, { "epoch": 1.7256054131054133, "grad_norm": 0.6204122304916382, "learning_rate": 0.00012155360683568056, "loss": 0.9765, "step": 9692 }, { "epoch": 1.7257834757834758, "grad_norm": 0.7026457190513611, "learning_rate": 0.00012153993815067579, "loss": 1.0178, "step": 9693 }, { "epoch": 1.7259615384615383, "grad_norm": 0.6471006870269775, "learning_rate": 0.00012152626904364067, "loss": 1.0035, "step": 9694 }, { "epoch": 1.726139601139601, "grad_norm": 0.6875706911087036, "learning_rate": 0.00012151259951484301, "loss": 0.7921, "step": 9695 }, { "epoch": 1.7263176638176638, "grad_norm": 0.6963251233100891, "learning_rate": 
0.00012149892956455067, "loss": 0.9677, "step": 9696 }, { "epoch": 1.7264957264957266, "grad_norm": 0.9077282547950745, "learning_rate": 0.00012148525919303142, "loss": 0.9362, "step": 9697 }, { "epoch": 1.7266737891737893, "grad_norm": 0.7347434163093567, "learning_rate": 0.00012147158840055319, "loss": 0.8712, "step": 9698 }, { "epoch": 1.7268518518518519, "grad_norm": 0.7206630110740662, "learning_rate": 0.00012145791718738377, "loss": 1.032, "step": 9699 }, { "epoch": 1.7270299145299144, "grad_norm": 0.7174662947654724, "learning_rate": 0.00012144424555379106, "loss": 0.954, "step": 9700 }, { "epoch": 1.7272079772079771, "grad_norm": 0.7442345023155212, "learning_rate": 0.0001214305735000429, "loss": 1.0709, "step": 9701 }, { "epoch": 1.72738603988604, "grad_norm": 0.6154376268386841, "learning_rate": 0.00012141690102640715, "loss": 0.9365, "step": 9702 }, { "epoch": 1.7275641025641026, "grad_norm": 0.6213796734809875, "learning_rate": 0.00012140322813315172, "loss": 0.8337, "step": 9703 }, { "epoch": 1.7277421652421654, "grad_norm": 0.7682011127471924, "learning_rate": 0.0001213895548205445, "loss": 1.1579, "step": 9704 }, { "epoch": 1.727920227920228, "grad_norm": 0.6796970963478088, "learning_rate": 0.0001213758810888534, "loss": 0.8875, "step": 9705 }, { "epoch": 1.7280982905982905, "grad_norm": 0.7203732132911682, "learning_rate": 0.0001213622069383463, "loss": 0.7827, "step": 9706 }, { "epoch": 1.7282763532763532, "grad_norm": 0.6151877045631409, "learning_rate": 0.00012134853236929111, "loss": 1.0282, "step": 9707 }, { "epoch": 1.728454415954416, "grad_norm": 0.6665124297142029, "learning_rate": 0.0001213348573819558, "loss": 1.0636, "step": 9708 }, { "epoch": 1.7286324786324787, "grad_norm": 0.7334614396095276, "learning_rate": 0.00012132118197660829, "loss": 1.0889, "step": 9709 }, { "epoch": 1.7288105413105415, "grad_norm": 0.7267759442329407, "learning_rate": 0.00012130750615351649, "loss": 1.096, "step": 9710 }, { "epoch": 1.728988603988604, 
"grad_norm": 0.6542944312095642, "learning_rate": 0.00012129382991294837, "loss": 1.0855, "step": 9711 }, { "epoch": 1.7291666666666665, "grad_norm": 0.694523274898529, "learning_rate": 0.00012128015325517193, "loss": 0.8482, "step": 9712 }, { "epoch": 1.7293447293447293, "grad_norm": 0.7879082560539246, "learning_rate": 0.00012126647618045504, "loss": 1.2356, "step": 9713 }, { "epoch": 1.729522792022792, "grad_norm": 0.7108420729637146, "learning_rate": 0.00012125279868906574, "loss": 1.0185, "step": 9714 }, { "epoch": 1.7297008547008548, "grad_norm": 0.6928725838661194, "learning_rate": 0.000121239120781272, "loss": 1.1507, "step": 9715 }, { "epoch": 1.7298789173789175, "grad_norm": 0.6195241212844849, "learning_rate": 0.00012122544245734182, "loss": 0.8656, "step": 9716 }, { "epoch": 1.73005698005698, "grad_norm": 0.5962017774581909, "learning_rate": 0.00012121176371754317, "loss": 0.918, "step": 9717 }, { "epoch": 1.7302350427350426, "grad_norm": 0.7409394979476929, "learning_rate": 0.00012119808456214407, "loss": 1.0283, "step": 9718 }, { "epoch": 1.7304131054131053, "grad_norm": 0.6571973562240601, "learning_rate": 0.00012118440499141257, "loss": 1.1015, "step": 9719 }, { "epoch": 1.730591168091168, "grad_norm": 0.681394100189209, "learning_rate": 0.00012117072500561664, "loss": 0.8247, "step": 9720 }, { "epoch": 1.7307692307692308, "grad_norm": 0.7278251647949219, "learning_rate": 0.00012115704460502432, "loss": 1.0693, "step": 9721 }, { "epoch": 1.7309472934472936, "grad_norm": 0.6569405794143677, "learning_rate": 0.0001211433637899037, "loss": 0.8992, "step": 9722 }, { "epoch": 1.7311253561253561, "grad_norm": 0.6305136680603027, "learning_rate": 0.00012112968256052272, "loss": 0.8543, "step": 9723 }, { "epoch": 1.7313034188034186, "grad_norm": 0.6111339330673218, "learning_rate": 0.00012111600091714956, "loss": 0.991, "step": 9724 }, { "epoch": 1.7314814814814814, "grad_norm": 0.646973192691803, "learning_rate": 0.00012110231886005223, "loss": 0.8855, 
"step": 9725 }, { "epoch": 1.7316595441595442, "grad_norm": 0.7054407000541687, "learning_rate": 0.00012108863638949879, "loss": 1.0816, "step": 9726 }, { "epoch": 1.731837606837607, "grad_norm": 0.6592162847518921, "learning_rate": 0.00012107495350575729, "loss": 1.0961, "step": 9727 }, { "epoch": 1.7320156695156697, "grad_norm": 0.6615595817565918, "learning_rate": 0.00012106127020909587, "loss": 0.9669, "step": 9728 }, { "epoch": 1.7321937321937322, "grad_norm": 0.9030881524085999, "learning_rate": 0.00012104758649978263, "loss": 0.9438, "step": 9729 }, { "epoch": 1.7323717948717947, "grad_norm": 0.6776516437530518, "learning_rate": 0.00012103390237808566, "loss": 0.8967, "step": 9730 }, { "epoch": 1.7325498575498575, "grad_norm": 0.6010605096817017, "learning_rate": 0.00012102021784427306, "loss": 0.8893, "step": 9731 }, { "epoch": 1.7327279202279202, "grad_norm": 0.6540384292602539, "learning_rate": 0.00012100653289861295, "loss": 0.9328, "step": 9732 }, { "epoch": 1.732905982905983, "grad_norm": 0.6836950182914734, "learning_rate": 0.00012099284754137345, "loss": 0.9019, "step": 9733 }, { "epoch": 1.7330840455840457, "grad_norm": 0.7597874402999878, "learning_rate": 0.00012097916177282274, "loss": 1.0093, "step": 9734 }, { "epoch": 1.7332621082621082, "grad_norm": 0.7686513066291809, "learning_rate": 0.00012096547559322892, "loss": 0.8685, "step": 9735 }, { "epoch": 1.7334401709401708, "grad_norm": 0.613777220249176, "learning_rate": 0.0001209517890028602, "loss": 0.8317, "step": 9736 }, { "epoch": 1.7336182336182335, "grad_norm": 0.6788455843925476, "learning_rate": 0.00012093810200198466, "loss": 0.866, "step": 9737 }, { "epoch": 1.7337962962962963, "grad_norm": 0.616801381111145, "learning_rate": 0.00012092441459087047, "loss": 0.8299, "step": 9738 }, { "epoch": 1.733974358974359, "grad_norm": 0.731987476348877, "learning_rate": 0.00012091072676978589, "loss": 1.089, "step": 9739 }, { "epoch": 1.7341524216524218, "grad_norm": 0.7042871117591858, 
"learning_rate": 0.00012089703853899905, "loss": 0.8667, "step": 9740 }, { "epoch": 1.7343304843304843, "grad_norm": 0.62722247838974, "learning_rate": 0.00012088334989877817, "loss": 0.9185, "step": 9741 }, { "epoch": 1.734508547008547, "grad_norm": 0.6354684829711914, "learning_rate": 0.0001208696608493914, "loss": 0.9951, "step": 9742 }, { "epoch": 1.7346866096866096, "grad_norm": 0.658647894859314, "learning_rate": 0.00012085597139110698, "loss": 0.9324, "step": 9743 }, { "epoch": 1.7348646723646723, "grad_norm": 0.84359210729599, "learning_rate": 0.00012084228152419312, "loss": 1.0861, "step": 9744 }, { "epoch": 1.735042735042735, "grad_norm": 0.6293938755989075, "learning_rate": 0.00012082859124891807, "loss": 0.9676, "step": 9745 }, { "epoch": 1.7352207977207978, "grad_norm": 0.6398760676383972, "learning_rate": 0.00012081490056555004, "loss": 0.8502, "step": 9746 }, { "epoch": 1.7353988603988604, "grad_norm": 0.6918041706085205, "learning_rate": 0.00012080120947435726, "loss": 1.0081, "step": 9747 }, { "epoch": 1.7355769230769231, "grad_norm": 0.7374079823493958, "learning_rate": 0.00012078751797560798, "loss": 0.9485, "step": 9748 }, { "epoch": 1.7357549857549857, "grad_norm": 0.7392128705978394, "learning_rate": 0.00012077382606957049, "loss": 0.9283, "step": 9749 }, { "epoch": 1.7359330484330484, "grad_norm": 0.701320230960846, "learning_rate": 0.00012076013375651303, "loss": 1.0339, "step": 9750 }, { "epoch": 1.7361111111111112, "grad_norm": 0.6316696405410767, "learning_rate": 0.00012074644103670387, "loss": 0.9097, "step": 9751 }, { "epoch": 1.736289173789174, "grad_norm": 0.6892024278640747, "learning_rate": 0.00012073274791041132, "loss": 1.0863, "step": 9752 }, { "epoch": 1.7364672364672364, "grad_norm": 0.6032847762107849, "learning_rate": 0.00012071905437790361, "loss": 0.9305, "step": 9753 }, { "epoch": 1.7366452991452992, "grad_norm": 0.6659184098243713, "learning_rate": 0.00012070536043944907, "loss": 0.9793, "step": 9754 }, { "epoch": 
1.7368233618233617, "grad_norm": 0.7413665056228638, "learning_rate": 0.00012069166609531602, "loss": 1.0523, "step": 9755 }, { "epoch": 1.7370014245014245, "grad_norm": 0.7814368009567261, "learning_rate": 0.00012067797134577275, "loss": 0.9988, "step": 9756 }, { "epoch": 1.7371794871794872, "grad_norm": 0.6174948811531067, "learning_rate": 0.00012066427619108757, "loss": 0.9002, "step": 9757 }, { "epoch": 1.73735754985755, "grad_norm": 0.6521819233894348, "learning_rate": 0.00012065058063152885, "loss": 1.1307, "step": 9758 }, { "epoch": 1.7375356125356125, "grad_norm": 0.6797493696212769, "learning_rate": 0.00012063688466736489, "loss": 0.84, "step": 9759 }, { "epoch": 1.7377136752136753, "grad_norm": 0.6496474146842957, "learning_rate": 0.00012062318829886404, "loss": 0.86, "step": 9760 }, { "epoch": 1.7378917378917378, "grad_norm": 0.6701306104660034, "learning_rate": 0.00012060949152629467, "loss": 0.9422, "step": 9761 }, { "epoch": 1.7380698005698005, "grad_norm": 0.7331172823905945, "learning_rate": 0.00012059579434992512, "loss": 1.1648, "step": 9762 }, { "epoch": 1.7382478632478633, "grad_norm": 0.63930743932724, "learning_rate": 0.00012058209677002375, "loss": 1.0617, "step": 9763 }, { "epoch": 1.738425925925926, "grad_norm": 0.668851912021637, "learning_rate": 0.00012056839878685895, "loss": 0.8219, "step": 9764 }, { "epoch": 1.7386039886039886, "grad_norm": 0.7305747270584106, "learning_rate": 0.00012055470040069912, "loss": 1.0416, "step": 9765 }, { "epoch": 1.7387820512820513, "grad_norm": 0.6931866407394409, "learning_rate": 0.00012054100161181264, "loss": 1.0588, "step": 9766 }, { "epoch": 1.7389601139601139, "grad_norm": 0.6565485000610352, "learning_rate": 0.00012052730242046785, "loss": 0.7885, "step": 9767 }, { "epoch": 1.7391381766381766, "grad_norm": 0.739985466003418, "learning_rate": 0.00012051360282693327, "loss": 1.0973, "step": 9768 }, { "epoch": 1.7393162393162394, "grad_norm": 0.6477079391479492, "learning_rate": 
0.00012049990283147723, "loss": 0.9841, "step": 9769 }, { "epoch": 1.739494301994302, "grad_norm": 0.7018330097198486, "learning_rate": 0.00012048620243436819, "loss": 1.0869, "step": 9770 }, { "epoch": 1.7396723646723646, "grad_norm": 0.7087421417236328, "learning_rate": 0.00012047250163587456, "loss": 0.916, "step": 9771 }, { "epoch": 1.7398504273504274, "grad_norm": 0.8747151494026184, "learning_rate": 0.00012045880043626481, "loss": 0.8245, "step": 9772 }, { "epoch": 1.74002849002849, "grad_norm": 0.777498722076416, "learning_rate": 0.00012044509883580735, "loss": 1.071, "step": 9773 }, { "epoch": 1.7402065527065527, "grad_norm": 0.6668971180915833, "learning_rate": 0.00012043139683477062, "loss": 1.0447, "step": 9774 }, { "epoch": 1.7403846153846154, "grad_norm": 0.6702026724815369, "learning_rate": 0.00012041769443342317, "loss": 0.8688, "step": 9775 }, { "epoch": 1.7405626780626782, "grad_norm": 0.7866267561912537, "learning_rate": 0.00012040399163203337, "loss": 1.0842, "step": 9776 }, { "epoch": 1.7407407407407407, "grad_norm": 0.7655110955238342, "learning_rate": 0.00012039028843086977, "loss": 1.2417, "step": 9777 }, { "epoch": 1.7409188034188035, "grad_norm": 0.7084119915962219, "learning_rate": 0.0001203765848302008, "loss": 0.9844, "step": 9778 }, { "epoch": 1.741096866096866, "grad_norm": 0.7135398983955383, "learning_rate": 0.00012036288083029497, "loss": 1.1102, "step": 9779 }, { "epoch": 1.7412749287749287, "grad_norm": 0.6784615516662598, "learning_rate": 0.0001203491764314208, "loss": 1.0349, "step": 9780 }, { "epoch": 1.7414529914529915, "grad_norm": 0.7170301079750061, "learning_rate": 0.00012033547163384682, "loss": 1.0899, "step": 9781 }, { "epoch": 1.7416310541310542, "grad_norm": 0.6692060828208923, "learning_rate": 0.0001203217664378415, "loss": 1.0486, "step": 9782 }, { "epoch": 1.7418091168091168, "grad_norm": 0.6730037927627563, "learning_rate": 0.00012030806084367336, "loss": 0.9684, "step": 9783 }, { "epoch": 1.7419871794871795, 
"grad_norm": 0.5983504056930542, "learning_rate": 0.00012029435485161096, "loss": 0.7106, "step": 9784 }, { "epoch": 1.742165242165242, "grad_norm": 0.6834231615066528, "learning_rate": 0.00012028064846192284, "loss": 0.803, "step": 9785 }, { "epoch": 1.7423433048433048, "grad_norm": 0.621046245098114, "learning_rate": 0.00012026694167487755, "loss": 0.9129, "step": 9786 }, { "epoch": 1.7425213675213675, "grad_norm": 0.6348989605903625, "learning_rate": 0.00012025323449074361, "loss": 1.0076, "step": 9787 }, { "epoch": 1.7426994301994303, "grad_norm": 0.6139974594116211, "learning_rate": 0.00012023952690978966, "loss": 1.0756, "step": 9788 }, { "epoch": 1.7428774928774928, "grad_norm": 0.6473259925842285, "learning_rate": 0.00012022581893228419, "loss": 1.0568, "step": 9789 }, { "epoch": 1.7430555555555556, "grad_norm": 0.6133778095245361, "learning_rate": 0.00012021211055849581, "loss": 0.8722, "step": 9790 }, { "epoch": 1.743233618233618, "grad_norm": 0.6934139728546143, "learning_rate": 0.00012019840178869315, "loss": 1.0329, "step": 9791 }, { "epoch": 1.7434116809116809, "grad_norm": 0.6730150580406189, "learning_rate": 0.00012018469262314474, "loss": 0.9326, "step": 9792 }, { "epoch": 1.7435897435897436, "grad_norm": 0.6805521249771118, "learning_rate": 0.0001201709830621192, "loss": 1.0527, "step": 9793 }, { "epoch": 1.7437678062678064, "grad_norm": 0.6972569823265076, "learning_rate": 0.00012015727310588516, "loss": 1.0024, "step": 9794 }, { "epoch": 1.743945868945869, "grad_norm": 0.7329187989234924, "learning_rate": 0.00012014356275471122, "loss": 1.1864, "step": 9795 }, { "epoch": 1.7441239316239316, "grad_norm": 0.7220240831375122, "learning_rate": 0.00012012985200886602, "loss": 0.8831, "step": 9796 }, { "epoch": 1.7443019943019942, "grad_norm": 0.7829749584197998, "learning_rate": 0.00012011614086861818, "loss": 1.0365, "step": 9797 }, { "epoch": 1.744480056980057, "grad_norm": 0.7148944735527039, "learning_rate": 0.00012010242933423637, "loss": 
1.0413, "step": 9798 }, { "epoch": 1.7446581196581197, "grad_norm": 0.5607262253761292, "learning_rate": 0.00012008871740598917, "loss": 0.8154, "step": 9799 }, { "epoch": 1.7448361823361824, "grad_norm": 0.754626452922821, "learning_rate": 0.00012007500508414531, "loss": 1.0569, "step": 9800 }, { "epoch": 1.7450142450142452, "grad_norm": 0.7216293215751648, "learning_rate": 0.00012006129236897343, "loss": 1.1641, "step": 9801 }, { "epoch": 1.7451923076923077, "grad_norm": 0.6575515270233154, "learning_rate": 0.0001200475792607422, "loss": 0.9063, "step": 9802 }, { "epoch": 1.7453703703703702, "grad_norm": 0.7411505579948425, "learning_rate": 0.00012003386575972031, "loss": 0.9791, "step": 9803 }, { "epoch": 1.745548433048433, "grad_norm": 0.6945903301239014, "learning_rate": 0.0001200201518661764, "loss": 0.8111, "step": 9804 }, { "epoch": 1.7457264957264957, "grad_norm": 0.5760970115661621, "learning_rate": 0.00012000643758037924, "loss": 1.1054, "step": 9805 }, { "epoch": 1.7459045584045585, "grad_norm": 0.6732224225997925, "learning_rate": 0.00011999272290259748, "loss": 0.8992, "step": 9806 }, { "epoch": 1.7460826210826212, "grad_norm": 0.673270046710968, "learning_rate": 0.00011997900783309983, "loss": 1.0554, "step": 9807 }, { "epoch": 1.7462606837606838, "grad_norm": 0.7233314514160156, "learning_rate": 0.00011996529237215503, "loss": 1.066, "step": 9808 }, { "epoch": 1.7464387464387463, "grad_norm": 0.7016494274139404, "learning_rate": 0.00011995157652003183, "loss": 0.891, "step": 9809 }, { "epoch": 1.746616809116809, "grad_norm": 0.9377092719078064, "learning_rate": 0.00011993786027699889, "loss": 0.8626, "step": 9810 }, { "epoch": 1.7467948717948718, "grad_norm": 0.6825845241546631, "learning_rate": 0.00011992414364332503, "loss": 0.8996, "step": 9811 }, { "epoch": 1.7469729344729346, "grad_norm": 0.6836053729057312, "learning_rate": 0.00011991042661927896, "loss": 0.9338, "step": 9812 }, { "epoch": 1.7471509971509973, "grad_norm": 0.6462908387184143, 
"learning_rate": 0.00011989670920512943, "loss": 1.1185, "step": 9813 }, { "epoch": 1.7473290598290598, "grad_norm": 0.7191921472549438, "learning_rate": 0.00011988299140114522, "loss": 0.9084, "step": 9814 }, { "epoch": 1.7475071225071224, "grad_norm": 0.6951598525047302, "learning_rate": 0.00011986927320759508, "loss": 1.0653, "step": 9815 }, { "epoch": 1.7476851851851851, "grad_norm": 0.7512598037719727, "learning_rate": 0.00011985555462474784, "loss": 1.0259, "step": 9816 }, { "epoch": 1.7478632478632479, "grad_norm": 0.6885492205619812, "learning_rate": 0.00011984183565287226, "loss": 0.7148, "step": 9817 }, { "epoch": 1.7480413105413106, "grad_norm": 0.6880139708518982, "learning_rate": 0.00011982811629223709, "loss": 1.1567, "step": 9818 }, { "epoch": 1.7482193732193734, "grad_norm": 0.7381170392036438, "learning_rate": 0.0001198143965431112, "loss": 0.8483, "step": 9819 }, { "epoch": 1.748397435897436, "grad_norm": 0.6761063933372498, "learning_rate": 0.00011980067640576333, "loss": 0.9498, "step": 9820 }, { "epoch": 1.7485754985754984, "grad_norm": 0.6454669237136841, "learning_rate": 0.00011978695588046238, "loss": 0.7336, "step": 9821 }, { "epoch": 1.7487535612535612, "grad_norm": 0.6026871800422668, "learning_rate": 0.00011977323496747712, "loss": 0.8618, "step": 9822 }, { "epoch": 1.748931623931624, "grad_norm": 0.6877408027648926, "learning_rate": 0.0001197595136670764, "loss": 0.9146, "step": 9823 }, { "epoch": 1.7491096866096867, "grad_norm": 0.6874892115592957, "learning_rate": 0.00011974579197952906, "loss": 1.1628, "step": 9824 }, { "epoch": 1.7492877492877494, "grad_norm": 0.7464384436607361, "learning_rate": 0.00011973206990510393, "loss": 1.007, "step": 9825 }, { "epoch": 1.749465811965812, "grad_norm": 0.7281473278999329, "learning_rate": 0.00011971834744406986, "loss": 1.0776, "step": 9826 }, { "epoch": 1.7496438746438745, "grad_norm": 0.6112284660339355, "learning_rate": 0.00011970462459669575, "loss": 0.7616, "step": 9827 }, { "epoch": 
1.7498219373219372, "grad_norm": 0.6498035192489624, "learning_rate": 0.00011969090136325048, "loss": 0.884, "step": 9828 }, { "epoch": 1.7498219373219372, "eval_loss": 1.1018389463424683, "eval_runtime": 24.5594, "eval_samples_per_second": 42.387, "eval_steps_per_second": 21.214, "step": 9828 }, { "epoch": 1.75, "grad_norm": 0.6746426224708557, "learning_rate": 0.00011967717774400289, "loss": 0.9023, "step": 9829 }, { "epoch": 1.7501780626780628, "grad_norm": 0.6513423323631287, "learning_rate": 0.00011966345373922188, "loss": 0.9786, "step": 9830 }, { "epoch": 1.7503561253561255, "grad_norm": 0.7053804397583008, "learning_rate": 0.00011964972934917632, "loss": 1.0667, "step": 9831 }, { "epoch": 1.750534188034188, "grad_norm": 0.6769008040428162, "learning_rate": 0.00011963600457413513, "loss": 0.8596, "step": 9832 }, { "epoch": 1.7507122507122506, "grad_norm": 0.7162246108055115, "learning_rate": 0.00011962227941436725, "loss": 1.0746, "step": 9833 }, { "epoch": 1.7508903133903133, "grad_norm": 0.7665811777114868, "learning_rate": 0.00011960855387014156, "loss": 1.0056, "step": 9834 }, { "epoch": 1.751068376068376, "grad_norm": 0.6186950206756592, "learning_rate": 0.00011959482794172696, "loss": 0.9016, "step": 9835 }, { "epoch": 1.7512464387464388, "grad_norm": 0.8018904328346252, "learning_rate": 0.00011958110162939245, "loss": 0.9534, "step": 9836 }, { "epoch": 1.7514245014245016, "grad_norm": 0.8239033818244934, "learning_rate": 0.0001195673749334069, "loss": 1.214, "step": 9837 }, { "epoch": 1.751602564102564, "grad_norm": 0.7886297106742859, "learning_rate": 0.00011955364785403931, "loss": 0.9672, "step": 9838 }, { "epoch": 1.7517806267806266, "grad_norm": 0.6463177800178528, "learning_rate": 0.00011953992039155862, "loss": 0.9184, "step": 9839 }, { "epoch": 1.7519586894586894, "grad_norm": 0.7374706864356995, "learning_rate": 0.00011952619254623374, "loss": 0.9988, "step": 9840 }, { "epoch": 1.7521367521367521, "grad_norm": 0.7456657886505127, 
"learning_rate": 0.00011951246431833369, "loss": 1.2197, "step": 9841 }, { "epoch": 1.7523148148148149, "grad_norm": 0.6644248962402344, "learning_rate": 0.00011949873570812746, "loss": 0.9449, "step": 9842 }, { "epoch": 1.7524928774928776, "grad_norm": 0.707919180393219, "learning_rate": 0.000119485006715884, "loss": 0.774, "step": 9843 }, { "epoch": 1.7526709401709402, "grad_norm": 0.6273906826972961, "learning_rate": 0.00011947127734187231, "loss": 0.8682, "step": 9844 }, { "epoch": 1.7528490028490027, "grad_norm": 0.8335350155830383, "learning_rate": 0.00011945754758636136, "loss": 1.2282, "step": 9845 }, { "epoch": 1.7530270655270654, "grad_norm": 0.6849051117897034, "learning_rate": 0.00011944381744962022, "loss": 1.1091, "step": 9846 }, { "epoch": 1.7532051282051282, "grad_norm": 0.8571760058403015, "learning_rate": 0.00011943008693191781, "loss": 0.9806, "step": 9847 }, { "epoch": 1.753383190883191, "grad_norm": 0.7045019268989563, "learning_rate": 0.00011941635603352328, "loss": 0.9217, "step": 9848 }, { "epoch": 1.7535612535612537, "grad_norm": 0.6820187568664551, "learning_rate": 0.00011940262475470556, "loss": 0.9983, "step": 9849 }, { "epoch": 1.7537393162393162, "grad_norm": 0.7400697469711304, "learning_rate": 0.00011938889309573374, "loss": 0.9521, "step": 9850 }, { "epoch": 1.7539173789173788, "grad_norm": 0.7027658820152283, "learning_rate": 0.00011937516105687678, "loss": 1.0749, "step": 9851 }, { "epoch": 1.7540954415954415, "grad_norm": 0.6778307557106018, "learning_rate": 0.00011936142863840382, "loss": 1.0249, "step": 9852 }, { "epoch": 1.7542735042735043, "grad_norm": 0.6787961721420288, "learning_rate": 0.00011934769584058389, "loss": 1.0014, "step": 9853 }, { "epoch": 1.754451566951567, "grad_norm": 0.7515636086463928, "learning_rate": 0.00011933396266368606, "loss": 1.0351, "step": 9854 }, { "epoch": 1.7546296296296298, "grad_norm": 0.6620134115219116, "learning_rate": 0.00011932022910797938, "loss": 1.0294, "step": 9855 }, { "epoch": 
1.7548076923076923, "grad_norm": 0.8260951638221741, "learning_rate": 0.00011930649517373294, "loss": 0.9078, "step": 9856 }, { "epoch": 1.7549857549857548, "grad_norm": 0.7680675983428955, "learning_rate": 0.00011929276086121584, "loss": 0.92, "step": 9857 }, { "epoch": 1.7551638176638176, "grad_norm": 0.7104191184043884, "learning_rate": 0.00011927902617069717, "loss": 0.9937, "step": 9858 }, { "epoch": 1.7553418803418803, "grad_norm": 0.7185840606689453, "learning_rate": 0.00011926529110244603, "loss": 0.9775, "step": 9859 }, { "epoch": 1.755519943019943, "grad_norm": 0.7114652991294861, "learning_rate": 0.00011925155565673151, "loss": 0.883, "step": 9860 }, { "epoch": 1.7556980056980058, "grad_norm": 0.6906639337539673, "learning_rate": 0.00011923781983382276, "loss": 0.9789, "step": 9861 }, { "epoch": 1.7558760683760684, "grad_norm": 0.706908106803894, "learning_rate": 0.00011922408363398892, "loss": 1.1186, "step": 9862 }, { "epoch": 1.756054131054131, "grad_norm": 0.7532939910888672, "learning_rate": 0.00011921034705749908, "loss": 0.977, "step": 9863 }, { "epoch": 1.7562321937321936, "grad_norm": 0.7397763729095459, "learning_rate": 0.0001191966101046224, "loss": 1.1121, "step": 9864 }, { "epoch": 1.7564102564102564, "grad_norm": 0.6955398321151733, "learning_rate": 0.00011918287277562801, "loss": 1.0439, "step": 9865 }, { "epoch": 1.7565883190883191, "grad_norm": 0.7485929727554321, "learning_rate": 0.00011916913507078507, "loss": 1.1644, "step": 9866 }, { "epoch": 1.756766381766382, "grad_norm": 0.6337487101554871, "learning_rate": 0.00011915539699036274, "loss": 0.8216, "step": 9867 }, { "epoch": 1.7569444444444444, "grad_norm": 0.6628872752189636, "learning_rate": 0.00011914165853463022, "loss": 0.9584, "step": 9868 }, { "epoch": 1.7571225071225072, "grad_norm": 0.6577547788619995, "learning_rate": 0.00011912791970385666, "loss": 0.9484, "step": 9869 }, { "epoch": 1.7573005698005697, "grad_norm": 0.6409304738044739, "learning_rate": 
0.00011911418049831127, "loss": 1.1256, "step": 9870 }, { "epoch": 1.7574786324786325, "grad_norm": 0.7499844431877136, "learning_rate": 0.00011910044091826319, "loss": 0.7991, "step": 9871 }, { "epoch": 1.7576566951566952, "grad_norm": 0.6786715388298035, "learning_rate": 0.00011908670096398165, "loss": 1.0368, "step": 9872 }, { "epoch": 1.757834757834758, "grad_norm": 0.6432101130485535, "learning_rate": 0.00011907296063573585, "loss": 0.9059, "step": 9873 }, { "epoch": 1.7580128205128205, "grad_norm": 0.6542613506317139, "learning_rate": 0.00011905921993379503, "loss": 0.9866, "step": 9874 }, { "epoch": 1.7581908831908832, "grad_norm": 0.6048218011856079, "learning_rate": 0.00011904547885842838, "loss": 0.9488, "step": 9875 }, { "epoch": 1.7583689458689458, "grad_norm": 0.7694938778877258, "learning_rate": 0.00011903173740990512, "loss": 1.1026, "step": 9876 }, { "epoch": 1.7585470085470085, "grad_norm": 0.6621627807617188, "learning_rate": 0.00011901799558849451, "loss": 1.135, "step": 9877 }, { "epoch": 1.7587250712250713, "grad_norm": 0.6561587452888489, "learning_rate": 0.0001190042533944658, "loss": 0.9322, "step": 9878 }, { "epoch": 1.758903133903134, "grad_norm": 0.7846759557723999, "learning_rate": 0.00011899051082808821, "loss": 0.9324, "step": 9879 }, { "epoch": 1.7590811965811965, "grad_norm": 0.6004071831703186, "learning_rate": 0.00011897676788963101, "loss": 0.9641, "step": 9880 }, { "epoch": 1.7592592592592593, "grad_norm": 0.6731070280075073, "learning_rate": 0.00011896302457936344, "loss": 1.1437, "step": 9881 }, { "epoch": 1.7594373219373218, "grad_norm": 0.6768675446510315, "learning_rate": 0.00011894928089755481, "loss": 1.0707, "step": 9882 }, { "epoch": 1.7596153846153846, "grad_norm": 0.8368878960609436, "learning_rate": 0.0001189355368444744, "loss": 1.0435, "step": 9883 }, { "epoch": 1.7597934472934473, "grad_norm": 0.6132324934005737, "learning_rate": 0.00011892179242039149, "loss": 0.8889, "step": 9884 }, { "epoch": 1.75997150997151, 
"grad_norm": 0.7598093152046204, "learning_rate": 0.00011890804762557535, "loss": 1.151, "step": 9885 }, { "epoch": 1.7601495726495726, "grad_norm": 0.7317715883255005, "learning_rate": 0.00011889430246029527, "loss": 0.9992, "step": 9886 }, { "epoch": 1.7603276353276354, "grad_norm": 0.7664858102798462, "learning_rate": 0.00011888055692482059, "loss": 0.8398, "step": 9887 }, { "epoch": 1.760505698005698, "grad_norm": 0.6916853189468384, "learning_rate": 0.00011886681101942063, "loss": 0.9507, "step": 9888 }, { "epoch": 1.7606837606837606, "grad_norm": 0.7103399634361267, "learning_rate": 0.0001188530647443647, "loss": 0.915, "step": 9889 }, { "epoch": 1.7608618233618234, "grad_norm": 0.6177804470062256, "learning_rate": 0.00011883931809992215, "loss": 0.721, "step": 9890 }, { "epoch": 1.7610398860398861, "grad_norm": 0.7523959279060364, "learning_rate": 0.00011882557108636227, "loss": 0.99, "step": 9891 }, { "epoch": 1.7612179487179487, "grad_norm": 0.6211134791374207, "learning_rate": 0.00011881182370395442, "loss": 0.8089, "step": 9892 }, { "epoch": 1.7613960113960114, "grad_norm": 0.6660307049751282, "learning_rate": 0.00011879807595296802, "loss": 1.1062, "step": 9893 }, { "epoch": 1.761574074074074, "grad_norm": 0.7039240598678589, "learning_rate": 0.00011878432783367232, "loss": 0.9739, "step": 9894 }, { "epoch": 1.7617521367521367, "grad_norm": 0.658064603805542, "learning_rate": 0.00011877057934633675, "loss": 0.9438, "step": 9895 }, { "epoch": 1.7619301994301995, "grad_norm": 0.8227152228355408, "learning_rate": 0.00011875683049123068, "loss": 0.8385, "step": 9896 }, { "epoch": 1.7621082621082622, "grad_norm": 0.6622483730316162, "learning_rate": 0.00011874308126862346, "loss": 0.9432, "step": 9897 }, { "epoch": 1.7622863247863247, "grad_norm": 0.7211357951164246, "learning_rate": 0.00011872933167878453, "loss": 1.2471, "step": 9898 }, { "epoch": 1.7624643874643875, "grad_norm": 0.6177424192428589, "learning_rate": 0.00011871558172198322, "loss": 0.8892, 
"step": 9899 }, { "epoch": 1.76264245014245, "grad_norm": 0.6924285888671875, "learning_rate": 0.00011870183139848898, "loss": 1.021, "step": 9900 }, { "epoch": 1.7628205128205128, "grad_norm": 0.6168648600578308, "learning_rate": 0.0001186880807085712, "loss": 0.9013, "step": 9901 }, { "epoch": 1.7629985754985755, "grad_norm": 0.6410452723503113, "learning_rate": 0.00011867432965249929, "loss": 0.6686, "step": 9902 }, { "epoch": 1.7631766381766383, "grad_norm": 0.6959559917449951, "learning_rate": 0.0001186605782305427, "loss": 0.9814, "step": 9903 }, { "epoch": 1.7633547008547008, "grad_norm": 0.7456178069114685, "learning_rate": 0.00011864682644297085, "loss": 1.0151, "step": 9904 }, { "epoch": 1.7635327635327636, "grad_norm": 0.6499991416931152, "learning_rate": 0.00011863307429005317, "loss": 0.83, "step": 9905 }, { "epoch": 1.763710826210826, "grad_norm": 0.643344521522522, "learning_rate": 0.00011861932177205908, "loss": 0.8853, "step": 9906 }, { "epoch": 1.7638888888888888, "grad_norm": 0.6570441722869873, "learning_rate": 0.00011860556888925804, "loss": 0.9179, "step": 9907 }, { "epoch": 1.7640669515669516, "grad_norm": 0.6892307996749878, "learning_rate": 0.00011859181564191957, "loss": 0.9657, "step": 9908 }, { "epoch": 1.7642450142450143, "grad_norm": 0.648158073425293, "learning_rate": 0.0001185780620303131, "loss": 0.9179, "step": 9909 }, { "epoch": 1.7644230769230769, "grad_norm": 0.5833603143692017, "learning_rate": 0.00011856430805470808, "loss": 0.8505, "step": 9910 }, { "epoch": 1.7646011396011396, "grad_norm": 0.8302416205406189, "learning_rate": 0.000118550553715374, "loss": 0.8948, "step": 9911 }, { "epoch": 1.7647792022792022, "grad_norm": 0.7075300216674805, "learning_rate": 0.00011853679901258035, "loss": 1.2467, "step": 9912 }, { "epoch": 1.764957264957265, "grad_norm": 0.81916344165802, "learning_rate": 0.00011852304394659666, "loss": 0.9963, "step": 9913 }, { "epoch": 1.7651353276353277, "grad_norm": 0.6492435932159424, "learning_rate": 
0.00011850928851769239, "loss": 1.0704, "step": 9914 }, { "epoch": 1.7653133903133904, "grad_norm": 0.7301090359687805, "learning_rate": 0.00011849553272613704, "loss": 1.0477, "step": 9915 }, { "epoch": 1.765491452991453, "grad_norm": 0.7280275821685791, "learning_rate": 0.00011848177657220019, "loss": 0.9124, "step": 9916 }, { "epoch": 1.7656695156695157, "grad_norm": 0.6948845386505127, "learning_rate": 0.00011846802005615127, "loss": 1.2275, "step": 9917 }, { "epoch": 1.7658475783475782, "grad_norm": 0.6553834676742554, "learning_rate": 0.0001184542631782599, "loss": 1.2311, "step": 9918 }, { "epoch": 1.766025641025641, "grad_norm": 0.6899739503860474, "learning_rate": 0.00011844050593879556, "loss": 0.8936, "step": 9919 }, { "epoch": 1.7662037037037037, "grad_norm": 0.6076815128326416, "learning_rate": 0.00011842674833802782, "loss": 0.8432, "step": 9920 }, { "epoch": 1.7663817663817665, "grad_norm": 0.7650902271270752, "learning_rate": 0.00011841299037622624, "loss": 1.0447, "step": 9921 }, { "epoch": 1.7665598290598292, "grad_norm": 0.6864938735961914, "learning_rate": 0.00011839923205366032, "loss": 0.936, "step": 9922 }, { "epoch": 1.7667378917378918, "grad_norm": 0.7176852226257324, "learning_rate": 0.0001183854733705997, "loss": 0.9764, "step": 9923 }, { "epoch": 1.7669159544159543, "grad_norm": 0.6513439416885376, "learning_rate": 0.00011837171432731393, "loss": 1.0095, "step": 9924 }, { "epoch": 1.767094017094017, "grad_norm": 0.8031024932861328, "learning_rate": 0.00011835795492407256, "loss": 1.1348, "step": 9925 }, { "epoch": 1.7672720797720798, "grad_norm": 0.7659830451011658, "learning_rate": 0.00011834419516114518, "loss": 0.9058, "step": 9926 }, { "epoch": 1.7674501424501425, "grad_norm": 0.8864039778709412, "learning_rate": 0.00011833043503880145, "loss": 1.0342, "step": 9927 }, { "epoch": 1.7676282051282053, "grad_norm": 0.6870512962341309, "learning_rate": 0.00011831667455731088, "loss": 0.9361, "step": 9928 }, { "epoch": 1.7678062678062678, 
"grad_norm": 0.6458830833435059, "learning_rate": 0.00011830291371694315, "loss": 0.8215, "step": 9929 }, { "epoch": 1.7679843304843303, "grad_norm": 0.7456086874008179, "learning_rate": 0.00011828915251796787, "loss": 1.1243, "step": 9930 }, { "epoch": 1.768162393162393, "grad_norm": 0.6834850311279297, "learning_rate": 0.00011827539096065459, "loss": 0.9536, "step": 9931 }, { "epoch": 1.7683404558404558, "grad_norm": 0.643864631652832, "learning_rate": 0.00011826162904527302, "loss": 1.1707, "step": 9932 }, { "epoch": 1.7685185185185186, "grad_norm": 0.6312864422798157, "learning_rate": 0.00011824786677209275, "loss": 0.7937, "step": 9933 }, { "epoch": 1.7686965811965814, "grad_norm": 0.6092729568481445, "learning_rate": 0.00011823410414138343, "loss": 0.8787, "step": 9934 }, { "epoch": 1.7688746438746439, "grad_norm": 0.6859988570213318, "learning_rate": 0.00011822034115341474, "loss": 0.9691, "step": 9935 }, { "epoch": 1.7690527065527064, "grad_norm": 0.7219935059547424, "learning_rate": 0.0001182065778084563, "loss": 1.0606, "step": 9936 }, { "epoch": 1.7692307692307692, "grad_norm": 0.6596202850341797, "learning_rate": 0.00011819281410677778, "loss": 1.0543, "step": 9937 }, { "epoch": 1.769408831908832, "grad_norm": 0.6616338491439819, "learning_rate": 0.00011817905004864887, "loss": 0.9757, "step": 9938 }, { "epoch": 1.7695868945868947, "grad_norm": 0.6637360453605652, "learning_rate": 0.00011816528563433924, "loss": 0.925, "step": 9939 }, { "epoch": 1.7697649572649574, "grad_norm": 0.8422333002090454, "learning_rate": 0.00011815152086411859, "loss": 1.1343, "step": 9940 }, { "epoch": 1.76994301994302, "grad_norm": 0.6638204455375671, "learning_rate": 0.00011813775573825656, "loss": 1.2136, "step": 9941 }, { "epoch": 1.7701210826210825, "grad_norm": 0.7258831858634949, "learning_rate": 0.0001181239902570229, "loss": 0.7308, "step": 9942 }, { "epoch": 1.7702991452991452, "grad_norm": 0.730582594871521, "learning_rate": 0.0001181102244206873, "loss": 1.1097, 
"step": 9943 }, { "epoch": 1.770477207977208, "grad_norm": 0.7324019074440002, "learning_rate": 0.00011809645822951946, "loss": 0.9802, "step": 9944 }, { "epoch": 1.7706552706552707, "grad_norm": 0.5565997958183289, "learning_rate": 0.00011808269168378914, "loss": 0.7079, "step": 9945 }, { "epoch": 1.7708333333333335, "grad_norm": 0.6395503282546997, "learning_rate": 0.00011806892478376601, "loss": 1.0048, "step": 9946 }, { "epoch": 1.771011396011396, "grad_norm": 0.7670905590057373, "learning_rate": 0.00011805515752971985, "loss": 1.2509, "step": 9947 }, { "epoch": 1.7711894586894585, "grad_norm": 0.5945813655853271, "learning_rate": 0.00011804138992192037, "loss": 0.8856, "step": 9948 }, { "epoch": 1.7713675213675213, "grad_norm": 0.7355493307113647, "learning_rate": 0.00011802762196063737, "loss": 0.9629, "step": 9949 }, { "epoch": 1.771545584045584, "grad_norm": 0.7024806141853333, "learning_rate": 0.00011801385364614055, "loss": 1.1351, "step": 9950 }, { "epoch": 1.7717236467236468, "grad_norm": 0.6553003191947937, "learning_rate": 0.00011800008497869968, "loss": 0.911, "step": 9951 }, { "epoch": 1.7719017094017095, "grad_norm": 0.6883971691131592, "learning_rate": 0.00011798631595858454, "loss": 1.0099, "step": 9952 }, { "epoch": 1.772079772079772, "grad_norm": 0.7106832265853882, "learning_rate": 0.00011797254658606489, "loss": 1.0298, "step": 9953 }, { "epoch": 1.7722578347578346, "grad_norm": 0.7902877926826477, "learning_rate": 0.00011795877686141055, "loss": 1.0572, "step": 9954 }, { "epoch": 1.7724358974358974, "grad_norm": 0.7105007171630859, "learning_rate": 0.00011794500678489126, "loss": 1.1725, "step": 9955 }, { "epoch": 1.77261396011396, "grad_norm": 0.7314959764480591, "learning_rate": 0.00011793123635677685, "loss": 1.1074, "step": 9956 }, { "epoch": 1.7727920227920229, "grad_norm": 0.6358618140220642, "learning_rate": 0.00011791746557733712, "loss": 0.8786, "step": 9957 }, { "epoch": 1.7729700854700856, "grad_norm": 0.6441367864608765, 
"learning_rate": 0.00011790369444684187, "loss": 1.1332, "step": 9958 }, { "epoch": 1.7731481481481481, "grad_norm": 0.686787486076355, "learning_rate": 0.0001178899229655609, "loss": 0.9566, "step": 9959 }, { "epoch": 1.7733262108262107, "grad_norm": 0.653840184211731, "learning_rate": 0.00011787615113376407, "loss": 0.8763, "step": 9960 }, { "epoch": 1.7735042735042734, "grad_norm": 0.7106643915176392, "learning_rate": 0.00011786237895172119, "loss": 0.9929, "step": 9961 }, { "epoch": 1.7736823361823362, "grad_norm": 0.6634044051170349, "learning_rate": 0.0001178486064197021, "loss": 0.7467, "step": 9962 }, { "epoch": 1.773860398860399, "grad_norm": 0.7087352871894836, "learning_rate": 0.00011783483353797663, "loss": 1.0104, "step": 9963 }, { "epoch": 1.7740384615384617, "grad_norm": 0.8088061213493347, "learning_rate": 0.00011782106030681466, "loss": 1.0376, "step": 9964 }, { "epoch": 1.7742165242165242, "grad_norm": 0.7204688787460327, "learning_rate": 0.00011780728672648604, "loss": 0.8556, "step": 9965 }, { "epoch": 1.7743945868945867, "grad_norm": 0.7893314957618713, "learning_rate": 0.0001177935127972606, "loss": 0.9764, "step": 9966 }, { "epoch": 1.7745726495726495, "grad_norm": 0.6098896265029907, "learning_rate": 0.00011777973851940826, "loss": 0.9407, "step": 9967 }, { "epoch": 1.7747507122507122, "grad_norm": 0.6420868039131165, "learning_rate": 0.0001177659638931989, "loss": 1.1328, "step": 9968 }, { "epoch": 1.774928774928775, "grad_norm": 0.7732378244400024, "learning_rate": 0.00011775218891890234, "loss": 1.1236, "step": 9969 }, { "epoch": 1.7751068376068377, "grad_norm": 0.6591582894325256, "learning_rate": 0.00011773841359678855, "loss": 1.1523, "step": 9970 }, { "epoch": 1.7752849002849003, "grad_norm": 0.6337170004844666, "learning_rate": 0.00011772463792712738, "loss": 1.1998, "step": 9971 }, { "epoch": 1.7754629629629628, "grad_norm": 0.6400532126426697, "learning_rate": 0.00011771086191018874, "loss": 0.9543, "step": 9972 }, { "epoch": 
1.7756410256410255, "grad_norm": 0.6431527733802795, "learning_rate": 0.00011769708554624257, "loss": 0.8164, "step": 9973 }, { "epoch": 1.7758190883190883, "grad_norm": 0.7303599119186401, "learning_rate": 0.00011768330883555876, "loss": 0.9553, "step": 9974 }, { "epoch": 1.775997150997151, "grad_norm": 0.7838605642318726, "learning_rate": 0.00011766953177840725, "loss": 0.9759, "step": 9975 }, { "epoch": 1.7761752136752138, "grad_norm": 0.6505265831947327, "learning_rate": 0.00011765575437505796, "loss": 0.8527, "step": 9976 }, { "epoch": 1.7763532763532763, "grad_norm": 0.7336180806159973, "learning_rate": 0.00011764197662578086, "loss": 1.1098, "step": 9977 }, { "epoch": 1.776531339031339, "grad_norm": 0.7040138244628906, "learning_rate": 0.00011762819853084586, "loss": 1.1289, "step": 9978 }, { "epoch": 1.7767094017094016, "grad_norm": 0.6414867043495178, "learning_rate": 0.00011761442009052293, "loss": 1.0826, "step": 9979 }, { "epoch": 1.7768874643874644, "grad_norm": 0.6760666370391846, "learning_rate": 0.00011760064130508204, "loss": 1.0188, "step": 9980 }, { "epoch": 1.7770655270655271, "grad_norm": 0.7864978909492493, "learning_rate": 0.00011758686217479316, "loss": 1.1938, "step": 9981 }, { "epoch": 1.7772435897435899, "grad_norm": 0.7964870929718018, "learning_rate": 0.00011757308269992622, "loss": 0.9876, "step": 9982 }, { "epoch": 1.7774216524216524, "grad_norm": 0.5158692002296448, "learning_rate": 0.00011755930288075123, "loss": 0.6508, "step": 9983 }, { "epoch": 1.7775997150997151, "grad_norm": 0.7208606600761414, "learning_rate": 0.00011754552271753819, "loss": 1.0738, "step": 9984 }, { "epoch": 1.7777777777777777, "grad_norm": 0.6811334490776062, "learning_rate": 0.00011753174221055705, "loss": 1.1216, "step": 9985 }, { "epoch": 1.7779558404558404, "grad_norm": 0.6389986276626587, "learning_rate": 0.00011751796136007787, "loss": 0.9664, "step": 9986 }, { "epoch": 1.7781339031339032, "grad_norm": 0.7081875205039978, "learning_rate": 
0.00011750418016637064, "loss": 0.9365, "step": 9987 }, { "epoch": 1.778311965811966, "grad_norm": 0.7291778326034546, "learning_rate": 0.00011749039862970535, "loss": 1.3222, "step": 9988 }, { "epoch": 1.7784900284900285, "grad_norm": 0.6790453791618347, "learning_rate": 0.000117476616750352, "loss": 0.9537, "step": 9989 }, { "epoch": 1.7786680911680912, "grad_norm": 0.6271076202392578, "learning_rate": 0.00011746283452858069, "loss": 0.9842, "step": 9990 }, { "epoch": 1.7788461538461537, "grad_norm": 0.675628662109375, "learning_rate": 0.00011744905196466138, "loss": 0.8675, "step": 9991 }, { "epoch": 1.7790242165242165, "grad_norm": 0.7328314185142517, "learning_rate": 0.00011743526905886417, "loss": 0.9793, "step": 9992 }, { "epoch": 1.7792022792022792, "grad_norm": 0.698764979839325, "learning_rate": 0.00011742148581145908, "loss": 0.9527, "step": 9993 }, { "epoch": 1.779380341880342, "grad_norm": 0.6911364793777466, "learning_rate": 0.00011740770222271616, "loss": 1.1069, "step": 9994 }, { "epoch": 1.7795584045584045, "grad_norm": 0.6990836262702942, "learning_rate": 0.00011739391829290547, "loss": 0.9132, "step": 9995 }, { "epoch": 1.7797364672364673, "grad_norm": 0.7056801319122314, "learning_rate": 0.0001173801340222971, "loss": 1.053, "step": 9996 }, { "epoch": 1.7799145299145298, "grad_norm": 0.7453791499137878, "learning_rate": 0.0001173663494111611, "loss": 0.8806, "step": 9997 }, { "epoch": 1.7800925925925926, "grad_norm": 0.7211771011352539, "learning_rate": 0.00011735256445976757, "loss": 0.9968, "step": 9998 }, { "epoch": 1.7802706552706553, "grad_norm": 0.7259734272956848, "learning_rate": 0.00011733877916838656, "loss": 1.167, "step": 9999 }, { "epoch": 1.780448717948718, "grad_norm": 0.6931926012039185, "learning_rate": 0.00011732499353728821, "loss": 1.0634, "step": 10000 }, { "epoch": 1.7806267806267806, "grad_norm": 0.6900074481964111, "learning_rate": 0.00011731120756674259, "loss": 0.9718, "step": 10001 }, { "epoch": 1.7808048433048433, 
"grad_norm": 0.6817582845687866, "learning_rate": 0.00011729742125701984, "loss": 1.0896, "step": 10002 }, { "epoch": 1.7809829059829059, "grad_norm": 0.6901891231536865, "learning_rate": 0.00011728363460839003, "loss": 1.0163, "step": 10003 }, { "epoch": 1.7811609686609686, "grad_norm": 0.9138323664665222, "learning_rate": 0.00011726984762112328, "loss": 1.1713, "step": 10004 }, { "epoch": 1.7813390313390314, "grad_norm": 0.6105810403823853, "learning_rate": 0.00011725606029548977, "loss": 0.9331, "step": 10005 }, { "epoch": 1.7815170940170941, "grad_norm": 0.5605259537696838, "learning_rate": 0.0001172422726317596, "loss": 0.7154, "step": 10006 }, { "epoch": 1.7816951566951567, "grad_norm": 0.6950963735580444, "learning_rate": 0.00011722848463020292, "loss": 1.0093, "step": 10007 }, { "epoch": 1.7818732193732194, "grad_norm": 0.6806309819221497, "learning_rate": 0.00011721469629108988, "loss": 0.8662, "step": 10008 }, { "epoch": 1.782051282051282, "grad_norm": 0.7528520226478577, "learning_rate": 0.00011720090761469063, "loss": 0.8567, "step": 10009 }, { "epoch": 1.7822293447293447, "grad_norm": 0.6617229580879211, "learning_rate": 0.00011718711860127529, "loss": 1.0378, "step": 10010 }, { "epoch": 1.7824074074074074, "grad_norm": 0.6468376517295837, "learning_rate": 0.00011717332925111411, "loss": 1.0658, "step": 10011 }, { "epoch": 1.7825854700854702, "grad_norm": 0.7141897082328796, "learning_rate": 0.00011715953956447721, "loss": 1.023, "step": 10012 }, { "epoch": 1.7827635327635327, "grad_norm": 0.5777570605278015, "learning_rate": 0.00011714574954163475, "loss": 0.9154, "step": 10013 }, { "epoch": 1.7829415954415955, "grad_norm": 0.7536137700080872, "learning_rate": 0.00011713195918285695, "loss": 0.9651, "step": 10014 }, { "epoch": 1.783119658119658, "grad_norm": 0.6977683305740356, "learning_rate": 0.00011711816848841402, "loss": 0.7977, "step": 10015 }, { "epoch": 1.7832977207977208, "grad_norm": 0.6522472500801086, "learning_rate": 
0.00011710437745857614, "loss": 0.8834, "step": 10016 }, { "epoch": 1.7834757834757835, "grad_norm": 0.6263057589530945, "learning_rate": 0.0001170905860936135, "loss": 1.0576, "step": 10017 }, { "epoch": 1.7836538461538463, "grad_norm": 0.6470699310302734, "learning_rate": 0.00011707679439379635, "loss": 0.9412, "step": 10018 }, { "epoch": 1.7838319088319088, "grad_norm": null, "learning_rate": 0.00011707679439379635, "loss": 1.1746, "step": 10019 }, { "epoch": 1.7840099715099715, "grad_norm": 0.6022017002105713, "learning_rate": 0.00011706300235939485, "loss": 0.8945, "step": 10020 }, { "epoch": 1.784188034188034, "grad_norm": 0.637208104133606, "learning_rate": 0.00011704920999067927, "loss": 1.0215, "step": 10021 }, { "epoch": 1.7843660968660968, "grad_norm": 0.7467851042747498, "learning_rate": 0.00011703541728791987, "loss": 1.0341, "step": 10022 }, { "epoch": 1.7845441595441596, "grad_norm": 0.7562711238861084, "learning_rate": 0.00011702162425138683, "loss": 0.9748, "step": 10023 }, { "epoch": 1.7847222222222223, "grad_norm": 0.6480089426040649, "learning_rate": 0.00011700783088135043, "loss": 1.05, "step": 10024 }, { "epoch": 1.7849002849002849, "grad_norm": 0.6293981671333313, "learning_rate": 0.00011699403717808091, "loss": 1.0376, "step": 10025 }, { "epoch": 1.7850783475783476, "grad_norm": 0.6821253895759583, "learning_rate": 0.00011698024314184853, "loss": 1.0542, "step": 10026 }, { "epoch": 1.7852564102564101, "grad_norm": 0.6681216359138489, "learning_rate": 0.00011696644877292356, "loss": 1.0018, "step": 10027 }, { "epoch": 1.7854344729344729, "grad_norm": 0.6788804531097412, "learning_rate": 0.00011695265407157628, "loss": 1.1823, "step": 10028 }, { "epoch": 1.7856125356125356, "grad_norm": 0.6147881150245667, "learning_rate": 0.00011693885903807697, "loss": 0.9246, "step": 10029 }, { "epoch": 1.7857905982905984, "grad_norm": 0.7952296137809753, "learning_rate": 0.00011692506367269588, "loss": 1.0528, "step": 10030 }, { "epoch": 
1.785968660968661, "grad_norm": 0.6985954642295837, "learning_rate": 0.00011691126797570333, "loss": 0.9173, "step": 10031 }, { "epoch": 1.7861467236467237, "grad_norm": 0.6211223602294922, "learning_rate": 0.00011689747194736961, "loss": 0.7527, "step": 10032 }, { "epoch": 1.7863247863247862, "grad_norm": 0.7531208992004395, "learning_rate": 0.00011688367558796507, "loss": 1.1087, "step": 10033 }, { "epoch": 1.786502849002849, "grad_norm": 0.7742924690246582, "learning_rate": 0.00011686987889775996, "loss": 1.1512, "step": 10034 }, { "epoch": 1.7866809116809117, "grad_norm": 0.7046231627464294, "learning_rate": 0.00011685608187702459, "loss": 1.0516, "step": 10035 }, { "epoch": 1.7868589743589745, "grad_norm": 0.6264076232910156, "learning_rate": 0.00011684228452602933, "loss": 0.8938, "step": 10036 }, { "epoch": 1.7870370370370372, "grad_norm": 0.6342145800590515, "learning_rate": 0.00011682848684504448, "loss": 0.8177, "step": 10037 }, { "epoch": 1.7872150997150997, "grad_norm": 0.6609861254692078, "learning_rate": 0.00011681468883434041, "loss": 0.9692, "step": 10038 }, { "epoch": 1.7873931623931623, "grad_norm": 0.7918622493743896, "learning_rate": 0.00011680089049418743, "loss": 0.8246, "step": 10039 }, { "epoch": 1.787571225071225, "grad_norm": 0.697712779045105, "learning_rate": 0.00011678709182485592, "loss": 0.8981, "step": 10040 }, { "epoch": 1.7877492877492878, "grad_norm": 0.6747658252716064, "learning_rate": 0.00011677329282661617, "loss": 1.1243, "step": 10041 }, { "epoch": 1.7879273504273505, "grad_norm": 0.6525771617889404, "learning_rate": 0.00011675949349973863, "loss": 0.852, "step": 10042 }, { "epoch": 1.7881054131054133, "grad_norm": 0.7062464952468872, "learning_rate": 0.00011674569384449363, "loss": 1.2582, "step": 10043 }, { "epoch": 1.7882834757834758, "grad_norm": 0.6453786492347717, "learning_rate": 0.00011673189386115154, "loss": 0.868, "step": 10044 }, { "epoch": 1.7884615384615383, "grad_norm": 0.7939708232879639, "learning_rate": 
0.00011671809354998273, "loss": 0.7553, "step": 10045 }, { "epoch": 1.788639601139601, "grad_norm": 0.6466066837310791, "learning_rate": 0.00011670429291125761, "loss": 0.942, "step": 10046 }, { "epoch": 1.7888176638176638, "grad_norm": 0.7380510568618774, "learning_rate": 0.00011669049194524657, "loss": 1.044, "step": 10047 }, { "epoch": 1.7889957264957266, "grad_norm": 0.6719707250595093, "learning_rate": 0.00011667669065222002, "loss": 1.1624, "step": 10048 }, { "epoch": 1.7891737891737893, "grad_norm": 0.6996603012084961, "learning_rate": 0.00011666288903244837, "loss": 1.001, "step": 10049 }, { "epoch": 1.7893518518518519, "grad_norm": 0.696590006351471, "learning_rate": 0.00011664908708620202, "loss": 1.17, "step": 10050 }, { "epoch": 1.7895299145299144, "grad_norm": 0.7226764559745789, "learning_rate": 0.00011663528481375137, "loss": 1.0762, "step": 10051 }, { "epoch": 1.7897079772079771, "grad_norm": 0.6117866635322571, "learning_rate": 0.00011662148221536689, "loss": 0.9199, "step": 10052 }, { "epoch": 1.78988603988604, "grad_norm": 0.6424985527992249, "learning_rate": 0.000116607679291319, "loss": 1.1672, "step": 10053 }, { "epoch": 1.7900641025641026, "grad_norm": 0.6390290856361389, "learning_rate": 0.00011659387604187813, "loss": 1.1895, "step": 10054 }, { "epoch": 1.7902421652421654, "grad_norm": 0.6553205251693726, "learning_rate": 0.00011658007246731473, "loss": 1.0967, "step": 10055 }, { "epoch": 1.790420227920228, "grad_norm": 0.7737570405006409, "learning_rate": 0.00011656626856789922, "loss": 0.9637, "step": 10056 }, { "epoch": 1.7905982905982905, "grad_norm": 0.644296407699585, "learning_rate": 0.00011655246434390212, "loss": 0.9933, "step": 10057 }, { "epoch": 1.7907763532763532, "grad_norm": 0.8154410123825073, "learning_rate": 0.00011653865979559388, "loss": 0.9623, "step": 10058 }, { "epoch": 1.790954415954416, "grad_norm": 0.7181384563446045, "learning_rate": 0.00011652485492324495, "loss": 0.9113, "step": 10059 }, { "epoch": 
1.7911324786324787, "grad_norm": 0.7835097908973694, "learning_rate": 0.00011651104972712582, "loss": 1.0804, "step": 10060 }, { "epoch": 1.7913105413105415, "grad_norm": 0.6843693852424622, "learning_rate": 0.00011649724420750691, "loss": 1.0242, "step": 10061 }, { "epoch": 1.791488603988604, "grad_norm": 0.8364703059196472, "learning_rate": 0.00011648343836465885, "loss": 0.8445, "step": 10062 }, { "epoch": 1.7916666666666665, "grad_norm": 0.7122092843055725, "learning_rate": 0.00011646963219885201, "loss": 1.0453, "step": 10063 }, { "epoch": 1.7918447293447293, "grad_norm": 0.7018755078315735, "learning_rate": 0.00011645582571035696, "loss": 0.9753, "step": 10064 }, { "epoch": 1.792022792022792, "grad_norm": 0.6522594094276428, "learning_rate": 0.00011644201889944419, "loss": 1.0328, "step": 10065 }, { "epoch": 1.7922008547008548, "grad_norm": 0.70301353931427, "learning_rate": 0.00011642821176638419, "loss": 0.9143, "step": 10066 }, { "epoch": 1.7923789173789175, "grad_norm": 0.6255469918251038, "learning_rate": 0.0001164144043114475, "loss": 0.9527, "step": 10067 }, { "epoch": 1.79255698005698, "grad_norm": 0.6780602931976318, "learning_rate": 0.0001164005965349047, "loss": 0.9192, "step": 10068 }, { "epoch": 1.7927350427350426, "grad_norm": 0.6025984287261963, "learning_rate": 0.00011638678843702626, "loss": 0.9055, "step": 10069 }, { "epoch": 1.7929131054131053, "grad_norm": 0.6430829763412476, "learning_rate": 0.00011637298001808275, "loss": 0.9359, "step": 10070 }, { "epoch": 1.793091168091168, "grad_norm": 0.6388106942176819, "learning_rate": 0.0001163591712783447, "loss": 0.8847, "step": 10071 }, { "epoch": 1.7932692307692308, "grad_norm": 0.706347644329071, "learning_rate": 0.00011634536221808265, "loss": 0.9055, "step": 10072 }, { "epoch": 1.7934472934472936, "grad_norm": 0.661226749420166, "learning_rate": 0.00011633155283756721, "loss": 1.118, "step": 10073 }, { "epoch": 1.7936253561253561, "grad_norm": 0.543207049369812, "learning_rate": 
0.00011631774313706891, "loss": 0.8856, "step": 10074 }, { "epoch": 1.7938034188034186, "grad_norm": 0.6514154672622681, "learning_rate": 0.00011630393311685835, "loss": 0.8967, "step": 10075 }, { "epoch": 1.7939814814814814, "grad_norm": 0.8669198155403137, "learning_rate": 0.00011629012277720607, "loss": 1.0362, "step": 10076 }, { "epoch": 1.7941595441595442, "grad_norm": 0.7256068587303162, "learning_rate": 0.00011627631211838266, "loss": 1.1948, "step": 10077 }, { "epoch": 1.794337606837607, "grad_norm": 0.6504935622215271, "learning_rate": 0.00011626250114065875, "loss": 0.8309, "step": 10078 }, { "epoch": 1.7945156695156697, "grad_norm": 0.6964160799980164, "learning_rate": 0.0001162486898443049, "loss": 0.9593, "step": 10079 }, { "epoch": 1.7946937321937322, "grad_norm": 0.668727695941925, "learning_rate": 0.00011623487822959174, "loss": 0.8897, "step": 10080 }, { "epoch": 1.7948717948717947, "grad_norm": 0.6907223463058472, "learning_rate": 0.00011622106629678986, "loss": 0.897, "step": 10081 }, { "epoch": 1.7950498575498575, "grad_norm": 0.6652865409851074, "learning_rate": 0.00011620725404616985, "loss": 0.9321, "step": 10082 }, { "epoch": 1.7952279202279202, "grad_norm": 0.6523811221122742, "learning_rate": 0.00011619344147800239, "loss": 0.8991, "step": 10083 }, { "epoch": 1.795405982905983, "grad_norm": 0.6162952184677124, "learning_rate": 0.0001161796285925581, "loss": 0.8061, "step": 10084 }, { "epoch": 1.7955840455840457, "grad_norm": 0.670606791973114, "learning_rate": 0.0001161658153901076, "loss": 0.9341, "step": 10085 }, { "epoch": 1.7957621082621082, "grad_norm": 0.6372489333152771, "learning_rate": 0.00011615200187092148, "loss": 1.1049, "step": 10086 }, { "epoch": 1.7959401709401708, "grad_norm": 0.7311037182807922, "learning_rate": 0.00011613818803527045, "loss": 1.0881, "step": 10087 }, { "epoch": 1.7961182336182335, "grad_norm": 0.7440751194953918, "learning_rate": 0.00011612437388342518, "loss": 0.9487, "step": 10088 }, { "epoch": 
1.7962962962962963, "grad_norm": 0.6605934500694275, "learning_rate": 0.00011611055941565629, "loss": 0.8757, "step": 10089 }, { "epoch": 1.796474358974359, "grad_norm": 0.7546001076698303, "learning_rate": 0.00011609674463223446, "loss": 0.9368, "step": 10090 }, { "epoch": 1.7966524216524218, "grad_norm": 0.7001389861106873, "learning_rate": 0.00011608292953343036, "loss": 0.9098, "step": 10091 }, { "epoch": 1.7968304843304843, "grad_norm": 0.6898102760314941, "learning_rate": 0.00011606911411951462, "loss": 0.8821, "step": 10092 }, { "epoch": 1.797008547008547, "grad_norm": 0.7020773887634277, "learning_rate": 0.00011605529839075801, "loss": 1.2775, "step": 10093 }, { "epoch": 1.7971866096866096, "grad_norm": 0.6061446070671082, "learning_rate": 0.0001160414823474312, "loss": 1.0156, "step": 10094 }, { "epoch": 1.7973646723646723, "grad_norm": 0.6746069192886353, "learning_rate": 0.00011602766598980484, "loss": 0.8223, "step": 10095 }, { "epoch": 1.797542735042735, "grad_norm": 0.655829131603241, "learning_rate": 0.00011601384931814967, "loss": 0.9482, "step": 10096 }, { "epoch": 1.7977207977207978, "grad_norm": 0.6762703061103821, "learning_rate": 0.00011600003233273636, "loss": 1.0191, "step": 10097 }, { "epoch": 1.7978988603988604, "grad_norm": 0.7610527276992798, "learning_rate": 0.00011598621503383566, "loss": 1.0771, "step": 10098 }, { "epoch": 1.7980769230769231, "grad_norm": 0.6857240200042725, "learning_rate": 0.0001159723974217183, "loss": 0.8325, "step": 10099 }, { "epoch": 1.7982549857549857, "grad_norm": 0.6897954940795898, "learning_rate": 0.00011595857949665501, "loss": 1.0064, "step": 10100 }, { "epoch": 1.7984330484330484, "grad_norm": 0.7023211717605591, "learning_rate": 0.00011594476125891649, "loss": 1.1346, "step": 10101 }, { "epoch": 1.7986111111111112, "grad_norm": 0.8131003975868225, "learning_rate": 0.00011593094270877347, "loss": 1.0384, "step": 10102 }, { "epoch": 1.798789173789174, "grad_norm": 0.6504445672035217, "learning_rate": 
0.00011591712384649676, "loss": 0.8172, "step": 10103 }, { "epoch": 1.7989672364672364, "grad_norm": 0.7379748821258545, "learning_rate": 0.00011590330467235704, "loss": 1.0118, "step": 10104 }, { "epoch": 1.7991452991452992, "grad_norm": 0.8867329955101013, "learning_rate": 0.0001158894851866251, "loss": 1.023, "step": 10105 }, { "epoch": 1.7993233618233617, "grad_norm": 0.7057412266731262, "learning_rate": 0.00011587566538957173, "loss": 0.8415, "step": 10106 }, { "epoch": 1.7995014245014245, "grad_norm": 0.7479654550552368, "learning_rate": 0.00011586184528146769, "loss": 0.9663, "step": 10107 }, { "epoch": 1.7996794871794872, "grad_norm": 0.6280845403671265, "learning_rate": 0.00011584802486258368, "loss": 0.973, "step": 10108 }, { "epoch": 1.79985754985755, "grad_norm": 0.6735749840736389, "learning_rate": 0.00011583420413319059, "loss": 0.8631, "step": 10109 }, { "epoch": 1.8000356125356125, "grad_norm": 0.5940406918525696, "learning_rate": 0.00011582038309355918, "loss": 0.8533, "step": 10110 }, { "epoch": 1.8002136752136753, "grad_norm": 0.6923874020576477, "learning_rate": 0.00011580656174396021, "loss": 1.1105, "step": 10111 }, { "epoch": 1.8003917378917378, "grad_norm": 0.6996715664863586, "learning_rate": 0.00011579274008466447, "loss": 0.9952, "step": 10112 }, { "epoch": 1.8005698005698005, "grad_norm": 0.656561553478241, "learning_rate": 0.00011577891811594281, "loss": 0.9621, "step": 10113 }, { "epoch": 1.8007478632478633, "grad_norm": 0.7121242880821228, "learning_rate": 0.00011576509583806605, "loss": 0.8658, "step": 10114 }, { "epoch": 1.800925925925926, "grad_norm": 0.7864459753036499, "learning_rate": 0.00011575127325130498, "loss": 0.9867, "step": 10115 }, { "epoch": 1.8011039886039886, "grad_norm": 0.6086452007293701, "learning_rate": 0.00011573745035593042, "loss": 0.8625, "step": 10116 }, { "epoch": 1.8012820512820513, "grad_norm": 0.6553642749786377, "learning_rate": 0.00011572362715221321, "loss": 0.8475, "step": 10117 }, { "epoch": 
1.8014601139601139, "grad_norm": 0.6677348017692566, "learning_rate": 0.00011570980364042419, "loss": 0.9672, "step": 10118 }, { "epoch": 1.8016381766381766, "grad_norm": 0.6275015473365784, "learning_rate": 0.0001156959798208342, "loss": 0.8663, "step": 10119 }, { "epoch": 1.8018162393162394, "grad_norm": 0.787568211555481, "learning_rate": 0.0001156821556937141, "loss": 1.0188, "step": 10120 }, { "epoch": 1.801994301994302, "grad_norm": 0.6983163356781006, "learning_rate": 0.00011566833125933473, "loss": 1.0767, "step": 10121 }, { "epoch": 1.8021723646723646, "grad_norm": 0.7008936405181885, "learning_rate": 0.00011565450651796695, "loss": 1.0116, "step": 10122 }, { "epoch": 1.8023504273504274, "grad_norm": 0.7694976925849915, "learning_rate": 0.00011564068146988163, "loss": 1.0227, "step": 10123 }, { "epoch": 1.80252849002849, "grad_norm": 0.9530014991760254, "learning_rate": 0.00011562685611534967, "loss": 0.907, "step": 10124 }, { "epoch": 1.8027065527065527, "grad_norm": 0.6714984178543091, "learning_rate": 0.00011561303045464189, "loss": 0.9501, "step": 10125 }, { "epoch": 1.8028846153846154, "grad_norm": 0.7233797311782837, "learning_rate": 0.00011559920448802925, "loss": 1.021, "step": 10126 }, { "epoch": 1.8030626780626782, "grad_norm": 0.7600540518760681, "learning_rate": 0.0001155853782157826, "loss": 1.1056, "step": 10127 }, { "epoch": 1.8032407407407407, "grad_norm": 0.7836297750473022, "learning_rate": 0.00011557155163817281, "loss": 0.9906, "step": 10128 }, { "epoch": 1.8034188034188035, "grad_norm": 0.7161104083061218, "learning_rate": 0.00011555772475547084, "loss": 0.9541, "step": 10129 }, { "epoch": 1.803596866096866, "grad_norm": 0.6613732576370239, "learning_rate": 0.00011554389756794757, "loss": 0.9188, "step": 10130 }, { "epoch": 1.8037749287749287, "grad_norm": 0.6415915489196777, "learning_rate": 0.00011553007007587391, "loss": 0.9928, "step": 10131 }, { "epoch": 1.8039529914529915, "grad_norm": 0.7730516195297241, "learning_rate": 
0.0001155162422795208, "loss": 1.0654, "step": 10132 }, { "epoch": 1.8041310541310542, "grad_norm": 0.6769654750823975, "learning_rate": 0.00011550241417915913, "loss": 1.0678, "step": 10133 }, { "epoch": 1.8043091168091168, "grad_norm": 0.6542425751686096, "learning_rate": 0.00011548858577505988, "loss": 0.9796, "step": 10134 }, { "epoch": 1.8044871794871795, "grad_norm": 0.7282404899597168, "learning_rate": 0.00011547475706749395, "loss": 1.0314, "step": 10135 }, { "epoch": 1.804665242165242, "grad_norm": 0.6450245976448059, "learning_rate": 0.00011546092805673232, "loss": 0.9564, "step": 10136 }, { "epoch": 1.8048433048433048, "grad_norm": 0.65577632188797, "learning_rate": 0.0001154470987430459, "loss": 1.0219, "step": 10137 }, { "epoch": 1.8050213675213675, "grad_norm": 0.7151737809181213, "learning_rate": 0.00011543326912670567, "loss": 0.9245, "step": 10138 }, { "epoch": 1.8051994301994303, "grad_norm": 0.6695905327796936, "learning_rate": 0.00011541943920798259, "loss": 0.9535, "step": 10139 }, { "epoch": 1.8053774928774928, "grad_norm": 0.7443813681602478, "learning_rate": 0.00011540560898714767, "loss": 1.1697, "step": 10140 }, { "epoch": 1.8055555555555556, "grad_norm": 0.5701992511749268, "learning_rate": 0.0001153917784644718, "loss": 0.7868, "step": 10141 }, { "epoch": 1.805733618233618, "grad_norm": 0.6992354989051819, "learning_rate": 0.00011537794764022605, "loss": 0.9856, "step": 10142 }, { "epoch": 1.8059116809116809, "grad_norm": 0.6354477405548096, "learning_rate": 0.00011536411651468131, "loss": 0.8752, "step": 10143 }, { "epoch": 1.8060897435897436, "grad_norm": 0.6952932476997375, "learning_rate": 0.00011535028508810864, "loss": 0.9446, "step": 10144 }, { "epoch": 1.8062678062678064, "grad_norm": 0.5527541637420654, "learning_rate": 0.00011533645336077901, "loss": 0.5486, "step": 10145 }, { "epoch": 1.806445868945869, "grad_norm": 0.685046374797821, "learning_rate": 0.00011532262133296345, "loss": 0.9529, "step": 10146 }, { "epoch": 
1.8066239316239316, "grad_norm": 0.6927558779716492, "learning_rate": 0.00011530878900493296, "loss": 1.1758, "step": 10147 }, { "epoch": 1.8068019943019942, "grad_norm": 0.6758309602737427, "learning_rate": 0.00011529495637695855, "loss": 1.0076, "step": 10148 }, { "epoch": 1.806980056980057, "grad_norm": 0.6739441156387329, "learning_rate": 0.00011528112344931121, "loss": 1.1914, "step": 10149 }, { "epoch": 1.8071581196581197, "grad_norm": 0.7031944394111633, "learning_rate": 0.00011526729022226204, "loss": 0.783, "step": 10150 }, { "epoch": 1.8073361823361824, "grad_norm": 0.6476930975914001, "learning_rate": 0.00011525345669608202, "loss": 0.9595, "step": 10151 }, { "epoch": 1.8075142450142452, "grad_norm": 0.710498571395874, "learning_rate": 0.00011523962287104222, "loss": 0.8821, "step": 10152 }, { "epoch": 1.8076923076923077, "grad_norm": 0.6664412617683411, "learning_rate": 0.00011522578874741365, "loss": 1.0182, "step": 10153 }, { "epoch": 1.8078703703703702, "grad_norm": 0.8374263048171997, "learning_rate": 0.00011521195432546737, "loss": 0.9394, "step": 10154 }, { "epoch": 1.808048433048433, "grad_norm": 0.6770764589309692, "learning_rate": 0.00011519811960547447, "loss": 1.0568, "step": 10155 }, { "epoch": 1.8082264957264957, "grad_norm": 0.7014045715332031, "learning_rate": 0.00011518428458770595, "loss": 1.1705, "step": 10156 }, { "epoch": 1.8084045584045585, "grad_norm": 0.6590061187744141, "learning_rate": 0.00011517044927243295, "loss": 1.1233, "step": 10157 }, { "epoch": 1.8085826210826212, "grad_norm": 0.6093801856040955, "learning_rate": 0.00011515661365992647, "loss": 0.953, "step": 10158 }, { "epoch": 1.8087606837606838, "grad_norm": 0.6197089552879333, "learning_rate": 0.00011514277775045768, "loss": 0.9414, "step": 10159 }, { "epoch": 1.8089387464387463, "grad_norm": 0.7530463337898254, "learning_rate": 0.00011512894154429759, "loss": 0.9168, "step": 10160 }, { "epoch": 1.809116809116809, "grad_norm": 0.6051347851753235, "learning_rate": 
0.00011511510504171735, "loss": 0.9132, "step": 10161 }, { "epoch": 1.8092948717948718, "grad_norm": 0.6388311982154846, "learning_rate": 0.000115101268242988, "loss": 0.6551, "step": 10162 }, { "epoch": 1.8094729344729346, "grad_norm": 0.7040972709655762, "learning_rate": 0.00011508743114838063, "loss": 0.9409, "step": 10163 }, { "epoch": 1.8096509971509973, "grad_norm": 0.7669548392295837, "learning_rate": 0.00011507359375816644, "loss": 1.0376, "step": 10164 }, { "epoch": 1.8098290598290598, "grad_norm": 0.7309662699699402, "learning_rate": 0.00011505975607261646, "loss": 0.9071, "step": 10165 }, { "epoch": 1.8100071225071224, "grad_norm": 0.6624547839164734, "learning_rate": 0.00011504591809200187, "loss": 1.0765, "step": 10166 }, { "epoch": 1.8101851851851851, "grad_norm": 0.7719045281410217, "learning_rate": 0.00011503207981659376, "loss": 0.9244, "step": 10167 }, { "epoch": 1.8103632478632479, "grad_norm": 0.6701484322547913, "learning_rate": 0.0001150182412466633, "loss": 0.9475, "step": 10168 }, { "epoch": 1.8105413105413106, "grad_norm": 0.5604981184005737, "learning_rate": 0.00011500440238248154, "loss": 0.6268, "step": 10169 }, { "epoch": 1.8107193732193734, "grad_norm": 0.6736510992050171, "learning_rate": 0.00011499056322431973, "loss": 0.9088, "step": 10170 }, { "epoch": 1.810897435897436, "grad_norm": 0.7428455948829651, "learning_rate": 0.00011497672377244897, "loss": 0.9298, "step": 10171 }, { "epoch": 1.8110754985754984, "grad_norm": 0.6543142795562744, "learning_rate": 0.00011496288402714042, "loss": 0.8863, "step": 10172 }, { "epoch": 1.8112535612535612, "grad_norm": 0.6809250712394714, "learning_rate": 0.00011494904398866524, "loss": 0.977, "step": 10173 }, { "epoch": 1.811431623931624, "grad_norm": 0.8105120062828064, "learning_rate": 0.00011493520365729456, "loss": 1.2115, "step": 10174 }, { "epoch": 1.8116096866096867, "grad_norm": 0.6985095143318176, "learning_rate": 0.00011492136303329964, "loss": 0.8233, "step": 10175 }, { "epoch": 
1.8117877492877494, "grad_norm": 0.7198361754417419, "learning_rate": 0.00011490752211695158, "loss": 1.0552, "step": 10176 }, { "epoch": 1.811965811965812, "grad_norm": 0.7077036499977112, "learning_rate": 0.0001148936809085216, "loss": 0.9171, "step": 10177 }, { "epoch": 1.8121438746438745, "grad_norm": 0.9362925887107849, "learning_rate": 0.00011487983940828089, "loss": 0.9042, "step": 10178 }, { "epoch": 1.8123219373219372, "grad_norm": 0.6732819676399231, "learning_rate": 0.0001148659976165006, "loss": 1.1033, "step": 10179 }, { "epoch": 1.8125, "grad_norm": 0.747702419757843, "learning_rate": 0.00011485215553345201, "loss": 1.0692, "step": 10180 }, { "epoch": 1.8126780626780628, "grad_norm": 0.7011259198188782, "learning_rate": 0.00011483831315940627, "loss": 0.9278, "step": 10181 }, { "epoch": 1.8128561253561255, "grad_norm": 0.8542702198028564, "learning_rate": 0.00011482447049463462, "loss": 0.9476, "step": 10182 }, { "epoch": 1.813034188034188, "grad_norm": 0.6975166201591492, "learning_rate": 0.00011481062753940825, "loss": 0.9486, "step": 10183 }, { "epoch": 1.8132122507122506, "grad_norm": 0.8239036798477173, "learning_rate": 0.0001147967842939984, "loss": 1.0518, "step": 10184 }, { "epoch": 1.8133903133903133, "grad_norm": 0.7559717297554016, "learning_rate": 0.00011478294075867628, "loss": 1.1877, "step": 10185 }, { "epoch": 1.813568376068376, "grad_norm": 0.6755532026290894, "learning_rate": 0.00011476909693371318, "loss": 0.9287, "step": 10186 }, { "epoch": 1.8137464387464388, "grad_norm": 0.6561332941055298, "learning_rate": 0.0001147552528193803, "loss": 0.83, "step": 10187 }, { "epoch": 1.8139245014245016, "grad_norm": 0.7223508954048157, "learning_rate": 0.00011474140841594887, "loss": 1.1259, "step": 10188 }, { "epoch": 1.814102564102564, "grad_norm": 0.7920593023300171, "learning_rate": 0.0001147275637236902, "loss": 1.0925, "step": 10189 }, { "epoch": 1.8142806267806266, "grad_norm": 0.6896616816520691, "learning_rate": 
0.00011471371874287546, "loss": 1.0204, "step": 10190 }, { "epoch": 1.8144586894586894, "grad_norm": 0.6149865388870239, "learning_rate": 0.00011469987347377602, "loss": 1.1249, "step": 10191 }, { "epoch": 1.8146367521367521, "grad_norm": 0.6650002598762512, "learning_rate": 0.00011468602791666307, "loss": 0.9723, "step": 10192 }, { "epoch": 1.8148148148148149, "grad_norm": 0.7298738956451416, "learning_rate": 0.00011467218207180792, "loss": 1.0225, "step": 10193 }, { "epoch": 1.8149928774928776, "grad_norm": 0.8075628876686096, "learning_rate": 0.00011465833593948183, "loss": 1.0429, "step": 10194 }, { "epoch": 1.8151709401709402, "grad_norm": 0.8196593523025513, "learning_rate": 0.0001146444895199561, "loss": 0.9148, "step": 10195 }, { "epoch": 1.8153490028490027, "grad_norm": 0.6394698023796082, "learning_rate": 0.00011463064281350204, "loss": 0.9781, "step": 10196 }, { "epoch": 1.8155270655270654, "grad_norm": 0.7302836775779724, "learning_rate": 0.00011461679582039091, "loss": 1.0394, "step": 10197 }, { "epoch": 1.8157051282051282, "grad_norm": 0.7066670060157776, "learning_rate": 0.00011460294854089404, "loss": 1.1153, "step": 10198 }, { "epoch": 1.815883190883191, "grad_norm": 0.6471068263053894, "learning_rate": 0.0001145891009752827, "loss": 1.1533, "step": 10199 }, { "epoch": 1.8160612535612537, "grad_norm": 0.6842355132102966, "learning_rate": 0.00011457525312382826, "loss": 0.953, "step": 10200 }, { "epoch": 1.8162393162393162, "grad_norm": 0.6720319986343384, "learning_rate": 0.00011456140498680202, "loss": 1.003, "step": 10201 }, { "epoch": 1.8164173789173788, "grad_norm": 0.632017970085144, "learning_rate": 0.00011454755656447527, "loss": 0.8148, "step": 10202 }, { "epoch": 1.8165954415954415, "grad_norm": 0.7193828225135803, "learning_rate": 0.00011453370785711939, "loss": 1.0098, "step": 10203 }, { "epoch": 1.8167735042735043, "grad_norm": 0.7098045349121094, "learning_rate": 0.00011451985886500566, "loss": 1.1276, "step": 10204 }, { "epoch": 
1.816951566951567, "grad_norm": 0.7076733708381653, "learning_rate": 0.00011450600958840547, "loss": 1.1216, "step": 10205 }, { "epoch": 1.8171296296296298, "grad_norm": 0.6864610314369202, "learning_rate": 0.00011449216002759018, "loss": 0.9896, "step": 10206 }, { "epoch": 1.8173076923076923, "grad_norm": 0.737727701663971, "learning_rate": 0.0001144783101828311, "loss": 0.9447, "step": 10207 }, { "epoch": 1.8174857549857548, "grad_norm": 0.6562525033950806, "learning_rate": 0.00011446446005439964, "loss": 1.1208, "step": 10208 }, { "epoch": 1.8176638176638176, "grad_norm": 0.7203826308250427, "learning_rate": 0.0001144506096425671, "loss": 1.1339, "step": 10209 }, { "epoch": 1.8178418803418803, "grad_norm": 0.6657233834266663, "learning_rate": 0.00011443675894760489, "loss": 0.8307, "step": 10210 }, { "epoch": 1.818019943019943, "grad_norm": 0.7032586932182312, "learning_rate": 0.00011442290796978437, "loss": 0.8546, "step": 10211 }, { "epoch": 1.8181980056980058, "grad_norm": 0.6989460587501526, "learning_rate": 0.00011440905670937696, "loss": 1.0749, "step": 10212 }, { "epoch": 1.8183760683760684, "grad_norm": 0.6461085677146912, "learning_rate": 0.00011439520516665399, "loss": 0.984, "step": 10213 }, { "epoch": 1.818554131054131, "grad_norm": 0.7077372670173645, "learning_rate": 0.00011438135334188689, "loss": 1.0813, "step": 10214 }, { "epoch": 1.8187321937321936, "grad_norm": 0.6724075675010681, "learning_rate": 0.00011436750123534704, "loss": 0.9975, "step": 10215 }, { "epoch": 1.8189102564102564, "grad_norm": 0.6205753684043884, "learning_rate": 0.00011435364884730583, "loss": 0.7414, "step": 10216 }, { "epoch": 1.8190883190883191, "grad_norm": 0.6416093707084656, "learning_rate": 0.00011433979617803472, "loss": 1.0024, "step": 10217 }, { "epoch": 1.819266381766382, "grad_norm": 0.7817183136940002, "learning_rate": 0.00011432594322780508, "loss": 1.0577, "step": 10218 }, { "epoch": 1.8194444444444444, "grad_norm": 0.688220202922821, "learning_rate": 
0.00011431208999688835, "loss": 1.0301, "step": 10219 }, { "epoch": 1.8196225071225072, "grad_norm": 0.6464754343032837, "learning_rate": 0.0001142982364855559, "loss": 1.0608, "step": 10220 }, { "epoch": 1.8198005698005697, "grad_norm": 0.6607306599617004, "learning_rate": 0.00011428438269407926, "loss": 1.1203, "step": 10221 }, { "epoch": 1.8199786324786325, "grad_norm": 0.5779942870140076, "learning_rate": 0.00011427052862272982, "loss": 0.7895, "step": 10222 }, { "epoch": 1.8201566951566952, "grad_norm": 0.7599068880081177, "learning_rate": 0.000114256674271779, "loss": 0.883, "step": 10223 }, { "epoch": 1.820334757834758, "grad_norm": 0.6578865051269531, "learning_rate": 0.00011424281964149824, "loss": 1.101, "step": 10224 }, { "epoch": 1.8205128205128205, "grad_norm": 0.7090746760368347, "learning_rate": 0.00011422896473215905, "loss": 0.9514, "step": 10225 }, { "epoch": 1.8206908831908832, "grad_norm": 0.7537758946418762, "learning_rate": 0.00011421510954403281, "loss": 1.2193, "step": 10226 }, { "epoch": 1.8208689458689458, "grad_norm": 0.670183002948761, "learning_rate": 0.00011420125407739106, "loss": 1.1408, "step": 10227 }, { "epoch": 1.8210470085470085, "grad_norm": 0.742520809173584, "learning_rate": 0.00011418739833250524, "loss": 0.8826, "step": 10228 }, { "epoch": 1.8212250712250713, "grad_norm": 0.6542800664901733, "learning_rate": 0.00011417354230964683, "loss": 1.0039, "step": 10229 }, { "epoch": 1.821403133903134, "grad_norm": 0.6713709235191345, "learning_rate": 0.00011415968600908727, "loss": 0.9351, "step": 10230 }, { "epoch": 1.8215811965811965, "grad_norm": 0.6794951558113098, "learning_rate": 0.0001141458294310981, "loss": 0.9491, "step": 10231 }, { "epoch": 1.8217592592592593, "grad_norm": 0.6921972632408142, "learning_rate": 0.00011413197257595079, "loss": 1.1342, "step": 10232 }, { "epoch": 1.8219373219373218, "grad_norm": 0.702586829662323, "learning_rate": 0.00011411811544391682, "loss": 0.9992, "step": 10233 }, { "epoch": 
1.8221153846153846, "grad_norm": 0.8147975206375122, "learning_rate": 0.00011410425803526772, "loss": 1.0507, "step": 10234 }, { "epoch": 1.8222934472934473, "grad_norm": 0.66419517993927, "learning_rate": 0.00011409040035027496, "loss": 1.0426, "step": 10235 }, { "epoch": 1.82247150997151, "grad_norm": 0.6132485866546631, "learning_rate": 0.00011407654238921011, "loss": 0.9859, "step": 10236 }, { "epoch": 1.8226495726495726, "grad_norm": 0.7522366046905518, "learning_rate": 0.00011406268415234462, "loss": 0.9379, "step": 10237 }, { "epoch": 1.8228276353276354, "grad_norm": 0.6335554122924805, "learning_rate": 0.00011404882563995007, "loss": 0.9322, "step": 10238 }, { "epoch": 1.823005698005698, "grad_norm": 0.7577497363090515, "learning_rate": 0.00011403496685229797, "loss": 1.1383, "step": 10239 }, { "epoch": 1.8231837606837606, "grad_norm": 0.6796886920928955, "learning_rate": 0.00011402110778965982, "loss": 1.0092, "step": 10240 }, { "epoch": 1.8233618233618234, "grad_norm": 0.7676617503166199, "learning_rate": 0.0001140072484523072, "loss": 1.0137, "step": 10241 }, { "epoch": 1.8235398860398861, "grad_norm": 0.7807821035385132, "learning_rate": 0.00011399338884051165, "loss": 0.8987, "step": 10242 }, { "epoch": 1.8237179487179487, "grad_norm": 0.7169568538665771, "learning_rate": 0.00011397952895454473, "loss": 0.8984, "step": 10243 }, { "epoch": 1.8238960113960114, "grad_norm": 0.6564654111862183, "learning_rate": 0.00011396566879467793, "loss": 1.0255, "step": 10244 }, { "epoch": 1.824074074074074, "grad_norm": 0.7290034294128418, "learning_rate": 0.00011395180836118292, "loss": 0.9962, "step": 10245 }, { "epoch": 1.8242521367521367, "grad_norm": 0.6610758900642395, "learning_rate": 0.00011393794765433115, "loss": 1.102, "step": 10246 }, { "epoch": 1.8244301994301995, "grad_norm": 0.6875932216644287, "learning_rate": 0.0001139240866743943, "loss": 0.9963, "step": 10247 }, { "epoch": 1.8246082621082622, "grad_norm": 0.7595645189285278, "learning_rate": 
0.00011391022542164387, "loss": 1.1285, "step": 10248 }, { "epoch": 1.8247863247863247, "grad_norm": 0.6752721667289734, "learning_rate": 0.0001138963638963515, "loss": 0.9447, "step": 10249 }, { "epoch": 1.8249643874643875, "grad_norm": 0.6697955131530762, "learning_rate": 0.00011388250209878873, "loss": 1.0804, "step": 10250 }, { "epoch": 1.82514245014245, "grad_norm": 0.6546956896781921, "learning_rate": 0.00011386864002922713, "loss": 0.9626, "step": 10251 }, { "epoch": 1.8253205128205128, "grad_norm": 0.8002896904945374, "learning_rate": 0.00011385477768793838, "loss": 1.1933, "step": 10252 }, { "epoch": 1.8254985754985755, "grad_norm": 0.6566781401634216, "learning_rate": 0.00011384091507519403, "loss": 0.9802, "step": 10253 }, { "epoch": 1.8256766381766383, "grad_norm": 0.617420494556427, "learning_rate": 0.00011382705219126572, "loss": 1.1098, "step": 10254 }, { "epoch": 1.8258547008547008, "grad_norm": 0.6558036208152771, "learning_rate": 0.00011381318903642504, "loss": 1.0291, "step": 10255 }, { "epoch": 1.8260327635327636, "grad_norm": 0.6295637488365173, "learning_rate": 0.00011379932561094358, "loss": 1.0792, "step": 10256 }, { "epoch": 1.826210826210826, "grad_norm": 0.7475154399871826, "learning_rate": 0.00011378546191509303, "loss": 1.1362, "step": 10257 }, { "epoch": 1.8263888888888888, "grad_norm": 0.6814939379692078, "learning_rate": 0.00011377159794914498, "loss": 0.9131, "step": 10258 }, { "epoch": 1.8265669515669516, "grad_norm": 0.6726876497268677, "learning_rate": 0.00011375773371337111, "loss": 0.9147, "step": 10259 }, { "epoch": 1.8267450142450143, "grad_norm": 0.785943865776062, "learning_rate": 0.00011374386920804298, "loss": 1.0137, "step": 10260 }, { "epoch": 1.8269230769230769, "grad_norm": 0.7614478468894958, "learning_rate": 0.0001137300044334323, "loss": 1.2118, "step": 10261 }, { "epoch": 1.8271011396011396, "grad_norm": 0.7317564487457275, "learning_rate": 0.00011371613938981072, "loss": 1.0602, "step": 10262 }, { "epoch": 
1.8272792022792022, "grad_norm": 0.6716432571411133, "learning_rate": 0.00011370227407744987, "loss": 0.952, "step": 10263 }, { "epoch": 1.827457264957265, "grad_norm": 0.6946425437927246, "learning_rate": 0.00011368840849662139, "loss": 1.0554, "step": 10264 }, { "epoch": 1.8276353276353277, "grad_norm": 0.6692264080047607, "learning_rate": 0.00011367454264759703, "loss": 0.8944, "step": 10265 }, { "epoch": 1.8278133903133904, "grad_norm": 0.6931505799293518, "learning_rate": 0.00011366067653064838, "loss": 0.9045, "step": 10266 }, { "epoch": 1.827991452991453, "grad_norm": 0.7233194708824158, "learning_rate": 0.00011364681014604716, "loss": 0.9441, "step": 10267 }, { "epoch": 1.8281695156695157, "grad_norm": 0.6451242566108704, "learning_rate": 0.00011363294349406506, "loss": 0.9948, "step": 10268 }, { "epoch": 1.8283475783475782, "grad_norm": 0.6993351578712463, "learning_rate": 0.00011361907657497375, "loss": 1.1057, "step": 10269 }, { "epoch": 1.828525641025641, "grad_norm": 0.7241137623786926, "learning_rate": 0.00011360520938904493, "loss": 0.974, "step": 10270 }, { "epoch": 1.8287037037037037, "grad_norm": 0.6349480152130127, "learning_rate": 0.00011359134193655027, "loss": 0.9026, "step": 10271 }, { "epoch": 1.8288817663817665, "grad_norm": 0.6916826963424683, "learning_rate": 0.00011357747421776151, "loss": 0.9153, "step": 10272 }, { "epoch": 1.8290598290598292, "grad_norm": 0.879770040512085, "learning_rate": 0.00011356360623295037, "loss": 1.0818, "step": 10273 }, { "epoch": 1.8292378917378918, "grad_norm": 0.6293807029724121, "learning_rate": 0.00011354973798238853, "loss": 1.1164, "step": 10274 }, { "epoch": 1.8294159544159543, "grad_norm": 0.7070622444152832, "learning_rate": 0.0001135358694663477, "loss": 0.8795, "step": 10275 }, { "epoch": 1.829594017094017, "grad_norm": 0.6847673654556274, "learning_rate": 0.00011352200068509962, "loss": 0.9173, "step": 10276 }, { "epoch": 1.8297720797720798, "grad_norm": 0.6552146077156067, "learning_rate": 
0.00011350813163891605, "loss": 1.0425, "step": 10277 }, { "epoch": 1.8299501424501425, "grad_norm": 0.6432808041572571, "learning_rate": 0.0001134942623280687, "loss": 0.9418, "step": 10278 }, { "epoch": 1.8301282051282053, "grad_norm": 0.7412393093109131, "learning_rate": 0.00011348039275282931, "loss": 1.1212, "step": 10279 }, { "epoch": 1.8303062678062678, "grad_norm": 0.6543423533439636, "learning_rate": 0.00011346652291346965, "loss": 1.0553, "step": 10280 }, { "epoch": 1.8304843304843303, "grad_norm": 0.7159286141395569, "learning_rate": 0.00011345265281026138, "loss": 1.0582, "step": 10281 }, { "epoch": 1.830662393162393, "grad_norm": 0.6443323493003845, "learning_rate": 0.00011343878244347639, "loss": 0.9462, "step": 10282 }, { "epoch": 1.8308404558404558, "grad_norm": 0.7592014074325562, "learning_rate": 0.00011342491181338634, "loss": 1.2718, "step": 10283 }, { "epoch": 1.8310185185185186, "grad_norm": 0.627109944820404, "learning_rate": 0.00011341104092026302, "loss": 1.0177, "step": 10284 }, { "epoch": 1.8311965811965814, "grad_norm": 0.8061598539352417, "learning_rate": 0.00011339716976437827, "loss": 0.9416, "step": 10285 }, { "epoch": 1.8313746438746439, "grad_norm": 0.6584261059761047, "learning_rate": 0.00011338329834600377, "loss": 0.8297, "step": 10286 }, { "epoch": 1.8315527065527064, "grad_norm": 0.6329470276832581, "learning_rate": 0.00011336942666541133, "loss": 0.8386, "step": 10287 }, { "epoch": 1.8317307692307692, "grad_norm": 0.6833979487419128, "learning_rate": 0.00011335555472287275, "loss": 0.9407, "step": 10288 }, { "epoch": 1.831908831908832, "grad_norm": 0.7663840651512146, "learning_rate": 0.00011334168251865985, "loss": 1.0018, "step": 10289 }, { "epoch": 1.8320868945868947, "grad_norm": 0.7751262784004211, "learning_rate": 0.00011332781005304436, "loss": 1.0576, "step": 10290 }, { "epoch": 1.8322649572649574, "grad_norm": 0.6857370138168335, "learning_rate": 0.00011331393732629814, "loss": 0.9888, "step": 10291 }, { "epoch": 
1.83244301994302, "grad_norm": 0.7534535527229309, "learning_rate": 0.00011330006433869296, "loss": 1.0834, "step": 10292 }, { "epoch": 1.8326210826210825, "grad_norm": 0.6785250306129456, "learning_rate": 0.00011328619109050065, "loss": 1.0471, "step": 10293 }, { "epoch": 1.8327991452991452, "grad_norm": 0.7023689150810242, "learning_rate": 0.00011327231758199303, "loss": 1.0652, "step": 10294 }, { "epoch": 1.832977207977208, "grad_norm": 0.6776610612869263, "learning_rate": 0.00011325844381344192, "loss": 0.9504, "step": 10295 }, { "epoch": 1.8331552706552707, "grad_norm": 0.7704112529754639, "learning_rate": 0.00011324456978511917, "loss": 0.9712, "step": 10296 }, { "epoch": 1.8333333333333335, "grad_norm": 0.601502537727356, "learning_rate": 0.00011323069549729654, "loss": 1.075, "step": 10297 }, { "epoch": 1.833511396011396, "grad_norm": 0.6282439231872559, "learning_rate": 0.00011321682095024596, "loss": 0.9238, "step": 10298 }, { "epoch": 1.8336894586894585, "grad_norm": 0.6873499155044556, "learning_rate": 0.00011320294614423921, "loss": 1.0464, "step": 10299 }, { "epoch": 1.8338675213675213, "grad_norm": 0.6063792705535889, "learning_rate": 0.00011318907107954815, "loss": 0.9732, "step": 10300 }, { "epoch": 1.834045584045584, "grad_norm": 0.5830921530723572, "learning_rate": 0.00011317519575644464, "loss": 0.7568, "step": 10301 }, { "epoch": 1.8342236467236468, "grad_norm": 0.6394222378730774, "learning_rate": 0.00011316132017520053, "loss": 0.9958, "step": 10302 }, { "epoch": 1.8344017094017095, "grad_norm": 0.7052412033081055, "learning_rate": 0.00011314744433608773, "loss": 0.9129, "step": 10303 }, { "epoch": 1.834579772079772, "grad_norm": 0.7287624478340149, "learning_rate": 0.00011313356823937801, "loss": 0.8608, "step": 10304 }, { "epoch": 1.8347578347578346, "grad_norm": 0.702937662601471, "learning_rate": 0.00011311969188534334, "loss": 1.3074, "step": 10305 }, { "epoch": 1.8349358974358974, "grad_norm": 0.6693850159645081, "learning_rate": 
0.00011310581527425557, "loss": 0.928, "step": 10306 }, { "epoch": 1.83511396011396, "grad_norm": 0.8153932094573975, "learning_rate": 0.00011309193840638654, "loss": 1.1771, "step": 10307 }, { "epoch": 1.8352920227920229, "grad_norm": 0.6517418622970581, "learning_rate": 0.00011307806128200821, "loss": 0.9634, "step": 10308 }, { "epoch": 1.8354700854700856, "grad_norm": 0.6626226305961609, "learning_rate": 0.00011306418390139245, "loss": 0.9371, "step": 10309 }, { "epoch": 1.8356481481481481, "grad_norm": 0.7397477030754089, "learning_rate": 0.0001130503062648111, "loss": 0.9398, "step": 10310 }, { "epoch": 1.8358262108262107, "grad_norm": 0.6790265440940857, "learning_rate": 0.00011303642837253614, "loss": 0.9728, "step": 10311 }, { "epoch": 1.8360042735042734, "grad_norm": 0.6266449093818665, "learning_rate": 0.00011302255022483941, "loss": 0.847, "step": 10312 }, { "epoch": 1.8361823361823362, "grad_norm": 0.791657030582428, "learning_rate": 0.00011300867182199288, "loss": 0.8342, "step": 10313 }, { "epoch": 1.836360398860399, "grad_norm": 0.7128583788871765, "learning_rate": 0.00011299479316426846, "loss": 0.9591, "step": 10314 }, { "epoch": 1.8365384615384617, "grad_norm": 0.659928023815155, "learning_rate": 0.00011298091425193806, "loss": 1.0282, "step": 10315 }, { "epoch": 1.8367165242165242, "grad_norm": 0.6641396284103394, "learning_rate": 0.00011296703508527363, "loss": 1.0161, "step": 10316 }, { "epoch": 1.8368945868945867, "grad_norm": 0.7921316027641296, "learning_rate": 0.00011295315566454702, "loss": 0.8897, "step": 10317 }, { "epoch": 1.8370726495726495, "grad_norm": 0.6900694966316223, "learning_rate": 0.00011293927599003029, "loss": 1.0094, "step": 10318 }, { "epoch": 1.8372507122507122, "grad_norm": 0.8054366707801819, "learning_rate": 0.0001129253960619953, "loss": 0.9489, "step": 10319 }, { "epoch": 1.837428774928775, "grad_norm": 0.6623767018318176, "learning_rate": 0.00011291151588071405, "loss": 0.92, "step": 10320 }, { "epoch": 
1.8376068376068377, "grad_norm": 0.6143901348114014, "learning_rate": 0.00011289763544645846, "loss": 0.8093, "step": 10321 }, { "epoch": 1.8377849002849003, "grad_norm": 0.8207027316093445, "learning_rate": 0.00011288375475950046, "loss": 1.2402, "step": 10322 }, { "epoch": 1.8379629629629628, "grad_norm": 0.6759985685348511, "learning_rate": 0.00011286987382011209, "loss": 0.9179, "step": 10323 }, { "epoch": 1.8381410256410255, "grad_norm": 0.745439887046814, "learning_rate": 0.00011285599262856523, "loss": 0.8157, "step": 10324 }, { "epoch": 1.8383190883190883, "grad_norm": 0.6873317360877991, "learning_rate": 0.00011284211118513194, "loss": 0.8681, "step": 10325 }, { "epoch": 1.838497150997151, "grad_norm": 0.7060160040855408, "learning_rate": 0.00011282822949008416, "loss": 1.0833, "step": 10326 }, { "epoch": 1.8386752136752138, "grad_norm": 0.8079642653465271, "learning_rate": 0.00011281434754369389, "loss": 0.8639, "step": 10327 }, { "epoch": 1.8388532763532763, "grad_norm": 0.6434001922607422, "learning_rate": 0.00011280046534623303, "loss": 0.9269, "step": 10328 }, { "epoch": 1.839031339031339, "grad_norm": 0.7005292773246765, "learning_rate": 0.0001127865828979737, "loss": 1.1475, "step": 10329 }, { "epoch": 1.8392094017094016, "grad_norm": 0.7004852890968323, "learning_rate": 0.00011277270019918784, "loss": 0.9467, "step": 10330 }, { "epoch": 1.8393874643874644, "grad_norm": 0.7542549967765808, "learning_rate": 0.00011275881725014743, "loss": 1.0371, "step": 10331 }, { "epoch": 1.8395655270655271, "grad_norm": 0.674051821231842, "learning_rate": 0.00011274493405112452, "loss": 1.1097, "step": 10332 }, { "epoch": 1.8397435897435899, "grad_norm": 0.8136405348777771, "learning_rate": 0.00011273105060239107, "loss": 0.9718, "step": 10333 }, { "epoch": 1.8399216524216524, "grad_norm": 0.6524073481559753, "learning_rate": 0.00011271716690421916, "loss": 0.9953, "step": 10334 }, { "epoch": 1.8400997150997151, "grad_norm": 0.7436625957489014, "learning_rate": 
0.00011270328295688077, "loss": 1.0722, "step": 10335 }, { "epoch": 1.8402777777777777, "grad_norm": 0.6815723180770874, "learning_rate": 0.00011268939876064795, "loss": 1.0924, "step": 10336 }, { "epoch": 1.8404558404558404, "grad_norm": 0.6923388242721558, "learning_rate": 0.0001126755143157927, "loss": 0.921, "step": 10337 }, { "epoch": 1.8406339031339032, "grad_norm": 0.7464849948883057, "learning_rate": 0.00011266162962258708, "loss": 1.0549, "step": 10338 }, { "epoch": 1.840811965811966, "grad_norm": 0.6621805429458618, "learning_rate": 0.00011264774468130315, "loss": 1.0764, "step": 10339 }, { "epoch": 1.8409900284900285, "grad_norm": 0.7370132803916931, "learning_rate": 0.00011263385949221295, "loss": 0.7818, "step": 10340 }, { "epoch": 1.8411680911680912, "grad_norm": 0.673100471496582, "learning_rate": 0.00011261997405558848, "loss": 1.04, "step": 10341 }, { "epoch": 1.8413461538461537, "grad_norm": 0.5978201031684875, "learning_rate": 0.00011260608837170183, "loss": 0.9644, "step": 10342 }, { "epoch": 1.8415242165242165, "grad_norm": 0.6868628263473511, "learning_rate": 0.00011259220244082507, "loss": 0.9533, "step": 10343 }, { "epoch": 1.8417022792022792, "grad_norm": 0.6580314636230469, "learning_rate": 0.0001125783162632303, "loss": 0.9506, "step": 10344 }, { "epoch": 1.841880341880342, "grad_norm": 0.7238291501998901, "learning_rate": 0.00011256442983918951, "loss": 0.8663, "step": 10345 }, { "epoch": 1.8420584045584045, "grad_norm": 0.5838520526885986, "learning_rate": 0.00011255054316897484, "loss": 0.9606, "step": 10346 }, { "epoch": 1.8422364672364673, "grad_norm": 0.7102842926979065, "learning_rate": 0.00011253665625285836, "loss": 0.801, "step": 10347 }, { "epoch": 1.8424145299145298, "grad_norm": 0.6449147462844849, "learning_rate": 0.0001125227690911121, "loss": 1.0827, "step": 10348 }, { "epoch": 1.8425925925925926, "grad_norm": 0.6355304718017578, "learning_rate": 0.00011250888168400823, "loss": 1.0369, "step": 10349 }, { "epoch": 
1.8427706552706553, "grad_norm": 0.678977906703949, "learning_rate": 0.0001124949940318188, "loss": 0.9491, "step": 10350 }, { "epoch": 1.842948717948718, "grad_norm": 0.6366633772850037, "learning_rate": 0.00011248110613481592, "loss": 0.7272, "step": 10351 }, { "epoch": 1.8431267806267806, "grad_norm": 0.6639098525047302, "learning_rate": 0.00011246721799327171, "loss": 1.0313, "step": 10352 }, { "epoch": 1.8433048433048433, "grad_norm": 0.6034720540046692, "learning_rate": 0.00011245332960745822, "loss": 0.7141, "step": 10353 }, { "epoch": 1.8434829059829059, "grad_norm": 0.8118346333503723, "learning_rate": 0.00011243944097764763, "loss": 1.171, "step": 10354 }, { "epoch": 1.8436609686609686, "grad_norm": 0.6706618070602417, "learning_rate": 0.00011242555210411203, "loss": 0.9578, "step": 10355 }, { "epoch": 1.8438390313390314, "grad_norm": 0.619562029838562, "learning_rate": 0.00011241166298712355, "loss": 0.9883, "step": 10356 }, { "epoch": 1.8440170940170941, "grad_norm": 0.6471936106681824, "learning_rate": 0.00011239777362695434, "loss": 0.8897, "step": 10357 }, { "epoch": 1.8441951566951567, "grad_norm": 0.7179005742073059, "learning_rate": 0.00011238388402387645, "loss": 0.9646, "step": 10358 }, { "epoch": 1.8443732193732194, "grad_norm": 0.7726966738700867, "learning_rate": 0.00011236999417816214, "loss": 0.8855, "step": 10359 }, { "epoch": 1.844551282051282, "grad_norm": 0.6733565330505371, "learning_rate": 0.00011235610409008346, "loss": 1.0379, "step": 10360 }, { "epoch": 1.8447293447293447, "grad_norm": 0.7317814826965332, "learning_rate": 0.0001123422137599126, "loss": 0.8528, "step": 10361 }, { "epoch": 1.8449074074074074, "grad_norm": 0.6727005839347839, "learning_rate": 0.0001123283231879217, "loss": 0.9612, "step": 10362 }, { "epoch": 1.8450854700854702, "grad_norm": 0.6350542306900024, "learning_rate": 0.00011231443237438289, "loss": 0.9939, "step": 10363 }, { "epoch": 1.8452635327635327, "grad_norm": 0.693148672580719, "learning_rate": 
0.00011230054131956836, "loss": 1.0149, "step": 10364 }, { "epoch": 1.8454415954415955, "grad_norm": 0.7263579368591309, "learning_rate": 0.0001122866500237503, "loss": 1.1044, "step": 10365 }, { "epoch": 1.845619658119658, "grad_norm": 0.7044230699539185, "learning_rate": 0.00011227275848720085, "loss": 1.0677, "step": 10366 }, { "epoch": 1.8457977207977208, "grad_norm": 0.6895326972007751, "learning_rate": 0.00011225886671019219, "loss": 1.1025, "step": 10367 }, { "epoch": 1.8459757834757835, "grad_norm": 0.6045145988464355, "learning_rate": 0.00011224497469299651, "loss": 0.8079, "step": 10368 }, { "epoch": 1.8461538461538463, "grad_norm": 0.6613210439682007, "learning_rate": 0.00011223108243588599, "loss": 1.0345, "step": 10369 }, { "epoch": 1.8463319088319088, "grad_norm": 0.6288960576057434, "learning_rate": 0.0001122171899391328, "loss": 1.0166, "step": 10370 }, { "epoch": 1.8465099715099715, "grad_norm": 0.6158748865127563, "learning_rate": 0.00011220329720300917, "loss": 0.895, "step": 10371 }, { "epoch": 1.846688034188034, "grad_norm": 0.6583057641983032, "learning_rate": 0.00011218940422778728, "loss": 0.8059, "step": 10372 }, { "epoch": 1.8468660968660968, "grad_norm": 0.6761550903320312, "learning_rate": 0.00011217551101373932, "loss": 0.9253, "step": 10373 }, { "epoch": 1.8470441595441596, "grad_norm": 0.5969263315200806, "learning_rate": 0.0001121616175611375, "loss": 0.8549, "step": 10374 }, { "epoch": 1.8472222222222223, "grad_norm": 0.7994722723960876, "learning_rate": 0.00011214772387025407, "loss": 0.9918, "step": 10375 }, { "epoch": 1.8474002849002849, "grad_norm": 0.6949167847633362, "learning_rate": 0.00011213382994136123, "loss": 1.1853, "step": 10376 }, { "epoch": 1.8475783475783476, "grad_norm": 0.7356176376342773, "learning_rate": 0.00011211993577473121, "loss": 0.8809, "step": 10377 }, { "epoch": 1.8477564102564101, "grad_norm": 0.7110268473625183, "learning_rate": 0.0001121060413706362, "loss": 0.9805, "step": 10378 }, { "epoch": 
1.8479344729344729, "grad_norm": 0.6509962677955627, "learning_rate": 0.00011209214672934846, "loss": 0.8899, "step": 10379 }, { "epoch": 1.8481125356125356, "grad_norm": 0.6103082299232483, "learning_rate": 0.00011207825185114025, "loss": 0.8576, "step": 10380 }, { "epoch": 1.8482905982905984, "grad_norm": 0.6261070966720581, "learning_rate": 0.00011206435673628377, "loss": 0.8884, "step": 10381 }, { "epoch": 1.848468660968661, "grad_norm": 0.7629222273826599, "learning_rate": 0.00011205046138505126, "loss": 1.1714, "step": 10382 }, { "epoch": 1.8486467236467237, "grad_norm": 0.617957353591919, "learning_rate": 0.000112036565797715, "loss": 0.9546, "step": 10383 }, { "epoch": 1.8488247863247862, "grad_norm": 0.6926987171173096, "learning_rate": 0.00011202266997454724, "loss": 0.8842, "step": 10384 }, { "epoch": 1.849002849002849, "grad_norm": 0.602758526802063, "learning_rate": 0.00011200877391582025, "loss": 0.9782, "step": 10385 }, { "epoch": 1.8491809116809117, "grad_norm": 0.706731915473938, "learning_rate": 0.00011199487762180627, "loss": 0.8176, "step": 10386 }, { "epoch": 1.8493589743589745, "grad_norm": 0.7135118842124939, "learning_rate": 0.0001119809810927776, "loss": 0.9277, "step": 10387 }, { "epoch": 1.8495370370370372, "grad_norm": 0.7484592199325562, "learning_rate": 0.00011196708432900647, "loss": 1.0733, "step": 10388 }, { "epoch": 1.8497150997150997, "grad_norm": 0.7087157964706421, "learning_rate": 0.00011195318733076519, "loss": 0.9443, "step": 10389 }, { "epoch": 1.8498931623931623, "grad_norm": 0.6511468291282654, "learning_rate": 0.00011193929009832602, "loss": 0.955, "step": 10390 }, { "epoch": 1.850071225071225, "grad_norm": 0.6386628746986389, "learning_rate": 0.0001119253926319613, "loss": 1.0357, "step": 10391 }, { "epoch": 1.8502492877492878, "grad_norm": 0.6400021314620972, "learning_rate": 0.00011191149493194327, "loss": 0.8094, "step": 10392 }, { "epoch": 1.8504273504273505, "grad_norm": 0.7942537069320679, "learning_rate": 
0.00011189759699854423, "loss": 0.9717, "step": 10393 }, { "epoch": 1.8506054131054133, "grad_norm": 0.7230474948883057, "learning_rate": 0.00011188369883203647, "loss": 0.9043, "step": 10394 }, { "epoch": 1.8507834757834758, "grad_norm": 0.8837162852287292, "learning_rate": 0.00011186980043269235, "loss": 1.2821, "step": 10395 }, { "epoch": 1.8509615384615383, "grad_norm": 0.7260291576385498, "learning_rate": 0.00011185590180078413, "loss": 1.1672, "step": 10396 }, { "epoch": 1.851139601139601, "grad_norm": 0.6290066242218018, "learning_rate": 0.00011184200293658415, "loss": 0.8942, "step": 10397 }, { "epoch": 1.8513176638176638, "grad_norm": 0.6571013331413269, "learning_rate": 0.00011182810384036475, "loss": 1.0753, "step": 10398 }, { "epoch": 1.8514957264957266, "grad_norm": 0.6494737267494202, "learning_rate": 0.00011181420451239817, "loss": 0.8833, "step": 10399 }, { "epoch": 1.8516737891737893, "grad_norm": 0.7383694648742676, "learning_rate": 0.00011180030495295684, "loss": 1.0094, "step": 10400 }, { "epoch": 1.8518518518518519, "grad_norm": 0.6713876724243164, "learning_rate": 0.00011178640516231302, "loss": 0.975, "step": 10401 }, { "epoch": 1.8520299145299144, "grad_norm": 0.8041042685508728, "learning_rate": 0.00011177250514073912, "loss": 1.1419, "step": 10402 }, { "epoch": 1.8522079772079771, "grad_norm": 0.7035061120986938, "learning_rate": 0.00011175860488850738, "loss": 1.0921, "step": 10403 }, { "epoch": 1.85238603988604, "grad_norm": 0.6135673522949219, "learning_rate": 0.00011174470440589022, "loss": 0.9611, "step": 10404 }, { "epoch": 1.8525641025641026, "grad_norm": 0.7868386507034302, "learning_rate": 0.00011173080369315999, "loss": 0.8561, "step": 10405 }, { "epoch": 1.8527421652421654, "grad_norm": 0.6575735211372375, "learning_rate": 0.00011171690275058902, "loss": 1.0256, "step": 10406 }, { "epoch": 1.852920227920228, "grad_norm": 0.7514392137527466, "learning_rate": 0.00011170300157844969, "loss": 1.0868, "step": 10407 }, { "epoch": 
1.8530982905982905, "grad_norm": 0.6915257573127747, "learning_rate": 0.00011168910017701436, "loss": 1.1223, "step": 10408 }, { "epoch": 1.8532763532763532, "grad_norm": 0.7406772971153259, "learning_rate": 0.00011167519854655535, "loss": 1.0922, "step": 10409 }, { "epoch": 1.853454415954416, "grad_norm": 0.6632742881774902, "learning_rate": 0.0001116612966873451, "loss": 0.9082, "step": 10410 }, { "epoch": 1.8536324786324787, "grad_norm": 0.8154461979866028, "learning_rate": 0.00011164739459965598, "loss": 1.1126, "step": 10411 }, { "epoch": 1.8538105413105415, "grad_norm": 0.895764172077179, "learning_rate": 0.00011163349228376037, "loss": 1.0589, "step": 10412 }, { "epoch": 1.853988603988604, "grad_norm": 0.6746504902839661, "learning_rate": 0.00011161958973993063, "loss": 1.0184, "step": 10413 }, { "epoch": 1.8541666666666665, "grad_norm": 0.7271263003349304, "learning_rate": 0.00011160568696843916, "loss": 0.9989, "step": 10414 }, { "epoch": 1.8543447293447293, "grad_norm": 0.7503132820129395, "learning_rate": 0.00011159178396955836, "loss": 1.0783, "step": 10415 }, { "epoch": 1.854522792022792, "grad_norm": 0.6768177151679993, "learning_rate": 0.00011157788074356066, "loss": 0.9916, "step": 10416 }, { "epoch": 1.8547008547008548, "grad_norm": 0.6804978251457214, "learning_rate": 0.00011156397729071842, "loss": 0.9534, "step": 10417 }, { "epoch": 1.8548789173789175, "grad_norm": 0.7144617438316345, "learning_rate": 0.00011155007361130408, "loss": 0.991, "step": 10418 }, { "epoch": 1.85505698005698, "grad_norm": 0.6816750168800354, "learning_rate": 0.00011153616970559, "loss": 0.9551, "step": 10419 }, { "epoch": 1.8552350427350426, "grad_norm": 0.6620030999183655, "learning_rate": 0.00011152226557384866, "loss": 0.8854, "step": 10420 }, { "epoch": 1.8554131054131053, "grad_norm": 0.8400058746337891, "learning_rate": 0.00011150836121635249, "loss": 1.1593, "step": 10421 }, { "epoch": 1.855591168091168, "grad_norm": 0.6666815280914307, "learning_rate": 
0.00011149445663337385, "loss": 1.2112, "step": 10422 }, { "epoch": 1.8557692307692308, "grad_norm": 0.7298431396484375, "learning_rate": 0.00011148055182518522, "loss": 0.9721, "step": 10423 }, { "epoch": 1.8559472934472936, "grad_norm": 0.66816645860672, "learning_rate": 0.00011146664679205903, "loss": 1.0945, "step": 10424 }, { "epoch": 1.8561253561253561, "grad_norm": 0.5979483127593994, "learning_rate": 0.00011145274153426771, "loss": 1.0176, "step": 10425 }, { "epoch": 1.8563034188034186, "grad_norm": 0.6579445600509644, "learning_rate": 0.00011143883605208372, "loss": 0.9143, "step": 10426 }, { "epoch": 1.8564814814814814, "grad_norm": 0.6871697902679443, "learning_rate": 0.0001114249303457795, "loss": 1.071, "step": 10427 }, { "epoch": 1.8566595441595442, "grad_norm": 0.6683333516120911, "learning_rate": 0.0001114110244156275, "loss": 0.7809, "step": 10428 }, { "epoch": 1.856837606837607, "grad_norm": 0.6122907996177673, "learning_rate": 0.0001113971182619002, "loss": 0.8329, "step": 10429 }, { "epoch": 1.8570156695156697, "grad_norm": 0.6510575413703918, "learning_rate": 0.00011138321188487, "loss": 1.0068, "step": 10430 }, { "epoch": 1.8571937321937322, "grad_norm": 0.6417793035507202, "learning_rate": 0.00011136930528480945, "loss": 1.0093, "step": 10431 }, { "epoch": 1.8573717948717947, "grad_norm": 0.595824658870697, "learning_rate": 0.00011135539846199096, "loss": 0.9856, "step": 10432 }, { "epoch": 1.8575498575498575, "grad_norm": 0.7594470381736755, "learning_rate": 0.00011134149141668704, "loss": 0.8173, "step": 10433 }, { "epoch": 1.8577279202279202, "grad_norm": 0.7078324556350708, "learning_rate": 0.00011132758414917016, "loss": 1.0236, "step": 10434 }, { "epoch": 1.857905982905983, "grad_norm": 0.6830437779426575, "learning_rate": 0.00011131367665971275, "loss": 0.8483, "step": 10435 }, { "epoch": 1.8580840455840457, "grad_norm": 0.6856399774551392, "learning_rate": 0.0001112997689485874, "loss": 0.8729, "step": 10436 }, { "epoch": 
1.8582621082621082, "grad_norm": 0.6530426144599915, "learning_rate": 0.00011128586101606653, "loss": 0.8616, "step": 10437 }, { "epoch": 1.8584401709401708, "grad_norm": 0.6341808438301086, "learning_rate": 0.00011127195286242267, "loss": 0.896, "step": 10438 }, { "epoch": 1.8586182336182335, "grad_norm": 0.6278257966041565, "learning_rate": 0.00011125804448792831, "loss": 0.8309, "step": 10439 }, { "epoch": 1.8587962962962963, "grad_norm": 0.708705723285675, "learning_rate": 0.00011124413589285594, "loss": 1.1065, "step": 10440 }, { "epoch": 1.858974358974359, "grad_norm": 0.6845232248306274, "learning_rate": 0.00011123022707747808, "loss": 0.9292, "step": 10441 }, { "epoch": 1.8591524216524218, "grad_norm": 0.749204695224762, "learning_rate": 0.00011121631804206726, "loss": 1.0487, "step": 10442 }, { "epoch": 1.8593304843304843, "grad_norm": 0.7123128771781921, "learning_rate": 0.00011120240878689599, "loss": 0.9138, "step": 10443 }, { "epoch": 1.859508547008547, "grad_norm": 0.6862115263938904, "learning_rate": 0.00011118849931223679, "loss": 1.0675, "step": 10444 }, { "epoch": 1.8596866096866096, "grad_norm": 0.7245760560035706, "learning_rate": 0.00011117458961836215, "loss": 0.9643, "step": 10445 }, { "epoch": 1.8598646723646723, "grad_norm": 0.701574444770813, "learning_rate": 0.0001111606797055447, "loss": 1.0022, "step": 10446 }, { "epoch": 1.860042735042735, "grad_norm": 0.7292088270187378, "learning_rate": 0.0001111467695740569, "loss": 0.9465, "step": 10447 }, { "epoch": 1.8602207977207978, "grad_norm": 0.7045044302940369, "learning_rate": 0.0001111328592241713, "loss": 1.0942, "step": 10448 }, { "epoch": 1.8603988603988604, "grad_norm": 0.7181426286697388, "learning_rate": 0.00011111894865616046, "loss": 1.2108, "step": 10449 }, { "epoch": 1.8605769230769231, "grad_norm": 0.6083306074142456, "learning_rate": 0.00011110503787029689, "loss": 0.929, "step": 10450 }, { "epoch": 1.8607549857549857, "grad_norm": 0.6847347617149353, "learning_rate": 
0.00011109112686685319, "loss": 1.0911, "step": 10451 }, { "epoch": 1.8609330484330484, "grad_norm": 0.7131744027137756, "learning_rate": 0.0001110772156461019, "loss": 0.9649, "step": 10452 }, { "epoch": 1.8611111111111112, "grad_norm": 0.7920312881469727, "learning_rate": 0.00011106330420831559, "loss": 0.9965, "step": 10453 }, { "epoch": 1.861289173789174, "grad_norm": 0.6640987992286682, "learning_rate": 0.00011104939255376681, "loss": 1.2346, "step": 10454 }, { "epoch": 1.8614672364672364, "grad_norm": 0.5878208875656128, "learning_rate": 0.00011103548068272811, "loss": 0.8565, "step": 10455 }, { "epoch": 1.8616452991452992, "grad_norm": 0.6636882424354553, "learning_rate": 0.0001110215685954721, "loss": 0.8556, "step": 10456 }, { "epoch": 1.8618233618233617, "grad_norm": 0.5985570549964905, "learning_rate": 0.00011100765629227137, "loss": 1.0291, "step": 10457 }, { "epoch": 1.8620014245014245, "grad_norm": 0.7546643614768982, "learning_rate": 0.00011099374377339846, "loss": 1.0199, "step": 10458 }, { "epoch": 1.8621794871794872, "grad_norm": 0.6529727578163147, "learning_rate": 0.00011097983103912602, "loss": 1.0826, "step": 10459 }, { "epoch": 1.86235754985755, "grad_norm": 0.6394338607788086, "learning_rate": 0.00011096591808972654, "loss": 0.9896, "step": 10460 }, { "epoch": 1.8625356125356125, "grad_norm": 0.6508805751800537, "learning_rate": 0.00011095200492547271, "loss": 0.9659, "step": 10461 }, { "epoch": 1.8627136752136753, "grad_norm": 0.7085812091827393, "learning_rate": 0.00011093809154663705, "loss": 0.9998, "step": 10462 }, { "epoch": 1.8628917378917378, "grad_norm": 0.6488457322120667, "learning_rate": 0.00011092417795349226, "loss": 0.9757, "step": 10463 }, { "epoch": 1.8630698005698005, "grad_norm": 0.6405763626098633, "learning_rate": 0.0001109102641463109, "loss": 0.8188, "step": 10464 }, { "epoch": 1.8632478632478633, "grad_norm": 0.713361918926239, "learning_rate": 0.00011089635012536554, "loss": 0.886, "step": 10465 }, { "epoch": 
1.863425925925926, "grad_norm": 0.5752255916595459, "learning_rate": 0.00011088243589092886, "loss": 1.0223, "step": 10466 }, { "epoch": 1.8636039886039886, "grad_norm": 0.6722734570503235, "learning_rate": 0.00011086852144327344, "loss": 0.9499, "step": 10467 }, { "epoch": 1.8637820512820513, "grad_norm": 0.5516420006752014, "learning_rate": 0.00011085460678267194, "loss": 0.7767, "step": 10468 }, { "epoch": 1.8639601139601139, "grad_norm": 0.731257438659668, "learning_rate": 0.00011084069190939697, "loss": 1.2299, "step": 10469 }, { "epoch": 1.8641381766381766, "grad_norm": 0.7977055907249451, "learning_rate": 0.00011082677682372114, "loss": 0.9109, "step": 10470 }, { "epoch": 1.8643162393162394, "grad_norm": 0.679900586605072, "learning_rate": 0.0001108128615259171, "loss": 0.9319, "step": 10471 }, { "epoch": 1.864494301994302, "grad_norm": 0.7428545951843262, "learning_rate": 0.00011079894601625754, "loss": 0.8585, "step": 10472 }, { "epoch": 1.8646723646723646, "grad_norm": 0.6560967564582825, "learning_rate": 0.00011078503029501504, "loss": 1.0069, "step": 10473 }, { "epoch": 1.8648504273504274, "grad_norm": 0.636202871799469, "learning_rate": 0.00011077111436246228, "loss": 1.0329, "step": 10474 }, { "epoch": 1.86502849002849, "grad_norm": 0.6666205525398254, "learning_rate": 0.00011075719821887191, "loss": 1.0123, "step": 10475 }, { "epoch": 1.8652065527065527, "grad_norm": 0.7089471220970154, "learning_rate": 0.00011074328186451657, "loss": 0.7851, "step": 10476 }, { "epoch": 1.8653846153846154, "grad_norm": 0.6054788827896118, "learning_rate": 0.00011072936529966895, "loss": 0.8224, "step": 10477 }, { "epoch": 1.8655626780626782, "grad_norm": 0.6009029150009155, "learning_rate": 0.00011071544852460172, "loss": 0.865, "step": 10478 }, { "epoch": 1.8657407407407407, "grad_norm": 0.6238716244697571, "learning_rate": 0.00011070153153958753, "loss": 0.8685, "step": 10479 }, { "epoch": 1.8659188034188035, "grad_norm": 0.719985842704773, "learning_rate": 
0.00011068761434489903, "loss": 1.2204, "step": 10480 }, { "epoch": 1.866096866096866, "grad_norm": 0.72972172498703, "learning_rate": 0.00011067369694080895, "loss": 1.0454, "step": 10481 }, { "epoch": 1.8662749287749287, "grad_norm": 0.6741998791694641, "learning_rate": 0.00011065977932758995, "loss": 0.9992, "step": 10482 }, { "epoch": 1.8664529914529915, "grad_norm": 0.6150268912315369, "learning_rate": 0.00011064586150551472, "loss": 0.8866, "step": 10483 }, { "epoch": 1.8666310541310542, "grad_norm": 0.8253782391548157, "learning_rate": 0.00011063194347485597, "loss": 1.1173, "step": 10484 }, { "epoch": 1.8668091168091168, "grad_norm": 0.7176247835159302, "learning_rate": 0.00011061802523588636, "loss": 1.0414, "step": 10485 }, { "epoch": 1.8669871794871795, "grad_norm": 0.6372736096382141, "learning_rate": 0.00011060410678887858, "loss": 1.0548, "step": 10486 }, { "epoch": 1.867165242165242, "grad_norm": 0.7107454538345337, "learning_rate": 0.00011059018813410538, "loss": 1.2298, "step": 10487 }, { "epoch": 1.8673433048433048, "grad_norm": 0.7113911509513855, "learning_rate": 0.00011057626927183944, "loss": 0.9598, "step": 10488 }, { "epoch": 1.8675213675213675, "grad_norm": 0.6734410524368286, "learning_rate": 0.00011056235020235346, "loss": 0.9475, "step": 10489 }, { "epoch": 1.8676994301994303, "grad_norm": 0.6875202655792236, "learning_rate": 0.0001105484309259202, "loss": 1.0735, "step": 10490 }, { "epoch": 1.8678774928774928, "grad_norm": 0.6908353567123413, "learning_rate": 0.0001105345114428123, "loss": 1.0558, "step": 10491 }, { "epoch": 1.8680555555555556, "grad_norm": 0.6283324360847473, "learning_rate": 0.00011052059175330256, "loss": 0.8872, "step": 10492 }, { "epoch": 1.868233618233618, "grad_norm": 0.6422587633132935, "learning_rate": 0.00011050667185766368, "loss": 1.1022, "step": 10493 }, { "epoch": 1.8684116809116809, "grad_norm": 0.7075859904289246, "learning_rate": 0.0001104927517561684, "loss": 1.1389, "step": 10494 }, { "epoch": 
1.8685897435897436, "grad_norm": 0.5896905064582825, "learning_rate": 0.00011047883144908944, "loss": 0.7732, "step": 10495 }, { "epoch": 1.8687678062678064, "grad_norm": 0.7647629976272583, "learning_rate": 0.00011046491093669953, "loss": 0.9983, "step": 10496 }, { "epoch": 1.868945868945869, "grad_norm": 0.5864735841751099, "learning_rate": 0.00011045099021927144, "loss": 0.8427, "step": 10497 }, { "epoch": 1.8691239316239316, "grad_norm": 0.6766837239265442, "learning_rate": 0.00011043706929707791, "loss": 0.9595, "step": 10498 }, { "epoch": 1.8693019943019942, "grad_norm": 0.5480074286460876, "learning_rate": 0.00011042314817039168, "loss": 0.691, "step": 10499 }, { "epoch": 1.869480056980057, "grad_norm": 0.6259615421295166, "learning_rate": 0.00011040922683948553, "loss": 0.9991, "step": 10500 }, { "epoch": 1.8696581196581197, "grad_norm": 0.5950598120689392, "learning_rate": 0.00011039530530463218, "loss": 0.7413, "step": 10501 }, { "epoch": 1.8698361823361824, "grad_norm": 0.8099377751350403, "learning_rate": 0.00011038138356610441, "loss": 1.1351, "step": 10502 }, { "epoch": 1.8700142450142452, "grad_norm": 0.6716185212135315, "learning_rate": 0.00011036746162417501, "loss": 1.1057, "step": 10503 }, { "epoch": 1.8701923076923077, "grad_norm": 0.7993219494819641, "learning_rate": 0.00011035353947911675, "loss": 1.2095, "step": 10504 }, { "epoch": 1.8703703703703702, "grad_norm": 0.6381276249885559, "learning_rate": 0.00011033961713120237, "loss": 1.0261, "step": 10505 }, { "epoch": 1.870548433048433, "grad_norm": 0.6326032280921936, "learning_rate": 0.00011032569458070469, "loss": 0.8664, "step": 10506 }, { "epoch": 1.8707264957264957, "grad_norm": 0.6864820718765259, "learning_rate": 0.00011031177182789644, "loss": 0.9959, "step": 10507 }, { "epoch": 1.8709045584045585, "grad_norm": 0.6341838240623474, "learning_rate": 0.00011029784887305048, "loss": 0.8029, "step": 10508 }, { "epoch": 1.8710826210826212, "grad_norm": 0.6559172868728638, "learning_rate": 
0.00011028392571643957, "loss": 0.9282, "step": 10509 }, { "epoch": 1.8712606837606838, "grad_norm": 0.6976849436759949, "learning_rate": 0.0001102700023583365, "loss": 1.0198, "step": 10510 }, { "epoch": 1.8714387464387463, "grad_norm": 0.7159395217895508, "learning_rate": 0.00011025607879901402, "loss": 1.1585, "step": 10511 }, { "epoch": 1.871616809116809, "grad_norm": 0.7168624997138977, "learning_rate": 0.000110242155038745, "loss": 1.0558, "step": 10512 }, { "epoch": 1.8717948717948718, "grad_norm": 0.5784319043159485, "learning_rate": 0.00011022823107780224, "loss": 0.9481, "step": 10513 }, { "epoch": 1.8719729344729346, "grad_norm": 0.6602259874343872, "learning_rate": 0.00011021430691645856, "loss": 1.0538, "step": 10514 }, { "epoch": 1.8721509971509973, "grad_norm": 0.6874588131904602, "learning_rate": 0.00011020038255498672, "loss": 1.1396, "step": 10515 }, { "epoch": 1.8723290598290598, "grad_norm": 0.7311663031578064, "learning_rate": 0.00011018645799365956, "loss": 1.084, "step": 10516 }, { "epoch": 1.8725071225071224, "grad_norm": 0.7097118496894836, "learning_rate": 0.00011017253323274996, "loss": 0.9872, "step": 10517 }, { "epoch": 1.8726851851851851, "grad_norm": 0.6667875051498413, "learning_rate": 0.00011015860827253068, "loss": 1.105, "step": 10518 }, { "epoch": 1.8728632478632479, "grad_norm": 0.6807677745819092, "learning_rate": 0.0001101446831132746, "loss": 0.9093, "step": 10519 }, { "epoch": 1.8730413105413106, "grad_norm": 0.6885797381401062, "learning_rate": 0.0001101307577552545, "loss": 0.8479, "step": 10520 }, { "epoch": 1.8732193732193734, "grad_norm": 0.6269213557243347, "learning_rate": 0.00011011683219874323, "loss": 0.9457, "step": 10521 }, { "epoch": 1.873397435897436, "grad_norm": 0.7096766829490662, "learning_rate": 0.00011010290644401364, "loss": 1.0971, "step": 10522 }, { "epoch": 1.8735754985754984, "grad_norm": 0.6909209489822388, "learning_rate": 0.00011008898049133863, "loss": 0.9928, "step": 10523 }, { "epoch": 
1.8737535612535612, "grad_norm": 0.6586211323738098, "learning_rate": 0.000110075054340991, "loss": 0.818, "step": 10524 }, { "epoch": 1.873931623931624, "grad_norm": 0.5934817790985107, "learning_rate": 0.0001100611279932436, "loss": 0.7698, "step": 10525 }, { "epoch": 1.8741096866096867, "grad_norm": 0.6361709237098694, "learning_rate": 0.00011004720144836931, "loss": 0.9465, "step": 10526 }, { "epoch": 1.8742877492877494, "grad_norm": 0.6742212176322937, "learning_rate": 0.00011003327470664095, "loss": 1.0998, "step": 10527 }, { "epoch": 1.874465811965812, "grad_norm": 0.6634946465492249, "learning_rate": 0.00011001934776833143, "loss": 0.8328, "step": 10528 }, { "epoch": 1.8746438746438745, "grad_norm": 0.6754063963890076, "learning_rate": 0.0001100054206337136, "loss": 1.147, "step": 10529 }, { "epoch": 1.8748219373219372, "grad_norm": 0.5951135158538818, "learning_rate": 0.00010999149330306036, "loss": 0.8956, "step": 10530 }, { "epoch": 1.875, "grad_norm": 0.6140317320823669, "learning_rate": 0.00010997756577664455, "loss": 0.9368, "step": 10531 }, { "epoch": 1.8751780626780628, "grad_norm": 0.6419258713722229, "learning_rate": 0.00010996363805473904, "loss": 0.9817, "step": 10532 }, { "epoch": 1.8753561253561255, "grad_norm": 0.7173396348953247, "learning_rate": 0.00010994971013761677, "loss": 0.9638, "step": 10533 }, { "epoch": 1.875534188034188, "grad_norm": 0.8125925660133362, "learning_rate": 0.0001099357820255506, "loss": 1.0996, "step": 10534 }, { "epoch": 1.8757122507122506, "grad_norm": 0.6191564798355103, "learning_rate": 0.00010992185371881341, "loss": 0.8266, "step": 10535 }, { "epoch": 1.8758903133903133, "grad_norm": 0.6632885336875916, "learning_rate": 0.0001099079252176781, "loss": 1.1884, "step": 10536 }, { "epoch": 1.876068376068376, "grad_norm": 0.7323372960090637, "learning_rate": 0.00010989399652241759, "loss": 1.0842, "step": 10537 }, { "epoch": 1.8762464387464388, "grad_norm": 0.7553854584693909, "learning_rate": 
0.00010988006763330476, "loss": 0.9948, "step": 10538 }, { "epoch": 1.8764245014245016, "grad_norm": 0.5887658596038818, "learning_rate": 0.00010986613855061255, "loss": 0.7653, "step": 10539 }, { "epoch": 1.876602564102564, "grad_norm": 0.6849574446678162, "learning_rate": 0.00010985220927461384, "loss": 1.152, "step": 10540 }, { "epoch": 1.8767806267806266, "grad_norm": 0.6985000371932983, "learning_rate": 0.00010983827980558155, "loss": 0.9869, "step": 10541 }, { "epoch": 1.8769586894586894, "grad_norm": 0.6885373592376709, "learning_rate": 0.00010982435014378858, "loss": 1.1803, "step": 10542 }, { "epoch": 1.8771367521367521, "grad_norm": 0.7610142827033997, "learning_rate": 0.00010981042028950788, "loss": 0.9219, "step": 10543 }, { "epoch": 1.8773148148148149, "grad_norm": 0.6545612215995789, "learning_rate": 0.00010979649024301242, "loss": 1.0337, "step": 10544 }, { "epoch": 1.8774928774928776, "grad_norm": 0.7307698130607605, "learning_rate": 0.00010978256000457505, "loss": 0.9726, "step": 10545 }, { "epoch": 1.8776709401709402, "grad_norm": 0.68310546875, "learning_rate": 0.00010976862957446877, "loss": 1.161, "step": 10546 }, { "epoch": 1.8778490028490027, "grad_norm": 0.6114758253097534, "learning_rate": 0.00010975469895296646, "loss": 0.8863, "step": 10547 }, { "epoch": 1.8780270655270654, "grad_norm": 0.732390820980072, "learning_rate": 0.00010974076814034106, "loss": 1.0339, "step": 10548 }, { "epoch": 1.8782051282051282, "grad_norm": 0.6741712689399719, "learning_rate": 0.0001097268371368656, "loss": 1.0024, "step": 10549 }, { "epoch": 1.878383190883191, "grad_norm": 0.6374897360801697, "learning_rate": 0.00010971290594281294, "loss": 0.91, "step": 10550 }, { "epoch": 1.8785612535612537, "grad_norm": 0.6434261202812195, "learning_rate": 0.00010969897455845608, "loss": 1.0048, "step": 10551 }, { "epoch": 1.8787393162393162, "grad_norm": 0.6573047041893005, "learning_rate": 0.00010968504298406794, "loss": 1.118, "step": 10552 }, { "epoch": 
1.8789173789173788, "grad_norm": 0.6686552166938782, "learning_rate": 0.00010967111121992152, "loss": 1.089, "step": 10553 }, { "epoch": 1.8790954415954415, "grad_norm": 0.7899606823921204, "learning_rate": 0.00010965717926628976, "loss": 1.059, "step": 10554 }, { "epoch": 1.8792735042735043, "grad_norm": 0.5808879733085632, "learning_rate": 0.00010964324712344564, "loss": 0.9369, "step": 10555 }, { "epoch": 1.879451566951567, "grad_norm": 0.6322834491729736, "learning_rate": 0.00010962931479166211, "loss": 0.8783, "step": 10556 }, { "epoch": 1.8796296296296298, "grad_norm": 0.647002637386322, "learning_rate": 0.00010961538227121218, "loss": 0.9468, "step": 10557 }, { "epoch": 1.8798076923076923, "grad_norm": 0.6581854820251465, "learning_rate": 0.0001096014495623688, "loss": 1.0077, "step": 10558 }, { "epoch": 1.8799857549857548, "grad_norm": 0.6879259943962097, "learning_rate": 0.00010958751666540496, "loss": 0.976, "step": 10559 }, { "epoch": 1.8801638176638176, "grad_norm": 0.7055090665817261, "learning_rate": 0.00010957358358059364, "loss": 0.8903, "step": 10560 }, { "epoch": 1.8803418803418803, "grad_norm": 0.6865016222000122, "learning_rate": 0.00010955965030820782, "loss": 0.9872, "step": 10561 }, { "epoch": 1.880519943019943, "grad_norm": 0.663436770439148, "learning_rate": 0.00010954571684852055, "loss": 1.0485, "step": 10562 }, { "epoch": 1.8806980056980058, "grad_norm": 0.6861656904220581, "learning_rate": 0.00010953178320180475, "loss": 1.0691, "step": 10563 }, { "epoch": 1.8808760683760684, "grad_norm": 0.8045449256896973, "learning_rate": 0.0001095178493683335, "loss": 1.1534, "step": 10564 }, { "epoch": 1.881054131054131, "grad_norm": 0.6493151187896729, "learning_rate": 0.00010950391534837973, "loss": 0.8756, "step": 10565 }, { "epoch": 1.8812321937321936, "grad_norm": 0.7057121992111206, "learning_rate": 0.00010948998114221651, "loss": 1.1709, "step": 10566 }, { "epoch": 1.8814102564102564, "grad_norm": 0.7708197236061096, "learning_rate": 
0.0001094760467501168, "loss": 1.0037, "step": 10567 }, { "epoch": 1.8815883190883191, "grad_norm": 0.7234642505645752, "learning_rate": 0.00010946211217235364, "loss": 1.0757, "step": 10568 }, { "epoch": 1.881766381766382, "grad_norm": 0.6964395642280579, "learning_rate": 0.00010944817740920006, "loss": 1.0769, "step": 10569 }, { "epoch": 1.8819444444444444, "grad_norm": 0.7465848922729492, "learning_rate": 0.00010943424246092906, "loss": 0.9772, "step": 10570 }, { "epoch": 1.8821225071225072, "grad_norm": 0.7145788073539734, "learning_rate": 0.0001094203073278137, "loss": 0.9638, "step": 10571 }, { "epoch": 1.8823005698005697, "grad_norm": 0.7421764135360718, "learning_rate": 0.00010940637201012698, "loss": 1.0324, "step": 10572 }, { "epoch": 1.8824786324786325, "grad_norm": 0.7373253107070923, "learning_rate": 0.0001093924365081419, "loss": 1.1554, "step": 10573 }, { "epoch": 1.8826566951566952, "grad_norm": 0.6861984729766846, "learning_rate": 0.00010937850082213156, "loss": 0.9899, "step": 10574 }, { "epoch": 1.882834757834758, "grad_norm": 0.6173393130302429, "learning_rate": 0.000109364564952369, "loss": 0.8495, "step": 10575 }, { "epoch": 1.8830128205128205, "grad_norm": 0.6871610879898071, "learning_rate": 0.00010935062889912723, "loss": 1.2164, "step": 10576 }, { "epoch": 1.8831908831908832, "grad_norm": 0.7062903642654419, "learning_rate": 0.00010933669266267931, "loss": 1.1077, "step": 10577 }, { "epoch": 1.8833689458689458, "grad_norm": 0.6574689745903015, "learning_rate": 0.00010932275624329828, "loss": 0.9326, "step": 10578 }, { "epoch": 1.8835470085470085, "grad_norm": 0.636385440826416, "learning_rate": 0.00010930881964125723, "loss": 1.0581, "step": 10579 }, { "epoch": 1.8837250712250713, "grad_norm": 0.6178432106971741, "learning_rate": 0.0001092948828568292, "loss": 1.1288, "step": 10580 }, { "epoch": 1.883903133903134, "grad_norm": 0.6509431600570679, "learning_rate": 0.00010928094589028721, "loss": 1.0113, "step": 10581 }, { "epoch": 
1.8840811965811965, "grad_norm": 0.6543706059455872, "learning_rate": 0.00010926700874190441, "loss": 1.0041, "step": 10582 }, { "epoch": 1.8842592592592593, "grad_norm": 0.6815463304519653, "learning_rate": 0.0001092530714119538, "loss": 1.0892, "step": 10583 }, { "epoch": 1.8844373219373218, "grad_norm": 0.6787421107292175, "learning_rate": 0.00010923913390070846, "loss": 1.2693, "step": 10584 }, { "epoch": 1.8846153846153846, "grad_norm": 0.6953850984573364, "learning_rate": 0.00010922519620844151, "loss": 0.9848, "step": 10585 }, { "epoch": 1.8847934472934473, "grad_norm": 0.7061360478401184, "learning_rate": 0.000109211258335426, "loss": 0.949, "step": 10586 }, { "epoch": 1.88497150997151, "grad_norm": 0.6845372915267944, "learning_rate": 0.00010919732028193504, "loss": 0.9554, "step": 10587 }, { "epoch": 1.8851495726495726, "grad_norm": 0.6524720788002014, "learning_rate": 0.00010918338204824165, "loss": 1.1037, "step": 10588 }, { "epoch": 1.8853276353276354, "grad_norm": 0.6410523653030396, "learning_rate": 0.00010916944363461899, "loss": 0.9085, "step": 10589 }, { "epoch": 1.885505698005698, "grad_norm": 0.7109059691429138, "learning_rate": 0.00010915550504134014, "loss": 1.0526, "step": 10590 }, { "epoch": 1.8856837606837606, "grad_norm": 0.7781991362571716, "learning_rate": 0.00010914156626867818, "loss": 0.9737, "step": 10591 }, { "epoch": 1.8858618233618234, "grad_norm": 0.7173767685890198, "learning_rate": 0.00010912762731690623, "loss": 0.8862, "step": 10592 }, { "epoch": 1.8860398860398861, "grad_norm": 0.7650504112243652, "learning_rate": 0.00010911368818629732, "loss": 1.2175, "step": 10593 }, { "epoch": 1.8862179487179487, "grad_norm": 0.6316116452217102, "learning_rate": 0.00010909974887712468, "loss": 0.8332, "step": 10594 }, { "epoch": 1.8863960113960114, "grad_norm": 0.6504800319671631, "learning_rate": 0.00010908580938966138, "loss": 0.8864, "step": 10595 }, { "epoch": 1.886574074074074, "grad_norm": 0.675507128238678, "learning_rate": 
0.00010907186972418049, "loss": 0.8523, "step": 10596 }, { "epoch": 1.8867521367521367, "grad_norm": 0.6535763144493103, "learning_rate": 0.00010905792988095515, "loss": 1.0786, "step": 10597 }, { "epoch": 1.8869301994301995, "grad_norm": 0.7071853280067444, "learning_rate": 0.0001090439898602585, "loss": 0.9319, "step": 10598 }, { "epoch": 1.8871082621082622, "grad_norm": 0.699466347694397, "learning_rate": 0.00010903004966236365, "loss": 0.9573, "step": 10599 }, { "epoch": 1.8872863247863247, "grad_norm": 0.7099201083183289, "learning_rate": 0.00010901610928754375, "loss": 0.9447, "step": 10600 }, { "epoch": 1.8874643874643875, "grad_norm": 0.6140450835227966, "learning_rate": 0.00010900216873607189, "loss": 1.0227, "step": 10601 }, { "epoch": 1.88764245014245, "grad_norm": 0.6613629460334778, "learning_rate": 0.00010898822800822127, "loss": 1.0152, "step": 10602 }, { "epoch": 1.8878205128205128, "grad_norm": 0.7334819436073303, "learning_rate": 0.00010897428710426498, "loss": 1.1452, "step": 10603 }, { "epoch": 1.8879985754985755, "grad_norm": 0.6819368004798889, "learning_rate": 0.00010896034602447616, "loss": 1.0504, "step": 10604 }, { "epoch": 1.8881766381766383, "grad_norm": 0.6781361103057861, "learning_rate": 0.00010894640476912799, "loss": 0.8719, "step": 10605 }, { "epoch": 1.8883547008547008, "grad_norm": 0.621960461139679, "learning_rate": 0.00010893246333849361, "loss": 0.9264, "step": 10606 }, { "epoch": 1.8885327635327636, "grad_norm": 0.6350592374801636, "learning_rate": 0.00010891852173284615, "loss": 1.0042, "step": 10607 }, { "epoch": 1.888710826210826, "grad_norm": 0.6650694012641907, "learning_rate": 0.00010890457995245879, "loss": 1.1387, "step": 10608 }, { "epoch": 1.8888888888888888, "grad_norm": 0.6515723466873169, "learning_rate": 0.00010889063799760468, "loss": 0.9508, "step": 10609 }, { "epoch": 1.8890669515669516, "grad_norm": 0.6368890404701233, "learning_rate": 0.000108876695868557, "loss": 0.8051, "step": 10610 }, { "epoch": 
1.8892450142450143, "grad_norm": 0.7971013188362122, "learning_rate": 0.00010886275356558888, "loss": 0.8629, "step": 10611 }, { "epoch": 1.8894230769230769, "grad_norm": 0.6739095449447632, "learning_rate": 0.00010884881108897353, "loss": 0.9606, "step": 10612 }, { "epoch": 1.8896011396011396, "grad_norm": 0.7754076719284058, "learning_rate": 0.00010883486843898412, "loss": 1.0751, "step": 10613 }, { "epoch": 1.8897792022792022, "grad_norm": 0.6538285613059998, "learning_rate": 0.00010882092561589379, "loss": 0.9288, "step": 10614 }, { "epoch": 1.889957264957265, "grad_norm": 0.7373257875442505, "learning_rate": 0.00010880698261997577, "loss": 0.9884, "step": 10615 }, { "epoch": 1.8901353276353277, "grad_norm": 0.6575660109519958, "learning_rate": 0.00010879303945150321, "loss": 1.0307, "step": 10616 }, { "epoch": 1.8903133903133904, "grad_norm": 0.7500179409980774, "learning_rate": 0.00010877909611074932, "loss": 1.0812, "step": 10617 }, { "epoch": 1.890491452991453, "grad_norm": 0.7607308030128479, "learning_rate": 0.00010876515259798727, "loss": 0.9746, "step": 10618 }, { "epoch": 1.8906695156695157, "grad_norm": 0.7930253744125366, "learning_rate": 0.00010875120891349024, "loss": 0.7911, "step": 10619 }, { "epoch": 1.8908475783475782, "grad_norm": 0.635254979133606, "learning_rate": 0.00010873726505753148, "loss": 1.0468, "step": 10620 }, { "epoch": 1.891025641025641, "grad_norm": 0.7579759359359741, "learning_rate": 0.00010872332103038414, "loss": 0.9558, "step": 10621 }, { "epoch": 1.8912037037037037, "grad_norm": 0.5841903686523438, "learning_rate": 0.00010870937683232146, "loss": 0.913, "step": 10622 }, { "epoch": 1.8913817663817665, "grad_norm": 0.7088860273361206, "learning_rate": 0.00010869543246361664, "loss": 1.0814, "step": 10623 }, { "epoch": 1.8915598290598292, "grad_norm": 0.6713772416114807, "learning_rate": 0.00010868148792454285, "loss": 0.9972, "step": 10624 }, { "epoch": 1.8917378917378918, "grad_norm": 0.6733243465423584, "learning_rate": 
0.00010866754321537338, "loss": 0.9596, "step": 10625 }, { "epoch": 1.8919159544159543, "grad_norm": 0.7747747898101807, "learning_rate": 0.00010865359833638138, "loss": 1.0871, "step": 10626 }, { "epoch": 1.892094017094017, "grad_norm": 0.677175760269165, "learning_rate": 0.00010863965328784011, "loss": 0.9939, "step": 10627 }, { "epoch": 1.8922720797720798, "grad_norm": 0.7883930206298828, "learning_rate": 0.00010862570807002279, "loss": 1.0708, "step": 10628 }, { "epoch": 1.8924501424501425, "grad_norm": 0.7003030180931091, "learning_rate": 0.00010861176268320261, "loss": 0.9791, "step": 10629 }, { "epoch": 1.8926282051282053, "grad_norm": 0.7450358271598816, "learning_rate": 0.00010859781712765284, "loss": 0.9672, "step": 10630 }, { "epoch": 1.8928062678062678, "grad_norm": 0.7776696085929871, "learning_rate": 0.00010858387140364672, "loss": 1.1037, "step": 10631 }, { "epoch": 1.8929843304843303, "grad_norm": 0.6896173357963562, "learning_rate": 0.00010856992551145745, "loss": 1.0048, "step": 10632 }, { "epoch": 1.893162393162393, "grad_norm": 0.5997697710990906, "learning_rate": 0.00010855597945135834, "loss": 0.8025, "step": 10633 }, { "epoch": 1.8933404558404558, "grad_norm": 0.8781484365463257, "learning_rate": 0.00010854203322362251, "loss": 1.0014, "step": 10634 }, { "epoch": 1.8935185185185186, "grad_norm": 0.6348843574523926, "learning_rate": 0.00010852808682852334, "loss": 0.9857, "step": 10635 }, { "epoch": 1.8936965811965814, "grad_norm": 0.9704267978668213, "learning_rate": 0.000108514140266334, "loss": 1.0522, "step": 10636 }, { "epoch": 1.8938746438746439, "grad_norm": 0.70372074842453, "learning_rate": 0.00010850019353732779, "loss": 1.1044, "step": 10637 }, { "epoch": 1.8940527065527064, "grad_norm": 0.6528043150901794, "learning_rate": 0.00010848624664177793, "loss": 0.9328, "step": 10638 }, { "epoch": 1.8942307692307692, "grad_norm": 0.6299768090248108, "learning_rate": 0.00010847229957995768, "loss": 1.0099, "step": 10639 }, { "epoch": 
1.894408831908832, "grad_norm": 0.6347038149833679, "learning_rate": 0.00010845835235214034, "loss": 1.1354, "step": 10640 }, { "epoch": 1.8945868945868947, "grad_norm": 0.7087811827659607, "learning_rate": 0.00010844440495859913, "loss": 1.0543, "step": 10641 }, { "epoch": 1.8947649572649574, "grad_norm": 0.7386305332183838, "learning_rate": 0.00010843045739960738, "loss": 0.9192, "step": 10642 }, { "epoch": 1.89494301994302, "grad_norm": 0.6047097444534302, "learning_rate": 0.00010841650967543833, "loss": 0.8668, "step": 10643 }, { "epoch": 1.8951210826210825, "grad_norm": 0.6779503226280212, "learning_rate": 0.00010840256178636523, "loss": 0.9263, "step": 10644 }, { "epoch": 1.8952991452991452, "grad_norm": 0.7398194670677185, "learning_rate": 0.00010838861373266138, "loss": 0.9534, "step": 10645 }, { "epoch": 1.895477207977208, "grad_norm": 0.8138558864593506, "learning_rate": 0.00010837466551460011, "loss": 0.9835, "step": 10646 }, { "epoch": 1.8956552706552707, "grad_norm": 0.8847818374633789, "learning_rate": 0.00010836071713245466, "loss": 0.9769, "step": 10647 }, { "epoch": 1.8958333333333335, "grad_norm": 0.6824164390563965, "learning_rate": 0.0001083467685864983, "loss": 0.9901, "step": 10648 }, { "epoch": 1.896011396011396, "grad_norm": 0.6318182945251465, "learning_rate": 0.00010833281987700436, "loss": 0.7677, "step": 10649 }, { "epoch": 1.8961894586894585, "grad_norm": 0.7372074127197266, "learning_rate": 0.00010831887100424612, "loss": 0.9858, "step": 10650 }, { "epoch": 1.8963675213675213, "grad_norm": 0.7246516346931458, "learning_rate": 0.00010830492196849688, "loss": 0.9644, "step": 10651 }, { "epoch": 1.896545584045584, "grad_norm": 0.6517095565795898, "learning_rate": 0.00010829097277002997, "loss": 1.1733, "step": 10652 }, { "epoch": 1.8967236467236468, "grad_norm": 0.6931695342063904, "learning_rate": 0.00010827702340911867, "loss": 0.9923, "step": 10653 }, { "epoch": 1.8969017094017095, "grad_norm": 0.6210272312164307, "learning_rate": 
0.00010826307388603628, "loss": 0.8757, "step": 10654 }, { "epoch": 1.897079772079772, "grad_norm": 0.7011165618896484, "learning_rate": 0.00010824912420105611, "loss": 1.0011, "step": 10655 }, { "epoch": 1.8972578347578346, "grad_norm": 0.7431246638298035, "learning_rate": 0.0001082351743544515, "loss": 1.1498, "step": 10656 }, { "epoch": 1.8974358974358974, "grad_norm": 0.7099978923797607, "learning_rate": 0.00010822122434649576, "loss": 1.0673, "step": 10657 }, { "epoch": 1.89761396011396, "grad_norm": 0.7375551462173462, "learning_rate": 0.00010820727417746219, "loss": 1.0157, "step": 10658 }, { "epoch": 1.8977920227920229, "grad_norm": 0.8155642151832581, "learning_rate": 0.00010819332384762413, "loss": 1.229, "step": 10659 }, { "epoch": 1.8979700854700856, "grad_norm": 0.6917914748191833, "learning_rate": 0.00010817937335725493, "loss": 0.9701, "step": 10660 }, { "epoch": 1.8981481481481481, "grad_norm": 0.8498218059539795, "learning_rate": 0.00010816542270662786, "loss": 1.0123, "step": 10661 }, { "epoch": 1.8983262108262107, "grad_norm": 0.7234359979629517, "learning_rate": 0.00010815147189601634, "loss": 1.0755, "step": 10662 }, { "epoch": 1.8985042735042734, "grad_norm": 0.6997553110122681, "learning_rate": 0.00010813752092569365, "loss": 1.1594, "step": 10663 }, { "epoch": 1.8986823361823362, "grad_norm": 0.6519457101821899, "learning_rate": 0.00010812356979593314, "loss": 0.9609, "step": 10664 }, { "epoch": 1.898860398860399, "grad_norm": 0.7215374708175659, "learning_rate": 0.00010810961850700813, "loss": 1.1392, "step": 10665 }, { "epoch": 1.8990384615384617, "grad_norm": 0.7766093611717224, "learning_rate": 0.00010809566705919202, "loss": 1.0256, "step": 10666 }, { "epoch": 1.8992165242165242, "grad_norm": 0.6520358920097351, "learning_rate": 0.00010808171545275814, "loss": 1.0434, "step": 10667 }, { "epoch": 1.8993945868945867, "grad_norm": 0.7454953193664551, "learning_rate": 0.00010806776368797982, "loss": 1.2323, "step": 10668 }, { "epoch": 
1.8995726495726495, "grad_norm": 0.6891530752182007, "learning_rate": 0.00010805381176513043, "loss": 1.1104, "step": 10669 }, { "epoch": 1.8997507122507122, "grad_norm": 0.6609626412391663, "learning_rate": 0.00010803985968448331, "loss": 0.8565, "step": 10670 }, { "epoch": 1.899928774928775, "grad_norm": 0.6650999188423157, "learning_rate": 0.00010802590744631187, "loss": 1.1003, "step": 10671 }, { "epoch": 1.9001068376068377, "grad_norm": 0.5794292092323303, "learning_rate": 0.00010801195505088945, "loss": 0.528, "step": 10672 }, { "epoch": 1.9002849002849003, "grad_norm": 1.0802743434906006, "learning_rate": 0.00010799800249848939, "loss": 0.8861, "step": 10673 }, { "epoch": 1.9004629629629628, "grad_norm": 0.650833249092102, "learning_rate": 0.00010798404978938513, "loss": 0.9962, "step": 10674 }, { "epoch": 1.9006410256410255, "grad_norm": 0.7290451526641846, "learning_rate": 0.00010797009692384994, "loss": 1.0764, "step": 10675 }, { "epoch": 1.9008190883190883, "grad_norm": 0.6273928880691528, "learning_rate": 0.00010795614390215727, "loss": 0.9478, "step": 10676 }, { "epoch": 1.900997150997151, "grad_norm": 0.6939455270767212, "learning_rate": 0.00010794219072458052, "loss": 0.8991, "step": 10677 }, { "epoch": 1.9011752136752138, "grad_norm": 0.7455828189849854, "learning_rate": 0.00010792823739139302, "loss": 0.8902, "step": 10678 }, { "epoch": 1.9013532763532763, "grad_norm": 0.6894607543945312, "learning_rate": 0.00010791428390286817, "loss": 0.9355, "step": 10679 }, { "epoch": 1.901531339031339, "grad_norm": 0.6844658851623535, "learning_rate": 0.00010790033025927936, "loss": 0.9835, "step": 10680 }, { "epoch": 1.9017094017094016, "grad_norm": 0.6646730899810791, "learning_rate": 0.00010788637646090001, "loss": 0.9376, "step": 10681 }, { "epoch": 1.9018874643874644, "grad_norm": 0.6494864225387573, "learning_rate": 0.00010787242250800349, "loss": 0.8533, "step": 10682 }, { "epoch": 1.9020655270655271, "grad_norm": 0.686198353767395, "learning_rate": 
0.0001078584684008632, "loss": 0.8075, "step": 10683 }, { "epoch": 1.9022435897435899, "grad_norm": 0.7014855742454529, "learning_rate": 0.00010784451413975256, "loss": 1.0805, "step": 10684 }, { "epoch": 1.9024216524216524, "grad_norm": 0.7191864252090454, "learning_rate": 0.00010783055972494496, "loss": 0.9375, "step": 10685 }, { "epoch": 1.9025997150997151, "grad_norm": 0.8114212155342102, "learning_rate": 0.00010781660515671379, "loss": 0.9716, "step": 10686 }, { "epoch": 1.9027777777777777, "grad_norm": 0.7423529028892517, "learning_rate": 0.0001078026504353325, "loss": 0.9066, "step": 10687 }, { "epoch": 1.9029558404558404, "grad_norm": 0.6517882347106934, "learning_rate": 0.00010778869556107447, "loss": 0.9908, "step": 10688 }, { "epoch": 1.9031339031339032, "grad_norm": 0.6983367800712585, "learning_rate": 0.00010777474053421315, "loss": 1.1048, "step": 10689 }, { "epoch": 1.903311965811966, "grad_norm": 0.597766101360321, "learning_rate": 0.00010776078535502193, "loss": 0.84, "step": 10690 }, { "epoch": 1.9034900284900285, "grad_norm": 0.7335455417633057, "learning_rate": 0.00010774683002377422, "loss": 1.0387, "step": 10691 }, { "epoch": 1.9036680911680912, "grad_norm": 0.6742176413536072, "learning_rate": 0.0001077328745407435, "loss": 0.9743, "step": 10692 }, { "epoch": 1.9038461538461537, "grad_norm": 0.7954961657524109, "learning_rate": 0.00010771891890620316, "loss": 1.1025, "step": 10693 }, { "epoch": 1.9040242165242165, "grad_norm": 0.733351469039917, "learning_rate": 0.00010770496312042664, "loss": 1.028, "step": 10694 }, { "epoch": 1.9042022792022792, "grad_norm": 0.7059772610664368, "learning_rate": 0.00010769100718368734, "loss": 1.0103, "step": 10695 }, { "epoch": 1.904380341880342, "grad_norm": 0.6234813332557678, "learning_rate": 0.00010767705109625877, "loss": 0.6893, "step": 10696 }, { "epoch": 1.9045584045584045, "grad_norm": 0.6670311689376831, "learning_rate": 0.0001076630948584143, "loss": 1.1386, "step": 10697 }, { "epoch": 
1.9047364672364673, "grad_norm": 0.7444894909858704, "learning_rate": 0.00010764913847042744, "loss": 0.8524, "step": 10698 }, { "epoch": 1.9049145299145298, "grad_norm": 0.6252964735031128, "learning_rate": 0.00010763518193257158, "loss": 0.9407, "step": 10699 }, { "epoch": 1.9050925925925926, "grad_norm": 0.7794382572174072, "learning_rate": 0.0001076212252451202, "loss": 1.05, "step": 10700 }, { "epoch": 1.9052706552706553, "grad_norm": 0.6313693523406982, "learning_rate": 0.00010760726840834671, "loss": 0.8667, "step": 10701 }, { "epoch": 1.905448717948718, "grad_norm": 0.6766461730003357, "learning_rate": 0.00010759331142252462, "loss": 0.9675, "step": 10702 }, { "epoch": 1.9056267806267806, "grad_norm": 0.7457365393638611, "learning_rate": 0.00010757935428792739, "loss": 0.9177, "step": 10703 }, { "epoch": 1.9058048433048433, "grad_norm": 0.6649872064590454, "learning_rate": 0.00010756539700482844, "loss": 0.8703, "step": 10704 }, { "epoch": 1.9059829059829059, "grad_norm": 0.8418740034103394, "learning_rate": 0.00010755143957350127, "loss": 0.8993, "step": 10705 }, { "epoch": 1.9061609686609686, "grad_norm": 0.6767167448997498, "learning_rate": 0.00010753748199421929, "loss": 1.0063, "step": 10706 }, { "epoch": 1.9063390313390314, "grad_norm": 0.6959242820739746, "learning_rate": 0.00010752352426725603, "loss": 1.0516, "step": 10707 }, { "epoch": 1.9065170940170941, "grad_norm": 0.7106529474258423, "learning_rate": 0.00010750956639288493, "loss": 0.9596, "step": 10708 }, { "epoch": 1.9066951566951567, "grad_norm": 0.7611243724822998, "learning_rate": 0.00010749560837137949, "loss": 1.0739, "step": 10709 }, { "epoch": 1.9068732193732194, "grad_norm": 0.6684338450431824, "learning_rate": 0.00010748165020301317, "loss": 1.1437, "step": 10710 }, { "epoch": 1.907051282051282, "grad_norm": 0.5957385897636414, "learning_rate": 0.00010746769188805945, "loss": 0.8802, "step": 10711 }, { "epoch": 1.9072293447293447, "grad_norm": 0.69919353723526, "learning_rate": 
0.00010745373342679184, "loss": 1.1891, "step": 10712 }, { "epoch": 1.9074074074074074, "grad_norm": 0.7562127709388733, "learning_rate": 0.0001074397748194838, "loss": 0.8717, "step": 10713 }, { "epoch": 1.9075854700854702, "grad_norm": 0.6420038938522339, "learning_rate": 0.00010742581606640882, "loss": 1.1196, "step": 10714 }, { "epoch": 1.9077635327635327, "grad_norm": 0.7545611262321472, "learning_rate": 0.00010741185716784039, "loss": 1.161, "step": 10715 }, { "epoch": 1.9079415954415955, "grad_norm": 0.6467727422714233, "learning_rate": 0.000107397898124052, "loss": 0.8029, "step": 10716 }, { "epoch": 1.908119658119658, "grad_norm": 0.6129235625267029, "learning_rate": 0.00010738393893531722, "loss": 0.8802, "step": 10717 }, { "epoch": 1.9082977207977208, "grad_norm": 0.6416113376617432, "learning_rate": 0.00010736997960190946, "loss": 0.8465, "step": 10718 }, { "epoch": 1.9084757834757835, "grad_norm": 0.6609050631523132, "learning_rate": 0.00010735602012410229, "loss": 0.9484, "step": 10719 }, { "epoch": 1.9086538461538463, "grad_norm": 0.6302639842033386, "learning_rate": 0.00010734206050216913, "loss": 0.898, "step": 10720 }, { "epoch": 1.9088319088319088, "grad_norm": 0.7291215658187866, "learning_rate": 0.00010732810073638358, "loss": 0.9544, "step": 10721 }, { "epoch": 1.9090099715099715, "grad_norm": 0.6436966061592102, "learning_rate": 0.0001073141408270191, "loss": 0.956, "step": 10722 }, { "epoch": 1.909188034188034, "grad_norm": 0.6247875094413757, "learning_rate": 0.00010730018077434924, "loss": 0.8704, "step": 10723 }, { "epoch": 1.9093660968660968, "grad_norm": 0.7599029541015625, "learning_rate": 0.00010728622057864753, "loss": 1.2024, "step": 10724 }, { "epoch": 1.9095441595441596, "grad_norm": 0.6894544959068298, "learning_rate": 0.00010727226024018744, "loss": 1.1226, "step": 10725 }, { "epoch": 1.9097222222222223, "grad_norm": 0.6920733451843262, "learning_rate": 0.0001072582997592425, "loss": 0.7682, "step": 10726 }, { "epoch": 
1.9099002849002849, "grad_norm": 0.6013005375862122, "learning_rate": 0.00010724433913608627, "loss": 0.9462, "step": 10727 }, { "epoch": 1.9100783475783476, "grad_norm": 0.7466302514076233, "learning_rate": 0.00010723037837099225, "loss": 0.9507, "step": 10728 }, { "epoch": 1.9102564102564101, "grad_norm": 0.7070091962814331, "learning_rate": 0.00010721641746423401, "loss": 1.0704, "step": 10729 }, { "epoch": 1.9104344729344729, "grad_norm": 0.6747950315475464, "learning_rate": 0.00010720245641608506, "loss": 0.7899, "step": 10730 }, { "epoch": 1.9106125356125356, "grad_norm": 0.7338371276855469, "learning_rate": 0.00010718849522681891, "loss": 0.9574, "step": 10731 }, { "epoch": 1.9107905982905984, "grad_norm": 0.6923216581344604, "learning_rate": 0.00010717453389670915, "loss": 1.0725, "step": 10732 }, { "epoch": 1.910968660968661, "grad_norm": 0.6050783395767212, "learning_rate": 0.0001071605724260293, "loss": 0.9224, "step": 10733 }, { "epoch": 1.9111467236467237, "grad_norm": 0.6854597330093384, "learning_rate": 0.00010714661081505291, "loss": 0.9749, "step": 10734 }, { "epoch": 1.9113247863247862, "grad_norm": 0.7661508321762085, "learning_rate": 0.00010713264906405351, "loss": 1.1564, "step": 10735 }, { "epoch": 1.911502849002849, "grad_norm": 0.6389622688293457, "learning_rate": 0.00010711868717330467, "loss": 0.8148, "step": 10736 }, { "epoch": 1.9116809116809117, "grad_norm": 0.6318161487579346, "learning_rate": 0.00010710472514307996, "loss": 0.7833, "step": 10737 }, { "epoch": 1.9118589743589745, "grad_norm": 0.8646727800369263, "learning_rate": 0.00010709076297365292, "loss": 1.2682, "step": 10738 }, { "epoch": 1.9120370370370372, "grad_norm": 0.6085501909255981, "learning_rate": 0.0001070768006652971, "loss": 0.8706, "step": 10739 }, { "epoch": 1.9122150997150997, "grad_norm": 0.8259731531143188, "learning_rate": 0.00010706283821828607, "loss": 0.9014, "step": 10740 }, { "epoch": 1.9123931623931623, "grad_norm": 0.6509148478507996, "learning_rate": 
0.0001070488756328934, "loss": 0.8814, "step": 10741 }, { "epoch": 1.912571225071225, "grad_norm": 0.7241966128349304, "learning_rate": 0.00010703491290939264, "loss": 0.9925, "step": 10742 }, { "epoch": 1.9127492877492878, "grad_norm": 0.7736822366714478, "learning_rate": 0.00010702095004805738, "loss": 1.0881, "step": 10743 }, { "epoch": 1.9129273504273505, "grad_norm": 0.6912824511528015, "learning_rate": 0.00010700698704916123, "loss": 1.2334, "step": 10744 }, { "epoch": 1.9131054131054133, "grad_norm": 0.825065553188324, "learning_rate": 0.0001069930239129777, "loss": 0.9783, "step": 10745 }, { "epoch": 1.9132834757834758, "grad_norm": 0.7650560140609741, "learning_rate": 0.00010697906063978038, "loss": 0.9788, "step": 10746 }, { "epoch": 1.9134615384615383, "grad_norm": 0.7368232607841492, "learning_rate": 0.00010696509722984287, "loss": 0.8704, "step": 10747 }, { "epoch": 1.913639601139601, "grad_norm": 0.6630628108978271, "learning_rate": 0.00010695113368343875, "loss": 1.1993, "step": 10748 }, { "epoch": 1.9138176638176638, "grad_norm": 0.6842190027236938, "learning_rate": 0.0001069371700008416, "loss": 0.9128, "step": 10749 }, { "epoch": 1.9139957264957266, "grad_norm": 0.591655969619751, "learning_rate": 0.00010692320618232503, "loss": 1.0607, "step": 10750 }, { "epoch": 1.9141737891737893, "grad_norm": 0.74644535779953, "learning_rate": 0.0001069092422281626, "loss": 1.0937, "step": 10751 }, { "epoch": 1.9143518518518519, "grad_norm": 0.7123813629150391, "learning_rate": 0.00010689527813862792, "loss": 0.9043, "step": 10752 }, { "epoch": 1.9145299145299144, "grad_norm": 0.6850089430809021, "learning_rate": 0.0001068813139139946, "loss": 1.0908, "step": 10753 }, { "epoch": 1.9147079772079771, "grad_norm": 0.5882078409194946, "learning_rate": 0.00010686734955453623, "loss": 0.829, "step": 10754 }, { "epoch": 1.91488603988604, "grad_norm": 0.6741717457771301, "learning_rate": 0.00010685338506052642, "loss": 0.9197, "step": 10755 }, { "epoch": 
1.9150641025641026, "grad_norm": 0.6597354412078857, "learning_rate": 0.00010683942043223876, "loss": 0.8778, "step": 10756 }, { "epoch": 1.9152421652421654, "grad_norm": 0.6682151556015015, "learning_rate": 0.00010682545566994684, "loss": 0.9305, "step": 10757 }, { "epoch": 1.915420227920228, "grad_norm": 0.8283176422119141, "learning_rate": 0.00010681149077392431, "loss": 1.0164, "step": 10758 }, { "epoch": 1.9155982905982905, "grad_norm": 0.648845374584198, "learning_rate": 0.00010679752574444477, "loss": 1.0114, "step": 10759 }, { "epoch": 1.9157763532763532, "grad_norm": 0.755913496017456, "learning_rate": 0.00010678356058178182, "loss": 1.1142, "step": 10760 }, { "epoch": 1.915954415954416, "grad_norm": 0.7334780097007751, "learning_rate": 0.00010676959528620911, "loss": 0.8758, "step": 10761 }, { "epoch": 1.9161324786324787, "grad_norm": 0.9132041335105896, "learning_rate": 0.00010675562985800025, "loss": 0.995, "step": 10762 }, { "epoch": 1.9163105413105415, "grad_norm": 0.7070860266685486, "learning_rate": 0.00010674166429742882, "loss": 0.9856, "step": 10763 }, { "epoch": 1.916488603988604, "grad_norm": 0.7143638134002686, "learning_rate": 0.00010672769860476853, "loss": 1.0612, "step": 10764 }, { "epoch": 1.9166666666666665, "grad_norm": 0.815717339515686, "learning_rate": 0.00010671373278029293, "loss": 1.1539, "step": 10765 }, { "epoch": 1.9168447293447293, "grad_norm": 0.6379499435424805, "learning_rate": 0.0001066997668242757, "loss": 0.8295, "step": 10766 }, { "epoch": 1.917022792022792, "grad_norm": 0.6482511758804321, "learning_rate": 0.00010668580073699044, "loss": 1.0079, "step": 10767 }, { "epoch": 1.9172008547008548, "grad_norm": 0.7382873296737671, "learning_rate": 0.00010667183451871082, "loss": 0.8973, "step": 10768 }, { "epoch": 1.9173789173789175, "grad_norm": 0.7818579077720642, "learning_rate": 0.00010665786816971044, "loss": 1.2131, "step": 10769 }, { "epoch": 1.91755698005698, "grad_norm": 0.6960901021957397, "learning_rate": 
0.000106643901690263, "loss": 1.1466, "step": 10770 }, { "epoch": 1.9177350427350426, "grad_norm": 0.696966826915741, "learning_rate": 0.00010662993508064208, "loss": 0.854, "step": 10771 }, { "epoch": 1.9179131054131053, "grad_norm": 0.6745442152023315, "learning_rate": 0.00010661596834112133, "loss": 0.9559, "step": 10772 }, { "epoch": 1.918091168091168, "grad_norm": 0.7436230778694153, "learning_rate": 0.00010660200147197447, "loss": 1.1367, "step": 10773 }, { "epoch": 1.9182692307692308, "grad_norm": 0.6051676869392395, "learning_rate": 0.00010658803447347509, "loss": 1.05, "step": 10774 }, { "epoch": 1.9184472934472936, "grad_norm": 0.5662530660629272, "learning_rate": 0.00010657406734589686, "loss": 0.8697, "step": 10775 }, { "epoch": 1.9186253561253561, "grad_norm": 0.6640757322311401, "learning_rate": 0.00010656010008951344, "loss": 1.0636, "step": 10776 }, { "epoch": 1.9188034188034186, "grad_norm": 0.6994011998176575, "learning_rate": 0.00010654613270459848, "loss": 0.9326, "step": 10777 }, { "epoch": 1.9189814814814814, "grad_norm": 0.6827420592308044, "learning_rate": 0.00010653216519142563, "loss": 0.8667, "step": 10778 }, { "epoch": 1.9191595441595442, "grad_norm": 0.6814691424369812, "learning_rate": 0.00010651819755026862, "loss": 0.828, "step": 10779 }, { "epoch": 1.919337606837607, "grad_norm": 0.7033611536026001, "learning_rate": 0.00010650422978140103, "loss": 1.0427, "step": 10780 }, { "epoch": 1.9195156695156697, "grad_norm": 0.7098833322525024, "learning_rate": 0.00010649026188509657, "loss": 1.1723, "step": 10781 }, { "epoch": 1.9196937321937322, "grad_norm": 0.7184767723083496, "learning_rate": 0.00010647629386162893, "loss": 0.852, "step": 10782 }, { "epoch": 1.9198717948717947, "grad_norm": 0.6682565808296204, "learning_rate": 0.00010646232571127175, "loss": 0.8827, "step": 10783 }, { "epoch": 1.9200498575498575, "grad_norm": 0.6699280142784119, "learning_rate": 0.00010644835743429873, "loss": 0.8346, "step": 10784 }, { "epoch": 
1.9202279202279202, "grad_norm": 0.8041857481002808, "learning_rate": 0.00010643438903098355, "loss": 0.9622, "step": 10785 }, { "epoch": 1.920405982905983, "grad_norm": 0.7315110564231873, "learning_rate": 0.00010642042050159986, "loss": 1.0443, "step": 10786 }, { "epoch": 1.9205840455840457, "grad_norm": 0.5850204229354858, "learning_rate": 0.0001064064518464214, "loss": 1.0155, "step": 10787 }, { "epoch": 1.9207621082621082, "grad_norm": 0.7320640683174133, "learning_rate": 0.00010639248306572178, "loss": 1.1556, "step": 10788 }, { "epoch": 1.9209401709401708, "grad_norm": 0.689804196357727, "learning_rate": 0.00010637851415977478, "loss": 1.1058, "step": 10789 }, { "epoch": 1.9211182336182335, "grad_norm": 0.6433262228965759, "learning_rate": 0.000106364545128854, "loss": 1.0916, "step": 10790 }, { "epoch": 1.9212962962962963, "grad_norm": 0.6802626252174377, "learning_rate": 0.00010635057597323323, "loss": 1.126, "step": 10791 }, { "epoch": 1.921474358974359, "grad_norm": 0.7503384351730347, "learning_rate": 0.00010633660669318608, "loss": 0.9354, "step": 10792 }, { "epoch": 1.9216524216524218, "grad_norm": 0.6370253562927246, "learning_rate": 0.00010632263728898629, "loss": 0.9976, "step": 10793 }, { "epoch": 1.9218304843304843, "grad_norm": 0.7566042542457581, "learning_rate": 0.00010630866776090755, "loss": 1.0311, "step": 10794 }, { "epoch": 1.922008547008547, "grad_norm": 0.7011943459510803, "learning_rate": 0.0001062946981092236, "loss": 0.8777, "step": 10795 }, { "epoch": 1.9221866096866096, "grad_norm": 0.6621114015579224, "learning_rate": 0.00010628072833420811, "loss": 0.9615, "step": 10796 }, { "epoch": 1.9223646723646723, "grad_norm": 0.6863150000572205, "learning_rate": 0.00010626675843613478, "loss": 1.071, "step": 10797 }, { "epoch": 1.922542735042735, "grad_norm": 0.597970724105835, "learning_rate": 0.00010625278841527733, "loss": 0.8661, "step": 10798 }, { "epoch": 1.9227207977207978, "grad_norm": 0.5958755612373352, "learning_rate": 
0.00010623881827190947, "loss": 0.9075, "step": 10799 }, { "epoch": 1.9228988603988604, "grad_norm": 0.7764523029327393, "learning_rate": 0.00010622484800630494, "loss": 1.0576, "step": 10800 }, { "epoch": 1.9230769230769231, "grad_norm": 0.774156391620636, "learning_rate": 0.00010621087761873748, "loss": 0.9273, "step": 10801 }, { "epoch": 1.9232549857549857, "grad_norm": 0.6321687698364258, "learning_rate": 0.00010619690710948074, "loss": 0.8805, "step": 10802 }, { "epoch": 1.9234330484330484, "grad_norm": 0.659538984298706, "learning_rate": 0.00010618293647880846, "loss": 0.9845, "step": 10803 }, { "epoch": 1.9236111111111112, "grad_norm": 0.6931299567222595, "learning_rate": 0.00010616896572699442, "loss": 1.2005, "step": 10804 }, { "epoch": 1.923789173789174, "grad_norm": 0.6054762005805969, "learning_rate": 0.00010615499485431228, "loss": 0.825, "step": 10805 }, { "epoch": 1.9239672364672364, "grad_norm": 0.6631526947021484, "learning_rate": 0.00010614102386103584, "loss": 0.9149, "step": 10806 }, { "epoch": 1.9241452991452992, "grad_norm": 0.6667893528938293, "learning_rate": 0.00010612705274743878, "loss": 1.014, "step": 10807 }, { "epoch": 1.9243233618233617, "grad_norm": 0.861302375793457, "learning_rate": 0.00010611308151379482, "loss": 1.0809, "step": 10808 }, { "epoch": 1.9245014245014245, "grad_norm": 0.6997994780540466, "learning_rate": 0.00010609911016037777, "loss": 0.8897, "step": 10809 }, { "epoch": 1.9246794871794872, "grad_norm": 0.5689206123352051, "learning_rate": 0.00010608513868746131, "loss": 0.7517, "step": 10810 }, { "epoch": 1.92485754985755, "grad_norm": 0.5972287654876709, "learning_rate": 0.00010607116709531918, "loss": 0.9015, "step": 10811 }, { "epoch": 1.9250356125356125, "grad_norm": 0.7115643620491028, "learning_rate": 0.00010605719538422519, "loss": 0.6974, "step": 10812 }, { "epoch": 1.9252136752136753, "grad_norm": 0.6548098921775818, "learning_rate": 0.00010604322355445297, "loss": 0.7075, "step": 10813 }, { "epoch": 
1.9253917378917378, "grad_norm": 0.6666337847709656, "learning_rate": 0.00010602925160627639, "loss": 1.0389, "step": 10814 }, { "epoch": 1.9255698005698005, "grad_norm": 0.7754444479942322, "learning_rate": 0.00010601527953996913, "loss": 1.0674, "step": 10815 }, { "epoch": 1.9257478632478633, "grad_norm": 0.6602712869644165, "learning_rate": 0.00010600130735580498, "loss": 1.2622, "step": 10816 }, { "epoch": 1.925925925925926, "grad_norm": 0.6974020004272461, "learning_rate": 0.00010598733505405767, "loss": 0.9748, "step": 10817 }, { "epoch": 1.9261039886039886, "grad_norm": 0.6236271858215332, "learning_rate": 0.00010597336263500095, "loss": 0.9463, "step": 10818 }, { "epoch": 1.9262820512820513, "grad_norm": 0.6856079697608948, "learning_rate": 0.00010595939009890859, "loss": 0.9484, "step": 10819 }, { "epoch": 1.9264601139601139, "grad_norm": 0.7300925850868225, "learning_rate": 0.00010594541744605437, "loss": 0.9702, "step": 10820 }, { "epoch": 1.9266381766381766, "grad_norm": 0.6546478867530823, "learning_rate": 0.00010593144467671208, "loss": 0.8235, "step": 10821 }, { "epoch": 1.9268162393162394, "grad_norm": 0.7215169072151184, "learning_rate": 0.00010591747179115543, "loss": 0.9986, "step": 10822 }, { "epoch": 1.926994301994302, "grad_norm": 0.7304712533950806, "learning_rate": 0.00010590349878965822, "loss": 1.099, "step": 10823 }, { "epoch": 1.9271723646723646, "grad_norm": 0.5883305668830872, "learning_rate": 0.0001058895256724942, "loss": 1.0647, "step": 10824 }, { "epoch": 1.9273504273504274, "grad_norm": 0.8067272305488586, "learning_rate": 0.00010587555243993716, "loss": 1.0295, "step": 10825 }, { "epoch": 1.92752849002849, "grad_norm": 0.6607550978660583, "learning_rate": 0.00010586157909226089, "loss": 0.8669, "step": 10826 }, { "epoch": 1.9277065527065527, "grad_norm": 0.7256106734275818, "learning_rate": 0.00010584760562973914, "loss": 1.1674, "step": 10827 }, { "epoch": 1.9278846153846154, "grad_norm": 0.6584621071815491, "learning_rate": 
0.00010583363205264574, "loss": 0.8901, "step": 10828 }, { "epoch": 1.9280626780626782, "grad_norm": 0.7200617790222168, "learning_rate": 0.00010581965836125439, "loss": 1.0463, "step": 10829 }, { "epoch": 1.9282407407407407, "grad_norm": 0.7244223952293396, "learning_rate": 0.00010580568455583894, "loss": 1.0973, "step": 10830 }, { "epoch": 1.9284188034188035, "grad_norm": 0.7678009867668152, "learning_rate": 0.00010579171063667317, "loss": 1.1753, "step": 10831 }, { "epoch": 1.928596866096866, "grad_norm": 0.6455881595611572, "learning_rate": 0.00010577773660403085, "loss": 0.8988, "step": 10832 }, { "epoch": 1.9287749287749287, "grad_norm": 0.6804864406585693, "learning_rate": 0.0001057637624581858, "loss": 0.8156, "step": 10833 }, { "epoch": 1.9289529914529915, "grad_norm": 0.7874828577041626, "learning_rate": 0.00010574978819941176, "loss": 1.1876, "step": 10834 }, { "epoch": 1.9291310541310542, "grad_norm": 0.7396490573883057, "learning_rate": 0.00010573581382798261, "loss": 0.8709, "step": 10835 }, { "epoch": 1.9293091168091168, "grad_norm": 0.6800381541252136, "learning_rate": 0.00010572183934417209, "loss": 0.9906, "step": 10836 }, { "epoch": 1.9294871794871795, "grad_norm": 0.7077754139900208, "learning_rate": 0.000105707864748254, "loss": 0.9785, "step": 10837 }, { "epoch": 1.929665242165242, "grad_norm": 0.693249523639679, "learning_rate": 0.00010569389004050216, "loss": 0.9515, "step": 10838 }, { "epoch": 1.9298433048433048, "grad_norm": 0.706924319267273, "learning_rate": 0.00010567991522119037, "loss": 1.074, "step": 10839 }, { "epoch": 1.9300213675213675, "grad_norm": 0.6504101157188416, "learning_rate": 0.00010566594029059244, "loss": 1.0635, "step": 10840 }, { "epoch": 1.9301994301994303, "grad_norm": 0.7620238661766052, "learning_rate": 0.00010565196524898219, "loss": 0.944, "step": 10841 }, { "epoch": 1.9303774928774928, "grad_norm": 0.6713484525680542, "learning_rate": 0.00010563799009663344, "loss": 0.749, "step": 10842 }, { "epoch": 
1.9305555555555556, "grad_norm": 0.9279242157936096, "learning_rate": 0.00010562401483381997, "loss": 0.961, "step": 10843 }, { "epoch": 1.930733618233618, "grad_norm": 0.6710723638534546, "learning_rate": 0.00010561003946081558, "loss": 1.1288, "step": 10844 }, { "epoch": 1.9309116809116809, "grad_norm": 0.7751701474189758, "learning_rate": 0.00010559606397789416, "loss": 0.9435, "step": 10845 }, { "epoch": 1.9310897435897436, "grad_norm": 0.6741766929626465, "learning_rate": 0.00010558208838532948, "loss": 1.0299, "step": 10846 }, { "epoch": 1.9312678062678064, "grad_norm": 0.6988041400909424, "learning_rate": 0.00010556811268339539, "loss": 1.0236, "step": 10847 }, { "epoch": 1.931445868945869, "grad_norm": 0.6353505253791809, "learning_rate": 0.00010555413687236568, "loss": 1.0361, "step": 10848 }, { "epoch": 1.9316239316239316, "grad_norm": 0.7162703275680542, "learning_rate": 0.0001055401609525142, "loss": 1.0931, "step": 10849 }, { "epoch": 1.9318019943019942, "grad_norm": 0.61545330286026, "learning_rate": 0.00010552618492411476, "loss": 0.8829, "step": 10850 }, { "epoch": 1.931980056980057, "grad_norm": 0.6304612159729004, "learning_rate": 0.00010551220878744124, "loss": 0.8574, "step": 10851 }, { "epoch": 1.9321581196581197, "grad_norm": 0.6372067928314209, "learning_rate": 0.00010549823254276743, "loss": 1.0949, "step": 10852 }, { "epoch": 1.9323361823361824, "grad_norm": 0.6952856779098511, "learning_rate": 0.00010548425619036715, "loss": 0.9232, "step": 10853 }, { "epoch": 1.9325142450142452, "grad_norm": 0.6510106325149536, "learning_rate": 0.00010547027973051427, "loss": 1.0753, "step": 10854 }, { "epoch": 1.9326923076923077, "grad_norm": 0.6377716064453125, "learning_rate": 0.00010545630316348263, "loss": 0.8466, "step": 10855 }, { "epoch": 1.9328703703703702, "grad_norm": 0.7366968393325806, "learning_rate": 0.00010544232648954606, "loss": 0.9351, "step": 10856 }, { "epoch": 1.933048433048433, "grad_norm": 0.703652024269104, "learning_rate": 
0.00010542834970897843, "loss": 1.0032, "step": 10857 }, { "epoch": 1.9332264957264957, "grad_norm": 0.6685494780540466, "learning_rate": 0.00010541437282205355, "loss": 0.8818, "step": 10858 }, { "epoch": 1.9334045584045585, "grad_norm": 0.6594362854957581, "learning_rate": 0.00010540039582904527, "loss": 0.9535, "step": 10859 }, { "epoch": 1.9335826210826212, "grad_norm": 0.8003259301185608, "learning_rate": 0.00010538641873022744, "loss": 0.8852, "step": 10860 }, { "epoch": 1.9337606837606838, "grad_norm": 0.6567012071609497, "learning_rate": 0.00010537244152587393, "loss": 1.0832, "step": 10861 }, { "epoch": 1.9339387464387463, "grad_norm": 0.6714941263198853, "learning_rate": 0.00010535846421625862, "loss": 1.1047, "step": 10862 }, { "epoch": 1.934116809116809, "grad_norm": 0.6998924612998962, "learning_rate": 0.00010534448680165531, "loss": 0.8827, "step": 10863 }, { "epoch": 1.9342948717948718, "grad_norm": 0.6065765619277954, "learning_rate": 0.0001053305092823379, "loss": 0.5773, "step": 10864 }, { "epoch": 1.9344729344729346, "grad_norm": 0.7678273916244507, "learning_rate": 0.0001053165316585802, "loss": 0.9199, "step": 10865 }, { "epoch": 1.9346509971509973, "grad_norm": 0.7071540951728821, "learning_rate": 0.00010530255393065613, "loss": 1.0292, "step": 10866 }, { "epoch": 1.9348290598290598, "grad_norm": 0.6329835057258606, "learning_rate": 0.00010528857609883956, "loss": 0.9915, "step": 10867 }, { "epoch": 1.9350071225071224, "grad_norm": 0.6274038553237915, "learning_rate": 0.00010527459816340427, "loss": 0.8499, "step": 10868 }, { "epoch": 1.9351851851851851, "grad_norm": 0.6564371585845947, "learning_rate": 0.00010526062012462424, "loss": 1.1707, "step": 10869 }, { "epoch": 1.9353632478632479, "grad_norm": 0.8561269044876099, "learning_rate": 0.00010524664198277326, "loss": 1.148, "step": 10870 }, { "epoch": 1.9355413105413106, "grad_norm": 0.6322671175003052, "learning_rate": 0.00010523266373812521, "loss": 0.9165, "step": 10871 }, { "epoch": 
1.9357193732193734, "grad_norm": 0.7602947354316711, "learning_rate": 0.00010521868539095403, "loss": 0.9647, "step": 10872 }, { "epoch": 1.935897435897436, "grad_norm": 0.5962168574333191, "learning_rate": 0.00010520470694153353, "loss": 0.8585, "step": 10873 }, { "epoch": 1.9360754985754984, "grad_norm": 0.7498637437820435, "learning_rate": 0.00010519072839013757, "loss": 0.9828, "step": 10874 }, { "epoch": 1.9362535612535612, "grad_norm": 0.6841256022453308, "learning_rate": 0.00010517674973704012, "loss": 0.9991, "step": 10875 }, { "epoch": 1.936431623931624, "grad_norm": 0.8281826972961426, "learning_rate": 0.00010516277098251499, "loss": 1.028, "step": 10876 }, { "epoch": 1.9366096866096867, "grad_norm": 0.6673563718795776, "learning_rate": 0.0001051487921268361, "loss": 1.1594, "step": 10877 }, { "epoch": 1.9367877492877494, "grad_norm": 0.7833667993545532, "learning_rate": 0.00010513481317027733, "loss": 0.7675, "step": 10878 }, { "epoch": 1.936965811965812, "grad_norm": 0.6087225675582886, "learning_rate": 0.00010512083411311253, "loss": 0.7803, "step": 10879 }, { "epoch": 1.9371438746438745, "grad_norm": 0.6758120656013489, "learning_rate": 0.00010510685495561563, "loss": 1.0621, "step": 10880 }, { "epoch": 1.9373219373219372, "grad_norm": 0.6720096468925476, "learning_rate": 0.00010509287569806055, "loss": 0.8502, "step": 10881 }, { "epoch": 1.9375, "grad_norm": 0.6233887672424316, "learning_rate": 0.00010507889634072113, "loss": 1.0127, "step": 10882 }, { "epoch": 1.9376780626780628, "grad_norm": 0.667742908000946, "learning_rate": 0.00010506491688387127, "loss": 0.9086, "step": 10883 }, { "epoch": 1.9378561253561255, "grad_norm": 0.6533677577972412, "learning_rate": 0.00010505093732778492, "loss": 0.9724, "step": 10884 }, { "epoch": 1.938034188034188, "grad_norm": 0.7171359062194824, "learning_rate": 0.00010503695767273591, "loss": 0.9915, "step": 10885 }, { "epoch": 1.9382122507122506, "grad_norm": 0.723655641078949, "learning_rate": 
0.0001050229779189982, "loss": 0.8981, "step": 10886 }, { "epoch": 1.9383903133903133, "grad_norm": 0.6863494515419006, "learning_rate": 0.00010500899806684568, "loss": 1.2577, "step": 10887 }, { "epoch": 1.938568376068376, "grad_norm": 0.8174706697463989, "learning_rate": 0.00010499501811655224, "loss": 0.9848, "step": 10888 }, { "epoch": 1.9387464387464388, "grad_norm": 0.6378024220466614, "learning_rate": 0.00010498103806839179, "loss": 0.8499, "step": 10889 }, { "epoch": 1.9389245014245016, "grad_norm": 0.6734544634819031, "learning_rate": 0.00010496705792263823, "loss": 0.8446, "step": 10890 }, { "epoch": 1.939102564102564, "grad_norm": 0.6802361607551575, "learning_rate": 0.00010495307767956551, "loss": 0.9285, "step": 10891 }, { "epoch": 1.9392806267806266, "grad_norm": 0.7821299433708191, "learning_rate": 0.00010493909733944752, "loss": 1.08, "step": 10892 }, { "epoch": 1.9394586894586894, "grad_norm": 0.6204990148544312, "learning_rate": 0.00010492511690255818, "loss": 0.7861, "step": 10893 }, { "epoch": 1.9396367521367521, "grad_norm": 0.6386391520500183, "learning_rate": 0.0001049111363691714, "loss": 0.9162, "step": 10894 }, { "epoch": 1.9398148148148149, "grad_norm": 0.6885092854499817, "learning_rate": 0.0001048971557395611, "loss": 1.0026, "step": 10895 }, { "epoch": 1.9399928774928776, "grad_norm": 0.6962558627128601, "learning_rate": 0.00010488317501400122, "loss": 1.146, "step": 10896 }, { "epoch": 1.9401709401709402, "grad_norm": 0.6283716559410095, "learning_rate": 0.00010486919419276566, "loss": 1.0268, "step": 10897 }, { "epoch": 1.9403490028490027, "grad_norm": 0.7183622717857361, "learning_rate": 0.00010485521327612835, "loss": 1.0123, "step": 10898 }, { "epoch": 1.9405270655270654, "grad_norm": 0.6354197263717651, "learning_rate": 0.00010484123226436321, "loss": 0.871, "step": 10899 }, { "epoch": 1.9407051282051282, "grad_norm": 0.804358184337616, "learning_rate": 0.00010482725115774421, "loss": 1.1001, "step": 10900 }, { "epoch": 
1.940883190883191, "grad_norm": 0.6896754503250122, "learning_rate": 0.00010481326995654524, "loss": 1.0976, "step": 10901 }, { "epoch": 1.9410612535612537, "grad_norm": 0.9108015894889832, "learning_rate": 0.00010479928866104023, "loss": 0.8785, "step": 10902 }, { "epoch": 1.9412393162393162, "grad_norm": 0.6963121294975281, "learning_rate": 0.00010478530727150316, "loss": 1.0458, "step": 10903 }, { "epoch": 1.9414173789173788, "grad_norm": 0.6657114624977112, "learning_rate": 0.00010477132578820792, "loss": 0.8188, "step": 10904 }, { "epoch": 1.9415954415954415, "grad_norm": 0.671716034412384, "learning_rate": 0.00010475734421142847, "loss": 1.0915, "step": 10905 }, { "epoch": 1.9417735042735043, "grad_norm": 0.6790717244148254, "learning_rate": 0.0001047433625414387, "loss": 0.9688, "step": 10906 }, { "epoch": 1.941951566951567, "grad_norm": 0.6411764621734619, "learning_rate": 0.00010472938077851264, "loss": 1.0387, "step": 10907 }, { "epoch": 1.9421296296296298, "grad_norm": 0.8579615950584412, "learning_rate": 0.00010471539892292417, "loss": 1.1635, "step": 10908 }, { "epoch": 1.9423076923076923, "grad_norm": 0.7031029462814331, "learning_rate": 0.00010470141697494726, "loss": 0.9813, "step": 10909 }, { "epoch": 1.9424857549857548, "grad_norm": 0.6657388806343079, "learning_rate": 0.00010468743493485584, "loss": 0.7947, "step": 10910 }, { "epoch": 1.9426638176638176, "grad_norm": 0.6364194750785828, "learning_rate": 0.00010467345280292389, "loss": 0.8554, "step": 10911 }, { "epoch": 1.9428418803418803, "grad_norm": 0.7394127249717712, "learning_rate": 0.00010465947057942534, "loss": 0.822, "step": 10912 }, { "epoch": 1.943019943019943, "grad_norm": 0.6557473540306091, "learning_rate": 0.00010464548826463411, "loss": 1.0025, "step": 10913 }, { "epoch": 1.9431980056980058, "grad_norm": 0.6530601382255554, "learning_rate": 0.00010463150585882422, "loss": 1.0828, "step": 10914 }, { "epoch": 1.9433760683760684, "grad_norm": 0.7376404404640198, "learning_rate": 
0.00010461752336226957, "loss": 0.9413, "step": 10915 }, { "epoch": 1.943554131054131, "grad_norm": 0.7110656499862671, "learning_rate": 0.00010460354077524417, "loss": 0.9162, "step": 10916 }, { "epoch": 1.9437321937321936, "grad_norm": 0.6515666246414185, "learning_rate": 0.00010458955809802194, "loss": 0.9211, "step": 10917 }, { "epoch": 1.9439102564102564, "grad_norm": 0.6888720989227295, "learning_rate": 0.00010457557533087683, "loss": 1.0632, "step": 10918 }, { "epoch": 1.9440883190883191, "grad_norm": 0.7246627807617188, "learning_rate": 0.00010456159247408286, "loss": 0.9807, "step": 10919 }, { "epoch": 1.944266381766382, "grad_norm": 0.727834165096283, "learning_rate": 0.00010454760952791394, "loss": 1.0793, "step": 10920 }, { "epoch": 1.9444444444444444, "grad_norm": 0.6365306377410889, "learning_rate": 0.00010453362649264407, "loss": 1.0415, "step": 10921 }, { "epoch": 1.9446225071225072, "grad_norm": 0.7187839150428772, "learning_rate": 0.0001045196433685472, "loss": 1.007, "step": 10922 }, { "epoch": 1.9448005698005697, "grad_norm": 0.5905138254165649, "learning_rate": 0.00010450566015589732, "loss": 0.9818, "step": 10923 }, { "epoch": 1.9449786324786325, "grad_norm": 0.7008894085884094, "learning_rate": 0.00010449167685496837, "loss": 0.8444, "step": 10924 }, { "epoch": 1.9451566951566952, "grad_norm": 0.6126312017440796, "learning_rate": 0.00010447769346603435, "loss": 0.7207, "step": 10925 }, { "epoch": 1.945334757834758, "grad_norm": 0.7513176202774048, "learning_rate": 0.00010446370998936922, "loss": 0.8693, "step": 10926 }, { "epoch": 1.9455128205128205, "grad_norm": 0.6382531523704529, "learning_rate": 0.00010444972642524697, "loss": 0.8379, "step": 10927 }, { "epoch": 1.9456908831908832, "grad_norm": 0.7062170505523682, "learning_rate": 0.0001044357427739416, "loss": 1.0525, "step": 10928 }, { "epoch": 1.9458689458689458, "grad_norm": 0.6954067349433899, "learning_rate": 0.00010442175903572703, "loss": 1.0238, "step": 10929 }, { "epoch": 
1.9460470085470085, "grad_norm": 0.7257117033004761, "learning_rate": 0.00010440777521087731, "loss": 1.1413, "step": 10930 }, { "epoch": 1.9462250712250713, "grad_norm": 0.6617701053619385, "learning_rate": 0.00010439379129966635, "loss": 1.0089, "step": 10931 }, { "epoch": 1.946403133903134, "grad_norm": 0.6860800385475159, "learning_rate": 0.00010437980730236821, "loss": 1.1778, "step": 10932 }, { "epoch": 1.9465811965811965, "grad_norm": 0.846235454082489, "learning_rate": 0.00010436582321925684, "loss": 0.9851, "step": 10933 }, { "epoch": 1.9467592592592593, "grad_norm": 0.6385617852210999, "learning_rate": 0.00010435183905060623, "loss": 0.9542, "step": 10934 }, { "epoch": 1.9469373219373218, "grad_norm": 0.7137401700019836, "learning_rate": 0.00010433785479669038, "loss": 1.0499, "step": 10935 }, { "epoch": 1.9471153846153846, "grad_norm": 0.6269308924674988, "learning_rate": 0.00010432387045778324, "loss": 0.8929, "step": 10936 }, { "epoch": 1.9472934472934473, "grad_norm": 0.7903163433074951, "learning_rate": 0.00010430988603415888, "loss": 0.9812, "step": 10937 }, { "epoch": 1.94747150997151, "grad_norm": 0.6006736159324646, "learning_rate": 0.00010429590152609121, "loss": 0.7959, "step": 10938 }, { "epoch": 1.9476495726495726, "grad_norm": 0.6061521768569946, "learning_rate": 0.00010428191693385431, "loss": 0.8748, "step": 10939 }, { "epoch": 1.9478276353276354, "grad_norm": 0.6637623906135559, "learning_rate": 0.00010426793225772216, "loss": 0.7047, "step": 10940 }, { "epoch": 1.948005698005698, "grad_norm": 0.7650586366653442, "learning_rate": 0.00010425394749796874, "loss": 1.0018, "step": 10941 }, { "epoch": 1.9481837606837606, "grad_norm": 0.6575125455856323, "learning_rate": 0.000104239962654868, "loss": 0.8915, "step": 10942 }, { "epoch": 1.9483618233618234, "grad_norm": 0.6315393447875977, "learning_rate": 0.00010422597772869404, "loss": 1.1884, "step": 10943 }, { "epoch": 1.9485398860398861, "grad_norm": 0.7607148885726929, "learning_rate": 
0.00010421199271972083, "loss": 0.9341, "step": 10944 }, { "epoch": 1.9487179487179487, "grad_norm": 0.6491827964782715, "learning_rate": 0.00010419800762822239, "loss": 0.9991, "step": 10945 }, { "epoch": 1.9488960113960114, "grad_norm": 0.6294243335723877, "learning_rate": 0.00010418402245447265, "loss": 0.9253, "step": 10946 }, { "epoch": 1.949074074074074, "grad_norm": 0.6472215056419373, "learning_rate": 0.00010417003719874571, "loss": 1.0402, "step": 10947 }, { "epoch": 1.9492521367521367, "grad_norm": 0.7377899885177612, "learning_rate": 0.00010415605186131559, "loss": 1.046, "step": 10948 }, { "epoch": 1.9494301994301995, "grad_norm": 0.6391907334327698, "learning_rate": 0.00010414206644245623, "loss": 0.8529, "step": 10949 }, { "epoch": 1.9496082621082622, "grad_norm": 0.7101355195045471, "learning_rate": 0.0001041280809424417, "loss": 0.925, "step": 10950 }, { "epoch": 1.9497863247863247, "grad_norm": 0.7891978025436401, "learning_rate": 0.00010411409536154597, "loss": 1.0691, "step": 10951 }, { "epoch": 1.9499643874643875, "grad_norm": 0.7225242853164673, "learning_rate": 0.00010410010970004311, "loss": 1.158, "step": 10952 }, { "epoch": 1.95014245014245, "grad_norm": 0.6073256731033325, "learning_rate": 0.00010408612395820714, "loss": 0.9977, "step": 10953 }, { "epoch": 1.9503205128205128, "grad_norm": 0.6373769044876099, "learning_rate": 0.00010407213813631203, "loss": 1.019, "step": 10954 }, { "epoch": 1.9504985754985755, "grad_norm": 0.7451884746551514, "learning_rate": 0.00010405815223463184, "loss": 0.9497, "step": 10955 }, { "epoch": 1.9506766381766383, "grad_norm": 0.7760418057441711, "learning_rate": 0.00010404416625344058, "loss": 1.0378, "step": 10956 }, { "epoch": 1.9508547008547008, "grad_norm": 0.7057808041572571, "learning_rate": 0.00010403018019301228, "loss": 0.8953, "step": 10957 }, { "epoch": 1.9510327635327636, "grad_norm": 0.6599584817886353, "learning_rate": 0.00010401619405362095, "loss": 0.8859, "step": 10958 }, { "epoch": 
1.951210826210826, "grad_norm": 0.6977253556251526, "learning_rate": 0.00010400220783554069, "loss": 0.9038, "step": 10959 }, { "epoch": 1.9513888888888888, "grad_norm": 0.6930267810821533, "learning_rate": 0.00010398822153904546, "loss": 1.1547, "step": 10960 }, { "epoch": 1.9515669515669516, "grad_norm": 0.6301694512367249, "learning_rate": 0.00010397423516440931, "loss": 0.8875, "step": 10961 }, { "epoch": 1.9517450142450143, "grad_norm": 0.7447484135627747, "learning_rate": 0.00010396024871190628, "loss": 1.0454, "step": 10962 }, { "epoch": 1.9519230769230769, "grad_norm": 0.8666765093803406, "learning_rate": 0.00010394626218181041, "loss": 1.2211, "step": 10963 }, { "epoch": 1.9521011396011396, "grad_norm": 0.599354088306427, "learning_rate": 0.00010393227557439573, "loss": 1.0419, "step": 10964 }, { "epoch": 1.9522792022792022, "grad_norm": 0.6991702914237976, "learning_rate": 0.00010391828888993627, "loss": 0.8217, "step": 10965 }, { "epoch": 1.952457264957265, "grad_norm": 0.7467028498649597, "learning_rate": 0.0001039043021287061, "loss": 0.8708, "step": 10966 }, { "epoch": 1.9526353276353277, "grad_norm": 0.6806215047836304, "learning_rate": 0.0001038903152909792, "loss": 1.218, "step": 10967 }, { "epoch": 1.9528133903133904, "grad_norm": 0.6704212427139282, "learning_rate": 0.00010387632837702968, "loss": 0.8428, "step": 10968 }, { "epoch": 1.952991452991453, "grad_norm": 0.6843154430389404, "learning_rate": 0.00010386234138713155, "loss": 0.9729, "step": 10969 }, { "epoch": 1.9531695156695157, "grad_norm": 0.6619821190834045, "learning_rate": 0.00010384835432155888, "loss": 1.021, "step": 10970 }, { "epoch": 1.9533475783475782, "grad_norm": 0.6249803900718689, "learning_rate": 0.0001038343671805857, "loss": 0.9321, "step": 10971 }, { "epoch": 1.953525641025641, "grad_norm": 0.7361689805984497, "learning_rate": 0.00010382037996448604, "loss": 0.9451, "step": 10972 }, { "epoch": 1.9537037037037037, "grad_norm": 0.6464847922325134, "learning_rate": 
0.00010380639267353398, "loss": 1.0188, "step": 10973 }, { "epoch": 1.9538817663817665, "grad_norm": 0.5975635647773743, "learning_rate": 0.00010379240530800356, "loss": 0.9025, "step": 10974 }, { "epoch": 1.9540598290598292, "grad_norm": 0.6734475493431091, "learning_rate": 0.00010377841786816884, "loss": 1.0742, "step": 10975 }, { "epoch": 1.9542378917378918, "grad_norm": 0.7318592667579651, "learning_rate": 0.00010376443035430386, "loss": 1.1082, "step": 10976 }, { "epoch": 1.9544159544159543, "grad_norm": 0.7696142792701721, "learning_rate": 0.00010375044276668271, "loss": 0.8421, "step": 10977 }, { "epoch": 1.954594017094017, "grad_norm": 0.68442302942276, "learning_rate": 0.00010373645510557939, "loss": 1.0794, "step": 10978 }, { "epoch": 1.9547720797720798, "grad_norm": 0.7582547068595886, "learning_rate": 0.00010372246737126801, "loss": 1.0332, "step": 10979 }, { "epoch": 1.9549501424501425, "grad_norm": 0.6529998183250427, "learning_rate": 0.00010370847956402262, "loss": 1.1833, "step": 10980 }, { "epoch": 1.9551282051282053, "grad_norm": 0.7565605044364929, "learning_rate": 0.00010369449168411729, "loss": 1.0494, "step": 10981 }, { "epoch": 1.9553062678062678, "grad_norm": 0.6346915364265442, "learning_rate": 0.00010368050373182605, "loss": 1.0052, "step": 10982 }, { "epoch": 1.9554843304843303, "grad_norm": 0.7021830081939697, "learning_rate": 0.00010366651570742298, "loss": 0.9716, "step": 10983 }, { "epoch": 1.955662393162393, "grad_norm": 0.6464530825614929, "learning_rate": 0.00010365252761118218, "loss": 0.9802, "step": 10984 }, { "epoch": 1.9558404558404558, "grad_norm": 0.6845090985298157, "learning_rate": 0.00010363853944337768, "loss": 0.9529, "step": 10985 }, { "epoch": 1.9560185185185186, "grad_norm": 0.7178115248680115, "learning_rate": 0.00010362455120428356, "loss": 0.9968, "step": 10986 }, { "epoch": 1.9561965811965814, "grad_norm": 0.6131038069725037, "learning_rate": 0.00010361056289417385, "loss": 1.0559, "step": 10987 }, { "epoch": 
1.9563746438746439, "grad_norm": 0.6946909427642822, "learning_rate": 0.0001035965745133227, "loss": 1.0457, "step": 10988 }, { "epoch": 1.9565527065527064, "grad_norm": 0.7376706600189209, "learning_rate": 0.00010358258606200413, "loss": 0.7775, "step": 10989 }, { "epoch": 1.9567307692307692, "grad_norm": 0.6864920854568481, "learning_rate": 0.00010356859754049225, "loss": 0.8798, "step": 10990 }, { "epoch": 1.956908831908832, "grad_norm": 0.6301153302192688, "learning_rate": 0.0001035546089490611, "loss": 0.8757, "step": 10991 }, { "epoch": 1.9570868945868947, "grad_norm": 0.7184807062149048, "learning_rate": 0.00010354062028798474, "loss": 1.0783, "step": 10992 }, { "epoch": 1.9572649572649574, "grad_norm": 0.7138563394546509, "learning_rate": 0.00010352663155753732, "loss": 1.0328, "step": 10993 }, { "epoch": 1.95744301994302, "grad_norm": 0.6565547585487366, "learning_rate": 0.00010351264275799286, "loss": 1.1312, "step": 10994 }, { "epoch": 1.9576210826210825, "grad_norm": 0.7055862545967102, "learning_rate": 0.00010349865388962547, "loss": 1.0787, "step": 10995 }, { "epoch": 1.9577991452991452, "grad_norm": 0.6184022426605225, "learning_rate": 0.00010348466495270926, "loss": 0.9635, "step": 10996 }, { "epoch": 1.957977207977208, "grad_norm": 0.6563652753829956, "learning_rate": 0.0001034706759475182, "loss": 0.772, "step": 10997 }, { "epoch": 1.9581552706552707, "grad_norm": 0.6103591322898865, "learning_rate": 0.00010345668687432651, "loss": 0.8113, "step": 10998 }, { "epoch": 1.9583333333333335, "grad_norm": 0.6715512275695801, "learning_rate": 0.0001034426977334082, "loss": 1.1841, "step": 10999 }, { "epoch": 1.958511396011396, "grad_norm": 0.680092453956604, "learning_rate": 0.00010342870852503739, "loss": 0.9992, "step": 11000 }, { "epoch": 1.9586894586894585, "grad_norm": 0.828472375869751, "learning_rate": 0.00010341471924948816, "loss": 1.0975, "step": 11001 }, { "epoch": 1.9588675213675213, "grad_norm": 0.758441686630249, "learning_rate": 
0.00010340072990703463, "loss": 1.0632, "step": 11002 }, { "epoch": 1.959045584045584, "grad_norm": 0.6847560405731201, "learning_rate": 0.00010338674049795079, "loss": 1.0054, "step": 11003 }, { "epoch": 1.9592236467236468, "grad_norm": 0.707626223564148, "learning_rate": 0.00010337275102251085, "loss": 0.9427, "step": 11004 }, { "epoch": 1.9594017094017095, "grad_norm": 0.769036591053009, "learning_rate": 0.00010335876148098887, "loss": 1.0424, "step": 11005 }, { "epoch": 1.959579772079772, "grad_norm": 0.822695791721344, "learning_rate": 0.00010334477187365892, "loss": 1.1573, "step": 11006 }, { "epoch": 1.9597578347578346, "grad_norm": 0.6290286183357239, "learning_rate": 0.00010333078220079513, "loss": 0.936, "step": 11007 }, { "epoch": 1.9599358974358974, "grad_norm": 0.6802252531051636, "learning_rate": 0.00010331679246267155, "loss": 0.8049, "step": 11008 }, { "epoch": 1.96011396011396, "grad_norm": 0.6652607321739197, "learning_rate": 0.00010330280265956232, "loss": 0.926, "step": 11009 }, { "epoch": 1.9602920227920229, "grad_norm": 0.7057216763496399, "learning_rate": 0.00010328881279174154, "loss": 0.9464, "step": 11010 }, { "epoch": 1.9604700854700856, "grad_norm": 0.6951601505279541, "learning_rate": 0.00010327482285948331, "loss": 0.9882, "step": 11011 }, { "epoch": 1.9606481481481481, "grad_norm": 0.6537632942199707, "learning_rate": 0.00010326083286306174, "loss": 0.8663, "step": 11012 }, { "epoch": 1.9608262108262107, "grad_norm": 0.7252047657966614, "learning_rate": 0.0001032468428027509, "loss": 1.1377, "step": 11013 }, { "epoch": 1.9610042735042734, "grad_norm": 0.6494104266166687, "learning_rate": 0.00010323285267882492, "loss": 0.8072, "step": 11014 }, { "epoch": 1.9611823361823362, "grad_norm": 0.8463460206985474, "learning_rate": 0.00010321886249155792, "loss": 1.22, "step": 11015 }, { "epoch": 1.961360398860399, "grad_norm": 0.6071396470069885, "learning_rate": 0.00010320487224122401, "loss": 0.7975, "step": 11016 }, { "epoch": 
1.9615384615384617, "grad_norm": 0.6546960473060608, "learning_rate": 0.00010319088192809725, "loss": 1.1729, "step": 11017 }, { "epoch": 1.9617165242165242, "grad_norm": 0.7399442791938782, "learning_rate": 0.00010317689155245178, "loss": 1.092, "step": 11018 }, { "epoch": 1.9618945868945867, "grad_norm": 0.7103837728500366, "learning_rate": 0.00010316290111456175, "loss": 0.8436, "step": 11019 }, { "epoch": 1.9620726495726495, "grad_norm": 0.6990065574645996, "learning_rate": 0.00010314891061470125, "loss": 0.9003, "step": 11020 }, { "epoch": 1.9622507122507122, "grad_norm": 0.7945666313171387, "learning_rate": 0.00010313492005314438, "loss": 0.8812, "step": 11021 }, { "epoch": 1.962428774928775, "grad_norm": 0.6177538633346558, "learning_rate": 0.00010312092943016527, "loss": 1.0091, "step": 11022 }, { "epoch": 1.9626068376068377, "grad_norm": 0.7260771989822388, "learning_rate": 0.000103106938746038, "loss": 0.9376, "step": 11023 }, { "epoch": 1.9627849002849003, "grad_norm": 0.6726518273353577, "learning_rate": 0.00010309294800103674, "loss": 0.8048, "step": 11024 }, { "epoch": 1.9629629629629628, "grad_norm": 0.8759992122650146, "learning_rate": 0.00010307895719543562, "loss": 1.0248, "step": 11025 }, { "epoch": 1.9631410256410255, "grad_norm": 0.683437168598175, "learning_rate": 0.00010306496632950868, "loss": 1.0314, "step": 11026 }, { "epoch": 1.9633190883190883, "grad_norm": 0.7255756258964539, "learning_rate": 0.00010305097540353012, "loss": 0.9828, "step": 11027 }, { "epoch": 1.963497150997151, "grad_norm": 0.6904804706573486, "learning_rate": 0.000103036984417774, "loss": 0.9054, "step": 11028 }, { "epoch": 1.9636752136752138, "grad_norm": 0.6906846761703491, "learning_rate": 0.00010302299337251451, "loss": 1.0287, "step": 11029 }, { "epoch": 1.9638532763532763, "grad_norm": 0.6677078008651733, "learning_rate": 0.00010300900226802575, "loss": 0.8742, "step": 11030 }, { "epoch": 1.964031339031339, "grad_norm": 0.6144888997077942, "learning_rate": 
0.00010299501110458183, "loss": 0.6942, "step": 11031 }, { "epoch": 1.9642094017094016, "grad_norm": 0.753010094165802, "learning_rate": 0.0001029810198824569, "loss": 0.9018, "step": 11032 }, { "epoch": 1.9643874643874644, "grad_norm": 0.6872276663780212, "learning_rate": 0.00010296702860192505, "loss": 1.1647, "step": 11033 }, { "epoch": 1.9645655270655271, "grad_norm": 0.709000289440155, "learning_rate": 0.00010295303726326047, "loss": 0.9143, "step": 11034 }, { "epoch": 1.9647435897435899, "grad_norm": 0.6507021188735962, "learning_rate": 0.00010293904586673723, "loss": 1.006, "step": 11035 }, { "epoch": 1.9649216524216524, "grad_norm": 0.6789946556091309, "learning_rate": 0.00010292505441262952, "loss": 0.9049, "step": 11036 }, { "epoch": 1.9650997150997151, "grad_norm": 0.7156081795692444, "learning_rate": 0.00010291106290121143, "loss": 0.9195, "step": 11037 }, { "epoch": 1.9652777777777777, "grad_norm": 0.6770932078361511, "learning_rate": 0.0001028970713327571, "loss": 0.9524, "step": 11038 }, { "epoch": 1.9654558404558404, "grad_norm": 0.7304288148880005, "learning_rate": 0.00010288307970754067, "loss": 0.9276, "step": 11039 }, { "epoch": 1.9656339031339032, "grad_norm": 0.7603645324707031, "learning_rate": 0.0001028690880258363, "loss": 1.2157, "step": 11040 }, { "epoch": 1.965811965811966, "grad_norm": 0.6875246167182922, "learning_rate": 0.00010285509628791811, "loss": 1.0269, "step": 11041 }, { "epoch": 1.9659900284900285, "grad_norm": 0.7234818935394287, "learning_rate": 0.00010284110449406026, "loss": 0.9695, "step": 11042 }, { "epoch": 1.9661680911680912, "grad_norm": 0.7322804927825928, "learning_rate": 0.00010282711264453684, "loss": 0.9752, "step": 11043 }, { "epoch": 1.9663461538461537, "grad_norm": 0.7524822950363159, "learning_rate": 0.00010281312073962202, "loss": 1.2144, "step": 11044 }, { "epoch": 1.9665242165242165, "grad_norm": 0.6623101234436035, "learning_rate": 0.00010279912877958995, "loss": 1.1334, "step": 11045 }, { "epoch": 
1.9667022792022792, "grad_norm": 0.7814893126487732, "learning_rate": 0.00010278513676471477, "loss": 1.266, "step": 11046 }, { "epoch": 1.966880341880342, "grad_norm": 0.7129884362220764, "learning_rate": 0.00010277114469527063, "loss": 1.0918, "step": 11047 }, { "epoch": 1.9670584045584045, "grad_norm": 0.6996828317642212, "learning_rate": 0.00010275715257153164, "loss": 0.9269, "step": 11048 }, { "epoch": 1.9672364672364673, "grad_norm": 0.6439059972763062, "learning_rate": 0.00010274316039377198, "loss": 1.1998, "step": 11049 }, { "epoch": 1.9674145299145298, "grad_norm": 0.6837672591209412, "learning_rate": 0.00010272916816226581, "loss": 0.8899, "step": 11050 }, { "epoch": 1.9675925925925926, "grad_norm": 0.702583909034729, "learning_rate": 0.00010271517587728726, "loss": 1.1862, "step": 11051 }, { "epoch": 1.9677706552706553, "grad_norm": 0.6627798676490784, "learning_rate": 0.00010270118353911047, "loss": 0.898, "step": 11052 }, { "epoch": 1.967948717948718, "grad_norm": 0.7628579139709473, "learning_rate": 0.00010268719114800957, "loss": 1.006, "step": 11053 }, { "epoch": 1.9681267806267806, "grad_norm": 0.6425395607948303, "learning_rate": 0.00010267319870425877, "loss": 0.962, "step": 11054 }, { "epoch": 1.9683048433048433, "grad_norm": 0.7462666630744934, "learning_rate": 0.00010265920620813219, "loss": 1.0703, "step": 11055 }, { "epoch": 1.9684829059829059, "grad_norm": 0.67641681432724, "learning_rate": 0.00010264521365990401, "loss": 1.1077, "step": 11056 }, { "epoch": 1.9686609686609686, "grad_norm": 0.6716381311416626, "learning_rate": 0.0001026312210598483, "loss": 1.1048, "step": 11057 }, { "epoch": 1.9688390313390314, "grad_norm": 0.7207448482513428, "learning_rate": 0.00010261722840823935, "loss": 0.9236, "step": 11058 }, { "epoch": 1.9690170940170941, "grad_norm": 0.7208544015884399, "learning_rate": 0.0001026032357053512, "loss": 1.0814, "step": 11059 }, { "epoch": 1.9691951566951567, "grad_norm": 0.6076363325119019, "learning_rate": 
0.00010258924295145807, "loss": 0.9388, "step": 11060 }, { "epoch": 1.9693732193732194, "grad_norm": 0.6460439562797546, "learning_rate": 0.00010257525014683411, "loss": 0.9506, "step": 11061 }, { "epoch": 1.969551282051282, "grad_norm": 0.7449939250946045, "learning_rate": 0.00010256125729175348, "loss": 1.0209, "step": 11062 }, { "epoch": 1.9697293447293447, "grad_norm": 0.640885055065155, "learning_rate": 0.00010254726438649031, "loss": 1.0235, "step": 11063 }, { "epoch": 1.9699074074074074, "grad_norm": 0.6872261166572571, "learning_rate": 0.00010253327143131879, "loss": 0.9217, "step": 11064 }, { "epoch": 1.9700854700854702, "grad_norm": 0.6213285326957703, "learning_rate": 0.0001025192784265131, "loss": 0.8204, "step": 11065 }, { "epoch": 1.9702635327635327, "grad_norm": 0.6594449281692505, "learning_rate": 0.00010250528537234736, "loss": 0.9789, "step": 11066 }, { "epoch": 1.9704415954415955, "grad_norm": 0.7098729610443115, "learning_rate": 0.00010249129226909577, "loss": 1.2551, "step": 11067 }, { "epoch": 1.970619658119658, "grad_norm": 0.7455953359603882, "learning_rate": 0.0001024772991170325, "loss": 1.0281, "step": 11068 }, { "epoch": 1.9707977207977208, "grad_norm": 0.6657416224479675, "learning_rate": 0.00010246330591643166, "loss": 0.9421, "step": 11069 }, { "epoch": 1.9709757834757835, "grad_norm": 0.6480659246444702, "learning_rate": 0.00010244931266756748, "loss": 0.9424, "step": 11070 }, { "epoch": 1.9711538461538463, "grad_norm": 0.6440510749816895, "learning_rate": 0.00010243531937071411, "loss": 0.9651, "step": 11071 }, { "epoch": 1.9713319088319088, "grad_norm": 0.6329794526100159, "learning_rate": 0.00010242132602614571, "loss": 0.9233, "step": 11072 }, { "epoch": 1.9715099715099715, "grad_norm": 0.6694819927215576, "learning_rate": 0.00010240733263413646, "loss": 0.884, "step": 11073 }, { "epoch": 1.971688034188034, "grad_norm": 0.7702556848526001, "learning_rate": 0.0001023933391949605, "loss": 1.216, "step": 11074 }, { "epoch": 
1.9718660968660968, "grad_norm": 0.6587536931037903, "learning_rate": 0.00010237934570889207, "loss": 0.9324, "step": 11075 }, { "epoch": 1.9720441595441596, "grad_norm": 0.7919837832450867, "learning_rate": 0.00010236535217620529, "loss": 1.0011, "step": 11076 }, { "epoch": 1.9722222222222223, "grad_norm": 0.6604606509208679, "learning_rate": 0.00010235135859717433, "loss": 0.929, "step": 11077 }, { "epoch": 1.9724002849002849, "grad_norm": 0.7158446907997131, "learning_rate": 0.0001023373649720734, "loss": 0.8912, "step": 11078 }, { "epoch": 1.9725783475783476, "grad_norm": 0.7450904846191406, "learning_rate": 0.00010232337130117666, "loss": 1.0782, "step": 11079 }, { "epoch": 1.9727564102564101, "grad_norm": 0.6687077283859253, "learning_rate": 0.00010230937758475827, "loss": 1.0662, "step": 11080 }, { "epoch": 1.9729344729344729, "grad_norm": 0.7188364267349243, "learning_rate": 0.00010229538382309245, "loss": 1.024, "step": 11081 }, { "epoch": 1.9731125356125356, "grad_norm": 0.6787814497947693, "learning_rate": 0.00010228139001645334, "loss": 0.9559, "step": 11082 }, { "epoch": 1.9732905982905984, "grad_norm": 0.6834072470664978, "learning_rate": 0.00010226739616511513, "loss": 0.8143, "step": 11083 }, { "epoch": 1.973468660968661, "grad_norm": 0.6651090979576111, "learning_rate": 0.00010225340226935201, "loss": 1.05, "step": 11084 }, { "epoch": 1.9736467236467237, "grad_norm": 0.7125018835067749, "learning_rate": 0.00010223940832943813, "loss": 1.0275, "step": 11085 }, { "epoch": 1.9738247863247862, "grad_norm": 0.6886870861053467, "learning_rate": 0.00010222541434564772, "loss": 1.0972, "step": 11086 }, { "epoch": 1.974002849002849, "grad_norm": 0.7068913578987122, "learning_rate": 0.00010221142031825492, "loss": 0.9248, "step": 11087 }, { "epoch": 1.9741809116809117, "grad_norm": 0.7752319574356079, "learning_rate": 0.00010219742624753397, "loss": 0.9754, "step": 11088 }, { "epoch": 1.9743589743589745, "grad_norm": 0.7915459871292114, "learning_rate": 
0.00010218343213375896, "loss": 1.2589, "step": 11089 }, { "epoch": 1.9745370370370372, "grad_norm": 0.6597068309783936, "learning_rate": 0.00010216943797720418, "loss": 1.0004, "step": 11090 }, { "epoch": 1.9747150997150997, "grad_norm": 0.7060620188713074, "learning_rate": 0.00010215544377814375, "loss": 0.9968, "step": 11091 }, { "epoch": 1.9748931623931623, "grad_norm": 0.6815677881240845, "learning_rate": 0.0001021414495368519, "loss": 0.8889, "step": 11092 }, { "epoch": 1.975071225071225, "grad_norm": 0.6872935891151428, "learning_rate": 0.00010212745525360277, "loss": 1.1582, "step": 11093 }, { "epoch": 1.9752492877492878, "grad_norm": 0.6781140565872192, "learning_rate": 0.00010211346092867056, "loss": 0.9988, "step": 11094 }, { "epoch": 1.9754273504273505, "grad_norm": 0.6959224343299866, "learning_rate": 0.00010209946656232949, "loss": 1.1097, "step": 11095 }, { "epoch": 1.9756054131054133, "grad_norm": 0.7205058336257935, "learning_rate": 0.00010208547215485376, "loss": 0.9951, "step": 11096 }, { "epoch": 1.9757834757834758, "grad_norm": 0.6968751549720764, "learning_rate": 0.00010207147770651748, "loss": 0.9313, "step": 11097 }, { "epoch": 1.9759615384615383, "grad_norm": 0.6688823103904724, "learning_rate": 0.00010205748321759494, "loss": 0.9439, "step": 11098 }, { "epoch": 1.976139601139601, "grad_norm": 0.6169568300247192, "learning_rate": 0.00010204348868836028, "loss": 1.123, "step": 11099 }, { "epoch": 1.9763176638176638, "grad_norm": 0.6995537281036377, "learning_rate": 0.00010202949411908768, "loss": 1.1928, "step": 11100 }, { "epoch": 1.9764957264957266, "grad_norm": 0.7102637887001038, "learning_rate": 0.00010201549951005138, "loss": 1.0265, "step": 11101 }, { "epoch": 1.9766737891737893, "grad_norm": 0.6820045113563538, "learning_rate": 0.00010200150486152558, "loss": 0.9309, "step": 11102 }, { "epoch": 1.9768518518518519, "grad_norm": 0.7050938010215759, "learning_rate": 0.00010198751017378443, "loss": 1.0047, "step": 11103 }, { "epoch": 
1.9770299145299144, "grad_norm": 0.6418201923370361, "learning_rate": 0.00010197351544710214, "loss": 1.1172, "step": 11104 }, { "epoch": 1.9772079772079771, "grad_norm": 0.6681215763092041, "learning_rate": 0.0001019595206817529, "loss": 1.0621, "step": 11105 }, { "epoch": 1.97738603988604, "grad_norm": 0.7725709676742554, "learning_rate": 0.00010194552587801094, "loss": 1.0044, "step": 11106 }, { "epoch": 1.9775641025641026, "grad_norm": 0.6870455741882324, "learning_rate": 0.00010193153103615045, "loss": 1.2652, "step": 11107 }, { "epoch": 1.9777421652421654, "grad_norm": 0.6352108120918274, "learning_rate": 0.00010191753615644561, "loss": 1.1081, "step": 11108 }, { "epoch": 1.977920227920228, "grad_norm": 0.7322626113891602, "learning_rate": 0.00010190354123917066, "loss": 1.0003, "step": 11109 }, { "epoch": 1.9780982905982905, "grad_norm": 0.6240935921669006, "learning_rate": 0.00010188954628459972, "loss": 0.8925, "step": 11110 }, { "epoch": 1.9782763532763532, "grad_norm": 0.6648945212364197, "learning_rate": 0.00010187555129300708, "loss": 1.0882, "step": 11111 }, { "epoch": 1.978454415954416, "grad_norm": 0.6704208850860596, "learning_rate": 0.00010186155626466692, "loss": 0.8873, "step": 11112 }, { "epoch": 1.9786324786324787, "grad_norm": 0.6716459393501282, "learning_rate": 0.00010184756119985341, "loss": 1.0045, "step": 11113 }, { "epoch": 1.9788105413105415, "grad_norm": 0.81277996301651, "learning_rate": 0.0001018335660988408, "loss": 0.8867, "step": 11114 }, { "epoch": 1.978988603988604, "grad_norm": 0.7008311748504639, "learning_rate": 0.00010181957096190323, "loss": 0.9391, "step": 11115 }, { "epoch": 1.9791666666666665, "grad_norm": 0.727676272392273, "learning_rate": 0.00010180557578931498, "loss": 1.0157, "step": 11116 }, { "epoch": 1.9793447293447293, "grad_norm": 0.7058015465736389, "learning_rate": 0.00010179158058135018, "loss": 1.0, "step": 11117 }, { "epoch": 1.979522792022792, "grad_norm": 0.7770412564277649, "learning_rate": 
0.00010177758533828312, "loss": 1.0428, "step": 11118 }, { "epoch": 1.9797008547008548, "grad_norm": 0.6557414531707764, "learning_rate": 0.00010176359006038798, "loss": 0.8557, "step": 11119 }, { "epoch": 1.9798789173789175, "grad_norm": 0.7681090235710144, "learning_rate": 0.00010174959474793894, "loss": 0.867, "step": 11120 }, { "epoch": 1.98005698005698, "grad_norm": 0.7915860414505005, "learning_rate": 0.0001017355994012102, "loss": 0.9961, "step": 11121 }, { "epoch": 1.9802350427350426, "grad_norm": 0.8039166927337646, "learning_rate": 0.00010172160402047604, "loss": 1.1378, "step": 11122 }, { "epoch": 1.9804131054131053, "grad_norm": 0.6641189455986023, "learning_rate": 0.0001017076086060106, "loss": 0.8914, "step": 11123 }, { "epoch": 1.980591168091168, "grad_norm": 0.7673811316490173, "learning_rate": 0.00010169361315808812, "loss": 1.018, "step": 11124 }, { "epoch": 1.9807692307692308, "grad_norm": 0.7320558428764343, "learning_rate": 0.00010167961767698279, "loss": 1.0515, "step": 11125 }, { "epoch": 1.9809472934472936, "grad_norm": 0.5717357993125916, "learning_rate": 0.00010166562216296886, "loss": 0.7619, "step": 11126 }, { "epoch": 1.9811253561253561, "grad_norm": 0.6638465523719788, "learning_rate": 0.00010165162661632052, "loss": 1.0161, "step": 11127 }, { "epoch": 1.9813034188034186, "grad_norm": 0.7293243408203125, "learning_rate": 0.00010163763103731201, "loss": 1.063, "step": 11128 }, { "epoch": 1.9814814814814814, "grad_norm": 0.634694516658783, "learning_rate": 0.00010162363542621752, "loss": 0.8945, "step": 11129 }, { "epoch": 1.9816595441595442, "grad_norm": 0.7086902856826782, "learning_rate": 0.00010160963978331122, "loss": 1.0542, "step": 11130 }, { "epoch": 1.981837606837607, "grad_norm": 0.5939825773239136, "learning_rate": 0.00010159564410886742, "loss": 0.7822, "step": 11131 }, { "epoch": 1.9820156695156697, "grad_norm": 0.722183346748352, "learning_rate": 0.00010158164840316027, "loss": 1.0252, "step": 11132 }, { "epoch": 
1.9821937321937322, "grad_norm": 0.7300103306770325, "learning_rate": 0.000101567652666464, "loss": 0.9099, "step": 11133 }, { "epoch": 1.9823717948717947, "grad_norm": 0.7148736119270325, "learning_rate": 0.00010155365689905285, "loss": 1.0149, "step": 11134 }, { "epoch": 1.9825498575498575, "grad_norm": 0.8214462995529175, "learning_rate": 0.000101539661101201, "loss": 1.0127, "step": 11135 }, { "epoch": 1.9827279202279202, "grad_norm": 0.7111126780509949, "learning_rate": 0.00010152566527318265, "loss": 1.045, "step": 11136 }, { "epoch": 1.982905982905983, "grad_norm": 0.6640021800994873, "learning_rate": 0.00010151166941527213, "loss": 0.9618, "step": 11137 }, { "epoch": 1.9830840455840457, "grad_norm": 0.7177722454071045, "learning_rate": 0.00010149767352774358, "loss": 1.0373, "step": 11138 }, { "epoch": 1.9832621082621082, "grad_norm": 0.6728883981704712, "learning_rate": 0.00010148367761087121, "loss": 0.9886, "step": 11139 }, { "epoch": 1.9834401709401708, "grad_norm": 0.7060428857803345, "learning_rate": 0.00010146968166492926, "loss": 1.042, "step": 11140 }, { "epoch": 1.9836182336182335, "grad_norm": 0.706253707408905, "learning_rate": 0.00010145568569019192, "loss": 1.2249, "step": 11141 }, { "epoch": 1.9837962962962963, "grad_norm": 0.618221640586853, "learning_rate": 0.00010144168968693348, "loss": 0.9223, "step": 11142 }, { "epoch": 1.983974358974359, "grad_norm": 0.7005748748779297, "learning_rate": 0.00010142769365542814, "loss": 1.2735, "step": 11143 }, { "epoch": 1.9841524216524218, "grad_norm": 0.6059799194335938, "learning_rate": 0.0001014136975959501, "loss": 0.7216, "step": 11144 }, { "epoch": 1.9843304843304843, "grad_norm": 0.7169116735458374, "learning_rate": 0.00010139970150877358, "loss": 0.9541, "step": 11145 }, { "epoch": 1.984508547008547, "grad_norm": 0.7402058839797974, "learning_rate": 0.00010138570539417281, "loss": 1.1268, "step": 11146 }, { "epoch": 1.9846866096866096, "grad_norm": 0.7204117178916931, "learning_rate": 
0.00010137170925242201, "loss": 1.1557, "step": 11147 }, { "epoch": 1.9848646723646723, "grad_norm": 0.589163064956665, "learning_rate": 0.00010135771308379545, "loss": 0.9863, "step": 11148 }, { "epoch": 1.985042735042735, "grad_norm": 0.6342785358428955, "learning_rate": 0.00010134371688856732, "loss": 0.9294, "step": 11149 }, { "epoch": 1.9852207977207978, "grad_norm": 0.7144256234169006, "learning_rate": 0.00010132972066701183, "loss": 0.9428, "step": 11150 }, { "epoch": 1.9853988603988604, "grad_norm": 0.658032238483429, "learning_rate": 0.00010131572441940322, "loss": 0.9749, "step": 11151 }, { "epoch": 1.9855769230769231, "grad_norm": 0.7609163522720337, "learning_rate": 0.00010130172814601576, "loss": 1.1771, "step": 11152 }, { "epoch": 1.9857549857549857, "grad_norm": 0.6531760692596436, "learning_rate": 0.00010128773184712361, "loss": 0.8529, "step": 11153 }, { "epoch": 1.9859330484330484, "grad_norm": 0.6983599066734314, "learning_rate": 0.00010127373552300103, "loss": 1.0307, "step": 11154 }, { "epoch": 1.9861111111111112, "grad_norm": 0.7121559381484985, "learning_rate": 0.00010125973917392224, "loss": 0.9426, "step": 11155 }, { "epoch": 1.986289173789174, "grad_norm": 0.6282170414924622, "learning_rate": 0.0001012457428001615, "loss": 0.8983, "step": 11156 }, { "epoch": 1.9864672364672364, "grad_norm": 0.6960387825965881, "learning_rate": 0.000101231746401993, "loss": 0.9001, "step": 11157 }, { "epoch": 1.9866452991452992, "grad_norm": 0.7523152232170105, "learning_rate": 0.000101217749979691, "loss": 1.3462, "step": 11158 }, { "epoch": 1.9868233618233617, "grad_norm": 0.71713787317276, "learning_rate": 0.00010120375353352971, "loss": 1.0147, "step": 11159 }, { "epoch": 1.9870014245014245, "grad_norm": 0.7304390072822571, "learning_rate": 0.00010118975706378339, "loss": 0.8436, "step": 11160 }, { "epoch": 1.9871794871794872, "grad_norm": 0.789968729019165, "learning_rate": 0.00010117576057072622, "loss": 1.1162, "step": 11161 }, { "epoch": 
1.98735754985755, "grad_norm": 0.6752170920372009, "learning_rate": 0.00010116176405463249, "loss": 1.0619, "step": 11162 }, { "epoch": 1.9875356125356125, "grad_norm": 0.681398868560791, "learning_rate": 0.0001011477675157764, "loss": 0.8981, "step": 11163 }, { "epoch": 1.9877136752136753, "grad_norm": 0.61469566822052, "learning_rate": 0.0001011337709544322, "loss": 1.0139, "step": 11164 }, { "epoch": 1.9878917378917378, "grad_norm": 0.7524265050888062, "learning_rate": 0.0001011197743708741, "loss": 1.1571, "step": 11165 }, { "epoch": 1.9880698005698005, "grad_norm": 0.6289594173431396, "learning_rate": 0.00010110577776537633, "loss": 0.93, "step": 11166 }, { "epoch": 1.9882478632478633, "grad_norm": 0.6991903781890869, "learning_rate": 0.00010109178113821318, "loss": 1.1176, "step": 11167 }, { "epoch": 1.988425925925926, "grad_norm": 0.7604053020477295, "learning_rate": 0.00010107778448965883, "loss": 1.0497, "step": 11168 }, { "epoch": 1.9886039886039886, "grad_norm": 0.7166453003883362, "learning_rate": 0.00010106378781998753, "loss": 1.1237, "step": 11169 }, { "epoch": 1.9887820512820513, "grad_norm": 0.6071686744689941, "learning_rate": 0.00010104979112947352, "loss": 0.8934, "step": 11170 }, { "epoch": 1.9889601139601139, "grad_norm": 0.6618169546127319, "learning_rate": 0.00010103579441839101, "loss": 1.0596, "step": 11171 }, { "epoch": 1.9891381766381766, "grad_norm": 0.6838458776473999, "learning_rate": 0.0001010217976870143, "loss": 1.0167, "step": 11172 }, { "epoch": 1.9893162393162394, "grad_norm": 0.6369979381561279, "learning_rate": 0.00010100780093561757, "loss": 0.9001, "step": 11173 }, { "epoch": 1.989494301994302, "grad_norm": 0.661313533782959, "learning_rate": 0.00010099380416447508, "loss": 0.8952, "step": 11174 }, { "epoch": 1.9896723646723646, "grad_norm": 0.6991600394248962, "learning_rate": 0.00010097980737386106, "loss": 1.0083, "step": 11175 }, { "epoch": 1.9898504273504274, "grad_norm": 0.618748664855957, "learning_rate": 
0.00010096581056404972, "loss": 0.8797, "step": 11176 }, { "epoch": 1.99002849002849, "grad_norm": 0.7039223909378052, "learning_rate": 0.00010095181373531535, "loss": 1.0385, "step": 11177 }, { "epoch": 1.9902065527065527, "grad_norm": 0.7598999738693237, "learning_rate": 0.00010093781688793216, "loss": 0.9205, "step": 11178 }, { "epoch": 1.9903846153846154, "grad_norm": 0.6355955600738525, "learning_rate": 0.00010092382002217441, "loss": 0.8646, "step": 11179 }, { "epoch": 1.9905626780626782, "grad_norm": 0.8024569153785706, "learning_rate": 0.00010090982313831634, "loss": 1.1678, "step": 11180 }, { "epoch": 1.9907407407407407, "grad_norm": 0.5960529446601868, "learning_rate": 0.00010089582623663216, "loss": 0.8277, "step": 11181 }, { "epoch": 1.9909188034188035, "grad_norm": 0.6323728561401367, "learning_rate": 0.00010088182931739609, "loss": 0.948, "step": 11182 }, { "epoch": 1.991096866096866, "grad_norm": 0.7532381415367126, "learning_rate": 0.00010086783238088244, "loss": 1.2948, "step": 11183 }, { "epoch": 1.9912749287749287, "grad_norm": 0.5740166306495667, "learning_rate": 0.00010085383542736543, "loss": 0.7019, "step": 11184 }, { "epoch": 1.9914529914529915, "grad_norm": 0.616985559463501, "learning_rate": 0.00010083983845711929, "loss": 1.0802, "step": 11185 }, { "epoch": 1.9916310541310542, "grad_norm": 0.7505929470062256, "learning_rate": 0.00010082584147041824, "loss": 1.0523, "step": 11186 }, { "epoch": 1.9918091168091168, "grad_norm": 0.7147656679153442, "learning_rate": 0.00010081184446753653, "loss": 1.0019, "step": 11187 }, { "epoch": 1.9919871794871795, "grad_norm": 0.7301992774009705, "learning_rate": 0.00010079784744874845, "loss": 1.0329, "step": 11188 }, { "epoch": 1.992165242165242, "grad_norm": 0.6847206354141235, "learning_rate": 0.00010078385041432819, "loss": 1.0367, "step": 11189 }, { "epoch": 1.9923433048433048, "grad_norm": 0.7310990691184998, "learning_rate": 0.00010076985336455, "loss": 1.1675, "step": 11190 }, { "epoch": 
1.9925213675213675, "grad_norm": 0.6916858553886414, "learning_rate": 0.00010075585629968813, "loss": 0.8615, "step": 11191 }, { "epoch": 1.9926994301994303, "grad_norm": 0.6519390344619751, "learning_rate": 0.00010074185922001685, "loss": 0.8105, "step": 11192 }, { "epoch": 1.9928774928774928, "grad_norm": 0.7437400817871094, "learning_rate": 0.00010072786212581036, "loss": 0.9993, "step": 11193 }, { "epoch": 1.9930555555555556, "grad_norm": 0.5048928260803223, "learning_rate": 0.00010071386501734292, "loss": 0.7912, "step": 11194 }, { "epoch": 1.993233618233618, "grad_norm": 0.8042343258857727, "learning_rate": 0.00010069986789488882, "loss": 0.9156, "step": 11195 }, { "epoch": 1.9934116809116809, "grad_norm": 0.7188669443130493, "learning_rate": 0.0001006858707587222, "loss": 1.0474, "step": 11196 }, { "epoch": 1.9935897435897436, "grad_norm": 0.7377660870552063, "learning_rate": 0.00010067187360911738, "loss": 0.7013, "step": 11197 }, { "epoch": 1.9937678062678064, "grad_norm": 0.6684696078300476, "learning_rate": 0.00010065787644634861, "loss": 0.9199, "step": 11198 }, { "epoch": 1.993945868945869, "grad_norm": 0.7341524958610535, "learning_rate": 0.00010064387927069012, "loss": 1.0925, "step": 11199 }, { "epoch": 1.9941239316239316, "grad_norm": 0.685745120048523, "learning_rate": 0.00010062988208241614, "loss": 1.083, "step": 11200 }, { "epoch": 1.9943019943019942, "grad_norm": 0.6923556327819824, "learning_rate": 0.00010061588488180096, "loss": 1.2728, "step": 11201 }, { "epoch": 1.994480056980057, "grad_norm": 0.6663293242454529, "learning_rate": 0.00010060188766911876, "loss": 1.0937, "step": 11202 }, { "epoch": 1.9946581196581197, "grad_norm": 0.7963639497756958, "learning_rate": 0.00010058789044464383, "loss": 1.0592, "step": 11203 }, { "epoch": 1.9948361823361824, "grad_norm": 0.6362990140914917, "learning_rate": 0.00010057389320865042, "loss": 0.8872, "step": 11204 }, { "epoch": 1.9950142450142452, "grad_norm": 0.7752974033355713, "learning_rate": 
0.00010055989596141278, "loss": 1.043, "step": 11205 }, { "epoch": 1.9951923076923077, "grad_norm": 0.7125133275985718, "learning_rate": 0.00010054589870320512, "loss": 1.0015, "step": 11206 }, { "epoch": 1.9953703703703702, "grad_norm": 0.7102736830711365, "learning_rate": 0.00010053190143430169, "loss": 1.0052, "step": 11207 }, { "epoch": 1.995548433048433, "grad_norm": 0.8628628849983215, "learning_rate": 0.00010051790415497677, "loss": 1.2351, "step": 11208 }, { "epoch": 1.9957264957264957, "grad_norm": 0.7233129739761353, "learning_rate": 0.00010050390686550462, "loss": 1.0848, "step": 11209 }, { "epoch": 1.9959045584045585, "grad_norm": 0.5936228036880493, "learning_rate": 0.00010048990956615944, "loss": 0.7998, "step": 11210 }, { "epoch": 1.9960826210826212, "grad_norm": 0.7345388531684875, "learning_rate": 0.0001004759122572155, "loss": 1.0329, "step": 11211 }, { "epoch": 1.9962606837606838, "grad_norm": 0.7344130873680115, "learning_rate": 0.00010046191493894703, "loss": 1.1563, "step": 11212 }, { "epoch": 1.9964387464387463, "grad_norm": 0.6979942321777344, "learning_rate": 0.00010044791761162833, "loss": 0.9269, "step": 11213 }, { "epoch": 1.996616809116809, "grad_norm": 0.67514967918396, "learning_rate": 0.0001004339202755336, "loss": 0.9028, "step": 11214 }, { "epoch": 1.9967948717948718, "grad_norm": 0.6379111409187317, "learning_rate": 0.00010041992293093712, "loss": 0.7816, "step": 11215 }, { "epoch": 1.9969729344729346, "grad_norm": 0.693976104259491, "learning_rate": 0.00010040592557811308, "loss": 0.8411, "step": 11216 }, { "epoch": 1.9971509971509973, "grad_norm": 0.5952646732330322, "learning_rate": 0.0001003919282173358, "loss": 0.8681, "step": 11217 }, { "epoch": 1.9973290598290598, "grad_norm": 0.7452160716056824, "learning_rate": 0.00010037793084887948, "loss": 1.0198, "step": 11218 }, { "epoch": 1.9975071225071224, "grad_norm": 0.6683938503265381, "learning_rate": 0.00010036393347301841, "loss": 0.8162, "step": 11219 }, { "epoch": 
1.9976851851851851, "grad_norm": 0.6849120855331421, "learning_rate": 0.00010034993609002683, "loss": 1.0668, "step": 11220 }, { "epoch": 1.9978632478632479, "grad_norm": 0.8782517910003662, "learning_rate": 0.00010033593870017897, "loss": 1.222, "step": 11221 }, { "epoch": 1.9980413105413106, "grad_norm": 0.6482772827148438, "learning_rate": 0.00010032194130374908, "loss": 0.7722, "step": 11222 }, { "epoch": 1.9982193732193734, "grad_norm": 0.8595399260520935, "learning_rate": 0.00010030794390101142, "loss": 1.3004, "step": 11223 }, { "epoch": 1.998397435897436, "grad_norm": 0.7258931994438171, "learning_rate": 0.00010029394649224024, "loss": 0.8825, "step": 11224 }, { "epoch": 1.9985754985754984, "grad_norm": 0.6291348934173584, "learning_rate": 0.00010027994907770981, "loss": 0.8681, "step": 11225 }, { "epoch": 1.9987535612535612, "grad_norm": 0.7528844475746155, "learning_rate": 0.00010026595165769434, "loss": 1.1443, "step": 11226 }, { "epoch": 1.998931623931624, "grad_norm": 0.654017984867096, "learning_rate": 0.0001002519542324681, "loss": 0.8585, "step": 11227 }, { "epoch": 1.9991096866096867, "grad_norm": 0.6812533736228943, "learning_rate": 0.00010023795680230532, "loss": 0.8757, "step": 11228 }, { "epoch": 1.9992877492877494, "grad_norm": 0.7120179533958435, "learning_rate": 0.0001002239593674803, "loss": 1.0159, "step": 11229 }, { "epoch": 1.999465811965812, "grad_norm": 0.6943802237510681, "learning_rate": 0.00010020996192826725, "loss": 1.0193, "step": 11230 }, { "epoch": 1.9996438746438745, "grad_norm": 0.7227906584739685, "learning_rate": 0.00010019596448494047, "loss": 1.1536, "step": 11231 }, { "epoch": 1.9998219373219372, "grad_norm": 0.6233312487602234, "learning_rate": 0.00010018196703777411, "loss": 0.9117, "step": 11232 }, { "epoch": 1.9998219373219372, "eval_loss": 1.0963108539581299, "eval_runtime": 24.4478, "eval_samples_per_second": 42.58, "eval_steps_per_second": 21.311, "step": 11232 } ], "logging_steps": 1, "max_steps": 22464, 
"num_input_tokens_seen": 0, "num_train_epochs": 4, "save_steps": 5616, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 4.3382624499400704e+17, "train_batch_size": 2, "trial_name": null, "trial_params": null }