diff --git "a/sft_full/hyperrouter/trainer_state.json" "b/sft_full/hyperrouter/trainer_state.json" new file mode 100644--- /dev/null +++ "b/sft_full/hyperrouter/trainer_state.json" @@ -0,0 +1,77658 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.9999549082382648, + "eval_steps": 500, + "global_step": 11088, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 9.018352347026198e-05, + "grad_norm": 21.170219463639764, + "learning_rate": 0.0, + "loss": 1.8276, + "step": 1 + }, + { + "epoch": 0.00018036704694052397, + "grad_norm": 17.21192954897353, + "learning_rate": 4.773623799730706e-07, + "loss": 1.7923, + "step": 2 + }, + { + "epoch": 0.0002705505704107859, + "grad_norm": 13.285309581774666, + "learning_rate": 7.566014715123208e-07, + "loss": 1.7026, + "step": 3 + }, + { + "epoch": 0.00036073409388104793, + "grad_norm": 16.88982367577673, + "learning_rate": 9.547247599461412e-07, + "loss": 1.8875, + "step": 4 + }, + { + "epoch": 0.0004509176173513099, + "grad_norm": 14.895143349768617, + "learning_rate": 1.108401121501769e-06, + "loss": 1.5898, + "step": 5 + }, + { + "epoch": 0.0005411011408215718, + "grad_norm": 13.98419277988823, + "learning_rate": 1.2339638514853914e-06, + "loss": 1.7509, + "step": 6 + }, + { + "epoch": 0.0006312846642918339, + "grad_norm": 12.585092451068968, + "learning_rate": 1.3401256270225321e-06, + "loss": 1.5623, + "step": 7 + }, + { + "epoch": 0.0007214681877620959, + "grad_norm": 10.430589775461637, + "learning_rate": 1.4320871399192119e-06, + "loss": 1.5204, + "step": 8 + }, + { + "epoch": 0.0008116517112323579, + "grad_norm": 7.855105019394881, + "learning_rate": 1.5132029430246416e-06, + "loss": 1.4845, + "step": 9 + }, + { + "epoch": 0.0009018352347026198, + "grad_norm": 8.35117926579202, + "learning_rate": 1.5857635014748399e-06, + "loss": 1.5665, + "step": 10 + }, + { + "epoch": 0.0009920187581728818, + "grad_norm": 8.585051528162829, + "learning_rate": 1.6514025108267924e-06, + "loss": 1.6114, + "step": 11 + }, + { + "epoch": 0.0010822022816431437, + "grad_norm": 7.432888877593314, + "learning_rate": 1.711326231458462e-06, + "loss": 1.5641, + "step": 12 + }, + { + "epoch": 0.0011723858051134058, + "grad_norm": 6.081750254427303, + "learning_rate": 1.7664507107987104e-06, + "loss": 1.4342, + "step": 13 + }, + { + "epoch": 0.0012625693285836677, + "grad_norm": 5.4376254012457, + "learning_rate": 1.8174880069956024e-06, + "loss": 1.4224, + "step": 14 + }, + { + "epoch": 0.0013527528520539298, + "grad_norm": 5.794374816190099, + "learning_rate": 1.8650025930140899e-06, + "loss": 1.3875, + "step": 15 + }, + { + "epoch": 0.0014429363755241917, + "grad_norm": 5.226310689701959, + "learning_rate": 1.9094495198922823e-06, + "loss": 1.4842, + "step": 16 + }, + { + "epoch": 0.0015331198989944536, + "grad_norm": 11.693238507027322, + "learning_rate": 1.9512009899507514e-06, + "loss": 1.3464, + "step": 17 + }, + { + "epoch": 0.0016233034224647158, + "grad_norm": 4.5572272363377015, + "learning_rate": 1.990565322997712e-06, + "loss": 1.3952, + "step": 18 + }, + { + "epoch": 0.0017134869459349777, + "grad_norm": 4.325581929301491, + "learning_rate": 2.027800787770518e-06, + "loss": 1.307, + "step": 19 + }, + { + "epoch": 0.0018036704694052396, + "grad_norm": 4.832334985338088, + "learning_rate": 2.06312588144791e-06, + "loss": 1.3976, + "step": 20 + }, + { + "epoch": 0.0018938539928755017, + "grad_norm": 3.24478773154586, + "learning_rate": 
2.0967270985348526e-06, + "loss": 1.3244, + "step": 21 + }, + { + "epoch": 0.0019840375163457636, + "grad_norm": 2.8793462988928153, + "learning_rate": 2.128764890799863e-06, + "loss": 1.2159, + "step": 22 + }, + { + "epoch": 0.0020742210398160257, + "grad_norm": 2.752098323329749, + "learning_rate": 2.1593783012990145e-06, + "loss": 1.2168, + "step": 23 + }, + { + "epoch": 0.0021644045632862874, + "grad_norm": 2.668359507238368, + "learning_rate": 2.188688611431533e-06, + "loss": 1.3579, + "step": 24 + }, + { + "epoch": 0.0022545880867565495, + "grad_norm": 2.6237738334078515, + "learning_rate": 2.216802243003538e-06, + "loss": 1.3326, + "step": 25 + }, + { + "epoch": 0.0023447716102268116, + "grad_norm": 6.715751511139688, + "learning_rate": 2.243813090771781e-06, + "loss": 1.1712, + "step": 26 + }, + { + "epoch": 0.0024349551336970737, + "grad_norm": 2.4729884016456833, + "learning_rate": 2.269804414536962e-06, + "loss": 1.3425, + "step": 27 + }, + { + "epoch": 0.0025251386571673354, + "grad_norm": 2.3819258243981003, + "learning_rate": 2.2948503869686733e-06, + "loss": 1.2417, + "step": 28 + }, + { + "epoch": 0.0026153221806375975, + "grad_norm": 3.2332916656903015, + "learning_rate": 2.3190173696980436e-06, + "loss": 0.9887, + "step": 29 + }, + { + "epoch": 0.0027055057041078597, + "grad_norm": 2.8560077455987267, + "learning_rate": 2.3423649729871604e-06, + "loss": 1.2477, + "step": 30 + }, + { + "epoch": 0.0027956892275781214, + "grad_norm": 2.048173216088377, + "learning_rate": 2.364946941580084e-06, + "loss": 1.2309, + "step": 31 + }, + { + "epoch": 0.0028858727510483835, + "grad_norm": 2.1344144624829196, + "learning_rate": 2.3868118998653532e-06, + "loss": 1.2913, + "step": 32 + }, + { + "epoch": 0.0029760562745186456, + "grad_norm": 2.5640359322742357, + "learning_rate": 2.408003982339113e-06, + "loss": 0.9405, + "step": 33 + }, + { + "epoch": 0.0030662397979889073, + "grad_norm": 1.559340983843509, + "learning_rate": 2.4285633699238223e-06, + "loss": 1.1402, + "step": 34 + }, + { + "epoch": 0.0031564233214591694, + "grad_norm": 2.7889137548965146, + "learning_rate": 2.4485267485243007e-06, + "loss": 1.1781, + "step": 35 + }, + { + "epoch": 0.0032466068449294315, + "grad_norm": 1.74388053979678, + "learning_rate": 2.467927702970783e-06, + "loss": 1.2152, + "step": 36 + }, + { + "epoch": 0.003336790368399693, + "grad_norm": 1.6500901168720539, + "learning_rate": 2.4867970569753584e-06, + "loss": 1.2407, + "step": 37 + }, + { + "epoch": 0.0034269738918699553, + "grad_norm": 2.1663942410300945, + "learning_rate": 2.5051631677435883e-06, + "loss": 1.0715, + "step": 38 + }, + { + "epoch": 0.0035171574153402174, + "grad_norm": 2.3965601827481176, + "learning_rate": 2.523052182311031e-06, + "loss": 1.0502, + "step": 39 + }, + { + "epoch": 0.003607340938810479, + "grad_norm": 1.971322997101687, + "learning_rate": 2.540488261420981e-06, + "loss": 1.2499, + "step": 40 + }, + { + "epoch": 0.0036975244622807412, + "grad_norm": 2.1633839559093606, + "learning_rate": 2.557493775753984e-06, + "loss": 1.0498, + "step": 41 + }, + { + "epoch": 0.0037877079857510034, + "grad_norm": 1.6195278463958145, + "learning_rate": 2.5740894785079235e-06, + "loss": 1.161, + "step": 42 + }, + { + "epoch": 0.0038778915092212655, + "grad_norm": 1.8098688430054362, + "learning_rate": 2.5902946576685834e-06, + "loss": 1.1762, + "step": 43 + }, + { + "epoch": 0.003968075032691527, + "grad_norm": 4.901376818860083, + "learning_rate": 2.606127270772933e-06, + "loss": 1.087, + "step": 44 + }, + { + "epoch": 
0.004058258556161789, + "grad_norm": 2.551541599204431, + "learning_rate": 2.62160406452641e-06, + "loss": 1.0625, + "step": 45 + }, + { + "epoch": 0.004148442079632051, + "grad_norm": 1.6610367179224976, + "learning_rate": 2.636740681272085e-06, + "loss": 1.2332, + "step": 46 + }, + { + "epoch": 0.004238625603102313, + "grad_norm": 1.8030917556316108, + "learning_rate": 2.651551754008722e-06, + "loss": 1.2694, + "step": 47 + }, + { + "epoch": 0.004328809126572575, + "grad_norm": 1.838919178475389, + "learning_rate": 2.6660509914046035e-06, + "loss": 1.1956, + "step": 48 + }, + { + "epoch": 0.004418992650042837, + "grad_norm": 2.074918524739673, + "learning_rate": 2.6802512540450642e-06, + "loss": 1.1602, + "step": 49 + }, + { + "epoch": 0.004509176173513099, + "grad_norm": 2.5930831235720913, + "learning_rate": 2.694164622976609e-06, + "loss": 1.1561, + "step": 50 + }, + { + "epoch": 0.0045993596969833616, + "grad_norm": 1.7506399347008048, + "learning_rate": 2.707802461463072e-06, + "loss": 1.0856, + "step": 51 + }, + { + "epoch": 0.004689543220453623, + "grad_norm": 2.8608696371683178, + "learning_rate": 2.7211754707448516e-06, + "loss": 1.1257, + "step": 52 + }, + { + "epoch": 0.004779726743923885, + "grad_norm": 1.9772885764392643, + "learning_rate": 2.734293740486721e-06, + "loss": 1.1112, + "step": 53 + }, + { + "epoch": 0.0048699102673941475, + "grad_norm": 1.6386910802286374, + "learning_rate": 2.747166794510033e-06, + "loss": 1.1511, + "step": 54 + }, + { + "epoch": 0.004960093790864409, + "grad_norm": 1.6926446834236977, + "learning_rate": 2.759803632328562e-06, + "loss": 1.116, + "step": 55 + }, + { + "epoch": 0.005050277314334671, + "grad_norm": 1.6577618015026285, + "learning_rate": 2.772212766941744e-06, + "loss": 1.2365, + "step": 56 + }, + { + "epoch": 0.005140460837804933, + "grad_norm": 1.7831745124518548, + "learning_rate": 2.7844022592828385e-06, + "loss": 1.2241, + "step": 57 + }, + { + "epoch": 0.005230644361275195, + "grad_norm": 3.0658078138183305, + "learning_rate": 2.7963797496711145e-06, + "loss": 1.08, + "step": 58 + }, + { + "epoch": 0.005320827884745457, + "grad_norm": 1.6175375297001313, + "learning_rate": 2.80815248657541e-06, + "loss": 1.1551, + "step": 59 + }, + { + "epoch": 0.005411011408215719, + "grad_norm": 1.714716530775366, + "learning_rate": 2.819727352960231e-06, + "loss": 0.8741, + "step": 60 + }, + { + "epoch": 0.005501194931685981, + "grad_norm": 1.636174295770887, + "learning_rate": 2.8311108904541717e-06, + "loss": 1.1187, + "step": 61 + }, + { + "epoch": 0.005591378455156243, + "grad_norm": 2.6418578751575073, + "learning_rate": 2.842309321553155e-06, + "loss": 1.075, + "step": 62 + }, + { + "epoch": 0.005681561978626505, + "grad_norm": 2.475257601962963, + "learning_rate": 2.8533285700471737e-06, + "loss": 1.0575, + "step": 63 + }, + { + "epoch": 0.005771745502096767, + "grad_norm": 1.7318269036951073, + "learning_rate": 2.8641742798384237e-06, + "loss": 1.1899, + "step": 64 + }, + { + "epoch": 0.005861929025567029, + "grad_norm": 1.6579376115967523, + "learning_rate": 2.874851832300479e-06, + "loss": 1.1446, + "step": 65 + }, + { + "epoch": 0.005952112549037291, + "grad_norm": 2.125529952443708, + "learning_rate": 2.8853663623121834e-06, + "loss": 1.1442, + "step": 66 + }, + { + "epoch": 0.006042296072507553, + "grad_norm": 1.7135125556799695, + "learning_rate": 2.895722773085839e-06, + "loss": 1.1811, + "step": 67 + }, + { + "epoch": 0.0061324795959778146, + "grad_norm": 1.5767605614611628, + "learning_rate": 2.905925749896893e-06, + 
"loss": 1.1403, + "step": 68 + }, + { + "epoch": 0.006222663119448077, + "grad_norm": 2.587374653751743, + "learning_rate": 2.915979772811335e-06, + "loss": 1.1291, + "step": 69 + }, + { + "epoch": 0.006312846642918339, + "grad_norm": 1.7839732183746968, + "learning_rate": 2.925889128497372e-06, + "loss": 1.0869, + "step": 70 + }, + { + "epoch": 0.0064030301663886005, + "grad_norm": 2.043890254723098, + "learning_rate": 2.9356579211992906e-06, + "loss": 1.1788, + "step": 71 + }, + { + "epoch": 0.006493213689858863, + "grad_norm": 2.1693749618420366, + "learning_rate": 2.9452900829438533e-06, + "loss": 1.0639, + "step": 72 + }, + { + "epoch": 0.006583397213329125, + "grad_norm": 2.655974878761113, + "learning_rate": 2.954789383042727e-06, + "loss": 1.0918, + "step": 73 + }, + { + "epoch": 0.006673580736799386, + "grad_norm": 2.236138135010916, + "learning_rate": 2.9641594369484293e-06, + "loss": 0.9273, + "step": 74 + }, + { + "epoch": 0.006763764260269649, + "grad_norm": 1.7544529663852304, + "learning_rate": 2.9734037145158586e-06, + "loss": 1.1183, + "step": 75 + }, + { + "epoch": 0.006853947783739911, + "grad_norm": 1.5527814366148347, + "learning_rate": 2.982525547716659e-06, + "loss": 1.1153, + "step": 76 + }, + { + "epoch": 0.006944131307210172, + "grad_norm": 2.5562932278958304, + "learning_rate": 2.9915281378493246e-06, + "loss": 1.0873, + "step": 77 + }, + { + "epoch": 0.007034314830680435, + "grad_norm": 1.81038180260255, + "learning_rate": 3.000414562284102e-06, + "loss": 1.1075, + "step": 78 + }, + { + "epoch": 0.0071244983541506966, + "grad_norm": 1.5449719523380845, + "learning_rate": 3.009187780778246e-06, + "loss": 1.1048, + "step": 79 + }, + { + "epoch": 0.007214681877620958, + "grad_norm": 2.082930322449585, + "learning_rate": 3.017850641394051e-06, + "loss": 1.1023, + "step": 80 + }, + { + "epoch": 0.007304865401091221, + "grad_norm": 1.598051810321073, + "learning_rate": 3.0264058860492832e-06, + "loss": 0.9743, + "step": 81 + }, + { + "epoch": 0.0073950489245614825, + "grad_norm": 1.8226807214538752, + "learning_rate": 3.0348561557270548e-06, + "loss": 1.1542, + "step": 82 + }, + { + "epoch": 0.007485232448031745, + "grad_norm": 1.1297221156049564, + "learning_rate": 3.043203995369939e-06, + "loss": 0.8781, + "step": 83 + }, + { + "epoch": 0.007575415971502007, + "grad_norm": 1.6828497639021733, + "learning_rate": 3.051451858480994e-06, + "loss": 1.1895, + "step": 84 + }, + { + "epoch": 0.007665599494972268, + "grad_norm": 1.9237388319022233, + "learning_rate": 3.05960211145252e-06, + "loss": 1.0222, + "step": 85 + }, + { + "epoch": 0.007755783018442531, + "grad_norm": 1.5234464204967195, + "learning_rate": 3.0676570376416543e-06, + "loss": 1.025, + "step": 86 + }, + { + "epoch": 0.007845966541912792, + "grad_norm": 6.72294478653389, + "learning_rate": 3.0756188412103647e-06, + "loss": 1.1251, + "step": 87 + }, + { + "epoch": 0.007936150065383054, + "grad_norm": 1.8749279008354691, + "learning_rate": 3.083489650746004e-06, + "loss": 1.1083, + "step": 88 + }, + { + "epoch": 0.008026333588853317, + "grad_norm": 1.6822541963953226, + "learning_rate": 3.0912715226772975e-06, + "loss": 1.1212, + "step": 89 + }, + { + "epoch": 0.008116517112323578, + "grad_norm": 1.4215376736763872, + "learning_rate": 3.098966444499481e-06, + "loss": 1.0315, + "step": 90 + }, + { + "epoch": 0.00820670063579384, + "grad_norm": 1.5550247670007438, + "learning_rate": 3.1065763378212426e-06, + "loss": 1.1088, + "step": 91 + }, + { + "epoch": 0.008296884159264103, + "grad_norm": 
1.397215709055961, + "learning_rate": 3.1141030612451554e-06, + "loss": 1.1056, + "step": 92 + }, + { + "epoch": 0.008387067682734364, + "grad_norm": 1.1073285421772685, + "learning_rate": 3.1215484130924052e-06, + "loss": 0.9232, + "step": 93 + }, + { + "epoch": 0.008477251206204626, + "grad_norm": 1.910456783937029, + "learning_rate": 3.128914133981793e-06, + "loss": 1.1109, + "step": 94 + }, + { + "epoch": 0.008567434729674889, + "grad_norm": 1.8844336024545774, + "learning_rate": 3.136201909272287e-06, + "loss": 1.1182, + "step": 95 + }, + { + "epoch": 0.00865761825314515, + "grad_norm": 1.6988340446560248, + "learning_rate": 3.1434133713776735e-06, + "loss": 1.0663, + "step": 96 + }, + { + "epoch": 0.008747801776615412, + "grad_norm": 1.7506502850635994, + "learning_rate": 3.15055010196128e-06, + "loss": 1.1, + "step": 97 + }, + { + "epoch": 0.008837985300085675, + "grad_norm": 1.762627276431094, + "learning_rate": 3.157613634018135e-06, + "loss": 1.0864, + "step": 98 + }, + { + "epoch": 0.008928168823555935, + "grad_norm": 1.6187787911347509, + "learning_rate": 3.1646054538514336e-06, + "loss": 1.1117, + "step": 99 + }, + { + "epoch": 0.009018352347026198, + "grad_norm": 1.692721588865366, + "learning_rate": 3.1715270029496797e-06, + "loss": 1.1479, + "step": 100 + }, + { + "epoch": 0.00910853587049646, + "grad_norm": 1.4418135070254883, + "learning_rate": 3.1783796797704243e-06, + "loss": 1.125, + "step": 101 + }, + { + "epoch": 0.009198719393966723, + "grad_norm": 1.724500641899841, + "learning_rate": 3.185164841436142e-06, + "loss": 1.0539, + "step": 102 + }, + { + "epoch": 0.009288902917436984, + "grad_norm": 5.928839219524071, + "learning_rate": 3.1918838053473723e-06, + "loss": 1.0886, + "step": 103 + }, + { + "epoch": 0.009379086440907246, + "grad_norm": 1.4822591860713812, + "learning_rate": 3.198537850717922e-06, + "loss": 1.0013, + "step": 104 + }, + { + "epoch": 0.009469269964377509, + "grad_norm": 1.6880612323104696, + "learning_rate": 3.205128220036622e-06, + "loss": 1.0492, + "step": 105 + }, + { + "epoch": 0.00955945348784777, + "grad_norm": 1.6470370015734321, + "learning_rate": 3.2116561204597917e-06, + "loss": 1.0404, + "step": 106 + }, + { + "epoch": 0.009649637011318032, + "grad_norm": 1.0533120688171174, + "learning_rate": 3.218122725138335e-06, + "loss": 0.9136, + "step": 107 + }, + { + "epoch": 0.009739820534788295, + "grad_norm": 1.8607872480308119, + "learning_rate": 3.224529174483104e-06, + "loss": 1.0818, + "step": 108 + }, + { + "epoch": 0.009830004058258556, + "grad_norm": 3.175430484934041, + "learning_rate": 3.2308765773719435e-06, + "loss": 1.0216, + "step": 109 + }, + { + "epoch": 0.009920187581728818, + "grad_norm": 1.6493098405035027, + "learning_rate": 3.2371660123016323e-06, + "loss": 1.0798, + "step": 110 + }, + { + "epoch": 0.010010371105199081, + "grad_norm": 1.6798213461843006, + "learning_rate": 3.2433985284876787e-06, + "loss": 1.0206, + "step": 111 + }, + { + "epoch": 0.010100554628669342, + "grad_norm": 1.435254457754878, + "learning_rate": 3.2495751469148143e-06, + "loss": 1.0744, + "step": 112 + }, + { + "epoch": 0.010190738152139604, + "grad_norm": 2.168186925958746, + "learning_rate": 3.2556968613407816e-06, + "loss": 1.0506, + "step": 113 + }, + { + "epoch": 0.010280921675609867, + "grad_norm": 1.9154769342398406, + "learning_rate": 3.2617646392559094e-06, + "loss": 1.1113, + "step": 114 + }, + { + "epoch": 0.010371105199080128, + "grad_norm": 1.682423124895236, + "learning_rate": 3.2677794228007836e-06, + "loss": 1.1215, + "step": 
115 + }, + { + "epoch": 0.01046128872255039, + "grad_norm": 1.6484865298821465, + "learning_rate": 3.273742129644185e-06, + "loss": 1.0561, + "step": 116 + }, + { + "epoch": 0.010551472246020653, + "grad_norm": 2.1790330786327865, + "learning_rate": 3.279653653823352e-06, + "loss": 1.0779, + "step": 117 + }, + { + "epoch": 0.010641655769490914, + "grad_norm": 2.111853379824456, + "learning_rate": 3.285514866548481e-06, + "loss": 1.0814, + "step": 118 + }, + { + "epoch": 0.010731839292961176, + "grad_norm": 1.7221917804297266, + "learning_rate": 3.2913266169732838e-06, + "loss": 1.1411, + "step": 119 + }, + { + "epoch": 0.010822022816431439, + "grad_norm": 1.8901344482597446, + "learning_rate": 3.2970897329333017e-06, + "loss": 1.0576, + "step": 120 + }, + { + "epoch": 0.0109122063399017, + "grad_norm": 2.091303064524707, + "learning_rate": 3.302805021653585e-06, + "loss": 1.0548, + "step": 121 + }, + { + "epoch": 0.011002389863371962, + "grad_norm": 3.431897122866371, + "learning_rate": 3.3084732704272426e-06, + "loss": 1.0703, + "step": 122 + }, + { + "epoch": 0.011092573386842225, + "grad_norm": 1.777451262523494, + "learning_rate": 3.314095247266304e-06, + "loss": 1.0654, + "step": 123 + }, + { + "epoch": 0.011182756910312485, + "grad_norm": 1.8917198478964625, + "learning_rate": 3.3196717015262255e-06, + "loss": 1.0389, + "step": 124 + }, + { + "epoch": 0.011272940433782748, + "grad_norm": 1.4929147116694141, + "learning_rate": 3.325203364505307e-06, + "loss": 1.0603, + "step": 125 + }, + { + "epoch": 0.01136312395725301, + "grad_norm": 1.930060408541216, + "learning_rate": 3.3306909500202442e-06, + "loss": 1.1043, + "step": 126 + }, + { + "epoch": 0.011453307480723271, + "grad_norm": 6.212354582345338, + "learning_rate": 3.3361351549589145e-06, + "loss": 1.1162, + "step": 127 + }, + { + "epoch": 0.011543491004193534, + "grad_norm": 2.0890753643635556, + "learning_rate": 3.341536659811494e-06, + "loss": 1.0105, + "step": 128 + }, + { + "epoch": 0.011633674527663796, + "grad_norm": 1.8031356546758228, + "learning_rate": 3.346896129180904e-06, + "loss": 1.0979, + "step": 129 + }, + { + "epoch": 0.011723858051134057, + "grad_norm": 1.9426061963051955, + "learning_rate": 3.35221421227355e-06, + "loss": 1.0705, + "step": 130 + }, + { + "epoch": 0.01181404157460432, + "grad_norm": 2.4892898024744174, + "learning_rate": 3.357491543371255e-06, + "loss": 1.0902, + "step": 131 + }, + { + "epoch": 0.011904225098074582, + "grad_norm": 1.6720346392415464, + "learning_rate": 3.3627287422852543e-06, + "loss": 1.0247, + "step": 132 + }, + { + "epoch": 0.011994408621544843, + "grad_norm": 1.4397182763869287, + "learning_rate": 3.3679264147930497e-06, + "loss": 1.0942, + "step": 133 + }, + { + "epoch": 0.012084592145015106, + "grad_norm": 2.509507411631941, + "learning_rate": 3.37308515305891e-06, + "loss": 1.0142, + "step": 134 + }, + { + "epoch": 0.012174775668485368, + "grad_norm": 1.5315699340415727, + "learning_rate": 3.3782055360387313e-06, + "loss": 1.1326, + "step": 135 + }, + { + "epoch": 0.012264959191955629, + "grad_norm": 1.7417664297026276, + "learning_rate": 3.3832881298699633e-06, + "loss": 1.0333, + "step": 136 + }, + { + "epoch": 0.012355142715425892, + "grad_norm": 1.529142837269449, + "learning_rate": 3.388333488247249e-06, + "loss": 1.0477, + "step": 137 + }, + { + "epoch": 0.012445326238896154, + "grad_norm": 1.538770928817708, + "learning_rate": 3.393342152784406e-06, + "loss": 1.1213, + "step": 138 + }, + { + "epoch": 0.012535509762366415, + "grad_norm": 1.8687553686364669, + 
"learning_rate": 3.3983146533633376e-06, + "loss": 1.089, + "step": 139 + }, + { + "epoch": 0.012625693285836678, + "grad_norm": 3.9165620722230576, + "learning_rate": 3.403251508470442e-06, + "loss": 1.1253, + "step": 140 + }, + { + "epoch": 0.01271587680930694, + "grad_norm": 1.7636801887392297, + "learning_rate": 3.408153225521043e-06, + "loss": 1.0245, + "step": 141 + }, + { + "epoch": 0.012806060332777201, + "grad_norm": 1.933952714371572, + "learning_rate": 3.413020301172361e-06, + "loss": 0.969, + "step": 142 + }, + { + "epoch": 0.012896243856247463, + "grad_norm": 1.5497936880566268, + "learning_rate": 3.4178532216255024e-06, + "loss": 1.0489, + "step": 143 + }, + { + "epoch": 0.012986427379717726, + "grad_norm": 1.9699747423789378, + "learning_rate": 3.422652462916924e-06, + "loss": 1.1052, + "step": 144 + }, + { + "epoch": 0.013076610903187987, + "grad_norm": 1.4298423308035195, + "learning_rate": 3.4274184911998124e-06, + "loss": 0.9803, + "step": 145 + }, + { + "epoch": 0.01316679442665825, + "grad_norm": 1.8650811736617616, + "learning_rate": 3.4321517630157976e-06, + "loss": 1.1528, + "step": 146 + }, + { + "epoch": 0.013256977950128512, + "grad_norm": 1.7446572626832464, + "learning_rate": 3.4368527255573845e-06, + "loss": 1.1299, + "step": 147 + }, + { + "epoch": 0.013347161473598773, + "grad_norm": 1.908727630404213, + "learning_rate": 3.4415218169214994e-06, + "loss": 1.0406, + "step": 148 + }, + { + "epoch": 0.013437344997069035, + "grad_norm": 1.4259736231789406, + "learning_rate": 3.4461594663544882e-06, + "loss": 1.0478, + "step": 149 + }, + { + "epoch": 0.013527528520539298, + "grad_norm": 1.9339768771906707, + "learning_rate": 3.450766094488929e-06, + "loss": 1.0107, + "step": 150 + }, + { + "epoch": 0.013617712044009559, + "grad_norm": 2.8748784246861767, + "learning_rate": 3.4553421135725735e-06, + "loss": 1.1073, + "step": 151 + }, + { + "epoch": 0.013707895567479821, + "grad_norm": 1.643080844220555, + "learning_rate": 3.45988792768973e-06, + "loss": 1.0764, + "step": 152 + }, + { + "epoch": 0.013798079090950084, + "grad_norm": 1.5447392217059752, + "learning_rate": 3.464403932975393e-06, + "loss": 1.1266, + "step": 153 + }, + { + "epoch": 0.013888262614420345, + "grad_norm": 2.517052192295387, + "learning_rate": 3.468890517822395e-06, + "loss": 1.1152, + "step": 154 + }, + { + "epoch": 0.013978446137890607, + "grad_norm": 2.004175714590014, + "learning_rate": 3.473348063081853e-06, + "loss": 1.0614, + "step": 155 + }, + { + "epoch": 0.01406862966136087, + "grad_norm": 1.3921593334632498, + "learning_rate": 3.4777769422571727e-06, + "loss": 1.012, + "step": 156 + }, + { + "epoch": 0.01415881318483113, + "grad_norm": 1.546686565597276, + "learning_rate": 3.4821775216918497e-06, + "loss": 1.0568, + "step": 157 + }, + { + "epoch": 0.014248996708301393, + "grad_norm": 1.93440547050179, + "learning_rate": 3.4865501607513164e-06, + "loss": 1.0002, + "step": 158 + }, + { + "epoch": 0.014339180231771656, + "grad_norm": 1.7351526115204141, + "learning_rate": 3.4908952119990423e-06, + "loss": 1.1337, + "step": 159 + }, + { + "epoch": 0.014429363755241916, + "grad_norm": 1.1068749532155695, + "learning_rate": 3.495213021367122e-06, + "loss": 0.8847, + "step": 160 + }, + { + "epoch": 0.014519547278712179, + "grad_norm": 2.004798616108337, + "learning_rate": 3.4995039283215464e-06, + "loss": 1.0942, + "step": 161 + }, + { + "epoch": 0.014609730802182442, + "grad_norm": 1.6134963365577646, + "learning_rate": 3.5037682660223533e-06, + "loss": 1.0712, + "step": 162 + }, + { + 
"epoch": 0.014699914325652702, + "grad_norm": 1.8417665837083552, + "learning_rate": 3.508006361478857e-06, + "loss": 1.0907, + "step": 163 + }, + { + "epoch": 0.014790097849122965, + "grad_norm": 1.687839403282369, + "learning_rate": 3.5122185357001253e-06, + "loss": 1.0488, + "step": 164 + }, + { + "epoch": 0.014880281372593228, + "grad_norm": 1.9971043963893111, + "learning_rate": 3.5164051038408817e-06, + "loss": 1.1128, + "step": 165 + }, + { + "epoch": 0.01497046489606349, + "grad_norm": 1.6374122770850736, + "learning_rate": 3.5205663753430093e-06, + "loss": 1.1092, + "step": 166 + }, + { + "epoch": 0.015060648419533751, + "grad_norm": 1.8988450480234553, + "learning_rate": 3.5247026540727915e-06, + "loss": 1.1497, + "step": 167 + }, + { + "epoch": 0.015150831943004013, + "grad_norm": 2.768835585672844, + "learning_rate": 3.5288142384540645e-06, + "loss": 0.9993, + "step": 168 + }, + { + "epoch": 0.015241015466474276, + "grad_norm": 2.1658350987101227, + "learning_rate": 3.532901421597421e-06, + "loss": 1.0865, + "step": 169 + }, + { + "epoch": 0.015331198989944537, + "grad_norm": 2.5358641213679047, + "learning_rate": 3.5369644914255915e-06, + "loss": 1.005, + "step": 170 + }, + { + "epoch": 0.0154213825134148, + "grad_norm": 2.0675463352754013, + "learning_rate": 3.5410037307951596e-06, + "loss": 1.0699, + "step": 171 + }, + { + "epoch": 0.015511566036885062, + "grad_norm": 1.7256238638526302, + "learning_rate": 3.545019417614725e-06, + "loss": 1.1178, + "step": 172 + }, + { + "epoch": 0.015601749560355323, + "grad_norm": 1.5888824180806198, + "learning_rate": 3.5490118249596387e-06, + "loss": 1.1022, + "step": 173 + }, + { + "epoch": 0.015691933083825584, + "grad_norm": 1.7208182635197804, + "learning_rate": 3.5529812211834352e-06, + "loss": 1.1119, + "step": 174 + }, + { + "epoch": 0.015782116607295848, + "grad_norm": 1.336287380156004, + "learning_rate": 3.5569278700260707e-06, + "loss": 1.0648, + "step": 175 + }, + { + "epoch": 0.01587230013076611, + "grad_norm": 1.7809983460527352, + "learning_rate": 3.5608520307190746e-06, + "loss": 1.0453, + "step": 176 + }, + { + "epoch": 0.01596248365423637, + "grad_norm": 1.356505291727039, + "learning_rate": 3.564753958087731e-06, + "loss": 1.0006, + "step": 177 + }, + { + "epoch": 0.016052667177706634, + "grad_norm": 1.4724558888743615, + "learning_rate": 3.5686339026503684e-06, + "loss": 1.0685, + "step": 178 + }, + { + "epoch": 0.016142850701176895, + "grad_norm": 2.162373568857598, + "learning_rate": 3.5724921107148806e-06, + "loss": 1.1382, + "step": 179 + }, + { + "epoch": 0.016233034224647155, + "grad_norm": 1.6868554049778115, + "learning_rate": 3.576328824472552e-06, + "loss": 1.1235, + "step": 180 + }, + { + "epoch": 0.01632321774811742, + "grad_norm": 1.6173642564088448, + "learning_rate": 3.5801442820892838e-06, + "loss": 1.1495, + "step": 181 + }, + { + "epoch": 0.01641340127158768, + "grad_norm": 1.2976241188733493, + "learning_rate": 3.583938717794313e-06, + "loss": 1.0835, + "step": 182 + }, + { + "epoch": 0.01650358479505794, + "grad_norm": 1.6615816347614223, + "learning_rate": 3.5877123619664928e-06, + "loss": 1.1104, + "step": 183 + }, + { + "epoch": 0.016593768318528206, + "grad_norm": 1.5766642391167327, + "learning_rate": 3.5914654412182268e-06, + "loss": 1.0801, + "step": 184 + }, + { + "epoch": 0.016683951841998466, + "grad_norm": 1.639582129950205, + "learning_rate": 3.595198178477127e-06, + "loss": 1.0933, + "step": 185 + }, + { + "epoch": 0.016774135365468727, + "grad_norm": 1.5019097092483613, + 
"learning_rate": 3.5989107930654757e-06, + "loss": 0.998, + "step": 186 + }, + { + "epoch": 0.01686431888893899, + "grad_norm": 1.6057944605500047, + "learning_rate": 3.6026035007775437e-06, + "loss": 1.1265, + "step": 187 + }, + { + "epoch": 0.016954502412409252, + "grad_norm": 1.718820119641224, + "learning_rate": 3.6062765139548636e-06, + "loss": 1.0349, + "step": 188 + }, + { + "epoch": 0.017044685935879513, + "grad_norm": 1.6308500917821493, + "learning_rate": 3.6099300415594945e-06, + "loss": 0.9263, + "step": 189 + }, + { + "epoch": 0.017134869459349777, + "grad_norm": 1.7536141676990018, + "learning_rate": 3.6135642892453575e-06, + "loss": 0.943, + "step": 190 + }, + { + "epoch": 0.01722505298282004, + "grad_norm": 1.4329292773932076, + "learning_rate": 3.6171794594277004e-06, + "loss": 1.0771, + "step": 191 + }, + { + "epoch": 0.0173152365062903, + "grad_norm": 1.572728968993739, + "learning_rate": 3.620775751350745e-06, + "loss": 1.014, + "step": 192 + }, + { + "epoch": 0.017405420029760563, + "grad_norm": 2.0484029876106167, + "learning_rate": 3.6243533611535794e-06, + "loss": 1.0977, + "step": 193 + }, + { + "epoch": 0.017495603553230824, + "grad_norm": 2.9197582390394032, + "learning_rate": 3.627912481934351e-06, + "loss": 1.0548, + "step": 194 + }, + { + "epoch": 0.017585787076701085, + "grad_norm": 1.6925579141149223, + "learning_rate": 3.6314533038128e-06, + "loss": 1.032, + "step": 195 + }, + { + "epoch": 0.01767597060017135, + "grad_norm": 0.8948920242879164, + "learning_rate": 3.6349760139912048e-06, + "loss": 0.8584, + "step": 196 + }, + { + "epoch": 0.01776615412364161, + "grad_norm": 1.7079826863503502, + "learning_rate": 3.638480796813769e-06, + "loss": 1.1224, + "step": 197 + }, + { + "epoch": 0.01785633764711187, + "grad_norm": 1.3817887476759012, + "learning_rate": 3.641967833824504e-06, + "loss": 1.061, + "step": 198 + }, + { + "epoch": 0.017946521170582135, + "grad_norm": 1.4363887472174566, + "learning_rate": 3.645437303823663e-06, + "loss": 1.0526, + "step": 199 + }, + { + "epoch": 0.018036704694052396, + "grad_norm": 1.7345025642840117, + "learning_rate": 3.64888938292275e-06, + "loss": 1.0394, + "step": 200 + }, + { + "epoch": 0.01812688821752266, + "grad_norm": 1.6589997695269663, + "learning_rate": 3.6523242445981603e-06, + "loss": 1.0632, + "step": 201 + }, + { + "epoch": 0.01821707174099292, + "grad_norm": 1.3871043783928814, + "learning_rate": 3.655742059743495e-06, + "loss": 1.0306, + "step": 202 + }, + { + "epoch": 0.018307255264463182, + "grad_norm": 1.6044239812134822, + "learning_rate": 3.659142996720576e-06, + "loss": 1.1207, + "step": 203 + }, + { + "epoch": 0.018397438787933446, + "grad_norm": 1.8469498233516355, + "learning_rate": 3.6625272214092135e-06, + "loss": 1.0291, + "step": 204 + }, + { + "epoch": 0.018487622311403707, + "grad_norm": 0.6837398917795217, + "learning_rate": 3.6658948972557535e-06, + "loss": 0.7976, + "step": 205 + }, + { + "epoch": 0.018577805834873968, + "grad_norm": 1.5259021873223948, + "learning_rate": 3.6692461853204432e-06, + "loss": 0.9769, + "step": 206 + }, + { + "epoch": 0.018667989358344232, + "grad_norm": 1.5295445033442379, + "learning_rate": 3.672581244323656e-06, + "loss": 0.9959, + "step": 207 + }, + { + "epoch": 0.018758172881814493, + "grad_norm": 1.5118868633307874, + "learning_rate": 3.6759002306909926e-06, + "loss": 0.9419, + "step": 208 + }, + { + "epoch": 0.018848356405284754, + "grad_norm": 1.3259077075899082, + "learning_rate": 3.67920329859731e-06, + "loss": 1.0699, + "step": 209 + }, + { + 
"epoch": 0.018938539928755018, + "grad_norm": 1.8327835588048287, + "learning_rate": 3.6824906000096923e-06, + "loss": 1.0613, + "step": 210 + }, + { + "epoch": 0.01902872345222528, + "grad_norm": 1.7256513582100421, + "learning_rate": 3.6857622847294067e-06, + "loss": 1.0655, + "step": 211 + }, + { + "epoch": 0.01911890697569554, + "grad_norm": 1.9555732946000717, + "learning_rate": 3.6890185004328626e-06, + "loss": 1.0455, + "step": 212 + }, + { + "epoch": 0.019209090499165804, + "grad_norm": 1.7848847097141696, + "learning_rate": 3.6922593927116113e-06, + "loss": 0.999, + "step": 213 + }, + { + "epoch": 0.019299274022636065, + "grad_norm": 1.9809604613716605, + "learning_rate": 3.695485105111406e-06, + "loss": 1.0491, + "step": 214 + }, + { + "epoch": 0.019389457546106326, + "grad_norm": 2.266120908987782, + "learning_rate": 3.698695779170352e-06, + "loss": 1.0176, + "step": 215 + }, + { + "epoch": 0.01947964106957659, + "grad_norm": 1.5214571870815794, + "learning_rate": 3.7018915544561744e-06, + "loss": 1.046, + "step": 216 + }, + { + "epoch": 0.01956982459304685, + "grad_norm": 1.8899881850988467, + "learning_rate": 3.7050725686026164e-06, + "loss": 1.0072, + "step": 217 + }, + { + "epoch": 0.01966000811651711, + "grad_norm": 1.825579313537785, + "learning_rate": 3.708238957345014e-06, + "loss": 1.1115, + "step": 218 + }, + { + "epoch": 0.019750191639987376, + "grad_norm": 1.3424199641290213, + "learning_rate": 3.7113908545550482e-06, + "loss": 1.0929, + "step": 219 + }, + { + "epoch": 0.019840375163457637, + "grad_norm": 1.4346808279950467, + "learning_rate": 3.7145283922747028e-06, + "loss": 1.0383, + "step": 220 + }, + { + "epoch": 0.019930558686927898, + "grad_norm": 1.4762914499637956, + "learning_rate": 3.7176517007494612e-06, + "loss": 1.1312, + "step": 221 + }, + { + "epoch": 0.020020742210398162, + "grad_norm": 1.513823785849959, + "learning_rate": 3.7207609084607496e-06, + "loss": 0.9942, + "step": 222 + }, + { + "epoch": 0.020110925733868423, + "grad_norm": 3.081632213602809, + "learning_rate": 3.723856142157645e-06, + "loss": 1.0956, + "step": 223 + }, + { + "epoch": 0.020201109257338683, + "grad_norm": 1.6283449079374839, + "learning_rate": 3.726937526887885e-06, + "loss": 1.0577, + "step": 224 + }, + { + "epoch": 0.020291292780808948, + "grad_norm": 1.876571505332004, + "learning_rate": 3.7300051860281798e-06, + "loss": 1.0122, + "step": 225 + }, + { + "epoch": 0.02038147630427921, + "grad_norm": 1.7335237355682687, + "learning_rate": 3.733059241313852e-06, + "loss": 1.1283, + "step": 226 + }, + { + "epoch": 0.02047165982774947, + "grad_norm": 1.5461138769750924, + "learning_rate": 3.736099812867827e-06, + "loss": 1.0454, + "step": 227 + }, + { + "epoch": 0.020561843351219734, + "grad_norm": 1.5887110468713466, + "learning_rate": 3.73912701922898e-06, + "loss": 1.0983, + "step": 228 + }, + { + "epoch": 0.020652026874689994, + "grad_norm": 0.8900856258692695, + "learning_rate": 3.742140977379868e-06, + "loss": 0.8312, + "step": 229 + }, + { + "epoch": 0.020742210398160255, + "grad_norm": 1.4863541108314817, + "learning_rate": 3.745141802773854e-06, + "loss": 1.0835, + "step": 230 + }, + { + "epoch": 0.02083239392163052, + "grad_norm": 1.6139445199896856, + "learning_rate": 3.748129609361645e-06, + "loss": 1.0488, + "step": 231 + }, + { + "epoch": 0.02092257744510078, + "grad_norm": 1.2940118275946453, + "learning_rate": 3.7511045096172555e-06, + "loss": 1.0274, + "step": 232 + }, + { + "epoch": 0.02101276096857104, + "grad_norm": 1.62517964763906, + "learning_rate": 
3.7540666145634137e-06, + "loss": 1.0458, + "step": 233 + }, + { + "epoch": 0.021102944492041305, + "grad_norm": 1.5201552808650485, + "learning_rate": 3.7570160337964225e-06, + "loss": 1.0322, + "step": 234 + }, + { + "epoch": 0.021193128015511566, + "grad_norm": 1.6009288743974595, + "learning_rate": 3.7599528755104913e-06, + "loss": 1.073, + "step": 235 + }, + { + "epoch": 0.021283311538981827, + "grad_norm": 1.7098226878052356, + "learning_rate": 3.7628772465215515e-06, + "loss": 1.0445, + "step": 236 + }, + { + "epoch": 0.02137349506245209, + "grad_norm": 1.471004460279392, + "learning_rate": 3.7657892522905666e-06, + "loss": 1.0395, + "step": 237 + }, + { + "epoch": 0.021463678585922352, + "grad_norm": 5.755951827917994, + "learning_rate": 3.7686889969463542e-06, + "loss": 0.9692, + "step": 238 + }, + { + "epoch": 0.021553862109392613, + "grad_norm": 2.0534400779465276, + "learning_rate": 3.771576583307928e-06, + "loss": 1.0333, + "step": 239 + }, + { + "epoch": 0.021644045632862877, + "grad_norm": 1.8393185264739884, + "learning_rate": 3.7744521129063722e-06, + "loss": 1.0878, + "step": 240 + }, + { + "epoch": 0.021734229156333138, + "grad_norm": 1.6060187889558222, + "learning_rate": 3.7773156860062653e-06, + "loss": 1.0678, + "step": 241 + }, + { + "epoch": 0.0218244126798034, + "grad_norm": 1.8878738942461628, + "learning_rate": 3.7801674016266554e-06, + "loss": 1.1548, + "step": 242 + }, + { + "epoch": 0.021914596203273663, + "grad_norm": 0.8503066785922234, + "learning_rate": 3.7830073575616035e-06, + "loss": 0.7749, + "step": 243 + }, + { + "epoch": 0.022004779726743924, + "grad_norm": 1.4846681004314468, + "learning_rate": 3.785835650400313e-06, + "loss": 1.0811, + "step": 244 + }, + { + "epoch": 0.022094963250214185, + "grad_norm": 1.6466436524012, + "learning_rate": 3.7886523755468334e-06, + "loss": 0.9568, + "step": 245 + }, + { + "epoch": 0.02218514677368445, + "grad_norm": 1.4424231439012658, + "learning_rate": 3.7914576272393746e-06, + "loss": 1.0392, + "step": 246 + }, + { + "epoch": 0.02227533029715471, + "grad_norm": 1.6920746854617321, + "learning_rate": 3.7942514985692284e-06, + "loss": 1.1556, + "step": 247 + }, + { + "epoch": 0.02236551382062497, + "grad_norm": 1.5158685704784822, + "learning_rate": 3.797034081499296e-06, + "loss": 1.0482, + "step": 248 + }, + { + "epoch": 0.022455697344095235, + "grad_norm": 1.3548023883540612, + "learning_rate": 3.7998054668822595e-06, + "loss": 1.0285, + "step": 249 + }, + { + "epoch": 0.022545880867565496, + "grad_norm": 2.071088077015052, + "learning_rate": 3.8025657444783776e-06, + "loss": 1.0225, + "step": 250 + }, + { + "epoch": 0.022636064391035757, + "grad_norm": 1.6509131332410225, + "learning_rate": 3.80531500297293e-06, + "loss": 0.9576, + "step": 251 + }, + { + "epoch": 0.02272624791450602, + "grad_norm": 4.039943812895489, + "learning_rate": 3.8080533299933147e-06, + "loss": 1.0191, + "step": 252 + }, + { + "epoch": 0.022816431437976282, + "grad_norm": 1.598650112536202, + "learning_rate": 3.8107808121258067e-06, + "loss": 1.0099, + "step": 253 + }, + { + "epoch": 0.022906614961446543, + "grad_norm": 1.8323364003677243, + "learning_rate": 3.813497534931985e-06, + "loss": 1.105, + "step": 254 + }, + { + "epoch": 0.022996798484916807, + "grad_norm": 1.5203559422338624, + "learning_rate": 3.816203582964841e-06, + "loss": 1.0673, + "step": 255 + }, + { + "epoch": 0.023086982008387068, + "grad_norm": 1.8694247963515054, + "learning_rate": 3.818899039784565e-06, + "loss": 1.1018, + "step": 256 + }, + { + "epoch": 
0.02317716553185733, + "grad_norm": 1.6003478864121843, + "learning_rate": 3.821583987974031e-06, + "loss": 1.0993, + "step": 257 + }, + { + "epoch": 0.023267349055327593, + "grad_norm": 1.5070988839151986, + "learning_rate": 3.8242585091539755e-06, + "loss": 0.9837, + "step": 258 + }, + { + "epoch": 0.023357532578797854, + "grad_norm": 1.9142130889821325, + "learning_rate": 3.8269226839978895e-06, + "loss": 1.1261, + "step": 259 + }, + { + "epoch": 0.023447716102268115, + "grad_norm": 2.7079279571765835, + "learning_rate": 3.82957659224662e-06, + "loss": 1.0026, + "step": 260 + }, + { + "epoch": 0.02353789962573838, + "grad_norm": 1.6767553241691278, + "learning_rate": 3.8322203127226855e-06, + "loss": 0.9357, + "step": 261 + }, + { + "epoch": 0.02362808314920864, + "grad_norm": 2.066454592970373, + "learning_rate": 3.834853923344326e-06, + "loss": 1.1619, + "step": 262 + }, + { + "epoch": 0.0237182666726789, + "grad_norm": 1.3707547108250788, + "learning_rate": 3.837477501139285e-06, + "loss": 1.0531, + "step": 263 + }, + { + "epoch": 0.023808450196149165, + "grad_norm": 1.6926652482247067, + "learning_rate": 3.840091122258324e-06, + "loss": 1.0636, + "step": 264 + }, + { + "epoch": 0.023898633719619426, + "grad_norm": 1.651934850433362, + "learning_rate": 3.84269486198849e-06, + "loss": 0.9769, + "step": 265 + }, + { + "epoch": 0.023988817243089686, + "grad_norm": 1.4737128962148531, + "learning_rate": 3.845288794766121e-06, + "loss": 1.0661, + "step": 266 + }, + { + "epoch": 0.02407900076655995, + "grad_norm": 1.588601116464629, + "learning_rate": 3.847872994189619e-06, + "loss": 1.0495, + "step": 267 + }, + { + "epoch": 0.02416918429003021, + "grad_norm": 1.4860159386951919, + "learning_rate": 3.8504475330319805e-06, + "loss": 1.1003, + "step": 268 + }, + { + "epoch": 0.024259367813500472, + "grad_norm": 1.4466677523460945, + "learning_rate": 3.853012483253093e-06, + "loss": 1.025, + "step": 269 + }, + { + "epoch": 0.024349551336970737, + "grad_norm": 1.5762515892849684, + "learning_rate": 3.855567916011802e-06, + "loss": 1.0513, + "step": 270 + }, + { + "epoch": 0.024439734860440997, + "grad_norm": 1.9184752997666714, + "learning_rate": 3.858113901677755e-06, + "loss": 1.0592, + "step": 271 + }, + { + "epoch": 0.024529918383911258, + "grad_norm": 1.4726220083935668, + "learning_rate": 3.860650509843034e-06, + "loss": 1.0096, + "step": 272 + }, + { + "epoch": 0.024620101907381522, + "grad_norm": 1.9729834559037482, + "learning_rate": 3.863177809333563e-06, + "loss": 1.0395, + "step": 273 + }, + { + "epoch": 0.024710285430851783, + "grad_norm": 1.4858673624680152, + "learning_rate": 3.86569586822032e-06, + "loss": 1.0289, + "step": 274 + }, + { + "epoch": 0.024800468954322044, + "grad_norm": 1.7311616978349837, + "learning_rate": 3.868204753830331e-06, + "loss": 1.009, + "step": 275 + }, + { + "epoch": 0.02489065247779231, + "grad_norm": 1.5393265241144931, + "learning_rate": 3.870704532757476e-06, + "loss": 0.9645, + "step": 276 + }, + { + "epoch": 0.02498083600126257, + "grad_norm": 1.6806437764337157, + "learning_rate": 3.8731952708730974e-06, + "loss": 1.0763, + "step": 277 + }, + { + "epoch": 0.02507101952473283, + "grad_norm": 1.4854218656316072, + "learning_rate": 3.8756770333364085e-06, + "loss": 1.0707, + "step": 278 + }, + { + "epoch": 0.025161203048203094, + "grad_norm": 1.6317789557488491, + "learning_rate": 3.878149884604725e-06, + "loss": 0.9698, + "step": 279 + }, + { + "epoch": 0.025251386571673355, + "grad_norm": 1.8406979143243727, + "learning_rate": 
3.8806138884435125e-06, + "loss": 1.0216, + "step": 280 + }, + { + "epoch": 0.025341570095143616, + "grad_norm": 1.4031454360934996, + "learning_rate": 3.883069107936248e-06, + "loss": 0.9765, + "step": 281 + }, + { + "epoch": 0.02543175361861388, + "grad_norm": 1.8378537830061348, + "learning_rate": 3.885515605494114e-06, + "loss": 1.1096, + "step": 282 + }, + { + "epoch": 0.02552193714208414, + "grad_norm": 1.9841227333537994, + "learning_rate": 3.8879534428655145e-06, + "loss": 1.0952, + "step": 283 + }, + { + "epoch": 0.025612120665554402, + "grad_norm": 1.6247306747261585, + "learning_rate": 3.890382681145432e-06, + "loss": 1.0872, + "step": 284 + }, + { + "epoch": 0.025702304189024666, + "grad_norm": 1.8635001064021084, + "learning_rate": 3.892803380784608e-06, + "loss": 1.0816, + "step": 285 + }, + { + "epoch": 0.025792487712494927, + "grad_norm": 1.2180721647685675, + "learning_rate": 3.8952156015985725e-06, + "loss": 0.9108, + "step": 286 + }, + { + "epoch": 0.025882671235965188, + "grad_norm": 2.0512397180981834, + "learning_rate": 3.897619402776516e-06, + "loss": 1.0544, + "step": 287 + }, + { + "epoch": 0.025972854759435452, + "grad_norm": 1.9312777742530598, + "learning_rate": 3.900014842889995e-06, + "loss": 1.1216, + "step": 288 + }, + { + "epoch": 0.026063038282905713, + "grad_norm": 1.6301181593585352, + "learning_rate": 3.902401979901503e-06, + "loss": 0.967, + "step": 289 + }, + { + "epoch": 0.026153221806375974, + "grad_norm": 1.6022580008995222, + "learning_rate": 3.904780871172884e-06, + "loss": 1.0899, + "step": 290 + }, + { + "epoch": 0.026243405329846238, + "grad_norm": 2.646099307491119, + "learning_rate": 3.907151573473601e-06, + "loss": 0.9333, + "step": 291 + }, + { + "epoch": 0.0263335888533165, + "grad_norm": 1.3022584975359546, + "learning_rate": 3.909514142988868e-06, + "loss": 1.0591, + "step": 292 + }, + { + "epoch": 0.02642377237678676, + "grad_norm": 1.7466932799347776, + "learning_rate": 3.911868635327639e-06, + "loss": 1.0115, + "step": 293 + }, + { + "epoch": 0.026513955900257024, + "grad_norm": 1.339104050622602, + "learning_rate": 3.914215105530455e-06, + "loss": 1.0166, + "step": 294 + }, + { + "epoch": 0.026604139423727285, + "grad_norm": 1.4474169282678886, + "learning_rate": 3.916553608077179e-06, + "loss": 1.0325, + "step": 295 + }, + { + "epoch": 0.026694322947197546, + "grad_norm": 1.5858587987755817, + "learning_rate": 3.91888419689457e-06, + "loss": 1.0334, + "step": 296 + }, + { + "epoch": 0.02678450647066781, + "grad_norm": 1.4278838239903247, + "learning_rate": 3.921206925363754e-06, + "loss": 0.9764, + "step": 297 + }, + { + "epoch": 0.02687468999413807, + "grad_norm": 8.581468496854137, + "learning_rate": 3.923521846327559e-06, + "loss": 0.9548, + "step": 298 + }, + { + "epoch": 0.02696487351760833, + "grad_norm": 1.6479487123841814, + "learning_rate": 3.925829012097725e-06, + "loss": 1.0028, + "step": 299 + }, + { + "epoch": 0.027055057041078596, + "grad_norm": 1.9980574949696381, + "learning_rate": 3.928128474462e-06, + "loss": 0.9641, + "step": 300 + }, + { + "epoch": 0.027145240564548857, + "grad_norm": 2.1539859772192975, + "learning_rate": 3.930420284691115e-06, + "loss": 1.0521, + "step": 301 + }, + { + "epoch": 0.027235424088019117, + "grad_norm": 1.7521026105270068, + "learning_rate": 3.932704493545644e-06, + "loss": 1.0787, + "step": 302 + }, + { + "epoch": 0.02732560761148938, + "grad_norm": 2.6145386913688142, + "learning_rate": 3.934981151282745e-06, + "loss": 1.1213, + "step": 303 + }, + { + "epoch": 
0.027415791134959643, + "grad_norm": 1.598701713256338, + "learning_rate": 3.9372503076628006e-06, + "loss": 1.0326, + "step": 304 + }, + { + "epoch": 0.027505974658429903, + "grad_norm": 1.5483168230658166, + "learning_rate": 3.939512011955941e-06, + "loss": 1.0641, + "step": 305 + }, + { + "epoch": 0.027596158181900168, + "grad_norm": 1.571979150677513, + "learning_rate": 3.941766312948463e-06, + "loss": 1.038, + "step": 306 + }, + { + "epoch": 0.02768634170537043, + "grad_norm": 1.9473569613264694, + "learning_rate": 3.944013258949147e-06, + "loss": 1.0102, + "step": 307 + }, + { + "epoch": 0.02777652522884069, + "grad_norm": 1.8392786668703296, + "learning_rate": 3.946252897795465e-06, + "loss": 0.8629, + "step": 308 + }, + { + "epoch": 0.027866708752310954, + "grad_norm": 1.941265937754489, + "learning_rate": 3.9484852768596935e-06, + "loss": 1.0496, + "step": 309 + }, + { + "epoch": 0.027956892275781214, + "grad_norm": 1.6542467922659818, + "learning_rate": 3.950710443054923e-06, + "loss": 0.9461, + "step": 310 + }, + { + "epoch": 0.028047075799251475, + "grad_norm": 1.6065258991470157, + "learning_rate": 3.952928442840981e-06, + "loss": 0.9504, + "step": 311 + }, + { + "epoch": 0.02813725932272174, + "grad_norm": 1.6196958834508266, + "learning_rate": 3.955139322230243e-06, + "loss": 1.0634, + "step": 312 + }, + { + "epoch": 0.028227442846192, + "grad_norm": 1.7465977975988485, + "learning_rate": 3.957343126793365e-06, + "loss": 0.9695, + "step": 313 + }, + { + "epoch": 0.02831762636966226, + "grad_norm": 1.5587462539444292, + "learning_rate": 3.959539901664921e-06, + "loss": 1.015, + "step": 314 + }, + { + "epoch": 0.028407809893132525, + "grad_norm": 2.013210772937591, + "learning_rate": 3.9617296915489425e-06, + "loss": 1.1384, + "step": 315 + }, + { + "epoch": 0.028497993416602786, + "grad_norm": 4.930641722784359, + "learning_rate": 3.963912540724387e-06, + "loss": 0.9817, + "step": 316 + }, + { + "epoch": 0.028588176940073047, + "grad_norm": 1.7573833704705055, + "learning_rate": 3.966088493050501e-06, + "loss": 1.0383, + "step": 317 + }, + { + "epoch": 0.02867836046354331, + "grad_norm": 1.7238803242746477, + "learning_rate": 3.968257591972113e-06, + "loss": 1.0153, + "step": 318 + }, + { + "epoch": 0.028768543987013572, + "grad_norm": 1.3738036152700874, + "learning_rate": 3.970419880524835e-06, + "loss": 1.111, + "step": 319 + }, + { + "epoch": 0.028858727510483833, + "grad_norm": 1.5216138047684165, + "learning_rate": 3.972575401340192e-06, + "loss": 1.0007, + "step": 320 + }, + { + "epoch": 0.028948911033954097, + "grad_norm": 1.9634337323893503, + "learning_rate": 3.974724196650656e-06, + "loss": 1.0694, + "step": 321 + }, + { + "epoch": 0.029039094557424358, + "grad_norm": 2.454366673435263, + "learning_rate": 3.976866308294617e-06, + "loss": 1.0176, + "step": 322 + }, + { + "epoch": 0.02912927808089462, + "grad_norm": 1.3531885233498129, + "learning_rate": 3.979001777721269e-06, + "loss": 0.9902, + "step": 323 + }, + { + "epoch": 0.029219461604364883, + "grad_norm": 1.638787757677157, + "learning_rate": 3.981130645995424e-06, + "loss": 0.9953, + "step": 324 + }, + { + "epoch": 0.029309645127835144, + "grad_norm": 1.9198204161184618, + "learning_rate": 3.983252953802248e-06, + "loss": 1.1025, + "step": 325 + }, + { + "epoch": 0.029399828651305405, + "grad_norm": 0.9938088633539907, + "learning_rate": 3.9853687414519285e-06, + "loss": 0.8625, + "step": 326 + }, + { + "epoch": 0.02949001217477567, + "grad_norm": 1.8037496664849262, + "learning_rate": 
3.987478048884265e-06, + "loss": 1.0641, + "step": 327 + }, + { + "epoch": 0.02958019569824593, + "grad_norm": 1.5330312061505376, + "learning_rate": 3.989580915673196e-06, + "loss": 1.0101, + "step": 328 + }, + { + "epoch": 0.02967037922171619, + "grad_norm": 1.6494869128006104, + "learning_rate": 3.991677381031255e-06, + "loss": 1.0067, + "step": 329 + }, + { + "epoch": 0.029760562745186455, + "grad_norm": 1.7342976045952816, + "learning_rate": 3.993767483813953e-06, + "loss": 1.1007, + "step": 330 + }, + { + "epoch": 0.029850746268656716, + "grad_norm": 1.6469937913241441, + "learning_rate": 3.995851262524104e-06, + "loss": 0.9449, + "step": 331 + }, + { + "epoch": 0.02994092979212698, + "grad_norm": 1.9006536915252716, + "learning_rate": 3.997928755316079e-06, + "loss": 1.0227, + "step": 332 + }, + { + "epoch": 0.03003111331559724, + "grad_norm": 0.9688871040538025, + "learning_rate": 4e-06, + "loss": 0.8635, + "step": 333 + }, + { + "epoch": 0.030121296839067502, + "grad_norm": 1.646674492517529, + "learning_rate": 3.999999914674486e-06, + "loss": 0.9917, + "step": 334 + }, + { + "epoch": 0.030211480362537766, + "grad_norm": 1.5026608549517328, + "learning_rate": 3.999999658697952e-06, + "loss": 1.0031, + "step": 335 + }, + { + "epoch": 0.030301663886008027, + "grad_norm": 0.9794813861042179, + "learning_rate": 3.9999992320704185e-06, + "loss": 0.787, + "step": 336 + }, + { + "epoch": 0.030391847409478288, + "grad_norm": 2.300689021854271, + "learning_rate": 3.999998634791922e-06, + "loss": 1.042, + "step": 337 + }, + { + "epoch": 0.030482030932948552, + "grad_norm": 2.0882830630150777, + "learning_rate": 3.999997866862515e-06, + "loss": 0.9998, + "step": 338 + }, + { + "epoch": 0.030572214456418813, + "grad_norm": 1.4778504582428869, + "learning_rate": 3.999996928282262e-06, + "loss": 1.1081, + "step": 339 + }, + { + "epoch": 0.030662397979889074, + "grad_norm": 2.0831398937701233, + "learning_rate": 3.999995819051244e-06, + "loss": 1.0157, + "step": 340 + }, + { + "epoch": 0.030752581503359338, + "grad_norm": 1.474192013318675, + "learning_rate": 3.9999945391695536e-06, + "loss": 0.9876, + "step": 341 + }, + { + "epoch": 0.0308427650268296, + "grad_norm": 1.4796352002610105, + "learning_rate": 3.999993088637302e-06, + "loss": 1.1104, + "step": 342 + }, + { + "epoch": 0.03093294855029986, + "grad_norm": 2.2938809765416766, + "learning_rate": 3.999991467454612e-06, + "loss": 1.0822, + "step": 343 + }, + { + "epoch": 0.031023132073770124, + "grad_norm": 2.561589242000494, + "learning_rate": 3.999989675621622e-06, + "loss": 0.9405, + "step": 344 + }, + { + "epoch": 0.031113315597240385, + "grad_norm": 1.644828352741495, + "learning_rate": 3.999987713138485e-06, + "loss": 1.0312, + "step": 345 + }, + { + "epoch": 0.031203499120710645, + "grad_norm": 1.154647231787119, + "learning_rate": 3.999985580005369e-06, + "loss": 0.9006, + "step": 346 + }, + { + "epoch": 0.031293682644180906, + "grad_norm": 1.9070950495492187, + "learning_rate": 3.999983276222455e-06, + "loss": 1.0916, + "step": 347 + }, + { + "epoch": 0.03138386616765117, + "grad_norm": 1.6025083400610463, + "learning_rate": 3.999980801789941e-06, + "loss": 1.0347, + "step": 348 + }, + { + "epoch": 0.031474049691121435, + "grad_norm": 1.6160621597638443, + "learning_rate": 3.999978156708036e-06, + "loss": 1.0302, + "step": 349 + }, + { + "epoch": 0.031564233214591696, + "grad_norm": 1.8831350596324132, + "learning_rate": 3.9999753409769675e-06, + "loss": 1.131, + "step": 350 + }, + { + "epoch": 0.031654416738061956, + "grad_norm": 
1.2270416004400626, + "learning_rate": 3.999972354596975e-06, + "loss": 0.8095, + "step": 351 + }, + { + "epoch": 0.03174460026153222, + "grad_norm": 1.706550512992981, + "learning_rate": 3.999969197568314e-06, + "loss": 0.976, + "step": 352 + }, + { + "epoch": 0.03183478378500248, + "grad_norm": 1.7558954933194266, + "learning_rate": 3.999965869891253e-06, + "loss": 1.0749, + "step": 353 + }, + { + "epoch": 0.03192496730847274, + "grad_norm": 1.5138045714265107, + "learning_rate": 3.999962371566075e-06, + "loss": 1.0533, + "step": 354 + }, + { + "epoch": 0.03201515083194301, + "grad_norm": 1.5369449512995563, + "learning_rate": 3.999958702593082e-06, + "loss": 1.0909, + "step": 355 + }, + { + "epoch": 0.03210533435541327, + "grad_norm": 1.5056331083555243, + "learning_rate": 3.999954862972583e-06, + "loss": 1.0446, + "step": 356 + }, + { + "epoch": 0.03219551787888353, + "grad_norm": 1.5646634954581018, + "learning_rate": 3.999950852704908e-06, + "loss": 0.9675, + "step": 357 + }, + { + "epoch": 0.03228570140235379, + "grad_norm": 1.374000793140299, + "learning_rate": 3.9999466717903995e-06, + "loss": 1.0774, + "step": 358 + }, + { + "epoch": 0.03237588492582405, + "grad_norm": 0.8887979941949778, + "learning_rate": 3.999942320229413e-06, + "loss": 0.8233, + "step": 359 + }, + { + "epoch": 0.03246606844929431, + "grad_norm": 1.8405398208260828, + "learning_rate": 3.99993779802232e-06, + "loss": 1.1475, + "step": 360 + }, + { + "epoch": 0.03255625197276458, + "grad_norm": 1.6612980046738308, + "learning_rate": 3.999933105169506e-06, + "loss": 1.0123, + "step": 361 + }, + { + "epoch": 0.03264643549623484, + "grad_norm": 1.4205752290263414, + "learning_rate": 3.999928241671373e-06, + "loss": 1.0176, + "step": 362 + }, + { + "epoch": 0.0327366190197051, + "grad_norm": 2.2642734047622586, + "learning_rate": 3.999923207528334e-06, + "loss": 0.9264, + "step": 363 + }, + { + "epoch": 0.03282680254317536, + "grad_norm": 1.348517897099934, + "learning_rate": 3.9999180027408196e-06, + "loss": 1.0209, + "step": 364 + }, + { + "epoch": 0.03291698606664562, + "grad_norm": 1.5275916169157544, + "learning_rate": 3.9999126273092735e-06, + "loss": 0.9914, + "step": 365 + }, + { + "epoch": 0.03300716959011588, + "grad_norm": 1.6634904478146855, + "learning_rate": 3.999907081234156e-06, + "loss": 1.104, + "step": 366 + }, + { + "epoch": 0.03309735311358615, + "grad_norm": 1.7096197897978178, + "learning_rate": 3.999901364515938e-06, + "loss": 0.9884, + "step": 367 + }, + { + "epoch": 0.03318753663705641, + "grad_norm": 1.6133399195313334, + "learning_rate": 3.999895477155108e-06, + "loss": 1.067, + "step": 368 + }, + { + "epoch": 0.03327772016052667, + "grad_norm": 1.3682636058680826, + "learning_rate": 3.999889419152169e-06, + "loss": 0.8024, + "step": 369 + }, + { + "epoch": 0.03336790368399693, + "grad_norm": 1.5631261715280198, + "learning_rate": 3.999883190507638e-06, + "loss": 1.0187, + "step": 370 + }, + { + "epoch": 0.033458087207467194, + "grad_norm": 1.7055476045923672, + "learning_rate": 3.999876791222044e-06, + "loss": 1.0107, + "step": 371 + }, + { + "epoch": 0.033548270730937454, + "grad_norm": 1.8680352237860232, + "learning_rate": 3.999870221295936e-06, + "loss": 0.9702, + "step": 372 + }, + { + "epoch": 0.03363845425440772, + "grad_norm": 2.6424768037959865, + "learning_rate": 3.999863480729875e-06, + "loss": 0.9973, + "step": 373 + }, + { + "epoch": 0.03372863777787798, + "grad_norm": 1.9268942937834663, + "learning_rate": 3.999856569524433e-06, + "loss": 0.9601, + "step": 374 + }, + { + 
"epoch": 0.033818821301348244, + "grad_norm": 1.3686193325431615, + "learning_rate": 3.999849487680202e-06, + "loss": 0.9698, + "step": 375 + }, + { + "epoch": 0.033909004824818505, + "grad_norm": 1.4978568343237517, + "learning_rate": 3.999842235197786e-06, + "loss": 0.9541, + "step": 376 + }, + { + "epoch": 0.033999188348288766, + "grad_norm": 1.953109008723598, + "learning_rate": 3.999834812077803e-06, + "loss": 0.96, + "step": 377 + }, + { + "epoch": 0.034089371871759026, + "grad_norm": 1.4379091095299754, + "learning_rate": 3.999827218320886e-06, + "loss": 1.0755, + "step": 378 + }, + { + "epoch": 0.034179555395229294, + "grad_norm": 1.903216465347793, + "learning_rate": 3.999819453927685e-06, + "loss": 0.9314, + "step": 379 + }, + { + "epoch": 0.034269738918699555, + "grad_norm": 1.7895406558802909, + "learning_rate": 3.999811518898861e-06, + "loss": 1.0295, + "step": 380 + }, + { + "epoch": 0.034359922442169816, + "grad_norm": 2.168511400097379, + "learning_rate": 3.999803413235092e-06, + "loss": 1.0324, + "step": 381 + }, + { + "epoch": 0.03445010596564008, + "grad_norm": 1.616908394874543, + "learning_rate": 3.999795136937068e-06, + "loss": 1.1318, + "step": 382 + }, + { + "epoch": 0.03454028948911034, + "grad_norm": 1.5938986166681601, + "learning_rate": 3.999786690005496e-06, + "loss": 1.0219, + "step": 383 + }, + { + "epoch": 0.0346304730125806, + "grad_norm": 1.5091069648908038, + "learning_rate": 3.999778072441098e-06, + "loss": 1.0028, + "step": 384 + }, + { + "epoch": 0.034720656536050866, + "grad_norm": 2.1446841103806156, + "learning_rate": 3.999769284244608e-06, + "loss": 1.0462, + "step": 385 + }, + { + "epoch": 0.03481084005952113, + "grad_norm": 1.3153752214445533, + "learning_rate": 3.999760325416775e-06, + "loss": 1.0295, + "step": 386 + }, + { + "epoch": 0.03490102358299139, + "grad_norm": 1.7410440156336597, + "learning_rate": 3.999751195958366e-06, + "loss": 1.1069, + "step": 387 + }, + { + "epoch": 0.03499120710646165, + "grad_norm": 1.4774328825240357, + "learning_rate": 3.999741895870157e-06, + "loss": 0.8309, + "step": 388 + }, + { + "epoch": 0.03508139062993191, + "grad_norm": 1.6915627774940092, + "learning_rate": 3.999732425152944e-06, + "loss": 1.0652, + "step": 389 + }, + { + "epoch": 0.03517157415340217, + "grad_norm": 1.3883773126463563, + "learning_rate": 3.999722783807533e-06, + "loss": 1.0826, + "step": 390 + }, + { + "epoch": 0.03526175767687244, + "grad_norm": 1.9244354064123246, + "learning_rate": 3.999712971834748e-06, + "loss": 0.9594, + "step": 391 + }, + { + "epoch": 0.0353519412003427, + "grad_norm": 1.5682423929218192, + "learning_rate": 3.999702989235427e-06, + "loss": 1.0277, + "step": 392 + }, + { + "epoch": 0.03544212472381296, + "grad_norm": 1.491972663153399, + "learning_rate": 3.999692836010419e-06, + "loss": 1.1111, + "step": 393 + }, + { + "epoch": 0.03553230824728322, + "grad_norm": 1.9250540762898103, + "learning_rate": 3.999682512160593e-06, + "loss": 1.1602, + "step": 394 + }, + { + "epoch": 0.03562249177075348, + "grad_norm": 1.0819464695075924, + "learning_rate": 3.99967201768683e-06, + "loss": 0.8123, + "step": 395 + }, + { + "epoch": 0.03571267529422374, + "grad_norm": 1.67712583152362, + "learning_rate": 3.999661352590023e-06, + "loss": 1.0141, + "step": 396 + }, + { + "epoch": 0.03580285881769401, + "grad_norm": 1.4471403368878317, + "learning_rate": 3.999650516871083e-06, + "loss": 1.0211, + "step": 397 + }, + { + "epoch": 0.03589304234116427, + "grad_norm": 1.6772913675430758, + "learning_rate": 3.9996395105309365e-06, 
+ "loss": 1.0552, + "step": 398 + }, + { + "epoch": 0.03598322586463453, + "grad_norm": 1.4669132516665868, + "learning_rate": 3.99962833357052e-06, + "loss": 1.0379, + "step": 399 + }, + { + "epoch": 0.03607340938810479, + "grad_norm": 1.4527589890768626, + "learning_rate": 3.999616985990789e-06, + "loss": 1.0059, + "step": 400 + }, + { + "epoch": 0.03616359291157505, + "grad_norm": 1.3434120462956263, + "learning_rate": 3.9996054677927104e-06, + "loss": 0.9685, + "step": 401 + }, + { + "epoch": 0.03625377643504532, + "grad_norm": 1.9387896152878534, + "learning_rate": 3.9995937789772675e-06, + "loss": 0.9606, + "step": 402 + }, + { + "epoch": 0.03634395995851558, + "grad_norm": 0.9081511451858889, + "learning_rate": 3.999581919545458e-06, + "loss": 0.8138, + "step": 403 + }, + { + "epoch": 0.03643414348198584, + "grad_norm": 2.064016073947978, + "learning_rate": 3.9995698894982935e-06, + "loss": 1.0539, + "step": 404 + }, + { + "epoch": 0.0365243270054561, + "grad_norm": 1.624492199123787, + "learning_rate": 3.9995576888368e-06, + "loss": 1.0811, + "step": 405 + }, + { + "epoch": 0.036614510528926364, + "grad_norm": 1.7164629892547771, + "learning_rate": 3.9995453175620194e-06, + "loss": 1.022, + "step": 406 + }, + { + "epoch": 0.036704694052396625, + "grad_norm": 1.7617778895024472, + "learning_rate": 3.999532775675007e-06, + "loss": 0.8745, + "step": 407 + }, + { + "epoch": 0.03679487757586689, + "grad_norm": 1.7549467335195232, + "learning_rate": 3.9995200631768326e-06, + "loss": 1.0001, + "step": 408 + }, + { + "epoch": 0.03688506109933715, + "grad_norm": 1.844463210444499, + "learning_rate": 3.9995071800685815e-06, + "loss": 1.1587, + "step": 409 + }, + { + "epoch": 0.036975244622807414, + "grad_norm": 2.0460949059155764, + "learning_rate": 3.999494126351352e-06, + "loss": 0.9998, + "step": 410 + }, + { + "epoch": 0.037065428146277675, + "grad_norm": 1.6789750118384155, + "learning_rate": 3.99948090202626e-06, + "loss": 1.0336, + "step": 411 + }, + { + "epoch": 0.037155611669747936, + "grad_norm": 1.8558256749630475, + "learning_rate": 3.999467507094431e-06, + "loss": 1.0313, + "step": 412 + }, + { + "epoch": 0.0372457951932182, + "grad_norm": 1.494794635127329, + "learning_rate": 3.999453941557011e-06, + "loss": 0.9361, + "step": 413 + }, + { + "epoch": 0.037335978716688464, + "grad_norm": 1.0640383971140994, + "learning_rate": 3.999440205415154e-06, + "loss": 0.7834, + "step": 414 + }, + { + "epoch": 0.037426162240158725, + "grad_norm": 1.9139505781810575, + "learning_rate": 3.999426298670035e-06, + "loss": 1.0679, + "step": 415 + }, + { + "epoch": 0.037516345763628986, + "grad_norm": 1.6001529352291413, + "learning_rate": 3.9994122213228385e-06, + "loss": 1.0653, + "step": 416 + }, + { + "epoch": 0.03760652928709925, + "grad_norm": 2.0833558526411493, + "learning_rate": 3.9993979733747675e-06, + "loss": 1.0547, + "step": 417 + }, + { + "epoch": 0.03769671281056951, + "grad_norm": 1.5107624666595059, + "learning_rate": 3.999383554827037e-06, + "loss": 1.0219, + "step": 418 + }, + { + "epoch": 0.03778689633403977, + "grad_norm": 1.6424816706547716, + "learning_rate": 3.999368965680876e-06, + "loss": 0.9875, + "step": 419 + }, + { + "epoch": 0.037877079857510036, + "grad_norm": 1.7052137532863652, + "learning_rate": 3.999354205937531e-06, + "loss": 1.0616, + "step": 420 + }, + { + "epoch": 0.0379672633809803, + "grad_norm": 1.7004070467849337, + "learning_rate": 3.999339275598261e-06, + "loss": 1.0696, + "step": 421 + }, + { + "epoch": 0.03805744690445056, + "grad_norm": 
1.6194253816058817, + "learning_rate": 3.99932417466434e-06, + "loss": 1.0053, + "step": 422 + }, + { + "epoch": 0.03814763042792082, + "grad_norm": 2.526025537393469, + "learning_rate": 3.999308903137056e-06, + "loss": 0.9384, + "step": 423 + }, + { + "epoch": 0.03823781395139108, + "grad_norm": 1.5753397771876, + "learning_rate": 3.999293461017711e-06, + "loss": 1.0718, + "step": 424 + }, + { + "epoch": 0.03832799747486134, + "grad_norm": 1.9979213402951668, + "learning_rate": 3.9992778483076255e-06, + "loss": 1.0164, + "step": 425 + }, + { + "epoch": 0.03841818099833161, + "grad_norm": 1.531662087556444, + "learning_rate": 3.99926206500813e-06, + "loss": 1.0775, + "step": 426 + }, + { + "epoch": 0.03850836452180187, + "grad_norm": 1.9286393009837144, + "learning_rate": 3.999246111120571e-06, + "loss": 0.9483, + "step": 427 + }, + { + "epoch": 0.03859854804527213, + "grad_norm": 1.391066448993959, + "learning_rate": 3.999229986646311e-06, + "loss": 1.0904, + "step": 428 + }, + { + "epoch": 0.03868873156874239, + "grad_norm": 1.6084454268428232, + "learning_rate": 3.999213691586723e-06, + "loss": 0.9871, + "step": 429 + }, + { + "epoch": 0.03877891509221265, + "grad_norm": 1.4641757917183151, + "learning_rate": 3.9991972259432e-06, + "loss": 1.0608, + "step": 430 + }, + { + "epoch": 0.03886909861568291, + "grad_norm": 1.4217631537075641, + "learning_rate": 3.999180589717147e-06, + "loss": 1.0996, + "step": 431 + }, + { + "epoch": 0.03895928213915318, + "grad_norm": 1.5365924668895647, + "learning_rate": 3.999163782909983e-06, + "loss": 0.9379, + "step": 432 + }, + { + "epoch": 0.03904946566262344, + "grad_norm": 1.7622591192253332, + "learning_rate": 3.99914680552314e-06, + "loss": 1.0562, + "step": 433 + }, + { + "epoch": 0.0391396491860937, + "grad_norm": 1.5163024489474255, + "learning_rate": 3.999129657558069e-06, + "loss": 0.9555, + "step": 434 + }, + { + "epoch": 0.03922983270956396, + "grad_norm": 1.8817262800699996, + "learning_rate": 3.999112339016234e-06, + "loss": 0.7898, + "step": 435 + }, + { + "epoch": 0.03932001623303422, + "grad_norm": 1.5231646171116642, + "learning_rate": 3.999094849899109e-06, + "loss": 1.0346, + "step": 436 + }, + { + "epoch": 0.039410199756504484, + "grad_norm": 1.3640995138593235, + "learning_rate": 3.99907719020819e-06, + "loss": 0.9404, + "step": 437 + }, + { + "epoch": 0.03950038327997475, + "grad_norm": 1.5046232025574358, + "learning_rate": 3.999059359944982e-06, + "loss": 1.0551, + "step": 438 + }, + { + "epoch": 0.03959056680344501, + "grad_norm": 1.804188118411387, + "learning_rate": 3.999041359111007e-06, + "loss": 1.0809, + "step": 439 + }, + { + "epoch": 0.03968075032691527, + "grad_norm": 1.7026685750577704, + "learning_rate": 3.999023187707801e-06, + "loss": 1.1028, + "step": 440 + }, + { + "epoch": 0.039770933850385534, + "grad_norm": 1.4188824696811764, + "learning_rate": 3.999004845736913e-06, + "loss": 1.0829, + "step": 441 + }, + { + "epoch": 0.039861117373855795, + "grad_norm": 1.5383460221571474, + "learning_rate": 3.9989863331999096e-06, + "loss": 1.0983, + "step": 442 + }, + { + "epoch": 0.039951300897326056, + "grad_norm": 1.9637920985574853, + "learning_rate": 3.99896765009837e-06, + "loss": 0.9565, + "step": 443 + }, + { + "epoch": 0.040041484420796324, + "grad_norm": 1.516058004603232, + "learning_rate": 3.998948796433888e-06, + "loss": 0.9866, + "step": 444 + }, + { + "epoch": 0.040131667944266584, + "grad_norm": 1.4506518740221177, + "learning_rate": 3.998929772208073e-06, + "loss": 1.0646, + "step": 445 + }, + { + 
"epoch": 0.040221851467736845, + "grad_norm": 0.8072360891264118, + "learning_rate": 3.998910577422547e-06, + "loss": 0.8338, + "step": 446 + }, + { + "epoch": 0.040312034991207106, + "grad_norm": 1.4664420804832312, + "learning_rate": 3.99889121207895e-06, + "loss": 1.0728, + "step": 447 + }, + { + "epoch": 0.04040221851467737, + "grad_norm": 1.7453513391283344, + "learning_rate": 3.9988716761789324e-06, + "loss": 1.0545, + "step": 448 + }, + { + "epoch": 0.04049240203814763, + "grad_norm": 1.716296299853729, + "learning_rate": 3.998851969724161e-06, + "loss": 0.9579, + "step": 449 + }, + { + "epoch": 0.040582585561617895, + "grad_norm": 1.6516573587277865, + "learning_rate": 3.998832092716319e-06, + "loss": 1.073, + "step": 450 + }, + { + "epoch": 0.040672769085088156, + "grad_norm": 1.4762216002908517, + "learning_rate": 3.998812045157102e-06, + "loss": 1.0027, + "step": 451 + }, + { + "epoch": 0.04076295260855842, + "grad_norm": 1.898967527645227, + "learning_rate": 3.998791827048219e-06, + "loss": 1.0774, + "step": 452 + }, + { + "epoch": 0.04085313613202868, + "grad_norm": 1.3622168290441337, + "learning_rate": 3.998771438391396e-06, + "loss": 0.9625, + "step": 453 + }, + { + "epoch": 0.04094331965549894, + "grad_norm": 0.8400435454003516, + "learning_rate": 3.9987508791883725e-06, + "loss": 0.7974, + "step": 454 + }, + { + "epoch": 0.0410335031789692, + "grad_norm": 1.6673233248610944, + "learning_rate": 3.998730149440904e-06, + "loss": 1.1396, + "step": 455 + }, + { + "epoch": 0.04112368670243947, + "grad_norm": 1.6437264328344103, + "learning_rate": 3.998709249150758e-06, + "loss": 1.0786, + "step": 456 + }, + { + "epoch": 0.04121387022590973, + "grad_norm": 1.5266195858576939, + "learning_rate": 3.998688178319717e-06, + "loss": 1.0426, + "step": 457 + }, + { + "epoch": 0.04130405374937999, + "grad_norm": 1.6066269606647534, + "learning_rate": 3.9986669369495805e-06, + "loss": 1.0102, + "step": 458 + }, + { + "epoch": 0.04139423727285025, + "grad_norm": 1.710609927238204, + "learning_rate": 3.998645525042161e-06, + "loss": 1.0812, + "step": 459 + }, + { + "epoch": 0.04148442079632051, + "grad_norm": 1.792907447847387, + "learning_rate": 3.998623942599284e-06, + "loss": 1.097, + "step": 460 + }, + { + "epoch": 0.04157460431979077, + "grad_norm": 1.8314355723644897, + "learning_rate": 3.998602189622793e-06, + "loss": 1.0186, + "step": 461 + }, + { + "epoch": 0.04166478784326104, + "grad_norm": 2.0441708939212218, + "learning_rate": 3.998580266114542e-06, + "loss": 1.0618, + "step": 462 + }, + { + "epoch": 0.0417549713667313, + "grad_norm": 1.7968975563292122, + "learning_rate": 3.998558172076404e-06, + "loss": 0.9284, + "step": 463 + }, + { + "epoch": 0.04184515489020156, + "grad_norm": 1.5554115487985372, + "learning_rate": 3.998535907510262e-06, + "loss": 0.9624, + "step": 464 + }, + { + "epoch": 0.04193533841367182, + "grad_norm": 1.4998369128260858, + "learning_rate": 3.998513472418016e-06, + "loss": 1.0439, + "step": 465 + }, + { + "epoch": 0.04202552193714208, + "grad_norm": 1.9089134869762663, + "learning_rate": 3.998490866801582e-06, + "loss": 1.0579, + "step": 466 + }, + { + "epoch": 0.04211570546061234, + "grad_norm": 1.5422519225913827, + "learning_rate": 3.998468090662886e-06, + "loss": 1.0117, + "step": 467 + }, + { + "epoch": 0.04220588898408261, + "grad_norm": 1.5208922422661801, + "learning_rate": 3.998445144003874e-06, + "loss": 0.9179, + "step": 468 + }, + { + "epoch": 0.04229607250755287, + "grad_norm": 1.6169421613418953, + "learning_rate": 3.998422026826504e-06, 
+ "loss": 1.0436, + "step": 469 + }, + { + "epoch": 0.04238625603102313, + "grad_norm": 1.779200328297036, + "learning_rate": 3.998398739132746e-06, + "loss": 1.0574, + "step": 470 + }, + { + "epoch": 0.04247643955449339, + "grad_norm": 1.4528436696070437, + "learning_rate": 3.99837528092459e-06, + "loss": 0.9767, + "step": 471 + }, + { + "epoch": 0.042566623077963654, + "grad_norm": 1.484057342269716, + "learning_rate": 3.998351652204034e-06, + "loss": 1.0746, + "step": 472 + }, + { + "epoch": 0.042656806601433915, + "grad_norm": 1.3970753093018429, + "learning_rate": 3.998327852973098e-06, + "loss": 1.056, + "step": 473 + }, + { + "epoch": 0.04274699012490418, + "grad_norm": 1.7835624140669808, + "learning_rate": 3.99830388323381e-06, + "loss": 0.9733, + "step": 474 + }, + { + "epoch": 0.042837173648374444, + "grad_norm": 1.342976458373702, + "learning_rate": 3.998279742988216e-06, + "loss": 1.0535, + "step": 475 + }, + { + "epoch": 0.042927357171844704, + "grad_norm": 1.3982803853179682, + "learning_rate": 3.998255432238377e-06, + "loss": 0.9914, + "step": 476 + }, + { + "epoch": 0.043017540695314965, + "grad_norm": 1.8302704049536023, + "learning_rate": 3.9982309509863656e-06, + "loss": 1.0939, + "step": 477 + }, + { + "epoch": 0.043107724218785226, + "grad_norm": 1.821891656344717, + "learning_rate": 3.998206299234272e-06, + "loss": 0.9768, + "step": 478 + }, + { + "epoch": 0.04319790774225549, + "grad_norm": 1.350364109669489, + "learning_rate": 3.998181476984198e-06, + "loss": 1.0468, + "step": 479 + }, + { + "epoch": 0.043288091265725755, + "grad_norm": 1.4123288481329532, + "learning_rate": 3.998156484238263e-06, + "loss": 0.9485, + "step": 480 + }, + { + "epoch": 0.043378274789196015, + "grad_norm": 1.7610165027406202, + "learning_rate": 3.998131320998599e-06, + "loss": 0.9613, + "step": 481 + }, + { + "epoch": 0.043468458312666276, + "grad_norm": 1.7275826042730322, + "learning_rate": 3.998105987267353e-06, + "loss": 1.0665, + "step": 482 + }, + { + "epoch": 0.04355864183613654, + "grad_norm": 1.7991665367537513, + "learning_rate": 3.998080483046687e-06, + "loss": 0.9501, + "step": 483 + }, + { + "epoch": 0.0436488253596068, + "grad_norm": 1.2067950860249352, + "learning_rate": 3.998054808338776e-06, + "loss": 0.9687, + "step": 484 + }, + { + "epoch": 0.04373900888307706, + "grad_norm": 1.5191501639692722, + "learning_rate": 3.998028963145812e-06, + "loss": 0.9889, + "step": 485 + }, + { + "epoch": 0.043829192406547327, + "grad_norm": 1.5402688657462473, + "learning_rate": 3.99800294747e-06, + "loss": 1.0455, + "step": 486 + }, + { + "epoch": 0.04391937593001759, + "grad_norm": 1.3823527202140526, + "learning_rate": 3.99797676131356e-06, + "loss": 1.0492, + "step": 487 + }, + { + "epoch": 0.04400955945348785, + "grad_norm": 1.9797584117542582, + "learning_rate": 3.997950404678726e-06, + "loss": 0.8768, + "step": 488 + }, + { + "epoch": 0.04409974297695811, + "grad_norm": 1.5173664902805835, + "learning_rate": 3.997923877567746e-06, + "loss": 1.1409, + "step": 489 + }, + { + "epoch": 0.04418992650042837, + "grad_norm": 1.5127064240886683, + "learning_rate": 3.9978971799828855e-06, + "loss": 0.8594, + "step": 490 + }, + { + "epoch": 0.04428011002389863, + "grad_norm": 1.794218272051556, + "learning_rate": 3.997870311926421e-06, + "loss": 1.0152, + "step": 491 + }, + { + "epoch": 0.0443702935473689, + "grad_norm": 1.4962321483850565, + "learning_rate": 3.997843273400645e-06, + "loss": 0.978, + "step": 492 + }, + { + "epoch": 0.04446047707083916, + "grad_norm": 1.5566418502975101, + 
"learning_rate": 3.997816064407865e-06, + "loss": 1.0024, + "step": 493 + }, + { + "epoch": 0.04455066059430942, + "grad_norm": 1.3731462814355713, + "learning_rate": 3.997788684950402e-06, + "loss": 1.0454, + "step": 494 + }, + { + "epoch": 0.04464084411777968, + "grad_norm": 1.5051879969436337, + "learning_rate": 3.997761135030593e-06, + "loss": 1.0636, + "step": 495 + }, + { + "epoch": 0.04473102764124994, + "grad_norm": 1.6117372675399133, + "learning_rate": 3.997733414650789e-06, + "loss": 0.917, + "step": 496 + }, + { + "epoch": 0.0448212111647202, + "grad_norm": 1.7674604345099167, + "learning_rate": 3.9977055238133554e-06, + "loss": 0.9885, + "step": 497 + }, + { + "epoch": 0.04491139468819047, + "grad_norm": 1.3502033875161983, + "learning_rate": 3.99767746252067e-06, + "loss": 0.9001, + "step": 498 + }, + { + "epoch": 0.04500157821166073, + "grad_norm": 1.3915377846541934, + "learning_rate": 3.997649230775129e-06, + "loss": 1.0211, + "step": 499 + }, + { + "epoch": 0.04509176173513099, + "grad_norm": 1.3766109826061776, + "learning_rate": 3.9976208285791395e-06, + "loss": 0.9658, + "step": 500 + }, + { + "epoch": 0.04518194525860125, + "grad_norm": 1.0656198249758357, + "learning_rate": 3.997592255935127e-06, + "loss": 0.9367, + "step": 501 + }, + { + "epoch": 0.045272128782071513, + "grad_norm": 1.4693169818361804, + "learning_rate": 3.997563512845529e-06, + "loss": 1.0472, + "step": 502 + }, + { + "epoch": 0.045362312305541774, + "grad_norm": 1.5205534411807342, + "learning_rate": 3.9975345993127975e-06, + "loss": 0.9798, + "step": 503 + }, + { + "epoch": 0.04545249582901204, + "grad_norm": 1.526470900787076, + "learning_rate": 3.9975055153393985e-06, + "loss": 1.0357, + "step": 504 + }, + { + "epoch": 0.0455426793524823, + "grad_norm": 1.7976169547072118, + "learning_rate": 3.997476260927816e-06, + "loss": 1.0356, + "step": 505 + }, + { + "epoch": 0.045632862875952564, + "grad_norm": 1.4098332000713119, + "learning_rate": 3.997446836080545e-06, + "loss": 0.9972, + "step": 506 + }, + { + "epoch": 0.045723046399422825, + "grad_norm": 1.9845462034368058, + "learning_rate": 3.997417240800095e-06, + "loss": 1.0721, + "step": 507 + }, + { + "epoch": 0.045813229922893085, + "grad_norm": 1.4109379089287284, + "learning_rate": 3.997387475088994e-06, + "loss": 1.1188, + "step": 508 + }, + { + "epoch": 0.045903413446363346, + "grad_norm": 1.9071427427937824, + "learning_rate": 3.99735753894978e-06, + "loss": 1.1135, + "step": 509 + }, + { + "epoch": 0.045993596969833614, + "grad_norm": 1.4315715112175627, + "learning_rate": 3.997327432385006e-06, + "loss": 1.0155, + "step": 510 + }, + { + "epoch": 0.046083780493303875, + "grad_norm": 1.7450304314001093, + "learning_rate": 3.997297155397244e-06, + "loss": 0.9481, + "step": 511 + }, + { + "epoch": 0.046173964016774136, + "grad_norm": 1.573156561673186, + "learning_rate": 3.997266707989074e-06, + "loss": 0.9439, + "step": 512 + }, + { + "epoch": 0.046264147540244396, + "grad_norm": 2.040120869589991, + "learning_rate": 3.997236090163097e-06, + "loss": 0.9375, + "step": 513 + }, + { + "epoch": 0.04635433106371466, + "grad_norm": 1.5969920374911262, + "learning_rate": 3.9972053019219235e-06, + "loss": 1.1508, + "step": 514 + }, + { + "epoch": 0.04644451458718492, + "grad_norm": 1.6290098747389687, + "learning_rate": 3.997174343268181e-06, + "loss": 1.0365, + "step": 515 + }, + { + "epoch": 0.046534698110655186, + "grad_norm": 1.2811385315578625, + "learning_rate": 3.9971432142045115e-06, + "loss": 1.029, + "step": 516 + }, + { + "epoch": 
0.04662488163412545, + "grad_norm": 0.9996387305948696, + "learning_rate": 3.99711191473357e-06, + "loss": 0.8898, + "step": 517 + }, + { + "epoch": 0.04671506515759571, + "grad_norm": 1.5890076334285617, + "learning_rate": 3.99708044485803e-06, + "loss": 1.0365, + "step": 518 + }, + { + "epoch": 0.04680524868106597, + "grad_norm": 1.6794519303606792, + "learning_rate": 3.997048804580574e-06, + "loss": 1.0327, + "step": 519 + }, + { + "epoch": 0.04689543220453623, + "grad_norm": 1.6822817618095203, + "learning_rate": 3.997016993903901e-06, + "loss": 0.9664, + "step": 520 + }, + { + "epoch": 0.04698561572800649, + "grad_norm": 1.4765141393421766, + "learning_rate": 3.996985012830728e-06, + "loss": 1.0906, + "step": 521 + }, + { + "epoch": 0.04707579925147676, + "grad_norm": 1.44424316438441, + "learning_rate": 3.996952861363782e-06, + "loss": 0.9693, + "step": 522 + }, + { + "epoch": 0.04716598277494702, + "grad_norm": 1.410218129160863, + "learning_rate": 3.9969205395058064e-06, + "loss": 1.0267, + "step": 523 + }, + { + "epoch": 0.04725616629841728, + "grad_norm": 1.8979757869932627, + "learning_rate": 3.99688804725956e-06, + "loss": 1.0127, + "step": 524 + }, + { + "epoch": 0.04734634982188754, + "grad_norm": 1.1362501404177807, + "learning_rate": 3.996855384627815e-06, + "loss": 0.9739, + "step": 525 + }, + { + "epoch": 0.0474365333453578, + "grad_norm": 1.7013517475182989, + "learning_rate": 3.996822551613357e-06, + "loss": 1.0545, + "step": 526 + }, + { + "epoch": 0.04752671686882806, + "grad_norm": 2.4640829662375316, + "learning_rate": 3.996789548218989e-06, + "loss": 0.9503, + "step": 527 + }, + { + "epoch": 0.04761690039229833, + "grad_norm": 1.4613260170748796, + "learning_rate": 3.996756374447526e-06, + "loss": 0.91, + "step": 528 + }, + { + "epoch": 0.04770708391576859, + "grad_norm": 2.625968663553804, + "learning_rate": 3.9967230303018005e-06, + "loss": 1.0346, + "step": 529 + }, + { + "epoch": 0.04779726743923885, + "grad_norm": 1.5135722465952697, + "learning_rate": 3.996689515784655e-06, + "loss": 1.0004, + "step": 530 + }, + { + "epoch": 0.04788745096270911, + "grad_norm": 2.2203368838620268, + "learning_rate": 3.996655830898951e-06, + "loss": 0.9902, + "step": 531 + }, + { + "epoch": 0.04797763448617937, + "grad_norm": 2.122244113733759, + "learning_rate": 3.996621975647562e-06, + "loss": 1.028, + "step": 532 + }, + { + "epoch": 0.04806781800964964, + "grad_norm": 1.2080473835538301, + "learning_rate": 3.996587950033377e-06, + "loss": 0.96, + "step": 533 + }, + { + "epoch": 0.0481580015331199, + "grad_norm": 1.9500339920798109, + "learning_rate": 3.996553754059299e-06, + "loss": 0.9481, + "step": 534 + }, + { + "epoch": 0.04824818505659016, + "grad_norm": 1.6884978840740976, + "learning_rate": 3.996519387728245e-06, + "loss": 1.0217, + "step": 535 + }, + { + "epoch": 0.04833836858006042, + "grad_norm": 1.2412011411026662, + "learning_rate": 3.9964848510431495e-06, + "loss": 1.003, + "step": 536 + }, + { + "epoch": 0.048428552103530684, + "grad_norm": 1.4051740132696589, + "learning_rate": 3.996450144006957e-06, + "loss": 1.0751, + "step": 537 + }, + { + "epoch": 0.048518735627000945, + "grad_norm": 2.076920009404278, + "learning_rate": 3.99641526662263e-06, + "loss": 0.9956, + "step": 538 + }, + { + "epoch": 0.04860891915047121, + "grad_norm": 1.5819113603540533, + "learning_rate": 3.996380218893145e-06, + "loss": 1.1033, + "step": 539 + }, + { + "epoch": 0.04869910267394147, + "grad_norm": 11.242888512933757, + "learning_rate": 3.996345000821491e-06, + "loss": 1.049, + 
"step": 540 + }, + { + "epoch": 0.048789286197411734, + "grad_norm": 1.5736129011409792, + "learning_rate": 3.996309612410674e-06, + "loss": 1.1249, + "step": 541 + }, + { + "epoch": 0.048879469720881995, + "grad_norm": 2.001249519076459, + "learning_rate": 3.996274053663713e-06, + "loss": 0.9526, + "step": 542 + }, + { + "epoch": 0.048969653244352256, + "grad_norm": 1.0645001042349402, + "learning_rate": 3.996238324583643e-06, + "loss": 0.8498, + "step": 543 + }, + { + "epoch": 0.049059836767822516, + "grad_norm": 1.8615732101688178, + "learning_rate": 3.996202425173512e-06, + "loss": 1.0792, + "step": 544 + }, + { + "epoch": 0.049150020291292784, + "grad_norm": 1.6608323100703037, + "learning_rate": 3.996166355436383e-06, + "loss": 0.974, + "step": 545 + }, + { + "epoch": 0.049240203814763045, + "grad_norm": 1.4611771330927237, + "learning_rate": 3.996130115375333e-06, + "loss": 1.0322, + "step": 546 + }, + { + "epoch": 0.049330387338233306, + "grad_norm": 1.1557462471239475, + "learning_rate": 3.996093704993456e-06, + "loss": 0.8507, + "step": 547 + }, + { + "epoch": 0.04942057086170357, + "grad_norm": 1.5244598088557562, + "learning_rate": 3.996057124293857e-06, + "loss": 1.1071, + "step": 548 + }, + { + "epoch": 0.04951075438517383, + "grad_norm": 1.285579437798576, + "learning_rate": 3.996020373279659e-06, + "loss": 1.0613, + "step": 549 + }, + { + "epoch": 0.04960093790864409, + "grad_norm": 1.5331922397105626, + "learning_rate": 3.995983451953996e-06, + "loss": 1.0316, + "step": 550 + }, + { + "epoch": 0.049691121432114356, + "grad_norm": 1.611556861132302, + "learning_rate": 3.99594636032002e-06, + "loss": 1.0461, + "step": 551 + }, + { + "epoch": 0.04978130495558462, + "grad_norm": 1.994108343657053, + "learning_rate": 3.995909098380894e-06, + "loss": 0.9155, + "step": 552 + }, + { + "epoch": 0.04987148847905488, + "grad_norm": 1.2527981909878898, + "learning_rate": 3.995871666139799e-06, + "loss": 0.8983, + "step": 553 + }, + { + "epoch": 0.04996167200252514, + "grad_norm": 1.5681336122825946, + "learning_rate": 3.995834063599928e-06, + "loss": 0.9977, + "step": 554 + }, + { + "epoch": 0.0500518555259954, + "grad_norm": 1.511610870060596, + "learning_rate": 3.99579629076449e-06, + "loss": 1.0397, + "step": 555 + }, + { + "epoch": 0.05014203904946566, + "grad_norm": 1.623428318265737, + "learning_rate": 3.9957583476367084e-06, + "loss": 0.9941, + "step": 556 + }, + { + "epoch": 0.05023222257293593, + "grad_norm": 1.6074921788430274, + "learning_rate": 3.995720234219819e-06, + "loss": 1.0297, + "step": 557 + }, + { + "epoch": 0.05032240609640619, + "grad_norm": 1.5465346453214968, + "learning_rate": 3.995681950517075e-06, + "loss": 0.9248, + "step": 558 + }, + { + "epoch": 0.05041258961987645, + "grad_norm": 1.9326207651005922, + "learning_rate": 3.995643496531743e-06, + "loss": 1.0438, + "step": 559 + }, + { + "epoch": 0.05050277314334671, + "grad_norm": 1.6400301626967273, + "learning_rate": 3.9956048722671044e-06, + "loss": 1.0317, + "step": 560 + }, + { + "epoch": 0.05059295666681697, + "grad_norm": 1.49573012432478, + "learning_rate": 3.995566077726454e-06, + "loss": 0.995, + "step": 561 + }, + { + "epoch": 0.05068314019028723, + "grad_norm": 1.5639601941713979, + "learning_rate": 3.995527112913103e-06, + "loss": 0.9859, + "step": 562 + }, + { + "epoch": 0.0507733237137575, + "grad_norm": 1.809391644916828, + "learning_rate": 3.995487977830375e-06, + "loss": 0.9725, + "step": 563 + }, + { + "epoch": 0.05086350723722776, + "grad_norm": 10.677829583613855, + "learning_rate": 
3.9954486724816105e-06, + "loss": 1.1142, + "step": 564 + }, + { + "epoch": 0.05095369076069802, + "grad_norm": 1.8325024875139992, + "learning_rate": 3.995409196870161e-06, + "loss": 1.0643, + "step": 565 + }, + { + "epoch": 0.05104387428416828, + "grad_norm": 0.8855054848197921, + "learning_rate": 3.995369550999398e-06, + "loss": 0.8217, + "step": 566 + }, + { + "epoch": 0.05113405780763854, + "grad_norm": 1.885954518466574, + "learning_rate": 3.995329734872702e-06, + "loss": 1.0358, + "step": 567 + }, + { + "epoch": 0.051224241331108804, + "grad_norm": 1.593919483223743, + "learning_rate": 3.9952897484934706e-06, + "loss": 1.1348, + "step": 568 + }, + { + "epoch": 0.05131442485457907, + "grad_norm": 1.7118940074766524, + "learning_rate": 3.995249591865115e-06, + "loss": 1.0267, + "step": 569 + }, + { + "epoch": 0.05140460837804933, + "grad_norm": 1.729536024730287, + "learning_rate": 3.995209264991063e-06, + "loss": 0.9767, + "step": 570 + }, + { + "epoch": 0.05149479190151959, + "grad_norm": 1.569875855731222, + "learning_rate": 3.995168767874756e-06, + "loss": 1.0672, + "step": 571 + }, + { + "epoch": 0.051584975424989854, + "grad_norm": 1.323884229689632, + "learning_rate": 3.995128100519648e-06, + "loss": 1.0042, + "step": 572 + }, + { + "epoch": 0.051675158948460115, + "grad_norm": 1.7137340526548335, + "learning_rate": 3.995087262929209e-06, + "loss": 1.0397, + "step": 573 + }, + { + "epoch": 0.051765342471930376, + "grad_norm": 1.6406214396120222, + "learning_rate": 3.995046255106925e-06, + "loss": 1.0271, + "step": 574 + }, + { + "epoch": 0.05185552599540064, + "grad_norm": 1.5128796282308752, + "learning_rate": 3.995005077056293e-06, + "loss": 1.0835, + "step": 575 + }, + { + "epoch": 0.051945709518870904, + "grad_norm": 2.2332275218475073, + "learning_rate": 3.9949637287808284e-06, + "loss": 0.9157, + "step": 576 + }, + { + "epoch": 0.052035893042341165, + "grad_norm": 1.3250632504895197, + "learning_rate": 3.994922210284057e-06, + "loss": 0.9679, + "step": 577 + }, + { + "epoch": 0.052126076565811426, + "grad_norm": 1.7466604520721145, + "learning_rate": 3.994880521569524e-06, + "loss": 1.0508, + "step": 578 + }, + { + "epoch": 0.05221626008928169, + "grad_norm": 1.5162087329050942, + "learning_rate": 3.994838662640785e-06, + "loss": 1.0309, + "step": 579 + }, + { + "epoch": 0.05230644361275195, + "grad_norm": 1.9981192447983969, + "learning_rate": 3.9947966335014116e-06, + "loss": 1.0598, + "step": 580 + }, + { + "epoch": 0.052396627136222215, + "grad_norm": 2.054232586879123, + "learning_rate": 3.99475443415499e-06, + "loss": 0.9991, + "step": 581 + }, + { + "epoch": 0.052486810659692476, + "grad_norm": 1.4920853185369727, + "learning_rate": 3.994712064605121e-06, + "loss": 1.0155, + "step": 582 + }, + { + "epoch": 0.05257699418316274, + "grad_norm": 2.2873840427712957, + "learning_rate": 3.99466952485542e-06, + "loss": 1.1671, + "step": 583 + }, + { + "epoch": 0.052667177706633, + "grad_norm": 1.6579306370729057, + "learning_rate": 3.994626814909518e-06, + "loss": 1.0409, + "step": 584 + }, + { + "epoch": 0.05275736123010326, + "grad_norm": 1.5492675465773127, + "learning_rate": 3.994583934771056e-06, + "loss": 1.0043, + "step": 585 + }, + { + "epoch": 0.05284754475357352, + "grad_norm": 1.4212958026037879, + "learning_rate": 3.9945408844436955e-06, + "loss": 1.0726, + "step": 586 + }, + { + "epoch": 0.05293772827704379, + "grad_norm": 1.6934685589394853, + "learning_rate": 3.994497663931109e-06, + "loss": 1.049, + "step": 587 + }, + { + "epoch": 0.05302791180051405, + 
"grad_norm": 1.5459246557907782, + "learning_rate": 3.994454273236984e-06, + "loss": 1.0364, + "step": 588 + }, + { + "epoch": 0.05311809532398431, + "grad_norm": 1.3266699505929862, + "learning_rate": 3.994410712365023e-06, + "loss": 1.0261, + "step": 589 + }, + { + "epoch": 0.05320827884745457, + "grad_norm": 1.8720101997065015, + "learning_rate": 3.994366981318943e-06, + "loss": 1.1193, + "step": 590 + }, + { + "epoch": 0.05329846237092483, + "grad_norm": 1.5546326747409474, + "learning_rate": 3.9943230801024765e-06, + "loss": 1.0241, + "step": 591 + }, + { + "epoch": 0.05338864589439509, + "grad_norm": 1.7355383330793268, + "learning_rate": 3.9942790087193666e-06, + "loss": 1.0548, + "step": 592 + }, + { + "epoch": 0.05347882941786536, + "grad_norm": 1.606611527305344, + "learning_rate": 3.994234767173376e-06, + "loss": 1.018, + "step": 593 + }, + { + "epoch": 0.05356901294133562, + "grad_norm": 1.6562392756276836, + "learning_rate": 3.994190355468279e-06, + "loss": 0.9118, + "step": 594 + }, + { + "epoch": 0.05365919646480588, + "grad_norm": 1.4103653348581247, + "learning_rate": 3.994145773607865e-06, + "loss": 1.019, + "step": 595 + }, + { + "epoch": 0.05374937998827614, + "grad_norm": 1.573258630156355, + "learning_rate": 3.994101021595938e-06, + "loss": 0.9904, + "step": 596 + }, + { + "epoch": 0.0538395635117464, + "grad_norm": 1.5191430230880725, + "learning_rate": 3.9940560994363165e-06, + "loss": 1.0405, + "step": 597 + }, + { + "epoch": 0.05392974703521666, + "grad_norm": 1.6027959772256093, + "learning_rate": 3.994011007132833e-06, + "loss": 0.9002, + "step": 598 + }, + { + "epoch": 0.05401993055868693, + "grad_norm": 0.8778099513059332, + "learning_rate": 3.993965744689337e-06, + "loss": 0.8641, + "step": 599 + }, + { + "epoch": 0.05411011408215719, + "grad_norm": 1.8423624151405538, + "learning_rate": 3.993920312109687e-06, + "loss": 1.0013, + "step": 600 + }, + { + "epoch": 0.05420029760562745, + "grad_norm": 1.6457107916815694, + "learning_rate": 3.993874709397764e-06, + "loss": 1.0237, + "step": 601 + }, + { + "epoch": 0.05429048112909771, + "grad_norm": 1.7784563208431898, + "learning_rate": 3.993828936557454e-06, + "loss": 1.0122, + "step": 602 + }, + { + "epoch": 0.054380664652567974, + "grad_norm": 1.4286480235955552, + "learning_rate": 3.993782993592667e-06, + "loss": 0.9846, + "step": 603 + }, + { + "epoch": 0.054470848176038235, + "grad_norm": 1.6352069582597006, + "learning_rate": 3.993736880507321e-06, + "loss": 1.0595, + "step": 604 + }, + { + "epoch": 0.0545610316995085, + "grad_norm": 1.7371604477871931, + "learning_rate": 3.99369059730535e-06, + "loss": 0.962, + "step": 605 + }, + { + "epoch": 0.05465121522297876, + "grad_norm": 1.2861889803011437, + "learning_rate": 3.993644143990706e-06, + "loss": 1.0233, + "step": 606 + }, + { + "epoch": 0.054741398746449024, + "grad_norm": 1.8099042128407827, + "learning_rate": 3.99359752056735e-06, + "loss": 0.944, + "step": 607 + }, + { + "epoch": 0.054831582269919285, + "grad_norm": 1.7391528741021247, + "learning_rate": 3.993550727039261e-06, + "loss": 0.876, + "step": 608 + }, + { + "epoch": 0.054921765793389546, + "grad_norm": 1.557156543121589, + "learning_rate": 3.993503763410431e-06, + "loss": 1.037, + "step": 609 + }, + { + "epoch": 0.05501194931685981, + "grad_norm": 1.677977787430049, + "learning_rate": 3.9934566296848686e-06, + "loss": 0.9432, + "step": 610 + }, + { + "epoch": 0.055102132840330074, + "grad_norm": 1.7881087380356113, + "learning_rate": 3.993409325866595e-06, + "loss": 1.1036, + "step": 611 + 
}, + { + "epoch": 0.055192316363800335, + "grad_norm": 0.8076687004146667, + "learning_rate": 3.993361851959645e-06, + "loss": 0.839, + "step": 612 + }, + { + "epoch": 0.055282499887270596, + "grad_norm": 1.3228151878223287, + "learning_rate": 3.993314207968071e-06, + "loss": 1.072, + "step": 613 + }, + { + "epoch": 0.05537268341074086, + "grad_norm": 1.5897614928764592, + "learning_rate": 3.993266393895938e-06, + "loss": 1.0072, + "step": 614 + }, + { + "epoch": 0.05546286693421112, + "grad_norm": 1.6893352136644126, + "learning_rate": 3.993218409747326e-06, + "loss": 1.036, + "step": 615 + }, + { + "epoch": 0.05555305045768138, + "grad_norm": 1.3744205207450717, + "learning_rate": 3.993170255526328e-06, + "loss": 1.0185, + "step": 616 + }, + { + "epoch": 0.055643233981151646, + "grad_norm": 1.455056823984187, + "learning_rate": 3.993121931237054e-06, + "loss": 1.042, + "step": 617 + }, + { + "epoch": 0.05573341750462191, + "grad_norm": 1.5008243044267968, + "learning_rate": 3.993073436883627e-06, + "loss": 1.035, + "step": 618 + }, + { + "epoch": 0.05582360102809217, + "grad_norm": 1.4405910595518063, + "learning_rate": 3.993024772470184e-06, + "loss": 1.0028, + "step": 619 + }, + { + "epoch": 0.05591378455156243, + "grad_norm": 1.679888336239974, + "learning_rate": 3.992975938000878e-06, + "loss": 1.0041, + "step": 620 + }, + { + "epoch": 0.05600396807503269, + "grad_norm": 0.7057614383615137, + "learning_rate": 3.992926933479876e-06, + "loss": 0.798, + "step": 621 + }, + { + "epoch": 0.05609415159850295, + "grad_norm": 1.7193405523978165, + "learning_rate": 3.9928777589113595e-06, + "loss": 1.0424, + "step": 622 + }, + { + "epoch": 0.05618433512197322, + "grad_norm": 1.8323105719109536, + "learning_rate": 3.992828414299524e-06, + "loss": 0.9766, + "step": 623 + }, + { + "epoch": 0.05627451864544348, + "grad_norm": 1.3174430318745483, + "learning_rate": 3.992778899648579e-06, + "loss": 1.0178, + "step": 624 + }, + { + "epoch": 0.05636470216891374, + "grad_norm": 1.5175944637955716, + "learning_rate": 3.992729214962751e-06, + "loss": 1.0437, + "step": 625 + }, + { + "epoch": 0.056454885692384, + "grad_norm": 1.8751534081326178, + "learning_rate": 3.992679360246279e-06, + "loss": 1.0533, + "step": 626 + }, + { + "epoch": 0.05654506921585426, + "grad_norm": 1.6745022247609498, + "learning_rate": 3.992629335503416e-06, + "loss": 1.0426, + "step": 627 + }, + { + "epoch": 0.05663525273932452, + "grad_norm": 1.3597579065995302, + "learning_rate": 3.9925791407384304e-06, + "loss": 1.0066, + "step": 628 + }, + { + "epoch": 0.05672543626279479, + "grad_norm": 2.7626424105431187, + "learning_rate": 3.992528775955606e-06, + "loss": 1.0637, + "step": 629 + }, + { + "epoch": 0.05681561978626505, + "grad_norm": 1.64966403835383, + "learning_rate": 3.992478241159239e-06, + "loss": 0.9947, + "step": 630 + }, + { + "epoch": 0.05690580330973531, + "grad_norm": 2.107284074720308, + "learning_rate": 3.992427536353643e-06, + "loss": 1.046, + "step": 631 + }, + { + "epoch": 0.05699598683320557, + "grad_norm": 1.6283035444009635, + "learning_rate": 3.992376661543143e-06, + "loss": 1.0049, + "step": 632 + }, + { + "epoch": 0.05708617035667583, + "grad_norm": 1.782925269716883, + "learning_rate": 3.992325616732081e-06, + "loss": 0.9948, + "step": 633 + }, + { + "epoch": 0.057176353880146094, + "grad_norm": 1.4215728941860482, + "learning_rate": 3.992274401924811e-06, + "loss": 1.1237, + "step": 634 + }, + { + "epoch": 0.05726653740361636, + "grad_norm": 1.357732284511803, + "learning_rate": 
3.992223017125704e-06, + "loss": 0.9324, + "step": 635 + }, + { + "epoch": 0.05735672092708662, + "grad_norm": 1.51917794246259, + "learning_rate": 3.992171462339145e-06, + "loss": 1.0517, + "step": 636 + }, + { + "epoch": 0.057446904450556883, + "grad_norm": 1.5369542587711216, + "learning_rate": 3.992119737569532e-06, + "loss": 1.09, + "step": 637 + }, + { + "epoch": 0.057537087974027144, + "grad_norm": 1.6384935611648395, + "learning_rate": 3.992067842821277e-06, + "loss": 1.0156, + "step": 638 + }, + { + "epoch": 0.057627271497497405, + "grad_norm": 1.8995339951491832, + "learning_rate": 3.99201577809881e-06, + "loss": 1.1469, + "step": 639 + }, + { + "epoch": 0.057717455020967666, + "grad_norm": 1.7943353369144934, + "learning_rate": 3.991963543406574e-06, + "loss": 1.0366, + "step": 640 + }, + { + "epoch": 0.057807638544437934, + "grad_norm": 1.6086936174899815, + "learning_rate": 3.991911138749024e-06, + "loss": 1.0246, + "step": 641 + }, + { + "epoch": 0.057897822067908195, + "grad_norm": 1.5964431447491338, + "learning_rate": 3.991858564130633e-06, + "loss": 1.0485, + "step": 642 + }, + { + "epoch": 0.057988005591378455, + "grad_norm": 1.7782930291401369, + "learning_rate": 3.991805819555885e-06, + "loss": 0.9892, + "step": 643 + }, + { + "epoch": 0.058078189114848716, + "grad_norm": 1.4278221259958386, + "learning_rate": 3.991752905029283e-06, + "loss": 1.0228, + "step": 644 + }, + { + "epoch": 0.05816837263831898, + "grad_norm": 1.4284869714021966, + "learning_rate": 3.991699820555341e-06, + "loss": 0.8919, + "step": 645 + }, + { + "epoch": 0.05825855616178924, + "grad_norm": 1.5021227280754412, + "learning_rate": 3.991646566138588e-06, + "loss": 1.0209, + "step": 646 + }, + { + "epoch": 0.058348739685259506, + "grad_norm": 1.3028908882888828, + "learning_rate": 3.991593141783567e-06, + "loss": 0.9423, + "step": 647 + }, + { + "epoch": 0.058438923208729766, + "grad_norm": 1.6343637790622965, + "learning_rate": 3.991539547494839e-06, + "loss": 1.0359, + "step": 648 + }, + { + "epoch": 0.05852910673220003, + "grad_norm": 1.5669579806606584, + "learning_rate": 3.991485783276974e-06, + "loss": 1.0112, + "step": 649 + }, + { + "epoch": 0.05861929025567029, + "grad_norm": 1.3214302072183508, + "learning_rate": 3.991431849134563e-06, + "loss": 1.0711, + "step": 650 + }, + { + "epoch": 0.05870947377914055, + "grad_norm": 1.7124257343088112, + "learning_rate": 3.991377745072205e-06, + "loss": 0.9143, + "step": 651 + }, + { + "epoch": 0.05879965730261081, + "grad_norm": 1.417154798372231, + "learning_rate": 3.991323471094517e-06, + "loss": 1.044, + "step": 652 + }, + { + "epoch": 0.05888984082608108, + "grad_norm": 1.59737671736092, + "learning_rate": 3.991269027206131e-06, + "loss": 1.0013, + "step": 653 + }, + { + "epoch": 0.05898002434955134, + "grad_norm": 1.1384637292337256, + "learning_rate": 3.9912144134116916e-06, + "loss": 0.8777, + "step": 654 + }, + { + "epoch": 0.0590702078730216, + "grad_norm": 1.5824603970409892, + "learning_rate": 3.99115962971586e-06, + "loss": 0.9414, + "step": 655 + }, + { + "epoch": 0.05916039139649186, + "grad_norm": 1.928353161040863, + "learning_rate": 3.991104676123308e-06, + "loss": 1.055, + "step": 656 + }, + { + "epoch": 0.05925057491996212, + "grad_norm": 0.928384910180309, + "learning_rate": 3.991049552638727e-06, + "loss": 0.8243, + "step": 657 + }, + { + "epoch": 0.05934075844343238, + "grad_norm": 1.7679500293962669, + "learning_rate": 3.99099425926682e-06, + "loss": 0.9684, + "step": 658 + }, + { + "epoch": 0.05943094196690265, + 
"grad_norm": 1.459975634854951, + "learning_rate": 3.990938796012304e-06, + "loss": 0.988, + "step": 659 + }, + { + "epoch": 0.05952112549037291, + "grad_norm": 1.6556372318701604, + "learning_rate": 3.990883162879912e-06, + "loss": 1.1668, + "step": 660 + }, + { + "epoch": 0.05961130901384317, + "grad_norm": 1.554532757443092, + "learning_rate": 3.990827359874391e-06, + "loss": 1.0848, + "step": 661 + }, + { + "epoch": 0.05970149253731343, + "grad_norm": 1.63424619674065, + "learning_rate": 3.990771387000503e-06, + "loss": 1.0176, + "step": 662 + }, + { + "epoch": 0.05979167606078369, + "grad_norm": 1.6953526505317167, + "learning_rate": 3.990715244263023e-06, + "loss": 1.0436, + "step": 663 + }, + { + "epoch": 0.05988185958425396, + "grad_norm": 0.7360957661914683, + "learning_rate": 3.990658931666741e-06, + "loss": 0.8252, + "step": 664 + }, + { + "epoch": 0.05997204310772422, + "grad_norm": 1.5723248393595133, + "learning_rate": 3.990602449216463e-06, + "loss": 0.941, + "step": 665 + }, + { + "epoch": 0.06006222663119448, + "grad_norm": 1.8519097497507604, + "learning_rate": 3.990545796917008e-06, + "loss": 0.919, + "step": 666 + }, + { + "epoch": 0.06015241015466474, + "grad_norm": 1.3858473497937054, + "learning_rate": 3.99048897477321e-06, + "loss": 1.0719, + "step": 667 + }, + { + "epoch": 0.060242593678135004, + "grad_norm": 1.6607892434098919, + "learning_rate": 3.990431982789917e-06, + "loss": 1.031, + "step": 668 + }, + { + "epoch": 0.060332777201605264, + "grad_norm": 1.4493126029088126, + "learning_rate": 3.990374820971992e-06, + "loss": 0.9519, + "step": 669 + }, + { + "epoch": 0.06042296072507553, + "grad_norm": 1.607767209378084, + "learning_rate": 3.990317489324312e-06, + "loss": 0.9087, + "step": 670 + }, + { + "epoch": 0.06051314424854579, + "grad_norm": 1.6292679565760697, + "learning_rate": 3.99025998785177e-06, + "loss": 1.0044, + "step": 671 + }, + { + "epoch": 0.060603327772016054, + "grad_norm": 1.4252035564462815, + "learning_rate": 3.990202316559271e-06, + "loss": 0.9553, + "step": 672 + }, + { + "epoch": 0.060693511295486315, + "grad_norm": 2.3352227281620292, + "learning_rate": 3.990144475451738e-06, + "loss": 1.0648, + "step": 673 + }, + { + "epoch": 0.060783694818956575, + "grad_norm": 1.2998301835285881, + "learning_rate": 3.9900864645341036e-06, + "loss": 1.0162, + "step": 674 + }, + { + "epoch": 0.060873878342426836, + "grad_norm": 1.6748728746596715, + "learning_rate": 3.990028283811319e-06, + "loss": 1.0231, + "step": 675 + }, + { + "epoch": 0.060964061865897104, + "grad_norm": 1.5104108963779415, + "learning_rate": 3.989969933288348e-06, + "loss": 1.0121, + "step": 676 + }, + { + "epoch": 0.061054245389367365, + "grad_norm": 0.8666673767258107, + "learning_rate": 3.98991141297017e-06, + "loss": 0.7852, + "step": 677 + }, + { + "epoch": 0.061144428912837626, + "grad_norm": 1.335426322001497, + "learning_rate": 3.989852722861778e-06, + "loss": 0.9561, + "step": 678 + }, + { + "epoch": 0.061234612436307886, + "grad_norm": 0.7904125033603939, + "learning_rate": 3.98979386296818e-06, + "loss": 0.794, + "step": 679 + }, + { + "epoch": 0.06132479595977815, + "grad_norm": 1.7999621864161692, + "learning_rate": 3.989734833294398e-06, + "loss": 0.9999, + "step": 680 + }, + { + "epoch": 0.06141497948324841, + "grad_norm": 1.5990243291334008, + "learning_rate": 3.989675633845469e-06, + "loss": 1.0912, + "step": 681 + }, + { + "epoch": 0.061505163006718676, + "grad_norm": 2.1903018990672534, + "learning_rate": 3.989616264626443e-06, + "loss": 0.9954, + "step": 682 
+ }, + { + "epoch": 0.06159534653018894, + "grad_norm": 1.2756177340744004, + "learning_rate": 3.989556725642388e-06, + "loss": 1.0498, + "step": 683 + }, + { + "epoch": 0.0616855300536592, + "grad_norm": 2.024710472036669, + "learning_rate": 3.989497016898382e-06, + "loss": 1.0638, + "step": 684 + }, + { + "epoch": 0.06177571357712946, + "grad_norm": 1.3169761804962559, + "learning_rate": 3.98943713839952e-06, + "loss": 1.0037, + "step": 685 + }, + { + "epoch": 0.06186589710059972, + "grad_norm": 0.7926390765927973, + "learning_rate": 3.9893770901509125e-06, + "loss": 0.8326, + "step": 686 + }, + { + "epoch": 0.06195608062406998, + "grad_norm": 1.4879923863062345, + "learning_rate": 3.989316872157682e-06, + "loss": 1.0104, + "step": 687 + }, + { + "epoch": 0.06204626414754025, + "grad_norm": 1.6262520588210119, + "learning_rate": 3.989256484424968e-06, + "loss": 1.0871, + "step": 688 + }, + { + "epoch": 0.06213644767101051, + "grad_norm": 1.49766000380068, + "learning_rate": 3.98919592695792e-06, + "loss": 0.9808, + "step": 689 + }, + { + "epoch": 0.06222663119448077, + "grad_norm": 1.4859897865057368, + "learning_rate": 3.9891351997617096e-06, + "loss": 1.0435, + "step": 690 + }, + { + "epoch": 0.06231681471795103, + "grad_norm": 1.6206644615844596, + "learning_rate": 3.989074302841514e-06, + "loss": 0.9583, + "step": 691 + }, + { + "epoch": 0.06240699824142129, + "grad_norm": 1.6183099951405182, + "learning_rate": 3.989013236202533e-06, + "loss": 1.1294, + "step": 692 + }, + { + "epoch": 0.06249718176489155, + "grad_norm": 1.3343393682863858, + "learning_rate": 3.988951999849974e-06, + "loss": 1.0507, + "step": 693 + }, + { + "epoch": 0.06258736528836181, + "grad_norm": 1.7281980750860004, + "learning_rate": 3.988890593789064e-06, + "loss": 0.9322, + "step": 694 + }, + { + "epoch": 0.06267754881183207, + "grad_norm": 1.2916165567748046, + "learning_rate": 3.9888290180250415e-06, + "loss": 0.9962, + "step": 695 + }, + { + "epoch": 0.06276773233530233, + "grad_norm": 1.4044035763585538, + "learning_rate": 3.988767272563161e-06, + "loss": 0.978, + "step": 696 + }, + { + "epoch": 0.06285791585877261, + "grad_norm": 1.5488451347042405, + "learning_rate": 3.988705357408691e-06, + "loss": 0.9551, + "step": 697 + }, + { + "epoch": 0.06294809938224287, + "grad_norm": 1.4837057914028997, + "learning_rate": 3.9886432725669146e-06, + "loss": 0.9701, + "step": 698 + }, + { + "epoch": 0.06303828290571313, + "grad_norm": 1.5481893946931073, + "learning_rate": 3.988581018043128e-06, + "loss": 0.9743, + "step": 699 + }, + { + "epoch": 0.06312846642918339, + "grad_norm": 1.5560455525013883, + "learning_rate": 3.988518593842645e-06, + "loss": 1.0373, + "step": 700 + }, + { + "epoch": 0.06321864995265365, + "grad_norm": 1.5514192205773025, + "learning_rate": 3.9884559999707906e-06, + "loss": 1.0075, + "step": 701 + }, + { + "epoch": 0.06330883347612391, + "grad_norm": 1.5411912167763937, + "learning_rate": 3.988393236432906e-06, + "loss": 1.0285, + "step": 702 + }, + { + "epoch": 0.06339901699959417, + "grad_norm": 1.4995432794169306, + "learning_rate": 3.988330303234347e-06, + "loss": 1.0917, + "step": 703 + }, + { + "epoch": 0.06348920052306443, + "grad_norm": 1.4337837446743178, + "learning_rate": 3.988267200380483e-06, + "loss": 0.9666, + "step": 704 + }, + { + "epoch": 0.0635793840465347, + "grad_norm": 1.280561448210322, + "learning_rate": 3.988203927876698e-06, + "loss": 1.0608, + "step": 705 + }, + { + "epoch": 0.06366956757000496, + "grad_norm": 1.428905335640341, + "learning_rate": 
3.988140485728391e-06, + "loss": 0.8983, + "step": 706 + }, + { + "epoch": 0.06375975109347522, + "grad_norm": 1.4049563497502955, + "learning_rate": 3.988076873940975e-06, + "loss": 0.9629, + "step": 707 + }, + { + "epoch": 0.06384993461694548, + "grad_norm": 1.845229831946603, + "learning_rate": 3.9880130925198786e-06, + "loss": 1.0113, + "step": 708 + }, + { + "epoch": 0.06394011814041575, + "grad_norm": 1.8437267237341666, + "learning_rate": 3.987949141470543e-06, + "loss": 1.0303, + "step": 709 + }, + { + "epoch": 0.06403030166388601, + "grad_norm": 1.415488351453945, + "learning_rate": 3.987885020798425e-06, + "loss": 1.0076, + "step": 710 + }, + { + "epoch": 0.06412048518735627, + "grad_norm": 1.4404301840571179, + "learning_rate": 3.987820730508996e-06, + "loss": 0.9847, + "step": 711 + }, + { + "epoch": 0.06421066871082654, + "grad_norm": 1.4708568801652053, + "learning_rate": 3.987756270607742e-06, + "loss": 1.1106, + "step": 712 + }, + { + "epoch": 0.0643008522342968, + "grad_norm": 0.8478524117332622, + "learning_rate": 3.987691641100162e-06, + "loss": 0.8099, + "step": 713 + }, + { + "epoch": 0.06439103575776706, + "grad_norm": 1.7719501190139868, + "learning_rate": 3.987626841991771e-06, + "loss": 1.1029, + "step": 714 + }, + { + "epoch": 0.06448121928123732, + "grad_norm": 1.7417964068675433, + "learning_rate": 3.987561873288099e-06, + "loss": 1.0507, + "step": 715 + }, + { + "epoch": 0.06457140280470758, + "grad_norm": 1.5570976706356758, + "learning_rate": 3.987496734994688e-06, + "loss": 0.9612, + "step": 716 + }, + { + "epoch": 0.06466158632817784, + "grad_norm": 1.806637985639964, + "learning_rate": 3.987431427117097e-06, + "loss": 1.0408, + "step": 717 + }, + { + "epoch": 0.0647517698516481, + "grad_norm": 1.91199397205456, + "learning_rate": 3.9873659496608985e-06, + "loss": 1.0274, + "step": 718 + }, + { + "epoch": 0.06484195337511836, + "grad_norm": 1.5930880499558426, + "learning_rate": 3.987300302631678e-06, + "loss": 0.9649, + "step": 719 + }, + { + "epoch": 0.06493213689858862, + "grad_norm": 1.561835033062618, + "learning_rate": 3.987234486035039e-06, + "loss": 1.0473, + "step": 720 + }, + { + "epoch": 0.0650223204220589, + "grad_norm": 1.786133279690856, + "learning_rate": 3.987168499876595e-06, + "loss": 1.0576, + "step": 721 + }, + { + "epoch": 0.06511250394552916, + "grad_norm": 1.8897490560865107, + "learning_rate": 3.987102344161978e-06, + "loss": 1.0243, + "step": 722 + }, + { + "epoch": 0.06520268746899942, + "grad_norm": 1.5117059751458775, + "learning_rate": 3.987036018896832e-06, + "loss": 1.0471, + "step": 723 + }, + { + "epoch": 0.06529287099246968, + "grad_norm": 1.4999816416570662, + "learning_rate": 3.986969524086817e-06, + "loss": 1.0775, + "step": 724 + }, + { + "epoch": 0.06538305451593994, + "grad_norm": 1.8743366975550022, + "learning_rate": 3.986902859737605e-06, + "loss": 0.9977, + "step": 725 + }, + { + "epoch": 0.0654732380394102, + "grad_norm": 1.6055407728389495, + "learning_rate": 3.986836025854886e-06, + "loss": 0.9512, + "step": 726 + }, + { + "epoch": 0.06556342156288046, + "grad_norm": 1.5387482298059143, + "learning_rate": 3.986769022444362e-06, + "loss": 0.9699, + "step": 727 + }, + { + "epoch": 0.06565360508635072, + "grad_norm": 1.5267888692952305, + "learning_rate": 3.986701849511751e-06, + "loss": 1.0101, + "step": 728 + }, + { + "epoch": 0.06574378860982098, + "grad_norm": 1.7956485709300702, + "learning_rate": 3.986634507062782e-06, + "loss": 0.9943, + "step": 729 + }, + { + "epoch": 0.06583397213329124, + "grad_norm": 
1.695425425341465, + "learning_rate": 3.986566995103204e-06, + "loss": 1.0303, + "step": 730 + }, + { + "epoch": 0.0659241556567615, + "grad_norm": 1.5436384461904844, + "learning_rate": 3.986499313638776e-06, + "loss": 0.9566, + "step": 731 + }, + { + "epoch": 0.06601433918023177, + "grad_norm": 1.3577904509585765, + "learning_rate": 3.986431462675272e-06, + "loss": 0.9471, + "step": 732 + }, + { + "epoch": 0.06610452270370204, + "grad_norm": 1.2617487948120722, + "learning_rate": 3.9863634422184835e-06, + "loss": 1.0259, + "step": 733 + }, + { + "epoch": 0.0661947062271723, + "grad_norm": 1.546262754790141, + "learning_rate": 3.986295252274213e-06, + "loss": 1.0488, + "step": 734 + }, + { + "epoch": 0.06628488975064256, + "grad_norm": 1.2855603820406483, + "learning_rate": 3.9862268928482796e-06, + "loss": 1.061, + "step": 735 + }, + { + "epoch": 0.06637507327411282, + "grad_norm": 1.3447536446386683, + "learning_rate": 3.986158363946515e-06, + "loss": 0.9269, + "step": 736 + }, + { + "epoch": 0.06646525679758308, + "grad_norm": 1.525209697001455, + "learning_rate": 3.9860896655747685e-06, + "loss": 1.011, + "step": 737 + }, + { + "epoch": 0.06655544032105334, + "grad_norm": 1.5839940081396349, + "learning_rate": 3.9860207977388994e-06, + "loss": 1.0847, + "step": 738 + }, + { + "epoch": 0.0666456238445236, + "grad_norm": 4.9585837720353165, + "learning_rate": 3.9859517604447854e-06, + "loss": 0.9452, + "step": 739 + }, + { + "epoch": 0.06673580736799387, + "grad_norm": 1.5163043357881223, + "learning_rate": 3.985882553698317e-06, + "loss": 0.9865, + "step": 740 + }, + { + "epoch": 0.06682599089146413, + "grad_norm": 1.8411196680291546, + "learning_rate": 3.985813177505399e-06, + "loss": 1.0783, + "step": 741 + }, + { + "epoch": 0.06691617441493439, + "grad_norm": 1.7140441977011485, + "learning_rate": 3.985743631871951e-06, + "loss": 0.966, + "step": 742 + }, + { + "epoch": 0.06700635793840465, + "grad_norm": 1.3143314118170923, + "learning_rate": 3.985673916803907e-06, + "loss": 1.0177, + "step": 743 + }, + { + "epoch": 0.06709654146187491, + "grad_norm": 1.6760632115463667, + "learning_rate": 3.985604032307215e-06, + "loss": 1.0323, + "step": 744 + }, + { + "epoch": 0.06718672498534518, + "grad_norm": 1.4332551062476495, + "learning_rate": 3.985533978387839e-06, + "loss": 1.026, + "step": 745 + }, + { + "epoch": 0.06727690850881544, + "grad_norm": 1.41328514187152, + "learning_rate": 3.985463755051756e-06, + "loss": 1.0521, + "step": 746 + }, + { + "epoch": 0.0673670920322857, + "grad_norm": 1.3963708791428677, + "learning_rate": 3.9853933623049575e-06, + "loss": 1.0059, + "step": 747 + }, + { + "epoch": 0.06745727555575597, + "grad_norm": 1.641261603960096, + "learning_rate": 3.98532280015345e-06, + "loss": 1.0116, + "step": 748 + }, + { + "epoch": 0.06754745907922623, + "grad_norm": 1.4849272654281962, + "learning_rate": 3.985252068603254e-06, + "loss": 0.9315, + "step": 749 + }, + { + "epoch": 0.06763764260269649, + "grad_norm": 1.3866374677020026, + "learning_rate": 3.985181167660406e-06, + "loss": 1.0404, + "step": 750 + }, + { + "epoch": 0.06772782612616675, + "grad_norm": 1.3968885143744245, + "learning_rate": 3.985110097330953e-06, + "loss": 1.0347, + "step": 751 + }, + { + "epoch": 0.06781800964963701, + "grad_norm": 1.510806811577367, + "learning_rate": 3.985038857620962e-06, + "loss": 1.0103, + "step": 752 + }, + { + "epoch": 0.06790819317310727, + "grad_norm": 0.8007182026676507, + "learning_rate": 3.9849674485365094e-06, + "loss": 0.8138, + "step": 753 + }, + { + "epoch": 
0.06799837669657753, + "grad_norm": 2.1207275074435272, + "learning_rate": 3.98489587008369e-06, + "loss": 0.9473, + "step": 754 + }, + { + "epoch": 0.06808856022004779, + "grad_norm": 1.845267754124485, + "learning_rate": 3.98482412226861e-06, + "loss": 0.8576, + "step": 755 + }, + { + "epoch": 0.06817874374351805, + "grad_norm": 1.4430730016830906, + "learning_rate": 3.984752205097391e-06, + "loss": 1.0452, + "step": 756 + }, + { + "epoch": 0.06826892726698833, + "grad_norm": 3.4740993732301506, + "learning_rate": 3.984680118576171e-06, + "loss": 1.0211, + "step": 757 + }, + { + "epoch": 0.06835911079045859, + "grad_norm": 1.4048942388321521, + "learning_rate": 3.984607862711099e-06, + "loss": 0.9735, + "step": 758 + }, + { + "epoch": 0.06844929431392885, + "grad_norm": 1.9092795889783036, + "learning_rate": 3.984535437508341e-06, + "loss": 1.0446, + "step": 759 + }, + { + "epoch": 0.06853947783739911, + "grad_norm": 0.773138123187335, + "learning_rate": 3.984462842974078e-06, + "loss": 0.8163, + "step": 760 + }, + { + "epoch": 0.06862966136086937, + "grad_norm": 1.6245109849337087, + "learning_rate": 3.984390079114502e-06, + "loss": 1.0292, + "step": 761 + }, + { + "epoch": 0.06871984488433963, + "grad_norm": 1.3668556028506411, + "learning_rate": 3.984317145935824e-06, + "loss": 1.1337, + "step": 762 + }, + { + "epoch": 0.06881002840780989, + "grad_norm": 1.3401604992555756, + "learning_rate": 3.984244043444264e-06, + "loss": 0.9838, + "step": 763 + }, + { + "epoch": 0.06890021193128015, + "grad_norm": 1.2782950402450763, + "learning_rate": 3.984170771646062e-06, + "loss": 0.9626, + "step": 764 + }, + { + "epoch": 0.06899039545475041, + "grad_norm": 1.6415568294024168, + "learning_rate": 3.9840973305474695e-06, + "loss": 1.0541, + "step": 765 + }, + { + "epoch": 0.06908057897822067, + "grad_norm": 1.4815230427757897, + "learning_rate": 3.984023720154752e-06, + "loss": 1.0619, + "step": 766 + }, + { + "epoch": 0.06917076250169094, + "grad_norm": 1.6562643950214653, + "learning_rate": 3.9839499404741915e-06, + "loss": 1.0282, + "step": 767 + }, + { + "epoch": 0.0692609460251612, + "grad_norm": 0.905500101726659, + "learning_rate": 3.983875991512082e-06, + "loss": 0.8502, + "step": 768 + }, + { + "epoch": 0.06935112954863147, + "grad_norm": 1.5271771820637774, + "learning_rate": 3.9838018732747345e-06, + "loss": 1.0322, + "step": 769 + }, + { + "epoch": 0.06944131307210173, + "grad_norm": 1.5687465926053554, + "learning_rate": 3.9837275857684716e-06, + "loss": 0.9955, + "step": 770 + }, + { + "epoch": 0.06953149659557199, + "grad_norm": 1.892944426464911, + "learning_rate": 3.983653128999634e-06, + "loss": 1.1336, + "step": 771 + }, + { + "epoch": 0.06962168011904225, + "grad_norm": 1.3295161870922887, + "learning_rate": 3.983578502974574e-06, + "loss": 1.0443, + "step": 772 + }, + { + "epoch": 0.06971186364251251, + "grad_norm": 1.852589833273218, + "learning_rate": 3.983503707699658e-06, + "loss": 1.0014, + "step": 773 + }, + { + "epoch": 0.06980204716598278, + "grad_norm": 6.286601202183336, + "learning_rate": 3.983428743181268e-06, + "loss": 1.0414, + "step": 774 + }, + { + "epoch": 0.06989223068945304, + "grad_norm": 1.6413430232163049, + "learning_rate": 3.983353609425802e-06, + "loss": 1.087, + "step": 775 + }, + { + "epoch": 0.0699824142129233, + "grad_norm": 1.7267314431229055, + "learning_rate": 3.983278306439671e-06, + "loss": 1.0161, + "step": 776 + }, + { + "epoch": 0.07007259773639356, + "grad_norm": 1.39047621884769, + "learning_rate": 3.983202834229297e-06, + "loss": 
1.0471, + "step": 777 + }, + { + "epoch": 0.07016278125986382, + "grad_norm": 1.3646400041921551, + "learning_rate": 3.983127192801123e-06, + "loss": 1.0929, + "step": 778 + }, + { + "epoch": 0.07025296478333408, + "grad_norm": 1.4346733497714501, + "learning_rate": 3.983051382161602e-06, + "loss": 1.0506, + "step": 779 + }, + { + "epoch": 0.07034314830680434, + "grad_norm": 1.5388463831944228, + "learning_rate": 3.982975402317203e-06, + "loss": 1.039, + "step": 780 + }, + { + "epoch": 0.07043333183027461, + "grad_norm": 1.7632436982408264, + "learning_rate": 3.982899253274409e-06, + "loss": 1.0402, + "step": 781 + }, + { + "epoch": 0.07052351535374488, + "grad_norm": 1.8214842336361328, + "learning_rate": 3.982822935039717e-06, + "loss": 1.0, + "step": 782 + }, + { + "epoch": 0.07061369887721514, + "grad_norm": 1.4938102805771254, + "learning_rate": 3.982746447619638e-06, + "loss": 0.9456, + "step": 783 + }, + { + "epoch": 0.0707038824006854, + "grad_norm": 1.0209319333058118, + "learning_rate": 3.9826697910207e-06, + "loss": 0.8254, + "step": 784 + }, + { + "epoch": 0.07079406592415566, + "grad_norm": 1.5374712003166038, + "learning_rate": 3.982592965249442e-06, + "loss": 0.9978, + "step": 785 + }, + { + "epoch": 0.07088424944762592, + "grad_norm": 1.7862307864224267, + "learning_rate": 3.982515970312422e-06, + "loss": 1.0617, + "step": 786 + }, + { + "epoch": 0.07097443297109618, + "grad_norm": 1.67660580425384, + "learning_rate": 3.982438806216207e-06, + "loss": 0.9859, + "step": 787 + }, + { + "epoch": 0.07106461649456644, + "grad_norm": 1.5343287909672079, + "learning_rate": 3.982361472967382e-06, + "loss": 0.9999, + "step": 788 + }, + { + "epoch": 0.0711548000180367, + "grad_norm": 1.612988387674438, + "learning_rate": 3.982283970572546e-06, + "loss": 1.0848, + "step": 789 + }, + { + "epoch": 0.07124498354150696, + "grad_norm": 1.6623262472349083, + "learning_rate": 3.982206299038311e-06, + "loss": 0.9874, + "step": 790 + }, + { + "epoch": 0.07133516706497722, + "grad_norm": 1.3997597862973943, + "learning_rate": 3.9821284583713054e-06, + "loss": 1.084, + "step": 791 + }, + { + "epoch": 0.07142535058844748, + "grad_norm": 1.818075286930424, + "learning_rate": 3.98205044857817e-06, + "loss": 1.0011, + "step": 792 + }, + { + "epoch": 0.07151553411191776, + "grad_norm": 1.130466689481617, + "learning_rate": 3.981972269665561e-06, + "loss": 0.8632, + "step": 793 + }, + { + "epoch": 0.07160571763538802, + "grad_norm": 1.6225767407247693, + "learning_rate": 3.98189392164015e-06, + "loss": 1.0027, + "step": 794 + }, + { + "epoch": 0.07169590115885828, + "grad_norm": 1.7166900775140246, + "learning_rate": 3.981815404508621e-06, + "loss": 1.0536, + "step": 795 + }, + { + "epoch": 0.07178608468232854, + "grad_norm": 1.4750430246317463, + "learning_rate": 3.981736718277674e-06, + "loss": 1.0132, + "step": 796 + }, + { + "epoch": 0.0718762682057988, + "grad_norm": 1.4823614015168607, + "learning_rate": 3.9816578629540235e-06, + "loss": 1.0525, + "step": 797 + }, + { + "epoch": 0.07196645172926906, + "grad_norm": 1.803760769521771, + "learning_rate": 3.981578838544398e-06, + "loss": 1.0217, + "step": 798 + }, + { + "epoch": 0.07205663525273932, + "grad_norm": 1.5855977864349111, + "learning_rate": 3.981499645055539e-06, + "loss": 0.7949, + "step": 799 + }, + { + "epoch": 0.07214681877620958, + "grad_norm": 1.1617710678951785, + "learning_rate": 3.981420282494204e-06, + "loss": 0.8412, + "step": 800 + }, + { + "epoch": 0.07223700229967984, + "grad_norm": 1.1485158154001112, + "learning_rate": 
3.981340750867166e-06, + "loss": 0.8398, + "step": 801 + }, + { + "epoch": 0.0723271858231501, + "grad_norm": 1.6398252445247226, + "learning_rate": 3.981261050181209e-06, + "loss": 1.007, + "step": 802 + }, + { + "epoch": 0.07241736934662037, + "grad_norm": 1.7359385273241081, + "learning_rate": 3.9811811804431355e-06, + "loss": 0.9745, + "step": 803 + }, + { + "epoch": 0.07250755287009064, + "grad_norm": 1.8623895650803999, + "learning_rate": 3.981101141659759e-06, + "loss": 1.0504, + "step": 804 + }, + { + "epoch": 0.0725977363935609, + "grad_norm": 1.5914730187231358, + "learning_rate": 3.98102093383791e-06, + "loss": 1.0421, + "step": 805 + }, + { + "epoch": 0.07268791991703116, + "grad_norm": 1.649117352193452, + "learning_rate": 3.9809405569844315e-06, + "loss": 0.9366, + "step": 806 + }, + { + "epoch": 0.07277810344050142, + "grad_norm": 1.5735938864821426, + "learning_rate": 3.980860011106182e-06, + "loss": 0.9769, + "step": 807 + }, + { + "epoch": 0.07286828696397168, + "grad_norm": 1.2628333775211236, + "learning_rate": 3.980779296210033e-06, + "loss": 0.9502, + "step": 808 + }, + { + "epoch": 0.07295847048744195, + "grad_norm": 1.7357960970445627, + "learning_rate": 3.980698412302874e-06, + "loss": 1.089, + "step": 809 + }, + { + "epoch": 0.0730486540109122, + "grad_norm": 1.5450645409840569, + "learning_rate": 3.980617359391604e-06, + "loss": 1.0246, + "step": 810 + }, + { + "epoch": 0.07313883753438247, + "grad_norm": 1.4001671095295944, + "learning_rate": 3.98053613748314e-06, + "loss": 0.9696, + "step": 811 + }, + { + "epoch": 0.07322902105785273, + "grad_norm": 1.423057232341829, + "learning_rate": 3.980454746584413e-06, + "loss": 0.878, + "step": 812 + }, + { + "epoch": 0.07331920458132299, + "grad_norm": 1.7734160569024733, + "learning_rate": 3.9803731867023665e-06, + "loss": 1.094, + "step": 813 + }, + { + "epoch": 0.07340938810479325, + "grad_norm": 1.7154080585556861, + "learning_rate": 3.9802914578439596e-06, + "loss": 0.9986, + "step": 814 + }, + { + "epoch": 0.07349957162826351, + "grad_norm": 1.768391492656029, + "learning_rate": 3.980209560016167e-06, + "loss": 1.1065, + "step": 815 + }, + { + "epoch": 0.07358975515173379, + "grad_norm": 1.6802362299773335, + "learning_rate": 3.980127493225975e-06, + "loss": 1.1083, + "step": 816 + }, + { + "epoch": 0.07367993867520405, + "grad_norm": 1.678240769582385, + "learning_rate": 3.980045257480387e-06, + "loss": 1.025, + "step": 817 + }, + { + "epoch": 0.0737701221986743, + "grad_norm": 1.5911119356514365, + "learning_rate": 3.9799628527864205e-06, + "loss": 0.9696, + "step": 818 + }, + { + "epoch": 0.07386030572214457, + "grad_norm": 1.457693322841197, + "learning_rate": 3.979880279151106e-06, + "loss": 0.9913, + "step": 819 + }, + { + "epoch": 0.07395048924561483, + "grad_norm": 1.8777477475811661, + "learning_rate": 3.979797536581489e-06, + "loss": 0.9609, + "step": 820 + }, + { + "epoch": 0.07404067276908509, + "grad_norm": 1.7938415796381644, + "learning_rate": 3.97971462508463e-06, + "loss": 0.8708, + "step": 821 + }, + { + "epoch": 0.07413085629255535, + "grad_norm": 1.3216678953145022, + "learning_rate": 3.979631544667603e-06, + "loss": 0.9926, + "step": 822 + }, + { + "epoch": 0.07422103981602561, + "grad_norm": 1.5557150967088047, + "learning_rate": 3.979548295337496e-06, + "loss": 1.0233, + "step": 823 + }, + { + "epoch": 0.07431122333949587, + "grad_norm": 1.4750846450570245, + "learning_rate": 3.9794648771014146e-06, + "loss": 0.9777, + "step": 824 + }, + { + "epoch": 0.07440140686296613, + "grad_norm": 
1.458912225902845, + "learning_rate": 3.9793812899664745e-06, + "loss": 1.0207, + "step": 825 + }, + { + "epoch": 0.0744915903864364, + "grad_norm": 2.4094407365789134, + "learning_rate": 3.979297533939809e-06, + "loss": 0.9681, + "step": 826 + }, + { + "epoch": 0.07458177390990665, + "grad_norm": 1.772852393605381, + "learning_rate": 3.979213609028564e-06, + "loss": 0.9855, + "step": 827 + }, + { + "epoch": 0.07467195743337693, + "grad_norm": 1.541422394998086, + "learning_rate": 3.979129515239901e-06, + "loss": 1.0265, + "step": 828 + }, + { + "epoch": 0.07476214095684719, + "grad_norm": 1.2503510935766209, + "learning_rate": 3.979045252580994e-06, + "loss": 1.0203, + "step": 829 + }, + { + "epoch": 0.07485232448031745, + "grad_norm": 1.3810329301210287, + "learning_rate": 3.978960821059034e-06, + "loss": 0.9597, + "step": 830 + }, + { + "epoch": 0.07494250800378771, + "grad_norm": 1.3899222752226015, + "learning_rate": 3.978876220681225e-06, + "loss": 1.0441, + "step": 831 + }, + { + "epoch": 0.07503269152725797, + "grad_norm": 1.5255180823380643, + "learning_rate": 3.978791451454786e-06, + "loss": 0.9353, + "step": 832 + }, + { + "epoch": 0.07512287505072823, + "grad_norm": 1.4839801463935212, + "learning_rate": 3.978706513386949e-06, + "loss": 0.9788, + "step": 833 + }, + { + "epoch": 0.0752130585741985, + "grad_norm": 1.757897472447089, + "learning_rate": 3.978621406484962e-06, + "loss": 1.0267, + "step": 834 + }, + { + "epoch": 0.07530324209766875, + "grad_norm": 1.496031199609309, + "learning_rate": 3.978536130756086e-06, + "loss": 0.9999, + "step": 835 + }, + { + "epoch": 0.07539342562113902, + "grad_norm": 1.355710626397972, + "learning_rate": 3.978450686207599e-06, + "loss": 1.0635, + "step": 836 + }, + { + "epoch": 0.07548360914460928, + "grad_norm": 1.6867628960179355, + "learning_rate": 3.978365072846789e-06, + "loss": 0.9918, + "step": 837 + }, + { + "epoch": 0.07557379266807954, + "grad_norm": 1.569660184130052, + "learning_rate": 3.9782792906809625e-06, + "loss": 0.9642, + "step": 838 + }, + { + "epoch": 0.0756639761915498, + "grad_norm": 1.62631905645875, + "learning_rate": 3.97819333971744e-06, + "loss": 0.9942, + "step": 839 + }, + { + "epoch": 0.07575415971502007, + "grad_norm": 1.7169947591533892, + "learning_rate": 3.978107219963553e-06, + "loss": 1.0169, + "step": 840 + }, + { + "epoch": 0.07584434323849033, + "grad_norm": 1.40355228638431, + "learning_rate": 3.978020931426651e-06, + "loss": 0.9157, + "step": 841 + }, + { + "epoch": 0.0759345267619606, + "grad_norm": 1.4040256710558485, + "learning_rate": 3.977934474114096e-06, + "loss": 0.9534, + "step": 842 + }, + { + "epoch": 0.07602471028543085, + "grad_norm": 0.7840366256780757, + "learning_rate": 3.977847848033267e-06, + "loss": 0.7914, + "step": 843 + }, + { + "epoch": 0.07611489380890112, + "grad_norm": 1.5884297899550288, + "learning_rate": 3.977761053191553e-06, + "loss": 1.0603, + "step": 844 + }, + { + "epoch": 0.07620507733237138, + "grad_norm": 1.359836587695257, + "learning_rate": 3.977674089596361e-06, + "loss": 0.9926, + "step": 845 + }, + { + "epoch": 0.07629526085584164, + "grad_norm": 0.8510510369807921, + "learning_rate": 3.97758695725511e-06, + "loss": 0.7918, + "step": 846 + }, + { + "epoch": 0.0763854443793119, + "grad_norm": 1.2234418496240926, + "learning_rate": 3.977499656175236e-06, + "loss": 0.9987, + "step": 847 + }, + { + "epoch": 0.07647562790278216, + "grad_norm": 1.5459019847065272, + "learning_rate": 3.977412186364187e-06, + "loss": 0.9168, + "step": 848 + }, + { + "epoch": 
0.07656581142625242, + "grad_norm": 6.598512204411505, + "learning_rate": 3.977324547829428e-06, + "loss": 1.0294, + "step": 849 + }, + { + "epoch": 0.07665599494972268, + "grad_norm": 2.518810080292092, + "learning_rate": 3.977236740578435e-06, + "loss": 1.026, + "step": 850 + }, + { + "epoch": 0.07674617847319294, + "grad_norm": 2.0327722567445985, + "learning_rate": 3.9771487646187015e-06, + "loss": 1.0011, + "step": 851 + }, + { + "epoch": 0.07683636199666322, + "grad_norm": 1.737927347225251, + "learning_rate": 3.9770606199577325e-06, + "loss": 1.0502, + "step": 852 + }, + { + "epoch": 0.07692654552013348, + "grad_norm": 1.878787855831642, + "learning_rate": 3.9769723066030505e-06, + "loss": 0.9435, + "step": 853 + }, + { + "epoch": 0.07701672904360374, + "grad_norm": 1.7149373161493493, + "learning_rate": 3.976883824562191e-06, + "loss": 1.073, + "step": 854 + }, + { + "epoch": 0.077106912567074, + "grad_norm": 1.5231038045993324, + "learning_rate": 3.976795173842703e-06, + "loss": 1.0102, + "step": 855 + }, + { + "epoch": 0.07719709609054426, + "grad_norm": 1.629328245392788, + "learning_rate": 3.97670635445215e-06, + "loss": 0.9801, + "step": 856 + }, + { + "epoch": 0.07728727961401452, + "grad_norm": 1.7347602759976501, + "learning_rate": 3.976617366398112e-06, + "loss": 0.985, + "step": 857 + }, + { + "epoch": 0.07737746313748478, + "grad_norm": 1.2347433771858314, + "learning_rate": 3.976528209688181e-06, + "loss": 1.038, + "step": 858 + }, + { + "epoch": 0.07746764666095504, + "grad_norm": 1.2608752191565513, + "learning_rate": 3.976438884329965e-06, + "loss": 1.0072, + "step": 859 + }, + { + "epoch": 0.0775578301844253, + "grad_norm": 1.450024815051079, + "learning_rate": 3.976349390331085e-06, + "loss": 1.1366, + "step": 860 + }, + { + "epoch": 0.07764801370789556, + "grad_norm": 1.573096545041261, + "learning_rate": 3.976259727699178e-06, + "loss": 0.9701, + "step": 861 + }, + { + "epoch": 0.07773819723136582, + "grad_norm": 1.8612720845323238, + "learning_rate": 3.976169896441895e-06, + "loss": 0.9704, + "step": 862 + }, + { + "epoch": 0.07782838075483609, + "grad_norm": 1.608766829751644, + "learning_rate": 3.976079896566898e-06, + "loss": 1.0404, + "step": 863 + }, + { + "epoch": 0.07791856427830636, + "grad_norm": 1.609402628541101, + "learning_rate": 3.97598972808187e-06, + "loss": 1.0667, + "step": 864 + }, + { + "epoch": 0.07800874780177662, + "grad_norm": 1.9634604466755066, + "learning_rate": 3.975899390994501e-06, + "loss": 1.0064, + "step": 865 + }, + { + "epoch": 0.07809893132524688, + "grad_norm": 1.5281622759245295, + "learning_rate": 3.975808885312502e-06, + "loss": 1.0703, + "step": 866 + }, + { + "epoch": 0.07818911484871714, + "grad_norm": 1.4846126968402031, + "learning_rate": 3.975718211043594e-06, + "loss": 1.0539, + "step": 867 + }, + { + "epoch": 0.0782792983721874, + "grad_norm": 1.594409432213231, + "learning_rate": 3.975627368195515e-06, + "loss": 0.9261, + "step": 868 + }, + { + "epoch": 0.07836948189565766, + "grad_norm": 1.2545109891900494, + "learning_rate": 3.975536356776015e-06, + "loss": 0.9797, + "step": 869 + }, + { + "epoch": 0.07845966541912792, + "grad_norm": 0.7964163937819829, + "learning_rate": 3.975445176792861e-06, + "loss": 0.8772, + "step": 870 + }, + { + "epoch": 0.07854984894259819, + "grad_norm": 1.4585281332792372, + "learning_rate": 3.975353828253831e-06, + "loss": 1.0062, + "step": 871 + }, + { + "epoch": 0.07864003246606845, + "grad_norm": 1.4309551684956399, + "learning_rate": 3.97526231116672e-06, + "loss": 1.0113, + 
"step": 872 + }, + { + "epoch": 0.07873021598953871, + "grad_norm": 1.559532322716504, + "learning_rate": 3.975170625539338e-06, + "loss": 0.9523, + "step": 873 + }, + { + "epoch": 0.07882039951300897, + "grad_norm": 1.2616214773751981, + "learning_rate": 3.975078771379507e-06, + "loss": 1.0056, + "step": 874 + }, + { + "epoch": 0.07891058303647923, + "grad_norm": 1.7574230525616332, + "learning_rate": 3.974986748695064e-06, + "loss": 0.9143, + "step": 875 + }, + { + "epoch": 0.0790007665599495, + "grad_norm": 1.0101896535686075, + "learning_rate": 3.974894557493862e-06, + "loss": 0.8277, + "step": 876 + }, + { + "epoch": 0.07909095008341976, + "grad_norm": 1.366436823071569, + "learning_rate": 3.974802197783768e-06, + "loss": 1.0536, + "step": 877 + }, + { + "epoch": 0.07918113360689003, + "grad_norm": 1.9243365395380099, + "learning_rate": 3.974709669572661e-06, + "loss": 1.0676, + "step": 878 + }, + { + "epoch": 0.07927131713036029, + "grad_norm": 1.920199524525839, + "learning_rate": 3.974616972868436e-06, + "loss": 1.0004, + "step": 879 + }, + { + "epoch": 0.07936150065383055, + "grad_norm": 1.5368224752354431, + "learning_rate": 3.974524107679003e-06, + "loss": 1.0127, + "step": 880 + }, + { + "epoch": 0.07945168417730081, + "grad_norm": 1.3157479652332515, + "learning_rate": 3.974431074012286e-06, + "loss": 1.0003, + "step": 881 + }, + { + "epoch": 0.07954186770077107, + "grad_norm": 1.764612701003494, + "learning_rate": 3.974337871876223e-06, + "loss": 1.0338, + "step": 882 + }, + { + "epoch": 0.07963205122424133, + "grad_norm": 1.5019098679919696, + "learning_rate": 3.974244501278766e-06, + "loss": 0.9779, + "step": 883 + }, + { + "epoch": 0.07972223474771159, + "grad_norm": 1.3413779591780206, + "learning_rate": 3.974150962227883e-06, + "loss": 0.9545, + "step": 884 + }, + { + "epoch": 0.07981241827118185, + "grad_norm": 1.5107456594376718, + "learning_rate": 3.9740572547315535e-06, + "loss": 0.9321, + "step": 885 + }, + { + "epoch": 0.07990260179465211, + "grad_norm": 1.9632184262590042, + "learning_rate": 3.973963378797775e-06, + "loss": 1.0402, + "step": 886 + }, + { + "epoch": 0.07999278531812237, + "grad_norm": 1.3714451787786053, + "learning_rate": 3.973869334434556e-06, + "loss": 1.0221, + "step": 887 + }, + { + "epoch": 0.08008296884159265, + "grad_norm": 1.438490982138776, + "learning_rate": 3.973775121649922e-06, + "loss": 1.0241, + "step": 888 + }, + { + "epoch": 0.08017315236506291, + "grad_norm": 1.4890728144573662, + "learning_rate": 3.973680740451911e-06, + "loss": 0.9167, + "step": 889 + }, + { + "epoch": 0.08026333588853317, + "grad_norm": 1.476559755030104, + "learning_rate": 3.9735861908485776e-06, + "loss": 0.9732, + "step": 890 + }, + { + "epoch": 0.08035351941200343, + "grad_norm": 1.4387464509683248, + "learning_rate": 3.973491472847987e-06, + "loss": 1.0227, + "step": 891 + }, + { + "epoch": 0.08044370293547369, + "grad_norm": 1.4657128769286993, + "learning_rate": 3.973396586458222e-06, + "loss": 0.9305, + "step": 892 + }, + { + "epoch": 0.08053388645894395, + "grad_norm": 1.5259343739918367, + "learning_rate": 3.97330153168738e-06, + "loss": 0.9909, + "step": 893 + }, + { + "epoch": 0.08062406998241421, + "grad_norm": 1.4964282904311552, + "learning_rate": 3.973206308543571e-06, + "loss": 0.9431, + "step": 894 + }, + { + "epoch": 0.08071425350588447, + "grad_norm": 1.6286555767428668, + "learning_rate": 3.973110917034918e-06, + "loss": 0.9725, + "step": 895 + }, + { + "epoch": 0.08080443702935473, + "grad_norm": 1.66991445082414, + "learning_rate": 
3.973015357169563e-06, + "loss": 0.9208, + "step": 896 + }, + { + "epoch": 0.080894620552825, + "grad_norm": 1.7225095379348025, + "learning_rate": 3.972919628955659e-06, + "loss": 0.9007, + "step": 897 + }, + { + "epoch": 0.08098480407629526, + "grad_norm": 1.7673928516721242, + "learning_rate": 3.972823732401373e-06, + "loss": 0.9847, + "step": 898 + }, + { + "epoch": 0.08107498759976552, + "grad_norm": 1.463934695142251, + "learning_rate": 3.972727667514888e-06, + "loss": 0.9967, + "step": 899 + }, + { + "epoch": 0.08116517112323579, + "grad_norm": 2.0480923126843806, + "learning_rate": 3.972631434304402e-06, + "loss": 0.9598, + "step": 900 + }, + { + "epoch": 0.08125535464670605, + "grad_norm": 1.612746623496804, + "learning_rate": 3.972535032778124e-06, + "loss": 1.1111, + "step": 901 + }, + { + "epoch": 0.08134553817017631, + "grad_norm": 1.2325262880907066, + "learning_rate": 3.97243846294428e-06, + "loss": 1.0911, + "step": 902 + }, + { + "epoch": 0.08143572169364657, + "grad_norm": 1.4321092616599311, + "learning_rate": 3.972341724811111e-06, + "loss": 1.1384, + "step": 903 + }, + { + "epoch": 0.08152590521711683, + "grad_norm": 1.5654298398023163, + "learning_rate": 3.972244818386872e-06, + "loss": 0.9456, + "step": 904 + }, + { + "epoch": 0.0816160887405871, + "grad_norm": 1.529807364859341, + "learning_rate": 3.972147743679828e-06, + "loss": 0.9535, + "step": 905 + }, + { + "epoch": 0.08170627226405736, + "grad_norm": 1.3082776043598623, + "learning_rate": 3.972050500698265e-06, + "loss": 0.9649, + "step": 906 + }, + { + "epoch": 0.08179645578752762, + "grad_norm": 1.586449300932725, + "learning_rate": 3.971953089450481e-06, + "loss": 0.9928, + "step": 907 + }, + { + "epoch": 0.08188663931099788, + "grad_norm": 1.5666896344929446, + "learning_rate": 3.971855509944784e-06, + "loss": 1.0706, + "step": 908 + }, + { + "epoch": 0.08197682283446814, + "grad_norm": 1.4520886735348173, + "learning_rate": 3.971757762189504e-06, + "loss": 0.9892, + "step": 909 + }, + { + "epoch": 0.0820670063579384, + "grad_norm": 1.6700944777849496, + "learning_rate": 3.9716598461929785e-06, + "loss": 0.9869, + "step": 910 + }, + { + "epoch": 0.08215718988140866, + "grad_norm": 1.76072146995403, + "learning_rate": 3.971561761963563e-06, + "loss": 0.9734, + "step": 911 + }, + { + "epoch": 0.08224737340487893, + "grad_norm": 1.713800760487087, + "learning_rate": 3.971463509509628e-06, + "loss": 1.0768, + "step": 912 + }, + { + "epoch": 0.0823375569283492, + "grad_norm": 1.877944922422602, + "learning_rate": 3.9713650888395555e-06, + "loss": 1.0034, + "step": 913 + }, + { + "epoch": 0.08242774045181946, + "grad_norm": 1.712991117793993, + "learning_rate": 3.9712664999617425e-06, + "loss": 0.9985, + "step": 914 + }, + { + "epoch": 0.08251792397528972, + "grad_norm": 1.8855844567036575, + "learning_rate": 3.971167742884603e-06, + "loss": 0.9743, + "step": 915 + }, + { + "epoch": 0.08260810749875998, + "grad_norm": 5.39136267880615, + "learning_rate": 3.971068817616564e-06, + "loss": 1.0507, + "step": 916 + }, + { + "epoch": 0.08269829102223024, + "grad_norm": 1.3419769921384248, + "learning_rate": 3.970969724166064e-06, + "loss": 1.027, + "step": 917 + }, + { + "epoch": 0.0827884745457005, + "grad_norm": 1.1007801280614957, + "learning_rate": 3.970870462541559e-06, + "loss": 0.8166, + "step": 918 + }, + { + "epoch": 0.08287865806917076, + "grad_norm": 1.3798097238804305, + "learning_rate": 3.97077103275152e-06, + "loss": 1.0262, + "step": 919 + }, + { + "epoch": 0.08296884159264102, + "grad_norm": 
1.4599208456294952, + "learning_rate": 3.970671434804428e-06, + "loss": 0.9811, + "step": 920 + }, + { + "epoch": 0.08305902511611128, + "grad_norm": 1.7318270413639292, + "learning_rate": 3.970571668708784e-06, + "loss": 0.9972, + "step": 921 + }, + { + "epoch": 0.08314920863958154, + "grad_norm": 0.9301282014467449, + "learning_rate": 3.9704717344731e-06, + "loss": 0.8237, + "step": 922 + }, + { + "epoch": 0.0832393921630518, + "grad_norm": 0.7001432774573607, + "learning_rate": 3.9703716321059026e-06, + "loss": 0.8366, + "step": 923 + }, + { + "epoch": 0.08332957568652208, + "grad_norm": 1.7513468191868935, + "learning_rate": 3.9702713616157325e-06, + "loss": 1.0908, + "step": 924 + }, + { + "epoch": 0.08341975920999234, + "grad_norm": 1.8917310017584201, + "learning_rate": 3.9701709230111455e-06, + "loss": 1.0204, + "step": 925 + }, + { + "epoch": 0.0835099427334626, + "grad_norm": 1.4598323293323086, + "learning_rate": 3.970070316300713e-06, + "loss": 0.9669, + "step": 926 + }, + { + "epoch": 0.08360012625693286, + "grad_norm": 1.4897528954234684, + "learning_rate": 3.969969541493017e-06, + "loss": 1.0247, + "step": 927 + }, + { + "epoch": 0.08369030978040312, + "grad_norm": 1.4836817678624192, + "learning_rate": 3.969868598596658e-06, + "loss": 1.0523, + "step": 928 + }, + { + "epoch": 0.08378049330387338, + "grad_norm": 1.2703527052406667, + "learning_rate": 3.969767487620249e-06, + "loss": 0.9477, + "step": 929 + }, + { + "epoch": 0.08387067682734364, + "grad_norm": 1.453880031867901, + "learning_rate": 3.969666208572416e-06, + "loss": 1.0492, + "step": 930 + }, + { + "epoch": 0.0839608603508139, + "grad_norm": 1.6940843906617273, + "learning_rate": 3.969564761461802e-06, + "loss": 1.0556, + "step": 931 + }, + { + "epoch": 0.08405104387428416, + "grad_norm": 1.3984837444480407, + "learning_rate": 3.969463146297062e-06, + "loss": 0.9812, + "step": 932 + }, + { + "epoch": 0.08414122739775443, + "grad_norm": 1.991423636203803, + "learning_rate": 3.969361363086867e-06, + "loss": 1.0347, + "step": 933 + }, + { + "epoch": 0.08423141092122469, + "grad_norm": 1.5573652966634344, + "learning_rate": 3.9692594118399014e-06, + "loss": 0.9766, + "step": 934 + }, + { + "epoch": 0.08432159444469496, + "grad_norm": 1.6403088219561976, + "learning_rate": 3.969157292564865e-06, + "loss": 1.0415, + "step": 935 + }, + { + "epoch": 0.08441177796816522, + "grad_norm": 1.6123639101072478, + "learning_rate": 3.96905500527047e-06, + "loss": 0.9931, + "step": 936 + }, + { + "epoch": 0.08450196149163548, + "grad_norm": 1.5321910068721578, + "learning_rate": 3.968952549965445e-06, + "loss": 0.9856, + "step": 937 + }, + { + "epoch": 0.08459214501510574, + "grad_norm": 1.4557463270164297, + "learning_rate": 3.968849926658532e-06, + "loss": 1.0352, + "step": 938 + }, + { + "epoch": 0.084682328538576, + "grad_norm": 2.99254349067463, + "learning_rate": 3.9687471353584866e-06, + "loss": 0.9211, + "step": 939 + }, + { + "epoch": 0.08477251206204627, + "grad_norm": 1.3273307220376458, + "learning_rate": 3.9686441760740795e-06, + "loss": 1.0182, + "step": 940 + }, + { + "epoch": 0.08486269558551653, + "grad_norm": 1.641339754902921, + "learning_rate": 3.968541048814098e-06, + "loss": 0.9427, + "step": 941 + }, + { + "epoch": 0.08495287910898679, + "grad_norm": 1.7519917733682984, + "learning_rate": 3.968437753587339e-06, + "loss": 0.9272, + "step": 942 + }, + { + "epoch": 0.08504306263245705, + "grad_norm": 1.3047123067176443, + "learning_rate": 3.968334290402616e-06, + "loss": 1.0417, + "step": 943 + }, + { + 
"epoch": 0.08513324615592731, + "grad_norm": 1.4003838149241665, + "learning_rate": 3.968230659268759e-06, + "loss": 0.8944, + "step": 944 + }, + { + "epoch": 0.08522342967939757, + "grad_norm": 1.6926070745454391, + "learning_rate": 3.968126860194609e-06, + "loss": 1.0679, + "step": 945 + }, + { + "epoch": 0.08531361320286783, + "grad_norm": 1.4511052523243668, + "learning_rate": 3.968022893189025e-06, + "loss": 1.0547, + "step": 946 + }, + { + "epoch": 0.0854037967263381, + "grad_norm": 1.340259053896148, + "learning_rate": 3.967918758260874e-06, + "loss": 1.0388, + "step": 947 + }, + { + "epoch": 0.08549398024980837, + "grad_norm": 1.7423755903450324, + "learning_rate": 3.967814455419044e-06, + "loss": 1.0437, + "step": 948 + }, + { + "epoch": 0.08558416377327863, + "grad_norm": 1.5843847029951283, + "learning_rate": 3.967709984672434e-06, + "loss": 1.0467, + "step": 949 + }, + { + "epoch": 0.08567434729674889, + "grad_norm": 1.5292171296707164, + "learning_rate": 3.967605346029959e-06, + "loss": 1.0333, + "step": 950 + }, + { + "epoch": 0.08576453082021915, + "grad_norm": 1.263867983569474, + "learning_rate": 3.9675005395005466e-06, + "loss": 1.0719, + "step": 951 + }, + { + "epoch": 0.08585471434368941, + "grad_norm": 1.864152556655727, + "learning_rate": 3.967395565093139e-06, + "loss": 1.0227, + "step": 952 + }, + { + "epoch": 0.08594489786715967, + "grad_norm": 1.3827931893072554, + "learning_rate": 3.967290422816693e-06, + "loss": 1.0067, + "step": 953 + }, + { + "epoch": 0.08603508139062993, + "grad_norm": 1.6661312514828983, + "learning_rate": 3.967185112680183e-06, + "loss": 0.9717, + "step": 954 + }, + { + "epoch": 0.08612526491410019, + "grad_norm": 1.4672491643620196, + "learning_rate": 3.96707963469259e-06, + "loss": 1.0926, + "step": 955 + }, + { + "epoch": 0.08621544843757045, + "grad_norm": 1.6322133548389812, + "learning_rate": 3.966973988862917e-06, + "loss": 0.9915, + "step": 956 + }, + { + "epoch": 0.08630563196104071, + "grad_norm": 1.3971264196878255, + "learning_rate": 3.966868175200178e-06, + "loss": 1.0541, + "step": 957 + }, + { + "epoch": 0.08639581548451097, + "grad_norm": 1.4785128030283918, + "learning_rate": 3.9667621937134e-06, + "loss": 1.0582, + "step": 958 + }, + { + "epoch": 0.08648599900798125, + "grad_norm": 1.6451579370018108, + "learning_rate": 3.966656044411627e-06, + "loss": 0.9216, + "step": 959 + }, + { + "epoch": 0.08657618253145151, + "grad_norm": 1.3326593523484744, + "learning_rate": 3.966549727303918e-06, + "loss": 0.9799, + "step": 960 + }, + { + "epoch": 0.08666636605492177, + "grad_norm": 1.4468517458140742, + "learning_rate": 3.966443242399341e-06, + "loss": 1.0404, + "step": 961 + }, + { + "epoch": 0.08675654957839203, + "grad_norm": 0.9016084193232593, + "learning_rate": 3.966336589706985e-06, + "loss": 0.8202, + "step": 962 + }, + { + "epoch": 0.08684673310186229, + "grad_norm": 1.566694047702111, + "learning_rate": 3.966229769235948e-06, + "loss": 1.0247, + "step": 963 + }, + { + "epoch": 0.08693691662533255, + "grad_norm": 1.6389546201566971, + "learning_rate": 3.966122780995345e-06, + "loss": 1.0812, + "step": 964 + }, + { + "epoch": 0.08702710014880281, + "grad_norm": 1.7360142702008363, + "learning_rate": 3.966015624994306e-06, + "loss": 1.0407, + "step": 965 + }, + { + "epoch": 0.08711728367227307, + "grad_norm": 1.3899780654548757, + "learning_rate": 3.9659083012419735e-06, + "loss": 0.9763, + "step": 966 + }, + { + "epoch": 0.08720746719574334, + "grad_norm": 1.5260286645259433, + "learning_rate": 3.965800809747505e-06, + 
"loss": 0.9401, + "step": 967 + }, + { + "epoch": 0.0872976507192136, + "grad_norm": 1.5275279370639157, + "learning_rate": 3.965693150520071e-06, + "loss": 0.9796, + "step": 968 + }, + { + "epoch": 0.08738783424268386, + "grad_norm": 1.887492398379347, + "learning_rate": 3.96558532356886e-06, + "loss": 0.9421, + "step": 969 + }, + { + "epoch": 0.08747801776615412, + "grad_norm": 1.889090679958729, + "learning_rate": 3.9654773289030704e-06, + "loss": 1.0567, + "step": 970 + }, + { + "epoch": 0.08756820128962439, + "grad_norm": 1.624136915617833, + "learning_rate": 3.9653691665319176e-06, + "loss": 1.0279, + "step": 971 + }, + { + "epoch": 0.08765838481309465, + "grad_norm": 2.7853830039235437, + "learning_rate": 3.96526083646463e-06, + "loss": 0.941, + "step": 972 + }, + { + "epoch": 0.08774856833656491, + "grad_norm": 1.4197157509486966, + "learning_rate": 3.9651523387104526e-06, + "loss": 0.9635, + "step": 973 + }, + { + "epoch": 0.08783875186003517, + "grad_norm": 1.7268292665448715, + "learning_rate": 3.965043673278641e-06, + "loss": 1.0191, + "step": 974 + }, + { + "epoch": 0.08792893538350544, + "grad_norm": 1.3122513399268105, + "learning_rate": 3.964934840178469e-06, + "loss": 1.0171, + "step": 975 + }, + { + "epoch": 0.0880191189069757, + "grad_norm": 1.498420599085352, + "learning_rate": 3.964825839419221e-06, + "loss": 1.0475, + "step": 976 + }, + { + "epoch": 0.08810930243044596, + "grad_norm": 1.8439195199396323, + "learning_rate": 3.964716671010199e-06, + "loss": 0.9972, + "step": 977 + }, + { + "epoch": 0.08819948595391622, + "grad_norm": 1.3472913607436805, + "learning_rate": 3.9646073349607165e-06, + "loss": 0.9767, + "step": 978 + }, + { + "epoch": 0.08828966947738648, + "grad_norm": 1.5759293796285467, + "learning_rate": 3.964497831280105e-06, + "loss": 1.1389, + "step": 979 + }, + { + "epoch": 0.08837985300085674, + "grad_norm": 1.562024921672671, + "learning_rate": 3.964388159977705e-06, + "loss": 1.0089, + "step": 980 + }, + { + "epoch": 0.088470036524327, + "grad_norm": 1.3223597888859568, + "learning_rate": 3.964278321062876e-06, + "loss": 0.9221, + "step": 981 + }, + { + "epoch": 0.08856022004779726, + "grad_norm": 1.530121289974452, + "learning_rate": 3.96416831454499e-06, + "loss": 1.035, + "step": 982 + }, + { + "epoch": 0.08865040357126754, + "grad_norm": 0.9015767524425701, + "learning_rate": 3.964058140433434e-06, + "loss": 0.8575, + "step": 983 + }, + { + "epoch": 0.0887405870947378, + "grad_norm": 1.7084325280892563, + "learning_rate": 3.963947798737606e-06, + "loss": 1.0771, + "step": 984 + }, + { + "epoch": 0.08883077061820806, + "grad_norm": 1.5368678522428558, + "learning_rate": 3.963837289466923e-06, + "loss": 1.0314, + "step": 985 + }, + { + "epoch": 0.08892095414167832, + "grad_norm": 1.3405165692919976, + "learning_rate": 3.9637266126308145e-06, + "loss": 0.9732, + "step": 986 + }, + { + "epoch": 0.08901113766514858, + "grad_norm": 0.9707984800726863, + "learning_rate": 3.963615768238724e-06, + "loss": 0.8669, + "step": 987 + }, + { + "epoch": 0.08910132118861884, + "grad_norm": 1.5122376963259476, + "learning_rate": 3.963504756300107e-06, + "loss": 0.8994, + "step": 988 + }, + { + "epoch": 0.0891915047120891, + "grad_norm": 1.4964803091704426, + "learning_rate": 3.96339357682444e-06, + "loss": 1.0527, + "step": 989 + }, + { + "epoch": 0.08928168823555936, + "grad_norm": 1.6993477607948675, + "learning_rate": 3.963282229821206e-06, + "loss": 1.0303, + "step": 990 + }, + { + "epoch": 0.08937187175902962, + "grad_norm": 1.4602452234191046, + 
"learning_rate": 3.963170715299906e-06, + "loss": 1.0174, + "step": 991 + }, + { + "epoch": 0.08946205528249988, + "grad_norm": 1.3957980943852917, + "learning_rate": 3.963059033270056e-06, + "loss": 0.933, + "step": 992 + }, + { + "epoch": 0.08955223880597014, + "grad_norm": 1.3358166288607343, + "learning_rate": 3.9629471837411855e-06, + "loss": 0.9525, + "step": 993 + }, + { + "epoch": 0.0896424223294404, + "grad_norm": 2.1496681711376286, + "learning_rate": 3.962835166722838e-06, + "loss": 0.9537, + "step": 994 + }, + { + "epoch": 0.08973260585291068, + "grad_norm": 1.6163139010820438, + "learning_rate": 3.96272298222457e-06, + "loss": 1.0179, + "step": 995 + }, + { + "epoch": 0.08982278937638094, + "grad_norm": 1.5271332344482016, + "learning_rate": 3.962610630255956e-06, + "loss": 1.139, + "step": 996 + }, + { + "epoch": 0.0899129728998512, + "grad_norm": 1.565649901171111, + "learning_rate": 3.96249811082658e-06, + "loss": 0.981, + "step": 997 + }, + { + "epoch": 0.09000315642332146, + "grad_norm": 0.8466400524396898, + "learning_rate": 3.962385423946046e-06, + "loss": 0.8937, + "step": 998 + }, + { + "epoch": 0.09009333994679172, + "grad_norm": 1.7031956929225405, + "learning_rate": 3.962272569623966e-06, + "loss": 0.9912, + "step": 999 + }, + { + "epoch": 0.09018352347026198, + "grad_norm": 1.676186679267092, + "learning_rate": 3.9621595478699704e-06, + "loss": 0.8882, + "step": 1000 + }, + { + "epoch": 0.09027370699373224, + "grad_norm": 1.6116646340142693, + "learning_rate": 3.962046358693703e-06, + "loss": 1.0013, + "step": 1001 + }, + { + "epoch": 0.0903638905172025, + "grad_norm": 4.096770824559856, + "learning_rate": 3.961933002104822e-06, + "loss": 1.0924, + "step": 1002 + }, + { + "epoch": 0.09045407404067277, + "grad_norm": 1.6418991953704645, + "learning_rate": 3.961819478112999e-06, + "loss": 0.9352, + "step": 1003 + }, + { + "epoch": 0.09054425756414303, + "grad_norm": 1.6236381693118056, + "learning_rate": 3.961705786727921e-06, + "loss": 1.0283, + "step": 1004 + }, + { + "epoch": 0.09063444108761329, + "grad_norm": 1.3878562452726937, + "learning_rate": 3.961591927959288e-06, + "loss": 0.9348, + "step": 1005 + }, + { + "epoch": 0.09072462461108355, + "grad_norm": 1.2637997876480582, + "learning_rate": 3.961477901816816e-06, + "loss": 0.8009, + "step": 1006 + }, + { + "epoch": 0.09081480813455382, + "grad_norm": 1.5717304716916067, + "learning_rate": 3.961363708310233e-06, + "loss": 1.0727, + "step": 1007 + }, + { + "epoch": 0.09090499165802408, + "grad_norm": 1.574323324133039, + "learning_rate": 3.961249347449286e-06, + "loss": 1.0328, + "step": 1008 + }, + { + "epoch": 0.09099517518149434, + "grad_norm": 1.5176803181335412, + "learning_rate": 3.961134819243728e-06, + "loss": 0.993, + "step": 1009 + }, + { + "epoch": 0.0910853587049646, + "grad_norm": 1.6583155581545632, + "learning_rate": 3.961020123703335e-06, + "loss": 1.0124, + "step": 1010 + }, + { + "epoch": 0.09117554222843487, + "grad_norm": 1.5255668430894023, + "learning_rate": 3.960905260837892e-06, + "loss": 1.0237, + "step": 1011 + }, + { + "epoch": 0.09126572575190513, + "grad_norm": 1.5754494979304081, + "learning_rate": 3.960790230657199e-06, + "loss": 0.9837, + "step": 1012 + }, + { + "epoch": 0.09135590927537539, + "grad_norm": 1.1885914555027632, + "learning_rate": 3.960675033171072e-06, + "loss": 0.9596, + "step": 1013 + }, + { + "epoch": 0.09144609279884565, + "grad_norm": 1.5284303668192445, + "learning_rate": 3.960559668389341e-06, + "loss": 0.9516, + "step": 1014 + }, + { + "epoch": 
0.09153627632231591, + "grad_norm": 1.4712490167995333, + "learning_rate": 3.960444136321847e-06, + "loss": 1.0073, + "step": 1015 + }, + { + "epoch": 0.09162645984578617, + "grad_norm": 1.833676118304986, + "learning_rate": 3.960328436978451e-06, + "loss": 1.1012, + "step": 1016 + }, + { + "epoch": 0.09171664336925643, + "grad_norm": 1.2235301736588178, + "learning_rate": 3.960212570369024e-06, + "loss": 0.8064, + "step": 1017 + }, + { + "epoch": 0.09180682689272669, + "grad_norm": 1.5954058404081766, + "learning_rate": 3.9600965365034515e-06, + "loss": 0.9395, + "step": 1018 + }, + { + "epoch": 0.09189701041619697, + "grad_norm": 1.4448692673147758, + "learning_rate": 3.959980335391634e-06, + "loss": 1.0645, + "step": 1019 + }, + { + "epoch": 0.09198719393966723, + "grad_norm": 1.483234730490023, + "learning_rate": 3.959863967043487e-06, + "loss": 0.9516, + "step": 1020 + }, + { + "epoch": 0.09207737746313749, + "grad_norm": 1.4135645627233888, + "learning_rate": 3.9597474314689405e-06, + "loss": 1.0412, + "step": 1021 + }, + { + "epoch": 0.09216756098660775, + "grad_norm": 1.716798888672916, + "learning_rate": 3.959630728677937e-06, + "loss": 1.0177, + "step": 1022 + }, + { + "epoch": 0.09225774451007801, + "grad_norm": 1.555172791770086, + "learning_rate": 3.959513858680434e-06, + "loss": 0.9885, + "step": 1023 + }, + { + "epoch": 0.09234792803354827, + "grad_norm": 1.631968594996899, + "learning_rate": 3.959396821486405e-06, + "loss": 1.0932, + "step": 1024 + }, + { + "epoch": 0.09243811155701853, + "grad_norm": 1.2952573407879422, + "learning_rate": 3.959279617105835e-06, + "loss": 0.871, + "step": 1025 + }, + { + "epoch": 0.09252829508048879, + "grad_norm": 1.6017313054141298, + "learning_rate": 3.9591622455487235e-06, + "loss": 1.0166, + "step": 1026 + }, + { + "epoch": 0.09261847860395905, + "grad_norm": 1.7023955716722659, + "learning_rate": 3.959044706825087e-06, + "loss": 1.0775, + "step": 1027 + }, + { + "epoch": 0.09270866212742931, + "grad_norm": 1.6461037119965447, + "learning_rate": 3.958927000944954e-06, + "loss": 0.9199, + "step": 1028 + }, + { + "epoch": 0.09279884565089958, + "grad_norm": 1.6542325958770752, + "learning_rate": 3.958809127918368e-06, + "loss": 1.0938, + "step": 1029 + }, + { + "epoch": 0.09288902917436984, + "grad_norm": 1.388401525607929, + "learning_rate": 3.958691087755387e-06, + "loss": 0.9475, + "step": 1030 + }, + { + "epoch": 0.09297921269784011, + "grad_norm": 1.5490951788900829, + "learning_rate": 3.958572880466081e-06, + "loss": 0.8844, + "step": 1031 + }, + { + "epoch": 0.09306939622131037, + "grad_norm": 1.3566546350706368, + "learning_rate": 3.9584545060605385e-06, + "loss": 0.9914, + "step": 1032 + }, + { + "epoch": 0.09315957974478063, + "grad_norm": 1.6849485818505634, + "learning_rate": 3.958335964548859e-06, + "loss": 0.9751, + "step": 1033 + }, + { + "epoch": 0.0932497632682509, + "grad_norm": 1.67013216535026, + "learning_rate": 3.958217255941156e-06, + "loss": 1.0504, + "step": 1034 + }, + { + "epoch": 0.09333994679172115, + "grad_norm": 1.3556320575760792, + "learning_rate": 3.95809838024756e-06, + "loss": 0.9575, + "step": 1035 + }, + { + "epoch": 0.09343013031519141, + "grad_norm": 1.6781760579161278, + "learning_rate": 3.957979337478212e-06, + "loss": 0.9733, + "step": 1036 + }, + { + "epoch": 0.09352031383866168, + "grad_norm": 1.7198002467489215, + "learning_rate": 3.957860127643272e-06, + "loss": 1.031, + "step": 1037 + }, + { + "epoch": 0.09361049736213194, + "grad_norm": 1.5925890115866868, + "learning_rate": 
3.95774075075291e-06, + "loss": 1.0013, + "step": 1038 + }, + { + "epoch": 0.0937006808856022, + "grad_norm": 1.4088524790793118, + "learning_rate": 3.957621206817312e-06, + "loss": 1.0545, + "step": 1039 + }, + { + "epoch": 0.09379086440907246, + "grad_norm": 1.5630653884301515, + "learning_rate": 3.957501495846679e-06, + "loss": 0.9871, + "step": 1040 + }, + { + "epoch": 0.09388104793254272, + "grad_norm": 0.8317684421174503, + "learning_rate": 3.957381617851225e-06, + "loss": 0.8254, + "step": 1041 + }, + { + "epoch": 0.09397123145601298, + "grad_norm": 1.461382711370436, + "learning_rate": 3.9572615728411776e-06, + "loss": 0.9276, + "step": 1042 + }, + { + "epoch": 0.09406141497948325, + "grad_norm": 2.105636561682718, + "learning_rate": 3.957141360826781e-06, + "loss": 0.8948, + "step": 1043 + }, + { + "epoch": 0.09415159850295352, + "grad_norm": 1.6798698858556103, + "learning_rate": 3.957020981818292e-06, + "loss": 0.9909, + "step": 1044 + }, + { + "epoch": 0.09424178202642378, + "grad_norm": 1.3240065503088043, + "learning_rate": 3.956900435825982e-06, + "loss": 1.001, + "step": 1045 + }, + { + "epoch": 0.09433196554989404, + "grad_norm": 1.4029956885480264, + "learning_rate": 3.9567797228601364e-06, + "loss": 0.9577, + "step": 1046 + }, + { + "epoch": 0.0944221490733643, + "grad_norm": 1.4410769698998243, + "learning_rate": 3.956658842931055e-06, + "loss": 1.0837, + "step": 1047 + }, + { + "epoch": 0.09451233259683456, + "grad_norm": 1.3399768018137885, + "learning_rate": 3.956537796049052e-06, + "loss": 1.0149, + "step": 1048 + }, + { + "epoch": 0.09460251612030482, + "grad_norm": 1.1532924044702368, + "learning_rate": 3.956416582224457e-06, + "loss": 0.8617, + "step": 1049 + }, + { + "epoch": 0.09469269964377508, + "grad_norm": 1.5323412373933432, + "learning_rate": 3.956295201467611e-06, + "loss": 1.0452, + "step": 1050 + }, + { + "epoch": 0.09478288316724534, + "grad_norm": 1.2544598649630205, + "learning_rate": 3.956173653788872e-06, + "loss": 1.0338, + "step": 1051 + }, + { + "epoch": 0.0948730666907156, + "grad_norm": 1.8238476655574676, + "learning_rate": 3.95605193919861e-06, + "loss": 1.0473, + "step": 1052 + }, + { + "epoch": 0.09496325021418586, + "grad_norm": 6.8996749580228185, + "learning_rate": 3.955930057707211e-06, + "loss": 0.9896, + "step": 1053 + }, + { + "epoch": 0.09505343373765612, + "grad_norm": 1.5668872275558199, + "learning_rate": 3.955808009325075e-06, + "loss": 1.0197, + "step": 1054 + }, + { + "epoch": 0.0951436172611264, + "grad_norm": 1.5134041629438084, + "learning_rate": 3.955685794062615e-06, + "loss": 0.9792, + "step": 1055 + }, + { + "epoch": 0.09523380078459666, + "grad_norm": 1.7103129601334202, + "learning_rate": 3.95556341193026e-06, + "loss": 1.0999, + "step": 1056 + }, + { + "epoch": 0.09532398430806692, + "grad_norm": 1.465196002568596, + "learning_rate": 3.955440862938452e-06, + "loss": 1.0925, + "step": 1057 + }, + { + "epoch": 0.09541416783153718, + "grad_norm": 1.6166944258885843, + "learning_rate": 3.955318147097647e-06, + "loss": 1.0083, + "step": 1058 + }, + { + "epoch": 0.09550435135500744, + "grad_norm": 1.7495804011165972, + "learning_rate": 3.955195264418316e-06, + "loss": 0.9766, + "step": 1059 + }, + { + "epoch": 0.0955945348784777, + "grad_norm": 1.412761827693768, + "learning_rate": 3.955072214910944e-06, + "loss": 0.8689, + "step": 1060 + }, + { + "epoch": 0.09568471840194796, + "grad_norm": 1.7316711932512636, + "learning_rate": 3.954948998586032e-06, + "loss": 0.9312, + "step": 1061 + }, + { + "epoch": 
0.09577490192541822, + "grad_norm": 1.4917148021127515, + "learning_rate": 3.954825615454089e-06, + "loss": 1.0144, + "step": 1062 + }, + { + "epoch": 0.09586508544888848, + "grad_norm": 1.5915047781270621, + "learning_rate": 3.954702065525649e-06, + "loss": 1.0057, + "step": 1063 + }, + { + "epoch": 0.09595526897235875, + "grad_norm": 1.8248683254867981, + "learning_rate": 3.954578348811248e-06, + "loss": 1.0372, + "step": 1064 + }, + { + "epoch": 0.096045452495829, + "grad_norm": 1.588872289385632, + "learning_rate": 3.954454465321447e-06, + "loss": 0.9944, + "step": 1065 + }, + { + "epoch": 0.09613563601929928, + "grad_norm": 1.8129012880379554, + "learning_rate": 3.954330415066813e-06, + "loss": 1.0816, + "step": 1066 + }, + { + "epoch": 0.09622581954276954, + "grad_norm": 1.5060763468698086, + "learning_rate": 3.954206198057932e-06, + "loss": 0.996, + "step": 1067 + }, + { + "epoch": 0.0963160030662398, + "grad_norm": 1.6036567951094942, + "learning_rate": 3.954081814305403e-06, + "loss": 1.0588, + "step": 1068 + }, + { + "epoch": 0.09640618658971006, + "grad_norm": 1.514894997889133, + "learning_rate": 3.953957263819839e-06, + "loss": 0.944, + "step": 1069 + }, + { + "epoch": 0.09649637011318032, + "grad_norm": 1.5207818775965454, + "learning_rate": 3.953832546611867e-06, + "loss": 0.9436, + "step": 1070 + }, + { + "epoch": 0.09658655363665059, + "grad_norm": 1.6386295354054188, + "learning_rate": 3.953707662692129e-06, + "loss": 1.0355, + "step": 1071 + }, + { + "epoch": 0.09667673716012085, + "grad_norm": 1.4574786360992291, + "learning_rate": 3.95358261207128e-06, + "loss": 0.9329, + "step": 1072 + }, + { + "epoch": 0.0967669206835911, + "grad_norm": 1.3881194441682878, + "learning_rate": 3.953457394759992e-06, + "loss": 0.9951, + "step": 1073 + }, + { + "epoch": 0.09685710420706137, + "grad_norm": 1.4912987903629713, + "learning_rate": 3.953332010768947e-06, + "loss": 1.0198, + "step": 1074 + }, + { + "epoch": 0.09694728773053163, + "grad_norm": 2.190025288858241, + "learning_rate": 3.9532064601088436e-06, + "loss": 0.9758, + "step": 1075 + }, + { + "epoch": 0.09703747125400189, + "grad_norm": 1.7982683170127653, + "learning_rate": 3.953080742790396e-06, + "loss": 1.1497, + "step": 1076 + }, + { + "epoch": 0.09712765477747215, + "grad_norm": 1.1803647049116885, + "learning_rate": 3.95295485882433e-06, + "loss": 0.9152, + "step": 1077 + }, + { + "epoch": 0.09721783830094242, + "grad_norm": 0.7175052894503647, + "learning_rate": 3.952828808221387e-06, + "loss": 0.8494, + "step": 1078 + }, + { + "epoch": 0.09730802182441269, + "grad_norm": 1.8363555432260619, + "learning_rate": 3.9527025909923225e-06, + "loss": 0.9855, + "step": 1079 + }, + { + "epoch": 0.09739820534788295, + "grad_norm": 1.5665958128118447, + "learning_rate": 3.952576207147906e-06, + "loss": 1.0939, + "step": 1080 + }, + { + "epoch": 0.09748838887135321, + "grad_norm": 2.027986810381733, + "learning_rate": 3.95244965669892e-06, + "loss": 1.0383, + "step": 1081 + }, + { + "epoch": 0.09757857239482347, + "grad_norm": 1.4235689741065223, + "learning_rate": 3.952322939656165e-06, + "loss": 0.9549, + "step": 1082 + }, + { + "epoch": 0.09766875591829373, + "grad_norm": 1.4355892253000533, + "learning_rate": 3.952196056030451e-06, + "loss": 0.9964, + "step": 1083 + }, + { + "epoch": 0.09775893944176399, + "grad_norm": 1.7227133397613839, + "learning_rate": 3.952069005832605e-06, + "loss": 0.9717, + "step": 1084 + }, + { + "epoch": 0.09784912296523425, + "grad_norm": 1.4788356016507023, + "learning_rate": 
3.951941789073468e-06, + "loss": 1.0366, + "step": 1085 + }, + { + "epoch": 0.09793930648870451, + "grad_norm": 1.4317942454936858, + "learning_rate": 3.9518144057638955e-06, + "loss": 1.0316, + "step": 1086 + }, + { + "epoch": 0.09802949001217477, + "grad_norm": 1.547924186855992, + "learning_rate": 3.951686855914755e-06, + "loss": 0.9967, + "step": 1087 + }, + { + "epoch": 0.09811967353564503, + "grad_norm": 1.928993440050329, + "learning_rate": 3.9515591395369305e-06, + "loss": 1.0321, + "step": 1088 + }, + { + "epoch": 0.0982098570591153, + "grad_norm": 1.5868722200012495, + "learning_rate": 3.95143125664132e-06, + "loss": 0.9856, + "step": 1089 + }, + { + "epoch": 0.09830004058258557, + "grad_norm": 1.5788355823314113, + "learning_rate": 3.951303207238833e-06, + "loss": 1.0301, + "step": 1090 + }, + { + "epoch": 0.09839022410605583, + "grad_norm": 1.4209987749152169, + "learning_rate": 3.951174991340399e-06, + "loss": 0.9752, + "step": 1091 + }, + { + "epoch": 0.09848040762952609, + "grad_norm": 1.5923268565098834, + "learning_rate": 3.9510466089569546e-06, + "loss": 0.9619, + "step": 1092 + }, + { + "epoch": 0.09857059115299635, + "grad_norm": 1.532024265644935, + "learning_rate": 3.950918060099456e-06, + "loss": 0.9686, + "step": 1093 + }, + { + "epoch": 0.09866077467646661, + "grad_norm": 1.3877752013971993, + "learning_rate": 3.950789344778871e-06, + "loss": 1.0601, + "step": 1094 + }, + { + "epoch": 0.09875095819993687, + "grad_norm": 1.5461986874305524, + "learning_rate": 3.950660463006184e-06, + "loss": 0.9485, + "step": 1095 + }, + { + "epoch": 0.09884114172340713, + "grad_norm": 1.297007680333441, + "learning_rate": 3.950531414792389e-06, + "loss": 1.0831, + "step": 1096 + }, + { + "epoch": 0.0989313252468774, + "grad_norm": 1.3949327505302511, + "learning_rate": 3.950402200148498e-06, + "loss": 1.0091, + "step": 1097 + }, + { + "epoch": 0.09902150877034765, + "grad_norm": 1.5809743105630782, + "learning_rate": 3.950272819085538e-06, + "loss": 0.9842, + "step": 1098 + }, + { + "epoch": 0.09911169229381792, + "grad_norm": 2.176917062156437, + "learning_rate": 3.9501432716145474e-06, + "loss": 1.0954, + "step": 1099 + }, + { + "epoch": 0.09920187581728818, + "grad_norm": 1.4655828213671929, + "learning_rate": 3.950013557746579e-06, + "loss": 0.9039, + "step": 1100 + }, + { + "epoch": 0.09929205934075844, + "grad_norm": 1.316084870633775, + "learning_rate": 3.949883677492703e-06, + "loss": 1.0456, + "step": 1101 + }, + { + "epoch": 0.09938224286422871, + "grad_norm": 1.5335634622094716, + "learning_rate": 3.9497536308639994e-06, + "loss": 1.0201, + "step": 1102 + }, + { + "epoch": 0.09947242638769897, + "grad_norm": 1.302522885200232, + "learning_rate": 3.949623417871565e-06, + "loss": 1.0479, + "step": 1103 + }, + { + "epoch": 0.09956260991116923, + "grad_norm": 1.401578178870919, + "learning_rate": 3.949493038526511e-06, + "loss": 0.9895, + "step": 1104 + }, + { + "epoch": 0.0996527934346395, + "grad_norm": 1.3461025846593244, + "learning_rate": 3.949362492839961e-06, + "loss": 0.9607, + "step": 1105 + }, + { + "epoch": 0.09974297695810976, + "grad_norm": 1.4403725822896545, + "learning_rate": 3.949231780823054e-06, + "loss": 0.9639, + "step": 1106 + }, + { + "epoch": 0.09983316048158002, + "grad_norm": 1.6052293436539027, + "learning_rate": 3.949100902486945e-06, + "loss": 0.9484, + "step": 1107 + }, + { + "epoch": 0.09992334400505028, + "grad_norm": 1.4818557241176415, + "learning_rate": 3.948969857842799e-06, + "loss": 0.9738, + "step": 1108 + }, + { + "epoch": 
0.10001352752852054, + "grad_norm": 1.3961100470088559, + "learning_rate": 3.948838646901798e-06, + "loss": 0.9886, + "step": 1109 + }, + { + "epoch": 0.1001037110519908, + "grad_norm": 1.594423563123039, + "learning_rate": 3.948707269675138e-06, + "loss": 0.993, + "step": 1110 + }, + { + "epoch": 0.10019389457546106, + "grad_norm": 1.6260529554595509, + "learning_rate": 3.948575726174028e-06, + "loss": 0.989, + "step": 1111 + }, + { + "epoch": 0.10028407809893132, + "grad_norm": 1.8230107819012877, + "learning_rate": 3.9484440164096935e-06, + "loss": 1.0707, + "step": 1112 + }, + { + "epoch": 0.10037426162240158, + "grad_norm": 2.135267695991517, + "learning_rate": 3.948312140393372e-06, + "loss": 1.0206, + "step": 1113 + }, + { + "epoch": 0.10046444514587186, + "grad_norm": 1.724446030827139, + "learning_rate": 3.948180098136316e-06, + "loss": 0.975, + "step": 1114 + }, + { + "epoch": 0.10055462866934212, + "grad_norm": 1.466868311212972, + "learning_rate": 3.948047889649791e-06, + "loss": 0.9722, + "step": 1115 + }, + { + "epoch": 0.10064481219281238, + "grad_norm": 1.5082637921876731, + "learning_rate": 3.947915514945079e-06, + "loss": 0.9819, + "step": 1116 + }, + { + "epoch": 0.10073499571628264, + "grad_norm": 1.477760111831456, + "learning_rate": 3.947782974033474e-06, + "loss": 1.071, + "step": 1117 + }, + { + "epoch": 0.1008251792397529, + "grad_norm": 1.6847126157059906, + "learning_rate": 3.9476502669262866e-06, + "loss": 0.9617, + "step": 1118 + }, + { + "epoch": 0.10091536276322316, + "grad_norm": 1.509203096635009, + "learning_rate": 3.947517393634839e-06, + "loss": 0.9773, + "step": 1119 + }, + { + "epoch": 0.10100554628669342, + "grad_norm": 1.5840054474168608, + "learning_rate": 3.947384354170469e-06, + "loss": 1.0414, + "step": 1120 + }, + { + "epoch": 0.10109572981016368, + "grad_norm": 1.4736605591686616, + "learning_rate": 3.947251148544528e-06, + "loss": 1.0297, + "step": 1121 + }, + { + "epoch": 0.10118591333363394, + "grad_norm": 1.54469122050997, + "learning_rate": 3.947117776768382e-06, + "loss": 1.0361, + "step": 1122 + }, + { + "epoch": 0.1012760968571042, + "grad_norm": 1.2811508140319303, + "learning_rate": 3.9469842388534105e-06, + "loss": 0.9809, + "step": 1123 + }, + { + "epoch": 0.10136628038057446, + "grad_norm": 1.35240698214243, + "learning_rate": 3.946850534811009e-06, + "loss": 1.0125, + "step": 1124 + }, + { + "epoch": 0.10145646390404472, + "grad_norm": 1.4175847481987824, + "learning_rate": 3.946716664652585e-06, + "loss": 0.9855, + "step": 1125 + }, + { + "epoch": 0.101546647427515, + "grad_norm": 2.438952966609594, + "learning_rate": 3.94658262838956e-06, + "loss": 0.9075, + "step": 1126 + }, + { + "epoch": 0.10163683095098526, + "grad_norm": 1.2969050576277226, + "learning_rate": 3.946448426033373e-06, + "loss": 0.9257, + "step": 1127 + }, + { + "epoch": 0.10172701447445552, + "grad_norm": 1.3761339280383484, + "learning_rate": 3.946314057595473e-06, + "loss": 1.0303, + "step": 1128 + }, + { + "epoch": 0.10181719799792578, + "grad_norm": 1.63838253262995, + "learning_rate": 3.946179523087326e-06, + "loss": 1.0591, + "step": 1129 + }, + { + "epoch": 0.10190738152139604, + "grad_norm": 1.4867480132309547, + "learning_rate": 3.9460448225204104e-06, + "loss": 0.9178, + "step": 1130 + }, + { + "epoch": 0.1019975650448663, + "grad_norm": 1.6028164304498032, + "learning_rate": 3.945909955906221e-06, + "loss": 0.997, + "step": 1131 + }, + { + "epoch": 0.10208774856833656, + "grad_norm": 1.7300910623651946, + "learning_rate": 3.945774923256264e-06, + 
"loss": 1.0028, + "step": 1132 + }, + { + "epoch": 0.10217793209180683, + "grad_norm": 2.5597703863524885, + "learning_rate": 3.945639724582062e-06, + "loss": 1.0752, + "step": 1133 + }, + { + "epoch": 0.10226811561527709, + "grad_norm": 1.5247097083036287, + "learning_rate": 3.94550435989515e-06, + "loss": 0.9576, + "step": 1134 + }, + { + "epoch": 0.10235829913874735, + "grad_norm": 1.351456169399934, + "learning_rate": 3.945368829207079e-06, + "loss": 1.0258, + "step": 1135 + }, + { + "epoch": 0.10244848266221761, + "grad_norm": 1.643391628869212, + "learning_rate": 3.945233132529414e-06, + "loss": 0.9573, + "step": 1136 + }, + { + "epoch": 0.10253866618568787, + "grad_norm": 1.7817625680252456, + "learning_rate": 3.9450972698737304e-06, + "loss": 1.0025, + "step": 1137 + }, + { + "epoch": 0.10262884970915814, + "grad_norm": 1.4365056786907906, + "learning_rate": 3.944961241251623e-06, + "loss": 0.9837, + "step": 1138 + }, + { + "epoch": 0.1027190332326284, + "grad_norm": 1.2618196996216744, + "learning_rate": 3.9448250466746985e-06, + "loss": 0.9698, + "step": 1139 + }, + { + "epoch": 0.10280921675609866, + "grad_norm": 1.6329631827152489, + "learning_rate": 3.944688686154578e-06, + "loss": 1.0293, + "step": 1140 + }, + { + "epoch": 0.10289940027956893, + "grad_norm": 1.4732417149488006, + "learning_rate": 3.944552159702894e-06, + "loss": 0.9634, + "step": 1141 + }, + { + "epoch": 0.10298958380303919, + "grad_norm": 1.5130616366935339, + "learning_rate": 3.944415467331299e-06, + "loss": 0.9897, + "step": 1142 + }, + { + "epoch": 0.10307976732650945, + "grad_norm": 1.3729785885929913, + "learning_rate": 3.944278609051455e-06, + "loss": 0.9521, + "step": 1143 + }, + { + "epoch": 0.10316995084997971, + "grad_norm": 1.492928126414751, + "learning_rate": 3.944141584875039e-06, + "loss": 0.9293, + "step": 1144 + }, + { + "epoch": 0.10326013437344997, + "grad_norm": 1.4864055999334191, + "learning_rate": 3.944004394813743e-06, + "loss": 1.0019, + "step": 1145 + }, + { + "epoch": 0.10335031789692023, + "grad_norm": 1.4436408195662689, + "learning_rate": 3.943867038879273e-06, + "loss": 1.0276, + "step": 1146 + }, + { + "epoch": 0.10344050142039049, + "grad_norm": 1.4664704509454303, + "learning_rate": 3.943729517083349e-06, + "loss": 1.0351, + "step": 1147 + }, + { + "epoch": 0.10353068494386075, + "grad_norm": 1.4955296660669029, + "learning_rate": 3.943591829437705e-06, + "loss": 1.039, + "step": 1148 + }, + { + "epoch": 0.10362086846733101, + "grad_norm": 1.403896821276421, + "learning_rate": 3.9434539759540895e-06, + "loss": 0.9439, + "step": 1149 + }, + { + "epoch": 0.10371105199080129, + "grad_norm": 1.3614559250420906, + "learning_rate": 3.943315956644264e-06, + "loss": 0.936, + "step": 1150 + }, + { + "epoch": 0.10380123551427155, + "grad_norm": 1.602577818053804, + "learning_rate": 3.943177771520006e-06, + "loss": 1.074, + "step": 1151 + }, + { + "epoch": 0.10389141903774181, + "grad_norm": 1.5945043835392791, + "learning_rate": 3.9430394205931065e-06, + "loss": 1.0525, + "step": 1152 + }, + { + "epoch": 0.10398160256121207, + "grad_norm": 1.535033941856654, + "learning_rate": 3.942900903875369e-06, + "loss": 0.8784, + "step": 1153 + }, + { + "epoch": 0.10407178608468233, + "grad_norm": 1.413236851203034, + "learning_rate": 3.942762221378614e-06, + "loss": 1.0087, + "step": 1154 + }, + { + "epoch": 0.10416196960815259, + "grad_norm": 1.3702626113770595, + "learning_rate": 3.942623373114673e-06, + "loss": 1.0257, + "step": 1155 + }, + { + "epoch": 0.10425215313162285, + "grad_norm": 
1.4828755886858431, + "learning_rate": 3.942484359095396e-06, + "loss": 0.989, + "step": 1156 + }, + { + "epoch": 0.10434233665509311, + "grad_norm": 1.3565449688351374, + "learning_rate": 3.942345179332642e-06, + "loss": 0.9277, + "step": 1157 + }, + { + "epoch": 0.10443252017856337, + "grad_norm": 1.2222803775120157, + "learning_rate": 3.942205833838287e-06, + "loss": 0.953, + "step": 1158 + }, + { + "epoch": 0.10452270370203363, + "grad_norm": 1.4925348965959766, + "learning_rate": 3.9420663226242204e-06, + "loss": 1.1056, + "step": 1159 + }, + { + "epoch": 0.1046128872255039, + "grad_norm": 1.4125196776243165, + "learning_rate": 3.941926645702348e-06, + "loss": 1.0472, + "step": 1160 + }, + { + "epoch": 0.10470307074897416, + "grad_norm": 1.8709225506597051, + "learning_rate": 3.941786803084586e-06, + "loss": 0.9912, + "step": 1161 + }, + { + "epoch": 0.10479325427244443, + "grad_norm": 2.0238969789303116, + "learning_rate": 3.941646794782867e-06, + "loss": 0.9485, + "step": 1162 + }, + { + "epoch": 0.10488343779591469, + "grad_norm": 1.4673340648046735, + "learning_rate": 3.941506620809137e-06, + "loss": 1.015, + "step": 1163 + }, + { + "epoch": 0.10497362131938495, + "grad_norm": 1.5934239970366568, + "learning_rate": 3.941366281175357e-06, + "loss": 0.9892, + "step": 1164 + }, + { + "epoch": 0.10506380484285521, + "grad_norm": 1.211203908840534, + "learning_rate": 3.941225775893502e-06, + "loss": 0.94, + "step": 1165 + }, + { + "epoch": 0.10515398836632547, + "grad_norm": 1.301963337773218, + "learning_rate": 3.941085104975559e-06, + "loss": 0.9806, + "step": 1166 + }, + { + "epoch": 0.10524417188979573, + "grad_norm": 0.8418055841477772, + "learning_rate": 3.9409442684335325e-06, + "loss": 0.8652, + "step": 1167 + }, + { + "epoch": 0.105334355413266, + "grad_norm": 1.5496246867941523, + "learning_rate": 3.940803266279438e-06, + "loss": 0.9963, + "step": 1168 + }, + { + "epoch": 0.10542453893673626, + "grad_norm": 1.5385002232236207, + "learning_rate": 3.9406620985253076e-06, + "loss": 0.9394, + "step": 1169 + }, + { + "epoch": 0.10551472246020652, + "grad_norm": 1.332988138018727, + "learning_rate": 3.940520765183187e-06, + "loss": 0.9781, + "step": 1170 + }, + { + "epoch": 0.10560490598367678, + "grad_norm": 1.6263268262468844, + "learning_rate": 3.940379266265134e-06, + "loss": 1.0419, + "step": 1171 + }, + { + "epoch": 0.10569508950714704, + "grad_norm": 1.7387317545171421, + "learning_rate": 3.940237601783223e-06, + "loss": 0.9665, + "step": 1172 + }, + { + "epoch": 0.1057852730306173, + "grad_norm": 1.3323151853551967, + "learning_rate": 3.940095771749542e-06, + "loss": 0.9961, + "step": 1173 + }, + { + "epoch": 0.10587545655408757, + "grad_norm": 1.5675080721706636, + "learning_rate": 3.939953776176192e-06, + "loss": 1.0539, + "step": 1174 + }, + { + "epoch": 0.10596564007755783, + "grad_norm": 1.1298501579557698, + "learning_rate": 3.939811615075288e-06, + "loss": 1.0215, + "step": 1175 + }, + { + "epoch": 0.1060558236010281, + "grad_norm": 1.650903962787704, + "learning_rate": 3.9396692884589616e-06, + "loss": 1.067, + "step": 1176 + }, + { + "epoch": 0.10614600712449836, + "grad_norm": 1.690562824227095, + "learning_rate": 3.9395267963393565e-06, + "loss": 0.9778, + "step": 1177 + }, + { + "epoch": 0.10623619064796862, + "grad_norm": 1.4594763318318973, + "learning_rate": 3.939384138728631e-06, + "loss": 1.0349, + "step": 1178 + }, + { + "epoch": 0.10632637417143888, + "grad_norm": 1.4938043752054875, + "learning_rate": 3.939241315638956e-06, + "loss": 0.9912, + "step": 
1179 + }, + { + "epoch": 0.10641655769490914, + "grad_norm": 1.3140109765365109, + "learning_rate": 3.93909832708252e-06, + "loss": 0.9861, + "step": 1180 + }, + { + "epoch": 0.1065067412183794, + "grad_norm": 1.242375149738469, + "learning_rate": 3.938955173071523e-06, + "loss": 1.0036, + "step": 1181 + }, + { + "epoch": 0.10659692474184966, + "grad_norm": 0.8040279907933853, + "learning_rate": 3.938811853618179e-06, + "loss": 0.8417, + "step": 1182 + }, + { + "epoch": 0.10668710826531992, + "grad_norm": 1.9731747993804076, + "learning_rate": 3.938668368734717e-06, + "loss": 1.039, + "step": 1183 + }, + { + "epoch": 0.10677729178879018, + "grad_norm": 1.5179925104893817, + "learning_rate": 3.93852471843338e-06, + "loss": 0.8761, + "step": 1184 + }, + { + "epoch": 0.10686747531226044, + "grad_norm": 1.5560547457235696, + "learning_rate": 3.9383809027264254e-06, + "loss": 0.9749, + "step": 1185 + }, + { + "epoch": 0.10695765883573072, + "grad_norm": 1.3660346265763377, + "learning_rate": 3.938236921626124e-06, + "loss": 1.0399, + "step": 1186 + }, + { + "epoch": 0.10704784235920098, + "grad_norm": 1.6488807407742205, + "learning_rate": 3.938092775144761e-06, + "loss": 1.014, + "step": 1187 + }, + { + "epoch": 0.10713802588267124, + "grad_norm": 1.256517774596602, + "learning_rate": 3.9379484632946355e-06, + "loss": 0.9175, + "step": 1188 + }, + { + "epoch": 0.1072282094061415, + "grad_norm": 1.4180055723847522, + "learning_rate": 3.937803986088062e-06, + "loss": 1.0583, + "step": 1189 + }, + { + "epoch": 0.10731839292961176, + "grad_norm": 2.381367930298026, + "learning_rate": 3.937659343537367e-06, + "loss": 0.9544, + "step": 1190 + }, + { + "epoch": 0.10740857645308202, + "grad_norm": 1.5944312640423277, + "learning_rate": 3.937514535654893e-06, + "loss": 1.0689, + "step": 1191 + }, + { + "epoch": 0.10749875997655228, + "grad_norm": 3.6118106963187424, + "learning_rate": 3.937369562452996e-06, + "loss": 1.0106, + "step": 1192 + }, + { + "epoch": 0.10758894350002254, + "grad_norm": 1.5894467469708757, + "learning_rate": 3.937224423944044e-06, + "loss": 1.057, + "step": 1193 + }, + { + "epoch": 0.1076791270234928, + "grad_norm": 1.4897854630061007, + "learning_rate": 3.937079120140423e-06, + "loss": 0.9309, + "step": 1194 + }, + { + "epoch": 0.10776931054696307, + "grad_norm": 1.3637297146806855, + "learning_rate": 3.936933651054531e-06, + "loss": 1.0399, + "step": 1195 + }, + { + "epoch": 0.10785949407043333, + "grad_norm": 1.4069896871897811, + "learning_rate": 3.936788016698779e-06, + "loss": 1.1145, + "step": 1196 + }, + { + "epoch": 0.1079496775939036, + "grad_norm": 1.414896092323441, + "learning_rate": 3.936642217085594e-06, + "loss": 1.0282, + "step": 1197 + }, + { + "epoch": 0.10803986111737386, + "grad_norm": 1.9252356323094428, + "learning_rate": 3.936496252227417e-06, + "loss": 0.9158, + "step": 1198 + }, + { + "epoch": 0.10813004464084412, + "grad_norm": 1.4349645562396949, + "learning_rate": 3.936350122136703e-06, + "loss": 0.9798, + "step": 1199 + }, + { + "epoch": 0.10822022816431438, + "grad_norm": 0.8732623489362934, + "learning_rate": 3.936203826825919e-06, + "loss": 0.8703, + "step": 1200 + }, + { + "epoch": 0.10831041168778464, + "grad_norm": 1.4162713602581432, + "learning_rate": 3.9360573663075475e-06, + "loss": 0.959, + "step": 1201 + }, + { + "epoch": 0.1084005952112549, + "grad_norm": 1.8179996187862253, + "learning_rate": 3.935910740594087e-06, + "loss": 1.0397, + "step": 1202 + }, + { + "epoch": 0.10849077873472517, + "grad_norm": 1.651530974492347, + 
"learning_rate": 3.935763949698047e-06, + "loss": 1.0029, + "step": 1203 + }, + { + "epoch": 0.10858096225819543, + "grad_norm": 1.5463457068981403, + "learning_rate": 3.935616993631954e-06, + "loss": 1.068, + "step": 1204 + }, + { + "epoch": 0.10867114578166569, + "grad_norm": 1.5616668768889057, + "learning_rate": 3.935469872408345e-06, + "loss": 0.9896, + "step": 1205 + }, + { + "epoch": 0.10876132930513595, + "grad_norm": 1.4683637719927016, + "learning_rate": 3.935322586039776e-06, + "loss": 1.0286, + "step": 1206 + }, + { + "epoch": 0.10885151282860621, + "grad_norm": 1.8696132847553506, + "learning_rate": 3.935175134538811e-06, + "loss": 0.9415, + "step": 1207 + }, + { + "epoch": 0.10894169635207647, + "grad_norm": 2.0734427342064916, + "learning_rate": 3.935027517918034e-06, + "loss": 0.9372, + "step": 1208 + }, + { + "epoch": 0.10903187987554674, + "grad_norm": 2.01736020649729, + "learning_rate": 3.93487973619004e-06, + "loss": 1.0261, + "step": 1209 + }, + { + "epoch": 0.109122063399017, + "grad_norm": 1.722998483445734, + "learning_rate": 3.934731789367438e-06, + "loss": 1.0197, + "step": 1210 + }, + { + "epoch": 0.10921224692248727, + "grad_norm": 0.895508711054194, + "learning_rate": 3.9345836774628505e-06, + "loss": 0.8555, + "step": 1211 + }, + { + "epoch": 0.10930243044595753, + "grad_norm": 1.3934015137738804, + "learning_rate": 3.934435400488917e-06, + "loss": 0.9905, + "step": 1212 + }, + { + "epoch": 0.10939261396942779, + "grad_norm": 1.699942318554831, + "learning_rate": 3.934286958458289e-06, + "loss": 1.0324, + "step": 1213 + }, + { + "epoch": 0.10948279749289805, + "grad_norm": 1.3678370649307707, + "learning_rate": 3.934138351383632e-06, + "loss": 1.0625, + "step": 1214 + }, + { + "epoch": 0.10957298101636831, + "grad_norm": 1.3869129313593385, + "learning_rate": 3.933989579277626e-06, + "loss": 0.9315, + "step": 1215 + }, + { + "epoch": 0.10966316453983857, + "grad_norm": 1.305371773186589, + "learning_rate": 3.933840642152966e-06, + "loss": 0.9958, + "step": 1216 + }, + { + "epoch": 0.10975334806330883, + "grad_norm": 2.0494001115596587, + "learning_rate": 3.933691540022359e-06, + "loss": 1.0336, + "step": 1217 + }, + { + "epoch": 0.10984353158677909, + "grad_norm": 1.4502231787884539, + "learning_rate": 3.933542272898527e-06, + "loss": 0.9663, + "step": 1218 + }, + { + "epoch": 0.10993371511024935, + "grad_norm": 1.0120531508959114, + "learning_rate": 3.933392840794207e-06, + "loss": 0.788, + "step": 1219 + }, + { + "epoch": 0.11002389863371961, + "grad_norm": 1.9980257542184645, + "learning_rate": 3.93324324372215e-06, + "loss": 1.1041, + "step": 1220 + }, + { + "epoch": 0.11011408215718989, + "grad_norm": 1.539009441874729, + "learning_rate": 3.9330934816951185e-06, + "loss": 1.0331, + "step": 1221 + }, + { + "epoch": 0.11020426568066015, + "grad_norm": 1.78703453032311, + "learning_rate": 3.932943554725893e-06, + "loss": 1.0205, + "step": 1222 + }, + { + "epoch": 0.11029444920413041, + "grad_norm": 1.2023718321893637, + "learning_rate": 3.932793462827265e-06, + "loss": 0.8769, + "step": 1223 + }, + { + "epoch": 0.11038463272760067, + "grad_norm": 1.3056521475796803, + "learning_rate": 3.932643206012041e-06, + "loss": 1.0539, + "step": 1224 + }, + { + "epoch": 0.11047481625107093, + "grad_norm": 1.529408963554717, + "learning_rate": 3.932492784293043e-06, + "loss": 0.9267, + "step": 1225 + }, + { + "epoch": 0.11056499977454119, + "grad_norm": 1.4930289726946575, + "learning_rate": 3.932342197683104e-06, + "loss": 0.9974, + "step": 1226 + }, + { + "epoch": 
0.11065518329801145, + "grad_norm": 1.5980962692978178, + "learning_rate": 3.932191446195075e-06, + "loss": 0.8737, + "step": 1227 + }, + { + "epoch": 0.11074536682148171, + "grad_norm": 1.701465042728171, + "learning_rate": 3.9320405298418175e-06, + "loss": 0.9985, + "step": 1228 + }, + { + "epoch": 0.11083555034495197, + "grad_norm": 1.6738662982446997, + "learning_rate": 3.9318894486362076e-06, + "loss": 0.9194, + "step": 1229 + }, + { + "epoch": 0.11092573386842224, + "grad_norm": 4.088155636715628, + "learning_rate": 3.9317382025911395e-06, + "loss": 1.0766, + "step": 1230 + }, + { + "epoch": 0.1110159173918925, + "grad_norm": 1.8157049322342937, + "learning_rate": 3.9315867917195145e-06, + "loss": 1.0984, + "step": 1231 + }, + { + "epoch": 0.11110610091536276, + "grad_norm": 1.3259238009674557, + "learning_rate": 3.931435216034256e-06, + "loss": 1.0066, + "step": 1232 + }, + { + "epoch": 0.11119628443883303, + "grad_norm": 1.7323476953978505, + "learning_rate": 3.931283475548293e-06, + "loss": 1.0089, + "step": 1233 + }, + { + "epoch": 0.11128646796230329, + "grad_norm": 1.210058225302709, + "learning_rate": 3.931131570274576e-06, + "loss": 0.9991, + "step": 1234 + }, + { + "epoch": 0.11137665148577355, + "grad_norm": 1.4782771090739917, + "learning_rate": 3.930979500226065e-06, + "loss": 1.026, + "step": 1235 + }, + { + "epoch": 0.11146683500924381, + "grad_norm": 1.5600389553977165, + "learning_rate": 3.930827265415736e-06, + "loss": 0.8427, + "step": 1236 + }, + { + "epoch": 0.11155701853271408, + "grad_norm": 1.54887746032416, + "learning_rate": 3.930674865856578e-06, + "loss": 1.0089, + "step": 1237 + }, + { + "epoch": 0.11164720205618434, + "grad_norm": 1.7463326173433682, + "learning_rate": 3.930522301561595e-06, + "loss": 1.0326, + "step": 1238 + }, + { + "epoch": 0.1117373855796546, + "grad_norm": 1.6529915624173566, + "learning_rate": 3.930369572543804e-06, + "loss": 1.0448, + "step": 1239 + }, + { + "epoch": 0.11182756910312486, + "grad_norm": 1.709992796787907, + "learning_rate": 3.930216678816237e-06, + "loss": 0.939, + "step": 1240 + }, + { + "epoch": 0.11191775262659512, + "grad_norm": 1.3963733975789958, + "learning_rate": 3.930063620391941e-06, + "loss": 0.8313, + "step": 1241 + }, + { + "epoch": 0.11200793615006538, + "grad_norm": 1.7200933581898024, + "learning_rate": 3.9299103972839735e-06, + "loss": 1.0096, + "step": 1242 + }, + { + "epoch": 0.11209811967353564, + "grad_norm": 0.7906338747755284, + "learning_rate": 3.92975700950541e-06, + "loss": 0.804, + "step": 1243 + }, + { + "epoch": 0.1121883031970059, + "grad_norm": 1.6588727230389544, + "learning_rate": 3.929603457069338e-06, + "loss": 1.0316, + "step": 1244 + }, + { + "epoch": 0.11227848672047618, + "grad_norm": 1.4450262662618862, + "learning_rate": 3.929449739988859e-06, + "loss": 0.975, + "step": 1245 + }, + { + "epoch": 0.11236867024394644, + "grad_norm": 1.5248939789986318, + "learning_rate": 3.929295858277089e-06, + "loss": 0.9992, + "step": 1246 + }, + { + "epoch": 0.1124588537674167, + "grad_norm": 1.91292609909658, + "learning_rate": 3.9291418119471585e-06, + "loss": 0.9947, + "step": 1247 + }, + { + "epoch": 0.11254903729088696, + "grad_norm": 1.6160579551369971, + "learning_rate": 3.928987601012212e-06, + "loss": 1.0306, + "step": 1248 + }, + { + "epoch": 0.11263922081435722, + "grad_norm": 1.5797056081999872, + "learning_rate": 3.928833225485407e-06, + "loss": 0.9846, + "step": 1249 + }, + { + "epoch": 0.11272940433782748, + "grad_norm": 1.483969783686121, + "learning_rate": 
3.928678685379915e-06, + "loss": 0.9913, + "step": 1250 + }, + { + "epoch": 0.11281958786129774, + "grad_norm": 1.4689635263619094, + "learning_rate": 3.928523980708924e-06, + "loss": 1.0471, + "step": 1251 + }, + { + "epoch": 0.112909771384768, + "grad_norm": 1.758812920611363, + "learning_rate": 3.928369111485632e-06, + "loss": 0.9633, + "step": 1252 + }, + { + "epoch": 0.11299995490823826, + "grad_norm": 1.5060813334515475, + "learning_rate": 3.928214077723255e-06, + "loss": 0.9918, + "step": 1253 + }, + { + "epoch": 0.11309013843170852, + "grad_norm": 1.903139735072427, + "learning_rate": 3.928058879435021e-06, + "loss": 0.9853, + "step": 1254 + }, + { + "epoch": 0.11318032195517878, + "grad_norm": 1.720380391263313, + "learning_rate": 3.9279035166341725e-06, + "loss": 0.9795, + "step": 1255 + }, + { + "epoch": 0.11327050547864904, + "grad_norm": 1.6220129876798608, + "learning_rate": 3.927747989333965e-06, + "loss": 0.9812, + "step": 1256 + }, + { + "epoch": 0.11336068900211932, + "grad_norm": 1.6843274989180252, + "learning_rate": 3.927592297547669e-06, + "loss": 0.9701, + "step": 1257 + }, + { + "epoch": 0.11345087252558958, + "grad_norm": 1.9307525413819495, + "learning_rate": 3.927436441288571e-06, + "loss": 1.0203, + "step": 1258 + }, + { + "epoch": 0.11354105604905984, + "grad_norm": 1.4984442272174494, + "learning_rate": 3.927280420569968e-06, + "loss": 1.0237, + "step": 1259 + }, + { + "epoch": 0.1136312395725301, + "grad_norm": 2.962514331638099, + "learning_rate": 3.927124235405171e-06, + "loss": 1.0324, + "step": 1260 + }, + { + "epoch": 0.11372142309600036, + "grad_norm": 1.9689484753548772, + "learning_rate": 3.92696788580751e-06, + "loss": 0.857, + "step": 1261 + }, + { + "epoch": 0.11381160661947062, + "grad_norm": 1.4721545267835745, + "learning_rate": 3.9268113717903225e-06, + "loss": 0.8335, + "step": 1262 + }, + { + "epoch": 0.11390179014294088, + "grad_norm": 1.5781932098453373, + "learning_rate": 3.926654693366965e-06, + "loss": 1.0608, + "step": 1263 + }, + { + "epoch": 0.11399197366641114, + "grad_norm": 1.7175338604072723, + "learning_rate": 3.926497850550805e-06, + "loss": 1.0076, + "step": 1264 + }, + { + "epoch": 0.1140821571898814, + "grad_norm": 1.7199607745954122, + "learning_rate": 3.926340843355226e-06, + "loss": 1.0007, + "step": 1265 + }, + { + "epoch": 0.11417234071335167, + "grad_norm": 1.5372797521372539, + "learning_rate": 3.926183671793625e-06, + "loss": 1.0199, + "step": 1266 + }, + { + "epoch": 0.11426252423682193, + "grad_norm": 1.474552299961261, + "learning_rate": 3.926026335879412e-06, + "loss": 0.9721, + "step": 1267 + }, + { + "epoch": 0.11435270776029219, + "grad_norm": 0.7858781674629017, + "learning_rate": 3.925868835626012e-06, + "loss": 0.7612, + "step": 1268 + }, + { + "epoch": 0.11444289128376246, + "grad_norm": 1.7462440784438595, + "learning_rate": 3.925711171046864e-06, + "loss": 1.0116, + "step": 1269 + }, + { + "epoch": 0.11453307480723272, + "grad_norm": 1.3025709332793993, + "learning_rate": 3.925553342155421e-06, + "loss": 0.9567, + "step": 1270 + }, + { + "epoch": 0.11462325833070298, + "grad_norm": 0.8060244222495342, + "learning_rate": 3.9253953489651485e-06, + "loss": 0.867, + "step": 1271 + }, + { + "epoch": 0.11471344185417325, + "grad_norm": 0.7688458096106404, + "learning_rate": 3.925237191489529e-06, + "loss": 0.8187, + "step": 1272 + }, + { + "epoch": 0.1148036253776435, + "grad_norm": 1.4088307753035432, + "learning_rate": 3.925078869742056e-06, + "loss": 0.9677, + "step": 1273 + }, + { + "epoch": 
0.11489380890111377, + "grad_norm": 1.3802106269481482, + "learning_rate": 3.92492038373624e-06, + "loss": 0.9375, + "step": 1274 + }, + { + "epoch": 0.11498399242458403, + "grad_norm": 1.433932895718352, + "learning_rate": 3.924761733485602e-06, + "loss": 1.0484, + "step": 1275 + }, + { + "epoch": 0.11507417594805429, + "grad_norm": 1.5930218996888221, + "learning_rate": 3.92460291900368e-06, + "loss": 0.9936, + "step": 1276 + }, + { + "epoch": 0.11516435947152455, + "grad_norm": 1.466151841073971, + "learning_rate": 3.924443940304025e-06, + "loss": 1.0188, + "step": 1277 + }, + { + "epoch": 0.11525454299499481, + "grad_norm": 1.9470972047629087, + "learning_rate": 3.924284797400202e-06, + "loss": 1.0409, + "step": 1278 + }, + { + "epoch": 0.11534472651846507, + "grad_norm": 1.702330727809697, + "learning_rate": 3.924125490305789e-06, + "loss": 1.0215, + "step": 1279 + }, + { + "epoch": 0.11543491004193533, + "grad_norm": 1.3858499733726228, + "learning_rate": 3.923966019034381e-06, + "loss": 0.9772, + "step": 1280 + }, + { + "epoch": 0.1155250935654056, + "grad_norm": 1.4689377198075697, + "learning_rate": 3.923806383599583e-06, + "loss": 1.0083, + "step": 1281 + }, + { + "epoch": 0.11561527708887587, + "grad_norm": 1.3682570279521333, + "learning_rate": 3.923646584015017e-06, + "loss": 0.9668, + "step": 1282 + }, + { + "epoch": 0.11570546061234613, + "grad_norm": 1.822152382622325, + "learning_rate": 3.923486620294316e-06, + "loss": 0.9915, + "step": 1283 + }, + { + "epoch": 0.11579564413581639, + "grad_norm": 1.568883444896999, + "learning_rate": 3.923326492451132e-06, + "loss": 0.9988, + "step": 1284 + }, + { + "epoch": 0.11588582765928665, + "grad_norm": 1.424720432229712, + "learning_rate": 3.923166200499125e-06, + "loss": 0.9738, + "step": 1285 + }, + { + "epoch": 0.11597601118275691, + "grad_norm": 1.2789718098460416, + "learning_rate": 3.923005744451975e-06, + "loss": 0.9672, + "step": 1286 + }, + { + "epoch": 0.11606619470622717, + "grad_norm": 2.1620367050019373, + "learning_rate": 3.9228451243233715e-06, + "loss": 0.9462, + "step": 1287 + }, + { + "epoch": 0.11615637822969743, + "grad_norm": 1.4392849993509589, + "learning_rate": 3.9226843401270195e-06, + "loss": 1.0297, + "step": 1288 + }, + { + "epoch": 0.1162465617531677, + "grad_norm": 1.4096108858755516, + "learning_rate": 3.9225233918766376e-06, + "loss": 0.9967, + "step": 1289 + }, + { + "epoch": 0.11633674527663795, + "grad_norm": 1.3038430136311048, + "learning_rate": 3.92236227958596e-06, + "loss": 1.0075, + "step": 1290 + }, + { + "epoch": 0.11642692880010821, + "grad_norm": 2.0750394013122477, + "learning_rate": 3.922201003268731e-06, + "loss": 1.0154, + "step": 1291 + }, + { + "epoch": 0.11651711232357848, + "grad_norm": 0.8738507148097603, + "learning_rate": 3.922039562938715e-06, + "loss": 0.8516, + "step": 1292 + }, + { + "epoch": 0.11660729584704875, + "grad_norm": 1.3678853461058962, + "learning_rate": 3.921877958609685e-06, + "loss": 1.0505, + "step": 1293 + }, + { + "epoch": 0.11669747937051901, + "grad_norm": 0.9229274496534999, + "learning_rate": 3.921716190295431e-06, + "loss": 0.9517, + "step": 1294 + }, + { + "epoch": 0.11678766289398927, + "grad_norm": 1.5806751608591516, + "learning_rate": 3.921554258009755e-06, + "loss": 0.9555, + "step": 1295 + }, + { + "epoch": 0.11687784641745953, + "grad_norm": 1.3614455491243822, + "learning_rate": 3.921392161766474e-06, + "loss": 0.9789, + "step": 1296 + }, + { + "epoch": 0.1169680299409298, + "grad_norm": 2.1338169960271145, + "learning_rate": 
3.92122990157942e-06, + "loss": 0.9691, + "step": 1297 + }, + { + "epoch": 0.11705821346440005, + "grad_norm": 1.6536858341054816, + "learning_rate": 3.921067477462437e-06, + "loss": 1.0379, + "step": 1298 + }, + { + "epoch": 0.11714839698787032, + "grad_norm": 1.772358435827479, + "learning_rate": 3.920904889429385e-06, + "loss": 1.0538, + "step": 1299 + }, + { + "epoch": 0.11723858051134058, + "grad_norm": 1.6399756463498865, + "learning_rate": 3.920742137494135e-06, + "loss": 1.0349, + "step": 1300 + }, + { + "epoch": 0.11732876403481084, + "grad_norm": 2.2330674834041653, + "learning_rate": 3.920579221670575e-06, + "loss": 1.0585, + "step": 1301 + }, + { + "epoch": 0.1174189475582811, + "grad_norm": 1.6315347152650073, + "learning_rate": 3.920416141972606e-06, + "loss": 1.0006, + "step": 1302 + }, + { + "epoch": 0.11750913108175136, + "grad_norm": 1.3128663414636297, + "learning_rate": 3.920252898414143e-06, + "loss": 0.9954, + "step": 1303 + }, + { + "epoch": 0.11759931460522162, + "grad_norm": 1.7934361598635804, + "learning_rate": 3.920089491009114e-06, + "loss": 1.0063, + "step": 1304 + }, + { + "epoch": 0.1176894981286919, + "grad_norm": 1.5102705610795617, + "learning_rate": 3.919925919771463e-06, + "loss": 1.0675, + "step": 1305 + }, + { + "epoch": 0.11777968165216215, + "grad_norm": 1.8925846757175073, + "learning_rate": 3.919762184715146e-06, + "loss": 0.915, + "step": 1306 + }, + { + "epoch": 0.11786986517563242, + "grad_norm": 1.6021743722090998, + "learning_rate": 3.919598285854134e-06, + "loss": 0.9994, + "step": 1307 + }, + { + "epoch": 0.11796004869910268, + "grad_norm": 1.6463693236928791, + "learning_rate": 3.919434223202411e-06, + "loss": 1.0559, + "step": 1308 + }, + { + "epoch": 0.11805023222257294, + "grad_norm": 1.4596917864361505, + "learning_rate": 3.919269996773977e-06, + "loss": 1.0496, + "step": 1309 + }, + { + "epoch": 0.1181404157460432, + "grad_norm": 1.2947350736952015, + "learning_rate": 3.919105606582844e-06, + "loss": 1.0063, + "step": 1310 + }, + { + "epoch": 0.11823059926951346, + "grad_norm": 1.6332399821690224, + "learning_rate": 3.918941052643039e-06, + "loss": 0.972, + "step": 1311 + }, + { + "epoch": 0.11832078279298372, + "grad_norm": 1.7991907870413406, + "learning_rate": 3.918776334968602e-06, + "loss": 1.0086, + "step": 1312 + }, + { + "epoch": 0.11841096631645398, + "grad_norm": 2.427043887187554, + "learning_rate": 3.918611453573589e-06, + "loss": 1.1042, + "step": 1313 + }, + { + "epoch": 0.11850114983992424, + "grad_norm": 1.7665288181626615, + "learning_rate": 3.918446408472066e-06, + "loss": 0.9321, + "step": 1314 + }, + { + "epoch": 0.1185913333633945, + "grad_norm": 1.473617685087407, + "learning_rate": 3.918281199678119e-06, + "loss": 1.0382, + "step": 1315 + }, + { + "epoch": 0.11868151688686476, + "grad_norm": 0.7943662909173658, + "learning_rate": 3.9181158272058414e-06, + "loss": 0.82, + "step": 1316 + }, + { + "epoch": 0.11877170041033504, + "grad_norm": 1.4879909442502997, + "learning_rate": 3.9179502910693455e-06, + "loss": 1.0273, + "step": 1317 + }, + { + "epoch": 0.1188618839338053, + "grad_norm": 1.3581214035414901, + "learning_rate": 3.917784591282756e-06, + "loss": 0.9564, + "step": 1318 + }, + { + "epoch": 0.11895206745727556, + "grad_norm": 1.3274793514075431, + "learning_rate": 3.9176187278602105e-06, + "loss": 1.0271, + "step": 1319 + }, + { + "epoch": 0.11904225098074582, + "grad_norm": 3.1336705276643473, + "learning_rate": 3.9174527008158606e-06, + "loss": 0.9417, + "step": 1320 + }, + { + "epoch": 
0.11913243450421608, + "grad_norm": 1.470535005279272, + "learning_rate": 3.917286510163874e-06, + "loss": 0.9215, + "step": 1321 + }, + { + "epoch": 0.11922261802768634, + "grad_norm": 1.1170766281918907, + "learning_rate": 3.917120155918431e-06, + "loss": 0.9767, + "step": 1322 + }, + { + "epoch": 0.1193128015511566, + "grad_norm": 0.8102690071058763, + "learning_rate": 3.916953638093725e-06, + "loss": 0.8748, + "step": 1323 + }, + { + "epoch": 0.11940298507462686, + "grad_norm": 1.501131663998632, + "learning_rate": 3.916786956703964e-06, + "loss": 0.991, + "step": 1324 + }, + { + "epoch": 0.11949316859809712, + "grad_norm": 1.8059971008657714, + "learning_rate": 3.916620111763372e-06, + "loss": 0.9875, + "step": 1325 + }, + { + "epoch": 0.11958335212156739, + "grad_norm": 1.4840202309136854, + "learning_rate": 3.916453103286183e-06, + "loss": 1.0498, + "step": 1326 + }, + { + "epoch": 0.11967353564503765, + "grad_norm": 1.8602272452721291, + "learning_rate": 3.916285931286648e-06, + "loss": 1.0418, + "step": 1327 + }, + { + "epoch": 0.11976371916850792, + "grad_norm": 1.522853719903764, + "learning_rate": 3.916118595779031e-06, + "loss": 0.9436, + "step": 1328 + }, + { + "epoch": 0.11985390269197818, + "grad_norm": 1.58698534996131, + "learning_rate": 3.915951096777611e-06, + "loss": 0.9678, + "step": 1329 + }, + { + "epoch": 0.11994408621544844, + "grad_norm": 1.3215546492593502, + "learning_rate": 3.915783434296678e-06, + "loss": 0.9389, + "step": 1330 + }, + { + "epoch": 0.1200342697389187, + "grad_norm": 1.7595396246040491, + "learning_rate": 3.91561560835054e-06, + "loss": 0.9766, + "step": 1331 + }, + { + "epoch": 0.12012445326238896, + "grad_norm": 1.404376031716498, + "learning_rate": 3.915447618953515e-06, + "loss": 1.0157, + "step": 1332 + }, + { + "epoch": 0.12021463678585922, + "grad_norm": 1.5954952034491128, + "learning_rate": 3.915279466119937e-06, + "loss": 0.9498, + "step": 1333 + }, + { + "epoch": 0.12030482030932949, + "grad_norm": 0.94287061962904, + "learning_rate": 3.9151111498641546e-06, + "loss": 0.842, + "step": 1334 + }, + { + "epoch": 0.12039500383279975, + "grad_norm": 1.8022796950183169, + "learning_rate": 3.914942670200529e-06, + "loss": 0.8736, + "step": 1335 + }, + { + "epoch": 0.12048518735627001, + "grad_norm": 1.7613007923001203, + "learning_rate": 3.914774027143436e-06, + "loss": 1.0433, + "step": 1336 + }, + { + "epoch": 0.12057537087974027, + "grad_norm": 3.638868309202232, + "learning_rate": 3.914605220707265e-06, + "loss": 0.9831, + "step": 1337 + }, + { + "epoch": 0.12066555440321053, + "grad_norm": 1.638024003147602, + "learning_rate": 3.9144362509064194e-06, + "loss": 1.147, + "step": 1338 + }, + { + "epoch": 0.12075573792668079, + "grad_norm": 1.7698978271736572, + "learning_rate": 3.914267117755317e-06, + "loss": 1.065, + "step": 1339 + }, + { + "epoch": 0.12084592145015106, + "grad_norm": 1.4213790867140945, + "learning_rate": 3.914097821268389e-06, + "loss": 0.9585, + "step": 1340 + }, + { + "epoch": 0.12093610497362133, + "grad_norm": 1.2899177257412655, + "learning_rate": 3.913928361460081e-06, + "loss": 1.0088, + "step": 1341 + }, + { + "epoch": 0.12102628849709159, + "grad_norm": 1.3988068162221319, + "learning_rate": 3.913758738344851e-06, + "loss": 0.9817, + "step": 1342 + }, + { + "epoch": 0.12111647202056185, + "grad_norm": 0.8639067373207113, + "learning_rate": 3.913588951937174e-06, + "loss": 0.8329, + "step": 1343 + }, + { + "epoch": 0.12120665554403211, + "grad_norm": 0.7577051696277768, + "learning_rate": 
3.9134190022515355e-06, + "loss": 0.8292, + "step": 1344 + }, + { + "epoch": 0.12129683906750237, + "grad_norm": 1.6608442249824613, + "learning_rate": 3.913248889302438e-06, + "loss": 0.959, + "step": 1345 + }, + { + "epoch": 0.12138702259097263, + "grad_norm": 1.696371705672478, + "learning_rate": 3.913078613104395e-06, + "loss": 0.9321, + "step": 1346 + }, + { + "epoch": 0.12147720611444289, + "grad_norm": 1.6174740744728482, + "learning_rate": 3.912908173671936e-06, + "loss": 0.8786, + "step": 1347 + }, + { + "epoch": 0.12156738963791315, + "grad_norm": 1.668546697112291, + "learning_rate": 3.9127375710196044e-06, + "loss": 0.9299, + "step": 1348 + }, + { + "epoch": 0.12165757316138341, + "grad_norm": 2.087644871236111, + "learning_rate": 3.912566805161957e-06, + "loss": 0.9414, + "step": 1349 + }, + { + "epoch": 0.12174775668485367, + "grad_norm": 1.6608384828667913, + "learning_rate": 3.912395876113564e-06, + "loss": 1.0157, + "step": 1350 + }, + { + "epoch": 0.12183794020832393, + "grad_norm": 1.5137340114514592, + "learning_rate": 3.912224783889009e-06, + "loss": 1.004, + "step": 1351 + }, + { + "epoch": 0.12192812373179421, + "grad_norm": 1.5252621409757234, + "learning_rate": 3.912053528502892e-06, + "loss": 0.9975, + "step": 1352 + }, + { + "epoch": 0.12201830725526447, + "grad_norm": 1.3681790924880408, + "learning_rate": 3.911882109969825e-06, + "loss": 0.9168, + "step": 1353 + }, + { + "epoch": 0.12210849077873473, + "grad_norm": 1.6562367564697522, + "learning_rate": 3.911710528304435e-06, + "loss": 0.998, + "step": 1354 + }, + { + "epoch": 0.12219867430220499, + "grad_norm": 1.9334324624967205, + "learning_rate": 3.911538783521361e-06, + "loss": 0.9454, + "step": 1355 + }, + { + "epoch": 0.12228885782567525, + "grad_norm": 1.7024513802328938, + "learning_rate": 3.9113668756352575e-06, + "loss": 1.0103, + "step": 1356 + }, + { + "epoch": 0.12237904134914551, + "grad_norm": 1.2927011172543792, + "learning_rate": 3.911194804660793e-06, + "loss": 0.9242, + "step": 1357 + }, + { + "epoch": 0.12246922487261577, + "grad_norm": 1.4775700436689132, + "learning_rate": 3.91102257061265e-06, + "loss": 1.075, + "step": 1358 + }, + { + "epoch": 0.12255940839608603, + "grad_norm": 1.323157411231502, + "learning_rate": 3.910850173505524e-06, + "loss": 1.0352, + "step": 1359 + }, + { + "epoch": 0.1226495919195563, + "grad_norm": 1.5131176530737644, + "learning_rate": 3.9106776133541255e-06, + "loss": 1.0732, + "step": 1360 + }, + { + "epoch": 0.12273977544302656, + "grad_norm": 1.6073329525497113, + "learning_rate": 3.9105048901731766e-06, + "loss": 0.9543, + "step": 1361 + }, + { + "epoch": 0.12282995896649682, + "grad_norm": 1.5454200298361525, + "learning_rate": 3.9103320039774165e-06, + "loss": 0.9744, + "step": 1362 + }, + { + "epoch": 0.12292014248996708, + "grad_norm": 1.4037588731004653, + "learning_rate": 3.9101589547815965e-06, + "loss": 1.0558, + "step": 1363 + }, + { + "epoch": 0.12301032601343735, + "grad_norm": 1.2193290117547686, + "learning_rate": 3.909985742600482e-06, + "loss": 0.981, + "step": 1364 + }, + { + "epoch": 0.12310050953690761, + "grad_norm": 1.4588837901708507, + "learning_rate": 3.909812367448852e-06, + "loss": 0.9591, + "step": 1365 + }, + { + "epoch": 0.12319069306037787, + "grad_norm": 2.0059084640186673, + "learning_rate": 3.909638829341501e-06, + "loss": 0.9743, + "step": 1366 + }, + { + "epoch": 0.12328087658384813, + "grad_norm": 1.448585721593291, + "learning_rate": 3.909465128293234e-06, + "loss": 1.0511, + "step": 1367 + }, + { + "epoch": 
0.1233710601073184, + "grad_norm": 1.5617561095417294, + "learning_rate": 3.9092912643188745e-06, + "loss": 1.0542, + "step": 1368 + }, + { + "epoch": 0.12346124363078866, + "grad_norm": 1.6268941403764072, + "learning_rate": 3.909117237433256e-06, + "loss": 0.9855, + "step": 1369 + }, + { + "epoch": 0.12355142715425892, + "grad_norm": 1.7383957042946474, + "learning_rate": 3.908943047651229e-06, + "loss": 1.0384, + "step": 1370 + }, + { + "epoch": 0.12364161067772918, + "grad_norm": 1.5227964960044424, + "learning_rate": 3.908768694987655e-06, + "loss": 0.9756, + "step": 1371 + }, + { + "epoch": 0.12373179420119944, + "grad_norm": 2.2313825936549647, + "learning_rate": 3.908594179457411e-06, + "loss": 0.9143, + "step": 1372 + }, + { + "epoch": 0.1238219777246697, + "grad_norm": 1.7504807901687691, + "learning_rate": 3.908419501075388e-06, + "loss": 0.993, + "step": 1373 + }, + { + "epoch": 0.12391216124813996, + "grad_norm": 1.4240299704483854, + "learning_rate": 3.90824465985649e-06, + "loss": 1.0706, + "step": 1374 + }, + { + "epoch": 0.12400234477161022, + "grad_norm": 1.6360183710405212, + "learning_rate": 3.908069655815636e-06, + "loss": 1.0352, + "step": 1375 + }, + { + "epoch": 0.1240925282950805, + "grad_norm": 1.883393194153135, + "learning_rate": 3.907894488967758e-06, + "loss": 0.9351, + "step": 1376 + }, + { + "epoch": 0.12418271181855076, + "grad_norm": 1.6056221473344798, + "learning_rate": 3.9077191593278005e-06, + "loss": 1.0571, + "step": 1377 + }, + { + "epoch": 0.12427289534202102, + "grad_norm": 1.007830715754105, + "learning_rate": 3.9075436669107265e-06, + "loss": 0.8395, + "step": 1378 + }, + { + "epoch": 0.12436307886549128, + "grad_norm": 1.7285690040751527, + "learning_rate": 3.90736801173151e-06, + "loss": 1.0084, + "step": 1379 + }, + { + "epoch": 0.12445326238896154, + "grad_norm": 1.5167082228923539, + "learning_rate": 3.907192193805136e-06, + "loss": 1.0103, + "step": 1380 + }, + { + "epoch": 0.1245434459124318, + "grad_norm": 1.869303952279155, + "learning_rate": 3.907016213146608e-06, + "loss": 1.0437, + "step": 1381 + }, + { + "epoch": 0.12463362943590206, + "grad_norm": 1.4770522339868888, + "learning_rate": 3.906840069770942e-06, + "loss": 1.109, + "step": 1382 + }, + { + "epoch": 0.12472381295937232, + "grad_norm": 1.266371000859516, + "learning_rate": 3.906663763693167e-06, + "loss": 1.0273, + "step": 1383 + }, + { + "epoch": 0.12481399648284258, + "grad_norm": 1.532315097874319, + "learning_rate": 3.906487294928327e-06, + "loss": 0.9724, + "step": 1384 + }, + { + "epoch": 0.12490418000631284, + "grad_norm": 1.6990400549223643, + "learning_rate": 3.906310663491478e-06, + "loss": 0.9954, + "step": 1385 + }, + { + "epoch": 0.1249943635297831, + "grad_norm": 1.5455831662882378, + "learning_rate": 3.906133869397692e-06, + "loss": 1.0519, + "step": 1386 + }, + { + "epoch": 0.12508454705325336, + "grad_norm": 1.5198709064174076, + "learning_rate": 3.905956912662054e-06, + "loss": 0.9525, + "step": 1387 + }, + { + "epoch": 0.12517473057672363, + "grad_norm": 1.955581645930007, + "learning_rate": 3.905779793299662e-06, + "loss": 1.0594, + "step": 1388 + }, + { + "epoch": 0.12526491410019389, + "grad_norm": 1.536099520235823, + "learning_rate": 3.905602511325631e-06, + "loss": 1.0305, + "step": 1389 + }, + { + "epoch": 0.12535509762366415, + "grad_norm": 1.6318593140530884, + "learning_rate": 3.905425066755086e-06, + "loss": 0.9577, + "step": 1390 + }, + { + "epoch": 0.1254452811471344, + "grad_norm": 1.258873107329957, + "learning_rate": 
3.905247459603168e-06, + "loss": 0.9548, + "step": 1391 + }, + { + "epoch": 0.12553546467060467, + "grad_norm": 1.4791444678275047, + "learning_rate": 3.905069689885031e-06, + "loss": 0.8763, + "step": 1392 + }, + { + "epoch": 0.12562564819407493, + "grad_norm": 1.326338879683841, + "learning_rate": 3.904891757615843e-06, + "loss": 0.8768, + "step": 1393 + }, + { + "epoch": 0.12571583171754522, + "grad_norm": 4.066282653396044, + "learning_rate": 3.9047136628107874e-06, + "loss": 0.791, + "step": 1394 + }, + { + "epoch": 0.12580601524101548, + "grad_norm": 1.3939584362368456, + "learning_rate": 3.904535405485059e-06, + "loss": 1.1232, + "step": 1395 + }, + { + "epoch": 0.12589619876448574, + "grad_norm": 1.5615777918151041, + "learning_rate": 3.90435698565387e-06, + "loss": 0.95, + "step": 1396 + }, + { + "epoch": 0.125986382287956, + "grad_norm": 1.5043171903297867, + "learning_rate": 3.904178403332441e-06, + "loss": 0.965, + "step": 1397 + }, + { + "epoch": 0.12607656581142626, + "grad_norm": 1.706784507674181, + "learning_rate": 3.903999658536012e-06, + "loss": 1.072, + "step": 1398 + }, + { + "epoch": 0.12616674933489652, + "grad_norm": 1.4078839875592883, + "learning_rate": 3.903820751279833e-06, + "loss": 0.9939, + "step": 1399 + }, + { + "epoch": 0.12625693285836678, + "grad_norm": 1.7493844312036784, + "learning_rate": 3.90364168157917e-06, + "loss": 1.0251, + "step": 1400 + }, + { + "epoch": 0.12634711638183704, + "grad_norm": 1.6341642050535707, + "learning_rate": 3.903462449449302e-06, + "loss": 1.0253, + "step": 1401 + }, + { + "epoch": 0.1264372999053073, + "grad_norm": 1.475883206530533, + "learning_rate": 3.903283054905522e-06, + "loss": 0.9723, + "step": 1402 + }, + { + "epoch": 0.12652748342877757, + "grad_norm": 1.836426625128595, + "learning_rate": 3.9031034979631385e-06, + "loss": 0.9115, + "step": 1403 + }, + { + "epoch": 0.12661766695224783, + "grad_norm": 1.3801059852895425, + "learning_rate": 3.902923778637469e-06, + "loss": 1.0279, + "step": 1404 + }, + { + "epoch": 0.1267078504757181, + "grad_norm": 1.3655229735507421, + "learning_rate": 3.902743896943852e-06, + "loss": 0.9315, + "step": 1405 + }, + { + "epoch": 0.12679803399918835, + "grad_norm": 0.870203666799312, + "learning_rate": 3.902563852897633e-06, + "loss": 0.872, + "step": 1406 + }, + { + "epoch": 0.1268882175226586, + "grad_norm": 0.9617812011671142, + "learning_rate": 3.9023836465141755e-06, + "loss": 0.8413, + "step": 1407 + }, + { + "epoch": 0.12697840104612887, + "grad_norm": 1.6258318312556714, + "learning_rate": 3.902203277808856e-06, + "loss": 1.0342, + "step": 1408 + }, + { + "epoch": 0.12706858456959913, + "grad_norm": 1.5831048783907202, + "learning_rate": 3.902022746797064e-06, + "loss": 1.0501, + "step": 1409 + }, + { + "epoch": 0.1271587680930694, + "grad_norm": 1.728313541991838, + "learning_rate": 3.9018420534942035e-06, + "loss": 1.1079, + "step": 1410 + }, + { + "epoch": 0.12724895161653965, + "grad_norm": 1.8221027918899793, + "learning_rate": 3.9016611979156935e-06, + "loss": 0.9862, + "step": 1411 + }, + { + "epoch": 0.1273391351400099, + "grad_norm": 1.9281244163574127, + "learning_rate": 3.9014801800769635e-06, + "loss": 0.9065, + "step": 1412 + }, + { + "epoch": 0.12742931866348017, + "grad_norm": 1.5212160003548865, + "learning_rate": 3.901298999993459e-06, + "loss": 1.0211, + "step": 1413 + }, + { + "epoch": 0.12751950218695043, + "grad_norm": 1.6248075664727244, + "learning_rate": 3.901117657680642e-06, + "loss": 0.9277, + "step": 1414 + }, + { + "epoch": 0.1276096857104207, + 
"grad_norm": 1.3822996406162282, + "learning_rate": 3.900936153153982e-06, + "loss": 0.9237, + "step": 1415 + }, + { + "epoch": 0.12769986923389096, + "grad_norm": 0.7938054373297587, + "learning_rate": 3.900754486428968e-06, + "loss": 0.7911, + "step": 1416 + }, + { + "epoch": 0.12779005275736122, + "grad_norm": 1.5130858240488212, + "learning_rate": 3.900572657521102e-06, + "loss": 1.0836, + "step": 1417 + }, + { + "epoch": 0.1278802362808315, + "grad_norm": 1.2844214382269037, + "learning_rate": 3.900390666445896e-06, + "loss": 0.9745, + "step": 1418 + }, + { + "epoch": 0.12797041980430177, + "grad_norm": 1.5540763382148863, + "learning_rate": 3.9002085132188795e-06, + "loss": 0.9589, + "step": 1419 + }, + { + "epoch": 0.12806060332777203, + "grad_norm": 3.4423679908812126, + "learning_rate": 3.9000261978555964e-06, + "loss": 1.0351, + "step": 1420 + }, + { + "epoch": 0.1281507868512423, + "grad_norm": 1.5608324689535154, + "learning_rate": 3.8998437203716e-06, + "loss": 1.0581, + "step": 1421 + }, + { + "epoch": 0.12824097037471255, + "grad_norm": 1.5608713435758532, + "learning_rate": 3.899661080782462e-06, + "loss": 1.0189, + "step": 1422 + }, + { + "epoch": 0.1283311538981828, + "grad_norm": 1.5284061103113626, + "learning_rate": 3.899478279103767e-06, + "loss": 1.0241, + "step": 1423 + }, + { + "epoch": 0.12842133742165307, + "grad_norm": 1.7580368237940631, + "learning_rate": 3.8992953153511105e-06, + "loss": 1.0243, + "step": 1424 + }, + { + "epoch": 0.12851152094512333, + "grad_norm": 1.3606259958511515, + "learning_rate": 3.899112189540106e-06, + "loss": 1.0526, + "step": 1425 + }, + { + "epoch": 0.1286017044685936, + "grad_norm": 1.7805329602904474, + "learning_rate": 3.898928901686377e-06, + "loss": 1.0367, + "step": 1426 + }, + { + "epoch": 0.12869188799206385, + "grad_norm": 0.8696835654385956, + "learning_rate": 3.898745451805564e-06, + "loss": 0.814, + "step": 1427 + }, + { + "epoch": 0.1287820715155341, + "grad_norm": 1.5218424736792615, + "learning_rate": 3.898561839913319e-06, + "loss": 0.9346, + "step": 1428 + }, + { + "epoch": 0.12887225503900437, + "grad_norm": 2.350068083243574, + "learning_rate": 3.89837806602531e-06, + "loss": 0.8478, + "step": 1429 + }, + { + "epoch": 0.12896243856247463, + "grad_norm": 1.5038323718664057, + "learning_rate": 3.898194130157217e-06, + "loss": 0.9993, + "step": 1430 + }, + { + "epoch": 0.1290526220859449, + "grad_norm": 1.8253323996931372, + "learning_rate": 3.8980100323247335e-06, + "loss": 0.9307, + "step": 1431 + }, + { + "epoch": 0.12914280560941516, + "grad_norm": 1.6615159155295454, + "learning_rate": 3.897825772543568e-06, + "loss": 0.93, + "step": 1432 + }, + { + "epoch": 0.12923298913288542, + "grad_norm": 1.731613779236376, + "learning_rate": 3.897641350829444e-06, + "loss": 1.0513, + "step": 1433 + }, + { + "epoch": 0.12932317265635568, + "grad_norm": 1.3579695440677828, + "learning_rate": 3.897456767198096e-06, + "loss": 1.0744, + "step": 1434 + }, + { + "epoch": 0.12941335617982594, + "grad_norm": 1.4648442382811686, + "learning_rate": 3.897272021665275e-06, + "loss": 0.934, + "step": 1435 + }, + { + "epoch": 0.1295035397032962, + "grad_norm": 1.3365675126651348, + "learning_rate": 3.897087114246743e-06, + "loss": 1.0992, + "step": 1436 + }, + { + "epoch": 0.12959372322676646, + "grad_norm": 1.677008972730841, + "learning_rate": 3.896902044958279e-06, + "loss": 1.1165, + "step": 1437 + }, + { + "epoch": 0.12968390675023672, + "grad_norm": 1.569585071816261, + "learning_rate": 3.896716813815672e-06, + "loss": 1.052, + 
"step": 1438 + }, + { + "epoch": 0.12977409027370698, + "grad_norm": 1.712867867156578, + "learning_rate": 3.896531420834728e-06, + "loss": 0.9386, + "step": 1439 + }, + { + "epoch": 0.12986427379717724, + "grad_norm": 1.426779536004197, + "learning_rate": 3.896345866031266e-06, + "loss": 0.9001, + "step": 1440 + }, + { + "epoch": 0.1299544573206475, + "grad_norm": 1.4089465249109328, + "learning_rate": 3.896160149421119e-06, + "loss": 1.0569, + "step": 1441 + }, + { + "epoch": 0.1300446408441178, + "grad_norm": 1.5848298362340056, + "learning_rate": 3.8959742710201314e-06, + "loss": 1.0638, + "step": 1442 + }, + { + "epoch": 0.13013482436758805, + "grad_norm": 2.049666852165127, + "learning_rate": 3.895788230844166e-06, + "loss": 0.891, + "step": 1443 + }, + { + "epoch": 0.13022500789105831, + "grad_norm": 1.4954132843333623, + "learning_rate": 3.895602028909095e-06, + "loss": 1.1467, + "step": 1444 + }, + { + "epoch": 0.13031519141452858, + "grad_norm": 0.7534055875699099, + "learning_rate": 3.895415665230807e-06, + "loss": 0.8156, + "step": 1445 + }, + { + "epoch": 0.13040537493799884, + "grad_norm": 1.2373205363064717, + "learning_rate": 3.895229139825203e-06, + "loss": 1.0146, + "step": 1446 + }, + { + "epoch": 0.1304955584614691, + "grad_norm": 1.5456174883137401, + "learning_rate": 3.895042452708198e-06, + "loss": 0.9132, + "step": 1447 + }, + { + "epoch": 0.13058574198493936, + "grad_norm": 1.4645785078870885, + "learning_rate": 3.894855603895723e-06, + "loss": 0.9011, + "step": 1448 + }, + { + "epoch": 0.13067592550840962, + "grad_norm": 1.3455290882534352, + "learning_rate": 3.894668593403718e-06, + "loss": 1.0722, + "step": 1449 + }, + { + "epoch": 0.13076610903187988, + "grad_norm": 1.3983342649405963, + "learning_rate": 3.8944814212481425e-06, + "loss": 1.0012, + "step": 1450 + }, + { + "epoch": 0.13085629255535014, + "grad_norm": 1.6916037922091225, + "learning_rate": 3.894294087444966e-06, + "loss": 0.9818, + "step": 1451 + }, + { + "epoch": 0.1309464760788204, + "grad_norm": 1.5323499504672624, + "learning_rate": 3.894106592010173e-06, + "loss": 0.8826, + "step": 1452 + }, + { + "epoch": 0.13103665960229066, + "grad_norm": 1.4963927605132386, + "learning_rate": 3.893918934959762e-06, + "loss": 0.8764, + "step": 1453 + }, + { + "epoch": 0.13112684312576092, + "grad_norm": 1.548190472681032, + "learning_rate": 3.893731116309743e-06, + "loss": 0.9281, + "step": 1454 + }, + { + "epoch": 0.13121702664923118, + "grad_norm": 1.4148080451626583, + "learning_rate": 3.893543136076145e-06, + "loss": 1.0134, + "step": 1455 + }, + { + "epoch": 0.13130721017270144, + "grad_norm": 1.6002742144923832, + "learning_rate": 3.893354994275006e-06, + "loss": 1.0175, + "step": 1456 + }, + { + "epoch": 0.1313973936961717, + "grad_norm": 1.520193784181554, + "learning_rate": 3.893166690922378e-06, + "loss": 0.9678, + "step": 1457 + }, + { + "epoch": 0.13148757721964197, + "grad_norm": 2.4488577690770335, + "learning_rate": 3.892978226034329e-06, + "loss": 1.0276, + "step": 1458 + }, + { + "epoch": 0.13157776074311223, + "grad_norm": 1.622288716200337, + "learning_rate": 3.89278959962694e-06, + "loss": 1.0448, + "step": 1459 + }, + { + "epoch": 0.1316679442665825, + "grad_norm": 1.46240910956803, + "learning_rate": 3.8926008117163056e-06, + "loss": 0.9709, + "step": 1460 + }, + { + "epoch": 0.13175812779005275, + "grad_norm": 1.7361990469808768, + "learning_rate": 3.892411862318535e-06, + "loss": 0.9741, + "step": 1461 + }, + { + "epoch": 0.131848311313523, + "grad_norm": 1.3271684680182207, + 
"learning_rate": 3.892222751449749e-06, + "loss": 1.0195, + "step": 1462 + }, + { + "epoch": 0.13193849483699327, + "grad_norm": 1.391871804215283, + "learning_rate": 3.892033479126084e-06, + "loss": 0.9436, + "step": 1463 + }, + { + "epoch": 0.13202867836046353, + "grad_norm": 2.2412772108597343, + "learning_rate": 3.891844045363691e-06, + "loss": 0.9772, + "step": 1464 + }, + { + "epoch": 0.13211886188393382, + "grad_norm": 1.5334163055914274, + "learning_rate": 3.891654450178732e-06, + "loss": 0.9155, + "step": 1465 + }, + { + "epoch": 0.13220904540740408, + "grad_norm": 2.216359852468905, + "learning_rate": 3.891464693587385e-06, + "loss": 1.0368, + "step": 1466 + }, + { + "epoch": 0.13229922893087434, + "grad_norm": 1.787156201358857, + "learning_rate": 3.89127477560584e-06, + "loss": 1.0286, + "step": 1467 + }, + { + "epoch": 0.1323894124543446, + "grad_norm": 1.6086415962266032, + "learning_rate": 3.891084696250304e-06, + "loss": 1.0337, + "step": 1468 + }, + { + "epoch": 0.13247959597781486, + "grad_norm": 1.611844288776365, + "learning_rate": 3.890894455536993e-06, + "loss": 1.0422, + "step": 1469 + }, + { + "epoch": 0.13256977950128512, + "grad_norm": 2.3711700678551377, + "learning_rate": 3.890704053482142e-06, + "loss": 1.0317, + "step": 1470 + }, + { + "epoch": 0.13265996302475538, + "grad_norm": 1.5612662212668356, + "learning_rate": 3.890513490101995e-06, + "loss": 0.9405, + "step": 1471 + }, + { + "epoch": 0.13275014654822564, + "grad_norm": 0.8118574095619829, + "learning_rate": 3.890322765412814e-06, + "loss": 0.8214, + "step": 1472 + }, + { + "epoch": 0.1328403300716959, + "grad_norm": 1.2369661297027494, + "learning_rate": 3.890131879430871e-06, + "loss": 1.015, + "step": 1473 + }, + { + "epoch": 0.13293051359516617, + "grad_norm": 1.4413433112899807, + "learning_rate": 3.889940832172454e-06, + "loss": 1.0157, + "step": 1474 + }, + { + "epoch": 0.13302069711863643, + "grad_norm": 1.6829602671628938, + "learning_rate": 3.889749623653864e-06, + "loss": 1.0428, + "step": 1475 + }, + { + "epoch": 0.1331108806421067, + "grad_norm": 1.3222896060994662, + "learning_rate": 3.889558253891416e-06, + "loss": 0.9363, + "step": 1476 + }, + { + "epoch": 0.13320106416557695, + "grad_norm": 0.7253711736861813, + "learning_rate": 3.8893667229014385e-06, + "loss": 0.8155, + "step": 1477 + }, + { + "epoch": 0.1332912476890472, + "grad_norm": 2.18985893579036, + "learning_rate": 3.8891750307002746e-06, + "loss": 0.9013, + "step": 1478 + }, + { + "epoch": 0.13338143121251747, + "grad_norm": 1.280338148959306, + "learning_rate": 3.888983177304281e-06, + "loss": 0.9313, + "step": 1479 + }, + { + "epoch": 0.13347161473598773, + "grad_norm": 1.0162452564386861, + "learning_rate": 3.888791162729826e-06, + "loss": 0.8199, + "step": 1480 + }, + { + "epoch": 0.133561798259458, + "grad_norm": 1.833581589738517, + "learning_rate": 3.888598986993295e-06, + "loss": 1.0168, + "step": 1481 + }, + { + "epoch": 0.13365198178292825, + "grad_norm": 1.5285838524526163, + "learning_rate": 3.888406650111085e-06, + "loss": 1.0292, + "step": 1482 + }, + { + "epoch": 0.1337421653063985, + "grad_norm": 1.5730745686816034, + "learning_rate": 3.888214152099607e-06, + "loss": 0.998, + "step": 1483 + }, + { + "epoch": 0.13383234882986877, + "grad_norm": 1.5690339610336914, + "learning_rate": 3.888021492975285e-06, + "loss": 1.06, + "step": 1484 + }, + { + "epoch": 0.13392253235333904, + "grad_norm": 1.6381206471932226, + "learning_rate": 3.88782867275456e-06, + "loss": 1.0511, + "step": 1485 + }, + { + "epoch": 
0.1340127158768093, + "grad_norm": 1.2438753764539425, + "learning_rate": 3.8876356914538824e-06, + "loss": 0.958, + "step": 1486 + }, + { + "epoch": 0.13410289940027956, + "grad_norm": 1.3771542233194214, + "learning_rate": 3.88744254908972e-06, + "loss": 0.956, + "step": 1487 + }, + { + "epoch": 0.13419308292374982, + "grad_norm": 1.7141650688202266, + "learning_rate": 3.887249245678552e-06, + "loss": 0.9725, + "step": 1488 + }, + { + "epoch": 0.1342832664472201, + "grad_norm": 1.4464105507402538, + "learning_rate": 3.887055781236872e-06, + "loss": 1.0622, + "step": 1489 + }, + { + "epoch": 0.13437344997069037, + "grad_norm": 1.7797949352440539, + "learning_rate": 3.886862155781186e-06, + "loss": 0.8784, + "step": 1490 + }, + { + "epoch": 0.13446363349416063, + "grad_norm": 0.7966082631401125, + "learning_rate": 3.886668369328019e-06, + "loss": 0.827, + "step": 1491 + }, + { + "epoch": 0.1345538170176309, + "grad_norm": 1.8266967671877428, + "learning_rate": 3.886474421893904e-06, + "loss": 1.0277, + "step": 1492 + }, + { + "epoch": 0.13464400054110115, + "grad_norm": 1.3995198158613935, + "learning_rate": 3.886280313495388e-06, + "loss": 0.9197, + "step": 1493 + }, + { + "epoch": 0.1347341840645714, + "grad_norm": 1.604469898182825, + "learning_rate": 3.886086044149035e-06, + "loss": 0.9645, + "step": 1494 + }, + { + "epoch": 0.13482436758804167, + "grad_norm": 1.3563109757740168, + "learning_rate": 3.885891613871421e-06, + "loss": 0.9964, + "step": 1495 + }, + { + "epoch": 0.13491455111151193, + "grad_norm": 1.4924196392258844, + "learning_rate": 3.885697022679136e-06, + "loss": 1.0468, + "step": 1496 + }, + { + "epoch": 0.1350047346349822, + "grad_norm": 1.5749398749849166, + "learning_rate": 3.885502270588784e-06, + "loss": 1.018, + "step": 1497 + }, + { + "epoch": 0.13509491815845245, + "grad_norm": 1.5033109204946888, + "learning_rate": 3.885307357616981e-06, + "loss": 0.9924, + "step": 1498 + }, + { + "epoch": 0.13518510168192271, + "grad_norm": 1.4685991087977175, + "learning_rate": 3.885112283780359e-06, + "loss": 0.9916, + "step": 1499 + }, + { + "epoch": 0.13527528520539298, + "grad_norm": 1.4466102341369937, + "learning_rate": 3.8849170490955624e-06, + "loss": 0.8882, + "step": 1500 + }, + { + "epoch": 0.13536546872886324, + "grad_norm": 1.7541136757041946, + "learning_rate": 3.88472165357925e-06, + "loss": 0.9883, + "step": 1501 + }, + { + "epoch": 0.1354556522523335, + "grad_norm": 1.4527915654076229, + "learning_rate": 3.884526097248093e-06, + "loss": 0.9511, + "step": 1502 + }, + { + "epoch": 0.13554583577580376, + "grad_norm": 1.5476674063606366, + "learning_rate": 3.884330380118779e-06, + "loss": 1.0097, + "step": 1503 + }, + { + "epoch": 0.13563601929927402, + "grad_norm": 1.4628660966474618, + "learning_rate": 3.884134502208007e-06, + "loss": 0.9448, + "step": 1504 + }, + { + "epoch": 0.13572620282274428, + "grad_norm": 1.6319274694042374, + "learning_rate": 3.88393846353249e-06, + "loss": 1.1032, + "step": 1505 + }, + { + "epoch": 0.13581638634621454, + "grad_norm": 1.314893719544709, + "learning_rate": 3.883742264108955e-06, + "loss": 1.0506, + "step": 1506 + }, + { + "epoch": 0.1359065698696848, + "grad_norm": 1.3989262499317496, + "learning_rate": 3.883545903954145e-06, + "loss": 1.0037, + "step": 1507 + }, + { + "epoch": 0.13599675339315506, + "grad_norm": 1.3744775039452715, + "learning_rate": 3.883349383084811e-06, + "loss": 0.9841, + "step": 1508 + }, + { + "epoch": 0.13608693691662532, + "grad_norm": 1.5229842856755524, + "learning_rate": 
3.883152701517723e-06, + "loss": 0.9158, + "step": 1509 + }, + { + "epoch": 0.13617712044009558, + "grad_norm": 1.5543033757904312, + "learning_rate": 3.882955859269664e-06, + "loss": 0.9834, + "step": 1510 + }, + { + "epoch": 0.13626730396356584, + "grad_norm": 1.2838125422025368, + "learning_rate": 3.882758856357428e-06, + "loss": 1.0014, + "step": 1511 + }, + { + "epoch": 0.1363574874870361, + "grad_norm": 1.6262854113817387, + "learning_rate": 3.882561692797824e-06, + "loss": 0.9803, + "step": 1512 + }, + { + "epoch": 0.1364476710105064, + "grad_norm": 1.4606034820107705, + "learning_rate": 3.882364368607677e-06, + "loss": 0.9987, + "step": 1513 + }, + { + "epoch": 0.13653785453397665, + "grad_norm": 1.6468705960817467, + "learning_rate": 3.8821668838038225e-06, + "loss": 1.0067, + "step": 1514 + }, + { + "epoch": 0.13662803805744692, + "grad_norm": 1.5499938964723579, + "learning_rate": 3.881969238403111e-06, + "loss": 0.897, + "step": 1515 + }, + { + "epoch": 0.13671822158091718, + "grad_norm": 1.8656084734058487, + "learning_rate": 3.881771432422408e-06, + "loss": 1.1118, + "step": 1516 + }, + { + "epoch": 0.13680840510438744, + "grad_norm": 1.3627216753733944, + "learning_rate": 3.88157346587859e-06, + "loss": 1.048, + "step": 1517 + }, + { + "epoch": 0.1368985886278577, + "grad_norm": 1.2146036579313808, + "learning_rate": 3.881375338788549e-06, + "loss": 0.971, + "step": 1518 + }, + { + "epoch": 0.13698877215132796, + "grad_norm": 3.9071967846747064, + "learning_rate": 3.88117705116919e-06, + "loss": 1.0115, + "step": 1519 + }, + { + "epoch": 0.13707895567479822, + "grad_norm": 0.909876211413722, + "learning_rate": 3.880978603037432e-06, + "loss": 0.874, + "step": 1520 + }, + { + "epoch": 0.13716913919826848, + "grad_norm": 1.4065995099924233, + "learning_rate": 3.880779994410209e-06, + "loss": 0.9953, + "step": 1521 + }, + { + "epoch": 0.13725932272173874, + "grad_norm": 1.6124454577780551, + "learning_rate": 3.880581225304466e-06, + "loss": 0.8671, + "step": 1522 + }, + { + "epoch": 0.137349506245209, + "grad_norm": 1.587662233488365, + "learning_rate": 3.880382295737163e-06, + "loss": 1.022, + "step": 1523 + }, + { + "epoch": 0.13743968976867926, + "grad_norm": 1.9977579066340037, + "learning_rate": 3.880183205725274e-06, + "loss": 0.9677, + "step": 1524 + }, + { + "epoch": 0.13752987329214952, + "grad_norm": 1.9183659346213382, + "learning_rate": 3.879983955285788e-06, + "loss": 0.9359, + "step": 1525 + }, + { + "epoch": 0.13762005681561978, + "grad_norm": 1.6426446744702738, + "learning_rate": 3.879784544435703e-06, + "loss": 0.9707, + "step": 1526 + }, + { + "epoch": 0.13771024033909005, + "grad_norm": 1.9366282070672334, + "learning_rate": 3.879584973192037e-06, + "loss": 0.9308, + "step": 1527 + }, + { + "epoch": 0.1378004238625603, + "grad_norm": 1.547584679155588, + "learning_rate": 3.8793852415718165e-06, + "loss": 0.9234, + "step": 1528 + }, + { + "epoch": 0.13789060738603057, + "grad_norm": 1.653418513816098, + "learning_rate": 3.879185349592085e-06, + "loss": 0.9949, + "step": 1529 + }, + { + "epoch": 0.13798079090950083, + "grad_norm": 1.277983665579819, + "learning_rate": 3.878985297269897e-06, + "loss": 1.0154, + "step": 1530 + }, + { + "epoch": 0.1380709744329711, + "grad_norm": 1.6587925953918945, + "learning_rate": 3.878785084622323e-06, + "loss": 1.0331, + "step": 1531 + }, + { + "epoch": 0.13816115795644135, + "grad_norm": 1.7993417807954695, + "learning_rate": 3.878584711666447e-06, + "loss": 1.1737, + "step": 1532 + }, + { + "epoch": 0.1382513414799116, + 
"grad_norm": 1.5510340563824598, + "learning_rate": 3.8783841784193635e-06, + "loss": 1.0112, + "step": 1533 + }, + { + "epoch": 0.13834152500338187, + "grad_norm": 1.4724436639394194, + "learning_rate": 3.8781834848981855e-06, + "loss": 0.972, + "step": 1534 + }, + { + "epoch": 0.13843170852685213, + "grad_norm": 1.510956249101008, + "learning_rate": 3.877982631120037e-06, + "loss": 0.9281, + "step": 1535 + }, + { + "epoch": 0.1385218920503224, + "grad_norm": 2.0549935812745472, + "learning_rate": 3.877781617102053e-06, + "loss": 1.0608, + "step": 1536 + }, + { + "epoch": 0.13861207557379268, + "grad_norm": 1.7027703712195816, + "learning_rate": 3.877580442861389e-06, + "loss": 1.0326, + "step": 1537 + }, + { + "epoch": 0.13870225909726294, + "grad_norm": 1.4815911942273867, + "learning_rate": 3.877379108415209e-06, + "loss": 1.0259, + "step": 1538 + }, + { + "epoch": 0.1387924426207332, + "grad_norm": 1.3767083998920886, + "learning_rate": 3.8771776137806915e-06, + "loss": 1.0168, + "step": 1539 + }, + { + "epoch": 0.13888262614420346, + "grad_norm": 1.6180405877566637, + "learning_rate": 3.8769759589750295e-06, + "loss": 0.9321, + "step": 1540 + }, + { + "epoch": 0.13897280966767372, + "grad_norm": 1.391180506133351, + "learning_rate": 3.876774144015429e-06, + "loss": 0.9984, + "step": 1541 + }, + { + "epoch": 0.13906299319114399, + "grad_norm": 1.8804649184995825, + "learning_rate": 3.87657216891911e-06, + "loss": 1.0248, + "step": 1542 + }, + { + "epoch": 0.13915317671461425, + "grad_norm": 1.6064321778290926, + "learning_rate": 3.876370033703307e-06, + "loss": 0.9938, + "step": 1543 + }, + { + "epoch": 0.1392433602380845, + "grad_norm": 1.7829493397411762, + "learning_rate": 3.876167738385265e-06, + "loss": 0.8931, + "step": 1544 + }, + { + "epoch": 0.13933354376155477, + "grad_norm": 2.0745709078998233, + "learning_rate": 3.875965282982247e-06, + "loss": 1.0456, + "step": 1545 + }, + { + "epoch": 0.13942372728502503, + "grad_norm": 1.4569242300048062, + "learning_rate": 3.875762667511528e-06, + "loss": 1.0049, + "step": 1546 + }, + { + "epoch": 0.1395139108084953, + "grad_norm": 1.4248989772122864, + "learning_rate": 3.875559891990394e-06, + "loss": 1.0087, + "step": 1547 + }, + { + "epoch": 0.13960409433196555, + "grad_norm": 1.8279028211384765, + "learning_rate": 3.875356956436149e-06, + "loss": 0.9642, + "step": 1548 + }, + { + "epoch": 0.1396942778554358, + "grad_norm": 1.5718401410160328, + "learning_rate": 3.875153860866108e-06, + "loss": 0.9778, + "step": 1549 + }, + { + "epoch": 0.13978446137890607, + "grad_norm": 1.700267809922238, + "learning_rate": 3.8749506052976e-06, + "loss": 1.0455, + "step": 1550 + }, + { + "epoch": 0.13987464490237633, + "grad_norm": 2.438833214431635, + "learning_rate": 3.874747189747968e-06, + "loss": 1.0247, + "step": 1551 + }, + { + "epoch": 0.1399648284258466, + "grad_norm": 1.5842668725912588, + "learning_rate": 3.874543614234568e-06, + "loss": 1.0068, + "step": 1552 + }, + { + "epoch": 0.14005501194931685, + "grad_norm": 1.3676172507474198, + "learning_rate": 3.874339878774771e-06, + "loss": 0.9774, + "step": 1553 + }, + { + "epoch": 0.14014519547278712, + "grad_norm": 1.5171952126618298, + "learning_rate": 3.874135983385961e-06, + "loss": 1.0014, + "step": 1554 + }, + { + "epoch": 0.14023537899625738, + "grad_norm": 1.5720403474332743, + "learning_rate": 3.873931928085535e-06, + "loss": 1.0022, + "step": 1555 + }, + { + "epoch": 0.14032556251972764, + "grad_norm": 1.2800202399382166, + "learning_rate": 3.873727712890904e-06, + "loss": 
1.0755, + "step": 1556 + }, + { + "epoch": 0.1404157460431979, + "grad_norm": 1.3389783906282648, + "learning_rate": 3.873523337819493e-06, + "loss": 0.9665, + "step": 1557 + }, + { + "epoch": 0.14050592956666816, + "grad_norm": 1.257517098752331, + "learning_rate": 3.873318802888739e-06, + "loss": 0.9251, + "step": 1558 + }, + { + "epoch": 0.14059611309013842, + "grad_norm": 0.8472732170753239, + "learning_rate": 3.873114108116097e-06, + "loss": 0.7462, + "step": 1559 + }, + { + "epoch": 0.14068629661360868, + "grad_norm": 1.6888222635593353, + "learning_rate": 3.872909253519031e-06, + "loss": 1.0829, + "step": 1560 + }, + { + "epoch": 0.14077648013707897, + "grad_norm": 1.7198453014134927, + "learning_rate": 3.8727042391150195e-06, + "loss": 1.0721, + "step": 1561 + }, + { + "epoch": 0.14086666366054923, + "grad_norm": 1.5000507028275634, + "learning_rate": 3.872499064921556e-06, + "loss": 0.9306, + "step": 1562 + }, + { + "epoch": 0.1409568471840195, + "grad_norm": 1.5161481131516088, + "learning_rate": 3.872293730956149e-06, + "loss": 1.0098, + "step": 1563 + }, + { + "epoch": 0.14104703070748975, + "grad_norm": 1.7329993075915855, + "learning_rate": 3.872088237236316e-06, + "loss": 0.9894, + "step": 1564 + }, + { + "epoch": 0.14113721423096, + "grad_norm": 1.6708629075982113, + "learning_rate": 3.871882583779592e-06, + "loss": 0.9114, + "step": 1565 + }, + { + "epoch": 0.14122739775443027, + "grad_norm": 1.3514509210118162, + "learning_rate": 3.871676770603525e-06, + "loss": 0.9896, + "step": 1566 + }, + { + "epoch": 0.14131758127790053, + "grad_norm": 1.6412397413666773, + "learning_rate": 3.871470797725676e-06, + "loss": 0.9885, + "step": 1567 + }, + { + "epoch": 0.1414077648013708, + "grad_norm": 1.517471682867207, + "learning_rate": 3.8712646651636185e-06, + "loss": 1.0739, + "step": 1568 + }, + { + "epoch": 0.14149794832484106, + "grad_norm": 1.2156531774398154, + "learning_rate": 3.871058372934942e-06, + "loss": 0.989, + "step": 1569 + }, + { + "epoch": 0.14158813184831132, + "grad_norm": 1.3743667444776562, + "learning_rate": 3.8708519210572485e-06, + "loss": 0.8318, + "step": 1570 + }, + { + "epoch": 0.14167831537178158, + "grad_norm": 1.4821794032648425, + "learning_rate": 3.870645309548153e-06, + "loss": 0.9318, + "step": 1571 + }, + { + "epoch": 0.14176849889525184, + "grad_norm": 0.8729955670942254, + "learning_rate": 3.870438538425284e-06, + "loss": 0.7728, + "step": 1572 + }, + { + "epoch": 0.1418586824187221, + "grad_norm": 1.7650953865752146, + "learning_rate": 3.870231607706287e-06, + "loss": 0.9456, + "step": 1573 + }, + { + "epoch": 0.14194886594219236, + "grad_norm": 1.528761651906285, + "learning_rate": 3.870024517408817e-06, + "loss": 1.0007, + "step": 1574 + }, + { + "epoch": 0.14203904946566262, + "grad_norm": 1.9376066855314475, + "learning_rate": 3.8698172675505425e-06, + "loss": 0.9246, + "step": 1575 + }, + { + "epoch": 0.14212923298913288, + "grad_norm": 1.0981099558772967, + "learning_rate": 3.86960985814915e-06, + "loss": 0.8068, + "step": 1576 + }, + { + "epoch": 0.14221941651260314, + "grad_norm": 1.3553923219796264, + "learning_rate": 3.869402289222335e-06, + "loss": 0.9229, + "step": 1577 + }, + { + "epoch": 0.1423096000360734, + "grad_norm": 1.585284244188542, + "learning_rate": 3.869194560787808e-06, + "loss": 0.9751, + "step": 1578 + }, + { + "epoch": 0.14239978355954366, + "grad_norm": 3.650062738166799, + "learning_rate": 3.868986672863296e-06, + "loss": 0.9266, + "step": 1579 + }, + { + "epoch": 0.14248996708301392, + "grad_norm": 
1.5287713990854293, + "learning_rate": 3.868778625466535e-06, + "loss": 1.0956, + "step": 1580 + }, + { + "epoch": 0.14258015060648419, + "grad_norm": 1.7272791691253857, + "learning_rate": 3.868570418615278e-06, + "loss": 0.9201, + "step": 1581 + }, + { + "epoch": 0.14267033412995445, + "grad_norm": 1.12936043760013, + "learning_rate": 3.8683620523272885e-06, + "loss": 0.7921, + "step": 1582 + }, + { + "epoch": 0.1427605176534247, + "grad_norm": 1.647150486411012, + "learning_rate": 3.8681535266203464e-06, + "loss": 1.102, + "step": 1583 + }, + { + "epoch": 0.14285070117689497, + "grad_norm": 1.5590348539732266, + "learning_rate": 3.867944841512246e-06, + "loss": 1.0243, + "step": 1584 + }, + { + "epoch": 0.14294088470036526, + "grad_norm": 1.7075101062927975, + "learning_rate": 3.867735997020791e-06, + "loss": 1.0078, + "step": 1585 + }, + { + "epoch": 0.14303106822383552, + "grad_norm": 1.609925999806673, + "learning_rate": 3.867526993163802e-06, + "loss": 0.8975, + "step": 1586 + }, + { + "epoch": 0.14312125174730578, + "grad_norm": 2.2641604195027614, + "learning_rate": 3.867317829959113e-06, + "loss": 0.8644, + "step": 1587 + }, + { + "epoch": 0.14321143527077604, + "grad_norm": 1.5681380974476151, + "learning_rate": 3.8671085074245704e-06, + "loss": 0.7954, + "step": 1588 + }, + { + "epoch": 0.1433016187942463, + "grad_norm": 1.6334124471357152, + "learning_rate": 3.866899025578035e-06, + "loss": 1.0565, + "step": 1589 + }, + { + "epoch": 0.14339180231771656, + "grad_norm": 1.404890293168774, + "learning_rate": 3.86668938443738e-06, + "loss": 0.9673, + "step": 1590 + }, + { + "epoch": 0.14348198584118682, + "grad_norm": 1.643568395873647, + "learning_rate": 3.866479584020495e-06, + "loss": 1.0112, + "step": 1591 + }, + { + "epoch": 0.14357216936465708, + "grad_norm": 1.5514391985531535, + "learning_rate": 3.866269624345279e-06, + "loss": 1.0299, + "step": 1592 + }, + { + "epoch": 0.14366235288812734, + "grad_norm": 1.5594908921592525, + "learning_rate": 3.866059505429649e-06, + "loss": 1.073, + "step": 1593 + }, + { + "epoch": 0.1437525364115976, + "grad_norm": 0.901050383832509, + "learning_rate": 3.865849227291532e-06, + "loss": 0.7938, + "step": 1594 + }, + { + "epoch": 0.14384271993506786, + "grad_norm": 1.6168663701202037, + "learning_rate": 3.865638789948872e-06, + "loss": 0.9917, + "step": 1595 + }, + { + "epoch": 0.14393290345853813, + "grad_norm": 1.7422478498603513, + "learning_rate": 3.865428193419622e-06, + "loss": 0.9564, + "step": 1596 + }, + { + "epoch": 0.14402308698200839, + "grad_norm": 1.600158671617608, + "learning_rate": 3.865217437721753e-06, + "loss": 0.984, + "step": 1597 + }, + { + "epoch": 0.14411327050547865, + "grad_norm": 1.5695766413928873, + "learning_rate": 3.865006522873249e-06, + "loss": 0.9291, + "step": 1598 + }, + { + "epoch": 0.1442034540289489, + "grad_norm": 1.666460413886395, + "learning_rate": 3.864795448892103e-06, + "loss": 0.9997, + "step": 1599 + }, + { + "epoch": 0.14429363755241917, + "grad_norm": 1.527771326494564, + "learning_rate": 3.864584215796327e-06, + "loss": 1.0409, + "step": 1600 + }, + { + "epoch": 0.14438382107588943, + "grad_norm": 1.5875746071172379, + "learning_rate": 3.8643728236039455e-06, + "loss": 1.0168, + "step": 1601 + }, + { + "epoch": 0.1444740045993597, + "grad_norm": 1.38267697462202, + "learning_rate": 3.864161272332994e-06, + "loss": 0.9766, + "step": 1602 + }, + { + "epoch": 0.14456418812282995, + "grad_norm": 1.4304598197054073, + "learning_rate": 3.863949562001524e-06, + "loss": 1.0396, + "step": 1603 + 
}, + { + "epoch": 0.1446543716463002, + "grad_norm": 1.492757558532295, + "learning_rate": 3.8637376926276005e-06, + "loss": 0.9696, + "step": 1604 + }, + { + "epoch": 0.14474455516977047, + "grad_norm": 1.8868385419074785, + "learning_rate": 3.8635256642293e-06, + "loss": 0.993, + "step": 1605 + }, + { + "epoch": 0.14483473869324073, + "grad_norm": 1.5436642395167393, + "learning_rate": 3.863313476824714e-06, + "loss": 0.9737, + "step": 1606 + }, + { + "epoch": 0.144924922216711, + "grad_norm": 1.7868373309418277, + "learning_rate": 3.863101130431948e-06, + "loss": 0.9688, + "step": 1607 + }, + { + "epoch": 0.14501510574018128, + "grad_norm": 1.4532354169343902, + "learning_rate": 3.862888625069121e-06, + "loss": 1.0323, + "step": 1608 + }, + { + "epoch": 0.14510528926365154, + "grad_norm": 1.314282886653133, + "learning_rate": 3.8626759607543645e-06, + "loss": 0.9647, + "step": 1609 + }, + { + "epoch": 0.1451954727871218, + "grad_norm": 2.4726717739009625, + "learning_rate": 3.862463137505825e-06, + "loss": 0.9394, + "step": 1610 + }, + { + "epoch": 0.14528565631059207, + "grad_norm": 1.518406390342289, + "learning_rate": 3.862250155341659e-06, + "loss": 0.9488, + "step": 1611 + }, + { + "epoch": 0.14537583983406233, + "grad_norm": 1.4359492354385623, + "learning_rate": 3.862037014280043e-06, + "loss": 1.1215, + "step": 1612 + }, + { + "epoch": 0.1454660233575326, + "grad_norm": 1.3202821175053392, + "learning_rate": 3.861823714339162e-06, + "loss": 0.9805, + "step": 1613 + }, + { + "epoch": 0.14555620688100285, + "grad_norm": 1.3108359641945821, + "learning_rate": 3.861610255537215e-06, + "loss": 0.9993, + "step": 1614 + }, + { + "epoch": 0.1456463904044731, + "grad_norm": 1.4427612882551621, + "learning_rate": 3.8613966378924165e-06, + "loss": 1.028, + "step": 1615 + }, + { + "epoch": 0.14573657392794337, + "grad_norm": 1.620810242646136, + "learning_rate": 3.861182861422993e-06, + "loss": 0.9401, + "step": 1616 + }, + { + "epoch": 0.14582675745141363, + "grad_norm": 1.3756758156125501, + "learning_rate": 3.860968926147185e-06, + "loss": 1.0679, + "step": 1617 + }, + { + "epoch": 0.1459169409748839, + "grad_norm": 4.742592305686216, + "learning_rate": 3.860754832083247e-06, + "loss": 0.8327, + "step": 1618 + }, + { + "epoch": 0.14600712449835415, + "grad_norm": 1.3533041432400619, + "learning_rate": 3.8605405792494475e-06, + "loss": 1.064, + "step": 1619 + }, + { + "epoch": 0.1460973080218244, + "grad_norm": 1.6329181399691197, + "learning_rate": 3.860326167664066e-06, + "loss": 1.0066, + "step": 1620 + }, + { + "epoch": 0.14618749154529467, + "grad_norm": 1.5032702401589375, + "learning_rate": 3.860111597345399e-06, + "loss": 0.984, + "step": 1621 + }, + { + "epoch": 0.14627767506876493, + "grad_norm": 2.072328557560837, + "learning_rate": 3.859896868311753e-06, + "loss": 0.856, + "step": 1622 + }, + { + "epoch": 0.1463678585922352, + "grad_norm": 1.3332690084358785, + "learning_rate": 3.859681980581452e-06, + "loss": 1.075, + "step": 1623 + }, + { + "epoch": 0.14645804211570546, + "grad_norm": 1.6617093350063485, + "learning_rate": 3.859466934172829e-06, + "loss": 0.9618, + "step": 1624 + }, + { + "epoch": 0.14654822563917572, + "grad_norm": 1.3321186383798762, + "learning_rate": 3.859251729104235e-06, + "loss": 0.9292, + "step": 1625 + }, + { + "epoch": 0.14663840916264598, + "grad_norm": 1.2743247749941933, + "learning_rate": 3.859036365394031e-06, + "loss": 0.94, + "step": 1626 + }, + { + "epoch": 0.14672859268611624, + "grad_norm": 1.5844649069292394, + "learning_rate": 
3.858820843060594e-06, + "loss": 0.9776, + "step": 1627 + }, + { + "epoch": 0.1468187762095865, + "grad_norm": 1.441119157686484, + "learning_rate": 3.858605162122314e-06, + "loss": 0.9686, + "step": 1628 + }, + { + "epoch": 0.14690895973305676, + "grad_norm": 1.466718445571928, + "learning_rate": 3.858389322597592e-06, + "loss": 0.9424, + "step": 1629 + }, + { + "epoch": 0.14699914325652702, + "grad_norm": 1.9524026984710583, + "learning_rate": 3.858173324504847e-06, + "loss": 1.0043, + "step": 1630 + }, + { + "epoch": 0.14708932677999728, + "grad_norm": 1.4777708407568264, + "learning_rate": 3.857957167862508e-06, + "loss": 0.9513, + "step": 1631 + }, + { + "epoch": 0.14717951030346757, + "grad_norm": 1.0479340391949963, + "learning_rate": 3.857740852689018e-06, + "loss": 0.8193, + "step": 1632 + }, + { + "epoch": 0.14726969382693783, + "grad_norm": 1.756485841059335, + "learning_rate": 3.857524379002835e-06, + "loss": 0.9911, + "step": 1633 + }, + { + "epoch": 0.1473598773504081, + "grad_norm": 2.767562876937309, + "learning_rate": 3.85730774682243e-06, + "loss": 1.0159, + "step": 1634 + }, + { + "epoch": 0.14745006087387835, + "grad_norm": 1.484215255975752, + "learning_rate": 3.8570909561662875e-06, + "loss": 1.0621, + "step": 1635 + }, + { + "epoch": 0.1475402443973486, + "grad_norm": 1.4694094699206066, + "learning_rate": 3.8568740070529045e-06, + "loss": 1.0694, + "step": 1636 + }, + { + "epoch": 0.14763042792081887, + "grad_norm": 1.6036655667230586, + "learning_rate": 3.856656899500792e-06, + "loss": 0.9527, + "step": 1637 + }, + { + "epoch": 0.14772061144428913, + "grad_norm": 1.6053993971201315, + "learning_rate": 3.856439633528476e-06, + "loss": 0.9238, + "step": 1638 + }, + { + "epoch": 0.1478107949677594, + "grad_norm": 2.072269421716892, + "learning_rate": 3.856222209154494e-06, + "loss": 1.0449, + "step": 1639 + }, + { + "epoch": 0.14790097849122966, + "grad_norm": 1.4896840770594886, + "learning_rate": 3.856004626397397e-06, + "loss": 1.0143, + "step": 1640 + }, + { + "epoch": 0.14799116201469992, + "grad_norm": 1.547999118065398, + "learning_rate": 3.855786885275753e-06, + "loss": 0.9635, + "step": 1641 + }, + { + "epoch": 0.14808134553817018, + "grad_norm": 1.871851057049145, + "learning_rate": 3.855568985808138e-06, + "loss": 0.9083, + "step": 1642 + }, + { + "epoch": 0.14817152906164044, + "grad_norm": 1.7336863619807763, + "learning_rate": 3.855350928013145e-06, + "loss": 0.954, + "step": 1643 + }, + { + "epoch": 0.1482617125851107, + "grad_norm": 1.4733731980921614, + "learning_rate": 3.8551327119093825e-06, + "loss": 0.999, + "step": 1644 + }, + { + "epoch": 0.14835189610858096, + "grad_norm": 1.6456394403260888, + "learning_rate": 3.854914337515467e-06, + "loss": 0.9302, + "step": 1645 + }, + { + "epoch": 0.14844207963205122, + "grad_norm": 1.4559759250473983, + "learning_rate": 3.8546958048500324e-06, + "loss": 0.8833, + "step": 1646 + }, + { + "epoch": 0.14853226315552148, + "grad_norm": 0.762271660869504, + "learning_rate": 3.854477113931725e-06, + "loss": 0.8455, + "step": 1647 + }, + { + "epoch": 0.14862244667899174, + "grad_norm": 1.5171391111096226, + "learning_rate": 3.854258264779205e-06, + "loss": 1.0087, + "step": 1648 + }, + { + "epoch": 0.148712630202462, + "grad_norm": 1.413608077571072, + "learning_rate": 3.854039257411145e-06, + "loss": 0.9646, + "step": 1649 + }, + { + "epoch": 0.14880281372593226, + "grad_norm": 1.6782676906053922, + "learning_rate": 3.853820091846232e-06, + "loss": 0.9513, + "step": 1650 + }, + { + "epoch": 0.14889299724940253, 
+ "grad_norm": 1.5004006486521626, + "learning_rate": 3.853600768103169e-06, + "loss": 0.9899, + "step": 1651 + }, + { + "epoch": 0.1489831807728728, + "grad_norm": 1.8144804559297882, + "learning_rate": 3.853381286200667e-06, + "loss": 0.9752, + "step": 1652 + }, + { + "epoch": 0.14907336429634305, + "grad_norm": 1.5730778272646633, + "learning_rate": 3.853161646157453e-06, + "loss": 1.0744, + "step": 1653 + }, + { + "epoch": 0.1491635478198133, + "grad_norm": 2.918697812514404, + "learning_rate": 3.852941847992269e-06, + "loss": 0.9545, + "step": 1654 + }, + { + "epoch": 0.14925373134328357, + "grad_norm": 7.9086300128669516, + "learning_rate": 3.852721891723871e-06, + "loss": 0.99, + "step": 1655 + }, + { + "epoch": 0.14934391486675386, + "grad_norm": 1.56751978385645, + "learning_rate": 3.852501777371025e-06, + "loss": 0.9674, + "step": 1656 + }, + { + "epoch": 0.14943409839022412, + "grad_norm": 1.4866592499557947, + "learning_rate": 3.8522815049525125e-06, + "loss": 1.0213, + "step": 1657 + }, + { + "epoch": 0.14952428191369438, + "grad_norm": 1.5380331557163798, + "learning_rate": 3.852061074487129e-06, + "loss": 1.0434, + "step": 1658 + }, + { + "epoch": 0.14961446543716464, + "grad_norm": 1.4971396672158697, + "learning_rate": 3.851840485993682e-06, + "loss": 0.8953, + "step": 1659 + }, + { + "epoch": 0.1497046489606349, + "grad_norm": 1.3893846273496242, + "learning_rate": 3.851619739490994e-06, + "loss": 0.9885, + "step": 1660 + }, + { + "epoch": 0.14979483248410516, + "grad_norm": 1.4350718017053379, + "learning_rate": 3.8513988349978996e-06, + "loss": 1.0156, + "step": 1661 + }, + { + "epoch": 0.14988501600757542, + "grad_norm": 1.4854621519995699, + "learning_rate": 3.851177772533249e-06, + "loss": 1.0151, + "step": 1662 + }, + { + "epoch": 0.14997519953104568, + "grad_norm": 1.9720226516699397, + "learning_rate": 3.850956552115903e-06, + "loss": 1.1515, + "step": 1663 + }, + { + "epoch": 0.15006538305451594, + "grad_norm": 0.8978548565601452, + "learning_rate": 3.850735173764738e-06, + "loss": 0.802, + "step": 1664 + }, + { + "epoch": 0.1501555665779862, + "grad_norm": 1.7888492640390707, + "learning_rate": 3.850513637498642e-06, + "loss": 1.0141, + "step": 1665 + }, + { + "epoch": 0.15024575010145647, + "grad_norm": 1.7856906616828099, + "learning_rate": 3.850291943336521e-06, + "loss": 1.0122, + "step": 1666 + }, + { + "epoch": 0.15033593362492673, + "grad_norm": 1.7240327279287955, + "learning_rate": 3.850070091297287e-06, + "loss": 1.04, + "step": 1667 + }, + { + "epoch": 0.150426117148397, + "grad_norm": 0.928261408430863, + "learning_rate": 3.8498480813998735e-06, + "loss": 0.8649, + "step": 1668 + }, + { + "epoch": 0.15051630067186725, + "grad_norm": 1.5140562295537805, + "learning_rate": 3.84962591366322e-06, + "loss": 0.9888, + "step": 1669 + }, + { + "epoch": 0.1506064841953375, + "grad_norm": 1.8413268507945322, + "learning_rate": 3.8494035881062855e-06, + "loss": 1.0356, + "step": 1670 + }, + { + "epoch": 0.15069666771880777, + "grad_norm": 1.6822881393474374, + "learning_rate": 3.84918110474804e-06, + "loss": 0.9668, + "step": 1671 + }, + { + "epoch": 0.15078685124227803, + "grad_norm": 1.4905500765030857, + "learning_rate": 3.8489584636074655e-06, + "loss": 0.9744, + "step": 1672 + }, + { + "epoch": 0.1508770347657483, + "grad_norm": 3.8015084585416576, + "learning_rate": 3.848735664703561e-06, + "loss": 0.7903, + "step": 1673 + }, + { + "epoch": 0.15096721828921855, + "grad_norm": 1.37094189272947, + "learning_rate": 3.8485127080553346e-06, + "loss": 0.9837, + 
"step": 1674 + }, + { + "epoch": 0.1510574018126888, + "grad_norm": 1.5719181032129563, + "learning_rate": 3.8482895936818115e-06, + "loss": 0.9823, + "step": 1675 + }, + { + "epoch": 0.15114758533615907, + "grad_norm": 1.555761225288863, + "learning_rate": 3.848066321602029e-06, + "loss": 1.0064, + "step": 1676 + }, + { + "epoch": 0.15123776885962933, + "grad_norm": 1.575060522339568, + "learning_rate": 3.847842891835038e-06, + "loss": 0.9527, + "step": 1677 + }, + { + "epoch": 0.1513279523830996, + "grad_norm": 0.7369345841329464, + "learning_rate": 3.847619304399902e-06, + "loss": 0.8062, + "step": 1678 + }, + { + "epoch": 0.15141813590656986, + "grad_norm": 1.484724866894249, + "learning_rate": 3.8473955593157e-06, + "loss": 1.0147, + "step": 1679 + }, + { + "epoch": 0.15150831943004014, + "grad_norm": 1.5014592066713286, + "learning_rate": 3.847171656601522e-06, + "loss": 0.955, + "step": 1680 + }, + { + "epoch": 0.1515985029535104, + "grad_norm": 1.4877013030200137, + "learning_rate": 3.846947596276473e-06, + "loss": 1.01, + "step": 1681 + }, + { + "epoch": 0.15168868647698067, + "grad_norm": 1.64671930376987, + "learning_rate": 3.846723378359672e-06, + "loss": 1.0338, + "step": 1682 + }, + { + "epoch": 0.15177887000045093, + "grad_norm": 1.5912380243826154, + "learning_rate": 3.846499002870249e-06, + "loss": 0.9936, + "step": 1683 + }, + { + "epoch": 0.1518690535239212, + "grad_norm": 1.6666473228603607, + "learning_rate": 3.846274469827349e-06, + "loss": 1.002, + "step": 1684 + }, + { + "epoch": 0.15195923704739145, + "grad_norm": 1.5884884768059964, + "learning_rate": 3.846049779250132e-06, + "loss": 0.9979, + "step": 1685 + }, + { + "epoch": 0.1520494205708617, + "grad_norm": 0.8206430768174132, + "learning_rate": 3.845824931157769e-06, + "loss": 0.8477, + "step": 1686 + }, + { + "epoch": 0.15213960409433197, + "grad_norm": 1.3531057995145357, + "learning_rate": 3.845599925569444e-06, + "loss": 0.98, + "step": 1687 + }, + { + "epoch": 0.15222978761780223, + "grad_norm": 1.7147650559428016, + "learning_rate": 3.845374762504357e-06, + "loss": 0.9504, + "step": 1688 + }, + { + "epoch": 0.1523199711412725, + "grad_norm": 1.7252073191023383, + "learning_rate": 3.8451494419817204e-06, + "loss": 1.0361, + "step": 1689 + }, + { + "epoch": 0.15241015466474275, + "grad_norm": 1.9967542775992955, + "learning_rate": 3.8449239640207594e-06, + "loss": 0.9222, + "step": 1690 + }, + { + "epoch": 0.152500338188213, + "grad_norm": 1.7027466379954934, + "learning_rate": 3.844698328640713e-06, + "loss": 1.0197, + "step": 1691 + }, + { + "epoch": 0.15259052171168327, + "grad_norm": 1.2980408541240986, + "learning_rate": 3.844472535860833e-06, + "loss": 0.9996, + "step": 1692 + }, + { + "epoch": 0.15268070523515354, + "grad_norm": 1.954758655158637, + "learning_rate": 3.8442465857003864e-06, + "loss": 1.0428, + "step": 1693 + }, + { + "epoch": 0.1527708887586238, + "grad_norm": 1.477918053106463, + "learning_rate": 3.844020478178653e-06, + "loss": 0.9338, + "step": 1694 + }, + { + "epoch": 0.15286107228209406, + "grad_norm": 1.6351452093940904, + "learning_rate": 3.843794213314923e-06, + "loss": 1.0164, + "step": 1695 + }, + { + "epoch": 0.15295125580556432, + "grad_norm": 1.5626705839500759, + "learning_rate": 3.843567791128505e-06, + "loss": 1.0192, + "step": 1696 + }, + { + "epoch": 0.15304143932903458, + "grad_norm": 1.377829328379277, + "learning_rate": 3.843341211638717e-06, + "loss": 0.9754, + "step": 1697 + }, + { + "epoch": 0.15313162285250484, + "grad_norm": 1.5810212101729064, + 
"learning_rate": 3.843114474864894e-06, + "loss": 0.9479, + "step": 1698 + }, + { + "epoch": 0.1532218063759751, + "grad_norm": 1.5572572106809766, + "learning_rate": 3.84288758082638e-06, + "loss": 0.9872, + "step": 1699 + }, + { + "epoch": 0.15331198989944536, + "grad_norm": 1.4268891511032429, + "learning_rate": 3.842660529542536e-06, + "loss": 1.0726, + "step": 1700 + }, + { + "epoch": 0.15340217342291562, + "grad_norm": 1.4778091577115804, + "learning_rate": 3.842433321032736e-06, + "loss": 1.0047, + "step": 1701 + }, + { + "epoch": 0.15349235694638588, + "grad_norm": 1.394619337874076, + "learning_rate": 3.842205955316365e-06, + "loss": 0.9156, + "step": 1702 + }, + { + "epoch": 0.15358254046985614, + "grad_norm": 1.7365350417608703, + "learning_rate": 3.8419784324128256e-06, + "loss": 0.9001, + "step": 1703 + }, + { + "epoch": 0.15367272399332643, + "grad_norm": 1.5667113199701725, + "learning_rate": 3.841750752341529e-06, + "loss": 0.9583, + "step": 1704 + }, + { + "epoch": 0.1537629075167967, + "grad_norm": 1.5763743293003951, + "learning_rate": 3.841522915121902e-06, + "loss": 0.9866, + "step": 1705 + }, + { + "epoch": 0.15385309104026695, + "grad_norm": 1.4024219544262833, + "learning_rate": 3.841294920773387e-06, + "loss": 0.9203, + "step": 1706 + }, + { + "epoch": 0.15394327456373721, + "grad_norm": 1.7716464102093332, + "learning_rate": 3.841066769315436e-06, + "loss": 1.0038, + "step": 1707 + }, + { + "epoch": 0.15403345808720748, + "grad_norm": 1.4104086103260893, + "learning_rate": 3.840838460767518e-06, + "loss": 0.9983, + "step": 1708 + }, + { + "epoch": 0.15412364161067774, + "grad_norm": 1.61660815195324, + "learning_rate": 3.840609995149111e-06, + "loss": 0.9594, + "step": 1709 + }, + { + "epoch": 0.154213825134148, + "grad_norm": 1.6076083069380978, + "learning_rate": 3.84038137247971e-06, + "loss": 0.9866, + "step": 1710 + }, + { + "epoch": 0.15430400865761826, + "grad_norm": 1.42192874010416, + "learning_rate": 3.840152592778823e-06, + "loss": 0.9719, + "step": 1711 + }, + { + "epoch": 0.15439419218108852, + "grad_norm": 1.3207649042753284, + "learning_rate": 3.83992365606597e-06, + "loss": 1.033, + "step": 1712 + }, + { + "epoch": 0.15448437570455878, + "grad_norm": 1.7299639307793342, + "learning_rate": 3.8396945623606855e-06, + "loss": 0.944, + "step": 1713 + }, + { + "epoch": 0.15457455922802904, + "grad_norm": 1.3438397754576439, + "learning_rate": 3.8394653116825174e-06, + "loss": 0.9496, + "step": 1714 + }, + { + "epoch": 0.1546647427514993, + "grad_norm": 1.666605980086141, + "learning_rate": 3.839235904051026e-06, + "loss": 0.9762, + "step": 1715 + }, + { + "epoch": 0.15475492627496956, + "grad_norm": 1.5067133084113569, + "learning_rate": 3.8390063394857855e-06, + "loss": 0.9642, + "step": 1716 + }, + { + "epoch": 0.15484510979843982, + "grad_norm": 1.2139339658823907, + "learning_rate": 3.838776618006385e-06, + "loss": 1.0687, + "step": 1717 + }, + { + "epoch": 0.15493529332191008, + "grad_norm": 1.3541228898626279, + "learning_rate": 3.838546739632423e-06, + "loss": 0.9954, + "step": 1718 + }, + { + "epoch": 0.15502547684538034, + "grad_norm": 1.392128249768902, + "learning_rate": 3.838316704383517e-06, + "loss": 1.0388, + "step": 1719 + }, + { + "epoch": 0.1551156603688506, + "grad_norm": 1.550993782296621, + "learning_rate": 3.838086512279292e-06, + "loss": 0.99, + "step": 1720 + }, + { + "epoch": 0.15520584389232087, + "grad_norm": 1.861410165201599, + "learning_rate": 3.837856163339391e-06, + "loss": 0.9777, + "step": 1721 + }, + { + "epoch": 
0.15529602741579113, + "grad_norm": 1.386676927392545, + "learning_rate": 3.837625657583469e-06, + "loss": 0.9139, + "step": 1722 + }, + { + "epoch": 0.1553862109392614, + "grad_norm": 1.429727105597891, + "learning_rate": 3.837394995031193e-06, + "loss": 0.942, + "step": 1723 + }, + { + "epoch": 0.15547639446273165, + "grad_norm": 1.4590302936319026, + "learning_rate": 3.837164175702245e-06, + "loss": 0.9942, + "step": 1724 + }, + { + "epoch": 0.1555665779862019, + "grad_norm": 1.7870848273587299, + "learning_rate": 3.836933199616319e-06, + "loss": 0.8981, + "step": 1725 + }, + { + "epoch": 0.15565676150967217, + "grad_norm": 1.3904836347070257, + "learning_rate": 3.836702066793124e-06, + "loss": 1.0556, + "step": 1726 + }, + { + "epoch": 0.15574694503314246, + "grad_norm": 1.4829239026620435, + "learning_rate": 3.836470777252381e-06, + "loss": 1.0461, + "step": 1727 + }, + { + "epoch": 0.15583712855661272, + "grad_norm": 1.7072154628775573, + "learning_rate": 3.836239331013825e-06, + "loss": 1.004, + "step": 1728 + }, + { + "epoch": 0.15592731208008298, + "grad_norm": 1.7238266617046745, + "learning_rate": 3.836007728097205e-06, + "loss": 0.9491, + "step": 1729 + }, + { + "epoch": 0.15601749560355324, + "grad_norm": 1.307111487060577, + "learning_rate": 3.835775968522282e-06, + "loss": 0.9466, + "step": 1730 + }, + { + "epoch": 0.1561076791270235, + "grad_norm": 1.7059724435268935, + "learning_rate": 3.83554405230883e-06, + "loss": 1.0026, + "step": 1731 + }, + { + "epoch": 0.15619786265049376, + "grad_norm": 1.2228520148288176, + "learning_rate": 3.835311979476639e-06, + "loss": 0.971, + "step": 1732 + }, + { + "epoch": 0.15628804617396402, + "grad_norm": 1.4169740997779487, + "learning_rate": 3.83507975004551e-06, + "loss": 0.8878, + "step": 1733 + }, + { + "epoch": 0.15637822969743428, + "grad_norm": 1.5844981610283573, + "learning_rate": 3.834847364035258e-06, + "loss": 0.9664, + "step": 1734 + }, + { + "epoch": 0.15646841322090455, + "grad_norm": 1.611492726559607, + "learning_rate": 3.834614821465712e-06, + "loss": 0.923, + "step": 1735 + }, + { + "epoch": 0.1565585967443748, + "grad_norm": 1.477221710151111, + "learning_rate": 3.834382122356713e-06, + "loss": 1.0234, + "step": 1736 + }, + { + "epoch": 0.15664878026784507, + "grad_norm": 1.4669107324370787, + "learning_rate": 3.834149266728117e-06, + "loss": 0.9486, + "step": 1737 + }, + { + "epoch": 0.15673896379131533, + "grad_norm": 0.7975175548283557, + "learning_rate": 3.833916254599792e-06, + "loss": 0.8532, + "step": 1738 + }, + { + "epoch": 0.1568291473147856, + "grad_norm": 0.7284024229895498, + "learning_rate": 3.83368308599162e-06, + "loss": 0.8092, + "step": 1739 + }, + { + "epoch": 0.15691933083825585, + "grad_norm": 0.6736913348686085, + "learning_rate": 3.833449760923498e-06, + "loss": 0.7586, + "step": 1740 + }, + { + "epoch": 0.1570095143617261, + "grad_norm": 1.4536306670420658, + "learning_rate": 3.83321627941533e-06, + "loss": 1.0286, + "step": 1741 + }, + { + "epoch": 0.15709969788519637, + "grad_norm": 1.8408203125, + "learning_rate": 3.832982641487043e-06, + "loss": 1.0413, + "step": 1742 + }, + { + "epoch": 0.15718988140866663, + "grad_norm": 1.456056488670499, + "learning_rate": 3.832748847158568e-06, + "loss": 1.0725, + "step": 1743 + }, + { + "epoch": 0.1572800649321369, + "grad_norm": 1.3964892834130302, + "learning_rate": 3.832514896449858e-06, + "loss": 1.01, + "step": 1744 + }, + { + "epoch": 0.15737024845560715, + "grad_norm": 1.493718984114287, + "learning_rate": 3.832280789380871e-06, + "loss": 
0.9218, + "step": 1745 + }, + { + "epoch": 0.15746043197907741, + "grad_norm": 1.5478904550762427, + "learning_rate": 3.832046525971584e-06, + "loss": 0.8882, + "step": 1746 + }, + { + "epoch": 0.15755061550254768, + "grad_norm": 1.543081776788835, + "learning_rate": 3.831812106241987e-06, + "loss": 1.0415, + "step": 1747 + }, + { + "epoch": 0.15764079902601794, + "grad_norm": 1.4285378758713654, + "learning_rate": 3.8315775302120796e-06, + "loss": 0.9043, + "step": 1748 + }, + { + "epoch": 0.1577309825494882, + "grad_norm": 1.3591245223168795, + "learning_rate": 3.831342797901878e-06, + "loss": 1.0005, + "step": 1749 + }, + { + "epoch": 0.15782116607295846, + "grad_norm": 0.7454602013191539, + "learning_rate": 3.831107909331411e-06, + "loss": 0.838, + "step": 1750 + }, + { + "epoch": 0.15791134959642875, + "grad_norm": 1.6862681095629786, + "learning_rate": 3.830872864520721e-06, + "loss": 1.0287, + "step": 1751 + }, + { + "epoch": 0.158001533119899, + "grad_norm": 0.8131075568237749, + "learning_rate": 3.830637663489862e-06, + "loss": 0.8955, + "step": 1752 + }, + { + "epoch": 0.15809171664336927, + "grad_norm": 0.8013792456831423, + "learning_rate": 3.830402306258904e-06, + "loss": 0.9283, + "step": 1753 + }, + { + "epoch": 0.15818190016683953, + "grad_norm": 1.639475247004593, + "learning_rate": 3.830166792847929e-06, + "loss": 0.9764, + "step": 1754 + }, + { + "epoch": 0.1582720836903098, + "grad_norm": 1.5098309225196567, + "learning_rate": 3.829931123277031e-06, + "loss": 0.9262, + "step": 1755 + }, + { + "epoch": 0.15836226721378005, + "grad_norm": 1.3960477132950455, + "learning_rate": 3.8296952975663204e-06, + "loss": 0.9743, + "step": 1756 + }, + { + "epoch": 0.1584524507372503, + "grad_norm": 1.3788920717888293, + "learning_rate": 3.829459315735918e-06, + "loss": 1.0483, + "step": 1757 + }, + { + "epoch": 0.15854263426072057, + "grad_norm": 1.5319655945602237, + "learning_rate": 3.829223177805959e-06, + "loss": 0.9116, + "step": 1758 + }, + { + "epoch": 0.15863281778419083, + "grad_norm": 1.465606002847172, + "learning_rate": 3.828986883796591e-06, + "loss": 1.0252, + "step": 1759 + }, + { + "epoch": 0.1587230013076611, + "grad_norm": 1.4428140025454292, + "learning_rate": 3.828750433727979e-06, + "loss": 0.9731, + "step": 1760 + }, + { + "epoch": 0.15881318483113135, + "grad_norm": 1.5426804671603043, + "learning_rate": 3.828513827620296e-06, + "loss": 1.0313, + "step": 1761 + }, + { + "epoch": 0.15890336835460162, + "grad_norm": 1.594779336666961, + "learning_rate": 3.82827706549373e-06, + "loss": 1.0397, + "step": 1762 + }, + { + "epoch": 0.15899355187807188, + "grad_norm": 1.6286437191251575, + "learning_rate": 3.828040147368484e-06, + "loss": 0.9713, + "step": 1763 + }, + { + "epoch": 0.15908373540154214, + "grad_norm": 1.3910646493698635, + "learning_rate": 3.827803073264774e-06, + "loss": 0.9811, + "step": 1764 + }, + { + "epoch": 0.1591739189250124, + "grad_norm": 1.5017390501646513, + "learning_rate": 3.827565843202826e-06, + "loss": 0.981, + "step": 1765 + }, + { + "epoch": 0.15926410244848266, + "grad_norm": 1.6612766927924014, + "learning_rate": 3.827328457202884e-06, + "loss": 0.8632, + "step": 1766 + }, + { + "epoch": 0.15935428597195292, + "grad_norm": 1.2814683960654047, + "learning_rate": 3.8270909152852014e-06, + "loss": 0.9172, + "step": 1767 + }, + { + "epoch": 0.15944446949542318, + "grad_norm": 1.7949797877286786, + "learning_rate": 3.826853217470048e-06, + "loss": 0.9868, + "step": 1768 + }, + { + "epoch": 0.15953465301889344, + "grad_norm": 
1.4504506923443627, + "learning_rate": 3.826615363777705e-06, + "loss": 1.0586, + "step": 1769 + }, + { + "epoch": 0.1596248365423637, + "grad_norm": 1.4237332619879657, + "learning_rate": 3.826377354228468e-06, + "loss": 0.9637, + "step": 1770 + }, + { + "epoch": 0.15971502006583396, + "grad_norm": 0.8581115104242935, + "learning_rate": 3.826139188842643e-06, + "loss": 0.8154, + "step": 1771 + }, + { + "epoch": 0.15980520358930422, + "grad_norm": 1.3302811567087878, + "learning_rate": 3.825900867640554e-06, + "loss": 1.0416, + "step": 1772 + }, + { + "epoch": 0.15989538711277448, + "grad_norm": 1.4708963497736147, + "learning_rate": 3.825662390642535e-06, + "loss": 1.0248, + "step": 1773 + }, + { + "epoch": 0.15998557063624474, + "grad_norm": 1.5635685890653443, + "learning_rate": 3.825423757868934e-06, + "loss": 1.0834, + "step": 1774 + }, + { + "epoch": 0.16007575415971503, + "grad_norm": 1.53282854886456, + "learning_rate": 3.825184969340114e-06, + "loss": 1.0115, + "step": 1775 + }, + { + "epoch": 0.1601659376831853, + "grad_norm": 1.2880904723254614, + "learning_rate": 3.824946025076447e-06, + "loss": 1.0099, + "step": 1776 + }, + { + "epoch": 0.16025612120665556, + "grad_norm": 1.6110086390182565, + "learning_rate": 3.824706925098323e-06, + "loss": 0.9284, + "step": 1777 + }, + { + "epoch": 0.16034630473012582, + "grad_norm": 1.915260344953026, + "learning_rate": 3.824467669426143e-06, + "loss": 1.0347, + "step": 1778 + }, + { + "epoch": 0.16043648825359608, + "grad_norm": 1.506158029747988, + "learning_rate": 3.824228258080321e-06, + "loss": 0.9481, + "step": 1779 + }, + { + "epoch": 0.16052667177706634, + "grad_norm": 1.600467661222538, + "learning_rate": 3.823988691081285e-06, + "loss": 0.9004, + "step": 1780 + }, + { + "epoch": 0.1606168553005366, + "grad_norm": 1.3507869457788884, + "learning_rate": 3.823748968449478e-06, + "loss": 1.012, + "step": 1781 + }, + { + "epoch": 0.16070703882400686, + "grad_norm": 1.5730803280331314, + "learning_rate": 3.823509090205352e-06, + "loss": 0.8798, + "step": 1782 + }, + { + "epoch": 0.16079722234747712, + "grad_norm": 2.4310762907208265, + "learning_rate": 3.823269056369376e-06, + "loss": 0.9335, + "step": 1783 + }, + { + "epoch": 0.16088740587094738, + "grad_norm": 1.343335730989902, + "learning_rate": 3.8230288669620295e-06, + "loss": 1.0118, + "step": 1784 + }, + { + "epoch": 0.16097758939441764, + "grad_norm": 1.3244146241565318, + "learning_rate": 3.822788522003809e-06, + "loss": 0.9865, + "step": 1785 + }, + { + "epoch": 0.1610677729178879, + "grad_norm": 1.7300305640547153, + "learning_rate": 3.822548021515221e-06, + "loss": 1.0552, + "step": 1786 + }, + { + "epoch": 0.16115795644135816, + "grad_norm": 1.401626105763033, + "learning_rate": 3.822307365516787e-06, + "loss": 0.9579, + "step": 1787 + }, + { + "epoch": 0.16124813996482842, + "grad_norm": 1.6274972946683604, + "learning_rate": 3.8220665540290395e-06, + "loss": 0.9993, + "step": 1788 + }, + { + "epoch": 0.16133832348829868, + "grad_norm": 1.474552299961261, + "learning_rate": 3.8218255870725265e-06, + "loss": 1.0448, + "step": 1789 + }, + { + "epoch": 0.16142850701176895, + "grad_norm": 1.4162031799280705, + "learning_rate": 3.82158446466781e-06, + "loss": 0.9302, + "step": 1790 + }, + { + "epoch": 0.1615186905352392, + "grad_norm": 1.4048524057509653, + "learning_rate": 3.821343186835462e-06, + "loss": 0.9942, + "step": 1791 + }, + { + "epoch": 0.16160887405870947, + "grad_norm": 1.3810056530686898, + "learning_rate": 3.821101753596072e-06, + "loss": 1.0517, + "step": 
1792 + }, + { + "epoch": 0.16169905758217973, + "grad_norm": 1.6401009585544113, + "learning_rate": 3.820860164970237e-06, + "loss": 0.9722, + "step": 1793 + }, + { + "epoch": 0.16178924110565, + "grad_norm": 1.3101437171696, + "learning_rate": 3.820618420978574e-06, + "loss": 1.0543, + "step": 1794 + }, + { + "epoch": 0.16187942462912025, + "grad_norm": 1.824658163272239, + "learning_rate": 3.820376521641708e-06, + "loss": 0.9762, + "step": 1795 + }, + { + "epoch": 0.1619696081525905, + "grad_norm": 1.4873110995840113, + "learning_rate": 3.82013446698028e-06, + "loss": 0.9943, + "step": 1796 + }, + { + "epoch": 0.16205979167606077, + "grad_norm": 1.595644329953581, + "learning_rate": 3.819892257014943e-06, + "loss": 0.9438, + "step": 1797 + }, + { + "epoch": 0.16214997519953103, + "grad_norm": 1.451821347451039, + "learning_rate": 3.819649891766364e-06, + "loss": 1.0077, + "step": 1798 + }, + { + "epoch": 0.16224015872300132, + "grad_norm": 1.4132088460769006, + "learning_rate": 3.819407371255222e-06, + "loss": 0.9696, + "step": 1799 + }, + { + "epoch": 0.16233034224647158, + "grad_norm": 1.44091904425896, + "learning_rate": 3.819164695502212e-06, + "loss": 0.9858, + "step": 1800 + }, + { + "epoch": 0.16242052576994184, + "grad_norm": 1.5378041036431267, + "learning_rate": 3.818921864528039e-06, + "loss": 0.9291, + "step": 1801 + }, + { + "epoch": 0.1625107092934121, + "grad_norm": 1.705226057174501, + "learning_rate": 3.818678878353423e-06, + "loss": 1.0387, + "step": 1802 + }, + { + "epoch": 0.16260089281688236, + "grad_norm": 1.5346383733077358, + "learning_rate": 3.818435736999097e-06, + "loss": 0.9335, + "step": 1803 + }, + { + "epoch": 0.16269107634035262, + "grad_norm": 1.4746090516348984, + "learning_rate": 3.818192440485807e-06, + "loss": 0.9803, + "step": 1804 + }, + { + "epoch": 0.16278125986382289, + "grad_norm": 1.8357867239866335, + "learning_rate": 3.817948988834314e-06, + "loss": 0.923, + "step": 1805 + }, + { + "epoch": 0.16287144338729315, + "grad_norm": 1.2918877822546282, + "learning_rate": 3.817705382065388e-06, + "loss": 0.981, + "step": 1806 + }, + { + "epoch": 0.1629616269107634, + "grad_norm": 1.2196722209422028, + "learning_rate": 3.8174616201998155e-06, + "loss": 0.9525, + "step": 1807 + }, + { + "epoch": 0.16305181043423367, + "grad_norm": 1.7794060783499368, + "learning_rate": 3.817217703258397e-06, + "loss": 1.0233, + "step": 1808 + }, + { + "epoch": 0.16314199395770393, + "grad_norm": 1.3333691453097993, + "learning_rate": 3.816973631261943e-06, + "loss": 0.9725, + "step": 1809 + }, + { + "epoch": 0.1632321774811742, + "grad_norm": 1.4710912508950909, + "learning_rate": 3.816729404231281e-06, + "loss": 1.01, + "step": 1810 + }, + { + "epoch": 0.16332236100464445, + "grad_norm": 2.2319540499807755, + "learning_rate": 3.816485022187249e-06, + "loss": 1.048, + "step": 1811 + }, + { + "epoch": 0.1634125445281147, + "grad_norm": 1.5114067603403911, + "learning_rate": 3.816240485150698e-06, + "loss": 0.9313, + "step": 1812 + }, + { + "epoch": 0.16350272805158497, + "grad_norm": 1.3497475582326108, + "learning_rate": 3.815995793142495e-06, + "loss": 0.9778, + "step": 1813 + }, + { + "epoch": 0.16359291157505523, + "grad_norm": 1.667936588307831, + "learning_rate": 3.815750946183518e-06, + "loss": 0.922, + "step": 1814 + }, + { + "epoch": 0.1636830950985255, + "grad_norm": 1.4856148611195379, + "learning_rate": 3.815505944294658e-06, + "loss": 1.0348, + "step": 1815 + }, + { + "epoch": 0.16377327862199575, + "grad_norm": 1.224189893313056, + "learning_rate": 
3.81526078749682e-06, + "loss": 0.9387, + "step": 1816 + }, + { + "epoch": 0.16386346214546602, + "grad_norm": 1.4786492190356413, + "learning_rate": 3.8150154758109225e-06, + "loss": 0.92, + "step": 1817 + }, + { + "epoch": 0.16395364566893628, + "grad_norm": 1.40555165222859, + "learning_rate": 3.814770009257896e-06, + "loss": 0.9534, + "step": 1818 + }, + { + "epoch": 0.16404382919240654, + "grad_norm": 1.448673032439129, + "learning_rate": 3.814524387858687e-06, + "loss": 0.9997, + "step": 1819 + }, + { + "epoch": 0.1641340127158768, + "grad_norm": 1.525541447059226, + "learning_rate": 3.814278611634251e-06, + "loss": 1.0069, + "step": 1820 + }, + { + "epoch": 0.16422419623934706, + "grad_norm": 1.514576106554899, + "learning_rate": 3.8140326806055606e-06, + "loss": 0.9522, + "step": 1821 + }, + { + "epoch": 0.16431437976281732, + "grad_norm": 1.6154216763852713, + "learning_rate": 3.8137865947935992e-06, + "loss": 1.0372, + "step": 1822 + }, + { + "epoch": 0.1644045632862876, + "grad_norm": 1.5869046724481803, + "learning_rate": 3.8135403542193646e-06, + "loss": 0.9966, + "step": 1823 + }, + { + "epoch": 0.16449474680975787, + "grad_norm": 1.8119023093948203, + "learning_rate": 3.813293958903867e-06, + "loss": 0.9166, + "step": 1824 + }, + { + "epoch": 0.16458493033322813, + "grad_norm": 1.4679610385661777, + "learning_rate": 3.8130474088681306e-06, + "loss": 0.9467, + "step": 1825 + }, + { + "epoch": 0.1646751138566984, + "grad_norm": 1.6930634659139117, + "learning_rate": 3.8128007041331927e-06, + "loss": 0.919, + "step": 1826 + }, + { + "epoch": 0.16476529738016865, + "grad_norm": 1.4291694411240645, + "learning_rate": 3.812553844720102e-06, + "loss": 0.8961, + "step": 1827 + }, + { + "epoch": 0.1648554809036389, + "grad_norm": 1.54458487171103, + "learning_rate": 3.8123068306499236e-06, + "loss": 1.0048, + "step": 1828 + }, + { + "epoch": 0.16494566442710917, + "grad_norm": 1.4618763074669245, + "learning_rate": 3.812059661943733e-06, + "loss": 0.9837, + "step": 1829 + }, + { + "epoch": 0.16503584795057943, + "grad_norm": 1.3051173706913155, + "learning_rate": 3.811812338622621e-06, + "loss": 0.9585, + "step": 1830 + }, + { + "epoch": 0.1651260314740497, + "grad_norm": 1.3147399498714427, + "learning_rate": 3.81156486070769e-06, + "loss": 0.9726, + "step": 1831 + }, + { + "epoch": 0.16521621499751996, + "grad_norm": 1.4293053955424666, + "learning_rate": 3.811317228220056e-06, + "loss": 1.0292, + "step": 1832 + }, + { + "epoch": 0.16530639852099022, + "grad_norm": 1.5775048576558282, + "learning_rate": 3.811069441180849e-06, + "loss": 1.0235, + "step": 1833 + }, + { + "epoch": 0.16539658204446048, + "grad_norm": 1.7318334429520632, + "learning_rate": 3.8108214996112107e-06, + "loss": 0.9853, + "step": 1834 + }, + { + "epoch": 0.16548676556793074, + "grad_norm": 1.4565111310895613, + "learning_rate": 3.810573403532297e-06, + "loss": 1.0231, + "step": 1835 + }, + { + "epoch": 0.165576949091401, + "grad_norm": 1.3041648960203067, + "learning_rate": 3.8103251529652774e-06, + "loss": 0.9597, + "step": 1836 + }, + { + "epoch": 0.16566713261487126, + "grad_norm": 1.4011293641430052, + "learning_rate": 3.810076747931334e-06, + "loss": 0.9584, + "step": 1837 + }, + { + "epoch": 0.16575731613834152, + "grad_norm": 1.4175613700862533, + "learning_rate": 3.809828188451662e-06, + "loss": 0.9399, + "step": 1838 + }, + { + "epoch": 0.16584749966181178, + "grad_norm": 1.4032168419904858, + "learning_rate": 3.809579474547469e-06, + "loss": 0.9766, + "step": 1839 + }, + { + "epoch": 
0.16593768318528204, + "grad_norm": 1.7179281870939667, + "learning_rate": 3.809330606239977e-06, + "loss": 1.0261, + "step": 1840 + }, + { + "epoch": 0.1660278667087523, + "grad_norm": 1.420435165966656, + "learning_rate": 3.809081583550422e-06, + "loss": 1.0516, + "step": 1841 + }, + { + "epoch": 0.16611805023222256, + "grad_norm": 1.7401620355335043, + "learning_rate": 3.808832406500051e-06, + "loss": 1.0608, + "step": 1842 + }, + { + "epoch": 0.16620823375569282, + "grad_norm": 1.5224990693138047, + "learning_rate": 3.8085830751101253e-06, + "loss": 1.051, + "step": 1843 + }, + { + "epoch": 0.16629841727916309, + "grad_norm": 1.527694700852982, + "learning_rate": 3.808333589401919e-06, + "loss": 1.0195, + "step": 1844 + }, + { + "epoch": 0.16638860080263335, + "grad_norm": 1.3182815189684578, + "learning_rate": 3.8080839493967194e-06, + "loss": 0.9091, + "step": 1845 + }, + { + "epoch": 0.1664787843261036, + "grad_norm": 0.7688160394975209, + "learning_rate": 3.807834155115828e-06, + "loss": 0.7841, + "step": 1846 + }, + { + "epoch": 0.1665689678495739, + "grad_norm": 0.806694971059604, + "learning_rate": 3.8075842065805584e-06, + "loss": 0.8938, + "step": 1847 + }, + { + "epoch": 0.16665915137304416, + "grad_norm": 1.4671338706325676, + "learning_rate": 3.8073341038122374e-06, + "loss": 0.9696, + "step": 1848 + }, + { + "epoch": 0.16674933489651442, + "grad_norm": 1.3435167065750437, + "learning_rate": 3.8070838468322048e-06, + "loss": 1.0459, + "step": 1849 + }, + { + "epoch": 0.16683951841998468, + "grad_norm": 1.6565905526762816, + "learning_rate": 3.8068334356618143e-06, + "loss": 0.9239, + "step": 1850 + }, + { + "epoch": 0.16692970194345494, + "grad_norm": 1.9076686020678622, + "learning_rate": 3.8065828703224324e-06, + "loss": 1.0078, + "step": 1851 + }, + { + "epoch": 0.1670198854669252, + "grad_norm": 1.6228567073977824, + "learning_rate": 3.8063321508354386e-06, + "loss": 0.9722, + "step": 1852 + }, + { + "epoch": 0.16711006899039546, + "grad_norm": 1.4490446824160337, + "learning_rate": 3.8060812772222255e-06, + "loss": 0.9662, + "step": 1853 + }, + { + "epoch": 0.16720025251386572, + "grad_norm": 1.2083032319276752, + "learning_rate": 3.8058302495041993e-06, + "loss": 1.0468, + "step": 1854 + }, + { + "epoch": 0.16729043603733598, + "grad_norm": 1.8230192827679108, + "learning_rate": 3.805579067702779e-06, + "loss": 1.0084, + "step": 1855 + }, + { + "epoch": 0.16738061956080624, + "grad_norm": 1.6487701225065055, + "learning_rate": 3.8053277318393967e-06, + "loss": 0.9852, + "step": 1856 + }, + { + "epoch": 0.1674708030842765, + "grad_norm": 1.3970777838043356, + "learning_rate": 3.805076241935498e-06, + "loss": 0.9861, + "step": 1857 + }, + { + "epoch": 0.16756098660774676, + "grad_norm": 1.7732714604695665, + "learning_rate": 3.804824598012541e-06, + "loss": 0.8535, + "step": 1858 + }, + { + "epoch": 0.16765117013121703, + "grad_norm": 1.300019599693442, + "learning_rate": 3.8045728000919975e-06, + "loss": 1.0237, + "step": 1859 + }, + { + "epoch": 0.1677413536546873, + "grad_norm": 1.4864101713095335, + "learning_rate": 3.8043208481953524e-06, + "loss": 1.0121, + "step": 1860 + }, + { + "epoch": 0.16783153717815755, + "grad_norm": 1.4358516032332758, + "learning_rate": 3.804068742344104e-06, + "loss": 0.9174, + "step": 1861 + }, + { + "epoch": 0.1679217207016278, + "grad_norm": 1.3470362467175887, + "learning_rate": 3.8038164825597628e-06, + "loss": 0.9496, + "step": 1862 + }, + { + "epoch": 0.16801190422509807, + "grad_norm": 2.19179842834472, + "learning_rate": 
3.8035640688638537e-06, + "loss": 1.0644, + "step": 1863 + }, + { + "epoch": 0.16810208774856833, + "grad_norm": 2.0843770909334642, + "learning_rate": 3.8033115012779125e-06, + "loss": 1.06, + "step": 1864 + }, + { + "epoch": 0.1681922712720386, + "grad_norm": 1.2734542096685768, + "learning_rate": 3.8030587798234915e-06, + "loss": 1.0236, + "step": 1865 + }, + { + "epoch": 0.16828245479550885, + "grad_norm": 1.4799666144497532, + "learning_rate": 3.802805904522153e-06, + "loss": 0.9207, + "step": 1866 + }, + { + "epoch": 0.1683726383189791, + "grad_norm": 1.2010544634245104, + "learning_rate": 3.8025528753954742e-06, + "loss": 0.974, + "step": 1867 + }, + { + "epoch": 0.16846282184244937, + "grad_norm": 1.6511766717469731, + "learning_rate": 3.802299692465045e-06, + "loss": 0.919, + "step": 1868 + }, + { + "epoch": 0.16855300536591963, + "grad_norm": 1.4649479129632264, + "learning_rate": 3.802046355752468e-06, + "loss": 0.9723, + "step": 1869 + }, + { + "epoch": 0.16864318888938992, + "grad_norm": 1.7274158147015743, + "learning_rate": 3.80179286527936e-06, + "loss": 1.0419, + "step": 1870 + }, + { + "epoch": 0.16873337241286018, + "grad_norm": 1.3834451639787335, + "learning_rate": 3.801539221067349e-06, + "loss": 0.8981, + "step": 1871 + }, + { + "epoch": 0.16882355593633044, + "grad_norm": 1.347884758635979, + "learning_rate": 3.801285423138079e-06, + "loss": 0.9362, + "step": 1872 + }, + { + "epoch": 0.1689137394598007, + "grad_norm": 1.336291082337688, + "learning_rate": 3.8010314715132037e-06, + "loss": 0.9302, + "step": 1873 + }, + { + "epoch": 0.16900392298327097, + "grad_norm": 1.463197316920937, + "learning_rate": 3.800777366214393e-06, + "loss": 0.9374, + "step": 1874 + }, + { + "epoch": 0.16909410650674123, + "grad_norm": 1.8634413164590062, + "learning_rate": 3.800523107263328e-06, + "loss": 0.9602, + "step": 1875 + }, + { + "epoch": 0.1691842900302115, + "grad_norm": 1.4809413346874645, + "learning_rate": 3.800268694681703e-06, + "loss": 1.0324, + "step": 1876 + }, + { + "epoch": 0.16927447355368175, + "grad_norm": 0.941413404508764, + "learning_rate": 3.800014128491227e-06, + "loss": 0.8205, + "step": 1877 + }, + { + "epoch": 0.169364657077152, + "grad_norm": 1.4667271420986578, + "learning_rate": 3.79975940871362e-06, + "loss": 1.0324, + "step": 1878 + }, + { + "epoch": 0.16945484060062227, + "grad_norm": 1.8251807476052428, + "learning_rate": 3.799504535370617e-06, + "loss": 0.989, + "step": 1879 + }, + { + "epoch": 0.16954502412409253, + "grad_norm": 2.1979255736352834, + "learning_rate": 3.799249508483964e-06, + "loss": 0.9169, + "step": 1880 + }, + { + "epoch": 0.1696352076475628, + "grad_norm": 1.2467033784394168, + "learning_rate": 3.798994328075422e-06, + "loss": 0.8715, + "step": 1881 + }, + { + "epoch": 0.16972539117103305, + "grad_norm": 1.5652696095730845, + "learning_rate": 3.798738994166765e-06, + "loss": 0.9166, + "step": 1882 + }, + { + "epoch": 0.1698155746945033, + "grad_norm": 1.5585779939777693, + "learning_rate": 3.7984835067797788e-06, + "loss": 0.9812, + "step": 1883 + }, + { + "epoch": 0.16990575821797357, + "grad_norm": 1.4948371410652417, + "learning_rate": 3.798227865936263e-06, + "loss": 1.0726, + "step": 1884 + }, + { + "epoch": 0.16999594174144383, + "grad_norm": 1.5130311458934358, + "learning_rate": 3.7979720716580297e-06, + "loss": 0.8463, + "step": 1885 + }, + { + "epoch": 0.1700861252649141, + "grad_norm": 1.5241177336254368, + "learning_rate": 3.7977161239669057e-06, + "loss": 0.9687, + "step": 1886 + }, + { + "epoch": 
0.17017630878838436, + "grad_norm": 1.6615281125266677, + "learning_rate": 3.7974600228847294e-06, + "loss": 0.9636, + "step": 1887 + }, + { + "epoch": 0.17026649231185462, + "grad_norm": 1.9216137219976923, + "learning_rate": 3.7972037684333534e-06, + "loss": 0.9124, + "step": 1888 + }, + { + "epoch": 0.17035667583532488, + "grad_norm": 1.5082708265022833, + "learning_rate": 3.796947360634642e-06, + "loss": 0.9135, + "step": 1889 + }, + { + "epoch": 0.17044685935879514, + "grad_norm": 1.6710767890458222, + "learning_rate": 3.796690799510473e-06, + "loss": 1.0272, + "step": 1890 + }, + { + "epoch": 0.1705370428822654, + "grad_norm": 1.4259800602507267, + "learning_rate": 3.7964340850827387e-06, + "loss": 0.9446, + "step": 1891 + }, + { + "epoch": 0.17062722640573566, + "grad_norm": 1.0397406638025068, + "learning_rate": 3.7961772173733425e-06, + "loss": 0.7836, + "step": 1892 + }, + { + "epoch": 0.17071740992920592, + "grad_norm": 1.2909132185355905, + "learning_rate": 3.7959201964042024e-06, + "loss": 0.9718, + "step": 1893 + }, + { + "epoch": 0.1708075934526762, + "grad_norm": 1.5199080835960332, + "learning_rate": 3.795663022197248e-06, + "loss": 1.0046, + "step": 1894 + }, + { + "epoch": 0.17089777697614647, + "grad_norm": 1.9461530225403512, + "learning_rate": 3.7954056947744242e-06, + "loss": 0.9835, + "step": 1895 + }, + { + "epoch": 0.17098796049961673, + "grad_norm": 1.5241976675416413, + "learning_rate": 3.7951482141576863e-06, + "loss": 0.959, + "step": 1896 + }, + { + "epoch": 0.171078144023087, + "grad_norm": 1.2394409044434531, + "learning_rate": 3.794890580369004e-06, + "loss": 1.0239, + "step": 1897 + }, + { + "epoch": 0.17116832754655725, + "grad_norm": 1.6579687449381832, + "learning_rate": 3.7946327934303612e-06, + "loss": 1.0135, + "step": 1898 + }, + { + "epoch": 0.1712585110700275, + "grad_norm": 1.4321779333099334, + "learning_rate": 3.794374853363752e-06, + "loss": 1.0739, + "step": 1899 + }, + { + "epoch": 0.17134869459349777, + "grad_norm": 1.3115984227039827, + "learning_rate": 3.794116760191187e-06, + "loss": 0.998, + "step": 1900 + }, + { + "epoch": 0.17143887811696804, + "grad_norm": 1.4813594504050398, + "learning_rate": 3.7938585139346877e-06, + "loss": 1.0776, + "step": 1901 + }, + { + "epoch": 0.1715290616404383, + "grad_norm": 1.5074365176725095, + "learning_rate": 3.793600114616288e-06, + "loss": 0.9579, + "step": 1902 + }, + { + "epoch": 0.17161924516390856, + "grad_norm": 1.1733871875852129, + "learning_rate": 3.793341562258037e-06, + "loss": 0.7897, + "step": 1903 + }, + { + "epoch": 0.17170942868737882, + "grad_norm": 1.392565241643048, + "learning_rate": 3.7930828568819953e-06, + "loss": 0.9925, + "step": 1904 + }, + { + "epoch": 0.17179961221084908, + "grad_norm": 1.6308258966904352, + "learning_rate": 3.7928239985102378e-06, + "loss": 0.9786, + "step": 1905 + }, + { + "epoch": 0.17188979573431934, + "grad_norm": 1.421209493980831, + "learning_rate": 3.7925649871648505e-06, + "loss": 0.975, + "step": 1906 + }, + { + "epoch": 0.1719799792577896, + "grad_norm": 1.4106237846372656, + "learning_rate": 3.792305822867935e-06, + "loss": 0.9087, + "step": 1907 + }, + { + "epoch": 0.17207016278125986, + "grad_norm": 1.5325993412048764, + "learning_rate": 3.792046505641604e-06, + "loss": 0.9454, + "step": 1908 + }, + { + "epoch": 0.17216034630473012, + "grad_norm": 1.3537542840074435, + "learning_rate": 3.791787035507984e-06, + "loss": 0.9427, + "step": 1909 + }, + { + "epoch": 0.17225052982820038, + "grad_norm": 1.6125635208511655, + "learning_rate": 
3.7915274124892136e-06, + "loss": 0.931, + "step": 1910 + }, + { + "epoch": 0.17234071335167064, + "grad_norm": 1.3560419107588217, + "learning_rate": 3.7912676366074466e-06, + "loss": 0.9643, + "step": 1911 + }, + { + "epoch": 0.1724308968751409, + "grad_norm": 1.8754154380868333, + "learning_rate": 3.7910077078848478e-06, + "loss": 0.9478, + "step": 1912 + }, + { + "epoch": 0.17252108039861117, + "grad_norm": 1.624093316295779, + "learning_rate": 3.7907476263435957e-06, + "loss": 0.9404, + "step": 1913 + }, + { + "epoch": 0.17261126392208143, + "grad_norm": 1.5412670167353288, + "learning_rate": 3.7904873920058826e-06, + "loss": 0.9493, + "step": 1914 + }, + { + "epoch": 0.1727014474455517, + "grad_norm": 1.8659151602426591, + "learning_rate": 3.7902270048939114e-06, + "loss": 0.8517, + "step": 1915 + }, + { + "epoch": 0.17279163096902195, + "grad_norm": 2.1846833760415367, + "learning_rate": 3.7899664650299023e-06, + "loss": 0.9403, + "step": 1916 + }, + { + "epoch": 0.1728818144924922, + "grad_norm": 1.136408340701841, + "learning_rate": 3.7897057724360836e-06, + "loss": 0.9834, + "step": 1917 + }, + { + "epoch": 0.1729719980159625, + "grad_norm": 1.7415273467684154, + "learning_rate": 3.7894449271347004e-06, + "loss": 0.9919, + "step": 1918 + }, + { + "epoch": 0.17306218153943276, + "grad_norm": 1.4560325001715402, + "learning_rate": 3.789183929148009e-06, + "loss": 1.0358, + "step": 1919 + }, + { + "epoch": 0.17315236506290302, + "grad_norm": 1.4022817783954469, + "learning_rate": 3.7889227784982795e-06, + "loss": 1.0029, + "step": 1920 + }, + { + "epoch": 0.17324254858637328, + "grad_norm": 1.3268946614832429, + "learning_rate": 3.7886614752077945e-06, + "loss": 0.98, + "step": 1921 + }, + { + "epoch": 0.17333273210984354, + "grad_norm": 1.8075255170316293, + "learning_rate": 3.7884000192988495e-06, + "loss": 0.9785, + "step": 1922 + }, + { + "epoch": 0.1734229156333138, + "grad_norm": 1.6558562476472147, + "learning_rate": 3.7881384107937546e-06, + "loss": 1.0302, + "step": 1923 + }, + { + "epoch": 0.17351309915678406, + "grad_norm": 1.6228902031924533, + "learning_rate": 3.78787664971483e-06, + "loss": 0.9704, + "step": 1924 + }, + { + "epoch": 0.17360328268025432, + "grad_norm": 1.258351938897491, + "learning_rate": 3.7876147360844115e-06, + "loss": 0.9463, + "step": 1925 + }, + { + "epoch": 0.17369346620372458, + "grad_norm": 1.730641582460542, + "learning_rate": 3.7873526699248474e-06, + "loss": 1.049, + "step": 1926 + }, + { + "epoch": 0.17378364972719484, + "grad_norm": 1.376146185594829, + "learning_rate": 3.7870904512584974e-06, + "loss": 0.9791, + "step": 1927 + }, + { + "epoch": 0.1738738332506651, + "grad_norm": 1.5165725577504536, + "learning_rate": 3.7868280801077368e-06, + "loss": 0.9812, + "step": 1928 + }, + { + "epoch": 0.17396401677413537, + "grad_norm": 1.1818937172459105, + "learning_rate": 3.7865655564949517e-06, + "loss": 0.9715, + "step": 1929 + }, + { + "epoch": 0.17405420029760563, + "grad_norm": 1.309099424218892, + "learning_rate": 3.786302880442542e-06, + "loss": 0.9494, + "step": 1930 + }, + { + "epoch": 0.1741443838210759, + "grad_norm": 1.3731793140335624, + "learning_rate": 3.7860400519729215e-06, + "loss": 0.9802, + "step": 1931 + }, + { + "epoch": 0.17423456734454615, + "grad_norm": 1.7912855038581643, + "learning_rate": 3.7857770711085157e-06, + "loss": 0.9985, + "step": 1932 + }, + { + "epoch": 0.1743247508680164, + "grad_norm": 1.7225565978829713, + "learning_rate": 3.785513937871763e-06, + "loss": 1.0278, + "step": 1933 + }, + { + "epoch": 
0.17441493439148667, + "grad_norm": 1.4932198673096657, + "learning_rate": 3.785250652285116e-06, + "loss": 0.9518, + "step": 1934 + }, + { + "epoch": 0.17450511791495693, + "grad_norm": 1.6358434845034033, + "learning_rate": 3.78498721437104e-06, + "loss": 1.0471, + "step": 1935 + }, + { + "epoch": 0.1745953014384272, + "grad_norm": 2.210935343279646, + "learning_rate": 3.784723624152012e-06, + "loss": 0.9752, + "step": 1936 + }, + { + "epoch": 0.17468548496189745, + "grad_norm": 2.7213409180143757, + "learning_rate": 3.784459881650524e-06, + "loss": 0.9232, + "step": 1937 + }, + { + "epoch": 0.1747756684853677, + "grad_norm": 1.3597829360101081, + "learning_rate": 3.784195986889079e-06, + "loss": 0.9542, + "step": 1938 + }, + { + "epoch": 0.17486585200883797, + "grad_norm": 2.2192301700724983, + "learning_rate": 3.7839319398901946e-06, + "loss": 0.9027, + "step": 1939 + }, + { + "epoch": 0.17495603553230824, + "grad_norm": 1.373480433985351, + "learning_rate": 3.7836677406764013e-06, + "loss": 1.0451, + "step": 1940 + }, + { + "epoch": 0.1750462190557785, + "grad_norm": 1.5087654383516602, + "learning_rate": 3.7834033892702407e-06, + "loss": 1.0252, + "step": 1941 + }, + { + "epoch": 0.17513640257924878, + "grad_norm": 1.4759311840173424, + "learning_rate": 3.783138885694269e-06, + "loss": 0.9343, + "step": 1942 + }, + { + "epoch": 0.17522658610271905, + "grad_norm": 1.593511675798763, + "learning_rate": 3.7828742299710558e-06, + "loss": 1.0553, + "step": 1943 + }, + { + "epoch": 0.1753167696261893, + "grad_norm": 1.5176935139781074, + "learning_rate": 3.782609422123183e-06, + "loss": 0.9584, + "step": 1944 + }, + { + "epoch": 0.17540695314965957, + "grad_norm": 1.7494824870438528, + "learning_rate": 3.7823444621732444e-06, + "loss": 0.9764, + "step": 1945 + }, + { + "epoch": 0.17549713667312983, + "grad_norm": 1.9180576902072723, + "learning_rate": 3.782079350143849e-06, + "loss": 0.9965, + "step": 1946 + }, + { + "epoch": 0.1755873201966001, + "grad_norm": 1.6744392196454505, + "learning_rate": 3.781814086057617e-06, + "loss": 1.0797, + "step": 1947 + }, + { + "epoch": 0.17567750372007035, + "grad_norm": 1.404054029156017, + "learning_rate": 3.7815486699371826e-06, + "loss": 0.9687, + "step": 1948 + }, + { + "epoch": 0.1757676872435406, + "grad_norm": 1.437831259996635, + "learning_rate": 3.7812831018051918e-06, + "loss": 0.9558, + "step": 1949 + }, + { + "epoch": 0.17585787076701087, + "grad_norm": 1.6963443692183735, + "learning_rate": 3.7810173816843058e-06, + "loss": 0.9985, + "step": 1950 + }, + { + "epoch": 0.17594805429048113, + "grad_norm": 1.4574299693745507, + "learning_rate": 3.7807515095971955e-06, + "loss": 1.0422, + "step": 1951 + }, + { + "epoch": 0.1760382378139514, + "grad_norm": 1.4043222565671731, + "learning_rate": 3.7804854855665475e-06, + "loss": 0.9713, + "step": 1952 + }, + { + "epoch": 0.17612842133742165, + "grad_norm": 1.4175867664350228, + "learning_rate": 3.7802193096150606e-06, + "loss": 0.9848, + "step": 1953 + }, + { + "epoch": 0.17621860486089191, + "grad_norm": 1.3147287519079534, + "learning_rate": 3.779952981765446e-06, + "loss": 0.9362, + "step": 1954 + }, + { + "epoch": 0.17630878838436218, + "grad_norm": 1.280804161098614, + "learning_rate": 3.779686502040429e-06, + "loss": 0.9885, + "step": 1955 + }, + { + "epoch": 0.17639897190783244, + "grad_norm": 0.7804669843163053, + "learning_rate": 3.779419870462746e-06, + "loss": 0.798, + "step": 1956 + }, + { + "epoch": 0.1764891554313027, + "grad_norm": 1.5710220631653364, + "learning_rate": 
3.779153087055148e-06, + "loss": 0.9554, + "step": 1957 + }, + { + "epoch": 0.17657933895477296, + "grad_norm": 1.6743183286160224, + "learning_rate": 3.7788861518403988e-06, + "loss": 0.9833, + "step": 1958 + }, + { + "epoch": 0.17666952247824322, + "grad_norm": 1.6619697987352113, + "learning_rate": 3.7786190648412742e-06, + "loss": 0.8767, + "step": 1959 + }, + { + "epoch": 0.17675970600171348, + "grad_norm": 0.7996803345383436, + "learning_rate": 3.778351826080564e-06, + "loss": 0.8113, + "step": 1960 + }, + { + "epoch": 0.17684988952518374, + "grad_norm": 1.4304672366206665, + "learning_rate": 3.7780844355810704e-06, + "loss": 1.0829, + "step": 1961 + }, + { + "epoch": 0.176940073048654, + "grad_norm": 1.4196306058452715, + "learning_rate": 3.777816893365608e-06, + "loss": 0.9897, + "step": 1962 + }, + { + "epoch": 0.17703025657212426, + "grad_norm": 1.4963104648849512, + "learning_rate": 3.7775491994570057e-06, + "loss": 1.0549, + "step": 1963 + }, + { + "epoch": 0.17712044009559452, + "grad_norm": 1.4604543611212124, + "learning_rate": 3.777281353878105e-06, + "loss": 0.9459, + "step": 1964 + }, + { + "epoch": 0.17721062361906478, + "grad_norm": 0.6698608370631051, + "learning_rate": 3.777013356651758e-06, + "loss": 0.8109, + "step": 1965 + }, + { + "epoch": 0.17730080714253507, + "grad_norm": 1.5727955937893328, + "learning_rate": 3.776745207800834e-06, + "loss": 0.9925, + "step": 1966 + }, + { + "epoch": 0.17739099066600533, + "grad_norm": 1.4490169580018815, + "learning_rate": 3.7764769073482122e-06, + "loss": 1.0127, + "step": 1967 + }, + { + "epoch": 0.1774811741894756, + "grad_norm": 1.3208518196440857, + "learning_rate": 3.7762084553167846e-06, + "loss": 1.044, + "step": 1968 + }, + { + "epoch": 0.17757135771294585, + "grad_norm": 1.5303894271662846, + "learning_rate": 3.775939851729458e-06, + "loss": 1.0667, + "step": 1969 + }, + { + "epoch": 0.17766154123641612, + "grad_norm": 1.528471703389413, + "learning_rate": 3.775671096609151e-06, + "loss": 0.9387, + "step": 1970 + }, + { + "epoch": 0.17775172475988638, + "grad_norm": 1.849223143907169, + "learning_rate": 3.775402189978795e-06, + "loss": 0.9644, + "step": 1971 + }, + { + "epoch": 0.17784190828335664, + "grad_norm": 1.2653205411188675, + "learning_rate": 3.7751331318613343e-06, + "loss": 0.9644, + "step": 1972 + }, + { + "epoch": 0.1779320918068269, + "grad_norm": 1.5592517658839549, + "learning_rate": 3.774863922279727e-06, + "loss": 0.9988, + "step": 1973 + }, + { + "epoch": 0.17802227533029716, + "grad_norm": 1.4362022512813761, + "learning_rate": 3.7745945612569435e-06, + "loss": 1.0456, + "step": 1974 + }, + { + "epoch": 0.17811245885376742, + "grad_norm": 1.2714485128241073, + "learning_rate": 3.7743250488159674e-06, + "loss": 0.8606, + "step": 1975 + }, + { + "epoch": 0.17820264237723768, + "grad_norm": 1.5314433695865115, + "learning_rate": 3.774055384979794e-06, + "loss": 0.8769, + "step": 1976 + }, + { + "epoch": 0.17829282590070794, + "grad_norm": 1.4326855019765687, + "learning_rate": 3.773785569771433e-06, + "loss": 0.9635, + "step": 1977 + }, + { + "epoch": 0.1783830094241782, + "grad_norm": 1.50942551111721, + "learning_rate": 3.7735156032139066e-06, + "loss": 0.9642, + "step": 1978 + }, + { + "epoch": 0.17847319294764846, + "grad_norm": 1.5881685246039616, + "learning_rate": 3.773245485330251e-06, + "loss": 1.0785, + "step": 1979 + }, + { + "epoch": 0.17856337647111872, + "grad_norm": 1.5390918554614372, + "learning_rate": 3.7729752161435115e-06, + "loss": 0.9514, + "step": 1980 + }, + { + "epoch": 
0.17865355999458898, + "grad_norm": 1.5322695860405275, + "learning_rate": 3.7727047956767514e-06, + "loss": 1.0512, + "step": 1981 + }, + { + "epoch": 0.17874374351805924, + "grad_norm": 1.3117290684996783, + "learning_rate": 3.7724342239530436e-06, + "loss": 1.1063, + "step": 1982 + }, + { + "epoch": 0.1788339270415295, + "grad_norm": 1.6061575124512124, + "learning_rate": 3.772163500995474e-06, + "loss": 0.9304, + "step": 1983 + }, + { + "epoch": 0.17892411056499977, + "grad_norm": 1.7784309164276286, + "learning_rate": 3.7718926268271437e-06, + "loss": 1.0691, + "step": 1984 + }, + { + "epoch": 0.17901429408847003, + "grad_norm": 2.3890030289028377, + "learning_rate": 3.771621601471164e-06, + "loss": 0.9927, + "step": 1985 + }, + { + "epoch": 0.1791044776119403, + "grad_norm": 1.4091657292870297, + "learning_rate": 3.771350424950661e-06, + "loss": 1.0225, + "step": 1986 + }, + { + "epoch": 0.17919466113541055, + "grad_norm": 1.5873824428975933, + "learning_rate": 3.771079097288772e-06, + "loss": 0.9274, + "step": 1987 + }, + { + "epoch": 0.1792848446588808, + "grad_norm": 1.5795921078036326, + "learning_rate": 3.770807618508649e-06, + "loss": 0.989, + "step": 1988 + }, + { + "epoch": 0.17937502818235107, + "grad_norm": 1.4047007609962416, + "learning_rate": 3.7705359886334555e-06, + "loss": 0.9713, + "step": 1989 + }, + { + "epoch": 0.17946521170582136, + "grad_norm": 1.5745942304560014, + "learning_rate": 3.7702642076863694e-06, + "loss": 0.9527, + "step": 1990 + }, + { + "epoch": 0.17955539522929162, + "grad_norm": 1.5974619404493122, + "learning_rate": 3.7699922756905795e-06, + "loss": 1.0348, + "step": 1991 + }, + { + "epoch": 0.17964557875276188, + "grad_norm": 1.5135690961830213, + "learning_rate": 3.7697201926692895e-06, + "loss": 0.9373, + "step": 1992 + }, + { + "epoch": 0.17973576227623214, + "grad_norm": 1.6190532297216165, + "learning_rate": 3.7694479586457144e-06, + "loss": 0.9876, + "step": 1993 + }, + { + "epoch": 0.1798259457997024, + "grad_norm": 1.959612638186931, + "learning_rate": 3.7691755736430827e-06, + "loss": 0.9774, + "step": 1994 + }, + { + "epoch": 0.17991612932317266, + "grad_norm": 1.4209264588624428, + "learning_rate": 3.768903037684636e-06, + "loss": 1.0291, + "step": 1995 + }, + { + "epoch": 0.18000631284664292, + "grad_norm": 1.7250186006952943, + "learning_rate": 3.7686303507936284e-06, + "loss": 1.026, + "step": 1996 + }, + { + "epoch": 0.18009649637011318, + "grad_norm": 1.600784336010782, + "learning_rate": 3.7683575129933272e-06, + "loss": 1.0222, + "step": 1997 + }, + { + "epoch": 0.18018667989358345, + "grad_norm": 1.281125923289654, + "learning_rate": 3.7680845243070128e-06, + "loss": 0.9818, + "step": 1998 + }, + { + "epoch": 0.1802768634170537, + "grad_norm": 0.8585888127470505, + "learning_rate": 3.7678113847579767e-06, + "loss": 0.7771, + "step": 1999 + }, + { + "epoch": 0.18036704694052397, + "grad_norm": 1.7350354011287972, + "learning_rate": 3.7675380943695264e-06, + "loss": 1.0513, + "step": 2000 + }, + { + "epoch": 0.18045723046399423, + "grad_norm": 1.6610429617270965, + "learning_rate": 3.7672646531649795e-06, + "loss": 0.9436, + "step": 2001 + }, + { + "epoch": 0.1805474139874645, + "grad_norm": 0.9806495894879965, + "learning_rate": 3.7669910611676682e-06, + "loss": 0.7989, + "step": 2002 + }, + { + "epoch": 0.18063759751093475, + "grad_norm": 1.5754709115014027, + "learning_rate": 3.7667173184009356e-06, + "loss": 0.9745, + "step": 2003 + }, + { + "epoch": 0.180727781034405, + "grad_norm": 1.35094486302512, + "learning_rate": 
3.7664434248881403e-06, + "loss": 1.0422, + "step": 2004 + }, + { + "epoch": 0.18081796455787527, + "grad_norm": 1.7791126880304169, + "learning_rate": 3.766169380652652e-06, + "loss": 0.9943, + "step": 2005 + }, + { + "epoch": 0.18090814808134553, + "grad_norm": 1.6140194143949897, + "learning_rate": 3.7658951857178537e-06, + "loss": 1.027, + "step": 2006 + }, + { + "epoch": 0.1809983316048158, + "grad_norm": 1.7830274982182597, + "learning_rate": 3.7656208401071414e-06, + "loss": 0.9965, + "step": 2007 + }, + { + "epoch": 0.18108851512828605, + "grad_norm": 1.6331412974499553, + "learning_rate": 3.7653463438439225e-06, + "loss": 0.9267, + "step": 2008 + }, + { + "epoch": 0.18117869865175631, + "grad_norm": 1.5709089218827519, + "learning_rate": 3.7650716969516203e-06, + "loss": 0.9897, + "step": 2009 + }, + { + "epoch": 0.18126888217522658, + "grad_norm": 1.660436126454553, + "learning_rate": 3.764796899453668e-06, + "loss": 0.8892, + "step": 2010 + }, + { + "epoch": 0.18135906569869684, + "grad_norm": 1.5092188152120094, + "learning_rate": 3.7645219513735134e-06, + "loss": 1.0243, + "step": 2011 + }, + { + "epoch": 0.1814492492221671, + "grad_norm": 1.4863045448708552, + "learning_rate": 3.764246852734617e-06, + "loss": 1.0201, + "step": 2012 + }, + { + "epoch": 0.18153943274563739, + "grad_norm": 1.7158083624904013, + "learning_rate": 3.7639716035604502e-06, + "loss": 0.9487, + "step": 2013 + }, + { + "epoch": 0.18162961626910765, + "grad_norm": 1.0714358261407744, + "learning_rate": 3.7636962038745e-06, + "loss": 0.7955, + "step": 2014 + }, + { + "epoch": 0.1817197997925779, + "grad_norm": 1.5669821729248492, + "learning_rate": 3.763420653700265e-06, + "loss": 0.947, + "step": 2015 + }, + { + "epoch": 0.18180998331604817, + "grad_norm": 1.4427451761363719, + "learning_rate": 3.7631449530612565e-06, + "loss": 0.9489, + "step": 2016 + }, + { + "epoch": 0.18190016683951843, + "grad_norm": 1.4125839427884948, + "learning_rate": 3.762869101980999e-06, + "loss": 1.0727, + "step": 2017 + }, + { + "epoch": 0.1819903503629887, + "grad_norm": 1.542765698170232, + "learning_rate": 3.7625931004830287e-06, + "loss": 0.9783, + "step": 2018 + }, + { + "epoch": 0.18208053388645895, + "grad_norm": 1.5201212465716556, + "learning_rate": 3.7623169485908966e-06, + "loss": 0.9401, + "step": 2019 + }, + { + "epoch": 0.1821707174099292, + "grad_norm": 1.0667201547821572, + "learning_rate": 3.7620406463281647e-06, + "loss": 0.857, + "step": 2020 + }, + { + "epoch": 0.18226090093339947, + "grad_norm": 1.654027563375128, + "learning_rate": 3.7617641937184095e-06, + "loss": 0.9997, + "step": 2021 + }, + { + "epoch": 0.18235108445686973, + "grad_norm": 1.5699110134685854, + "learning_rate": 3.761487590785219e-06, + "loss": 1.0111, + "step": 2022 + }, + { + "epoch": 0.18244126798034, + "grad_norm": 1.4579320173753672, + "learning_rate": 3.7612108375521942e-06, + "loss": 0.9584, + "step": 2023 + }, + { + "epoch": 0.18253145150381025, + "grad_norm": 1.4886227513788883, + "learning_rate": 3.76093393404295e-06, + "loss": 1.0274, + "step": 2024 + }, + { + "epoch": 0.18262163502728052, + "grad_norm": 2.966114681483327, + "learning_rate": 3.7606568802811126e-06, + "loss": 0.9731, + "step": 2025 + }, + { + "epoch": 0.18271181855075078, + "grad_norm": 1.856341347549975, + "learning_rate": 3.760379676290322e-06, + "loss": 0.996, + "step": 2026 + }, + { + "epoch": 0.18280200207422104, + "grad_norm": 1.6116698116651829, + "learning_rate": 3.760102322094231e-06, + "loss": 0.9189, + "step": 2027 + }, + { + "epoch": 
0.1828921855976913, + "grad_norm": 1.2463448011485083, + "learning_rate": 3.759824817716504e-06, + "loss": 0.9437, + "step": 2028 + }, + { + "epoch": 0.18298236912116156, + "grad_norm": 1.1938347751413987, + "learning_rate": 3.759547163180821e-06, + "loss": 1.0308, + "step": 2029 + }, + { + "epoch": 0.18307255264463182, + "grad_norm": 1.299891720443876, + "learning_rate": 3.759269358510871e-06, + "loss": 0.7876, + "step": 2030 + }, + { + "epoch": 0.18316273616810208, + "grad_norm": 1.5945214293204126, + "learning_rate": 3.75899140373036e-06, + "loss": 0.884, + "step": 2031 + }, + { + "epoch": 0.18325291969157234, + "grad_norm": 2.4892125086292762, + "learning_rate": 3.7587132988630028e-06, + "loss": 1.0544, + "step": 2032 + }, + { + "epoch": 0.1833431032150426, + "grad_norm": 1.6696138153581181, + "learning_rate": 3.7584350439325295e-06, + "loss": 1.0146, + "step": 2033 + }, + { + "epoch": 0.18343328673851286, + "grad_norm": 1.5773958088945497, + "learning_rate": 3.758156638962682e-06, + "loss": 0.9728, + "step": 2034 + }, + { + "epoch": 0.18352347026198312, + "grad_norm": 1.4605026005790671, + "learning_rate": 3.757878083977216e-06, + "loss": 0.9372, + "step": 2035 + }, + { + "epoch": 0.18361365378545338, + "grad_norm": 1.3365843249918898, + "learning_rate": 3.7575993789999e-06, + "loss": 0.9749, + "step": 2036 + }, + { + "epoch": 0.18370383730892367, + "grad_norm": 1.3630142168995436, + "learning_rate": 3.757320524054512e-06, + "loss": 1.0115, + "step": 2037 + }, + { + "epoch": 0.18379402083239393, + "grad_norm": 1.4259082476348952, + "learning_rate": 3.757041519164848e-06, + "loss": 0.9168, + "step": 2038 + }, + { + "epoch": 0.1838842043558642, + "grad_norm": 1.7284576923554407, + "learning_rate": 3.7567623643547133e-06, + "loss": 0.9906, + "step": 2039 + }, + { + "epoch": 0.18397438787933446, + "grad_norm": 1.4379834730394996, + "learning_rate": 3.756483059647927e-06, + "loss": 0.9869, + "step": 2040 + }, + { + "epoch": 0.18406457140280472, + "grad_norm": 1.360347487641398, + "learning_rate": 3.756203605068321e-06, + "loss": 0.9539, + "step": 2041 + }, + { + "epoch": 0.18415475492627498, + "grad_norm": 1.506800494763015, + "learning_rate": 3.7559240006397396e-06, + "loss": 0.9688, + "step": 2042 + }, + { + "epoch": 0.18424493844974524, + "grad_norm": 2.9316502214512323, + "learning_rate": 3.7556442463860406e-06, + "loss": 1.0989, + "step": 2043 + }, + { + "epoch": 0.1843351219732155, + "grad_norm": 1.4249946828374878, + "learning_rate": 3.7553643423310934e-06, + "loss": 1.0874, + "step": 2044 + }, + { + "epoch": 0.18442530549668576, + "grad_norm": 2.0933348970144148, + "learning_rate": 3.755084288498782e-06, + "loss": 0.8506, + "step": 2045 + }, + { + "epoch": 0.18451548902015602, + "grad_norm": 1.3716738090622738, + "learning_rate": 3.754804084913002e-06, + "loss": 0.9935, + "step": 2046 + }, + { + "epoch": 0.18460567254362628, + "grad_norm": 1.3532537121597643, + "learning_rate": 3.754523731597661e-06, + "loss": 1.0476, + "step": 2047 + }, + { + "epoch": 0.18469585606709654, + "grad_norm": 1.8192795208769865, + "learning_rate": 3.754243228576681e-06, + "loss": 0.8782, + "step": 2048 + }, + { + "epoch": 0.1847860395905668, + "grad_norm": 1.6570208213294533, + "learning_rate": 3.753962575873996e-06, + "loss": 1.0311, + "step": 2049 + }, + { + "epoch": 0.18487622311403706, + "grad_norm": 1.3828675857175539, + "learning_rate": 3.7536817735135527e-06, + "loss": 0.94, + "step": 2050 + }, + { + "epoch": 0.18496640663750732, + "grad_norm": 1.3580443786763177, + "learning_rate": 
3.753400821519311e-06, + "loss": 1.0263, + "step": 2051 + }, + { + "epoch": 0.18505659016097759, + "grad_norm": 1.346129904785629, + "learning_rate": 3.7531197199152426e-06, + "loss": 0.9098, + "step": 2052 + }, + { + "epoch": 0.18514677368444785, + "grad_norm": 1.3202025689401584, + "learning_rate": 3.7528384687253335e-06, + "loss": 0.8515, + "step": 2053 + }, + { + "epoch": 0.1852369572079181, + "grad_norm": 1.552664893173203, + "learning_rate": 3.7525570679735815e-06, + "loss": 1.0908, + "step": 2054 + }, + { + "epoch": 0.18532714073138837, + "grad_norm": 1.694610731723948, + "learning_rate": 3.7522755176839965e-06, + "loss": 0.9929, + "step": 2055 + }, + { + "epoch": 0.18541732425485863, + "grad_norm": 1.6827318857589808, + "learning_rate": 3.7519938178806027e-06, + "loss": 0.9894, + "step": 2056 + }, + { + "epoch": 0.1855075077783289, + "grad_norm": 1.9002627166013395, + "learning_rate": 3.7517119685874358e-06, + "loss": 0.9281, + "step": 2057 + }, + { + "epoch": 0.18559769130179915, + "grad_norm": 1.441783907244709, + "learning_rate": 3.7514299698285447e-06, + "loss": 0.9682, + "step": 2058 + }, + { + "epoch": 0.1856878748252694, + "grad_norm": 1.3076276714653934, + "learning_rate": 3.751147821627991e-06, + "loss": 0.9642, + "step": 2059 + }, + { + "epoch": 0.18577805834873967, + "grad_norm": 1.6856169614221836, + "learning_rate": 3.75086552400985e-06, + "loss": 1.0006, + "step": 2060 + }, + { + "epoch": 0.18586824187220996, + "grad_norm": 1.5096096733056248, + "learning_rate": 3.750583076998208e-06, + "loss": 0.9573, + "step": 2061 + }, + { + "epoch": 0.18595842539568022, + "grad_norm": 1.5840590302998843, + "learning_rate": 3.7503004806171655e-06, + "loss": 0.967, + "step": 2062 + }, + { + "epoch": 0.18604860891915048, + "grad_norm": 1.8044737321818451, + "learning_rate": 3.7500177348908354e-06, + "loss": 0.9151, + "step": 2063 + }, + { + "epoch": 0.18613879244262074, + "grad_norm": 1.3360118733723632, + "learning_rate": 3.749734839843342e-06, + "loss": 0.9982, + "step": 2064 + }, + { + "epoch": 0.186228975966091, + "grad_norm": 1.4805830858512758, + "learning_rate": 3.7494517954988245e-06, + "loss": 0.9814, + "step": 2065 + }, + { + "epoch": 0.18631915948956126, + "grad_norm": 1.7020676156496863, + "learning_rate": 3.749168601881433e-06, + "loss": 0.9731, + "step": 2066 + }, + { + "epoch": 0.18640934301303153, + "grad_norm": 1.7558289589968474, + "learning_rate": 3.7488852590153315e-06, + "loss": 0.9613, + "step": 2067 + }, + { + "epoch": 0.1864995265365018, + "grad_norm": 1.4227671915670308, + "learning_rate": 3.748601766924697e-06, + "loss": 0.9778, + "step": 2068 + }, + { + "epoch": 0.18658971005997205, + "grad_norm": 1.6233402726079262, + "learning_rate": 3.7483181256337176e-06, + "loss": 0.9597, + "step": 2069 + }, + { + "epoch": 0.1866798935834423, + "grad_norm": 1.485198585512868, + "learning_rate": 3.7480343351665962e-06, + "loss": 0.9828, + "step": 2070 + }, + { + "epoch": 0.18677007710691257, + "grad_norm": 1.4555766435766713, + "learning_rate": 3.747750395547546e-06, + "loss": 0.9676, + "step": 2071 + }, + { + "epoch": 0.18686026063038283, + "grad_norm": 1.2261165610065878, + "learning_rate": 3.7474663068007956e-06, + "loss": 0.9615, + "step": 2072 + }, + { + "epoch": 0.1869504441538531, + "grad_norm": 1.223214262842013, + "learning_rate": 3.747182068950584e-06, + "loss": 0.9637, + "step": 2073 + }, + { + "epoch": 0.18704062767732335, + "grad_norm": 1.4708932700476054, + "learning_rate": 3.7468976820211643e-06, + "loss": 0.99, + "step": 2074 + }, + { + "epoch": 
0.1871308112007936, + "grad_norm": 1.5012569723991707, + "learning_rate": 3.746613146036803e-06, + "loss": 0.9829, + "step": 2075 + }, + { + "epoch": 0.18722099472426387, + "grad_norm": 1.6084211911860107, + "learning_rate": 3.7463284610217766e-06, + "loss": 1.0171, + "step": 2076 + }, + { + "epoch": 0.18731117824773413, + "grad_norm": 1.713455576796964, + "learning_rate": 3.746043627000377e-06, + "loss": 0.916, + "step": 2077 + }, + { + "epoch": 0.1874013617712044, + "grad_norm": 1.4205462776388005, + "learning_rate": 3.7457586439969076e-06, + "loss": 0.9317, + "step": 2078 + }, + { + "epoch": 0.18749154529467466, + "grad_norm": 1.205593351400606, + "learning_rate": 3.7454735120356842e-06, + "loss": 0.9667, + "step": 2079 + }, + { + "epoch": 0.18758172881814492, + "grad_norm": 1.4550026688436566, + "learning_rate": 3.7451882311410373e-06, + "loss": 1.047, + "step": 2080 + }, + { + "epoch": 0.18767191234161518, + "grad_norm": 1.457036731699432, + "learning_rate": 3.7449028013373074e-06, + "loss": 1.0116, + "step": 2081 + }, + { + "epoch": 0.18776209586508544, + "grad_norm": 1.400742873577268, + "learning_rate": 3.7446172226488485e-06, + "loss": 0.9781, + "step": 2082 + }, + { + "epoch": 0.1878522793885557, + "grad_norm": 1.60587478292539, + "learning_rate": 3.7443314951000285e-06, + "loss": 1.0414, + "step": 2083 + }, + { + "epoch": 0.18794246291202596, + "grad_norm": 1.8743508168765932, + "learning_rate": 3.7440456187152276e-06, + "loss": 0.9612, + "step": 2084 + }, + { + "epoch": 0.18803264643549625, + "grad_norm": 1.7733337775327849, + "learning_rate": 3.7437595935188377e-06, + "loss": 1.0688, + "step": 2085 + }, + { + "epoch": 0.1881228299589665, + "grad_norm": 1.867252620056251, + "learning_rate": 3.7434734195352647e-06, + "loss": 0.9726, + "step": 2086 + }, + { + "epoch": 0.18821301348243677, + "grad_norm": 0.7550795522045811, + "learning_rate": 3.743187096788926e-06, + "loss": 0.8108, + "step": 2087 + }, + { + "epoch": 0.18830319700590703, + "grad_norm": 1.5479737820191677, + "learning_rate": 3.7429006253042524e-06, + "loss": 0.9852, + "step": 2088 + }, + { + "epoch": 0.1883933805293773, + "grad_norm": 1.663360447608459, + "learning_rate": 3.7426140051056867e-06, + "loss": 0.921, + "step": 2089 + }, + { + "epoch": 0.18848356405284755, + "grad_norm": 2.2131394922255443, + "learning_rate": 3.7423272362176856e-06, + "loss": 0.9161, + "step": 2090 + }, + { + "epoch": 0.1885737475763178, + "grad_norm": 3.9094551059922713, + "learning_rate": 3.742040318664718e-06, + "loss": 1.0245, + "step": 2091 + }, + { + "epoch": 0.18866393109978807, + "grad_norm": 1.596767411461526, + "learning_rate": 3.7417532524712643e-06, + "loss": 1.0303, + "step": 2092 + }, + { + "epoch": 0.18875411462325833, + "grad_norm": 1.3888426942560586, + "learning_rate": 3.7414660376618195e-06, + "loss": 1.0091, + "step": 2093 + }, + { + "epoch": 0.1888442981467286, + "grad_norm": 0.7309535248799084, + "learning_rate": 3.74117867426089e-06, + "loss": 0.8141, + "step": 2094 + }, + { + "epoch": 0.18893448167019886, + "grad_norm": 1.476412568730319, + "learning_rate": 3.7408911622929954e-06, + "loss": 0.9601, + "step": 2095 + }, + { + "epoch": 0.18902466519366912, + "grad_norm": 1.418202362573087, + "learning_rate": 3.740603501782668e-06, + "loss": 1.0135, + "step": 2096 + }, + { + "epoch": 0.18911484871713938, + "grad_norm": 1.5860799340240224, + "learning_rate": 3.7403156927544516e-06, + "loss": 0.9222, + "step": 2097 + }, + { + "epoch": 0.18920503224060964, + "grad_norm": 1.4747379878463975, + "learning_rate": 
3.740027735232904e-06, + "loss": 0.9018, + "step": 2098 + }, + { + "epoch": 0.1892952157640799, + "grad_norm": 1.445066771367021, + "learning_rate": 3.7397396292425966e-06, + "loss": 0.9569, + "step": 2099 + }, + { + "epoch": 0.18938539928755016, + "grad_norm": 1.6886435978234042, + "learning_rate": 3.7394513748081105e-06, + "loss": 0.9573, + "step": 2100 + }, + { + "epoch": 0.18947558281102042, + "grad_norm": 1.425491178085876, + "learning_rate": 3.7391629719540418e-06, + "loss": 1.0062, + "step": 2101 + }, + { + "epoch": 0.18956576633449068, + "grad_norm": 1.6873246561010924, + "learning_rate": 3.7388744207049998e-06, + "loss": 0.9678, + "step": 2102 + }, + { + "epoch": 0.18965594985796094, + "grad_norm": 1.7680491457404515, + "learning_rate": 3.7385857210856027e-06, + "loss": 0.9551, + "step": 2103 + }, + { + "epoch": 0.1897461333814312, + "grad_norm": 1.378118879170568, + "learning_rate": 3.738296873120486e-06, + "loss": 0.9612, + "step": 2104 + }, + { + "epoch": 0.18983631690490146, + "grad_norm": 2.272601416310879, + "learning_rate": 3.7380078768342955e-06, + "loss": 0.9607, + "step": 2105 + }, + { + "epoch": 0.18992650042837173, + "grad_norm": 1.6103820612825954, + "learning_rate": 3.7377187322516895e-06, + "loss": 1.0029, + "step": 2106 + }, + { + "epoch": 0.19001668395184199, + "grad_norm": 1.48133667639954, + "learning_rate": 3.7374294393973395e-06, + "loss": 0.9373, + "step": 2107 + }, + { + "epoch": 0.19010686747531225, + "grad_norm": 1.490320125727502, + "learning_rate": 3.7371399982959294e-06, + "loss": 1.0368, + "step": 2108 + }, + { + "epoch": 0.19019705099878254, + "grad_norm": 1.5812528979610379, + "learning_rate": 3.7368504089721565e-06, + "loss": 0.8909, + "step": 2109 + }, + { + "epoch": 0.1902872345222528, + "grad_norm": 1.339698805907024, + "learning_rate": 3.73656067145073e-06, + "loss": 0.9258, + "step": 2110 + }, + { + "epoch": 0.19037741804572306, + "grad_norm": 1.6020920040985653, + "learning_rate": 3.736270785756371e-06, + "loss": 1.0215, + "step": 2111 + }, + { + "epoch": 0.19046760156919332, + "grad_norm": 1.8294875455121196, + "learning_rate": 3.7359807519138156e-06, + "loss": 0.9786, + "step": 2112 + }, + { + "epoch": 0.19055778509266358, + "grad_norm": 1.266551961640838, + "learning_rate": 3.73569056994781e-06, + "loss": 0.9762, + "step": 2113 + }, + { + "epoch": 0.19064796861613384, + "grad_norm": 1.7792964060141052, + "learning_rate": 3.7354002398831144e-06, + "loss": 1.0174, + "step": 2114 + }, + { + "epoch": 0.1907381521396041, + "grad_norm": 1.8835469313269013, + "learning_rate": 3.7351097617445015e-06, + "loss": 0.9729, + "step": 2115 + }, + { + "epoch": 0.19082833566307436, + "grad_norm": 0.9809344883897141, + "learning_rate": 3.7348191355567567e-06, + "loss": 0.8468, + "step": 2116 + }, + { + "epoch": 0.19091851918654462, + "grad_norm": 1.5484154477363148, + "learning_rate": 3.734528361344677e-06, + "loss": 1.0037, + "step": 2117 + }, + { + "epoch": 0.19100870271001488, + "grad_norm": 1.7220889171500893, + "learning_rate": 3.734237439133074e-06, + "loss": 0.9698, + "step": 2118 + }, + { + "epoch": 0.19109888623348514, + "grad_norm": 1.586870642434831, + "learning_rate": 3.7339463689467702e-06, + "loss": 1.0231, + "step": 2119 + }, + { + "epoch": 0.1911890697569554, + "grad_norm": 1.4548570708241753, + "learning_rate": 3.733655150810601e-06, + "loss": 0.9446, + "step": 2120 + }, + { + "epoch": 0.19127925328042567, + "grad_norm": 1.4709414102904725, + "learning_rate": 3.7333637847494154e-06, + "loss": 1.1067, + "step": 2121 + }, + { + "epoch": 
0.19136943680389593, + "grad_norm": 1.4407370236216361, + "learning_rate": 3.7330722707880734e-06, + "loss": 1.133, + "step": 2122 + }, + { + "epoch": 0.1914596203273662, + "grad_norm": 1.4799200566068234, + "learning_rate": 3.7327806089514497e-06, + "loss": 0.8888, + "step": 2123 + }, + { + "epoch": 0.19154980385083645, + "grad_norm": 1.863434215480706, + "learning_rate": 3.7324887992644297e-06, + "loss": 1.0728, + "step": 2124 + }, + { + "epoch": 0.1916399873743067, + "grad_norm": 0.8513939366926521, + "learning_rate": 3.7321968417519123e-06, + "loss": 0.8057, + "step": 2125 + }, + { + "epoch": 0.19173017089777697, + "grad_norm": 1.3618850772759683, + "learning_rate": 3.7319047364388097e-06, + "loss": 0.9645, + "step": 2126 + }, + { + "epoch": 0.19182035442124723, + "grad_norm": 1.000048159393315, + "learning_rate": 3.7316124833500453e-06, + "loss": 0.7902, + "step": 2127 + }, + { + "epoch": 0.1919105379447175, + "grad_norm": 1.307914033546645, + "learning_rate": 3.731320082510556e-06, + "loss": 0.9586, + "step": 2128 + }, + { + "epoch": 0.19200072146818775, + "grad_norm": 1.4483198066638727, + "learning_rate": 3.7310275339452906e-06, + "loss": 0.9985, + "step": 2129 + }, + { + "epoch": 0.192090904991658, + "grad_norm": 1.8376404351785731, + "learning_rate": 3.7307348376792113e-06, + "loss": 0.9643, + "step": 2130 + }, + { + "epoch": 0.19218108851512827, + "grad_norm": 1.345931522381796, + "learning_rate": 3.730441993737292e-06, + "loss": 0.9648, + "step": 2131 + }, + { + "epoch": 0.19227127203859856, + "grad_norm": 1.6357915250255262, + "learning_rate": 3.7301490021445205e-06, + "loss": 0.9598, + "step": 2132 + }, + { + "epoch": 0.19236145556206882, + "grad_norm": 7.202820214962489, + "learning_rate": 3.7298558629258966e-06, + "loss": 0.9134, + "step": 2133 + }, + { + "epoch": 0.19245163908553908, + "grad_norm": 1.3548152348576603, + "learning_rate": 3.7295625761064314e-06, + "loss": 1.0162, + "step": 2134 + }, + { + "epoch": 0.19254182260900934, + "grad_norm": 1.8015295364791788, + "learning_rate": 3.7292691417111504e-06, + "loss": 0.9414, + "step": 2135 + }, + { + "epoch": 0.1926320061324796, + "grad_norm": 1.4570908930539397, + "learning_rate": 3.728975559765092e-06, + "loss": 0.9363, + "step": 2136 + }, + { + "epoch": 0.19272218965594987, + "grad_norm": 1.6604564440217293, + "learning_rate": 3.728681830293305e-06, + "loss": 1.0076, + "step": 2137 + }, + { + "epoch": 0.19281237317942013, + "grad_norm": 1.542015766449118, + "learning_rate": 3.7283879533208523e-06, + "loss": 0.9318, + "step": 2138 + }, + { + "epoch": 0.1929025567028904, + "grad_norm": 1.4780471039261776, + "learning_rate": 3.7280939288728094e-06, + "loss": 0.951, + "step": 2139 + }, + { + "epoch": 0.19299274022636065, + "grad_norm": 1.5521381889053874, + "learning_rate": 3.7277997569742637e-06, + "loss": 0.9432, + "step": 2140 + }, + { + "epoch": 0.1930829237498309, + "grad_norm": 2.09935000214794, + "learning_rate": 3.7275054376503155e-06, + "loss": 0.9329, + "step": 2141 + }, + { + "epoch": 0.19317310727330117, + "grad_norm": 1.6976869189761317, + "learning_rate": 3.7272109709260783e-06, + "loss": 0.9175, + "step": 2142 + }, + { + "epoch": 0.19326329079677143, + "grad_norm": 0.7975120615869189, + "learning_rate": 3.7269163568266774e-06, + "loss": 0.812, + "step": 2143 + }, + { + "epoch": 0.1933534743202417, + "grad_norm": 1.693408864144008, + "learning_rate": 3.7266215953772512e-06, + "loss": 0.9565, + "step": 2144 + }, + { + "epoch": 0.19344365784371195, + "grad_norm": 1.628640791305026, + "learning_rate": 
3.7263266866029492e-06, + "loss": 1.0088, + "step": 2145 + }, + { + "epoch": 0.1935338413671822, + "grad_norm": 1.3367818651398349, + "learning_rate": 3.726031630528936e-06, + "loss": 0.9361, + "step": 2146 + }, + { + "epoch": 0.19362402489065247, + "grad_norm": 1.2934328381532028, + "learning_rate": 3.7257364271803865e-06, + "loss": 0.8652, + "step": 2147 + }, + { + "epoch": 0.19371420841412273, + "grad_norm": 1.8663980898193708, + "learning_rate": 3.7254410765824896e-06, + "loss": 0.9047, + "step": 2148 + }, + { + "epoch": 0.193804391937593, + "grad_norm": 1.7073200601292444, + "learning_rate": 3.725145578760446e-06, + "loss": 0.8901, + "step": 2149 + }, + { + "epoch": 0.19389457546106326, + "grad_norm": 1.9152431661336773, + "learning_rate": 3.7248499337394696e-06, + "loss": 0.8844, + "step": 2150 + }, + { + "epoch": 0.19398475898453352, + "grad_norm": 1.8288809362694125, + "learning_rate": 3.7245541415447848e-06, + "loss": 1.0672, + "step": 2151 + }, + { + "epoch": 0.19407494250800378, + "grad_norm": 1.5282012015535813, + "learning_rate": 3.724258202201633e-06, + "loss": 0.9952, + "step": 2152 + }, + { + "epoch": 0.19416512603147404, + "grad_norm": 1.4694828883340034, + "learning_rate": 3.7239621157352633e-06, + "loss": 0.9928, + "step": 2153 + }, + { + "epoch": 0.1942553095549443, + "grad_norm": 1.481817270455237, + "learning_rate": 3.7236658821709403e-06, + "loss": 0.9592, + "step": 2154 + }, + { + "epoch": 0.19434549307841456, + "grad_norm": 1.2947559279125487, + "learning_rate": 3.7233695015339404e-06, + "loss": 1.0147, + "step": 2155 + }, + { + "epoch": 0.19443567660188485, + "grad_norm": 1.5959822785246922, + "learning_rate": 3.7230729738495513e-06, + "loss": 0.9556, + "step": 2156 + }, + { + "epoch": 0.1945258601253551, + "grad_norm": 1.3140608271566623, + "learning_rate": 3.722776299143075e-06, + "loss": 0.9679, + "step": 2157 + }, + { + "epoch": 0.19461604364882537, + "grad_norm": 0.9669637208561316, + "learning_rate": 3.722479477439826e-06, + "loss": 0.8162, + "step": 2158 + }, + { + "epoch": 0.19470622717229563, + "grad_norm": 1.32745124332238, + "learning_rate": 3.7221825087651306e-06, + "loss": 0.9494, + "step": 2159 + }, + { + "epoch": 0.1947964106957659, + "grad_norm": 1.8808002403208823, + "learning_rate": 3.7218853931443274e-06, + "loss": 0.9032, + "step": 2160 + }, + { + "epoch": 0.19488659421923615, + "grad_norm": 1.2647969869318378, + "learning_rate": 3.721588130602768e-06, + "loss": 0.9106, + "step": 2161 + }, + { + "epoch": 0.19497677774270641, + "grad_norm": 1.642025812756953, + "learning_rate": 3.7212907211658164e-06, + "loss": 0.9465, + "step": 2162 + }, + { + "epoch": 0.19506696126617667, + "grad_norm": 1.3874068151576342, + "learning_rate": 3.72099316485885e-06, + "loss": 0.9285, + "step": 2163 + }, + { + "epoch": 0.19515714478964694, + "grad_norm": 1.3961569235436955, + "learning_rate": 3.720695461707256e-06, + "loss": 1.0154, + "step": 2164 + }, + { + "epoch": 0.1952473283131172, + "grad_norm": 1.575448892595549, + "learning_rate": 3.7203976117364383e-06, + "loss": 0.9957, + "step": 2165 + }, + { + "epoch": 0.19533751183658746, + "grad_norm": 1.7711176550505097, + "learning_rate": 3.7200996149718105e-06, + "loss": 0.9473, + "step": 2166 + }, + { + "epoch": 0.19542769536005772, + "grad_norm": 1.1885953168381038, + "learning_rate": 3.7198014714387985e-06, + "loss": 0.9587, + "step": 2167 + }, + { + "epoch": 0.19551787888352798, + "grad_norm": 1.526440599742098, + "learning_rate": 3.7195031811628422e-06, + "loss": 0.9022, + "step": 2168 + }, + { + "epoch": 
0.19560806240699824, + "grad_norm": 1.9643617801057571, + "learning_rate": 3.719204744169393e-06, + "loss": 1.0081, + "step": 2169 + }, + { + "epoch": 0.1956982459304685, + "grad_norm": 1.5105819962440665, + "learning_rate": 3.718906160483916e-06, + "loss": 0.9432, + "step": 2170 + }, + { + "epoch": 0.19578842945393876, + "grad_norm": 1.5070703290793637, + "learning_rate": 3.7186074301318868e-06, + "loss": 0.941, + "step": 2171 + }, + { + "epoch": 0.19587861297740902, + "grad_norm": 1.3371280473836817, + "learning_rate": 3.7183085531387957e-06, + "loss": 1.0419, + "step": 2172 + }, + { + "epoch": 0.19596879650087928, + "grad_norm": 1.4391787304986488, + "learning_rate": 3.7180095295301443e-06, + "loss": 0.9527, + "step": 2173 + }, + { + "epoch": 0.19605898002434954, + "grad_norm": 1.155078294255409, + "learning_rate": 3.7177103593314465e-06, + "loss": 1.0317, + "step": 2174 + }, + { + "epoch": 0.1961491635478198, + "grad_norm": 1.4854950543979017, + "learning_rate": 3.7174110425682297e-06, + "loss": 1.0012, + "step": 2175 + }, + { + "epoch": 0.19623934707129007, + "grad_norm": 2.8282940218667503, + "learning_rate": 3.7171115792660333e-06, + "loss": 1.097, + "step": 2176 + }, + { + "epoch": 0.19632953059476033, + "grad_norm": 1.8032397122974844, + "learning_rate": 3.7168119694504083e-06, + "loss": 1.0304, + "step": 2177 + }, + { + "epoch": 0.1964197141182306, + "grad_norm": 1.4756184946100166, + "learning_rate": 3.71651221314692e-06, + "loss": 0.9684, + "step": 2178 + }, + { + "epoch": 0.19650989764170085, + "grad_norm": 1.5049240829482469, + "learning_rate": 3.716212310381145e-06, + "loss": 0.9758, + "step": 2179 + }, + { + "epoch": 0.19660008116517114, + "grad_norm": 1.8507056205114645, + "learning_rate": 3.7159122611786725e-06, + "loss": 1.0169, + "step": 2180 + }, + { + "epoch": 0.1966902646886414, + "grad_norm": 1.6275142146367276, + "learning_rate": 3.7156120655651045e-06, + "loss": 0.9568, + "step": 2181 + }, + { + "epoch": 0.19678044821211166, + "grad_norm": 1.2985582573255838, + "learning_rate": 3.7153117235660553e-06, + "loss": 0.9587, + "step": 2182 + }, + { + "epoch": 0.19687063173558192, + "grad_norm": 0.9495533796816629, + "learning_rate": 3.7150112352071514e-06, + "loss": 0.828, + "step": 2183 + }, + { + "epoch": 0.19696081525905218, + "grad_norm": 1.6138058010532692, + "learning_rate": 3.7147106005140326e-06, + "loss": 0.9393, + "step": 2184 + }, + { + "epoch": 0.19705099878252244, + "grad_norm": 0.997746999223288, + "learning_rate": 3.714409819512351e-06, + "loss": 0.8121, + "step": 2185 + }, + { + "epoch": 0.1971411823059927, + "grad_norm": 1.4913835207054922, + "learning_rate": 3.7141088922277695e-06, + "loss": 0.9988, + "step": 2186 + }, + { + "epoch": 0.19723136582946296, + "grad_norm": 1.9690724668869592, + "learning_rate": 3.7138078186859664e-06, + "loss": 1.0155, + "step": 2187 + }, + { + "epoch": 0.19732154935293322, + "grad_norm": 1.4649273250898147, + "learning_rate": 3.7135065989126303e-06, + "loss": 0.9847, + "step": 2188 + }, + { + "epoch": 0.19741173287640348, + "grad_norm": 1.6508976084279678, + "learning_rate": 3.713205232933463e-06, + "loss": 0.9028, + "step": 2189 + }, + { + "epoch": 0.19750191639987374, + "grad_norm": 1.5704105023979782, + "learning_rate": 3.7129037207741792e-06, + "loss": 0.9841, + "step": 2190 + }, + { + "epoch": 0.197592099923344, + "grad_norm": 1.7546347143941474, + "learning_rate": 3.7126020624605046e-06, + "loss": 0.982, + "step": 2191 + }, + { + "epoch": 0.19768228344681427, + "grad_norm": 1.4803669675599285, + "learning_rate": 
3.7123002580181785e-06, + "loss": 0.9254, + "step": 2192 + }, + { + "epoch": 0.19777246697028453, + "grad_norm": 1.294644241144352, + "learning_rate": 3.7119983074729532e-06, + "loss": 1.0431, + "step": 2193 + }, + { + "epoch": 0.1978626504937548, + "grad_norm": 1.565699924648264, + "learning_rate": 3.7116962108505926e-06, + "loss": 1.022, + "step": 2194 + }, + { + "epoch": 0.19795283401722505, + "grad_norm": 1.2985249789409261, + "learning_rate": 3.711393968176873e-06, + "loss": 1.0736, + "step": 2195 + }, + { + "epoch": 0.1980430175406953, + "grad_norm": 1.2502475970145421, + "learning_rate": 3.711091579477584e-06, + "loss": 0.9884, + "step": 2196 + }, + { + "epoch": 0.19813320106416557, + "grad_norm": 1.8159691069239714, + "learning_rate": 3.7107890447785255e-06, + "loss": 0.9949, + "step": 2197 + }, + { + "epoch": 0.19822338458763583, + "grad_norm": 1.6802558824401876, + "learning_rate": 3.710486364105513e-06, + "loss": 1.0265, + "step": 2198 + }, + { + "epoch": 0.1983135681111061, + "grad_norm": 1.6837257988428778, + "learning_rate": 3.7101835374843728e-06, + "loss": 0.9658, + "step": 2199 + }, + { + "epoch": 0.19840375163457635, + "grad_norm": 1.359877964673307, + "learning_rate": 3.7098805649409427e-06, + "loss": 0.9976, + "step": 2200 + }, + { + "epoch": 0.1984939351580466, + "grad_norm": 1.4841881082232908, + "learning_rate": 3.7095774465010748e-06, + "loss": 1.0407, + "step": 2201 + }, + { + "epoch": 0.19858411868151687, + "grad_norm": 1.5981704145713718, + "learning_rate": 3.7092741821906328e-06, + "loss": 0.9621, + "step": 2202 + }, + { + "epoch": 0.19867430220498714, + "grad_norm": 1.7857452689616446, + "learning_rate": 3.708970772035493e-06, + "loss": 0.9543, + "step": 2203 + }, + { + "epoch": 0.19876448572845742, + "grad_norm": 1.36153183757865, + "learning_rate": 3.7086672160615427e-06, + "loss": 0.9419, + "step": 2204 + }, + { + "epoch": 0.19885466925192768, + "grad_norm": 1.3424140143962158, + "learning_rate": 3.7083635142946852e-06, + "loss": 0.957, + "step": 2205 + }, + { + "epoch": 0.19894485277539795, + "grad_norm": 1.442637178982257, + "learning_rate": 3.7080596667608327e-06, + "loss": 1.0031, + "step": 2206 + }, + { + "epoch": 0.1990350362988682, + "grad_norm": 1.3118692426477945, + "learning_rate": 3.707755673485911e-06, + "loss": 1.0169, + "step": 2207 + }, + { + "epoch": 0.19912521982233847, + "grad_norm": 1.611681498301815, + "learning_rate": 3.7074515344958584e-06, + "loss": 0.8906, + "step": 2208 + }, + { + "epoch": 0.19921540334580873, + "grad_norm": 0.9381914132313421, + "learning_rate": 3.707147249816627e-06, + "loss": 0.8352, + "step": 2209 + }, + { + "epoch": 0.199305586869279, + "grad_norm": 1.3893178732964293, + "learning_rate": 3.706842819474178e-06, + "loss": 1.0002, + "step": 2210 + }, + { + "epoch": 0.19939577039274925, + "grad_norm": 1.712031811387427, + "learning_rate": 3.706538243494489e-06, + "loss": 0.9142, + "step": 2211 + }, + { + "epoch": 0.1994859539162195, + "grad_norm": 1.8649790324446358, + "learning_rate": 3.706233521903547e-06, + "loss": 0.9492, + "step": 2212 + }, + { + "epoch": 0.19957613743968977, + "grad_norm": 1.7260182070782948, + "learning_rate": 3.705928654727353e-06, + "loss": 1.056, + "step": 2213 + }, + { + "epoch": 0.19966632096316003, + "grad_norm": 1.4237842526891378, + "learning_rate": 3.7056236419919195e-06, + "loss": 0.9441, + "step": 2214 + }, + { + "epoch": 0.1997565044866303, + "grad_norm": 1.4029513772028412, + "learning_rate": 3.705318483723273e-06, + "loss": 1.029, + "step": 2215 + }, + { + "epoch": 
0.19984668801010055, + "grad_norm": 1.6447487005508978, + "learning_rate": 3.7050131799474493e-06, + "loss": 0.9531, + "step": 2216 + }, + { + "epoch": 0.19993687153357081, + "grad_norm": 1.3991806357350776, + "learning_rate": 3.7047077306905e-06, + "loss": 0.9729, + "step": 2217 + }, + { + "epoch": 0.20002705505704108, + "grad_norm": 1.3568748195091067, + "learning_rate": 3.704402135978488e-06, + "loss": 1.0152, + "step": 2218 + }, + { + "epoch": 0.20011723858051134, + "grad_norm": 2.0005398260193425, + "learning_rate": 3.7040963958374877e-06, + "loss": 0.9979, + "step": 2219 + }, + { + "epoch": 0.2002074221039816, + "grad_norm": 1.3502884962742114, + "learning_rate": 3.7037905102935864e-06, + "loss": 1.0053, + "step": 2220 + }, + { + "epoch": 0.20029760562745186, + "grad_norm": 1.2993414531302039, + "learning_rate": 3.7034844793728837e-06, + "loss": 0.975, + "step": 2221 + }, + { + "epoch": 0.20038778915092212, + "grad_norm": 1.3270582964265598, + "learning_rate": 3.7031783031014933e-06, + "loss": 0.8657, + "step": 2222 + }, + { + "epoch": 0.20047797267439238, + "grad_norm": 1.409194956780688, + "learning_rate": 3.702871981505538e-06, + "loss": 0.9525, + "step": 2223 + }, + { + "epoch": 0.20056815619786264, + "grad_norm": 1.4126399772625764, + "learning_rate": 3.7025655146111563e-06, + "loss": 0.9094, + "step": 2224 + }, + { + "epoch": 0.2006583397213329, + "grad_norm": 0.730567884603044, + "learning_rate": 3.702258902444497e-06, + "loss": 0.8915, + "step": 2225 + }, + { + "epoch": 0.20074852324480316, + "grad_norm": 1.3814956084190921, + "learning_rate": 3.701952145031722e-06, + "loss": 1.0293, + "step": 2226 + }, + { + "epoch": 0.20083870676827342, + "grad_norm": 1.4855862945534546, + "learning_rate": 3.701645242399005e-06, + "loss": 1.0538, + "step": 2227 + }, + { + "epoch": 0.2009288902917437, + "grad_norm": 1.6160021874724586, + "learning_rate": 3.701338194572533e-06, + "loss": 1.0471, + "step": 2228 + }, + { + "epoch": 0.20101907381521397, + "grad_norm": 1.3552575290360316, + "learning_rate": 3.7010310015785056e-06, + "loss": 0.9615, + "step": 2229 + }, + { + "epoch": 0.20110925733868423, + "grad_norm": 1.6264090664454671, + "learning_rate": 3.700723663443134e-06, + "loss": 0.9174, + "step": 2230 + }, + { + "epoch": 0.2011994408621545, + "grad_norm": 0.9832790571255299, + "learning_rate": 3.7004161801926416e-06, + "loss": 0.814, + "step": 2231 + }, + { + "epoch": 0.20128962438562475, + "grad_norm": 1.5653294693629995, + "learning_rate": 3.7001085518532643e-06, + "loss": 0.9967, + "step": 2232 + }, + { + "epoch": 0.20137980790909502, + "grad_norm": 1.5225993663013362, + "learning_rate": 3.6998007784512515e-06, + "loss": 0.9726, + "step": 2233 + }, + { + "epoch": 0.20146999143256528, + "grad_norm": 1.410585755430618, + "learning_rate": 3.6994928600128637e-06, + "loss": 0.8561, + "step": 2234 + }, + { + "epoch": 0.20156017495603554, + "grad_norm": 1.5342193928612422, + "learning_rate": 3.6991847965643742e-06, + "loss": 0.9476, + "step": 2235 + }, + { + "epoch": 0.2016503584795058, + "grad_norm": 1.4527072100806186, + "learning_rate": 3.698876588132068e-06, + "loss": 0.9347, + "step": 2236 + }, + { + "epoch": 0.20174054200297606, + "grad_norm": 1.2934109948542274, + "learning_rate": 3.6985682347422446e-06, + "loss": 0.9282, + "step": 2237 + }, + { + "epoch": 0.20183072552644632, + "grad_norm": 1.525814998661696, + "learning_rate": 3.698259736421213e-06, + "loss": 0.9836, + "step": 2238 + }, + { + "epoch": 0.20192090904991658, + "grad_norm": 1.6277280063564066, + "learning_rate": 
3.697951093195297e-06, + "loss": 0.9508, + "step": 2239 + }, + { + "epoch": 0.20201109257338684, + "grad_norm": 1.348724472610395, + "learning_rate": 3.6976423050908307e-06, + "loss": 0.9443, + "step": 2240 + }, + { + "epoch": 0.2021012760968571, + "grad_norm": 1.3832346385004244, + "learning_rate": 3.697333372134163e-06, + "loss": 0.9632, + "step": 2241 + }, + { + "epoch": 0.20219145962032736, + "grad_norm": 1.7524512017784883, + "learning_rate": 3.697024294351653e-06, + "loss": 1.0714, + "step": 2242 + }, + { + "epoch": 0.20228164314379762, + "grad_norm": 2.2678747122805984, + "learning_rate": 3.696715071769672e-06, + "loss": 0.9344, + "step": 2243 + }, + { + "epoch": 0.20237182666726788, + "grad_norm": 1.4901210996840601, + "learning_rate": 3.696405704414606e-06, + "loss": 0.9753, + "step": 2244 + }, + { + "epoch": 0.20246201019073815, + "grad_norm": 1.362457507459544, + "learning_rate": 3.6960961923128514e-06, + "loss": 1.0743, + "step": 2245 + }, + { + "epoch": 0.2025521937142084, + "grad_norm": 1.9125376953199895, + "learning_rate": 3.6957865354908177e-06, + "loss": 0.9156, + "step": 2246 + }, + { + "epoch": 0.20264237723767867, + "grad_norm": 1.8321824001271858, + "learning_rate": 3.6954767339749262e-06, + "loss": 0.9906, + "step": 2247 + }, + { + "epoch": 0.20273256076114893, + "grad_norm": 1.776915446225987, + "learning_rate": 3.6951667877916113e-06, + "loss": 0.9551, + "step": 2248 + }, + { + "epoch": 0.2028227442846192, + "grad_norm": 1.4659582348920293, + "learning_rate": 3.694856696967319e-06, + "loss": 0.9833, + "step": 2249 + }, + { + "epoch": 0.20291292780808945, + "grad_norm": 1.574408205052449, + "learning_rate": 3.6945464615285077e-06, + "loss": 0.9581, + "step": 2250 + }, + { + "epoch": 0.2030031113315597, + "grad_norm": 1.0247225665077557, + "learning_rate": 3.694236081501648e-06, + "loss": 0.8997, + "step": 2251 + }, + { + "epoch": 0.20309329485503, + "grad_norm": 1.5021838344579248, + "learning_rate": 3.6939255569132246e-06, + "loss": 0.9382, + "step": 2252 + }, + { + "epoch": 0.20318347837850026, + "grad_norm": 0.9197071902117727, + "learning_rate": 3.693614887789733e-06, + "loss": 0.8542, + "step": 2253 + }, + { + "epoch": 0.20327366190197052, + "grad_norm": 0.802922321973389, + "learning_rate": 3.69330407415768e-06, + "loss": 0.8514, + "step": 2254 + }, + { + "epoch": 0.20336384542544078, + "grad_norm": 1.5315219092774357, + "learning_rate": 3.6929931160435867e-06, + "loss": 0.9776, + "step": 2255 + }, + { + "epoch": 0.20345402894891104, + "grad_norm": 1.4876783857049962, + "learning_rate": 3.6926820134739858e-06, + "loss": 1.0244, + "step": 2256 + }, + { + "epoch": 0.2035442124723813, + "grad_norm": 1.4154103636393083, + "learning_rate": 3.692370766475422e-06, + "loss": 0.9121, + "step": 2257 + }, + { + "epoch": 0.20363439599585156, + "grad_norm": 1.4894330231205224, + "learning_rate": 3.692059375074453e-06, + "loss": 1.0564, + "step": 2258 + }, + { + "epoch": 0.20372457951932182, + "grad_norm": 1.3100647814217456, + "learning_rate": 3.6917478392976475e-06, + "loss": 1.0341, + "step": 2259 + }, + { + "epoch": 0.20381476304279209, + "grad_norm": 1.691229196492409, + "learning_rate": 3.691436159171589e-06, + "loss": 1.0296, + "step": 2260 + }, + { + "epoch": 0.20390494656626235, + "grad_norm": 1.62590243883692, + "learning_rate": 3.6911243347228703e-06, + "loss": 0.952, + "step": 2261 + }, + { + "epoch": 0.2039951300897326, + "grad_norm": 0.6993372939226145, + "learning_rate": 3.690812365978099e-06, + "loss": 0.7507, + "step": 2262 + }, + { + "epoch": 
0.20408531361320287, + "grad_norm": 1.3146004898878236, + "learning_rate": 3.690500252963893e-06, + "loss": 0.9335, + "step": 2263 + }, + { + "epoch": 0.20417549713667313, + "grad_norm": 1.3880613892471052, + "learning_rate": 3.6901879957068846e-06, + "loss": 0.9618, + "step": 2264 + }, + { + "epoch": 0.2042656806601434, + "grad_norm": 1.4421076528281727, + "learning_rate": 3.689875594233717e-06, + "loss": 1.0614, + "step": 2265 + }, + { + "epoch": 0.20435586418361365, + "grad_norm": 1.993743169408117, + "learning_rate": 3.689563048571046e-06, + "loss": 0.9483, + "step": 2266 + }, + { + "epoch": 0.2044460477070839, + "grad_norm": 1.324948540174107, + "learning_rate": 3.6892503587455395e-06, + "loss": 0.9708, + "step": 2267 + }, + { + "epoch": 0.20453623123055417, + "grad_norm": 1.4852738720642134, + "learning_rate": 3.6889375247838766e-06, + "loss": 0.9806, + "step": 2268 + }, + { + "epoch": 0.20462641475402443, + "grad_norm": 1.3712612386710643, + "learning_rate": 3.688624546712753e-06, + "loss": 0.969, + "step": 2269 + }, + { + "epoch": 0.2047165982774947, + "grad_norm": 1.331500397894496, + "learning_rate": 3.688311424558871e-06, + "loss": 0.9303, + "step": 2270 + }, + { + "epoch": 0.20480678180096495, + "grad_norm": 1.9994066669595263, + "learning_rate": 3.6879981583489496e-06, + "loss": 0.8782, + "step": 2271 + }, + { + "epoch": 0.20489696532443522, + "grad_norm": 1.4043969979492323, + "learning_rate": 3.687684748109718e-06, + "loss": 0.9876, + "step": 2272 + }, + { + "epoch": 0.20498714884790548, + "grad_norm": 1.6905752347436696, + "learning_rate": 3.6873711938679174e-06, + "loss": 1.0297, + "step": 2273 + }, + { + "epoch": 0.20507733237137574, + "grad_norm": 1.431462254673156, + "learning_rate": 3.6870574956503027e-06, + "loss": 0.9549, + "step": 2274 + }, + { + "epoch": 0.20516751589484603, + "grad_norm": 1.6832511535577124, + "learning_rate": 3.68674365348364e-06, + "loss": 0.97, + "step": 2275 + }, + { + "epoch": 0.2052576994183163, + "grad_norm": 1.5574579139521876, + "learning_rate": 3.6864296673947086e-06, + "loss": 0.9722, + "step": 2276 + }, + { + "epoch": 0.20534788294178655, + "grad_norm": 1.395661096716797, + "learning_rate": 3.686115537410298e-06, + "loss": 0.9563, + "step": 2277 + }, + { + "epoch": 0.2054380664652568, + "grad_norm": 1.505767859066579, + "learning_rate": 3.685801263557214e-06, + "loss": 0.9236, + "step": 2278 + }, + { + "epoch": 0.20552824998872707, + "grad_norm": 1.5329891371370112, + "learning_rate": 3.68548684586227e-06, + "loss": 1.0539, + "step": 2279 + }, + { + "epoch": 0.20561843351219733, + "grad_norm": 1.5677327843805, + "learning_rate": 3.685172284352295e-06, + "loss": 0.9756, + "step": 2280 + }, + { + "epoch": 0.2057086170356676, + "grad_norm": 1.1377006563380399, + "learning_rate": 3.684857579054128e-06, + "loss": 0.9077, + "step": 2281 + }, + { + "epoch": 0.20579880055913785, + "grad_norm": 1.7859653500765893, + "learning_rate": 3.6845427299946233e-06, + "loss": 1.034, + "step": 2282 + }, + { + "epoch": 0.2058889840826081, + "grad_norm": 1.4290323058975358, + "learning_rate": 3.6842277372006434e-06, + "loss": 1.068, + "step": 2283 + }, + { + "epoch": 0.20597916760607837, + "grad_norm": 1.4220996721105705, + "learning_rate": 3.6839126006990664e-06, + "loss": 0.95, + "step": 2284 + }, + { + "epoch": 0.20606935112954863, + "grad_norm": 1.2968422299576576, + "learning_rate": 3.6835973205167818e-06, + "loss": 1.0228, + "step": 2285 + }, + { + "epoch": 0.2061595346530189, + "grad_norm": 1.5559664119341197, + "learning_rate": 
3.6832818966806904e-06, + "loss": 1.0072, + "step": 2286 + }, + { + "epoch": 0.20624971817648916, + "grad_norm": 1.564792477191012, + "learning_rate": 3.682966329217706e-06, + "loss": 1.0481, + "step": 2287 + }, + { + "epoch": 0.20633990169995942, + "grad_norm": 1.7099602403535843, + "learning_rate": 3.6826506181547543e-06, + "loss": 1.006, + "step": 2288 + }, + { + "epoch": 0.20643008522342968, + "grad_norm": 1.4805746317420412, + "learning_rate": 3.682334763518774e-06, + "loss": 0.9899, + "step": 2289 + }, + { + "epoch": 0.20652026874689994, + "grad_norm": 1.823498863619426, + "learning_rate": 3.6820187653367158e-06, + "loss": 0.9436, + "step": 2290 + }, + { + "epoch": 0.2066104522703702, + "grad_norm": 1.428870045582339, + "learning_rate": 3.6817026236355412e-06, + "loss": 0.8715, + "step": 2291 + }, + { + "epoch": 0.20670063579384046, + "grad_norm": 1.8578259723453965, + "learning_rate": 3.681386338442227e-06, + "loss": 0.965, + "step": 2292 + }, + { + "epoch": 0.20679081931731072, + "grad_norm": 1.374395844638436, + "learning_rate": 3.681069909783758e-06, + "loss": 0.9459, + "step": 2293 + }, + { + "epoch": 0.20688100284078098, + "grad_norm": 1.4244734945887512, + "learning_rate": 3.680753337687136e-06, + "loss": 0.9818, + "step": 2294 + }, + { + "epoch": 0.20697118636425124, + "grad_norm": 1.4908550764658677, + "learning_rate": 3.680436622179371e-06, + "loss": 0.9077, + "step": 2295 + }, + { + "epoch": 0.2070613698877215, + "grad_norm": 1.4754939173667092, + "learning_rate": 3.680119763287488e-06, + "loss": 0.9583, + "step": 2296 + }, + { + "epoch": 0.20715155341119176, + "grad_norm": 1.540959926512428, + "learning_rate": 3.6798027610385227e-06, + "loss": 0.9131, + "step": 2297 + }, + { + "epoch": 0.20724173693466202, + "grad_norm": 1.6680500805789915, + "learning_rate": 3.6794856154595235e-06, + "loss": 1.0215, + "step": 2298 + }, + { + "epoch": 0.2073319204581323, + "grad_norm": 1.8267474078458503, + "learning_rate": 3.6791683265775506e-06, + "loss": 0.9815, + "step": 2299 + }, + { + "epoch": 0.20742210398160257, + "grad_norm": 1.6898795411106542, + "learning_rate": 3.6788508944196773e-06, + "loss": 0.9369, + "step": 2300 + }, + { + "epoch": 0.20751228750507283, + "grad_norm": 1.2751537529484227, + "learning_rate": 3.678533319012989e-06, + "loss": 1.0402, + "step": 2301 + }, + { + "epoch": 0.2076024710285431, + "grad_norm": 1.407826324293403, + "learning_rate": 3.6782156003845826e-06, + "loss": 0.9275, + "step": 2302 + }, + { + "epoch": 0.20769265455201336, + "grad_norm": 0.8799155310708882, + "learning_rate": 3.6778977385615676e-06, + "loss": 0.824, + "step": 2303 + }, + { + "epoch": 0.20778283807548362, + "grad_norm": 1.4236637643313954, + "learning_rate": 3.6775797335710656e-06, + "loss": 1.0153, + "step": 2304 + }, + { + "epoch": 0.20787302159895388, + "grad_norm": 1.6248474049446824, + "learning_rate": 3.6772615854402105e-06, + "loss": 0.9246, + "step": 2305 + }, + { + "epoch": 0.20796320512242414, + "grad_norm": 1.2519290344491387, + "learning_rate": 3.6769432941961487e-06, + "loss": 0.8504, + "step": 2306 + }, + { + "epoch": 0.2080533886458944, + "grad_norm": 1.4166803733312199, + "learning_rate": 3.676624859866038e-06, + "loss": 0.9701, + "step": 2307 + }, + { + "epoch": 0.20814357216936466, + "grad_norm": 2.230341865952342, + "learning_rate": 3.67630628247705e-06, + "loss": 0.8989, + "step": 2308 + }, + { + "epoch": 0.20823375569283492, + "grad_norm": 1.4668933415089045, + "learning_rate": 3.675987562056367e-06, + "loss": 1.0276, + "step": 2309 + }, + { + "epoch": 
0.20832393921630518, + "grad_norm": 1.3600664243885607, + "learning_rate": 3.675668698631184e-06, + "loss": 1.0039, + "step": 2310 + }, + { + "epoch": 0.20841412273977544, + "grad_norm": 1.4905571944131815, + "learning_rate": 3.675349692228708e-06, + "loss": 1.0431, + "step": 2311 + }, + { + "epoch": 0.2085043062632457, + "grad_norm": 1.442354464139373, + "learning_rate": 3.6750305428761578e-06, + "loss": 0.9377, + "step": 2312 + }, + { + "epoch": 0.20859448978671596, + "grad_norm": 1.757125516799272, + "learning_rate": 3.674711250600766e-06, + "loss": 0.9908, + "step": 2313 + }, + { + "epoch": 0.20868467331018623, + "grad_norm": 0.7801152571473343, + "learning_rate": 3.6743918154297765e-06, + "loss": 0.7917, + "step": 2314 + }, + { + "epoch": 0.20877485683365649, + "grad_norm": 1.4262804803424736, + "learning_rate": 3.6740722373904446e-06, + "loss": 0.9597, + "step": 2315 + }, + { + "epoch": 0.20886504035712675, + "grad_norm": 1.2627032900010227, + "learning_rate": 3.6737525165100383e-06, + "loss": 0.9499, + "step": 2316 + }, + { + "epoch": 0.208955223880597, + "grad_norm": 1.526810184559922, + "learning_rate": 3.6734326528158385e-06, + "loss": 0.9955, + "step": 2317 + }, + { + "epoch": 0.20904540740406727, + "grad_norm": 1.262289290265641, + "learning_rate": 3.673112646335138e-06, + "loss": 0.9612, + "step": 2318 + }, + { + "epoch": 0.20913559092753753, + "grad_norm": 1.508223087363439, + "learning_rate": 3.672792497095241e-06, + "loss": 0.9269, + "step": 2319 + }, + { + "epoch": 0.2092257744510078, + "grad_norm": 1.6900811411773777, + "learning_rate": 3.672472205123464e-06, + "loss": 0.9747, + "step": 2320 + }, + { + "epoch": 0.20931595797447805, + "grad_norm": 1.6079150494345527, + "learning_rate": 3.6721517704471363e-06, + "loss": 0.9009, + "step": 2321 + }, + { + "epoch": 0.2094061414979483, + "grad_norm": 1.4209459224775984, + "learning_rate": 3.6718311930936e-06, + "loss": 1.001, + "step": 2322 + }, + { + "epoch": 0.2094963250214186, + "grad_norm": 1.7328599378113787, + "learning_rate": 3.6715104730902074e-06, + "loss": 0.9321, + "step": 2323 + }, + { + "epoch": 0.20958650854488886, + "grad_norm": 1.917952527756466, + "learning_rate": 3.671189610464325e-06, + "loss": 1.0072, + "step": 2324 + }, + { + "epoch": 0.20967669206835912, + "grad_norm": 1.5339492828808134, + "learning_rate": 3.6708686052433303e-06, + "loss": 0.9235, + "step": 2325 + }, + { + "epoch": 0.20976687559182938, + "grad_norm": 1.3491043934735911, + "learning_rate": 3.6705474574546127e-06, + "loss": 0.9842, + "step": 2326 + }, + { + "epoch": 0.20985705911529964, + "grad_norm": 1.4806659335673196, + "learning_rate": 3.670226167125575e-06, + "loss": 0.9874, + "step": 2327 + }, + { + "epoch": 0.2099472426387699, + "grad_norm": 2.0401699522694345, + "learning_rate": 3.6699047342836313e-06, + "loss": 0.9689, + "step": 2328 + }, + { + "epoch": 0.21003742616224017, + "grad_norm": 1.263868690976761, + "learning_rate": 3.669583158956208e-06, + "loss": 1.0122, + "step": 2329 + }, + { + "epoch": 0.21012760968571043, + "grad_norm": 0.7124420008560277, + "learning_rate": 3.669261441170743e-06, + "loss": 0.8279, + "step": 2330 + }, + { + "epoch": 0.2102177932091807, + "grad_norm": 1.4646594122294772, + "learning_rate": 3.668939580954688e-06, + "loss": 0.975, + "step": 2331 + }, + { + "epoch": 0.21030797673265095, + "grad_norm": 1.2160261967999337, + "learning_rate": 3.668617578335506e-06, + "loss": 0.9438, + "step": 2332 + }, + { + "epoch": 0.2103981602561212, + "grad_norm": 1.478064202314409, + "learning_rate": 
3.6682954333406707e-06, + "loss": 1.0039, + "step": 2333 + }, + { + "epoch": 0.21048834377959147, + "grad_norm": 2.1723534585323834, + "learning_rate": 3.6679731459976707e-06, + "loss": 1.0995, + "step": 2334 + }, + { + "epoch": 0.21057852730306173, + "grad_norm": 1.3561386959267945, + "learning_rate": 3.6676507163340046e-06, + "loss": 0.9723, + "step": 2335 + }, + { + "epoch": 0.210668710826532, + "grad_norm": 1.5194199234453973, + "learning_rate": 3.6673281443771842e-06, + "loss": 1.0081, + "step": 2336 + }, + { + "epoch": 0.21075889435000225, + "grad_norm": 1.7397017600432854, + "learning_rate": 3.667005430154733e-06, + "loss": 0.8574, + "step": 2337 + }, + { + "epoch": 0.2108490778734725, + "grad_norm": 0.7370343855984633, + "learning_rate": 3.666682573694186e-06, + "loss": 0.896, + "step": 2338 + }, + { + "epoch": 0.21093926139694277, + "grad_norm": 0.6669520174723081, + "learning_rate": 3.6663595750230924e-06, + "loss": 0.7808, + "step": 2339 + }, + { + "epoch": 0.21102944492041303, + "grad_norm": 1.545056362543739, + "learning_rate": 3.666036434169012e-06, + "loss": 1.0233, + "step": 2340 + }, + { + "epoch": 0.2111196284438833, + "grad_norm": 1.3693920563088702, + "learning_rate": 3.665713151159516e-06, + "loss": 0.9548, + "step": 2341 + }, + { + "epoch": 0.21120981196735356, + "grad_norm": 1.5153282897592286, + "learning_rate": 3.665389726022189e-06, + "loss": 1.0729, + "step": 2342 + }, + { + "epoch": 0.21129999549082382, + "grad_norm": 1.4198616781159705, + "learning_rate": 3.6650661587846283e-06, + "loss": 0.9484, + "step": 2343 + }, + { + "epoch": 0.21139017901429408, + "grad_norm": 1.5904899614791406, + "learning_rate": 3.6647424494744418e-06, + "loss": 0.9379, + "step": 2344 + }, + { + "epoch": 0.21148036253776434, + "grad_norm": 1.3012161215293863, + "learning_rate": 3.6644185981192503e-06, + "loss": 1.0243, + "step": 2345 + }, + { + "epoch": 0.2115705460612346, + "grad_norm": 1.7240944047070517, + "learning_rate": 3.6640946047466868e-06, + "loss": 0.9014, + "step": 2346 + }, + { + "epoch": 0.2116607295847049, + "grad_norm": 1.597451119906369, + "learning_rate": 3.6637704693843953e-06, + "loss": 0.9385, + "step": 2347 + }, + { + "epoch": 0.21175091310817515, + "grad_norm": 1.5513076465356022, + "learning_rate": 3.6634461920600337e-06, + "loss": 0.911, + "step": 2348 + }, + { + "epoch": 0.2118410966316454, + "grad_norm": 1.6392867670373419, + "learning_rate": 3.66312177280127e-06, + "loss": 0.9989, + "step": 2349 + }, + { + "epoch": 0.21193128015511567, + "grad_norm": 1.3257298582655754, + "learning_rate": 3.6627972116357872e-06, + "loss": 1.0219, + "step": 2350 + }, + { + "epoch": 0.21202146367858593, + "grad_norm": 1.4437761543844378, + "learning_rate": 3.662472508591278e-06, + "loss": 1.035, + "step": 2351 + }, + { + "epoch": 0.2121116472020562, + "grad_norm": 1.6763162536954548, + "learning_rate": 3.662147663695447e-06, + "loss": 0.968, + "step": 2352 + }, + { + "epoch": 0.21220183072552645, + "grad_norm": 1.4637653908380548, + "learning_rate": 3.6618226769760127e-06, + "loss": 0.9608, + "step": 2353 + }, + { + "epoch": 0.2122920142489967, + "grad_norm": 1.7297786257895202, + "learning_rate": 3.661497548460704e-06, + "loss": 0.8507, + "step": 2354 + }, + { + "epoch": 0.21238219777246697, + "grad_norm": 3.260576205671808, + "learning_rate": 3.6611722781772635e-06, + "loss": 0.9901, + "step": 2355 + }, + { + "epoch": 0.21247238129593723, + "grad_norm": 0.9087821181339625, + "learning_rate": 3.6608468661534444e-06, + "loss": 0.8883, + "step": 2356 + }, + { + "epoch": 
0.2125625648194075, + "grad_norm": 4.421958032913546, + "learning_rate": 3.660521312417013e-06, + "loss": 0.8927, + "step": 2357 + }, + { + "epoch": 0.21265274834287776, + "grad_norm": 1.6108890559463203, + "learning_rate": 3.660195616995747e-06, + "loss": 0.9381, + "step": 2358 + }, + { + "epoch": 0.21274293186634802, + "grad_norm": 1.4884402372890486, + "learning_rate": 3.6598697799174367e-06, + "loss": 0.9961, + "step": 2359 + }, + { + "epoch": 0.21283311538981828, + "grad_norm": 1.6466231643989704, + "learning_rate": 3.6595438012098844e-06, + "loss": 1.0329, + "step": 2360 + }, + { + "epoch": 0.21292329891328854, + "grad_norm": 1.3754811312131123, + "learning_rate": 3.6592176809009045e-06, + "loss": 0.9477, + "step": 2361 + }, + { + "epoch": 0.2130134824367588, + "grad_norm": 1.4109879680069177, + "learning_rate": 3.6588914190183227e-06, + "loss": 1.0053, + "step": 2362 + }, + { + "epoch": 0.21310366596022906, + "grad_norm": 1.5889485156083272, + "learning_rate": 3.658565015589978e-06, + "loss": 0.8397, + "step": 2363 + }, + { + "epoch": 0.21319384948369932, + "grad_norm": 1.96386828012308, + "learning_rate": 3.6582384706437217e-06, + "loss": 0.9922, + "step": 2364 + }, + { + "epoch": 0.21328403300716958, + "grad_norm": 1.5241741257963508, + "learning_rate": 3.6579117842074156e-06, + "loss": 0.9883, + "step": 2365 + }, + { + "epoch": 0.21337421653063984, + "grad_norm": 1.630353910955987, + "learning_rate": 3.657584956308934e-06, + "loss": 0.9101, + "step": 2366 + }, + { + "epoch": 0.2134644000541101, + "grad_norm": 1.5851913726510267, + "learning_rate": 3.6572579869761648e-06, + "loss": 1.0335, + "step": 2367 + }, + { + "epoch": 0.21355458357758036, + "grad_norm": 1.7126407593286794, + "learning_rate": 3.6569308762370056e-06, + "loss": 1.0215, + "step": 2368 + }, + { + "epoch": 0.21364476710105063, + "grad_norm": 3.45661163369651, + "learning_rate": 3.6566036241193676e-06, + "loss": 1.0109, + "step": 2369 + }, + { + "epoch": 0.2137349506245209, + "grad_norm": 0.9315540905316627, + "learning_rate": 3.656276230651174e-06, + "loss": 0.8586, + "step": 2370 + }, + { + "epoch": 0.21382513414799117, + "grad_norm": 0.8005626100862767, + "learning_rate": 3.65594869586036e-06, + "loss": 0.8533, + "step": 2371 + }, + { + "epoch": 0.21391531767146144, + "grad_norm": 1.4954423009008024, + "learning_rate": 3.6556210197748724e-06, + "loss": 0.8978, + "step": 2372 + }, + { + "epoch": 0.2140055011949317, + "grad_norm": 1.4566271019428343, + "learning_rate": 3.655293202422671e-06, + "loss": 0.9355, + "step": 2373 + }, + { + "epoch": 0.21409568471840196, + "grad_norm": 1.3367047253959317, + "learning_rate": 3.654965243831725e-06, + "loss": 0.9902, + "step": 2374 + }, + { + "epoch": 0.21418586824187222, + "grad_norm": 1.3654942517573394, + "learning_rate": 3.65463714403002e-06, + "loss": 1.0084, + "step": 2375 + }, + { + "epoch": 0.21427605176534248, + "grad_norm": 1.4449359300130409, + "learning_rate": 3.65430890304555e-06, + "loss": 0.9275, + "step": 2376 + }, + { + "epoch": 0.21436623528881274, + "grad_norm": 1.266114458171093, + "learning_rate": 3.653980520906323e-06, + "loss": 1.0001, + "step": 2377 + }, + { + "epoch": 0.214456418812283, + "grad_norm": 1.3784403675839094, + "learning_rate": 3.653651997640358e-06, + "loss": 0.9565, + "step": 2378 + }, + { + "epoch": 0.21454660233575326, + "grad_norm": 1.4613170436754066, + "learning_rate": 3.653323333275686e-06, + "loss": 0.9726, + "step": 2379 + }, + { + "epoch": 0.21463678585922352, + "grad_norm": 1.457526483311519, + "learning_rate": 
3.652994527840351e-06, + "loss": 0.9557, + "step": 2380 + }, + { + "epoch": 0.21472696938269378, + "grad_norm": 1.4526999067139348, + "learning_rate": 3.6526655813624087e-06, + "loss": 0.9327, + "step": 2381 + }, + { + "epoch": 0.21481715290616404, + "grad_norm": 1.1152807650295824, + "learning_rate": 3.652336493869925e-06, + "loss": 0.8315, + "step": 2382 + }, + { + "epoch": 0.2149073364296343, + "grad_norm": 1.4918762201912297, + "learning_rate": 3.6520072653909823e-06, + "loss": 0.9782, + "step": 2383 + }, + { + "epoch": 0.21499751995310457, + "grad_norm": 1.7000972187068626, + "learning_rate": 3.6516778959536702e-06, + "loss": 0.9684, + "step": 2384 + }, + { + "epoch": 0.21508770347657483, + "grad_norm": 1.5719470726159437, + "learning_rate": 3.6513483855860923e-06, + "loss": 1.0264, + "step": 2385 + }, + { + "epoch": 0.2151778870000451, + "grad_norm": 1.6296980509137573, + "learning_rate": 3.6510187343163654e-06, + "loss": 0.9145, + "step": 2386 + }, + { + "epoch": 0.21526807052351535, + "grad_norm": 1.44024148002426, + "learning_rate": 3.650688942172616e-06, + "loss": 1.0014, + "step": 2387 + }, + { + "epoch": 0.2153582540469856, + "grad_norm": 1.5279773851488665, + "learning_rate": 3.650359009182984e-06, + "loss": 0.9109, + "step": 2388 + }, + { + "epoch": 0.21544843757045587, + "grad_norm": 1.6321004382670297, + "learning_rate": 3.650028935375622e-06, + "loss": 1.0607, + "step": 2389 + }, + { + "epoch": 0.21553862109392613, + "grad_norm": 1.430327142095016, + "learning_rate": 3.6496987207786926e-06, + "loss": 0.9449, + "step": 2390 + }, + { + "epoch": 0.2156288046173964, + "grad_norm": 1.7315850026710267, + "learning_rate": 3.6493683654203724e-06, + "loss": 0.9168, + "step": 2391 + }, + { + "epoch": 0.21571898814086665, + "grad_norm": 2.041230200480524, + "learning_rate": 3.6490378693288484e-06, + "loss": 0.9506, + "step": 2392 + }, + { + "epoch": 0.2158091716643369, + "grad_norm": 1.5923197443345454, + "learning_rate": 3.648707232532321e-06, + "loss": 1.0184, + "step": 2393 + }, + { + "epoch": 0.2158993551878072, + "grad_norm": 1.744409418699348, + "learning_rate": 3.6483764550590017e-06, + "loss": 0.9788, + "step": 2394 + }, + { + "epoch": 0.21598953871127746, + "grad_norm": 1.5342435574606852, + "learning_rate": 3.6480455369371133e-06, + "loss": 0.9891, + "step": 2395 + }, + { + "epoch": 0.21607972223474772, + "grad_norm": 1.4039360081443508, + "learning_rate": 3.647714478194893e-06, + "loss": 0.9826, + "step": 2396 + }, + { + "epoch": 0.21616990575821798, + "grad_norm": 1.8051594699146416, + "learning_rate": 3.647383278860588e-06, + "loss": 0.918, + "step": 2397 + }, + { + "epoch": 0.21626008928168824, + "grad_norm": 1.5519033835462124, + "learning_rate": 3.6470519389624587e-06, + "loss": 0.9656, + "step": 2398 + }, + { + "epoch": 0.2163502728051585, + "grad_norm": 1.323484549198387, + "learning_rate": 3.646720458528776e-06, + "loss": 1.0282, + "step": 2399 + }, + { + "epoch": 0.21644045632862877, + "grad_norm": 2.006262273542943, + "learning_rate": 3.6463888375878235e-06, + "loss": 0.9354, + "step": 2400 + }, + { + "epoch": 0.21653063985209903, + "grad_norm": 1.7719814692404499, + "learning_rate": 3.646057076167897e-06, + "loss": 0.9782, + "step": 2401 + }, + { + "epoch": 0.2166208233755693, + "grad_norm": 1.4393887757782133, + "learning_rate": 3.645725174297305e-06, + "loss": 0.9676, + "step": 2402 + }, + { + "epoch": 0.21671100689903955, + "grad_norm": 1.6294641797089962, + "learning_rate": 3.645393132004367e-06, + "loss": 0.9493, + "step": 2403 + }, + { + "epoch": 
0.2168011904225098, + "grad_norm": 1.4229456465280121, + "learning_rate": 3.6450609493174135e-06, + "loss": 0.9305, + "step": 2404 + }, + { + "epoch": 0.21689137394598007, + "grad_norm": 1.4028946158512796, + "learning_rate": 3.6447286262647896e-06, + "loss": 0.9356, + "step": 2405 + }, + { + "epoch": 0.21698155746945033, + "grad_norm": 1.5485744196370728, + "learning_rate": 3.64439616287485e-06, + "loss": 0.9455, + "step": 2406 + }, + { + "epoch": 0.2170717409929206, + "grad_norm": 1.2695367431521782, + "learning_rate": 3.644063559175963e-06, + "loss": 0.9397, + "step": 2407 + }, + { + "epoch": 0.21716192451639085, + "grad_norm": 1.4449821300787593, + "learning_rate": 3.6437308151965074e-06, + "loss": 0.957, + "step": 2408 + }, + { + "epoch": 0.2172521080398611, + "grad_norm": 1.2361292867479146, + "learning_rate": 3.643397930964876e-06, + "loss": 0.9818, + "step": 2409 + }, + { + "epoch": 0.21734229156333137, + "grad_norm": 1.6007951340229174, + "learning_rate": 3.6430649065094707e-06, + "loss": 1.0354, + "step": 2410 + }, + { + "epoch": 0.21743247508680164, + "grad_norm": 1.7415819013614549, + "learning_rate": 3.6427317418587086e-06, + "loss": 1.0339, + "step": 2411 + }, + { + "epoch": 0.2175226586102719, + "grad_norm": 1.4542172336621697, + "learning_rate": 3.6423984370410157e-06, + "loss": 0.921, + "step": 2412 + }, + { + "epoch": 0.21761284213374216, + "grad_norm": 1.3834032854907863, + "learning_rate": 3.6420649920848324e-06, + "loss": 0.9341, + "step": 2413 + }, + { + "epoch": 0.21770302565721242, + "grad_norm": 1.2929772321993298, + "learning_rate": 3.6417314070186096e-06, + "loss": 0.9725, + "step": 2414 + }, + { + "epoch": 0.21779320918068268, + "grad_norm": 1.3097531776280456, + "learning_rate": 3.641397681870811e-06, + "loss": 0.976, + "step": 2415 + }, + { + "epoch": 0.21788339270415294, + "grad_norm": 2.880632936016067, + "learning_rate": 3.641063816669911e-06, + "loss": 0.9942, + "step": 2416 + }, + { + "epoch": 0.2179735762276232, + "grad_norm": 1.4817388315437998, + "learning_rate": 3.640729811444398e-06, + "loss": 1.0396, + "step": 2417 + }, + { + "epoch": 0.2180637597510935, + "grad_norm": 1.352590225298685, + "learning_rate": 3.6403956662227706e-06, + "loss": 0.9698, + "step": 2418 + }, + { + "epoch": 0.21815394327456375, + "grad_norm": 1.4793134625443953, + "learning_rate": 3.6400613810335396e-06, + "loss": 1.0323, + "step": 2419 + }, + { + "epoch": 0.218244126798034, + "grad_norm": 1.8888148950906385, + "learning_rate": 3.639726955905228e-06, + "loss": 0.8869, + "step": 2420 + }, + { + "epoch": 0.21833431032150427, + "grad_norm": 1.4932326406494576, + "learning_rate": 3.639392390866372e-06, + "loss": 1.0101, + "step": 2421 + }, + { + "epoch": 0.21842449384497453, + "grad_norm": 1.6340002093367518, + "learning_rate": 3.639057685945517e-06, + "loss": 0.9494, + "step": 2422 + }, + { + "epoch": 0.2185146773684448, + "grad_norm": 1.3301503614171863, + "learning_rate": 3.638722841171223e-06, + "loss": 0.8609, + "step": 2423 + }, + { + "epoch": 0.21860486089191505, + "grad_norm": 18.747606455298083, + "learning_rate": 3.638387856572061e-06, + "loss": 1.0162, + "step": 2424 + }, + { + "epoch": 0.21869504441538531, + "grad_norm": 0.8401517436364443, + "learning_rate": 3.638052732176612e-06, + "loss": 0.8367, + "step": 2425 + }, + { + "epoch": 0.21878522793885558, + "grad_norm": 1.521078386116513, + "learning_rate": 3.637717468013472e-06, + "loss": 0.9756, + "step": 2426 + }, + { + "epoch": 0.21887541146232584, + "grad_norm": 0.8106984927377868, + "learning_rate": 
3.6373820641112475e-06, + "loss": 0.7885, + "step": 2427 + }, + { + "epoch": 0.2189655949857961, + "grad_norm": 1.783151180851978, + "learning_rate": 3.6370465204985567e-06, + "loss": 0.993, + "step": 2428 + }, + { + "epoch": 0.21905577850926636, + "grad_norm": 0.760522186239081, + "learning_rate": 3.6367108372040304e-06, + "loss": 0.8578, + "step": 2429 + }, + { + "epoch": 0.21914596203273662, + "grad_norm": 1.4779224087553682, + "learning_rate": 3.6363750142563107e-06, + "loss": 0.9753, + "step": 2430 + }, + { + "epoch": 0.21923614555620688, + "grad_norm": 0.8498283717844846, + "learning_rate": 3.636039051684052e-06, + "loss": 0.8854, + "step": 2431 + }, + { + "epoch": 0.21932632907967714, + "grad_norm": 1.7284745206094292, + "learning_rate": 3.6357029495159203e-06, + "loss": 0.9451, + "step": 2432 + }, + { + "epoch": 0.2194165126031474, + "grad_norm": 1.316811926389918, + "learning_rate": 3.6353667077805934e-06, + "loss": 0.9435, + "step": 2433 + }, + { + "epoch": 0.21950669612661766, + "grad_norm": 1.7202381973781986, + "learning_rate": 3.6350303265067625e-06, + "loss": 1.0196, + "step": 2434 + }, + { + "epoch": 0.21959687965008792, + "grad_norm": 1.398550018387138, + "learning_rate": 3.6346938057231285e-06, + "loss": 0.999, + "step": 2435 + }, + { + "epoch": 0.21968706317355818, + "grad_norm": 1.8242978848726583, + "learning_rate": 3.6343571454584047e-06, + "loss": 0.9156, + "step": 2436 + }, + { + "epoch": 0.21977724669702844, + "grad_norm": 1.4155922723822767, + "learning_rate": 3.6340203457413176e-06, + "loss": 1.0119, + "step": 2437 + }, + { + "epoch": 0.2198674302204987, + "grad_norm": 1.4771297919208644, + "learning_rate": 3.633683406600605e-06, + "loss": 0.9951, + "step": 2438 + }, + { + "epoch": 0.21995761374396897, + "grad_norm": 1.3424953103294368, + "learning_rate": 3.6333463280650165e-06, + "loss": 1.0381, + "step": 2439 + }, + { + "epoch": 0.22004779726743923, + "grad_norm": 0.8201226514292229, + "learning_rate": 3.6330091101633126e-06, + "loss": 0.8059, + "step": 2440 + }, + { + "epoch": 0.2201379807909095, + "grad_norm": 1.74236450664618, + "learning_rate": 3.632671752924267e-06, + "loss": 1.0096, + "step": 2441 + }, + { + "epoch": 0.22022816431437978, + "grad_norm": 1.8674137505118105, + "learning_rate": 3.632334256376665e-06, + "loss": 0.989, + "step": 2442 + }, + { + "epoch": 0.22031834783785004, + "grad_norm": 1.6267409169078484, + "learning_rate": 3.6319966205493044e-06, + "loss": 1.03, + "step": 2443 + }, + { + "epoch": 0.2204085313613203, + "grad_norm": 1.0853242858018324, + "learning_rate": 3.6316588454709922e-06, + "loss": 0.8785, + "step": 2444 + }, + { + "epoch": 0.22049871488479056, + "grad_norm": 1.5659997274610493, + "learning_rate": 3.6313209311705514e-06, + "loss": 0.9252, + "step": 2445 + }, + { + "epoch": 0.22058889840826082, + "grad_norm": 1.6904533115721792, + "learning_rate": 3.6309828776768133e-06, + "loss": 1.009, + "step": 2446 + }, + { + "epoch": 0.22067908193173108, + "grad_norm": 1.8032205407599966, + "learning_rate": 3.630644685018623e-06, + "loss": 0.9331, + "step": 2447 + }, + { + "epoch": 0.22076926545520134, + "grad_norm": 3.7466340535555656, + "learning_rate": 3.6303063532248367e-06, + "loss": 1.0618, + "step": 2448 + }, + { + "epoch": 0.2208594489786716, + "grad_norm": 1.514893896208381, + "learning_rate": 3.6299678823243236e-06, + "loss": 1.0548, + "step": 2449 + }, + { + "epoch": 0.22094963250214186, + "grad_norm": 1.7456580520926244, + "learning_rate": 3.629629272345963e-06, + "loss": 0.8761, + "step": 2450 + }, + { + "epoch": 
0.22103981602561212, + "grad_norm": 1.6136002860145144, + "learning_rate": 3.6292905233186468e-06, + "loss": 1.0901, + "step": 2451 + }, + { + "epoch": 0.22112999954908238, + "grad_norm": 1.5046260547867174, + "learning_rate": 3.6289516352712796e-06, + "loss": 0.9411, + "step": 2452 + }, + { + "epoch": 0.22122018307255265, + "grad_norm": 0.854877354598146, + "learning_rate": 3.6286126082327764e-06, + "loss": 0.852, + "step": 2453 + }, + { + "epoch": 0.2213103665960229, + "grad_norm": 1.2996026275432087, + "learning_rate": 3.628273442232066e-06, + "loss": 1.0092, + "step": 2454 + }, + { + "epoch": 0.22140055011949317, + "grad_norm": 1.3706332571856814, + "learning_rate": 3.627934137298087e-06, + "loss": 0.9694, + "step": 2455 + }, + { + "epoch": 0.22149073364296343, + "grad_norm": 1.8499853442874539, + "learning_rate": 3.627594693459792e-06, + "loss": 0.9455, + "step": 2456 + }, + { + "epoch": 0.2215809171664337, + "grad_norm": 1.5339430657505622, + "learning_rate": 3.6272551107461424e-06, + "loss": 0.9397, + "step": 2457 + }, + { + "epoch": 0.22167110068990395, + "grad_norm": 1.43576243332719, + "learning_rate": 3.6269153891861137e-06, + "loss": 0.9592, + "step": 2458 + }, + { + "epoch": 0.2217612842133742, + "grad_norm": 1.352721450572079, + "learning_rate": 3.6265755288086944e-06, + "loss": 0.9494, + "step": 2459 + }, + { + "epoch": 0.22185146773684447, + "grad_norm": 1.684825685466697, + "learning_rate": 3.626235529642881e-06, + "loss": 0.9545, + "step": 2460 + }, + { + "epoch": 0.22194165126031473, + "grad_norm": 1.5347444792562916, + "learning_rate": 3.625895391717686e-06, + "loss": 0.8984, + "step": 2461 + }, + { + "epoch": 0.222031834783785, + "grad_norm": 1.4871769209627967, + "learning_rate": 3.625555115062131e-06, + "loss": 0.9433, + "step": 2462 + }, + { + "epoch": 0.22212201830725525, + "grad_norm": 1.4636139042992706, + "learning_rate": 3.6252146997052507e-06, + "loss": 1.0457, + "step": 2463 + }, + { + "epoch": 0.22221220183072551, + "grad_norm": 1.1507183443630282, + "learning_rate": 3.6248741456760898e-06, + "loss": 0.787, + "step": 2464 + }, + { + "epoch": 0.22230238535419578, + "grad_norm": 1.4568856910630774, + "learning_rate": 3.624533453003708e-06, + "loss": 0.9631, + "step": 2465 + }, + { + "epoch": 0.22239256887766606, + "grad_norm": 1.6824554347899743, + "learning_rate": 3.6241926217171745e-06, + "loss": 0.9878, + "step": 2466 + }, + { + "epoch": 0.22248275240113632, + "grad_norm": 1.7523045352216393, + "learning_rate": 3.6238516518455703e-06, + "loss": 0.8893, + "step": 2467 + }, + { + "epoch": 0.22257293592460659, + "grad_norm": 1.8035714798297362, + "learning_rate": 3.62351054341799e-06, + "loss": 1.0873, + "step": 2468 + }, + { + "epoch": 0.22266311944807685, + "grad_norm": 1.5158785578416312, + "learning_rate": 3.623169296463538e-06, + "loss": 0.9875, + "step": 2469 + }, + { + "epoch": 0.2227533029715471, + "grad_norm": 1.690045379729143, + "learning_rate": 3.6228279110113316e-06, + "loss": 0.9807, + "step": 2470 + }, + { + "epoch": 0.22284348649501737, + "grad_norm": 1.723436589729297, + "learning_rate": 3.6224863870904994e-06, + "loss": 0.984, + "step": 2471 + }, + { + "epoch": 0.22293367001848763, + "grad_norm": 1.512158785707218, + "learning_rate": 3.6221447247301827e-06, + "loss": 0.9828, + "step": 2472 + }, + { + "epoch": 0.2230238535419579, + "grad_norm": 1.401367315099624, + "learning_rate": 3.6218029239595332e-06, + "loss": 0.9258, + "step": 2473 + }, + { + "epoch": 0.22311403706542815, + "grad_norm": 1.4838781227899676, + "learning_rate": 
3.621460984807716e-06, + "loss": 1.0008, + "step": 2474 + }, + { + "epoch": 0.2232042205888984, + "grad_norm": 1.6154356972858528, + "learning_rate": 3.621118907303907e-06, + "loss": 0.9474, + "step": 2475 + }, + { + "epoch": 0.22329440411236867, + "grad_norm": 1.5058870028523046, + "learning_rate": 3.620776691477294e-06, + "loss": 0.9707, + "step": 2476 + }, + { + "epoch": 0.22338458763583893, + "grad_norm": 0.9868386939475009, + "learning_rate": 3.6204343373570765e-06, + "loss": 0.784, + "step": 2477 + }, + { + "epoch": 0.2234747711593092, + "grad_norm": 1.476764079871347, + "learning_rate": 3.620091844972467e-06, + "loss": 1.0005, + "step": 2478 + }, + { + "epoch": 0.22356495468277945, + "grad_norm": 1.7418653934516348, + "learning_rate": 3.619749214352688e-06, + "loss": 0.9975, + "step": 2479 + }, + { + "epoch": 0.22365513820624972, + "grad_norm": 1.7152129103955784, + "learning_rate": 3.6194064455269744e-06, + "loss": 1.0827, + "step": 2480 + }, + { + "epoch": 0.22374532172971998, + "grad_norm": 1.632057562981618, + "learning_rate": 3.6190635385245737e-06, + "loss": 0.8913, + "step": 2481 + }, + { + "epoch": 0.22383550525319024, + "grad_norm": 1.474244088913878, + "learning_rate": 3.618720493374745e-06, + "loss": 0.9073, + "step": 2482 + }, + { + "epoch": 0.2239256887766605, + "grad_norm": 1.6824525297590354, + "learning_rate": 3.6183773101067575e-06, + "loss": 0.8389, + "step": 2483 + }, + { + "epoch": 0.22401587230013076, + "grad_norm": 1.66099652735412, + "learning_rate": 3.6180339887498948e-06, + "loss": 0.9008, + "step": 2484 + }, + { + "epoch": 0.22410605582360102, + "grad_norm": 1.5842450511843642, + "learning_rate": 3.61769052933345e-06, + "loss": 0.9774, + "step": 2485 + }, + { + "epoch": 0.22419623934707128, + "grad_norm": 1.7249921328600184, + "learning_rate": 3.6173469318867297e-06, + "loss": 1.1253, + "step": 2486 + }, + { + "epoch": 0.22428642287054154, + "grad_norm": 1.6358807952001686, + "learning_rate": 3.617003196439051e-06, + "loss": 1.0008, + "step": 2487 + }, + { + "epoch": 0.2243766063940118, + "grad_norm": 1.3484038117798753, + "learning_rate": 3.616659323019744e-06, + "loss": 0.9839, + "step": 2488 + }, + { + "epoch": 0.22446678991748206, + "grad_norm": 1.5378803030941315, + "learning_rate": 3.616315311658149e-06, + "loss": 1.003, + "step": 2489 + }, + { + "epoch": 0.22455697344095235, + "grad_norm": 1.871116430985315, + "learning_rate": 3.6159711623836195e-06, + "loss": 1.0306, + "step": 2490 + }, + { + "epoch": 0.2246471569644226, + "grad_norm": 0.9559447680244109, + "learning_rate": 3.6156268752255203e-06, + "loss": 0.8028, + "step": 2491 + }, + { + "epoch": 0.22473734048789287, + "grad_norm": 1.508253596325113, + "learning_rate": 3.615282450213227e-06, + "loss": 0.941, + "step": 2492 + }, + { + "epoch": 0.22482752401136313, + "grad_norm": 1.689940841926011, + "learning_rate": 3.614937887376128e-06, + "loss": 0.992, + "step": 2493 + }, + { + "epoch": 0.2249177075348334, + "grad_norm": 1.8203707837118377, + "learning_rate": 3.614593186743625e-06, + "loss": 0.9855, + "step": 2494 + }, + { + "epoch": 0.22500789105830366, + "grad_norm": 2.0578579537738158, + "learning_rate": 3.614248348345128e-06, + "loss": 0.9213, + "step": 2495 + }, + { + "epoch": 0.22509807458177392, + "grad_norm": 1.4804436271367725, + "learning_rate": 3.6139033722100614e-06, + "loss": 1.0219, + "step": 2496 + }, + { + "epoch": 0.22518825810524418, + "grad_norm": 1.9527832342583777, + "learning_rate": 3.6135582583678596e-06, + "loss": 1.0499, + "step": 2497 + }, + { + "epoch": 
0.22527844162871444, + "grad_norm": 1.3692327841135596, + "learning_rate": 3.61321300684797e-06, + "loss": 0.9943, + "step": 2498 + }, + { + "epoch": 0.2253686251521847, + "grad_norm": 1.577698224969419, + "learning_rate": 3.6128676176798527e-06, + "loss": 0.9998, + "step": 2499 + }, + { + "epoch": 0.22545880867565496, + "grad_norm": 1.480152267407206, + "learning_rate": 3.612522090892976e-06, + "loss": 0.992, + "step": 2500 + }, + { + "epoch": 0.22554899219912522, + "grad_norm": 1.2450392036700018, + "learning_rate": 3.6121764265168232e-06, + "loss": 0.9382, + "step": 2501 + }, + { + "epoch": 0.22563917572259548, + "grad_norm": 1.8680697475620636, + "learning_rate": 3.611830624580888e-06, + "loss": 0.9974, + "step": 2502 + }, + { + "epoch": 0.22572935924606574, + "grad_norm": 1.6900510931474026, + "learning_rate": 3.6114846851146767e-06, + "loss": 0.9837, + "step": 2503 + }, + { + "epoch": 0.225819542769536, + "grad_norm": 1.497991807067224, + "learning_rate": 3.6111386081477068e-06, + "loss": 0.8807, + "step": 2504 + }, + { + "epoch": 0.22590972629300626, + "grad_norm": 1.4692573482737947, + "learning_rate": 3.6107923937095066e-06, + "loss": 0.9838, + "step": 2505 + }, + { + "epoch": 0.22599990981647652, + "grad_norm": 1.8239641359049203, + "learning_rate": 3.6104460418296173e-06, + "loss": 0.9009, + "step": 2506 + }, + { + "epoch": 0.22609009333994678, + "grad_norm": 1.5082588128215453, + "learning_rate": 3.6100995525375924e-06, + "loss": 1.0365, + "step": 2507 + }, + { + "epoch": 0.22618027686341705, + "grad_norm": 1.4124994227315348, + "learning_rate": 3.6097529258629952e-06, + "loss": 0.9253, + "step": 2508 + }, + { + "epoch": 0.2262704603868873, + "grad_norm": 1.5296473777426565, + "learning_rate": 3.6094061618354027e-06, + "loss": 1.0074, + "step": 2509 + }, + { + "epoch": 0.22636064391035757, + "grad_norm": 1.540939348506341, + "learning_rate": 3.609059260484402e-06, + "loss": 0.9787, + "step": 2510 + }, + { + "epoch": 0.22645082743382783, + "grad_norm": 2.030048425782732, + "learning_rate": 3.6087122218395935e-06, + "loss": 0.9172, + "step": 2511 + }, + { + "epoch": 0.2265410109572981, + "grad_norm": 1.4282305021152017, + "learning_rate": 3.608365045930587e-06, + "loss": 0.9347, + "step": 2512 + }, + { + "epoch": 0.22663119448076835, + "grad_norm": 1.7767847544495674, + "learning_rate": 3.608017732787007e-06, + "loss": 0.9589, + "step": 2513 + }, + { + "epoch": 0.22672137800423864, + "grad_norm": 1.2871485980574038, + "learning_rate": 3.6076702824384875e-06, + "loss": 0.907, + "step": 2514 + }, + { + "epoch": 0.2268115615277089, + "grad_norm": 1.3134476101885766, + "learning_rate": 3.607322694914675e-06, + "loss": 0.9921, + "step": 2515 + }, + { + "epoch": 0.22690174505117916, + "grad_norm": 1.9296137374803286, + "learning_rate": 3.606974970245227e-06, + "loss": 0.9501, + "step": 2516 + }, + { + "epoch": 0.22699192857464942, + "grad_norm": 1.627312335490824, + "learning_rate": 3.606627108459814e-06, + "loss": 0.923, + "step": 2517 + }, + { + "epoch": 0.22708211209811968, + "grad_norm": 1.2226058413318117, + "learning_rate": 3.6062791095881174e-06, + "loss": 0.9011, + "step": 2518 + }, + { + "epoch": 0.22717229562158994, + "grad_norm": 1.6614792522520594, + "learning_rate": 3.6059309736598303e-06, + "loss": 1.0473, + "step": 2519 + }, + { + "epoch": 0.2272624791450602, + "grad_norm": 2.7102778179034943, + "learning_rate": 3.605582700704657e-06, + "loss": 0.9524, + "step": 2520 + }, + { + "epoch": 0.22735266266853046, + "grad_norm": 1.429775714758416, + "learning_rate": 
3.6052342907523146e-06, + "loss": 0.9786, + "step": 2521 + }, + { + "epoch": 0.22744284619200072, + "grad_norm": 1.473331367497118, + "learning_rate": 3.604885743832532e-06, + "loss": 0.9055, + "step": 2522 + }, + { + "epoch": 0.22753302971547099, + "grad_norm": 0.9697677434530731, + "learning_rate": 3.6045370599750482e-06, + "loss": 0.7998, + "step": 2523 + }, + { + "epoch": 0.22762321323894125, + "grad_norm": 1.4188850741848358, + "learning_rate": 3.604188239209615e-06, + "loss": 0.9522, + "step": 2524 + }, + { + "epoch": 0.2277133967624115, + "grad_norm": 1.5097169066312845, + "learning_rate": 3.603839281565996e-06, + "loss": 0.9031, + "step": 2525 + }, + { + "epoch": 0.22780358028588177, + "grad_norm": 2.063146778767159, + "learning_rate": 3.603490187073966e-06, + "loss": 0.9926, + "step": 2526 + }, + { + "epoch": 0.22789376380935203, + "grad_norm": 1.6219541908879138, + "learning_rate": 3.6031409557633117e-06, + "loss": 1.0341, + "step": 2527 + }, + { + "epoch": 0.2279839473328223, + "grad_norm": 1.5537959470576481, + "learning_rate": 3.602791587663831e-06, + "loss": 1.0194, + "step": 2528 + }, + { + "epoch": 0.22807413085629255, + "grad_norm": 1.4581702640733385, + "learning_rate": 3.6024420828053348e-06, + "loss": 1.0165, + "step": 2529 + }, + { + "epoch": 0.2281643143797628, + "grad_norm": 1.4931218282890244, + "learning_rate": 3.6020924412176445e-06, + "loss": 1.0487, + "step": 2530 + }, + { + "epoch": 0.22825449790323307, + "grad_norm": 7.239961712140591, + "learning_rate": 3.601742662930593e-06, + "loss": 0.9549, + "step": 2531 + }, + { + "epoch": 0.22834468142670333, + "grad_norm": 1.6038217380145092, + "learning_rate": 3.6013927479740248e-06, + "loss": 0.9383, + "step": 2532 + }, + { + "epoch": 0.2284348649501736, + "grad_norm": 1.4305123206377706, + "learning_rate": 3.6010426963777985e-06, + "loss": 1.0562, + "step": 2533 + }, + { + "epoch": 0.22852504847364385, + "grad_norm": 1.4889521651004358, + "learning_rate": 3.6006925081717804e-06, + "loss": 1.0187, + "step": 2534 + }, + { + "epoch": 0.22861523199711412, + "grad_norm": 1.2942420245008306, + "learning_rate": 3.600342183385852e-06, + "loss": 0.9391, + "step": 2535 + }, + { + "epoch": 0.22870541552058438, + "grad_norm": 1.2914346312204088, + "learning_rate": 3.5999917220499043e-06, + "loss": 0.9122, + "step": 2536 + }, + { + "epoch": 0.22879559904405466, + "grad_norm": 1.6975883291238816, + "learning_rate": 3.5996411241938404e-06, + "loss": 0.9808, + "step": 2537 + }, + { + "epoch": 0.22888578256752493, + "grad_norm": 1.4490444356135, + "learning_rate": 3.5992903898475752e-06, + "loss": 0.9269, + "step": 2538 + }, + { + "epoch": 0.2289759660909952, + "grad_norm": 1.325626716306907, + "learning_rate": 3.5989395190410365e-06, + "loss": 1.076, + "step": 2539 + }, + { + "epoch": 0.22906614961446545, + "grad_norm": 1.4487372984177154, + "learning_rate": 3.598588511804161e-06, + "loss": 0.9831, + "step": 2540 + }, + { + "epoch": 0.2291563331379357, + "grad_norm": 1.9244121149243545, + "learning_rate": 3.5982373681668987e-06, + "loss": 0.9882, + "step": 2541 + }, + { + "epoch": 0.22924651666140597, + "grad_norm": 1.702392490594299, + "learning_rate": 3.597886088159212e-06, + "loss": 0.9821, + "step": 2542 + }, + { + "epoch": 0.22933670018487623, + "grad_norm": 1.3397748390026394, + "learning_rate": 3.597534671811074e-06, + "loss": 0.9035, + "step": 2543 + }, + { + "epoch": 0.2294268837083465, + "grad_norm": 1.4779031308786326, + "learning_rate": 3.5971831191524684e-06, + "loss": 0.9509, + "step": 2544 + }, + { + "epoch": 
0.22951706723181675, + "grad_norm": 1.3834813973265623, + "learning_rate": 3.5968314302133925e-06, + "loss": 1.0213, + "step": 2545 + }, + { + "epoch": 0.229607250755287, + "grad_norm": 1.6324832712107897, + "learning_rate": 3.596479605023854e-06, + "loss": 0.9753, + "step": 2546 + }, + { + "epoch": 0.22969743427875727, + "grad_norm": 1.4725139356902541, + "learning_rate": 3.596127643613873e-06, + "loss": 0.9892, + "step": 2547 + }, + { + "epoch": 0.22978761780222753, + "grad_norm": 1.343568434757928, + "learning_rate": 3.59577554601348e-06, + "loss": 0.9963, + "step": 2548 + }, + { + "epoch": 0.2298778013256978, + "grad_norm": 1.3005832372483561, + "learning_rate": 3.595423312252719e-06, + "loss": 1.0246, + "step": 2549 + }, + { + "epoch": 0.22996798484916806, + "grad_norm": 1.691305602429113, + "learning_rate": 3.5950709423616436e-06, + "loss": 0.9754, + "step": 2550 + }, + { + "epoch": 0.23005816837263832, + "grad_norm": 1.596761737546817, + "learning_rate": 3.5947184363703203e-06, + "loss": 0.9506, + "step": 2551 + }, + { + "epoch": 0.23014835189610858, + "grad_norm": 1.2570366688025343, + "learning_rate": 3.5943657943088274e-06, + "loss": 0.9248, + "step": 2552 + }, + { + "epoch": 0.23023853541957884, + "grad_norm": 1.2257245840245936, + "learning_rate": 3.5940130162072525e-06, + "loss": 0.9011, + "step": 2553 + }, + { + "epoch": 0.2303287189430491, + "grad_norm": 1.8201967783805622, + "learning_rate": 3.5936601020956985e-06, + "loss": 0.9835, + "step": 2554 + }, + { + "epoch": 0.23041890246651936, + "grad_norm": 1.4588167842427655, + "learning_rate": 3.5933070520042772e-06, + "loss": 1.0086, + "step": 2555 + }, + { + "epoch": 0.23050908598998962, + "grad_norm": 0.8975945872350166, + "learning_rate": 3.5929538659631133e-06, + "loss": 0.848, + "step": 2556 + }, + { + "epoch": 0.23059926951345988, + "grad_norm": 1.4357370263625606, + "learning_rate": 3.592600544002341e-06, + "loss": 1.0069, + "step": 2557 + }, + { + "epoch": 0.23068945303693014, + "grad_norm": 1.5577873878406296, + "learning_rate": 3.5922470861521098e-06, + "loss": 0.9745, + "step": 2558 + }, + { + "epoch": 0.2307796365604004, + "grad_norm": 1.5211526021754769, + "learning_rate": 3.591893492442577e-06, + "loss": 0.9439, + "step": 2559 + }, + { + "epoch": 0.23086982008387066, + "grad_norm": 1.7556710317246118, + "learning_rate": 3.591539762903914e-06, + "loss": 1.042, + "step": 2560 + }, + { + "epoch": 0.23096000360734095, + "grad_norm": 1.3896434191400735, + "learning_rate": 3.591185897566303e-06, + "loss": 0.9117, + "step": 2561 + }, + { + "epoch": 0.2310501871308112, + "grad_norm": 1.4210902133790688, + "learning_rate": 3.590831896459937e-06, + "loss": 0.9737, + "step": 2562 + }, + { + "epoch": 0.23114037065428147, + "grad_norm": 1.4901860580510955, + "learning_rate": 3.5904777596150222e-06, + "loss": 0.9806, + "step": 2563 + }, + { + "epoch": 0.23123055417775173, + "grad_norm": 1.5921309980525211, + "learning_rate": 3.590123487061775e-06, + "loss": 0.9793, + "step": 2564 + }, + { + "epoch": 0.231320737701222, + "grad_norm": 1.6523620027296002, + "learning_rate": 3.589769078830424e-06, + "loss": 1.0711, + "step": 2565 + }, + { + "epoch": 0.23141092122469226, + "grad_norm": 1.38649677058056, + "learning_rate": 3.58941453495121e-06, + "loss": 0.9557, + "step": 2566 + }, + { + "epoch": 0.23150110474816252, + "grad_norm": 1.4905493567125319, + "learning_rate": 3.5890598554543834e-06, + "loss": 0.9816, + "step": 2567 + }, + { + "epoch": 0.23159128827163278, + "grad_norm": 2.22529217805437, + "learning_rate": 
3.5887050403702073e-06, + "loss": 1.0337, + "step": 2568 + }, + { + "epoch": 0.23168147179510304, + "grad_norm": 1.9707435323997908, + "learning_rate": 3.588350089728958e-06, + "loss": 0.9518, + "step": 2569 + }, + { + "epoch": 0.2317716553185733, + "grad_norm": 1.4319826474763373, + "learning_rate": 3.5879950035609204e-06, + "loss": 0.9947, + "step": 2570 + }, + { + "epoch": 0.23186183884204356, + "grad_norm": 1.4246450417114598, + "learning_rate": 3.5876397818963933e-06, + "loss": 0.9745, + "step": 2571 + }, + { + "epoch": 0.23195202236551382, + "grad_norm": 1.238859071904952, + "learning_rate": 3.5872844247656858e-06, + "loss": 1.0253, + "step": 2572 + }, + { + "epoch": 0.23204220588898408, + "grad_norm": 1.2857401254872953, + "learning_rate": 3.5869289321991195e-06, + "loss": 0.9814, + "step": 2573 + }, + { + "epoch": 0.23213238941245434, + "grad_norm": 2.032276891683669, + "learning_rate": 3.5865733042270263e-06, + "loss": 0.9653, + "step": 2574 + }, + { + "epoch": 0.2322225729359246, + "grad_norm": 1.5992850136644314, + "learning_rate": 3.5862175408797498e-06, + "loss": 1.0075, + "step": 2575 + }, + { + "epoch": 0.23231275645939486, + "grad_norm": 1.2636178666944686, + "learning_rate": 3.585861642187647e-06, + "loss": 0.915, + "step": 2576 + }, + { + "epoch": 0.23240293998286513, + "grad_norm": 1.342159261173282, + "learning_rate": 3.5855056081810845e-06, + "loss": 0.9682, + "step": 2577 + }, + { + "epoch": 0.2324931235063354, + "grad_norm": 1.3453603897697863, + "learning_rate": 3.5851494388904406e-06, + "loss": 0.9744, + "step": 2578 + }, + { + "epoch": 0.23258330702980565, + "grad_norm": 1.425501547773438, + "learning_rate": 3.5847931343461064e-06, + "loss": 0.9139, + "step": 2579 + }, + { + "epoch": 0.2326734905532759, + "grad_norm": 1.6317149584867792, + "learning_rate": 3.5844366945784835e-06, + "loss": 0.9044, + "step": 2580 + }, + { + "epoch": 0.23276367407674617, + "grad_norm": 1.5126470037224427, + "learning_rate": 3.5840801196179856e-06, + "loss": 1.032, + "step": 2581 + }, + { + "epoch": 0.23285385760021643, + "grad_norm": 1.504473056743389, + "learning_rate": 3.583723409495037e-06, + "loss": 1.0252, + "step": 2582 + }, + { + "epoch": 0.2329440411236867, + "grad_norm": 1.5220238019508217, + "learning_rate": 3.5833665642400747e-06, + "loss": 1.0268, + "step": 2583 + }, + { + "epoch": 0.23303422464715695, + "grad_norm": 1.4569725863431153, + "learning_rate": 3.5830095838835472e-06, + "loss": 0.9034, + "step": 2584 + }, + { + "epoch": 0.23312440817062724, + "grad_norm": 1.2871441988365688, + "learning_rate": 3.5826524684559125e-06, + "loss": 0.9419, + "step": 2585 + }, + { + "epoch": 0.2332145916940975, + "grad_norm": 1.8598298870763166, + "learning_rate": 3.5822952179876433e-06, + "loss": 1.0626, + "step": 2586 + }, + { + "epoch": 0.23330477521756776, + "grad_norm": 1.2616868147927307, + "learning_rate": 3.5819378325092205e-06, + "loss": 0.9481, + "step": 2587 + }, + { + "epoch": 0.23339495874103802, + "grad_norm": 1.313524482164905, + "learning_rate": 3.581580312051139e-06, + "loss": 0.9999, + "step": 2588 + }, + { + "epoch": 0.23348514226450828, + "grad_norm": 1.3891716981753601, + "learning_rate": 3.5812226566439057e-06, + "loss": 0.9031, + "step": 2589 + }, + { + "epoch": 0.23357532578797854, + "grad_norm": 1.4297366107781324, + "learning_rate": 3.580864866318036e-06, + "loss": 0.954, + "step": 2590 + }, + { + "epoch": 0.2336655093114488, + "grad_norm": 1.563551205837454, + "learning_rate": 3.580506941104059e-06, + "loss": 0.9875, + "step": 2591 + }, + { + "epoch": 
0.23375569283491907, + "grad_norm": 1.3694113383483821, + "learning_rate": 3.580148881032515e-06, + "loss": 0.9684, + "step": 2592 + }, + { + "epoch": 0.23384587635838933, + "grad_norm": 1.3555109638599503, + "learning_rate": 3.5797906861339556e-06, + "loss": 0.9876, + "step": 2593 + }, + { + "epoch": 0.2339360598818596, + "grad_norm": 1.4221656418430115, + "learning_rate": 3.5794323564389435e-06, + "loss": 1.0083, + "step": 2594 + }, + { + "epoch": 0.23402624340532985, + "grad_norm": 1.5403682378713208, + "learning_rate": 3.579073891978055e-06, + "loss": 0.9865, + "step": 2595 + }, + { + "epoch": 0.2341164269288001, + "grad_norm": 1.9036018864002888, + "learning_rate": 3.5787152927818746e-06, + "loss": 0.9428, + "step": 2596 + }, + { + "epoch": 0.23420661045227037, + "grad_norm": 1.399007457326083, + "learning_rate": 3.5783565588810003e-06, + "loss": 1.031, + "step": 2597 + }, + { + "epoch": 0.23429679397574063, + "grad_norm": 1.4431713017849879, + "learning_rate": 3.5779976903060412e-06, + "loss": 0.9528, + "step": 2598 + }, + { + "epoch": 0.2343869774992109, + "grad_norm": 1.5009894286716783, + "learning_rate": 3.577638687087619e-06, + "loss": 1.0038, + "step": 2599 + }, + { + "epoch": 0.23447716102268115, + "grad_norm": 1.333242765966733, + "learning_rate": 3.577279549256364e-06, + "loss": 0.9806, + "step": 2600 + }, + { + "epoch": 0.2345673445461514, + "grad_norm": 1.4121934119157993, + "learning_rate": 3.5769202768429213e-06, + "loss": 1.0135, + "step": 2601 + }, + { + "epoch": 0.23465752806962167, + "grad_norm": 1.5858601518382238, + "learning_rate": 3.5765608698779454e-06, + "loss": 0.9617, + "step": 2602 + }, + { + "epoch": 0.23474771159309193, + "grad_norm": 1.4293732845027556, + "learning_rate": 3.5762013283921033e-06, + "loss": 0.9876, + "step": 2603 + }, + { + "epoch": 0.2348378951165622, + "grad_norm": 1.6117429626983384, + "learning_rate": 3.5758416524160728e-06, + "loss": 0.9773, + "step": 2604 + }, + { + "epoch": 0.23492807864003246, + "grad_norm": 1.4708237313595633, + "learning_rate": 3.5754818419805427e-06, + "loss": 0.967, + "step": 2605 + }, + { + "epoch": 0.23501826216350272, + "grad_norm": 1.3159019794802023, + "learning_rate": 3.575121897116216e-06, + "loss": 0.9508, + "step": 2606 + }, + { + "epoch": 0.23510844568697298, + "grad_norm": 1.3759085081339646, + "learning_rate": 3.574761817853803e-06, + "loss": 1.0126, + "step": 2607 + }, + { + "epoch": 0.23519862921044324, + "grad_norm": 5.287326364394216, + "learning_rate": 3.5744016042240287e-06, + "loss": 0.9131, + "step": 2608 + }, + { + "epoch": 0.23528881273391353, + "grad_norm": 1.6767928620253851, + "learning_rate": 3.5740412562576286e-06, + "loss": 0.9093, + "step": 2609 + }, + { + "epoch": 0.2353789962573838, + "grad_norm": 1.5823697705190358, + "learning_rate": 3.573680773985349e-06, + "loss": 0.9634, + "step": 2610 + }, + { + "epoch": 0.23546917978085405, + "grad_norm": 1.383483422230012, + "learning_rate": 3.5733201574379486e-06, + "loss": 0.9721, + "step": 2611 + }, + { + "epoch": 0.2355593633043243, + "grad_norm": 1.8003281294242424, + "learning_rate": 3.5729594066461975e-06, + "loss": 0.9241, + "step": 2612 + }, + { + "epoch": 0.23564954682779457, + "grad_norm": 1.1897961853098284, + "learning_rate": 3.572598521640876e-06, + "loss": 0.9956, + "step": 2613 + }, + { + "epoch": 0.23573973035126483, + "grad_norm": 0.853121529037625, + "learning_rate": 3.5722375024527782e-06, + "loss": 0.8514, + "step": 2614 + }, + { + "epoch": 0.2358299138747351, + "grad_norm": 1.231832614287549, + "learning_rate": 
3.571876349112707e-06, + "loss": 0.9959, + "step": 2615 + }, + { + "epoch": 0.23592009739820535, + "grad_norm": 1.358981020606129, + "learning_rate": 3.5715150616514784e-06, + "loss": 0.9756, + "step": 2616 + }, + { + "epoch": 0.2360102809216756, + "grad_norm": 1.729246375150302, + "learning_rate": 3.5711536400999196e-06, + "loss": 1.033, + "step": 2617 + }, + { + "epoch": 0.23610046444514587, + "grad_norm": 0.8090614772066714, + "learning_rate": 3.570792084488869e-06, + "loss": 0.8649, + "step": 2618 + }, + { + "epoch": 0.23619064796861614, + "grad_norm": 0.8481189150721973, + "learning_rate": 3.5704303948491764e-06, + "loss": 0.9155, + "step": 2619 + }, + { + "epoch": 0.2362808314920864, + "grad_norm": 1.7798274868508546, + "learning_rate": 3.5700685712117035e-06, + "loss": 0.9958, + "step": 2620 + }, + { + "epoch": 0.23637101501555666, + "grad_norm": 1.4023963259561245, + "learning_rate": 3.5697066136073227e-06, + "loss": 0.9611, + "step": 2621 + }, + { + "epoch": 0.23646119853902692, + "grad_norm": 1.3904531994254536, + "learning_rate": 3.5693445220669184e-06, + "loss": 0.9766, + "step": 2622 + }, + { + "epoch": 0.23655138206249718, + "grad_norm": 1.2101939718180486, + "learning_rate": 3.568982296621386e-06, + "loss": 0.9628, + "step": 2623 + }, + { + "epoch": 0.23664156558596744, + "grad_norm": 1.5095743746609693, + "learning_rate": 3.5686199373016325e-06, + "loss": 1.0436, + "step": 2624 + }, + { + "epoch": 0.2367317491094377, + "grad_norm": 1.7505400369353727, + "learning_rate": 3.568257444138577e-06, + "loss": 0.9232, + "step": 2625 + }, + { + "epoch": 0.23682193263290796, + "grad_norm": 1.5294712400846953, + "learning_rate": 3.5678948171631495e-06, + "loss": 0.9756, + "step": 2626 + }, + { + "epoch": 0.23691211615637822, + "grad_norm": 1.5412373159711148, + "learning_rate": 3.5675320564062908e-06, + "loss": 0.986, + "step": 2627 + }, + { + "epoch": 0.23700229967984848, + "grad_norm": 1.4968903415347692, + "learning_rate": 3.5671691618989533e-06, + "loss": 0.8905, + "step": 2628 + }, + { + "epoch": 0.23709248320331874, + "grad_norm": 1.7013177644910151, + "learning_rate": 3.5668061336721024e-06, + "loss": 0.9812, + "step": 2629 + }, + { + "epoch": 0.237182666726789, + "grad_norm": 1.5089940792900407, + "learning_rate": 3.5664429717567117e-06, + "loss": 0.9694, + "step": 2630 + }, + { + "epoch": 0.23727285025025927, + "grad_norm": 1.603316820949192, + "learning_rate": 3.56607967618377e-06, + "loss": 1.006, + "step": 2631 + }, + { + "epoch": 0.23736303377372953, + "grad_norm": 1.638046709188836, + "learning_rate": 3.5657162469842754e-06, + "loss": 0.9965, + "step": 2632 + }, + { + "epoch": 0.23745321729719981, + "grad_norm": 1.4004329318541835, + "learning_rate": 3.5653526841892374e-06, + "loss": 0.9772, + "step": 2633 + }, + { + "epoch": 0.23754340082067008, + "grad_norm": 1.4721076408918168, + "learning_rate": 3.564988987829676e-06, + "loss": 1.009, + "step": 2634 + }, + { + "epoch": 0.23763358434414034, + "grad_norm": 1.6370284850357617, + "learning_rate": 3.564625157936626e-06, + "loss": 0.9198, + "step": 2635 + }, + { + "epoch": 0.2377237678676106, + "grad_norm": 1.4492668699105549, + "learning_rate": 3.56426119454113e-06, + "loss": 1.013, + "step": 2636 + }, + { + "epoch": 0.23781395139108086, + "grad_norm": 1.8068710262712757, + "learning_rate": 3.5638970976742436e-06, + "loss": 1.0453, + "step": 2637 + }, + { + "epoch": 0.23790413491455112, + "grad_norm": 2.142700636915875, + "learning_rate": 3.5635328673670335e-06, + "loss": 0.9968, + "step": 2638 + }, + { + "epoch": 
0.23799431843802138, + "grad_norm": 1.3211327888229574, + "learning_rate": 3.5631685036505783e-06, + "loss": 1.069, + "step": 2639 + }, + { + "epoch": 0.23808450196149164, + "grad_norm": 1.2738657974151826, + "learning_rate": 3.562804006555966e-06, + "loss": 0.987, + "step": 2640 + }, + { + "epoch": 0.2381746854849619, + "grad_norm": 1.4853215462224358, + "learning_rate": 3.5624393761143e-06, + "loss": 0.9268, + "step": 2641 + }, + { + "epoch": 0.23826486900843216, + "grad_norm": 1.7617592817494803, + "learning_rate": 3.5620746123566906e-06, + "loss": 0.8923, + "step": 2642 + }, + { + "epoch": 0.23835505253190242, + "grad_norm": 1.6751588319086588, + "learning_rate": 3.5617097153142623e-06, + "loss": 0.9209, + "step": 2643 + }, + { + "epoch": 0.23844523605537268, + "grad_norm": 1.40129624030991, + "learning_rate": 3.5613446850181497e-06, + "loss": 1.0217, + "step": 2644 + }, + { + "epoch": 0.23853541957884294, + "grad_norm": 0.7051948387361204, + "learning_rate": 3.5609795214994996e-06, + "loss": 0.7601, + "step": 2645 + }, + { + "epoch": 0.2386256031023132, + "grad_norm": 1.2983914437141941, + "learning_rate": 3.560614224789469e-06, + "loss": 1.027, + "step": 2646 + }, + { + "epoch": 0.23871578662578347, + "grad_norm": 1.425595206050112, + "learning_rate": 3.5602487949192285e-06, + "loss": 0.9217, + "step": 2647 + }, + { + "epoch": 0.23880597014925373, + "grad_norm": 2.10486125497857, + "learning_rate": 3.559883231919957e-06, + "loss": 0.926, + "step": 2648 + }, + { + "epoch": 0.238896153672724, + "grad_norm": 1.4848675362432868, + "learning_rate": 3.5595175358228473e-06, + "loss": 0.9509, + "step": 2649 + }, + { + "epoch": 0.23898633719619425, + "grad_norm": 1.6361712357088531, + "learning_rate": 3.5591517066591027e-06, + "loss": 0.9811, + "step": 2650 + }, + { + "epoch": 0.2390765207196645, + "grad_norm": 1.571848180098411, + "learning_rate": 3.5587857444599364e-06, + "loss": 0.9687, + "step": 2651 + }, + { + "epoch": 0.23916670424313477, + "grad_norm": 1.453064209681694, + "learning_rate": 3.5584196492565766e-06, + "loss": 0.9423, + "step": 2652 + }, + { + "epoch": 0.23925688776660503, + "grad_norm": 1.622920980517131, + "learning_rate": 3.5580534210802587e-06, + "loss": 1.0353, + "step": 2653 + }, + { + "epoch": 0.2393470712900753, + "grad_norm": 1.4303228915409842, + "learning_rate": 3.557687059962232e-06, + "loss": 1.0212, + "step": 2654 + }, + { + "epoch": 0.23943725481354555, + "grad_norm": 1.3715355316243385, + "learning_rate": 3.5573205659337558e-06, + "loss": 0.9738, + "step": 2655 + }, + { + "epoch": 0.23952743833701584, + "grad_norm": 1.3565055114262925, + "learning_rate": 3.5569539390261025e-06, + "loss": 0.9582, + "step": 2656 + }, + { + "epoch": 0.2396176218604861, + "grad_norm": 1.1650051933794958, + "learning_rate": 3.5565871792705543e-06, + "loss": 0.9948, + "step": 2657 + }, + { + "epoch": 0.23970780538395636, + "grad_norm": 1.3173893729711785, + "learning_rate": 3.5562202866984045e-06, + "loss": 0.9703, + "step": 2658 + }, + { + "epoch": 0.23979798890742662, + "grad_norm": 1.4848166360901465, + "learning_rate": 3.5558532613409594e-06, + "loss": 0.9833, + "step": 2659 + }, + { + "epoch": 0.23988817243089688, + "grad_norm": 1.7047813830235854, + "learning_rate": 3.555486103229535e-06, + "loss": 0.9128, + "step": 2660 + }, + { + "epoch": 0.23997835595436715, + "grad_norm": 1.5577045858611138, + "learning_rate": 3.5551188123954595e-06, + "loss": 1.0023, + "step": 2661 + }, + { + "epoch": 0.2400685394778374, + "grad_norm": 1.4911445048285548, + "learning_rate": 
3.5547513888700715e-06, + "loss": 0.8706, + "step": 2662 + }, + { + "epoch": 0.24015872300130767, + "grad_norm": 1.250095840594631, + "learning_rate": 3.5543838326847224e-06, + "loss": 1.0089, + "step": 2663 + }, + { + "epoch": 0.24024890652477793, + "grad_norm": 1.4530514113958755, + "learning_rate": 3.5540161438707744e-06, + "loss": 0.9517, + "step": 2664 + }, + { + "epoch": 0.2403390900482482, + "grad_norm": 1.3193921826145911, + "learning_rate": 3.5536483224596e-06, + "loss": 1.1342, + "step": 2665 + }, + { + "epoch": 0.24042927357171845, + "grad_norm": 2.8692463231466596, + "learning_rate": 3.553280368482584e-06, + "loss": 0.9535, + "step": 2666 + }, + { + "epoch": 0.2405194570951887, + "grad_norm": 1.8279073210663497, + "learning_rate": 3.5529122819711227e-06, + "loss": 0.8063, + "step": 2667 + }, + { + "epoch": 0.24060964061865897, + "grad_norm": 1.333834414898794, + "learning_rate": 3.5525440629566223e-06, + "loss": 0.9256, + "step": 2668 + }, + { + "epoch": 0.24069982414212923, + "grad_norm": 1.3518956826612383, + "learning_rate": 3.552175711470502e-06, + "loss": 0.999, + "step": 2669 + }, + { + "epoch": 0.2407900076655995, + "grad_norm": 1.7836708884282209, + "learning_rate": 3.5518072275441912e-06, + "loss": 1.0248, + "step": 2670 + }, + { + "epoch": 0.24088019118906975, + "grad_norm": 1.4725174977634332, + "learning_rate": 3.551438611209131e-06, + "loss": 1.0056, + "step": 2671 + }, + { + "epoch": 0.24097037471254001, + "grad_norm": 1.4650426297283594, + "learning_rate": 3.551069862496774e-06, + "loss": 1.0702, + "step": 2672 + }, + { + "epoch": 0.24106055823601027, + "grad_norm": 0.8078972585693679, + "learning_rate": 3.5507009814385846e-06, + "loss": 0.8266, + "step": 2673 + }, + { + "epoch": 0.24115074175948054, + "grad_norm": 0.7059643319591868, + "learning_rate": 3.550331968066036e-06, + "loss": 0.8193, + "step": 2674 + }, + { + "epoch": 0.2412409252829508, + "grad_norm": 1.6739088148077654, + "learning_rate": 3.549962822410616e-06, + "loss": 0.9996, + "step": 2675 + }, + { + "epoch": 0.24133110880642106, + "grad_norm": 0.8990433681813915, + "learning_rate": 3.5495935445038217e-06, + "loss": 0.8036, + "step": 2676 + }, + { + "epoch": 0.24142129232989132, + "grad_norm": 1.429084859273116, + "learning_rate": 3.5492241343771612e-06, + "loss": 1.0188, + "step": 2677 + }, + { + "epoch": 0.24151147585336158, + "grad_norm": 1.4546773678665417, + "learning_rate": 3.548854592062156e-06, + "loss": 0.9787, + "step": 2678 + }, + { + "epoch": 0.24160165937683184, + "grad_norm": 1.426208515683927, + "learning_rate": 3.548484917590336e-06, + "loss": 0.951, + "step": 2679 + }, + { + "epoch": 0.24169184290030213, + "grad_norm": 1.7096453799799727, + "learning_rate": 3.5481151109932447e-06, + "loss": 0.8981, + "step": 2680 + }, + { + "epoch": 0.2417820264237724, + "grad_norm": 1.3414143512638939, + "learning_rate": 3.5477451723024364e-06, + "loss": 0.9966, + "step": 2681 + }, + { + "epoch": 0.24187220994724265, + "grad_norm": 0.7466812616721324, + "learning_rate": 3.5473751015494757e-06, + "loss": 0.7821, + "step": 2682 + }, + { + "epoch": 0.2419623934707129, + "grad_norm": 1.3959203901709027, + "learning_rate": 3.547004898765939e-06, + "loss": 0.9838, + "step": 2683 + }, + { + "epoch": 0.24205257699418317, + "grad_norm": 1.2639457016822944, + "learning_rate": 3.546634563983414e-06, + "loss": 0.997, + "step": 2684 + }, + { + "epoch": 0.24214276051765343, + "grad_norm": 1.4064793717559632, + "learning_rate": 3.5462640972335002e-06, + "loss": 0.9397, + "step": 2685 + }, + { + "epoch": 
0.2422329440411237, + "grad_norm": 1.3553107881436257, + "learning_rate": 3.5458934985478077e-06, + "loss": 0.9985, + "step": 2686 + }, + { + "epoch": 0.24232312756459395, + "grad_norm": 1.7922575294228984, + "learning_rate": 3.5455227679579577e-06, + "loss": 0.9557, + "step": 2687 + }, + { + "epoch": 0.24241331108806422, + "grad_norm": 1.3566997116494577, + "learning_rate": 3.545151905495584e-06, + "loss": 1.0737, + "step": 2688 + }, + { + "epoch": 0.24250349461153448, + "grad_norm": 2.02053589614666, + "learning_rate": 3.544780911192329e-06, + "loss": 0.9582, + "step": 2689 + }, + { + "epoch": 0.24259367813500474, + "grad_norm": 1.4647870268965482, + "learning_rate": 3.544409785079849e-06, + "loss": 0.9239, + "step": 2690 + }, + { + "epoch": 0.242683861658475, + "grad_norm": 1.3223062844272562, + "learning_rate": 3.5440385271898103e-06, + "loss": 0.9258, + "step": 2691 + }, + { + "epoch": 0.24277404518194526, + "grad_norm": 1.472257767654063, + "learning_rate": 3.5436671375538903e-06, + "loss": 0.9098, + "step": 2692 + }, + { + "epoch": 0.24286422870541552, + "grad_norm": 1.65973771148789, + "learning_rate": 3.543295616203779e-06, + "loss": 0.9793, + "step": 2693 + }, + { + "epoch": 0.24295441222888578, + "grad_norm": 1.5879540612568839, + "learning_rate": 3.542923963171176e-06, + "loss": 1.073, + "step": 2694 + }, + { + "epoch": 0.24304459575235604, + "grad_norm": 1.360098591414466, + "learning_rate": 3.542552178487793e-06, + "loss": 0.894, + "step": 2695 + }, + { + "epoch": 0.2431347792758263, + "grad_norm": 1.6061605554754512, + "learning_rate": 3.5421802621853523e-06, + "loss": 0.9689, + "step": 2696 + }, + { + "epoch": 0.24322496279929656, + "grad_norm": 1.6812757213567782, + "learning_rate": 3.5418082142955887e-06, + "loss": 0.9748, + "step": 2697 + }, + { + "epoch": 0.24331514632276682, + "grad_norm": 0.9629266202129654, + "learning_rate": 3.5414360348502463e-06, + "loss": 0.8339, + "step": 2698 + }, + { + "epoch": 0.24340532984623708, + "grad_norm": 1.3514820141062995, + "learning_rate": 3.5410637238810825e-06, + "loss": 0.8809, + "step": 2699 + }, + { + "epoch": 0.24349551336970734, + "grad_norm": 1.5166647579195078, + "learning_rate": 3.5406912814198635e-06, + "loss": 0.9252, + "step": 2700 + }, + { + "epoch": 0.2435856968931776, + "grad_norm": 1.827517416645349, + "learning_rate": 3.54031870749837e-06, + "loss": 0.9397, + "step": 2701 + }, + { + "epoch": 0.24367588041664787, + "grad_norm": 1.4803133357059568, + "learning_rate": 3.539946002148391e-06, + "loss": 0.9894, + "step": 2702 + }, + { + "epoch": 0.24376606394011813, + "grad_norm": 1.7344162394801488, + "learning_rate": 3.5395731654017277e-06, + "loss": 0.9731, + "step": 2703 + }, + { + "epoch": 0.24385624746358842, + "grad_norm": 1.9631072423035238, + "learning_rate": 3.5392001972901923e-06, + "loss": 1.0201, + "step": 2704 + }, + { + "epoch": 0.24394643098705868, + "grad_norm": 1.420153739289868, + "learning_rate": 3.5388270978456098e-06, + "loss": 0.9309, + "step": 2705 + }, + { + "epoch": 0.24403661451052894, + "grad_norm": 1.442885056378085, + "learning_rate": 3.5384538670998137e-06, + "loss": 0.9413, + "step": 2706 + }, + { + "epoch": 0.2441267980339992, + "grad_norm": 1.5318356386666163, + "learning_rate": 3.538080505084651e-06, + "loss": 0.9965, + "step": 2707 + }, + { + "epoch": 0.24421698155746946, + "grad_norm": 1.3361989261868974, + "learning_rate": 3.5377070118319788e-06, + "loss": 0.9902, + "step": 2708 + }, + { + "epoch": 0.24430716508093972, + "grad_norm": 2.6744589998301453, + "learning_rate": 
3.5373333873736657e-06, + "loss": 0.9347, + "step": 2709 + }, + { + "epoch": 0.24439734860440998, + "grad_norm": 1.3548358681992503, + "learning_rate": 3.536959631741591e-06, + "loss": 0.9838, + "step": 2710 + }, + { + "epoch": 0.24448753212788024, + "grad_norm": 1.0540306589093884, + "learning_rate": 3.536585744967646e-06, + "loss": 1.0024, + "step": 2711 + }, + { + "epoch": 0.2445777156513505, + "grad_norm": 1.5806879062284789, + "learning_rate": 3.5362117270837326e-06, + "loss": 0.9632, + "step": 2712 + }, + { + "epoch": 0.24466789917482076, + "grad_norm": 1.3162415172162276, + "learning_rate": 3.5358375781217634e-06, + "loss": 0.8795, + "step": 2713 + }, + { + "epoch": 0.24475808269829102, + "grad_norm": 1.4729206849528549, + "learning_rate": 3.535463298113664e-06, + "loss": 1.0075, + "step": 2714 + }, + { + "epoch": 0.24484826622176128, + "grad_norm": 1.526221133497286, + "learning_rate": 3.5350888870913697e-06, + "loss": 0.9402, + "step": 2715 + }, + { + "epoch": 0.24493844974523155, + "grad_norm": 1.5114663872043061, + "learning_rate": 3.5347143450868273e-06, + "loss": 0.9811, + "step": 2716 + }, + { + "epoch": 0.2450286332687018, + "grad_norm": 1.5796048619117982, + "learning_rate": 3.534339672131994e-06, + "loss": 0.9848, + "step": 2717 + }, + { + "epoch": 0.24511881679217207, + "grad_norm": 1.4789812571773961, + "learning_rate": 3.5339648682588397e-06, + "loss": 0.9065, + "step": 2718 + }, + { + "epoch": 0.24520900031564233, + "grad_norm": 1.6682376689458016, + "learning_rate": 3.533589933499345e-06, + "loss": 1.0207, + "step": 2719 + }, + { + "epoch": 0.2452991838391126, + "grad_norm": 1.5311234090988162, + "learning_rate": 3.533214867885501e-06, + "loss": 0.9139, + "step": 2720 + }, + { + "epoch": 0.24538936736258285, + "grad_norm": 3.5215442248276525, + "learning_rate": 3.53283967144931e-06, + "loss": 0.9207, + "step": 2721 + }, + { + "epoch": 0.2454795508860531, + "grad_norm": 1.4747045221011201, + "learning_rate": 3.532464344222787e-06, + "loss": 0.9886, + "step": 2722 + }, + { + "epoch": 0.24556973440952337, + "grad_norm": 1.5740332861001838, + "learning_rate": 3.532088886237956e-06, + "loss": 1.0132, + "step": 2723 + }, + { + "epoch": 0.24565991793299363, + "grad_norm": 0.9598187082195069, + "learning_rate": 3.5317132975268535e-06, + "loss": 0.7524, + "step": 2724 + }, + { + "epoch": 0.2457501014564639, + "grad_norm": 1.3709337624785216, + "learning_rate": 3.531337578121526e-06, + "loss": 0.9991, + "step": 2725 + }, + { + "epoch": 0.24584028497993415, + "grad_norm": 1.4942189713425769, + "learning_rate": 3.530961728054033e-06, + "loss": 1.0073, + "step": 2726 + }, + { + "epoch": 0.24593046850340441, + "grad_norm": 1.4274376849253676, + "learning_rate": 3.5305857473564435e-06, + "loss": 0.9439, + "step": 2727 + }, + { + "epoch": 0.2460206520268747, + "grad_norm": 1.6847954021947205, + "learning_rate": 3.5302096360608385e-06, + "loss": 0.901, + "step": 2728 + }, + { + "epoch": 0.24611083555034496, + "grad_norm": 1.4440728722410123, + "learning_rate": 3.5298333941993105e-06, + "loss": 1.0198, + "step": 2729 + }, + { + "epoch": 0.24620101907381522, + "grad_norm": 1.5404794434658864, + "learning_rate": 3.529457021803962e-06, + "loss": 0.9672, + "step": 2730 + }, + { + "epoch": 0.24629120259728549, + "grad_norm": 1.6819266365145258, + "learning_rate": 3.529080518906906e-06, + "loss": 0.9914, + "step": 2731 + }, + { + "epoch": 0.24638138612075575, + "grad_norm": 1.4074451877633556, + "learning_rate": 3.5287038855402696e-06, + "loss": 0.9562, + "step": 2732 + }, + { + "epoch": 
0.246471569644226, + "grad_norm": 1.6737495679904646, + "learning_rate": 3.528327121736188e-06, + "loss": 1.0105, + "step": 2733 + }, + { + "epoch": 0.24656175316769627, + "grad_norm": 1.5075112471785652, + "learning_rate": 3.52795022752681e-06, + "loss": 1.0581, + "step": 2734 + }, + { + "epoch": 0.24665193669116653, + "grad_norm": 1.5867569784032314, + "learning_rate": 3.5275732029442925e-06, + "loss": 1.0403, + "step": 2735 + }, + { + "epoch": 0.2467421202146368, + "grad_norm": 1.888620496206855, + "learning_rate": 3.5271960480208077e-06, + "loss": 0.9189, + "step": 2736 + }, + { + "epoch": 0.24683230373810705, + "grad_norm": 1.0296039738445764, + "learning_rate": 3.526818762788534e-06, + "loss": 0.8481, + "step": 2737 + }, + { + "epoch": 0.2469224872615773, + "grad_norm": 1.4571431707600917, + "learning_rate": 3.5264413472796653e-06, + "loss": 1.0066, + "step": 2738 + }, + { + "epoch": 0.24701267078504757, + "grad_norm": 1.6104959084317598, + "learning_rate": 3.5260638015264037e-06, + "loss": 0.9452, + "step": 2739 + }, + { + "epoch": 0.24710285430851783, + "grad_norm": 1.3010284830386991, + "learning_rate": 3.5256861255609644e-06, + "loss": 0.9538, + "step": 2740 + }, + { + "epoch": 0.2471930378319881, + "grad_norm": 1.6176958989608932, + "learning_rate": 3.5253083194155723e-06, + "loss": 0.9897, + "step": 2741 + }, + { + "epoch": 0.24728322135545835, + "grad_norm": 1.2191293077270537, + "learning_rate": 3.5249303831224637e-06, + "loss": 0.9944, + "step": 2742 + }, + { + "epoch": 0.24737340487892862, + "grad_norm": 1.3142221142947237, + "learning_rate": 3.524552316713887e-06, + "loss": 0.9673, + "step": 2743 + }, + { + "epoch": 0.24746358840239888, + "grad_norm": 1.6046117272543878, + "learning_rate": 3.5241741202220995e-06, + "loss": 1.0377, + "step": 2744 + }, + { + "epoch": 0.24755377192586914, + "grad_norm": 1.3320685686270237, + "learning_rate": 3.5237957936793724e-06, + "loss": 0.9593, + "step": 2745 + }, + { + "epoch": 0.2476439554493394, + "grad_norm": 1.2756288081125837, + "learning_rate": 3.523417337117986e-06, + "loss": 0.9327, + "step": 2746 + }, + { + "epoch": 0.24773413897280966, + "grad_norm": 1.2050163958055327, + "learning_rate": 3.523038750570232e-06, + "loss": 0.9205, + "step": 2747 + }, + { + "epoch": 0.24782432249627992, + "grad_norm": 1.4368121948056016, + "learning_rate": 3.522660034068414e-06, + "loss": 1.007, + "step": 2748 + }, + { + "epoch": 0.24791450601975018, + "grad_norm": 1.5871000491973117, + "learning_rate": 3.5222811876448464e-06, + "loss": 0.9957, + "step": 2749 + }, + { + "epoch": 0.24800468954322044, + "grad_norm": 1.1187186252733836, + "learning_rate": 3.521902211331854e-06, + "loss": 0.9923, + "step": 2750 + }, + { + "epoch": 0.2480948730666907, + "grad_norm": 1.5534715354136142, + "learning_rate": 3.5215231051617726e-06, + "loss": 0.9277, + "step": 2751 + }, + { + "epoch": 0.248185056590161, + "grad_norm": 16.75715598218648, + "learning_rate": 3.521143869166951e-06, + "loss": 0.8804, + "step": 2752 + }, + { + "epoch": 0.24827524011363125, + "grad_norm": 1.739947191511296, + "learning_rate": 3.5207645033797464e-06, + "loss": 0.999, + "step": 2753 + }, + { + "epoch": 0.2483654236371015, + "grad_norm": 1.4748212447390754, + "learning_rate": 3.5203850078325293e-06, + "loss": 0.9058, + "step": 2754 + }, + { + "epoch": 0.24845560716057177, + "grad_norm": 1.5147867145788785, + "learning_rate": 3.5200053825576797e-06, + "loss": 0.8926, + "step": 2755 + }, + { + "epoch": 0.24854579068404203, + "grad_norm": 1.2726164903512673, + "learning_rate": 
3.51962562758759e-06, + "loss": 0.9768, + "step": 2756 + }, + { + "epoch": 0.2486359742075123, + "grad_norm": 1.4597981952303303, + "learning_rate": 3.5192457429546627e-06, + "loss": 0.9506, + "step": 2757 + }, + { + "epoch": 0.24872615773098256, + "grad_norm": 1.822723167685498, + "learning_rate": 3.5188657286913115e-06, + "loss": 0.9444, + "step": 2758 + }, + { + "epoch": 0.24881634125445282, + "grad_norm": 1.7715061779425918, + "learning_rate": 3.518485584829961e-06, + "loss": 0.9387, + "step": 2759 + }, + { + "epoch": 0.24890652477792308, + "grad_norm": 1.251194335659674, + "learning_rate": 3.5181053114030485e-06, + "loss": 0.9915, + "step": 2760 + }, + { + "epoch": 0.24899670830139334, + "grad_norm": 1.494053816922713, + "learning_rate": 3.5177249084430198e-06, + "loss": 0.9787, + "step": 2761 + }, + { + "epoch": 0.2490868918248636, + "grad_norm": 1.821512440746042, + "learning_rate": 3.517344375982333e-06, + "loss": 1.0166, + "step": 2762 + }, + { + "epoch": 0.24917707534833386, + "grad_norm": 1.529574898772996, + "learning_rate": 3.5169637140534565e-06, + "loss": 1.0129, + "step": 2763 + }, + { + "epoch": 0.24926725887180412, + "grad_norm": 1.8501576433803923, + "learning_rate": 3.5165829226888733e-06, + "loss": 0.9471, + "step": 2764 + }, + { + "epoch": 0.24935744239527438, + "grad_norm": 1.3456524533502399, + "learning_rate": 3.516202001921072e-06, + "loss": 0.9987, + "step": 2765 + }, + { + "epoch": 0.24944762591874464, + "grad_norm": 1.3306471269952866, + "learning_rate": 3.515820951782555e-06, + "loss": 1.0058, + "step": 2766 + }, + { + "epoch": 0.2495378094422149, + "grad_norm": 1.5078956699657522, + "learning_rate": 3.5154397723058366e-06, + "loss": 0.9, + "step": 2767 + }, + { + "epoch": 0.24962799296568516, + "grad_norm": 1.5149490579532234, + "learning_rate": 3.5150584635234416e-06, + "loss": 1.0226, + "step": 2768 + }, + { + "epoch": 0.24971817648915542, + "grad_norm": 1.195681446455922, + "learning_rate": 3.5146770254679035e-06, + "loss": 1.0129, + "step": 2769 + }, + { + "epoch": 0.24980836001262569, + "grad_norm": 1.3967877678654539, + "learning_rate": 3.51429545817177e-06, + "loss": 0.8975, + "step": 2770 + }, + { + "epoch": 0.24989854353609595, + "grad_norm": 1.4753377366124525, + "learning_rate": 3.5139137616675985e-06, + "loss": 0.9817, + "step": 2771 + }, + { + "epoch": 0.2499887270595662, + "grad_norm": 1.3894711540195708, + "learning_rate": 3.513531935987957e-06, + "loss": 0.921, + "step": 2772 + }, + { + "epoch": 0.2500789105830365, + "grad_norm": 1.4155003103284856, + "learning_rate": 3.5131499811654253e-06, + "loss": 0.9934, + "step": 2773 + }, + { + "epoch": 0.25016909410650673, + "grad_norm": 1.5980074247061224, + "learning_rate": 3.512767897232594e-06, + "loss": 0.9346, + "step": 2774 + }, + { + "epoch": 0.250259277629977, + "grad_norm": 1.4940692161500064, + "learning_rate": 3.512385684222064e-06, + "loss": 0.9643, + "step": 2775 + }, + { + "epoch": 0.25034946115344725, + "grad_norm": 1.540701056841175, + "learning_rate": 3.512003342166449e-06, + "loss": 1.0702, + "step": 2776 + }, + { + "epoch": 0.25043964467691754, + "grad_norm": 1.2477390823146481, + "learning_rate": 3.511620871098371e-06, + "loss": 1.0156, + "step": 2777 + }, + { + "epoch": 0.25052982820038777, + "grad_norm": 1.415029374346795, + "learning_rate": 3.511238271050465e-06, + "loss": 0.9981, + "step": 2778 + }, + { + "epoch": 0.25062001172385806, + "grad_norm": 1.425777069503717, + "learning_rate": 3.5108555420553778e-06, + "loss": 0.8874, + "step": 2779 + }, + { + "epoch": 
0.2507101952473283, + "grad_norm": 1.6351850876176715, + "learning_rate": 3.510472684145764e-06, + "loss": 0.787, + "step": 2780 + }, + { + "epoch": 0.2508003787707986, + "grad_norm": 1.353731168508538, + "learning_rate": 3.5100896973542926e-06, + "loss": 0.8625, + "step": 2781 + }, + { + "epoch": 0.2508905622942688, + "grad_norm": 1.6778425863449207, + "learning_rate": 3.509706581713642e-06, + "loss": 1.0109, + "step": 2782 + }, + { + "epoch": 0.2509807458177391, + "grad_norm": 1.41431319401344, + "learning_rate": 3.509323337256501e-06, + "loss": 0.989, + "step": 2783 + }, + { + "epoch": 0.25107092934120934, + "grad_norm": 1.4172454754832382, + "learning_rate": 3.5089399640155703e-06, + "loss": 0.9225, + "step": 2784 + }, + { + "epoch": 0.2511611128646796, + "grad_norm": 1.1779960480803437, + "learning_rate": 3.508556462023562e-06, + "loss": 0.9607, + "step": 2785 + }, + { + "epoch": 0.25125129638814986, + "grad_norm": 1.6681862896964919, + "learning_rate": 3.5081728313131984e-06, + "loss": 0.958, + "step": 2786 + }, + { + "epoch": 0.25134147991162015, + "grad_norm": 1.528837600281756, + "learning_rate": 3.5077890719172125e-06, + "loss": 0.9632, + "step": 2787 + }, + { + "epoch": 0.25143166343509044, + "grad_norm": 1.3678551923587812, + "learning_rate": 3.5074051838683497e-06, + "loss": 0.9704, + "step": 2788 + }, + { + "epoch": 0.25152184695856067, + "grad_norm": 1.5105357506823052, + "learning_rate": 3.5070211671993643e-06, + "loss": 0.8953, + "step": 2789 + }, + { + "epoch": 0.25161203048203096, + "grad_norm": 1.4303369766618041, + "learning_rate": 3.5066370219430238e-06, + "loss": 0.9896, + "step": 2790 + }, + { + "epoch": 0.2517022140055012, + "grad_norm": 1.3887716651826612, + "learning_rate": 3.5062527481321044e-06, + "loss": 0.9712, + "step": 2791 + }, + { + "epoch": 0.2517923975289715, + "grad_norm": 1.5575542760120396, + "learning_rate": 3.5058683457993954e-06, + "loss": 0.9818, + "step": 2792 + }, + { + "epoch": 0.2518825810524417, + "grad_norm": 1.6274440432303119, + "learning_rate": 3.5054838149776963e-06, + "loss": 0.7846, + "step": 2793 + }, + { + "epoch": 0.251972764575912, + "grad_norm": 1.6110160386691887, + "learning_rate": 3.505099155699816e-06, + "loss": 1.007, + "step": 2794 + }, + { + "epoch": 0.25206294809938223, + "grad_norm": 1.4944834995169298, + "learning_rate": 3.5047143679985775e-06, + "loss": 0.8585, + "step": 2795 + }, + { + "epoch": 0.2521531316228525, + "grad_norm": 1.335219385430008, + "learning_rate": 3.5043294519068126e-06, + "loss": 0.9856, + "step": 2796 + }, + { + "epoch": 0.25224331514632276, + "grad_norm": 1.6362323630062148, + "learning_rate": 3.503944407457363e-06, + "loss": 0.8937, + "step": 2797 + }, + { + "epoch": 0.25233349866979304, + "grad_norm": 1.376490002389026, + "learning_rate": 3.5035592346830846e-06, + "loss": 0.9877, + "step": 2798 + }, + { + "epoch": 0.2524236821932633, + "grad_norm": 1.5306759750955992, + "learning_rate": 3.503173933616841e-06, + "loss": 1.0388, + "step": 2799 + }, + { + "epoch": 0.25251386571673357, + "grad_norm": 1.5174151987328515, + "learning_rate": 3.50278850429151e-06, + "loss": 1.0488, + "step": 2800 + }, + { + "epoch": 0.2526040492402038, + "grad_norm": 1.44385946279942, + "learning_rate": 3.502402946739977e-06, + "loss": 0.9965, + "step": 2801 + }, + { + "epoch": 0.2526942327636741, + "grad_norm": 1.9211186106819624, + "learning_rate": 3.5020172609951405e-06, + "loss": 0.9919, + "step": 2802 + }, + { + "epoch": 0.2527844162871443, + "grad_norm": 1.2176792625348725, + "learning_rate": 
3.501631447089909e-06, + "loss": 0.9841, + "step": 2803 + }, + { + "epoch": 0.2528745998106146, + "grad_norm": 1.9012887524148467, + "learning_rate": 3.501245505057203e-06, + "loss": 0.9462, + "step": 2804 + }, + { + "epoch": 0.25296478333408484, + "grad_norm": 3.9551691492226224, + "learning_rate": 3.5008594349299526e-06, + "loss": 1.0439, + "step": 2805 + }, + { + "epoch": 0.25305496685755513, + "grad_norm": 1.5186280849173317, + "learning_rate": 3.500473236741099e-06, + "loss": 0.9743, + "step": 2806 + }, + { + "epoch": 0.25314515038102536, + "grad_norm": 1.317104211167304, + "learning_rate": 3.500086910523596e-06, + "loss": 0.9309, + "step": 2807 + }, + { + "epoch": 0.25323533390449565, + "grad_norm": 1.2318515818328122, + "learning_rate": 3.499700456310406e-06, + "loss": 0.9224, + "step": 2808 + }, + { + "epoch": 0.2533255174279659, + "grad_norm": 1.2121996792768777, + "learning_rate": 3.499313874134504e-06, + "loss": 1.0216, + "step": 2809 + }, + { + "epoch": 0.2534157009514362, + "grad_norm": 1.504451187290224, + "learning_rate": 3.498927164028875e-06, + "loss": 0.846, + "step": 2810 + }, + { + "epoch": 0.25350588447490646, + "grad_norm": 1.1105165652534277, + "learning_rate": 3.498540326026515e-06, + "loss": 1.0093, + "step": 2811 + }, + { + "epoch": 0.2535960679983767, + "grad_norm": 1.3584371817662806, + "learning_rate": 3.4981533601604323e-06, + "loss": 1.0423, + "step": 2812 + }, + { + "epoch": 0.253686251521847, + "grad_norm": 1.4447238857630755, + "learning_rate": 3.4977662664636443e-06, + "loss": 0.9812, + "step": 2813 + }, + { + "epoch": 0.2537764350453172, + "grad_norm": 1.5937224741035971, + "learning_rate": 3.497379044969179e-06, + "loss": 0.9369, + "step": 2814 + }, + { + "epoch": 0.2538666185687875, + "grad_norm": 1.5490960253854078, + "learning_rate": 3.4969916957100777e-06, + "loss": 0.965, + "step": 2815 + }, + { + "epoch": 0.25395680209225774, + "grad_norm": 1.3225976253885652, + "learning_rate": 3.4966042187193905e-06, + "loss": 0.9667, + "step": 2816 + }, + { + "epoch": 0.254046985615728, + "grad_norm": 1.3826030421702111, + "learning_rate": 3.496216614030179e-06, + "loss": 0.976, + "step": 2817 + }, + { + "epoch": 0.25413716913919826, + "grad_norm": 1.5990305914275584, + "learning_rate": 3.495828881675516e-06, + "loss": 0.8738, + "step": 2818 + }, + { + "epoch": 0.25422735266266855, + "grad_norm": 1.8384958825250726, + "learning_rate": 3.4954410216884845e-06, + "loss": 1.0475, + "step": 2819 + }, + { + "epoch": 0.2543175361861388, + "grad_norm": 1.555727740060335, + "learning_rate": 3.49505303410218e-06, + "loss": 0.9873, + "step": 2820 + }, + { + "epoch": 0.25440771970960907, + "grad_norm": 1.23710860796496, + "learning_rate": 3.4946649189497067e-06, + "loss": 1.0035, + "step": 2821 + }, + { + "epoch": 0.2544979032330793, + "grad_norm": 2.0581502418580384, + "learning_rate": 3.4942766762641805e-06, + "loss": 0.9536, + "step": 2822 + }, + { + "epoch": 0.2545880867565496, + "grad_norm": 1.9021583795801758, + "learning_rate": 3.49388830607873e-06, + "loss": 0.9944, + "step": 2823 + }, + { + "epoch": 0.2546782702800198, + "grad_norm": 1.1556318022309595, + "learning_rate": 3.493499808426491e-06, + "loss": 0.9351, + "step": 2824 + }, + { + "epoch": 0.2547684538034901, + "grad_norm": 1.816778526303717, + "learning_rate": 3.493111183340614e-06, + "loss": 1.0183, + "step": 2825 + }, + { + "epoch": 0.25485863732696035, + "grad_norm": 1.5480828241818114, + "learning_rate": 3.4927224308542576e-06, + "loss": 0.8804, + "step": 2826 + }, + { + "epoch": 0.25494882085043064, + 
"grad_norm": 1.2560536662924924, + "learning_rate": 3.4923335510005923e-06, + "loss": 1.0501, + "step": 2827 + }, + { + "epoch": 0.25503900437390087, + "grad_norm": 1.5455550910759477, + "learning_rate": 3.4919445438128e-06, + "loss": 0.9952, + "step": 2828 + }, + { + "epoch": 0.25512918789737116, + "grad_norm": 1.3458437908384386, + "learning_rate": 3.491555409324073e-06, + "loss": 1.052, + "step": 2829 + }, + { + "epoch": 0.2552193714208414, + "grad_norm": 0.7458741828628853, + "learning_rate": 3.4911661475676136e-06, + "loss": 0.7879, + "step": 2830 + }, + { + "epoch": 0.2553095549443117, + "grad_norm": 1.3508892256657599, + "learning_rate": 3.490776758576637e-06, + "loss": 0.9493, + "step": 2831 + }, + { + "epoch": 0.2553997384677819, + "grad_norm": 1.3567023476603461, + "learning_rate": 3.4903872423843668e-06, + "loss": 1.0244, + "step": 2832 + }, + { + "epoch": 0.2554899219912522, + "grad_norm": 1.4463713195611534, + "learning_rate": 3.4899975990240396e-06, + "loss": 0.9364, + "step": 2833 + }, + { + "epoch": 0.25558010551472243, + "grad_norm": 1.6616089691786928, + "learning_rate": 3.489607828528901e-06, + "loss": 0.9961, + "step": 2834 + }, + { + "epoch": 0.2556702890381927, + "grad_norm": 0.7277510570962685, + "learning_rate": 3.4892179309322093e-06, + "loss": 0.8817, + "step": 2835 + }, + { + "epoch": 0.255760472561663, + "grad_norm": 1.457222933599179, + "learning_rate": 3.488827906267232e-06, + "loss": 0.8903, + "step": 2836 + }, + { + "epoch": 0.25585065608513324, + "grad_norm": 1.8672624517105556, + "learning_rate": 3.4884377545672485e-06, + "loss": 1.0111, + "step": 2837 + }, + { + "epoch": 0.25594083960860353, + "grad_norm": 1.3449106305151175, + "learning_rate": 3.4880474758655485e-06, + "loss": 0.9505, + "step": 2838 + }, + { + "epoch": 0.25603102313207377, + "grad_norm": 1.347816833658944, + "learning_rate": 3.487657070195433e-06, + "loss": 1.0378, + "step": 2839 + }, + { + "epoch": 0.25612120665554405, + "grad_norm": 1.3787431917688213, + "learning_rate": 3.487266537590213e-06, + "loss": 1.0454, + "step": 2840 + }, + { + "epoch": 0.2562113901790143, + "grad_norm": 1.4743250288505552, + "learning_rate": 3.4868758780832116e-06, + "loss": 0.8896, + "step": 2841 + }, + { + "epoch": 0.2563015737024846, + "grad_norm": 1.5239824934126611, + "learning_rate": 3.486485091707762e-06, + "loss": 0.9789, + "step": 2842 + }, + { + "epoch": 0.2563917572259548, + "grad_norm": 1.5372222897694603, + "learning_rate": 3.4860941784972077e-06, + "loss": 0.8504, + "step": 2843 + }, + { + "epoch": 0.2564819407494251, + "grad_norm": 1.9109777388621023, + "learning_rate": 3.485703138484904e-06, + "loss": 1.073, + "step": 2844 + }, + { + "epoch": 0.25657212427289533, + "grad_norm": 1.744677897677473, + "learning_rate": 3.485311971704216e-06, + "loss": 1.0206, + "step": 2845 + }, + { + "epoch": 0.2566623077963656, + "grad_norm": 1.345156886795962, + "learning_rate": 3.484920678188521e-06, + "loss": 1.0025, + "step": 2846 + }, + { + "epoch": 0.25675249131983585, + "grad_norm": 1.6690969470822106, + "learning_rate": 3.4845292579712063e-06, + "loss": 0.9894, + "step": 2847 + }, + { + "epoch": 0.25684267484330614, + "grad_norm": 1.5040719869866481, + "learning_rate": 3.484137711085669e-06, + "loss": 0.8566, + "step": 2848 + }, + { + "epoch": 0.2569328583667764, + "grad_norm": 1.2944936835502148, + "learning_rate": 3.4837460375653198e-06, + "loss": 0.9657, + "step": 2849 + }, + { + "epoch": 0.25702304189024666, + "grad_norm": 1.3410640753358858, + "learning_rate": 3.483354237443576e-06, + "loss": 
1.0062, + "step": 2850 + }, + { + "epoch": 0.2571132254137169, + "grad_norm": 1.2520467214367532, + "learning_rate": 3.48296231075387e-06, + "loss": 0.9177, + "step": 2851 + }, + { + "epoch": 0.2572034089371872, + "grad_norm": 1.3828076292480205, + "learning_rate": 3.4825702575296433e-06, + "loss": 0.9885, + "step": 2852 + }, + { + "epoch": 0.2572935924606574, + "grad_norm": 0.8818216755707521, + "learning_rate": 3.482178077804347e-06, + "loss": 0.8363, + "step": 2853 + }, + { + "epoch": 0.2573837759841277, + "grad_norm": 1.4194612241194067, + "learning_rate": 3.4817857716114443e-06, + "loss": 0.9312, + "step": 2854 + }, + { + "epoch": 0.25747395950759794, + "grad_norm": 2.1108551483693665, + "learning_rate": 3.4813933389844094e-06, + "loss": 0.9263, + "step": 2855 + }, + { + "epoch": 0.2575641430310682, + "grad_norm": 1.6201001166972857, + "learning_rate": 3.4810007799567264e-06, + "loss": 0.9776, + "step": 2856 + }, + { + "epoch": 0.25765432655453846, + "grad_norm": 1.8762907671581646, + "learning_rate": 3.480608094561891e-06, + "loss": 0.9928, + "step": 2857 + }, + { + "epoch": 0.25774451007800875, + "grad_norm": 1.289024352462889, + "learning_rate": 3.4802152828334083e-06, + "loss": 0.9973, + "step": 2858 + }, + { + "epoch": 0.25783469360147904, + "grad_norm": 1.4129403651612527, + "learning_rate": 3.479822344804796e-06, + "loss": 0.9171, + "step": 2859 + }, + { + "epoch": 0.25792487712494927, + "grad_norm": 1.7324057732041318, + "learning_rate": 3.479429280509582e-06, + "loss": 1.0261, + "step": 2860 + }, + { + "epoch": 0.25801506064841956, + "grad_norm": 1.389739150881773, + "learning_rate": 3.4790360899813038e-06, + "loss": 1.0172, + "step": 2861 + }, + { + "epoch": 0.2581052441718898, + "grad_norm": 2.0852296463993256, + "learning_rate": 3.4786427732535115e-06, + "loss": 0.9037, + "step": 2862 + }, + { + "epoch": 0.2581954276953601, + "grad_norm": 1.4407125318170577, + "learning_rate": 3.478249330359764e-06, + "loss": 1.0189, + "step": 2863 + }, + { + "epoch": 0.2582856112188303, + "grad_norm": 1.5813034832620707, + "learning_rate": 3.4778557613336333e-06, + "loss": 0.9102, + "step": 2864 + }, + { + "epoch": 0.2583757947423006, + "grad_norm": 1.4229762245921027, + "learning_rate": 3.4774620662087004e-06, + "loss": 0.9855, + "step": 2865 + }, + { + "epoch": 0.25846597826577083, + "grad_norm": 1.5799823067072798, + "learning_rate": 3.477068245018557e-06, + "loss": 0.9778, + "step": 2866 + }, + { + "epoch": 0.2585561617892411, + "grad_norm": 1.441104516182813, + "learning_rate": 3.476674297796807e-06, + "loss": 0.9787, + "step": 2867 + }, + { + "epoch": 0.25864634531271136, + "grad_norm": 1.4932969846871411, + "learning_rate": 3.4762802245770627e-06, + "loss": 1.0118, + "step": 2868 + }, + { + "epoch": 0.25873652883618165, + "grad_norm": 1.2655391899500539, + "learning_rate": 3.4758860253929497e-06, + "loss": 0.9267, + "step": 2869 + }, + { + "epoch": 0.2588267123596519, + "grad_norm": 1.5013355031943578, + "learning_rate": 3.4754917002781038e-06, + "loss": 0.9891, + "step": 2870 + }, + { + "epoch": 0.25891689588312217, + "grad_norm": 1.5493240236034378, + "learning_rate": 3.475097249266169e-06, + "loss": 1.017, + "step": 2871 + }, + { + "epoch": 0.2590070794065924, + "grad_norm": 1.4472566467912726, + "learning_rate": 3.4747026723908044e-06, + "loss": 0.9595, + "step": 2872 + }, + { + "epoch": 0.2590972629300627, + "grad_norm": 1.335546737541466, + "learning_rate": 3.474307969685676e-06, + "loss": 0.9511, + "step": 2873 + }, + { + "epoch": 0.2591874464535329, + "grad_norm": 
1.4141907607442332, + "learning_rate": 3.473913141184462e-06, + "loss": 0.913, + "step": 2874 + }, + { + "epoch": 0.2592776299770032, + "grad_norm": 1.3038676992743545, + "learning_rate": 3.4735181869208523e-06, + "loss": 0.8427, + "step": 2875 + }, + { + "epoch": 0.25936781350047344, + "grad_norm": 1.8621412584581296, + "learning_rate": 3.473123106928546e-06, + "loss": 0.8561, + "step": 2876 + }, + { + "epoch": 0.25945799702394373, + "grad_norm": 1.4903017281326771, + "learning_rate": 3.4727279012412533e-06, + "loss": 0.9305, + "step": 2877 + }, + { + "epoch": 0.25954818054741396, + "grad_norm": 1.0418725128235675, + "learning_rate": 3.4723325698926953e-06, + "loss": 0.8064, + "step": 2878 + }, + { + "epoch": 0.25963836407088425, + "grad_norm": 1.5485826564772902, + "learning_rate": 3.4719371129166045e-06, + "loss": 0.9804, + "step": 2879 + }, + { + "epoch": 0.2597285475943545, + "grad_norm": 1.4772242117964776, + "learning_rate": 3.471541530346723e-06, + "loss": 0.8935, + "step": 2880 + }, + { + "epoch": 0.2598187311178248, + "grad_norm": 1.760172027503633, + "learning_rate": 3.4711458222168037e-06, + "loss": 1.0301, + "step": 2881 + }, + { + "epoch": 0.259908914641295, + "grad_norm": 1.3731722821935992, + "learning_rate": 3.4707499885606114e-06, + "loss": 1.0106, + "step": 2882 + }, + { + "epoch": 0.2599990981647653, + "grad_norm": 1.3680769288813412, + "learning_rate": 3.4703540294119204e-06, + "loss": 1.0074, + "step": 2883 + }, + { + "epoch": 0.2600892816882356, + "grad_norm": 1.4342284084036, + "learning_rate": 3.4699579448045163e-06, + "loss": 1.0233, + "step": 2884 + }, + { + "epoch": 0.2601794652117058, + "grad_norm": 1.2098764015943984, + "learning_rate": 3.4695617347721947e-06, + "loss": 0.8621, + "step": 2885 + }, + { + "epoch": 0.2602696487351761, + "grad_norm": 1.6342957253130441, + "learning_rate": 3.469165399348763e-06, + "loss": 0.9329, + "step": 2886 + }, + { + "epoch": 0.26035983225864634, + "grad_norm": 1.501387987108143, + "learning_rate": 3.4687689385680384e-06, + "loss": 0.9535, + "step": 2887 + }, + { + "epoch": 0.26045001578211663, + "grad_norm": 1.3714138428998428, + "learning_rate": 3.4683723524638494e-06, + "loss": 0.9479, + "step": 2888 + }, + { + "epoch": 0.26054019930558686, + "grad_norm": 1.6427901014426862, + "learning_rate": 3.4679756410700354e-06, + "loss": 1.0393, + "step": 2889 + }, + { + "epoch": 0.26063038282905715, + "grad_norm": 1.6857207066088076, + "learning_rate": 3.4675788044204445e-06, + "loss": 0.9712, + "step": 2890 + }, + { + "epoch": 0.2607205663525274, + "grad_norm": 1.542229506704134, + "learning_rate": 3.467181842548938e-06, + "loss": 0.9069, + "step": 2891 + }, + { + "epoch": 0.26081074987599767, + "grad_norm": 1.2960480500868208, + "learning_rate": 3.466784755489387e-06, + "loss": 0.9626, + "step": 2892 + }, + { + "epoch": 0.2609009333994679, + "grad_norm": 1.37143061920138, + "learning_rate": 3.4663875432756726e-06, + "loss": 1.086, + "step": 2893 + }, + { + "epoch": 0.2609911169229382, + "grad_norm": 1.5610980801851118, + "learning_rate": 3.465990205941687e-06, + "loss": 1.1427, + "step": 2894 + }, + { + "epoch": 0.2610813004464084, + "grad_norm": 1.447084155526858, + "learning_rate": 3.465592743521335e-06, + "loss": 0.9542, + "step": 2895 + }, + { + "epoch": 0.2611714839698787, + "grad_norm": 1.4830557331254117, + "learning_rate": 3.465195156048528e-06, + "loss": 1.0276, + "step": 2896 + }, + { + "epoch": 0.26126166749334895, + "grad_norm": 0.6253630537332769, + "learning_rate": 3.464797443557191e-06, + "loss": 0.8102, + "step": 
2897 + }, + { + "epoch": 0.26135185101681924, + "grad_norm": 1.4755639630643729, + "learning_rate": 3.46439960608126e-06, + "loss": 0.9479, + "step": 2898 + }, + { + "epoch": 0.26144203454028947, + "grad_norm": 1.4724667374071259, + "learning_rate": 3.4640016436546797e-06, + "loss": 0.9157, + "step": 2899 + }, + { + "epoch": 0.26153221806375976, + "grad_norm": 1.5868996393575323, + "learning_rate": 3.4636035563114065e-06, + "loss": 0.9533, + "step": 2900 + }, + { + "epoch": 0.26162240158723, + "grad_norm": 1.2816426675647108, + "learning_rate": 3.4632053440854085e-06, + "loss": 0.908, + "step": 2901 + }, + { + "epoch": 0.2617125851107003, + "grad_norm": 1.5192950145988395, + "learning_rate": 3.462807007010662e-06, + "loss": 1.026, + "step": 2902 + }, + { + "epoch": 0.2618027686341705, + "grad_norm": 1.42168081445724, + "learning_rate": 3.462408545121155e-06, + "loss": 1.0082, + "step": 2903 + }, + { + "epoch": 0.2618929521576408, + "grad_norm": 1.3959732080473677, + "learning_rate": 3.4620099584508883e-06, + "loss": 1.0243, + "step": 2904 + }, + { + "epoch": 0.26198313568111103, + "grad_norm": 1.4312602080268475, + "learning_rate": 3.46161124703387e-06, + "loss": 0.9318, + "step": 2905 + }, + { + "epoch": 0.2620733192045813, + "grad_norm": 1.4498251908260724, + "learning_rate": 3.461212410904122e-06, + "loss": 0.9656, + "step": 2906 + }, + { + "epoch": 0.2621635027280516, + "grad_norm": 1.475844758799243, + "learning_rate": 3.4608134500956726e-06, + "loss": 1.0253, + "step": 2907 + }, + { + "epoch": 0.26225368625152184, + "grad_norm": 1.2007248457577746, + "learning_rate": 3.4604143646425655e-06, + "loss": 0.9558, + "step": 2908 + }, + { + "epoch": 0.26234386977499213, + "grad_norm": 1.220619088904253, + "learning_rate": 3.460015154578852e-06, + "loss": 1.0428, + "step": 2909 + }, + { + "epoch": 0.26243405329846237, + "grad_norm": 1.4377118659417139, + "learning_rate": 3.459615819938595e-06, + "loss": 0.9425, + "step": 2910 + }, + { + "epoch": 0.26252423682193265, + "grad_norm": 1.6359233516748062, + "learning_rate": 3.4592163607558684e-06, + "loss": 0.9328, + "step": 2911 + }, + { + "epoch": 0.2626144203454029, + "grad_norm": 1.375470861095094, + "learning_rate": 3.4588167770647553e-06, + "loss": 0.9717, + "step": 2912 + }, + { + "epoch": 0.2627046038688732, + "grad_norm": 1.7097381150331388, + "learning_rate": 3.458417068899351e-06, + "loss": 1.0708, + "step": 2913 + }, + { + "epoch": 0.2627947873923434, + "grad_norm": 1.42681821973315, + "learning_rate": 3.4580172362937612e-06, + "loss": 0.9442, + "step": 2914 + }, + { + "epoch": 0.2628849709158137, + "grad_norm": 1.1462379723669773, + "learning_rate": 3.457617279282101e-06, + "loss": 0.9353, + "step": 2915 + }, + { + "epoch": 0.26297515443928393, + "grad_norm": 1.6684679628839645, + "learning_rate": 3.4572171978984975e-06, + "loss": 0.9461, + "step": 2916 + }, + { + "epoch": 0.2630653379627542, + "grad_norm": 2.0223355035643937, + "learning_rate": 3.456816992177088e-06, + "loss": 0.9736, + "step": 2917 + }, + { + "epoch": 0.26315552148622445, + "grad_norm": 1.239761094028492, + "learning_rate": 3.4564166621520193e-06, + "loss": 0.9433, + "step": 2918 + }, + { + "epoch": 0.26324570500969474, + "grad_norm": 1.3534094038068245, + "learning_rate": 3.4560162078574507e-06, + "loss": 0.9702, + "step": 2919 + }, + { + "epoch": 0.263335888533165, + "grad_norm": 1.7775678891551288, + "learning_rate": 3.455615629327551e-06, + "loss": 0.9771, + "step": 2920 + }, + { + "epoch": 0.26342607205663526, + "grad_norm": 2.207213159676921, + 
"learning_rate": 3.4552149265964994e-06, + "loss": 0.9511, + "step": 2921 + }, + { + "epoch": 0.2635162555801055, + "grad_norm": 1.3552047076556735, + "learning_rate": 3.4548140996984866e-06, + "loss": 0.9225, + "step": 2922 + }, + { + "epoch": 0.2636064391035758, + "grad_norm": 1.2813564349370132, + "learning_rate": 3.4544131486677124e-06, + "loss": 0.9977, + "step": 2923 + }, + { + "epoch": 0.263696622627046, + "grad_norm": 1.386463410415735, + "learning_rate": 3.454012073538389e-06, + "loss": 0.9125, + "step": 2924 + }, + { + "epoch": 0.2637868061505163, + "grad_norm": 1.2576670592198154, + "learning_rate": 3.453610874344738e-06, + "loss": 0.9717, + "step": 2925 + }, + { + "epoch": 0.26387698967398654, + "grad_norm": 1.4709877660723663, + "learning_rate": 3.453209551120993e-06, + "loss": 1.0145, + "step": 2926 + }, + { + "epoch": 0.26396717319745683, + "grad_norm": 1.5905187425177651, + "learning_rate": 3.452808103901395e-06, + "loss": 0.9456, + "step": 2927 + }, + { + "epoch": 0.26405735672092706, + "grad_norm": 1.4690108372007447, + "learning_rate": 3.4524065327202e-06, + "loss": 1.0394, + "step": 2928 + }, + { + "epoch": 0.26414754024439735, + "grad_norm": 0.8003598476764717, + "learning_rate": 3.4520048376116702e-06, + "loss": 0.7577, + "step": 2929 + }, + { + "epoch": 0.26423772376786764, + "grad_norm": 1.3220716866694457, + "learning_rate": 3.4516030186100817e-06, + "loss": 1.016, + "step": 2930 + }, + { + "epoch": 0.26432790729133787, + "grad_norm": 1.2174704265723448, + "learning_rate": 3.4512010757497197e-06, + "loss": 0.8877, + "step": 2931 + }, + { + "epoch": 0.26441809081480816, + "grad_norm": 1.5462595167923379, + "learning_rate": 3.4507990090648804e-06, + "loss": 0.8897, + "step": 2932 + }, + { + "epoch": 0.2645082743382784, + "grad_norm": 1.4515864933851927, + "learning_rate": 3.4503968185898696e-06, + "loss": 0.9918, + "step": 2933 + }, + { + "epoch": 0.2645984578617487, + "grad_norm": 1.3719984151119275, + "learning_rate": 3.4499945043590047e-06, + "loss": 0.9773, + "step": 2934 + }, + { + "epoch": 0.2646886413852189, + "grad_norm": 1.9750249257506665, + "learning_rate": 3.4495920664066137e-06, + "loss": 0.9877, + "step": 2935 + }, + { + "epoch": 0.2647788249086892, + "grad_norm": 1.8303457612006109, + "learning_rate": 3.449189504767035e-06, + "loss": 0.9649, + "step": 2936 + }, + { + "epoch": 0.26486900843215944, + "grad_norm": 1.4666065239857313, + "learning_rate": 3.4487868194746163e-06, + "loss": 0.9879, + "step": 2937 + }, + { + "epoch": 0.2649591919556297, + "grad_norm": 1.4285362069011378, + "learning_rate": 3.4483840105637175e-06, + "loss": 1.0115, + "step": 2938 + }, + { + "epoch": 0.26504937547909996, + "grad_norm": 1.5072198206983227, + "learning_rate": 3.4479810780687097e-06, + "loss": 0.9626, + "step": 2939 + }, + { + "epoch": 0.26513955900257025, + "grad_norm": 1.3338096135553632, + "learning_rate": 3.4475780220239714e-06, + "loss": 0.9651, + "step": 2940 + }, + { + "epoch": 0.2652297425260405, + "grad_norm": 1.3284922821228482, + "learning_rate": 3.4471748424638948e-06, + "loss": 0.9586, + "step": 2941 + }, + { + "epoch": 0.26531992604951077, + "grad_norm": 0.8820092589590864, + "learning_rate": 3.4467715394228803e-06, + "loss": 0.8038, + "step": 2942 + }, + { + "epoch": 0.265410109572981, + "grad_norm": 1.4354448139474223, + "learning_rate": 3.4463681129353413e-06, + "loss": 0.9771, + "step": 2943 + }, + { + "epoch": 0.2655002930964513, + "grad_norm": 1.4587362096244163, + "learning_rate": 3.4459645630357e-06, + "loss": 0.9662, + "step": 2944 + }, + { + 
"epoch": 0.2655904766199215, + "grad_norm": 1.5325015655808285, + "learning_rate": 3.4455608897583884e-06, + "loss": 1.0057, + "step": 2945 + }, + { + "epoch": 0.2656806601433918, + "grad_norm": 1.8081233358260143, + "learning_rate": 3.4451570931378514e-06, + "loss": 0.9752, + "step": 2946 + }, + { + "epoch": 0.26577084366686204, + "grad_norm": 1.2691158143140076, + "learning_rate": 3.444753173208543e-06, + "loss": 0.981, + "step": 2947 + }, + { + "epoch": 0.26586102719033233, + "grad_norm": 1.522303545574727, + "learning_rate": 3.444349130004927e-06, + "loss": 0.9665, + "step": 2948 + }, + { + "epoch": 0.26595121071380257, + "grad_norm": 1.7274655013221123, + "learning_rate": 3.4439449635614794e-06, + "loss": 0.8952, + "step": 2949 + }, + { + "epoch": 0.26604139423727285, + "grad_norm": 1.3450153957606095, + "learning_rate": 3.4435406739126854e-06, + "loss": 0.9981, + "step": 2950 + }, + { + "epoch": 0.2661315777607431, + "grad_norm": 1.7781357562069975, + "learning_rate": 3.443136261093042e-06, + "loss": 0.7874, + "step": 2951 + }, + { + "epoch": 0.2662217612842134, + "grad_norm": 1.370247952236127, + "learning_rate": 3.4427317251370553e-06, + "loss": 0.9216, + "step": 2952 + }, + { + "epoch": 0.2663119448076836, + "grad_norm": 1.5482328216127001, + "learning_rate": 3.4423270660792422e-06, + "loss": 0.8927, + "step": 2953 + }, + { + "epoch": 0.2664021283311539, + "grad_norm": 1.5046544975551495, + "learning_rate": 3.4419222839541314e-06, + "loss": 0.8635, + "step": 2954 + }, + { + "epoch": 0.2664923118546242, + "grad_norm": 1.330128448923517, + "learning_rate": 3.4415173787962607e-06, + "loss": 0.9443, + "step": 2955 + }, + { + "epoch": 0.2665824953780944, + "grad_norm": 2.627287639888789, + "learning_rate": 3.4411123506401783e-06, + "loss": 0.8294, + "step": 2956 + }, + { + "epoch": 0.2666726789015647, + "grad_norm": 1.6426623092651373, + "learning_rate": 3.440707199520444e-06, + "loss": 0.83, + "step": 2957 + }, + { + "epoch": 0.26676286242503494, + "grad_norm": 1.3769149883210552, + "learning_rate": 3.440301925471628e-06, + "loss": 0.9319, + "step": 2958 + }, + { + "epoch": 0.26685304594850523, + "grad_norm": 1.3120577384961547, + "learning_rate": 3.43989652852831e-06, + "loss": 1.0079, + "step": 2959 + }, + { + "epoch": 0.26694322947197546, + "grad_norm": 1.323584930963354, + "learning_rate": 3.4394910087250804e-06, + "loss": 1.0297, + "step": 2960 + }, + { + "epoch": 0.26703341299544575, + "grad_norm": 1.2327004675490383, + "learning_rate": 3.4390853660965405e-06, + "loss": 0.984, + "step": 2961 + }, + { + "epoch": 0.267123596518916, + "grad_norm": 1.5765578543043544, + "learning_rate": 3.438679600677302e-06, + "loss": 1.041, + "step": 2962 + }, + { + "epoch": 0.2672137800423863, + "grad_norm": 1.1430621303916717, + "learning_rate": 3.4382737125019874e-06, + "loss": 0.9256, + "step": 2963 + }, + { + "epoch": 0.2673039635658565, + "grad_norm": 1.3293230375349074, + "learning_rate": 3.4378677016052294e-06, + "loss": 0.9402, + "step": 2964 + }, + { + "epoch": 0.2673941470893268, + "grad_norm": 1.5327486761806373, + "learning_rate": 3.43746156802167e-06, + "loss": 0.9885, + "step": 2965 + }, + { + "epoch": 0.267484330612797, + "grad_norm": 1.312881232707311, + "learning_rate": 3.4370553117859643e-06, + "loss": 0.9039, + "step": 2966 + }, + { + "epoch": 0.2675745141362673, + "grad_norm": 1.363578436083502, + "learning_rate": 3.4366489329327754e-06, + "loss": 0.9979, + "step": 2967 + }, + { + "epoch": 0.26766469765973755, + "grad_norm": 1.669416313160051, + "learning_rate": 
3.4362424314967777e-06, + "loss": 0.9878, + "step": 2968 + }, + { + "epoch": 0.26775488118320784, + "grad_norm": 1.5311979168189138, + "learning_rate": 3.4358358075126567e-06, + "loss": 0.9801, + "step": 2969 + }, + { + "epoch": 0.26784506470667807, + "grad_norm": 1.4295488300716432, + "learning_rate": 3.4354290610151077e-06, + "loss": 0.9911, + "step": 2970 + }, + { + "epoch": 0.26793524823014836, + "grad_norm": 1.243547091394216, + "learning_rate": 3.4350221920388354e-06, + "loss": 0.9705, + "step": 2971 + }, + { + "epoch": 0.2680254317536186, + "grad_norm": 1.3294892428889364, + "learning_rate": 3.4346152006185574e-06, + "loss": 1.0607, + "step": 2972 + }, + { + "epoch": 0.2681156152770889, + "grad_norm": 1.648970748202382, + "learning_rate": 3.4342080867890006e-06, + "loss": 1.0651, + "step": 2973 + }, + { + "epoch": 0.2682057988005591, + "grad_norm": 1.7885172317011768, + "learning_rate": 3.4338008505849016e-06, + "loss": 0.8894, + "step": 2974 + }, + { + "epoch": 0.2682959823240294, + "grad_norm": 1.3030360843319835, + "learning_rate": 3.433393492041008e-06, + "loss": 0.9758, + "step": 2975 + }, + { + "epoch": 0.26838616584749964, + "grad_norm": 1.3703113725542326, + "learning_rate": 3.432986011192078e-06, + "loss": 1.0242, + "step": 2976 + }, + { + "epoch": 0.2684763493709699, + "grad_norm": 1.5775872249035725, + "learning_rate": 3.4325784080728796e-06, + "loss": 1.037, + "step": 2977 + }, + { + "epoch": 0.2685665328944402, + "grad_norm": 1.3679190721973868, + "learning_rate": 3.4321706827181926e-06, + "loss": 1.0039, + "step": 2978 + }, + { + "epoch": 0.26865671641791045, + "grad_norm": 1.1479281379325004, + "learning_rate": 3.4317628351628064e-06, + "loss": 0.9217, + "step": 2979 + }, + { + "epoch": 0.26874689994138073, + "grad_norm": 1.281816961870775, + "learning_rate": 3.43135486544152e-06, + "loss": 0.9499, + "step": 2980 + }, + { + "epoch": 0.26883708346485097, + "grad_norm": 1.3697839985718663, + "learning_rate": 3.4309467735891442e-06, + "loss": 1.0092, + "step": 2981 + }, + { + "epoch": 0.26892726698832126, + "grad_norm": 1.449557694028092, + "learning_rate": 3.4305385596405e-06, + "loss": 1.0489, + "step": 2982 + }, + { + "epoch": 0.2690174505117915, + "grad_norm": 1.2707974736308343, + "learning_rate": 3.4301302236304174e-06, + "loss": 0.9367, + "step": 2983 + }, + { + "epoch": 0.2691076340352618, + "grad_norm": 0.7644891682637754, + "learning_rate": 3.429721765593739e-06, + "loss": 0.7678, + "step": 2984 + }, + { + "epoch": 0.269197817558732, + "grad_norm": 1.233597039477436, + "learning_rate": 3.4293131855653155e-06, + "loss": 0.9871, + "step": 2985 + }, + { + "epoch": 0.2692880010822023, + "grad_norm": 1.4065406498952453, + "learning_rate": 3.4289044835800102e-06, + "loss": 0.9044, + "step": 2986 + }, + { + "epoch": 0.26937818460567253, + "grad_norm": 1.6648965813762928, + "learning_rate": 3.4284956596726953e-06, + "loss": 0.9659, + "step": 2987 + }, + { + "epoch": 0.2694683681291428, + "grad_norm": 1.2399525720232223, + "learning_rate": 3.4280867138782544e-06, + "loss": 0.963, + "step": 2988 + }, + { + "epoch": 0.26955855165261305, + "grad_norm": 1.6173775234165875, + "learning_rate": 3.4276776462315803e-06, + "loss": 0.9687, + "step": 2989 + }, + { + "epoch": 0.26964873517608334, + "grad_norm": 1.3529093211477532, + "learning_rate": 3.427268456767578e-06, + "loss": 0.9723, + "step": 2990 + }, + { + "epoch": 0.2697389186995536, + "grad_norm": 1.4341841061625467, + "learning_rate": 3.42685914552116e-06, + "loss": 1.0161, + "step": 2991 + }, + { + "epoch": 
0.26982910222302386, + "grad_norm": 6.441709160839365, + "learning_rate": 3.426449712527253e-06, + "loss": 0.9431, + "step": 2992 + }, + { + "epoch": 0.2699192857464941, + "grad_norm": 1.3422088656873188, + "learning_rate": 3.4260401578207904e-06, + "loss": 0.8852, + "step": 2993 + }, + { + "epoch": 0.2700094692699644, + "grad_norm": 2.0556435337083623, + "learning_rate": 3.4256304814367185e-06, + "loss": 0.9427, + "step": 2994 + }, + { + "epoch": 0.2700996527934346, + "grad_norm": 1.5390750477855408, + "learning_rate": 3.4252206834099936e-06, + "loss": 0.9285, + "step": 2995 + }, + { + "epoch": 0.2701898363169049, + "grad_norm": 1.4706241743520745, + "learning_rate": 3.424810763775581e-06, + "loss": 0.972, + "step": 2996 + }, + { + "epoch": 0.27028001984037514, + "grad_norm": 1.337423067465054, + "learning_rate": 3.4244007225684587e-06, + "loss": 0.9125, + "step": 2997 + }, + { + "epoch": 0.27037020336384543, + "grad_norm": 1.3151765054378166, + "learning_rate": 3.4239905598236115e-06, + "loss": 0.9804, + "step": 2998 + }, + { + "epoch": 0.27046038688731566, + "grad_norm": 1.3235731323351212, + "learning_rate": 3.4235802755760386e-06, + "loss": 1.0, + "step": 2999 + }, + { + "epoch": 0.27055057041078595, + "grad_norm": 3.702687684324251, + "learning_rate": 3.4231698698607464e-06, + "loss": 0.9393, + "step": 3000 + }, + { + "epoch": 0.2706407539342562, + "grad_norm": 1.6450406189515467, + "learning_rate": 3.4227593427127543e-06, + "loss": 1.0242, + "step": 3001 + }, + { + "epoch": 0.2707309374577265, + "grad_norm": 1.5741377210942897, + "learning_rate": 3.42234869416709e-06, + "loss": 0.9388, + "step": 3002 + }, + { + "epoch": 0.27082112098119676, + "grad_norm": 1.8140720258753444, + "learning_rate": 3.421937924258792e-06, + "loss": 0.9789, + "step": 3003 + }, + { + "epoch": 0.270911304504667, + "grad_norm": 1.5924502289808011, + "learning_rate": 3.4215270330229096e-06, + "loss": 1.0118, + "step": 3004 + }, + { + "epoch": 0.2710014880281373, + "grad_norm": 1.53094876498773, + "learning_rate": 3.421116020494503e-06, + "loss": 0.9351, + "step": 3005 + }, + { + "epoch": 0.2710916715516075, + "grad_norm": 1.238123646041692, + "learning_rate": 3.420704886708642e-06, + "loss": 0.9174, + "step": 3006 + }, + { + "epoch": 0.2711818550750778, + "grad_norm": 1.06846905412073, + "learning_rate": 3.4202936317004056e-06, + "loss": 0.9231, + "step": 3007 + }, + { + "epoch": 0.27127203859854804, + "grad_norm": 1.5378873569724136, + "learning_rate": 3.4198822555048856e-06, + "loss": 0.9815, + "step": 3008 + }, + { + "epoch": 0.2713622221220183, + "grad_norm": 1.631456606120374, + "learning_rate": 3.419470758157182e-06, + "loss": 1.0531, + "step": 3009 + }, + { + "epoch": 0.27145240564548856, + "grad_norm": 1.469740837762899, + "learning_rate": 3.4190591396924068e-06, + "loss": 0.9684, + "step": 3010 + }, + { + "epoch": 0.27154258916895885, + "grad_norm": 0.6914285343695409, + "learning_rate": 3.418647400145681e-06, + "loss": 0.8198, + "step": 3011 + }, + { + "epoch": 0.2716327726924291, + "grad_norm": 0.7758600846078862, + "learning_rate": 3.4182355395521367e-06, + "loss": 0.8015, + "step": 3012 + }, + { + "epoch": 0.27172295621589937, + "grad_norm": 1.3590682549616824, + "learning_rate": 3.417823557946916e-06, + "loss": 0.9692, + "step": 3013 + }, + { + "epoch": 0.2718131397393696, + "grad_norm": 1.4316707675565725, + "learning_rate": 3.417411455365172e-06, + "loss": 0.864, + "step": 3014 + }, + { + "epoch": 0.2719033232628399, + "grad_norm": 1.5031247969724395, + "learning_rate": 3.416999231842066e-06, + 
"loss": 0.873, + "step": 3015 + }, + { + "epoch": 0.2719935067863101, + "grad_norm": 1.6643030413644244, + "learning_rate": 3.416586887412773e-06, + "loss": 1.0214, + "step": 3016 + }, + { + "epoch": 0.2720836903097804, + "grad_norm": 1.4230499443462987, + "learning_rate": 3.416174422112476e-06, + "loss": 0.9325, + "step": 3017 + }, + { + "epoch": 0.27217387383325065, + "grad_norm": 0.901737554470104, + "learning_rate": 3.4157618359763687e-06, + "loss": 0.7673, + "step": 3018 + }, + { + "epoch": 0.27226405735672093, + "grad_norm": 1.350697058397568, + "learning_rate": 3.4153491290396542e-06, + "loss": 0.8945, + "step": 3019 + }, + { + "epoch": 0.27235424088019117, + "grad_norm": 1.598035921158567, + "learning_rate": 3.4149363013375485e-06, + "loss": 0.981, + "step": 3020 + }, + { + "epoch": 0.27244442440366146, + "grad_norm": 0.9326043732247106, + "learning_rate": 3.414523352905276e-06, + "loss": 0.7823, + "step": 3021 + }, + { + "epoch": 0.2725346079271317, + "grad_norm": 1.4428487036915494, + "learning_rate": 3.414110283778071e-06, + "loss": 0.9362, + "step": 3022 + }, + { + "epoch": 0.272624791450602, + "grad_norm": 1.9294098565524558, + "learning_rate": 3.4136970939911797e-06, + "loss": 0.919, + "step": 3023 + }, + { + "epoch": 0.2727149749740722, + "grad_norm": 1.5093414776793508, + "learning_rate": 3.413283783579857e-06, + "loss": 0.9425, + "step": 3024 + }, + { + "epoch": 0.2728051584975425, + "grad_norm": 1.7273812402515472, + "learning_rate": 3.412870352579369e-06, + "loss": 0.9008, + "step": 3025 + }, + { + "epoch": 0.2728953420210128, + "grad_norm": 1.044514514272599, + "learning_rate": 3.4124568010249915e-06, + "loss": 0.8095, + "step": 3026 + }, + { + "epoch": 0.272985525544483, + "grad_norm": 1.842594625250744, + "learning_rate": 3.4120431289520124e-06, + "loss": 0.9567, + "step": 3027 + }, + { + "epoch": 0.2730757090679533, + "grad_norm": 1.5237230864628817, + "learning_rate": 3.4116293363957276e-06, + "loss": 0.986, + "step": 3028 + }, + { + "epoch": 0.27316589259142354, + "grad_norm": 1.883665785802391, + "learning_rate": 3.4112154233914438e-06, + "loss": 0.9798, + "step": 3029 + }, + { + "epoch": 0.27325607611489383, + "grad_norm": 1.704531657423187, + "learning_rate": 3.410801389974479e-06, + "loss": 0.8689, + "step": 3030 + }, + { + "epoch": 0.27334625963836406, + "grad_norm": 1.6010169612026581, + "learning_rate": 3.410387236180161e-06, + "loss": 1.0063, + "step": 3031 + }, + { + "epoch": 0.27343644316183435, + "grad_norm": 1.5145473778543954, + "learning_rate": 3.409972962043826e-06, + "loss": 0.8484, + "step": 3032 + }, + { + "epoch": 0.2735266266853046, + "grad_norm": 1.5561784658881808, + "learning_rate": 3.4095585676008234e-06, + "loss": 1.0223, + "step": 3033 + }, + { + "epoch": 0.2736168102087749, + "grad_norm": 1.394840238303303, + "learning_rate": 3.4091440528865125e-06, + "loss": 0.9683, + "step": 3034 + }, + { + "epoch": 0.2737069937322451, + "grad_norm": 2.463298815208791, + "learning_rate": 3.4087294179362606e-06, + "loss": 0.9682, + "step": 3035 + }, + { + "epoch": 0.2737971772557154, + "grad_norm": 1.5008277993079884, + "learning_rate": 3.4083146627854474e-06, + "loss": 1.0176, + "step": 3036 + }, + { + "epoch": 0.27388736077918563, + "grad_norm": 3.257396497626444, + "learning_rate": 3.4078997874694614e-06, + "loss": 0.8886, + "step": 3037 + }, + { + "epoch": 0.2739775443026559, + "grad_norm": 1.5983541964752797, + "learning_rate": 3.407484792023703e-06, + "loss": 0.8668, + "step": 3038 + }, + { + "epoch": 0.27406772782612615, + "grad_norm": 
1.873033764169768, + "learning_rate": 3.407069676483581e-06, + "loss": 0.948, + "step": 3039 + }, + { + "epoch": 0.27415791134959644, + "grad_norm": 1.4747146265728435, + "learning_rate": 3.406654440884516e-06, + "loss": 1.0038, + "step": 3040 + }, + { + "epoch": 0.2742480948730667, + "grad_norm": 1.7039480058187848, + "learning_rate": 3.4062390852619372e-06, + "loss": 0.9969, + "step": 3041 + }, + { + "epoch": 0.27433827839653696, + "grad_norm": 1.9690932322835621, + "learning_rate": 3.4058236096512867e-06, + "loss": 0.9666, + "step": 3042 + }, + { + "epoch": 0.2744284619200072, + "grad_norm": 1.9613915635823813, + "learning_rate": 3.405408014088013e-06, + "loss": 0.8723, + "step": 3043 + }, + { + "epoch": 0.2745186454434775, + "grad_norm": 2.2200305091204897, + "learning_rate": 3.404992298607579e-06, + "loss": 0.8587, + "step": 3044 + }, + { + "epoch": 0.2746088289669477, + "grad_norm": 5.374434507918691, + "learning_rate": 3.4045764632454547e-06, + "loss": 0.9435, + "step": 3045 + }, + { + "epoch": 0.274699012490418, + "grad_norm": 1.3847179323923045, + "learning_rate": 3.4041605080371223e-06, + "loss": 0.9257, + "step": 3046 + }, + { + "epoch": 0.27478919601388824, + "grad_norm": 1.361081597706708, + "learning_rate": 3.4037444330180726e-06, + "loss": 0.9727, + "step": 3047 + }, + { + "epoch": 0.2748793795373585, + "grad_norm": 1.8038246767443233, + "learning_rate": 3.403328238223808e-06, + "loss": 0.9795, + "step": 3048 + }, + { + "epoch": 0.27496956306082876, + "grad_norm": 1.5625892613663863, + "learning_rate": 3.4029119236898395e-06, + "loss": 0.9956, + "step": 3049 + }, + { + "epoch": 0.27505974658429905, + "grad_norm": 1.2018776905587125, + "learning_rate": 3.4024954894516906e-06, + "loss": 0.8571, + "step": 3050 + }, + { + "epoch": 0.27514993010776934, + "grad_norm": 1.5417044179607253, + "learning_rate": 3.4020789355448933e-06, + "loss": 0.9699, + "step": 3051 + }, + { + "epoch": 0.27524011363123957, + "grad_norm": 1.3460694632439776, + "learning_rate": 3.40166226200499e-06, + "loss": 1.0389, + "step": 3052 + }, + { + "epoch": 0.27533029715470986, + "grad_norm": 1.80716037181722, + "learning_rate": 3.401245468867534e-06, + "loss": 0.9506, + "step": 3053 + }, + { + "epoch": 0.2754204806781801, + "grad_norm": 1.159158708082056, + "learning_rate": 3.400828556168088e-06, + "loss": 0.8682, + "step": 3054 + }, + { + "epoch": 0.2755106642016504, + "grad_norm": 1.607135807506298, + "learning_rate": 3.4004115239422255e-06, + "loss": 0.9774, + "step": 3055 + }, + { + "epoch": 0.2756008477251206, + "grad_norm": 1.6640521125290157, + "learning_rate": 3.3999943722255305e-06, + "loss": 0.9528, + "step": 3056 + }, + { + "epoch": 0.2756910312485909, + "grad_norm": 1.5284107120578803, + "learning_rate": 3.3995771010535955e-06, + "loss": 0.8713, + "step": 3057 + }, + { + "epoch": 0.27578121477206113, + "grad_norm": 1.436571152839218, + "learning_rate": 3.3991597104620253e-06, + "loss": 0.7899, + "step": 3058 + }, + { + "epoch": 0.2758713982955314, + "grad_norm": 2.2032823438799274, + "learning_rate": 3.398742200486434e-06, + "loss": 0.9472, + "step": 3059 + }, + { + "epoch": 0.27596158181900166, + "grad_norm": 1.7126216873037787, + "learning_rate": 3.3983245711624453e-06, + "loss": 0.98, + "step": 3060 + }, + { + "epoch": 0.27605176534247194, + "grad_norm": 1.7418779175086854, + "learning_rate": 3.3979068225256946e-06, + "loss": 0.8975, + "step": 3061 + }, + { + "epoch": 0.2761419488659422, + "grad_norm": 2.4237095929102934, + "learning_rate": 3.3974889546118246e-06, + "loss": 0.934, + "step": 
3062 + }, + { + "epoch": 0.27623213238941247, + "grad_norm": 1.576761695519652, + "learning_rate": 3.3970709674564918e-06, + "loss": 0.9106, + "step": 3063 + }, + { + "epoch": 0.2763223159128827, + "grad_norm": 1.3942728096975636, + "learning_rate": 3.3966528610953607e-06, + "loss": 0.9596, + "step": 3064 + }, + { + "epoch": 0.276412499436353, + "grad_norm": 1.44271179455871, + "learning_rate": 3.3962346355641067e-06, + "loss": 0.9701, + "step": 3065 + }, + { + "epoch": 0.2765026829598232, + "grad_norm": 1.4021875831122785, + "learning_rate": 3.3958162908984146e-06, + "loss": 1.0034, + "step": 3066 + }, + { + "epoch": 0.2765928664832935, + "grad_norm": 1.3870557348718653, + "learning_rate": 3.39539782713398e-06, + "loss": 0.9559, + "step": 3067 + }, + { + "epoch": 0.27668305000676374, + "grad_norm": 1.820675989622767, + "learning_rate": 3.394979244306509e-06, + "loss": 0.9851, + "step": 3068 + }, + { + "epoch": 0.27677323353023403, + "grad_norm": 1.3770152409388108, + "learning_rate": 3.3945605424517166e-06, + "loss": 0.9771, + "step": 3069 + }, + { + "epoch": 0.27686341705370426, + "grad_norm": 1.5454923056513068, + "learning_rate": 3.3941417216053294e-06, + "loss": 0.9617, + "step": 3070 + }, + { + "epoch": 0.27695360057717455, + "grad_norm": 1.8292156780631277, + "learning_rate": 3.3937227818030835e-06, + "loss": 0.9295, + "step": 3071 + }, + { + "epoch": 0.2770437841006448, + "grad_norm": 1.7195859090115473, + "learning_rate": 3.393303723080725e-06, + "loss": 0.934, + "step": 3072 + }, + { + "epoch": 0.2771339676241151, + "grad_norm": 1.5105183095823016, + "learning_rate": 3.3928845454740097e-06, + "loss": 0.9742, + "step": 3073 + }, + { + "epoch": 0.27722415114758536, + "grad_norm": 1.8673375919302106, + "learning_rate": 3.392465249018705e-06, + "loss": 0.9506, + "step": 3074 + }, + { + "epoch": 0.2773143346710556, + "grad_norm": 1.6292941502923528, + "learning_rate": 3.3920458337505872e-06, + "loss": 0.9517, + "step": 3075 + }, + { + "epoch": 0.2774045181945259, + "grad_norm": 5.0643311002943125, + "learning_rate": 3.391626299705443e-06, + "loss": 0.8047, + "step": 3076 + }, + { + "epoch": 0.2774947017179961, + "grad_norm": 1.422161199250652, + "learning_rate": 3.39120664691907e-06, + "loss": 0.9685, + "step": 3077 + }, + { + "epoch": 0.2775848852414664, + "grad_norm": 1.916836592840173, + "learning_rate": 3.390786875427275e-06, + "loss": 0.942, + "step": 3078 + }, + { + "epoch": 0.27767506876493664, + "grad_norm": 1.431638210237348, + "learning_rate": 3.390366985265875e-06, + "loss": 0.9519, + "step": 3079 + }, + { + "epoch": 0.2777652522884069, + "grad_norm": 1.3664867676206962, + "learning_rate": 3.389946976470697e-06, + "loss": 0.9199, + "step": 3080 + }, + { + "epoch": 0.27785543581187716, + "grad_norm": 1.3452780263916586, + "learning_rate": 3.3895268490775787e-06, + "loss": 1.0516, + "step": 3081 + }, + { + "epoch": 0.27794561933534745, + "grad_norm": 1.5637761054844477, + "learning_rate": 3.3891066031223685e-06, + "loss": 0.926, + "step": 3082 + }, + { + "epoch": 0.2780358028588177, + "grad_norm": 1.5488945464507196, + "learning_rate": 3.3886862386409237e-06, + "loss": 0.9931, + "step": 3083 + }, + { + "epoch": 0.27812598638228797, + "grad_norm": 1.8458900316656692, + "learning_rate": 3.388265755669111e-06, + "loss": 0.9313, + "step": 3084 + }, + { + "epoch": 0.2782161699057582, + "grad_norm": 3.0220447259871417, + "learning_rate": 3.3878451542428093e-06, + "loss": 1.0198, + "step": 3085 + }, + { + "epoch": 0.2783063534292285, + "grad_norm": 1.407823784007111, + 
"learning_rate": 3.387424434397907e-06, + "loss": 0.9483, + "step": 3086 + }, + { + "epoch": 0.2783965369526987, + "grad_norm": 1.7606500399443745, + "learning_rate": 3.3870035961703013e-06, + "loss": 0.8801, + "step": 3087 + }, + { + "epoch": 0.278486720476169, + "grad_norm": 1.8356691854300442, + "learning_rate": 3.3865826395959014e-06, + "loss": 1.087, + "step": 3088 + }, + { + "epoch": 0.27857690399963925, + "grad_norm": 1.2445492634783681, + "learning_rate": 3.3861615647106253e-06, + "loss": 1.0496, + "step": 3089 + }, + { + "epoch": 0.27866708752310954, + "grad_norm": 1.9402149006692038, + "learning_rate": 3.3857403715504012e-06, + "loss": 0.9733, + "step": 3090 + }, + { + "epoch": 0.27875727104657977, + "grad_norm": 1.582663327386853, + "learning_rate": 3.385319060151167e-06, + "loss": 0.9262, + "step": 3091 + }, + { + "epoch": 0.27884745457005006, + "grad_norm": 1.7675874319937985, + "learning_rate": 3.3848976305488728e-06, + "loss": 0.9522, + "step": 3092 + }, + { + "epoch": 0.2789376380935203, + "grad_norm": 1.7480803587706157, + "learning_rate": 3.384476082779476e-06, + "loss": 0.9957, + "step": 3093 + }, + { + "epoch": 0.2790278216169906, + "grad_norm": 1.8868612231686348, + "learning_rate": 3.3840544168789463e-06, + "loss": 0.9501, + "step": 3094 + }, + { + "epoch": 0.2791180051404608, + "grad_norm": 1.5306547915455488, + "learning_rate": 3.3836326328832617e-06, + "loss": 1.072, + "step": 3095 + }, + { + "epoch": 0.2792081886639311, + "grad_norm": 1.4904434637373576, + "learning_rate": 3.383210730828412e-06, + "loss": 0.9175, + "step": 3096 + }, + { + "epoch": 0.2792983721874014, + "grad_norm": 1.2080619386652625, + "learning_rate": 3.3827887107503953e-06, + "loss": 1.0258, + "step": 3097 + }, + { + "epoch": 0.2793885557108716, + "grad_norm": 1.380451320156041, + "learning_rate": 3.3823665726852216e-06, + "loss": 0.9078, + "step": 3098 + }, + { + "epoch": 0.2794787392343419, + "grad_norm": 1.216776252192187, + "learning_rate": 3.3819443166689095e-06, + "loss": 1.0777, + "step": 3099 + }, + { + "epoch": 0.27956892275781214, + "grad_norm": 1.3832642846320478, + "learning_rate": 3.3815219427374886e-06, + "loss": 0.9496, + "step": 3100 + }, + { + "epoch": 0.27965910628128243, + "grad_norm": 1.347930791781831, + "learning_rate": 3.3810994509269975e-06, + "loss": 0.9727, + "step": 3101 + }, + { + "epoch": 0.27974928980475267, + "grad_norm": 1.4106974738707585, + "learning_rate": 3.3806768412734864e-06, + "loss": 0.9921, + "step": 3102 + }, + { + "epoch": 0.27983947332822295, + "grad_norm": 1.5942708080764896, + "learning_rate": 3.380254113813014e-06, + "loss": 1.0474, + "step": 3103 + }, + { + "epoch": 0.2799296568516932, + "grad_norm": 1.5227349642141388, + "learning_rate": 3.3798312685816496e-06, + "loss": 0.9998, + "step": 3104 + }, + { + "epoch": 0.2800198403751635, + "grad_norm": 1.4245348355180263, + "learning_rate": 3.3794083056154738e-06, + "loss": 0.9623, + "step": 3105 + }, + { + "epoch": 0.2801100238986337, + "grad_norm": 1.430056248205509, + "learning_rate": 3.3789852249505746e-06, + "loss": 0.97, + "step": 3106 + }, + { + "epoch": 0.280200207422104, + "grad_norm": 1.553899440683335, + "learning_rate": 3.378562026623053e-06, + "loss": 1.0253, + "step": 3107 + }, + { + "epoch": 0.28029039094557423, + "grad_norm": 1.8699712711310157, + "learning_rate": 3.3781387106690175e-06, + "loss": 1.033, + "step": 3108 + }, + { + "epoch": 0.2803805744690445, + "grad_norm": 1.2801262183841509, + "learning_rate": 3.3777152771245885e-06, + "loss": 0.9883, + "step": 3109 + }, + { + 
"epoch": 0.28047075799251475, + "grad_norm": 1.9033894572804657, + "learning_rate": 3.377291726025895e-06, + "loss": 0.9986, + "step": 3110 + }, + { + "epoch": 0.28056094151598504, + "grad_norm": 1.824841999373538, + "learning_rate": 3.3768680574090782e-06, + "loss": 1.0721, + "step": 3111 + }, + { + "epoch": 0.2806511250394553, + "grad_norm": 2.1340396690292516, + "learning_rate": 3.3764442713102857e-06, + "loss": 0.9375, + "step": 3112 + }, + { + "epoch": 0.28074130856292556, + "grad_norm": 1.3241586587150393, + "learning_rate": 3.3760203677656786e-06, + "loss": 0.9658, + "step": 3113 + }, + { + "epoch": 0.2808314920863958, + "grad_norm": 1.5592068109419353, + "learning_rate": 3.3755963468114262e-06, + "loss": 1.0179, + "step": 3114 + }, + { + "epoch": 0.2809216756098661, + "grad_norm": 1.3964213323701675, + "learning_rate": 3.3751722084837095e-06, + "loss": 0.8708, + "step": 3115 + }, + { + "epoch": 0.2810118591333363, + "grad_norm": 1.389445415364975, + "learning_rate": 3.3747479528187166e-06, + "loss": 0.9442, + "step": 3116 + }, + { + "epoch": 0.2811020426568066, + "grad_norm": 1.4596070948452642, + "learning_rate": 3.3743235798526485e-06, + "loss": 1.0096, + "step": 3117 + }, + { + "epoch": 0.28119222618027684, + "grad_norm": 1.8025690500945828, + "learning_rate": 3.373899089621714e-06, + "loss": 0.901, + "step": 3118 + }, + { + "epoch": 0.2812824097037471, + "grad_norm": 1.4882527346144547, + "learning_rate": 3.373474482162134e-06, + "loss": 0.9449, + "step": 3119 + }, + { + "epoch": 0.28137259322721736, + "grad_norm": 1.6594787651420544, + "learning_rate": 3.3730497575101376e-06, + "loss": 0.9665, + "step": 3120 + }, + { + "epoch": 0.28146277675068765, + "grad_norm": 1.6575266398364699, + "learning_rate": 3.3726249157019654e-06, + "loss": 0.9579, + "step": 3121 + }, + { + "epoch": 0.28155296027415794, + "grad_norm": 1.6502084484789783, + "learning_rate": 3.372199956773866e-06, + "loss": 1.0421, + "step": 3122 + }, + { + "epoch": 0.28164314379762817, + "grad_norm": 1.4637074857380288, + "learning_rate": 3.371774880762101e-06, + "loss": 0.9179, + "step": 3123 + }, + { + "epoch": 0.28173332732109846, + "grad_norm": 1.6166321911363937, + "learning_rate": 3.3713496877029392e-06, + "loss": 0.9445, + "step": 3124 + }, + { + "epoch": 0.2818235108445687, + "grad_norm": 1.7676267501296896, + "learning_rate": 3.37092437763266e-06, + "loss": 1.0259, + "step": 3125 + }, + { + "epoch": 0.281913694368039, + "grad_norm": 1.6395436674342365, + "learning_rate": 3.3704989505875537e-06, + "loss": 0.9935, + "step": 3126 + }, + { + "epoch": 0.2820038778915092, + "grad_norm": 1.5726700730528789, + "learning_rate": 3.3700734066039205e-06, + "loss": 0.9343, + "step": 3127 + }, + { + "epoch": 0.2820940614149795, + "grad_norm": 1.6421892973711247, + "learning_rate": 3.36964774571807e-06, + "loss": 1.0007, + "step": 3128 + }, + { + "epoch": 0.28218424493844974, + "grad_norm": 1.8289500274046664, + "learning_rate": 3.3692219679663206e-06, + "loss": 0.9582, + "step": 3129 + }, + { + "epoch": 0.28227442846192, + "grad_norm": 1.9455661838071021, + "learning_rate": 3.3687960733850043e-06, + "loss": 0.9857, + "step": 3130 + }, + { + "epoch": 0.28236461198539026, + "grad_norm": 1.4310290603352707, + "learning_rate": 3.3683700620104586e-06, + "loss": 0.9093, + "step": 3131 + }, + { + "epoch": 0.28245479550886055, + "grad_norm": 1.4516590886989431, + "learning_rate": 3.3679439338790347e-06, + "loss": 0.9854, + "step": 3132 + }, + { + "epoch": 0.2825449790323308, + "grad_norm": 1.426558024304358, + "learning_rate": 
3.3675176890270916e-06, + "loss": 1.0899, + "step": 3133 + }, + { + "epoch": 0.28263516255580107, + "grad_norm": 1.4767778834934158, + "learning_rate": 3.367091327490998e-06, + "loss": 0.9881, + "step": 3134 + }, + { + "epoch": 0.2827253460792713, + "grad_norm": 1.627493119584173, + "learning_rate": 3.3666648493071347e-06, + "loss": 0.9468, + "step": 3135 + }, + { + "epoch": 0.2828155296027416, + "grad_norm": 1.7114027949597574, + "learning_rate": 3.3662382545118914e-06, + "loss": 0.9511, + "step": 3136 + }, + { + "epoch": 0.2829057131262118, + "grad_norm": 1.5208307118153732, + "learning_rate": 3.3658115431416663e-06, + "loss": 1.0489, + "step": 3137 + }, + { + "epoch": 0.2829958966496821, + "grad_norm": 0.7930655960550481, + "learning_rate": 3.36538471523287e-06, + "loss": 0.8446, + "step": 3138 + }, + { + "epoch": 0.28308608017315234, + "grad_norm": 1.3726568197076578, + "learning_rate": 3.3649577708219204e-06, + "loss": 1.0278, + "step": 3139 + }, + { + "epoch": 0.28317626369662263, + "grad_norm": 1.736828417625215, + "learning_rate": 3.3645307099452477e-06, + "loss": 0.911, + "step": 3140 + }, + { + "epoch": 0.28326644722009287, + "grad_norm": 1.5275966893388224, + "learning_rate": 3.3641035326392907e-06, + "loss": 0.9733, + "step": 3141 + }, + { + "epoch": 0.28335663074356315, + "grad_norm": 1.644690426623281, + "learning_rate": 3.363676238940499e-06, + "loss": 0.9743, + "step": 3142 + }, + { + "epoch": 0.2834468142670334, + "grad_norm": 1.5235349183846203, + "learning_rate": 3.363248828885331e-06, + "loss": 0.9948, + "step": 3143 + }, + { + "epoch": 0.2835369977905037, + "grad_norm": 1.669619455894798, + "learning_rate": 3.3628213025102562e-06, + "loss": 0.9908, + "step": 3144 + }, + { + "epoch": 0.28362718131397396, + "grad_norm": 1.4237996583874155, + "learning_rate": 3.3623936598517536e-06, + "loss": 1.0054, + "step": 3145 + }, + { + "epoch": 0.2837173648374442, + "grad_norm": 1.5721010868388807, + "learning_rate": 3.3619659009463117e-06, + "loss": 0.8153, + "step": 3146 + }, + { + "epoch": 0.2838075483609145, + "grad_norm": 1.3514219442500006, + "learning_rate": 3.3615380258304287e-06, + "loss": 1.0201, + "step": 3147 + }, + { + "epoch": 0.2838977318843847, + "grad_norm": 1.4782359817487924, + "learning_rate": 3.3611100345406146e-06, + "loss": 0.9561, + "step": 3148 + }, + { + "epoch": 0.283987915407855, + "grad_norm": 1.414320611316776, + "learning_rate": 3.3606819271133873e-06, + "loss": 0.9538, + "step": 3149 + }, + { + "epoch": 0.28407809893132524, + "grad_norm": 0.8669954035614031, + "learning_rate": 3.360253703585275e-06, + "loss": 0.8271, + "step": 3150 + }, + { + "epoch": 0.28416828245479553, + "grad_norm": 1.4605434926787861, + "learning_rate": 3.3598253639928164e-06, + "loss": 0.9124, + "step": 3151 + }, + { + "epoch": 0.28425846597826576, + "grad_norm": 1.7514786604077623, + "learning_rate": 3.3593969083725596e-06, + "loss": 0.927, + "step": 3152 + }, + { + "epoch": 0.28434864950173605, + "grad_norm": 1.5296188541708795, + "learning_rate": 3.358968336761063e-06, + "loss": 0.9281, + "step": 3153 + }, + { + "epoch": 0.2844388330252063, + "grad_norm": 1.3738731188181181, + "learning_rate": 3.3585396491948945e-06, + "loss": 0.9317, + "step": 3154 + }, + { + "epoch": 0.28452901654867657, + "grad_norm": 1.2864640836805972, + "learning_rate": 3.358110845710633e-06, + "loss": 1.0195, + "step": 3155 + }, + { + "epoch": 0.2846192000721468, + "grad_norm": 1.6717511425308196, + "learning_rate": 3.357681926344865e-06, + "loss": 0.953, + "step": 3156 + }, + { + "epoch": 
0.2847093835956171, + "grad_norm": 1.4144521776989976, + "learning_rate": 3.357252891134189e-06, + "loss": 0.9602, + "step": 3157 + }, + { + "epoch": 0.2847995671190873, + "grad_norm": 1.4994877894388703, + "learning_rate": 3.356823740115212e-06, + "loss": 1.0054, + "step": 3158 + }, + { + "epoch": 0.2848897506425576, + "grad_norm": 1.1850814785690784, + "learning_rate": 3.3563944733245525e-06, + "loss": 0.9115, + "step": 3159 + }, + { + "epoch": 0.28497993416602785, + "grad_norm": 1.6830204742192638, + "learning_rate": 3.3559650907988375e-06, + "loss": 0.9589, + "step": 3160 + }, + { + "epoch": 0.28507011768949814, + "grad_norm": 1.263734559475141, + "learning_rate": 3.3555355925747045e-06, + "loss": 0.8203, + "step": 3161 + }, + { + "epoch": 0.28516030121296837, + "grad_norm": 1.5086289799279242, + "learning_rate": 3.3551059786888e-06, + "loss": 0.9479, + "step": 3162 + }, + { + "epoch": 0.28525048473643866, + "grad_norm": 1.4120966276276727, + "learning_rate": 3.3546762491777807e-06, + "loss": 1.0148, + "step": 3163 + }, + { + "epoch": 0.2853406682599089, + "grad_norm": 1.473191222303447, + "learning_rate": 3.3542464040783156e-06, + "loss": 0.9577, + "step": 3164 + }, + { + "epoch": 0.2854308517833792, + "grad_norm": 1.9840908297620474, + "learning_rate": 3.353816443427079e-06, + "loss": 0.982, + "step": 3165 + }, + { + "epoch": 0.2855210353068494, + "grad_norm": 1.7584598959741766, + "learning_rate": 3.3533863672607597e-06, + "loss": 0.9483, + "step": 3166 + }, + { + "epoch": 0.2856112188303197, + "grad_norm": 1.3893074909511003, + "learning_rate": 3.352956175616052e-06, + "loss": 0.9922, + "step": 3167 + }, + { + "epoch": 0.28570140235378993, + "grad_norm": 1.6467345060178755, + "learning_rate": 3.352525868529664e-06, + "loss": 0.9357, + "step": 3168 + }, + { + "epoch": 0.2857915858772602, + "grad_norm": 1.4305319871576196, + "learning_rate": 3.3520954460383103e-06, + "loss": 0.9012, + "step": 3169 + }, + { + "epoch": 0.2858817694007305, + "grad_norm": 1.6245884741034633, + "learning_rate": 3.3516649081787182e-06, + "loss": 0.9068, + "step": 3170 + }, + { + "epoch": 0.28597195292420075, + "grad_norm": 2.2906959095969373, + "learning_rate": 3.3512342549876236e-06, + "loss": 1.0569, + "step": 3171 + }, + { + "epoch": 0.28606213644767103, + "grad_norm": 1.3355485227161294, + "learning_rate": 3.350803486501771e-06, + "loss": 0.9225, + "step": 3172 + }, + { + "epoch": 0.28615231997114127, + "grad_norm": 1.2888085779887277, + "learning_rate": 3.3503726027579175e-06, + "loss": 0.9868, + "step": 3173 + }, + { + "epoch": 0.28624250349461156, + "grad_norm": 1.3722232611176493, + "learning_rate": 3.349941603792827e-06, + "loss": 1.0236, + "step": 3174 + }, + { + "epoch": 0.2863326870180818, + "grad_norm": 1.4611590212391363, + "learning_rate": 3.3495104896432755e-06, + "loss": 0.9539, + "step": 3175 + }, + { + "epoch": 0.2864228705415521, + "grad_norm": 1.5206997259666932, + "learning_rate": 3.3490792603460477e-06, + "loss": 0.9297, + "step": 3176 + }, + { + "epoch": 0.2865130540650223, + "grad_norm": 1.5850599144041768, + "learning_rate": 3.3486479159379393e-06, + "loss": 0.9008, + "step": 3177 + }, + { + "epoch": 0.2866032375884926, + "grad_norm": 0.8302756399889556, + "learning_rate": 3.3482164564557537e-06, + "loss": 0.8106, + "step": 3178 + }, + { + "epoch": 0.28669342111196283, + "grad_norm": 1.5458410641596017, + "learning_rate": 3.3477848819363065e-06, + "loss": 1.0177, + "step": 3179 + }, + { + "epoch": 0.2867836046354331, + "grad_norm": 0.8262737382719249, + "learning_rate": 
3.3473531924164213e-06, + "loss": 0.8416, + "step": 3180 + }, + { + "epoch": 0.28687378815890335, + "grad_norm": 2.0176723048338654, + "learning_rate": 3.3469213879329325e-06, + "loss": 0.9156, + "step": 3181 + }, + { + "epoch": 0.28696397168237364, + "grad_norm": 1.6095509849547933, + "learning_rate": 3.3464894685226837e-06, + "loss": 0.9299, + "step": 3182 + }, + { + "epoch": 0.2870541552058439, + "grad_norm": 1.3423371095063494, + "learning_rate": 3.34605743422253e-06, + "loss": 1.0347, + "step": 3183 + }, + { + "epoch": 0.28714433872931416, + "grad_norm": 1.5470748155795861, + "learning_rate": 3.345625285069333e-06, + "loss": 0.9337, + "step": 3184 + }, + { + "epoch": 0.2872345222527844, + "grad_norm": 1.543054119597388, + "learning_rate": 3.345193021099967e-06, + "loss": 0.9485, + "step": 3185 + }, + { + "epoch": 0.2873247057762547, + "grad_norm": 1.4562617256445596, + "learning_rate": 3.3447606423513157e-06, + "loss": 0.931, + "step": 3186 + }, + { + "epoch": 0.2874148892997249, + "grad_norm": 1.5029437267101349, + "learning_rate": 3.344328148860271e-06, + "loss": 0.9323, + "step": 3187 + }, + { + "epoch": 0.2875050728231952, + "grad_norm": 1.2985334248462033, + "learning_rate": 3.3438955406637365e-06, + "loss": 0.9753, + "step": 3188 + }, + { + "epoch": 0.28759525634666544, + "grad_norm": 1.3651627288486483, + "learning_rate": 3.343462817798624e-06, + "loss": 0.9828, + "step": 3189 + }, + { + "epoch": 0.28768543987013573, + "grad_norm": 1.6911387595438088, + "learning_rate": 3.343029980301856e-06, + "loss": 0.9272, + "step": 3190 + }, + { + "epoch": 0.28777562339360596, + "grad_norm": 2.06115250930043, + "learning_rate": 3.342597028210365e-06, + "loss": 0.9849, + "step": 3191 + }, + { + "epoch": 0.28786580691707625, + "grad_norm": 1.415595977687807, + "learning_rate": 3.342163961561092e-06, + "loss": 0.9038, + "step": 3192 + }, + { + "epoch": 0.28795599044054654, + "grad_norm": 1.316701657800043, + "learning_rate": 3.34173078039099e-06, + "loss": 0.9352, + "step": 3193 + }, + { + "epoch": 0.28804617396401677, + "grad_norm": 1.4824821047804464, + "learning_rate": 3.3412974847370193e-06, + "loss": 1.1333, + "step": 3194 + }, + { + "epoch": 0.28813635748748706, + "grad_norm": 1.322016231846964, + "learning_rate": 3.3408640746361514e-06, + "loss": 0.9852, + "step": 3195 + }, + { + "epoch": 0.2882265410109573, + "grad_norm": 0.7894133363839618, + "learning_rate": 3.3404305501253663e-06, + "loss": 0.8147, + "step": 3196 + }, + { + "epoch": 0.2883167245344276, + "grad_norm": 1.4708818425868493, + "learning_rate": 3.3399969112416565e-06, + "loss": 1.0385, + "step": 3197 + }, + { + "epoch": 0.2884069080578978, + "grad_norm": 1.540903529742541, + "learning_rate": 3.3395631580220213e-06, + "loss": 0.8986, + "step": 3198 + }, + { + "epoch": 0.2884970915813681, + "grad_norm": 1.4337468286427657, + "learning_rate": 3.3391292905034714e-06, + "loss": 0.9424, + "step": 3199 + }, + { + "epoch": 0.28858727510483834, + "grad_norm": 1.961301063179199, + "learning_rate": 3.338695308723027e-06, + "loss": 1.032, + "step": 3200 + }, + { + "epoch": 0.2886774586283086, + "grad_norm": 1.4582123660825084, + "learning_rate": 3.338261212717716e-06, + "loss": 1.0179, + "step": 3201 + }, + { + "epoch": 0.28876764215177886, + "grad_norm": 1.3918939865454891, + "learning_rate": 3.33782700252458e-06, + "loss": 0.993, + "step": 3202 + }, + { + "epoch": 0.28885782567524915, + "grad_norm": 1.9426797723247347, + "learning_rate": 3.337392678180668e-06, + "loss": 1.002, + "step": 3203 + }, + { + "epoch": 0.2889480091987194, 
+ "grad_norm": 1.574459994551967, + "learning_rate": 3.3369582397230377e-06, + "loss": 0.9657, + "step": 3204 + }, + { + "epoch": 0.28903819272218967, + "grad_norm": 1.5838237638690542, + "learning_rate": 3.336523687188759e-06, + "loss": 1.0097, + "step": 3205 + }, + { + "epoch": 0.2891283762456599, + "grad_norm": 0.7321222938746804, + "learning_rate": 3.336089020614909e-06, + "loss": 0.8252, + "step": 3206 + }, + { + "epoch": 0.2892185597691302, + "grad_norm": 1.4729910148766103, + "learning_rate": 3.3356542400385774e-06, + "loss": 0.846, + "step": 3207 + }, + { + "epoch": 0.2893087432926004, + "grad_norm": 1.2600570932956026, + "learning_rate": 3.3352193454968607e-06, + "loss": 0.8987, + "step": 3208 + }, + { + "epoch": 0.2893989268160707, + "grad_norm": 1.3860836240166134, + "learning_rate": 3.3347843370268675e-06, + "loss": 1.027, + "step": 3209 + }, + { + "epoch": 0.28948911033954094, + "grad_norm": 1.3440190422945708, + "learning_rate": 3.334349214665715e-06, + "loss": 0.985, + "step": 3210 + }, + { + "epoch": 0.28957929386301123, + "grad_norm": 1.3938938254829025, + "learning_rate": 3.3339139784505293e-06, + "loss": 1.0503, + "step": 3211 + }, + { + "epoch": 0.28966947738648147, + "grad_norm": 1.2175929617199637, + "learning_rate": 3.333478628418448e-06, + "loss": 1.0363, + "step": 3212 + }, + { + "epoch": 0.28975966090995176, + "grad_norm": 1.2007474816363626, + "learning_rate": 3.333043164606618e-06, + "loss": 0.8499, + "step": 3213 + }, + { + "epoch": 0.289849844433422, + "grad_norm": 1.3995414936746569, + "learning_rate": 3.3326075870521948e-06, + "loss": 1.0468, + "step": 3214 + }, + { + "epoch": 0.2899400279568923, + "grad_norm": 1.778773811915472, + "learning_rate": 3.3321718957923437e-06, + "loss": 0.9731, + "step": 3215 + }, + { + "epoch": 0.29003021148036257, + "grad_norm": 1.3747139980071612, + "learning_rate": 3.3317360908642413e-06, + "loss": 0.963, + "step": 3216 + }, + { + "epoch": 0.2901203950038328, + "grad_norm": 1.6995028554247633, + "learning_rate": 3.331300172305072e-06, + "loss": 0.9148, + "step": 3217 + }, + { + "epoch": 0.2902105785273031, + "grad_norm": 1.494914972445359, + "learning_rate": 3.330864140152032e-06, + "loss": 0.9565, + "step": 3218 + }, + { + "epoch": 0.2903007620507733, + "grad_norm": 1.3603902949551478, + "learning_rate": 3.330427994442325e-06, + "loss": 0.9765, + "step": 3219 + }, + { + "epoch": 0.2903909455742436, + "grad_norm": 1.37109375, + "learning_rate": 3.3299917352131657e-06, + "loss": 1.0027, + "step": 3220 + }, + { + "epoch": 0.29048112909771384, + "grad_norm": 2.242953817489914, + "learning_rate": 3.329555362501778e-06, + "loss": 0.9329, + "step": 3221 + }, + { + "epoch": 0.29057131262118413, + "grad_norm": 1.4175130989505569, + "learning_rate": 3.3291188763453954e-06, + "loss": 0.929, + "step": 3222 + }, + { + "epoch": 0.29066149614465436, + "grad_norm": 1.6056689209534512, + "learning_rate": 3.3286822767812618e-06, + "loss": 0.9519, + "step": 3223 + }, + { + "epoch": 0.29075167966812465, + "grad_norm": 2.3142218494349454, + "learning_rate": 3.32824556384663e-06, + "loss": 0.95, + "step": 3224 + }, + { + "epoch": 0.2908418631915949, + "grad_norm": 1.3610591759580457, + "learning_rate": 3.3278087375787628e-06, + "loss": 0.9719, + "step": 3225 + }, + { + "epoch": 0.2909320467150652, + "grad_norm": 1.6328847768748291, + "learning_rate": 3.327371798014933e-06, + "loss": 0.9774, + "step": 3226 + }, + { + "epoch": 0.2910222302385354, + "grad_norm": 1.6410301071650302, + "learning_rate": 3.3269347451924218e-06, + "loss": 0.9693, + 
"step": 3227 + }, + { + "epoch": 0.2911124137620057, + "grad_norm": 1.4456262196470955, + "learning_rate": 3.326497579148522e-06, + "loss": 1.0271, + "step": 3228 + }, + { + "epoch": 0.29120259728547593, + "grad_norm": 1.557473910926065, + "learning_rate": 3.3260602999205345e-06, + "loss": 0.97, + "step": 3229 + }, + { + "epoch": 0.2912927808089462, + "grad_norm": 1.4249043315428873, + "learning_rate": 3.32562290754577e-06, + "loss": 0.9797, + "step": 3230 + }, + { + "epoch": 0.29138296433241645, + "grad_norm": 1.3659522443580823, + "learning_rate": 3.3251854020615494e-06, + "loss": 0.9388, + "step": 3231 + }, + { + "epoch": 0.29147314785588674, + "grad_norm": 1.5214959708519926, + "learning_rate": 3.324747783505204e-06, + "loss": 0.9715, + "step": 3232 + }, + { + "epoch": 0.29156333137935697, + "grad_norm": 1.4792439973344162, + "learning_rate": 3.324310051914073e-06, + "loss": 0.8813, + "step": 3233 + }, + { + "epoch": 0.29165351490282726, + "grad_norm": 1.6860883247571719, + "learning_rate": 3.3238722073255056e-06, + "loss": 0.9338, + "step": 3234 + }, + { + "epoch": 0.2917436984262975, + "grad_norm": 1.7480914744223193, + "learning_rate": 3.323434249776863e-06, + "loss": 0.9306, + "step": 3235 + }, + { + "epoch": 0.2918338819497678, + "grad_norm": 0.9409854945849522, + "learning_rate": 3.3229961793055117e-06, + "loss": 0.8253, + "step": 3236 + }, + { + "epoch": 0.291924065473238, + "grad_norm": 1.293153179188009, + "learning_rate": 3.3225579959488314e-06, + "loss": 0.8985, + "step": 3237 + }, + { + "epoch": 0.2920142489967083, + "grad_norm": 1.4206725684952883, + "learning_rate": 3.322119699744211e-06, + "loss": 0.9859, + "step": 3238 + }, + { + "epoch": 0.29210443252017854, + "grad_norm": 1.731325234661665, + "learning_rate": 3.3216812907290476e-06, + "loss": 0.9812, + "step": 3239 + }, + { + "epoch": 0.2921946160436488, + "grad_norm": 1.3059859068170019, + "learning_rate": 3.3212427689407484e-06, + "loss": 0.9855, + "step": 3240 + }, + { + "epoch": 0.2922847995671191, + "grad_norm": 0.7258728610099703, + "learning_rate": 3.3208041344167317e-06, + "loss": 0.778, + "step": 3241 + }, + { + "epoch": 0.29237498309058935, + "grad_norm": 1.8034183944459548, + "learning_rate": 3.3203653871944224e-06, + "loss": 0.8781, + "step": 3242 + }, + { + "epoch": 0.29246516661405964, + "grad_norm": 1.4786254358267603, + "learning_rate": 3.3199265273112587e-06, + "loss": 0.9332, + "step": 3243 + }, + { + "epoch": 0.29255535013752987, + "grad_norm": 1.575475905437237, + "learning_rate": 3.3194875548046852e-06, + "loss": 0.9053, + "step": 3244 + }, + { + "epoch": 0.29264553366100016, + "grad_norm": 1.4605978504639905, + "learning_rate": 3.319048469712158e-06, + "loss": 0.9438, + "step": 3245 + }, + { + "epoch": 0.2927357171844704, + "grad_norm": 1.6207698636347787, + "learning_rate": 3.3186092720711423e-06, + "loss": 1.0627, + "step": 3246 + }, + { + "epoch": 0.2928259007079407, + "grad_norm": 1.5577388703939379, + "learning_rate": 3.3181699619191125e-06, + "loss": 0.8907, + "step": 3247 + }, + { + "epoch": 0.2929160842314109, + "grad_norm": 1.5016371217221098, + "learning_rate": 3.3177305392935536e-06, + "loss": 1.0028, + "step": 3248 + }, + { + "epoch": 0.2930062677548812, + "grad_norm": 0.8989341358333247, + "learning_rate": 3.3172910042319595e-06, + "loss": 0.8635, + "step": 3249 + }, + { + "epoch": 0.29309645127835143, + "grad_norm": 1.3448067878722034, + "learning_rate": 3.316851356771833e-06, + "loss": 1.0046, + "step": 3250 + }, + { + "epoch": 0.2931866348018217, + "grad_norm": 1.285004487976074, 
+ "learning_rate": 3.3164115969506876e-06, + "loss": 0.9933, + "step": 3251 + }, + { + "epoch": 0.29327681832529195, + "grad_norm": 2.872426747316835, + "learning_rate": 3.315971724806046e-06, + "loss": 1.052, + "step": 3252 + }, + { + "epoch": 0.29336700184876224, + "grad_norm": 1.6591201960162807, + "learning_rate": 3.315531740375441e-06, + "loss": 0.9059, + "step": 3253 + }, + { + "epoch": 0.2934571853722325, + "grad_norm": 1.471882260055617, + "learning_rate": 3.315091643696414e-06, + "loss": 0.9271, + "step": 3254 + }, + { + "epoch": 0.29354736889570276, + "grad_norm": 1.6877867137208342, + "learning_rate": 3.3146514348065164e-06, + "loss": 0.8934, + "step": 3255 + }, + { + "epoch": 0.293637552419173, + "grad_norm": 2.0444555762006327, + "learning_rate": 3.31421111374331e-06, + "loss": 0.9729, + "step": 3256 + }, + { + "epoch": 0.2937277359426433, + "grad_norm": 1.3540593275908568, + "learning_rate": 3.3137706805443647e-06, + "loss": 0.9806, + "step": 3257 + }, + { + "epoch": 0.2938179194661135, + "grad_norm": 1.470766104151414, + "learning_rate": 3.313330135247261e-06, + "loss": 1.0154, + "step": 3258 + }, + { + "epoch": 0.2939081029895838, + "grad_norm": 1.254655133087612, + "learning_rate": 3.312889477889588e-06, + "loss": 0.9332, + "step": 3259 + }, + { + "epoch": 0.29399828651305404, + "grad_norm": 1.398270623676854, + "learning_rate": 3.3124487085089464e-06, + "loss": 0.9743, + "step": 3260 + }, + { + "epoch": 0.29408847003652433, + "grad_norm": 1.7148337396490285, + "learning_rate": 3.312007827142943e-06, + "loss": 1.0001, + "step": 3261 + }, + { + "epoch": 0.29417865355999456, + "grad_norm": 1.268857102031218, + "learning_rate": 3.3115668338291983e-06, + "loss": 0.8842, + "step": 3262 + }, + { + "epoch": 0.29426883708346485, + "grad_norm": 1.4194277148786065, + "learning_rate": 3.3111257286053394e-06, + "loss": 1.0403, + "step": 3263 + }, + { + "epoch": 0.29435902060693514, + "grad_norm": 1.4208227602806733, + "learning_rate": 3.3106845115090043e-06, + "loss": 0.9702, + "step": 3264 + }, + { + "epoch": 0.2944492041304054, + "grad_norm": 1.40797306038541, + "learning_rate": 3.310243182577839e-06, + "loss": 0.9167, + "step": 3265 + }, + { + "epoch": 0.29453938765387566, + "grad_norm": 1.3826502473114382, + "learning_rate": 3.3098017418495007e-06, + "loss": 0.8628, + "step": 3266 + }, + { + "epoch": 0.2946295711773459, + "grad_norm": 1.4227438986109011, + "learning_rate": 3.309360189361656e-06, + "loss": 0.9272, + "step": 3267 + }, + { + "epoch": 0.2947197547008162, + "grad_norm": 1.3955392907385067, + "learning_rate": 3.3089185251519797e-06, + "loss": 1.0439, + "step": 3268 + }, + { + "epoch": 0.2948099382242864, + "grad_norm": 1.4522654134175719, + "learning_rate": 3.3084767492581574e-06, + "loss": 0.9463, + "step": 3269 + }, + { + "epoch": 0.2949001217477567, + "grad_norm": 1.6740805792744657, + "learning_rate": 3.3080348617178846e-06, + "loss": 0.98, + "step": 3270 + }, + { + "epoch": 0.29499030527122694, + "grad_norm": 1.3357367559936955, + "learning_rate": 3.307592862568865e-06, + "loss": 0.9324, + "step": 3271 + }, + { + "epoch": 0.2950804887946972, + "grad_norm": 1.2922781860560248, + "learning_rate": 3.307150751848812e-06, + "loss": 1.0246, + "step": 3272 + }, + { + "epoch": 0.29517067231816746, + "grad_norm": 1.3002828382039415, + "learning_rate": 3.3067085295954497e-06, + "loss": 0.8679, + "step": 3273 + }, + { + "epoch": 0.29526085584163775, + "grad_norm": 2.5153235971663594, + "learning_rate": 3.3062661958465098e-06, + "loss": 0.8374, + "step": 3274 + }, + { + 
"epoch": 0.295351039365108, + "grad_norm": 1.553939486033569, + "learning_rate": 3.305823750639736e-06, + "loss": 0.8594, + "step": 3275 + }, + { + "epoch": 0.29544122288857827, + "grad_norm": 1.3874908876549426, + "learning_rate": 3.3053811940128795e-06, + "loss": 0.9595, + "step": 3276 + }, + { + "epoch": 0.2955314064120485, + "grad_norm": 1.4530223687137593, + "learning_rate": 3.3049385260037016e-06, + "loss": 0.9463, + "step": 3277 + }, + { + "epoch": 0.2956215899355188, + "grad_norm": 1.3558689373166029, + "learning_rate": 3.3044957466499736e-06, + "loss": 0.8571, + "step": 3278 + }, + { + "epoch": 0.295711773458989, + "grad_norm": 1.3888151843329715, + "learning_rate": 3.304052855989475e-06, + "loss": 0.8835, + "step": 3279 + }, + { + "epoch": 0.2958019569824593, + "grad_norm": 1.299786069580458, + "learning_rate": 3.3036098540599966e-06, + "loss": 1.0178, + "step": 3280 + }, + { + "epoch": 0.29589214050592955, + "grad_norm": 3.756381263914798, + "learning_rate": 3.3031667408993373e-06, + "loss": 0.9291, + "step": 3281 + }, + { + "epoch": 0.29598232402939983, + "grad_norm": 1.5666780687823563, + "learning_rate": 3.302723516545306e-06, + "loss": 0.9616, + "step": 3282 + }, + { + "epoch": 0.29607250755287007, + "grad_norm": 1.4429514802850396, + "learning_rate": 3.302280181035722e-06, + "loss": 0.9883, + "step": 3283 + }, + { + "epoch": 0.29616269107634036, + "grad_norm": 0.8228463050281357, + "learning_rate": 3.3018367344084117e-06, + "loss": 0.8367, + "step": 3284 + }, + { + "epoch": 0.2962528745998106, + "grad_norm": 2.091053265097952, + "learning_rate": 3.3013931767012125e-06, + "loss": 0.9515, + "step": 3285 + }, + { + "epoch": 0.2963430581232809, + "grad_norm": 1.4049463375201703, + "learning_rate": 3.300949507951972e-06, + "loss": 0.9591, + "step": 3286 + }, + { + "epoch": 0.2964332416467511, + "grad_norm": 1.996777441659379, + "learning_rate": 3.300505728198546e-06, + "loss": 1.0114, + "step": 3287 + }, + { + "epoch": 0.2965234251702214, + "grad_norm": 1.519561139673294, + "learning_rate": 3.3000618374788e-06, + "loss": 0.9437, + "step": 3288 + }, + { + "epoch": 0.2966136086936917, + "grad_norm": 1.6046167790801915, + "learning_rate": 3.2996178358306104e-06, + "loss": 0.9999, + "step": 3289 + }, + { + "epoch": 0.2967037922171619, + "grad_norm": 1.4035304581802972, + "learning_rate": 3.2991737232918606e-06, + "loss": 0.9307, + "step": 3290 + }, + { + "epoch": 0.2967939757406322, + "grad_norm": 1.5619745515418317, + "learning_rate": 3.298729499900445e-06, + "loss": 0.9085, + "step": 3291 + }, + { + "epoch": 0.29688415926410244, + "grad_norm": 1.6996445396396003, + "learning_rate": 3.2982851656942677e-06, + "loss": 0.9074, + "step": 3292 + }, + { + "epoch": 0.29697434278757273, + "grad_norm": 1.2736617282907137, + "learning_rate": 3.2978407207112416e-06, + "loss": 0.9823, + "step": 3293 + }, + { + "epoch": 0.29706452631104296, + "grad_norm": 1.7092888261180201, + "learning_rate": 3.2973961649892888e-06, + "loss": 0.9665, + "step": 3294 + }, + { + "epoch": 0.29715470983451325, + "grad_norm": 1.4460977431643527, + "learning_rate": 3.296951498566341e-06, + "loss": 1.0216, + "step": 3295 + }, + { + "epoch": 0.2972448933579835, + "grad_norm": 1.5553739939423266, + "learning_rate": 3.2965067214803404e-06, + "loss": 0.9583, + "step": 3296 + }, + { + "epoch": 0.2973350768814538, + "grad_norm": 1.1328233389500313, + "learning_rate": 3.2960618337692372e-06, + "loss": 0.9891, + "step": 3297 + }, + { + "epoch": 0.297425260404924, + "grad_norm": 1.3523313948200772, + "learning_rate": 
3.2956168354709927e-06, + "loss": 0.9223, + "step": 3298 + }, + { + "epoch": 0.2975154439283943, + "grad_norm": 1.5450291264822378, + "learning_rate": 3.2951717266235754e-06, + "loss": 1.0072, + "step": 3299 + }, + { + "epoch": 0.29760562745186453, + "grad_norm": 1.2907189101812553, + "learning_rate": 3.294726507264964e-06, + "loss": 0.8383, + "step": 3300 + }, + { + "epoch": 0.2976958109753348, + "grad_norm": 1.5689149017596917, + "learning_rate": 3.2942811774331487e-06, + "loss": 0.9298, + "step": 3301 + }, + { + "epoch": 0.29778599449880505, + "grad_norm": 1.6602475848037483, + "learning_rate": 3.293835737166127e-06, + "loss": 0.9497, + "step": 3302 + }, + { + "epoch": 0.29787617802227534, + "grad_norm": 1.4247229423892178, + "learning_rate": 3.293390186501906e-06, + "loss": 0.8974, + "step": 3303 + }, + { + "epoch": 0.2979663615457456, + "grad_norm": 1.502211133129403, + "learning_rate": 3.2929445254785024e-06, + "loss": 0.9722, + "step": 3304 + }, + { + "epoch": 0.29805654506921586, + "grad_norm": 1.9134056365521723, + "learning_rate": 3.2924987541339423e-06, + "loss": 0.9191, + "step": 3305 + }, + { + "epoch": 0.2981467285926861, + "grad_norm": 1.5691395644435284, + "learning_rate": 3.292052872506262e-06, + "loss": 0.983, + "step": 3306 + }, + { + "epoch": 0.2982369121161564, + "grad_norm": 1.3085221000528036, + "learning_rate": 3.291606880633506e-06, + "loss": 1.0005, + "step": 3307 + }, + { + "epoch": 0.2983270956396266, + "grad_norm": 1.563093759254296, + "learning_rate": 3.2911607785537297e-06, + "loss": 1.0109, + "step": 3308 + }, + { + "epoch": 0.2984172791630969, + "grad_norm": 1.5388131496384225, + "learning_rate": 3.290714566304997e-06, + "loss": 1.0487, + "step": 3309 + }, + { + "epoch": 0.29850746268656714, + "grad_norm": 1.6325338755622802, + "learning_rate": 3.2902682439253794e-06, + "loss": 1.03, + "step": 3310 + }, + { + "epoch": 0.2985976462100374, + "grad_norm": 2.0142752689643433, + "learning_rate": 3.289821811452961e-06, + "loss": 0.9726, + "step": 3311 + }, + { + "epoch": 0.2986878297335077, + "grad_norm": 1.934605374276071, + "learning_rate": 3.289375268925834e-06, + "loss": 1.0171, + "step": 3312 + }, + { + "epoch": 0.29877801325697795, + "grad_norm": 1.8885786473363195, + "learning_rate": 3.288928616382099e-06, + "loss": 0.9449, + "step": 3313 + }, + { + "epoch": 0.29886819678044824, + "grad_norm": 1.2096073844078759, + "learning_rate": 3.288481853859868e-06, + "loss": 0.9958, + "step": 3314 + }, + { + "epoch": 0.29895838030391847, + "grad_norm": 1.2108749373487053, + "learning_rate": 3.2880349813972604e-06, + "loss": 0.9386, + "step": 3315 + }, + { + "epoch": 0.29904856382738876, + "grad_norm": 1.4807162518317682, + "learning_rate": 3.2875879990324052e-06, + "loss": 0.9603, + "step": 3316 + }, + { + "epoch": 0.299138747350859, + "grad_norm": 1.6734543942864024, + "learning_rate": 3.287140906803443e-06, + "loss": 1.0181, + "step": 3317 + }, + { + "epoch": 0.2992289308743293, + "grad_norm": 1.4462909582564805, + "learning_rate": 3.2866937047485216e-06, + "loss": 0.8775, + "step": 3318 + }, + { + "epoch": 0.2993191143977995, + "grad_norm": 1.6236429416758578, + "learning_rate": 3.2862463929057985e-06, + "loss": 1.0259, + "step": 3319 + }, + { + "epoch": 0.2994092979212698, + "grad_norm": 1.4184860252791895, + "learning_rate": 3.285798971313441e-06, + "loss": 0.8077, + "step": 3320 + }, + { + "epoch": 0.29949948144474003, + "grad_norm": 1.2337250748950532, + "learning_rate": 3.2853514400096248e-06, + "loss": 0.9834, + "step": 3321 + }, + { + "epoch": 
0.2995896649682103, + "grad_norm": 1.2929086815286985, + "learning_rate": 3.2849037990325367e-06, + "loss": 0.9898, + "step": 3322 + }, + { + "epoch": 0.29967984849168056, + "grad_norm": 1.1897350659893016, + "learning_rate": 3.2844560484203717e-06, + "loss": 0.9132, + "step": 3323 + }, + { + "epoch": 0.29977003201515084, + "grad_norm": 1.2126730510273778, + "learning_rate": 3.2840081882113333e-06, + "loss": 1.0466, + "step": 3324 + }, + { + "epoch": 0.2998602155386211, + "grad_norm": 2.0405323095140284, + "learning_rate": 3.283560218443638e-06, + "loss": 0.9397, + "step": 3325 + }, + { + "epoch": 0.29995039906209137, + "grad_norm": 1.2945768837491352, + "learning_rate": 3.2831121391555064e-06, + "loss": 0.9729, + "step": 3326 + }, + { + "epoch": 0.3000405825855616, + "grad_norm": 1.298383960928366, + "learning_rate": 3.2826639503851724e-06, + "loss": 0.9102, + "step": 3327 + }, + { + "epoch": 0.3001307661090319, + "grad_norm": 1.3029427654814867, + "learning_rate": 3.282215652170877e-06, + "loss": 0.994, + "step": 3328 + }, + { + "epoch": 0.3002209496325021, + "grad_norm": 1.4376775383158271, + "learning_rate": 3.281767244550873e-06, + "loss": 0.9971, + "step": 3329 + }, + { + "epoch": 0.3003111331559724, + "grad_norm": 0.6846575062466144, + "learning_rate": 3.2813187275634193e-06, + "loss": 0.762, + "step": 3330 + }, + { + "epoch": 0.30040131667944264, + "grad_norm": 1.42923858765601, + "learning_rate": 3.280870101246787e-06, + "loss": 0.9883, + "step": 3331 + }, + { + "epoch": 0.30049150020291293, + "grad_norm": 1.5546691740096492, + "learning_rate": 3.280421365639255e-06, + "loss": 0.879, + "step": 3332 + }, + { + "epoch": 0.30058168372638316, + "grad_norm": 1.571777192062745, + "learning_rate": 3.279972520779112e-06, + "loss": 0.9452, + "step": 3333 + }, + { + "epoch": 0.30067186724985345, + "grad_norm": 1.471723590036516, + "learning_rate": 3.279523566704656e-06, + "loss": 1.0289, + "step": 3334 + }, + { + "epoch": 0.30076205077332374, + "grad_norm": 1.5041422869255512, + "learning_rate": 3.2790745034541935e-06, + "loss": 1.021, + "step": 3335 + }, + { + "epoch": 0.300852234296794, + "grad_norm": 1.4683012581193025, + "learning_rate": 3.278625331066042e-06, + "loss": 1.0101, + "step": 3336 + }, + { + "epoch": 0.30094241782026426, + "grad_norm": 4.514275055367439, + "learning_rate": 3.278176049578527e-06, + "loss": 1.005, + "step": 3337 + }, + { + "epoch": 0.3010326013437345, + "grad_norm": 1.5360828350301357, + "learning_rate": 3.2777266590299835e-06, + "loss": 0.9192, + "step": 3338 + }, + { + "epoch": 0.3011227848672048, + "grad_norm": 1.5851950575363343, + "learning_rate": 3.2772771594587562e-06, + "loss": 0.8936, + "step": 3339 + }, + { + "epoch": 0.301212968390675, + "grad_norm": 1.582037202800336, + "learning_rate": 3.2768275509031988e-06, + "loss": 1.0598, + "step": 3340 + }, + { + "epoch": 0.3013031519141453, + "grad_norm": 1.5483072759258139, + "learning_rate": 3.276377833401675e-06, + "loss": 0.9464, + "step": 3341 + }, + { + "epoch": 0.30139333543761554, + "grad_norm": 1.4404205011624445, + "learning_rate": 3.2759280069925557e-06, + "loss": 1.0195, + "step": 3342 + }, + { + "epoch": 0.30148351896108583, + "grad_norm": 1.5679075889652394, + "learning_rate": 3.2754780717142233e-06, + "loss": 1.006, + "step": 3343 + }, + { + "epoch": 0.30157370248455606, + "grad_norm": 1.617667306681507, + "learning_rate": 3.27502802760507e-06, + "loss": 0.941, + "step": 3344 + }, + { + "epoch": 0.30166388600802635, + "grad_norm": 1.450842757146044, + "learning_rate": 3.2745778747034943e-06, 
+ "loss": 0.8757, + "step": 3345 + }, + { + "epoch": 0.3017540695314966, + "grad_norm": 1.6368157624329398, + "learning_rate": 3.274127613047906e-06, + "loss": 0.9517, + "step": 3346 + }, + { + "epoch": 0.30184425305496687, + "grad_norm": 1.492117435747729, + "learning_rate": 3.273677242676725e-06, + "loss": 0.9833, + "step": 3347 + }, + { + "epoch": 0.3019344365784371, + "grad_norm": 1.5828562151491536, + "learning_rate": 3.2732267636283782e-06, + "loss": 0.951, + "step": 3348 + }, + { + "epoch": 0.3020246201019074, + "grad_norm": 1.608975777599595, + "learning_rate": 3.2727761759413034e-06, + "loss": 0.9144, + "step": 3349 + }, + { + "epoch": 0.3021148036253776, + "grad_norm": 1.5472998758062917, + "learning_rate": 3.2723254796539477e-06, + "loss": 0.9763, + "step": 3350 + }, + { + "epoch": 0.3022049871488479, + "grad_norm": 1.4493148237191094, + "learning_rate": 3.271874674804766e-06, + "loss": 0.9439, + "step": 3351 + }, + { + "epoch": 0.30229517067231815, + "grad_norm": 1.4102147901224447, + "learning_rate": 3.2714237614322242e-06, + "loss": 0.9861, + "step": 3352 + }, + { + "epoch": 0.30238535419578844, + "grad_norm": 1.2726153194432865, + "learning_rate": 3.2709727395747974e-06, + "loss": 0.7933, + "step": 3353 + }, + { + "epoch": 0.30247553771925867, + "grad_norm": 0.7324158121493856, + "learning_rate": 3.2705216092709673e-06, + "loss": 0.8032, + "step": 3354 + }, + { + "epoch": 0.30256572124272896, + "grad_norm": 1.640245884280106, + "learning_rate": 3.2700703705592282e-06, + "loss": 1.0482, + "step": 3355 + }, + { + "epoch": 0.3026559047661992, + "grad_norm": 1.3920665941066446, + "learning_rate": 3.269619023478082e-06, + "loss": 0.8641, + "step": 3356 + }, + { + "epoch": 0.3027460882896695, + "grad_norm": 1.4139385274422933, + "learning_rate": 3.26916756806604e-06, + "loss": 1.0568, + "step": 3357 + }, + { + "epoch": 0.3028362718131397, + "grad_norm": 1.4932481281773964, + "learning_rate": 3.268716004361623e-06, + "loss": 0.9714, + "step": 3358 + }, + { + "epoch": 0.30292645533661, + "grad_norm": 0.7392000405075219, + "learning_rate": 3.268264332403361e-06, + "loss": 0.8269, + "step": 3359 + }, + { + "epoch": 0.3030166388600803, + "grad_norm": 1.240802880898334, + "learning_rate": 3.2678125522297933e-06, + "loss": 0.9966, + "step": 3360 + }, + { + "epoch": 0.3031068223835505, + "grad_norm": 1.9036254951305573, + "learning_rate": 3.267360663879468e-06, + "loss": 0.909, + "step": 3361 + }, + { + "epoch": 0.3031970059070208, + "grad_norm": 1.7040441989807888, + "learning_rate": 3.266908667390942e-06, + "loss": 0.9488, + "step": 3362 + }, + { + "epoch": 0.30328718943049104, + "grad_norm": 1.6774435268623435, + "learning_rate": 3.2664565628027833e-06, + "loss": 1.0125, + "step": 3363 + }, + { + "epoch": 0.30337737295396133, + "grad_norm": 1.7567431778987708, + "learning_rate": 3.2660043501535675e-06, + "loss": 0.9631, + "step": 3364 + }, + { + "epoch": 0.30346755647743157, + "grad_norm": 1.5817527996898828, + "learning_rate": 3.2655520294818797e-06, + "loss": 1.028, + "step": 3365 + }, + { + "epoch": 0.30355774000090185, + "grad_norm": 1.2784057775126973, + "learning_rate": 3.2650996008263146e-06, + "loss": 1.0326, + "step": 3366 + }, + { + "epoch": 0.3036479235243721, + "grad_norm": 1.6328189247406495, + "learning_rate": 3.2646470642254756e-06, + "loss": 0.9182, + "step": 3367 + }, + { + "epoch": 0.3037381070478424, + "grad_norm": 1.5954350557991852, + "learning_rate": 3.2641944197179767e-06, + "loss": 0.9831, + "step": 3368 + }, + { + "epoch": 0.3038282905713126, + "grad_norm": 
0.8359437746186824, + "learning_rate": 3.2637416673424383e-06, + "loss": 0.7852, + "step": 3369 + }, + { + "epoch": 0.3039184740947829, + "grad_norm": 1.3860074220241192, + "learning_rate": 3.2632888071374937e-06, + "loss": 0.9813, + "step": 3370 + }, + { + "epoch": 0.30400865761825313, + "grad_norm": 1.495255357373948, + "learning_rate": 3.2628358391417815e-06, + "loss": 0.9967, + "step": 3371 + }, + { + "epoch": 0.3040988411417234, + "grad_norm": 1.5392046247977886, + "learning_rate": 3.2623827633939526e-06, + "loss": 0.9483, + "step": 3372 + }, + { + "epoch": 0.30418902466519365, + "grad_norm": 1.496088491086955, + "learning_rate": 3.2619295799326657e-06, + "loss": 0.9655, + "step": 3373 + }, + { + "epoch": 0.30427920818866394, + "grad_norm": 1.6258999459942653, + "learning_rate": 3.2614762887965883e-06, + "loss": 0.8532, + "step": 3374 + }, + { + "epoch": 0.3043693917121342, + "grad_norm": 1.4559824751019343, + "learning_rate": 3.2610228900243984e-06, + "loss": 0.9579, + "step": 3375 + }, + { + "epoch": 0.30445957523560446, + "grad_norm": 2.0392901414103073, + "learning_rate": 3.260569383654783e-06, + "loss": 0.8414, + "step": 3376 + }, + { + "epoch": 0.3045497587590747, + "grad_norm": 0.7388658580521016, + "learning_rate": 3.2601157697264365e-06, + "loss": 0.8462, + "step": 3377 + }, + { + "epoch": 0.304639942282545, + "grad_norm": 1.5819834760822922, + "learning_rate": 3.2596620482780647e-06, + "loss": 0.9406, + "step": 3378 + }, + { + "epoch": 0.3047301258060152, + "grad_norm": 0.7078278832521824, + "learning_rate": 3.2592082193483803e-06, + "loss": 0.8056, + "step": 3379 + }, + { + "epoch": 0.3048203093294855, + "grad_norm": 1.4528291462621334, + "learning_rate": 3.258754282976109e-06, + "loss": 0.9448, + "step": 3380 + }, + { + "epoch": 0.30491049285295574, + "grad_norm": 1.5238027281552873, + "learning_rate": 3.25830023919998e-06, + "loss": 1.0309, + "step": 3381 + }, + { + "epoch": 0.305000676376426, + "grad_norm": 1.408008323877829, + "learning_rate": 3.2578460880587374e-06, + "loss": 0.9247, + "step": 3382 + }, + { + "epoch": 0.3050908598998963, + "grad_norm": 1.6708569858744593, + "learning_rate": 3.2573918295911306e-06, + "loss": 1.0214, + "step": 3383 + }, + { + "epoch": 0.30518104342336655, + "grad_norm": 1.5000476034875967, + "learning_rate": 3.2569374638359196e-06, + "loss": 0.9065, + "step": 3384 + }, + { + "epoch": 0.30527122694683684, + "grad_norm": 1.2534384642211889, + "learning_rate": 3.2564829908318736e-06, + "loss": 0.9819, + "step": 3385 + }, + { + "epoch": 0.30536141047030707, + "grad_norm": 1.2191404059696214, + "learning_rate": 3.2560284106177705e-06, + "loss": 1.0211, + "step": 3386 + }, + { + "epoch": 0.30545159399377736, + "grad_norm": 1.4520526333614356, + "learning_rate": 3.2555737232323978e-06, + "loss": 0.9401, + "step": 3387 + }, + { + "epoch": 0.3055417775172476, + "grad_norm": 1.272511713240099, + "learning_rate": 3.255118928714552e-06, + "loss": 0.9948, + "step": 3388 + }, + { + "epoch": 0.3056319610407179, + "grad_norm": 1.725895057887134, + "learning_rate": 3.2546640271030386e-06, + "loss": 0.9058, + "step": 3389 + }, + { + "epoch": 0.3057221445641881, + "grad_norm": 0.712288631073102, + "learning_rate": 3.2542090184366717e-06, + "loss": 0.8446, + "step": 3390 + }, + { + "epoch": 0.3058123280876584, + "grad_norm": 1.500336768175143, + "learning_rate": 3.253753902754276e-06, + "loss": 1.0396, + "step": 3391 + }, + { + "epoch": 0.30590251161112864, + "grad_norm": 1.2305055643054286, + "learning_rate": 3.253298680094685e-06, + "loss": 0.9049, + 
"step": 3392 + }, + { + "epoch": 0.3059926951345989, + "grad_norm": 1.6597193243975947, + "learning_rate": 3.2528433504967394e-06, + "loss": 0.9055, + "step": 3393 + }, + { + "epoch": 0.30608287865806916, + "grad_norm": 1.4596479303627237, + "learning_rate": 3.252387913999291e-06, + "loss": 0.9694, + "step": 3394 + }, + { + "epoch": 0.30617306218153945, + "grad_norm": 1.4929167877688383, + "learning_rate": 3.2519323706411998e-06, + "loss": 0.9652, + "step": 3395 + }, + { + "epoch": 0.3062632457050097, + "grad_norm": 1.3283592466334004, + "learning_rate": 3.251476720461336e-06, + "loss": 0.854, + "step": 3396 + }, + { + "epoch": 0.30635342922847997, + "grad_norm": 1.749315604850344, + "learning_rate": 3.251020963498578e-06, + "loss": 0.8897, + "step": 3397 + }, + { + "epoch": 0.3064436127519502, + "grad_norm": 1.3311048849468283, + "learning_rate": 3.250565099791813e-06, + "loss": 1.01, + "step": 3398 + }, + { + "epoch": 0.3065337962754205, + "grad_norm": 1.1735643036416024, + "learning_rate": 3.2501091293799387e-06, + "loss": 1.0112, + "step": 3399 + }, + { + "epoch": 0.3066239797988907, + "grad_norm": 1.3323937870801532, + "learning_rate": 3.24965305230186e-06, + "loss": 0.9766, + "step": 3400 + }, + { + "epoch": 0.306714163322361, + "grad_norm": 1.8794984895294689, + "learning_rate": 3.249196868596492e-06, + "loss": 1.0367, + "step": 3401 + }, + { + "epoch": 0.30680434684583124, + "grad_norm": 1.55563019202574, + "learning_rate": 3.24874057830276e-06, + "loss": 0.8961, + "step": 3402 + }, + { + "epoch": 0.30689453036930153, + "grad_norm": 1.4526244090864993, + "learning_rate": 3.2482841814595954e-06, + "loss": 0.892, + "step": 3403 + }, + { + "epoch": 0.30698471389277177, + "grad_norm": 1.5401981249845529, + "learning_rate": 3.247827678105943e-06, + "loss": 0.8988, + "step": 3404 + }, + { + "epoch": 0.30707489741624205, + "grad_norm": 1.4232889208836625, + "learning_rate": 3.247371068280751e-06, + "loss": 0.9556, + "step": 3405 + }, + { + "epoch": 0.3071650809397123, + "grad_norm": 1.21032034621489, + "learning_rate": 3.2469143520229823e-06, + "loss": 0.826, + "step": 3406 + }, + { + "epoch": 0.3072552644631826, + "grad_norm": 1.2370606673311935, + "learning_rate": 3.2464575293716054e-06, + "loss": 0.9194, + "step": 3407 + }, + { + "epoch": 0.30734544798665286, + "grad_norm": 1.471022937142008, + "learning_rate": 3.2460006003655997e-06, + "loss": 0.9035, + "step": 3408 + }, + { + "epoch": 0.3074356315101231, + "grad_norm": 1.6102149771281316, + "learning_rate": 3.245543565043952e-06, + "loss": 1.0238, + "step": 3409 + }, + { + "epoch": 0.3075258150335934, + "grad_norm": 1.7079157512755716, + "learning_rate": 3.2450864234456592e-06, + "loss": 0.9541, + "step": 3410 + }, + { + "epoch": 0.3076159985570636, + "grad_norm": 1.2831681942068325, + "learning_rate": 3.244629175609728e-06, + "loss": 0.9567, + "step": 3411 + }, + { + "epoch": 0.3077061820805339, + "grad_norm": 1.476308245692836, + "learning_rate": 3.2441718215751726e-06, + "loss": 0.9476, + "step": 3412 + }, + { + "epoch": 0.30779636560400414, + "grad_norm": 1.5814304294100214, + "learning_rate": 3.2437143613810173e-06, + "loss": 0.9249, + "step": 3413 + }, + { + "epoch": 0.30788654912747443, + "grad_norm": 1.4501722134892274, + "learning_rate": 3.2432567950662947e-06, + "loss": 1.0025, + "step": 3414 + }, + { + "epoch": 0.30797673265094466, + "grad_norm": 1.3055961323016978, + "learning_rate": 3.2427991226700468e-06, + "loss": 0.9526, + "step": 3415 + }, + { + "epoch": 0.30806691617441495, + "grad_norm": 1.4934827365956653, + 
"learning_rate": 3.2423413442313246e-06, + "loss": 0.8941, + "step": 3416 + }, + { + "epoch": 0.3081570996978852, + "grad_norm": 1.5506030540035722, + "learning_rate": 3.2418834597891904e-06, + "loss": 0.8747, + "step": 3417 + }, + { + "epoch": 0.3082472832213555, + "grad_norm": 1.8718365367574077, + "learning_rate": 3.2414254693827098e-06, + "loss": 1.048, + "step": 3418 + }, + { + "epoch": 0.3083374667448257, + "grad_norm": 1.2720613591461696, + "learning_rate": 3.2409673730509644e-06, + "loss": 0.9431, + "step": 3419 + }, + { + "epoch": 0.308427650268296, + "grad_norm": 1.5181046484458218, + "learning_rate": 3.2405091708330393e-06, + "loss": 0.9374, + "step": 3420 + }, + { + "epoch": 0.3085178337917662, + "grad_norm": 1.6282825327861679, + "learning_rate": 3.2400508627680323e-06, + "loss": 0.9564, + "step": 3421 + }, + { + "epoch": 0.3086080173152365, + "grad_norm": 1.678288573634526, + "learning_rate": 3.2395924488950474e-06, + "loss": 1.0501, + "step": 3422 + }, + { + "epoch": 0.30869820083870675, + "grad_norm": 2.2459228450604343, + "learning_rate": 3.2391339292532004e-06, + "loss": 1.0841, + "step": 3423 + }, + { + "epoch": 0.30878838436217704, + "grad_norm": 1.660545967584585, + "learning_rate": 3.238675303881614e-06, + "loss": 0.9084, + "step": 3424 + }, + { + "epoch": 0.30887856788564727, + "grad_norm": 1.6404354894267263, + "learning_rate": 3.2382165728194203e-06, + "loss": 0.9285, + "step": 3425 + }, + { + "epoch": 0.30896875140911756, + "grad_norm": 1.3723425761342363, + "learning_rate": 3.237757736105761e-06, + "loss": 0.9437, + "step": 3426 + }, + { + "epoch": 0.3090589349325878, + "grad_norm": 1.2390806584031846, + "learning_rate": 3.2372987937797867e-06, + "loss": 1.034, + "step": 3427 + }, + { + "epoch": 0.3091491184560581, + "grad_norm": 1.2110914071013283, + "learning_rate": 3.2368397458806573e-06, + "loss": 0.8656, + "step": 3428 + }, + { + "epoch": 0.3092393019795283, + "grad_norm": 1.5687023657143748, + "learning_rate": 3.2363805924475412e-06, + "loss": 0.9937, + "step": 3429 + }, + { + "epoch": 0.3093294855029986, + "grad_norm": 1.824520372046499, + "learning_rate": 3.2359213335196153e-06, + "loss": 0.9075, + "step": 3430 + }, + { + "epoch": 0.3094196690264689, + "grad_norm": 1.4861667460230912, + "learning_rate": 3.2354619691360663e-06, + "loss": 0.9132, + "step": 3431 + }, + { + "epoch": 0.3095098525499391, + "grad_norm": 1.2167138917893832, + "learning_rate": 3.2350024993360898e-06, + "loss": 0.9984, + "step": 3432 + }, + { + "epoch": 0.3096000360734094, + "grad_norm": 1.7600636626780541, + "learning_rate": 3.2345429241588902e-06, + "loss": 0.9622, + "step": 3433 + }, + { + "epoch": 0.30969021959687965, + "grad_norm": 1.5802330818157777, + "learning_rate": 3.234083243643681e-06, + "loss": 0.9831, + "step": 3434 + }, + { + "epoch": 0.30978040312034993, + "grad_norm": 1.4703238962118954, + "learning_rate": 3.233623457829686e-06, + "loss": 1.0297, + "step": 3435 + }, + { + "epoch": 0.30987058664382017, + "grad_norm": 1.7836140120596449, + "learning_rate": 3.2331635667561344e-06, + "loss": 0.9337, + "step": 3436 + }, + { + "epoch": 0.30996077016729046, + "grad_norm": 1.4101576871151351, + "learning_rate": 3.2327035704622674e-06, + "loss": 0.9813, + "step": 3437 + }, + { + "epoch": 0.3100509536907607, + "grad_norm": 1.3795165961616367, + "learning_rate": 3.2322434689873353e-06, + "loss": 0.8595, + "step": 3438 + }, + { + "epoch": 0.310141137214231, + "grad_norm": 1.3825136281974926, + "learning_rate": 3.2317832623705957e-06, + "loss": 1.0398, + "step": 3439 + }, + { + 
"epoch": 0.3102313207377012, + "grad_norm": 1.9202172798157193, + "learning_rate": 3.231322950651316e-06, + "loss": 0.9055, + "step": 3440 + }, + { + "epoch": 0.3103215042611715, + "grad_norm": 1.1329847533706, + "learning_rate": 3.2308625338687735e-06, + "loss": 0.9782, + "step": 3441 + }, + { + "epoch": 0.31041168778464173, + "grad_norm": 2.130701270737137, + "learning_rate": 3.230402012062252e-06, + "loss": 1.0763, + "step": 3442 + }, + { + "epoch": 0.310501871308112, + "grad_norm": 0.7912878251036646, + "learning_rate": 3.2299413852710466e-06, + "loss": 0.8403, + "step": 3443 + }, + { + "epoch": 0.31059205483158225, + "grad_norm": 1.4775569735674965, + "learning_rate": 3.2294806535344606e-06, + "loss": 0.9228, + "step": 3444 + }, + { + "epoch": 0.31068223835505254, + "grad_norm": 1.5765201226428347, + "learning_rate": 3.2290198168918056e-06, + "loss": 0.9986, + "step": 3445 + }, + { + "epoch": 0.3107724218785228, + "grad_norm": 1.409863216989115, + "learning_rate": 3.2285588753824035e-06, + "loss": 0.9999, + "step": 3446 + }, + { + "epoch": 0.31086260540199306, + "grad_norm": 1.551301345282838, + "learning_rate": 3.228097829045584e-06, + "loss": 0.9712, + "step": 3447 + }, + { + "epoch": 0.3109527889254633, + "grad_norm": 1.17133664798937, + "learning_rate": 3.227636677920685e-06, + "loss": 0.9922, + "step": 3448 + }, + { + "epoch": 0.3110429724489336, + "grad_norm": 1.4751480028285378, + "learning_rate": 3.2271754220470567e-06, + "loss": 1.0088, + "step": 3449 + }, + { + "epoch": 0.3111331559724038, + "grad_norm": 1.584193957817962, + "learning_rate": 3.2267140614640547e-06, + "loss": 1.0445, + "step": 3450 + }, + { + "epoch": 0.3112233394958741, + "grad_norm": 1.4678281772581152, + "learning_rate": 3.2262525962110445e-06, + "loss": 1.002, + "step": 3451 + }, + { + "epoch": 0.31131352301934434, + "grad_norm": 1.4315029770276888, + "learning_rate": 3.2257910263274015e-06, + "loss": 0.8875, + "step": 3452 + }, + { + "epoch": 0.31140370654281463, + "grad_norm": 1.6681259760133782, + "learning_rate": 3.225329351852509e-06, + "loss": 0.9062, + "step": 3453 + }, + { + "epoch": 0.3114938900662849, + "grad_norm": 1.7508145888770272, + "learning_rate": 3.2248675728257596e-06, + "loss": 0.9665, + "step": 3454 + }, + { + "epoch": 0.31158407358975515, + "grad_norm": 1.4888748226798192, + "learning_rate": 3.2244056892865557e-06, + "loss": 1.0295, + "step": 3455 + }, + { + "epoch": 0.31167425711322544, + "grad_norm": 1.7077978582007585, + "learning_rate": 3.2239437012743063e-06, + "loss": 0.9505, + "step": 3456 + }, + { + "epoch": 0.31176444063669567, + "grad_norm": 1.2410414105006788, + "learning_rate": 3.223481608828432e-06, + "loss": 0.9834, + "step": 3457 + }, + { + "epoch": 0.31185462416016596, + "grad_norm": 1.6838116781355144, + "learning_rate": 3.223019411988361e-06, + "loss": 0.98, + "step": 3458 + }, + { + "epoch": 0.3119448076836362, + "grad_norm": 1.445930390118922, + "learning_rate": 3.22255711079353e-06, + "loss": 0.9631, + "step": 3459 + }, + { + "epoch": 0.3120349912071065, + "grad_norm": 1.3631945913147077, + "learning_rate": 3.222094705283385e-06, + "loss": 0.9267, + "step": 3460 + }, + { + "epoch": 0.3121251747305767, + "grad_norm": 1.3999639949255884, + "learning_rate": 3.2216321954973805e-06, + "loss": 0.957, + "step": 3461 + }, + { + "epoch": 0.312215358254047, + "grad_norm": 1.364800205420426, + "learning_rate": 3.2211695814749816e-06, + "loss": 0.9523, + "step": 3462 + }, + { + "epoch": 0.31230554177751724, + "grad_norm": 1.9685997678566483, + "learning_rate": 
3.220706863255661e-06, + "loss": 1.0584, + "step": 3463 + }, + { + "epoch": 0.3123957253009875, + "grad_norm": 1.3902931245982977, + "learning_rate": 3.2202440408788994e-06, + "loss": 0.9553, + "step": 3464 + }, + { + "epoch": 0.31248590882445776, + "grad_norm": 1.4956585522533508, + "learning_rate": 3.2197811143841883e-06, + "loss": 1.0306, + "step": 3465 + }, + { + "epoch": 0.31257609234792805, + "grad_norm": 1.1968124054559048, + "learning_rate": 3.2193180838110267e-06, + "loss": 0.9534, + "step": 3466 + }, + { + "epoch": 0.3126662758713983, + "grad_norm": 1.637038607058788, + "learning_rate": 3.2188549491989225e-06, + "loss": 0.8539, + "step": 3467 + }, + { + "epoch": 0.31275645939486857, + "grad_norm": 1.5600028874297196, + "learning_rate": 3.2183917105873934e-06, + "loss": 0.9851, + "step": 3468 + }, + { + "epoch": 0.3128466429183388, + "grad_norm": 1.163642756420153, + "learning_rate": 3.217928368015966e-06, + "loss": 1.0175, + "step": 3469 + }, + { + "epoch": 0.3129368264418091, + "grad_norm": 1.3093066195383647, + "learning_rate": 3.217464921524174e-06, + "loss": 0.9904, + "step": 3470 + }, + { + "epoch": 0.3130270099652793, + "grad_norm": 1.4606420860874516, + "learning_rate": 3.2170013711515616e-06, + "loss": 0.9766, + "step": 3471 + }, + { + "epoch": 0.3131171934887496, + "grad_norm": 1.9224717369035924, + "learning_rate": 3.216537716937682e-06, + "loss": 0.8853, + "step": 3472 + }, + { + "epoch": 0.31320737701221985, + "grad_norm": 1.4665406024825425, + "learning_rate": 3.2160739589220968e-06, + "loss": 0.9325, + "step": 3473 + }, + { + "epoch": 0.31329756053569013, + "grad_norm": 1.3469270364219426, + "learning_rate": 3.215610097144376e-06, + "loss": 1.0159, + "step": 3474 + }, + { + "epoch": 0.31338774405916037, + "grad_norm": 1.578298578064295, + "learning_rate": 3.215146131644099e-06, + "loss": 0.9188, + "step": 3475 + }, + { + "epoch": 0.31347792758263066, + "grad_norm": 1.4680024537525926, + "learning_rate": 3.214682062460854e-06, + "loss": 1.0339, + "step": 3476 + }, + { + "epoch": 0.3135681111061009, + "grad_norm": 1.5063532750351043, + "learning_rate": 3.2142178896342367e-06, + "loss": 0.878, + "step": 3477 + }, + { + "epoch": 0.3136582946295712, + "grad_norm": 1.294954555553629, + "learning_rate": 3.2137536132038552e-06, + "loss": 0.9451, + "step": 3478 + }, + { + "epoch": 0.31374847815304147, + "grad_norm": 1.4647914215913815, + "learning_rate": 3.2132892332093226e-06, + "loss": 0.9606, + "step": 3479 + }, + { + "epoch": 0.3138386616765117, + "grad_norm": 1.3873516518958222, + "learning_rate": 3.2128247496902623e-06, + "loss": 1.0652, + "step": 3480 + }, + { + "epoch": 0.313928845199982, + "grad_norm": 1.3551340267013412, + "learning_rate": 3.2123601626863064e-06, + "loss": 0.9808, + "step": 3481 + }, + { + "epoch": 0.3140190287234522, + "grad_norm": 0.8910987330255072, + "learning_rate": 3.2118954722370974e-06, + "loss": 0.7722, + "step": 3482 + }, + { + "epoch": 0.3141092122469225, + "grad_norm": 1.4020477662622342, + "learning_rate": 3.2114306783822837e-06, + "loss": 0.911, + "step": 3483 + }, + { + "epoch": 0.31419939577039274, + "grad_norm": 1.3095842399112592, + "learning_rate": 3.210965781161525e-06, + "loss": 0.9345, + "step": 3484 + }, + { + "epoch": 0.31428957929386303, + "grad_norm": 1.395073195010443, + "learning_rate": 3.2105007806144892e-06, + "loss": 1.085, + "step": 3485 + }, + { + "epoch": 0.31437976281733326, + "grad_norm": 1.6138876451905901, + "learning_rate": 3.2100356767808513e-06, + "loss": 0.9616, + "step": 3486 + }, + { + "epoch": 
0.31446994634080355, + "grad_norm": 1.5850193767386065, + "learning_rate": 3.2095704697002977e-06, + "loss": 1.0216, + "step": 3487 + }, + { + "epoch": 0.3145601298642738, + "grad_norm": 1.3889806219112792, + "learning_rate": 3.209105159412522e-06, + "loss": 0.9655, + "step": 3488 + }, + { + "epoch": 0.3146503133877441, + "grad_norm": 1.6741167529602332, + "learning_rate": 3.208639745957228e-06, + "loss": 0.9228, + "step": 3489 + }, + { + "epoch": 0.3147404969112143, + "grad_norm": 1.2328126411776348, + "learning_rate": 3.2081742293741256e-06, + "loss": 0.9826, + "step": 3490 + }, + { + "epoch": 0.3148306804346846, + "grad_norm": 1.2507030893907571, + "learning_rate": 3.2077086097029366e-06, + "loss": 1.0176, + "step": 3491 + }, + { + "epoch": 0.31492086395815483, + "grad_norm": 1.3308026865179603, + "learning_rate": 3.2072428869833895e-06, + "loss": 0.9641, + "step": 3492 + }, + { + "epoch": 0.3150110474816251, + "grad_norm": 1.2904465152456792, + "learning_rate": 3.206777061255223e-06, + "loss": 0.9725, + "step": 3493 + }, + { + "epoch": 0.31510123100509535, + "grad_norm": 1.8766403016948108, + "learning_rate": 3.206311132558183e-06, + "loss": 1.0009, + "step": 3494 + }, + { + "epoch": 0.31519141452856564, + "grad_norm": 1.5520938727855202, + "learning_rate": 3.205845100932026e-06, + "loss": 0.9792, + "step": 3495 + }, + { + "epoch": 0.31528159805203587, + "grad_norm": 1.431970576506155, + "learning_rate": 3.205378966416516e-06, + "loss": 0.9908, + "step": 3496 + }, + { + "epoch": 0.31537178157550616, + "grad_norm": 1.537635645271419, + "learning_rate": 3.204912729051426e-06, + "loss": 0.9617, + "step": 3497 + }, + { + "epoch": 0.3154619650989764, + "grad_norm": 1.7086800091689258, + "learning_rate": 3.2044463888765384e-06, + "loss": 0.9398, + "step": 3498 + }, + { + "epoch": 0.3155521486224467, + "grad_norm": 1.7018538464686062, + "learning_rate": 3.2039799459316436e-06, + "loss": 0.8652, + "step": 3499 + }, + { + "epoch": 0.3156423321459169, + "grad_norm": 1.6229161325763155, + "learning_rate": 3.2035134002565407e-06, + "loss": 0.9976, + "step": 3500 + }, + { + "epoch": 0.3157325156693872, + "grad_norm": 1.4610265204903194, + "learning_rate": 3.203046751891039e-06, + "loss": 0.9724, + "step": 3501 + }, + { + "epoch": 0.3158226991928575, + "grad_norm": 1.4676848262351037, + "learning_rate": 3.2025800008749545e-06, + "loss": 0.9816, + "step": 3502 + }, + { + "epoch": 0.3159128827163277, + "grad_norm": 1.7314681015623434, + "learning_rate": 3.202113147248114e-06, + "loss": 0.9455, + "step": 3503 + }, + { + "epoch": 0.316003066239798, + "grad_norm": 1.4074585278026617, + "learning_rate": 3.20164619105035e-06, + "loss": 0.9378, + "step": 3504 + }, + { + "epoch": 0.31609324976326825, + "grad_norm": 1.2169344657392664, + "learning_rate": 3.201179132321508e-06, + "loss": 0.8128, + "step": 3505 + }, + { + "epoch": 0.31618343328673854, + "grad_norm": 2.2192755063400242, + "learning_rate": 3.200711971101439e-06, + "loss": 0.9082, + "step": 3506 + }, + { + "epoch": 0.31627361681020877, + "grad_norm": 1.653984751975869, + "learning_rate": 3.2002447074300047e-06, + "loss": 0.9673, + "step": 3507 + }, + { + "epoch": 0.31636380033367906, + "grad_norm": 1.176997775228856, + "learning_rate": 3.1997773413470736e-06, + "loss": 0.9636, + "step": 3508 + }, + { + "epoch": 0.3164539838571493, + "grad_norm": 1.8280802012114121, + "learning_rate": 3.199309872892524e-06, + "loss": 0.9564, + "step": 3509 + }, + { + "epoch": 0.3165441673806196, + "grad_norm": 1.4372582439638044, + "learning_rate": 
3.198842302106243e-06, + "loss": 1.009, + "step": 3510 + }, + { + "epoch": 0.3166343509040898, + "grad_norm": 1.502671326756691, + "learning_rate": 3.1983746290281265e-06, + "loss": 0.9523, + "step": 3511 + }, + { + "epoch": 0.3167245344275601, + "grad_norm": 1.2325725674883228, + "learning_rate": 3.197906853698079e-06, + "loss": 0.9678, + "step": 3512 + }, + { + "epoch": 0.31681471795103033, + "grad_norm": 1.3428050422416973, + "learning_rate": 3.1974389761560137e-06, + "loss": 0.9095, + "step": 3513 + }, + { + "epoch": 0.3169049014745006, + "grad_norm": 1.329734679482222, + "learning_rate": 3.1969709964418525e-06, + "loss": 0.916, + "step": 3514 + }, + { + "epoch": 0.31699508499797086, + "grad_norm": 1.620974763846765, + "learning_rate": 3.196502914595525e-06, + "loss": 0.9775, + "step": 3515 + }, + { + "epoch": 0.31708526852144114, + "grad_norm": 1.6192128495511298, + "learning_rate": 3.1960347306569723e-06, + "loss": 0.9928, + "step": 3516 + }, + { + "epoch": 0.3171754520449114, + "grad_norm": 1.4421205482357988, + "learning_rate": 3.195566444666141e-06, + "loss": 0.9091, + "step": 3517 + }, + { + "epoch": 0.31726563556838167, + "grad_norm": 1.359169604689843, + "learning_rate": 3.1950980566629886e-06, + "loss": 1.0702, + "step": 3518 + }, + { + "epoch": 0.3173558190918519, + "grad_norm": 1.317561290915033, + "learning_rate": 3.1946295666874797e-06, + "loss": 0.9783, + "step": 3519 + }, + { + "epoch": 0.3174460026153222, + "grad_norm": 1.2612525385669457, + "learning_rate": 3.19416097477959e-06, + "loss": 0.9771, + "step": 3520 + }, + { + "epoch": 0.3175361861387924, + "grad_norm": 1.5904584816217917, + "learning_rate": 3.1936922809793005e-06, + "loss": 1.0238, + "step": 3521 + }, + { + "epoch": 0.3176263696622627, + "grad_norm": 1.2145445231305363, + "learning_rate": 3.193223485326604e-06, + "loss": 1.0832, + "step": 3522 + }, + { + "epoch": 0.31771655318573294, + "grad_norm": 1.6340553627580523, + "learning_rate": 3.1927545878615005e-06, + "loss": 1.0437, + "step": 3523 + }, + { + "epoch": 0.31780673670920323, + "grad_norm": 1.500272726061324, + "learning_rate": 3.192285588623999e-06, + "loss": 0.9163, + "step": 3524 + }, + { + "epoch": 0.31789692023267346, + "grad_norm": 1.1284703135678054, + "learning_rate": 3.191816487654117e-06, + "loss": 0.9002, + "step": 3525 + }, + { + "epoch": 0.31798710375614375, + "grad_norm": 1.7579852210064855, + "learning_rate": 3.19134728499188e-06, + "loss": 0.9355, + "step": 3526 + }, + { + "epoch": 0.31807728727961404, + "grad_norm": 2.2104394574178383, + "learning_rate": 3.1908779806773235e-06, + "loss": 0.8481, + "step": 3527 + }, + { + "epoch": 0.3181674708030843, + "grad_norm": 1.3154155047925802, + "learning_rate": 3.190408574750492e-06, + "loss": 1.0207, + "step": 3528 + }, + { + "epoch": 0.31825765432655456, + "grad_norm": 1.301859365910546, + "learning_rate": 3.1899390672514367e-06, + "loss": 1.0349, + "step": 3529 + }, + { + "epoch": 0.3183478378500248, + "grad_norm": 1.5994508545967085, + "learning_rate": 3.189469458220219e-06, + "loss": 0.8774, + "step": 3530 + }, + { + "epoch": 0.3184380213734951, + "grad_norm": 1.1947202524240241, + "learning_rate": 3.1889997476969086e-06, + "loss": 0.9578, + "step": 3531 + }, + { + "epoch": 0.3185282048969653, + "grad_norm": 1.2980376397951452, + "learning_rate": 3.188529935721583e-06, + "loss": 0.9469, + "step": 3532 + }, + { + "epoch": 0.3186183884204356, + "grad_norm": 1.347190842913022, + "learning_rate": 3.18806002233433e-06, + "loss": 1.0229, + "step": 3533 + }, + { + "epoch": 0.31870857194390584, 
+ "grad_norm": 1.532498454082263, + "learning_rate": 3.187590007575245e-06, + "loss": 1.0316, + "step": 3534 + }, + { + "epoch": 0.3187987554673761, + "grad_norm": 1.3520260054666138, + "learning_rate": 3.1871198914844327e-06, + "loss": 0.899, + "step": 3535 + }, + { + "epoch": 0.31888893899084636, + "grad_norm": 1.5517383763876798, + "learning_rate": 3.1866496741020057e-06, + "loss": 0.8998, + "step": 3536 + }, + { + "epoch": 0.31897912251431665, + "grad_norm": 1.0809970140795266, + "learning_rate": 3.186179355468085e-06, + "loss": 0.9279, + "step": 3537 + }, + { + "epoch": 0.3190693060377869, + "grad_norm": 1.717240936864044, + "learning_rate": 3.1857089356228015e-06, + "loss": 0.9635, + "step": 3538 + }, + { + "epoch": 0.31915948956125717, + "grad_norm": 1.4535970433884797, + "learning_rate": 3.1852384146062933e-06, + "loss": 0.9088, + "step": 3539 + }, + { + "epoch": 0.3192496730847274, + "grad_norm": 0.747281193109913, + "learning_rate": 3.184767792458708e-06, + "loss": 0.7622, + "step": 3540 + }, + { + "epoch": 0.3193398566081977, + "grad_norm": 1.8375038120982792, + "learning_rate": 3.1842970692202023e-06, + "loss": 1.0895, + "step": 3541 + }, + { + "epoch": 0.3194300401316679, + "grad_norm": 1.6684248789702723, + "learning_rate": 3.1838262449309403e-06, + "loss": 0.952, + "step": 3542 + }, + { + "epoch": 0.3195202236551382, + "grad_norm": 1.5751231887113113, + "learning_rate": 3.1833553196310956e-06, + "loss": 0.8772, + "step": 3543 + }, + { + "epoch": 0.31961040717860845, + "grad_norm": 1.3620021470454877, + "learning_rate": 3.18288429336085e-06, + "loss": 0.8313, + "step": 3544 + }, + { + "epoch": 0.31970059070207874, + "grad_norm": 1.457677375770402, + "learning_rate": 3.182413166160394e-06, + "loss": 1.0714, + "step": 3545 + }, + { + "epoch": 0.31979077422554897, + "grad_norm": 1.5220129150313866, + "learning_rate": 3.1819419380699275e-06, + "loss": 1.0113, + "step": 3546 + }, + { + "epoch": 0.31988095774901926, + "grad_norm": 1.2678996238269273, + "learning_rate": 3.181470609129658e-06, + "loss": 0.9597, + "step": 3547 + }, + { + "epoch": 0.3199711412724895, + "grad_norm": 1.7272902806088262, + "learning_rate": 3.1809991793798e-06, + "loss": 1.0208, + "step": 3548 + }, + { + "epoch": 0.3200613247959598, + "grad_norm": 1.501433164681049, + "learning_rate": 3.1805276488605806e-06, + "loss": 0.9243, + "step": 3549 + }, + { + "epoch": 0.32015150831943007, + "grad_norm": 1.3939181564227918, + "learning_rate": 3.1800560176122336e-06, + "loss": 0.9606, + "step": 3550 + }, + { + "epoch": 0.3202416918429003, + "grad_norm": 1.4327919196228471, + "learning_rate": 3.179584285675e-06, + "loss": 0.9709, + "step": 3551 + }, + { + "epoch": 0.3203318753663706, + "grad_norm": 1.5942815754314712, + "learning_rate": 3.1791124530891315e-06, + "loss": 0.8733, + "step": 3552 + }, + { + "epoch": 0.3204220588898408, + "grad_norm": 1.5821760700015646, + "learning_rate": 3.178640519894886e-06, + "loss": 0.9605, + "step": 3553 + }, + { + "epoch": 0.3205122424133111, + "grad_norm": 1.655023660634718, + "learning_rate": 3.1781684861325324e-06, + "loss": 0.917, + "step": 3554 + }, + { + "epoch": 0.32060242593678134, + "grad_norm": 1.4801147359758404, + "learning_rate": 3.177696351842348e-06, + "loss": 0.9663, + "step": 3555 + }, + { + "epoch": 0.32069260946025163, + "grad_norm": 0.9213654354449321, + "learning_rate": 3.1772241170646167e-06, + "loss": 0.8124, + "step": 3556 + }, + { + "epoch": 0.32078279298372187, + "grad_norm": 1.6069695727465265, + "learning_rate": 3.1767517818396334e-06, + "loss": 1.0662, 
+ "step": 3557 + }, + { + "epoch": 0.32087297650719215, + "grad_norm": 1.2948046785142227, + "learning_rate": 3.1762793462076986e-06, + "loss": 1.1102, + "step": 3558 + }, + { + "epoch": 0.3209631600306624, + "grad_norm": 1.831683746110163, + "learning_rate": 3.1758068102091236e-06, + "loss": 0.8468, + "step": 3559 + }, + { + "epoch": 0.3210533435541327, + "grad_norm": 1.7115921092053643, + "learning_rate": 3.175334173884229e-06, + "loss": 0.9318, + "step": 3560 + }, + { + "epoch": 0.3211435270776029, + "grad_norm": 1.2320307429862325, + "learning_rate": 3.174861437273342e-06, + "loss": 0.9599, + "step": 3561 + }, + { + "epoch": 0.3212337106010732, + "grad_norm": 1.4104670131981567, + "learning_rate": 3.174388600416799e-06, + "loss": 0.9293, + "step": 3562 + }, + { + "epoch": 0.32132389412454343, + "grad_norm": 0.8163297699510034, + "learning_rate": 3.1739156633549445e-06, + "loss": 0.8113, + "step": 3563 + }, + { + "epoch": 0.3214140776480137, + "grad_norm": 1.3347929574670332, + "learning_rate": 3.173442626128133e-06, + "loss": 1.0222, + "step": 3564 + }, + { + "epoch": 0.32150426117148395, + "grad_norm": 1.7697459379069524, + "learning_rate": 3.1729694887767265e-06, + "loss": 1.0767, + "step": 3565 + }, + { + "epoch": 0.32159444469495424, + "grad_norm": 1.6657845626467493, + "learning_rate": 3.172496251341096e-06, + "loss": 0.9599, + "step": 3566 + }, + { + "epoch": 0.3216846282184245, + "grad_norm": 1.588077097849276, + "learning_rate": 3.172022913861619e-06, + "loss": 0.8201, + "step": 3567 + }, + { + "epoch": 0.32177481174189476, + "grad_norm": 1.756544003025318, + "learning_rate": 3.171549476378686e-06, + "loss": 0.9207, + "step": 3568 + }, + { + "epoch": 0.321864995265365, + "grad_norm": 1.5129456580741023, + "learning_rate": 3.1710759389326906e-06, + "loss": 0.9311, + "step": 3569 + }, + { + "epoch": 0.3219551787888353, + "grad_norm": 1.539029813312877, + "learning_rate": 3.1706023015640396e-06, + "loss": 0.9482, + "step": 3570 + }, + { + "epoch": 0.3220453623123055, + "grad_norm": 1.4440261477641725, + "learning_rate": 3.1701285643131453e-06, + "loss": 0.9817, + "step": 3571 + }, + { + "epoch": 0.3221355458357758, + "grad_norm": 1.6064252023104861, + "learning_rate": 3.16965472722043e-06, + "loss": 0.9747, + "step": 3572 + }, + { + "epoch": 0.32222572935924604, + "grad_norm": 1.7353306105190922, + "learning_rate": 3.169180790326324e-06, + "loss": 0.8843, + "step": 3573 + }, + { + "epoch": 0.3223159128827163, + "grad_norm": 1.3639595508506883, + "learning_rate": 3.168706753671266e-06, + "loss": 0.9724, + "step": 3574 + }, + { + "epoch": 0.3224060964061866, + "grad_norm": 1.311719071715942, + "learning_rate": 3.168232617295704e-06, + "loss": 0.8951, + "step": 3575 + }, + { + "epoch": 0.32249627992965685, + "grad_norm": 1.5247473929639834, + "learning_rate": 3.167758381240093e-06, + "loss": 0.9585, + "step": 3576 + }, + { + "epoch": 0.32258646345312714, + "grad_norm": 1.4051196747851924, + "learning_rate": 3.1672840455448978e-06, + "loss": 1.0038, + "step": 3577 + }, + { + "epoch": 0.32267664697659737, + "grad_norm": 1.3939506967875381, + "learning_rate": 3.166809610250592e-06, + "loss": 0.8994, + "step": 3578 + }, + { + "epoch": 0.32276683050006766, + "grad_norm": 1.5089575022280362, + "learning_rate": 3.166335075397656e-06, + "loss": 0.89, + "step": 3579 + }, + { + "epoch": 0.3228570140235379, + "grad_norm": 1.8547294348432601, + "learning_rate": 3.1658604410265808e-06, + "loss": 0.9297, + "step": 3580 + }, + { + "epoch": 0.3229471975470082, + "grad_norm": 1.2424575224279668, + 
"learning_rate": 3.1653857071778644e-06, + "loss": 0.8294, + "step": 3581 + }, + { + "epoch": 0.3230373810704784, + "grad_norm": 1.5000064372878228, + "learning_rate": 3.1649108738920133e-06, + "loss": 0.953, + "step": 3582 + }, + { + "epoch": 0.3231275645939487, + "grad_norm": 1.5191722141707944, + "learning_rate": 3.1644359412095432e-06, + "loss": 0.9714, + "step": 3583 + }, + { + "epoch": 0.32321774811741893, + "grad_norm": 5.0251756096515585, + "learning_rate": 3.163960909170978e-06, + "loss": 0.8299, + "step": 3584 + }, + { + "epoch": 0.3233079316408892, + "grad_norm": 2.0171328087328324, + "learning_rate": 3.1634857778168496e-06, + "loss": 0.8604, + "step": 3585 + }, + { + "epoch": 0.32339811516435946, + "grad_norm": 1.5201868050376879, + "learning_rate": 3.1630105471877002e-06, + "loss": 1.0027, + "step": 3586 + }, + { + "epoch": 0.32348829868782975, + "grad_norm": 1.5739764081454453, + "learning_rate": 3.162535217324077e-06, + "loss": 0.9453, + "step": 3587 + }, + { + "epoch": 0.3235784822113, + "grad_norm": 1.8251906752591238, + "learning_rate": 3.1620597882665393e-06, + "loss": 0.9939, + "step": 3588 + }, + { + "epoch": 0.32366866573477027, + "grad_norm": 1.5828914611341245, + "learning_rate": 3.1615842600556535e-06, + "loss": 1.0165, + "step": 3589 + }, + { + "epoch": 0.3237588492582405, + "grad_norm": 2.23421429676402, + "learning_rate": 3.1611086327319932e-06, + "loss": 0.9567, + "step": 3590 + }, + { + "epoch": 0.3238490327817108, + "grad_norm": 1.5578639872315596, + "learning_rate": 3.160632906336142e-06, + "loss": 1.019, + "step": 3591 + }, + { + "epoch": 0.323939216305181, + "grad_norm": 1.6790667429206043, + "learning_rate": 3.160157080908692e-06, + "loss": 0.9859, + "step": 3592 + }, + { + "epoch": 0.3240293998286513, + "grad_norm": 1.3274392994510567, + "learning_rate": 3.1596811564902426e-06, + "loss": 1.014, + "step": 3593 + }, + { + "epoch": 0.32411958335212154, + "grad_norm": 1.8552868563187543, + "learning_rate": 3.1592051331214023e-06, + "loss": 0.9039, + "step": 3594 + }, + { + "epoch": 0.32420976687559183, + "grad_norm": 1.413576833034068, + "learning_rate": 3.158729010842789e-06, + "loss": 0.9927, + "step": 3595 + }, + { + "epoch": 0.32429995039906206, + "grad_norm": 1.4611071319832103, + "learning_rate": 3.1582527896950266e-06, + "loss": 1.0154, + "step": 3596 + }, + { + "epoch": 0.32439013392253235, + "grad_norm": 1.653438340833757, + "learning_rate": 3.157776469718749e-06, + "loss": 0.9821, + "step": 3597 + }, + { + "epoch": 0.32448031744600264, + "grad_norm": 1.7120317417571251, + "learning_rate": 3.1573000509546004e-06, + "loss": 0.9533, + "step": 3598 + }, + { + "epoch": 0.3245705009694729, + "grad_norm": 1.353739974459424, + "learning_rate": 3.1568235334432296e-06, + "loss": 1.0122, + "step": 3599 + }, + { + "epoch": 0.32466068449294316, + "grad_norm": 1.5842961429030111, + "learning_rate": 3.1563469172252964e-06, + "loss": 1.0292, + "step": 3600 + }, + { + "epoch": 0.3247508680164134, + "grad_norm": 1.5810636599064047, + "learning_rate": 3.155870202341468e-06, + "loss": 1.0262, + "step": 3601 + }, + { + "epoch": 0.3248410515398837, + "grad_norm": 1.8807850919099405, + "learning_rate": 3.155393388832421e-06, + "loss": 0.9586, + "step": 3602 + }, + { + "epoch": 0.3249312350633539, + "grad_norm": 1.7102153072554989, + "learning_rate": 3.1549164767388386e-06, + "loss": 0.9812, + "step": 3603 + }, + { + "epoch": 0.3250214185868242, + "grad_norm": 1.3706733080712947, + "learning_rate": 3.1544394661014145e-06, + "loss": 0.9627, + "step": 3604 + }, + { + 
"epoch": 0.32511160211029444, + "grad_norm": 1.5614828995529582, + "learning_rate": 3.15396235696085e-06, + "loss": 0.8827, + "step": 3605 + }, + { + "epoch": 0.32520178563376473, + "grad_norm": 1.3197445974208684, + "learning_rate": 3.153485149357854e-06, + "loss": 0.9505, + "step": 3606 + }, + { + "epoch": 0.32529196915723496, + "grad_norm": 1.3951821397670627, + "learning_rate": 3.153007843333145e-06, + "loss": 0.9469, + "step": 3607 + }, + { + "epoch": 0.32538215268070525, + "grad_norm": 1.438337413845411, + "learning_rate": 3.152530438927449e-06, + "loss": 0.9848, + "step": 3608 + }, + { + "epoch": 0.3254723362041755, + "grad_norm": 1.4688515323688967, + "learning_rate": 3.1520529361815008e-06, + "loss": 1.0164, + "step": 3609 + }, + { + "epoch": 0.32556251972764577, + "grad_norm": 1.4609077164985056, + "learning_rate": 3.151575335136044e-06, + "loss": 1.0003, + "step": 3610 + }, + { + "epoch": 0.325652703251116, + "grad_norm": 1.433211356155244, + "learning_rate": 3.1510976358318298e-06, + "loss": 0.9489, + "step": 3611 + }, + { + "epoch": 0.3257428867745863, + "grad_norm": 1.5495982233891028, + "learning_rate": 3.1506198383096186e-06, + "loss": 0.9139, + "step": 3612 + }, + { + "epoch": 0.3258330702980565, + "grad_norm": 1.691416046607563, + "learning_rate": 3.150141942610178e-06, + "loss": 0.9785, + "step": 3613 + }, + { + "epoch": 0.3259232538215268, + "grad_norm": 0.7063487220227663, + "learning_rate": 3.1496639487742853e-06, + "loss": 0.8086, + "step": 3614 + }, + { + "epoch": 0.32601343734499705, + "grad_norm": 1.4135651530500528, + "learning_rate": 3.1491858568427247e-06, + "loss": 1.0037, + "step": 3615 + }, + { + "epoch": 0.32610362086846734, + "grad_norm": 1.4889324695857822, + "learning_rate": 3.1487076668562903e-06, + "loss": 1.013, + "step": 3616 + }, + { + "epoch": 0.32619380439193757, + "grad_norm": 1.566035657313529, + "learning_rate": 3.1482293788557847e-06, + "loss": 1.0055, + "step": 3617 + }, + { + "epoch": 0.32628398791540786, + "grad_norm": 1.386463281444468, + "learning_rate": 3.1477509928820165e-06, + "loss": 0.9378, + "step": 3618 + }, + { + "epoch": 0.3263741714388781, + "grad_norm": 1.4468037927625836, + "learning_rate": 3.147272508975805e-06, + "loss": 0.9611, + "step": 3619 + }, + { + "epoch": 0.3264643549623484, + "grad_norm": 0.8983070942320549, + "learning_rate": 3.1467939271779775e-06, + "loss": 0.8251, + "step": 3620 + }, + { + "epoch": 0.32655453848581867, + "grad_norm": 1.2023957754954315, + "learning_rate": 3.146315247529368e-06, + "loss": 0.9361, + "step": 3621 + }, + { + "epoch": 0.3266447220092889, + "grad_norm": 1.6322540350274162, + "learning_rate": 3.1458364700708212e-06, + "loss": 0.9271, + "step": 3622 + }, + { + "epoch": 0.3267349055327592, + "grad_norm": 1.4041268745843856, + "learning_rate": 3.1453575948431892e-06, + "loss": 0.992, + "step": 3623 + }, + { + "epoch": 0.3268250890562294, + "grad_norm": 1.5544601350852372, + "learning_rate": 3.144878621887331e-06, + "loss": 0.9658, + "step": 3624 + }, + { + "epoch": 0.3269152725796997, + "grad_norm": 1.5776280291030804, + "learning_rate": 3.1443995512441167e-06, + "loss": 1.0124, + "step": 3625 + }, + { + "epoch": 0.32700545610316994, + "grad_norm": 1.3970733894261997, + "learning_rate": 3.1439203829544224e-06, + "loss": 1.0386, + "step": 3626 + }, + { + "epoch": 0.32709563962664023, + "grad_norm": 1.3950151303882499, + "learning_rate": 3.143441117059133e-06, + "loss": 1.048, + "step": 3627 + }, + { + "epoch": 0.32718582315011047, + "grad_norm": 1.6606046183365117, + "learning_rate": 
3.142961753599143e-06, + "loss": 0.915, + "step": 3628 + }, + { + "epoch": 0.32727600667358075, + "grad_norm": 1.330511574271577, + "learning_rate": 3.1424822926153543e-06, + "loss": 0.9377, + "step": 3629 + }, + { + "epoch": 0.327366190197051, + "grad_norm": 1.519574319184978, + "learning_rate": 3.142002734148676e-06, + "loss": 1.0051, + "step": 3630 + }, + { + "epoch": 0.3274563737205213, + "grad_norm": 1.567993881563978, + "learning_rate": 3.141523078240028e-06, + "loss": 0.9938, + "step": 3631 + }, + { + "epoch": 0.3275465572439915, + "grad_norm": 1.5707886386789771, + "learning_rate": 3.1410433249303366e-06, + "loss": 1.0181, + "step": 3632 + }, + { + "epoch": 0.3276367407674618, + "grad_norm": 1.444650034252609, + "learning_rate": 3.1405634742605366e-06, + "loss": 1.0478, + "step": 3633 + }, + { + "epoch": 0.32772692429093203, + "grad_norm": 1.632713789442995, + "learning_rate": 3.1400835262715727e-06, + "loss": 0.9991, + "step": 3634 + }, + { + "epoch": 0.3278171078144023, + "grad_norm": 1.537816429140916, + "learning_rate": 3.139603481004396e-06, + "loss": 0.9554, + "step": 3635 + }, + { + "epoch": 0.32790729133787255, + "grad_norm": 1.6080082395162192, + "learning_rate": 3.139123338499966e-06, + "loss": 0.978, + "step": 3636 + }, + { + "epoch": 0.32799747486134284, + "grad_norm": 1.537252921123524, + "learning_rate": 3.1386430987992524e-06, + "loss": 1.0031, + "step": 3637 + }, + { + "epoch": 0.3280876583848131, + "grad_norm": 1.6865085232267953, + "learning_rate": 3.1381627619432307e-06, + "loss": 0.9224, + "step": 3638 + }, + { + "epoch": 0.32817784190828336, + "grad_norm": 2.0504308201343293, + "learning_rate": 3.1376823279728864e-06, + "loss": 0.975, + "step": 3639 + }, + { + "epoch": 0.3282680254317536, + "grad_norm": 1.4604013040399384, + "learning_rate": 3.1372017969292125e-06, + "loss": 1.0453, + "step": 3640 + }, + { + "epoch": 0.3283582089552239, + "grad_norm": 1.4319239566314523, + "learning_rate": 3.136721168853211e-06, + "loss": 0.953, + "step": 3641 + }, + { + "epoch": 0.3284483924786941, + "grad_norm": 1.5636837861872248, + "learning_rate": 3.1362404437858924e-06, + "loss": 0.9361, + "step": 3642 + }, + { + "epoch": 0.3285385760021644, + "grad_norm": 1.4445639656616323, + "learning_rate": 3.135759621768273e-06, + "loss": 1.0315, + "step": 3643 + }, + { + "epoch": 0.32862875952563464, + "grad_norm": 1.4468711902411708, + "learning_rate": 3.13527870284138e-06, + "loss": 1.016, + "step": 3644 + }, + { + "epoch": 0.32871894304910493, + "grad_norm": 1.86181890335936, + "learning_rate": 3.134797687046249e-06, + "loss": 1.0276, + "step": 3645 + }, + { + "epoch": 0.3288091265725752, + "grad_norm": 1.6822769432092817, + "learning_rate": 3.1343165744239218e-06, + "loss": 0.9077, + "step": 3646 + }, + { + "epoch": 0.32889931009604545, + "grad_norm": 1.680147755573986, + "learning_rate": 3.13383536501545e-06, + "loss": 0.9785, + "step": 3647 + }, + { + "epoch": 0.32898949361951574, + "grad_norm": 1.3608885921620455, + "learning_rate": 3.133354058861893e-06, + "loss": 0.993, + "step": 3648 + }, + { + "epoch": 0.32907967714298597, + "grad_norm": 1.431433523480628, + "learning_rate": 3.132872656004318e-06, + "loss": 0.9392, + "step": 3649 + }, + { + "epoch": 0.32916986066645626, + "grad_norm": 1.4510484029372095, + "learning_rate": 3.132391156483802e-06, + "loss": 0.8708, + "step": 3650 + }, + { + "epoch": 0.3292600441899265, + "grad_norm": 1.5203103076256435, + "learning_rate": 3.131909560341428e-06, + "loss": 0.9312, + "step": 3651 + }, + { + "epoch": 0.3293502277133968, + 
"grad_norm": 1.3786478205677213, + "learning_rate": 3.1314278676182893e-06, + "loss": 0.916, + "step": 3652 + }, + { + "epoch": 0.329440411236867, + "grad_norm": 1.4416050554167819, + "learning_rate": 3.130946078355486e-06, + "loss": 1.04, + "step": 3653 + }, + { + "epoch": 0.3295305947603373, + "grad_norm": 1.2754667998344207, + "learning_rate": 3.130464192594128e-06, + "loss": 1.0058, + "step": 3654 + }, + { + "epoch": 0.32962077828380754, + "grad_norm": 1.2941886010965056, + "learning_rate": 3.1299822103753315e-06, + "loss": 0.8899, + "step": 3655 + }, + { + "epoch": 0.3297109618072778, + "grad_norm": 1.5727611827001033, + "learning_rate": 3.1295001317402217e-06, + "loss": 0.9688, + "step": 3656 + }, + { + "epoch": 0.32980114533074806, + "grad_norm": 1.7217147226387777, + "learning_rate": 3.1290179567299335e-06, + "loss": 0.9091, + "step": 3657 + }, + { + "epoch": 0.32989132885421835, + "grad_norm": 1.2137521334785544, + "learning_rate": 3.128535685385607e-06, + "loss": 0.9936, + "step": 3658 + }, + { + "epoch": 0.3299815123776886, + "grad_norm": 1.3610963993618206, + "learning_rate": 3.1280533177483935e-06, + "loss": 1.0511, + "step": 3659 + }, + { + "epoch": 0.33007169590115887, + "grad_norm": 1.377798570305986, + "learning_rate": 3.127570853859451e-06, + "loss": 0.9839, + "step": 3660 + }, + { + "epoch": 0.3301618794246291, + "grad_norm": 1.4073127545466937, + "learning_rate": 3.1270882937599456e-06, + "loss": 0.8574, + "step": 3661 + }, + { + "epoch": 0.3302520629480994, + "grad_norm": 1.4219089545922186, + "learning_rate": 3.1266056374910532e-06, + "loss": 0.9912, + "step": 3662 + }, + { + "epoch": 0.3303422464715696, + "grad_norm": 2.2785299100907097, + "learning_rate": 3.126122885093955e-06, + "loss": 0.8677, + "step": 3663 + }, + { + "epoch": 0.3304324299950399, + "grad_norm": 1.637808716770891, + "learning_rate": 3.1256400366098427e-06, + "loss": 0.9592, + "step": 3664 + }, + { + "epoch": 0.33052261351851014, + "grad_norm": 1.51574124549176, + "learning_rate": 3.125157092079916e-06, + "loss": 0.9277, + "step": 3665 + }, + { + "epoch": 0.33061279704198043, + "grad_norm": 2.1118655724948647, + "learning_rate": 3.1246740515453824e-06, + "loss": 0.9469, + "step": 3666 + }, + { + "epoch": 0.33070298056545067, + "grad_norm": 1.3579544888652733, + "learning_rate": 3.124190915047457e-06, + "loss": 0.9025, + "step": 3667 + }, + { + "epoch": 0.33079316408892095, + "grad_norm": 1.5650947388562326, + "learning_rate": 3.123707682627364e-06, + "loss": 0.9407, + "step": 3668 + }, + { + "epoch": 0.33088334761239124, + "grad_norm": 1.5402523031020936, + "learning_rate": 3.1232243543263356e-06, + "loss": 0.9446, + "step": 3669 + }, + { + "epoch": 0.3309735311358615, + "grad_norm": 1.6113201349233717, + "learning_rate": 3.1227409301856122e-06, + "loss": 0.873, + "step": 3670 + }, + { + "epoch": 0.33106371465933176, + "grad_norm": 1.2582294886520449, + "learning_rate": 3.1222574102464413e-06, + "loss": 0.9486, + "step": 3671 + }, + { + "epoch": 0.331153898182802, + "grad_norm": 1.324115760421675, + "learning_rate": 3.12177379455008e-06, + "loss": 0.9977, + "step": 3672 + }, + { + "epoch": 0.3312440817062723, + "grad_norm": 1.3774835958040623, + "learning_rate": 3.121290083137794e-06, + "loss": 0.9615, + "step": 3673 + }, + { + "epoch": 0.3313342652297425, + "grad_norm": 1.4163226196040661, + "learning_rate": 3.1208062760508547e-06, + "loss": 0.9453, + "step": 3674 + }, + { + "epoch": 0.3314244487532128, + "grad_norm": 1.3010524432690689, + "learning_rate": 3.1203223733305438e-06, + "loss": 0.905, 
+ "step": 3675 + }, + { + "epoch": 0.33151463227668304, + "grad_norm": 0.7475260622208036, + "learning_rate": 3.1198383750181512e-06, + "loss": 0.7856, + "step": 3676 + }, + { + "epoch": 0.33160481580015333, + "grad_norm": 3.585717952880547, + "learning_rate": 3.1193542811549734e-06, + "loss": 0.8887, + "step": 3677 + }, + { + "epoch": 0.33169499932362356, + "grad_norm": 1.415374989779859, + "learning_rate": 3.1188700917823166e-06, + "loss": 0.8914, + "step": 3678 + }, + { + "epoch": 0.33178518284709385, + "grad_norm": 0.862856972117168, + "learning_rate": 3.1183858069414937e-06, + "loss": 0.8435, + "step": 3679 + }, + { + "epoch": 0.3318753663705641, + "grad_norm": 2.0389886016901286, + "learning_rate": 3.117901426673827e-06, + "loss": 1.0255, + "step": 3680 + }, + { + "epoch": 0.3319655498940344, + "grad_norm": 1.5051816610153275, + "learning_rate": 3.1174169510206466e-06, + "loss": 0.929, + "step": 3681 + }, + { + "epoch": 0.3320557334175046, + "grad_norm": 1.4004373582519918, + "learning_rate": 3.1169323800232908e-06, + "loss": 0.967, + "step": 3682 + }, + { + "epoch": 0.3321459169409749, + "grad_norm": 1.6488902839633035, + "learning_rate": 3.1164477137231054e-06, + "loss": 0.9956, + "step": 3683 + }, + { + "epoch": 0.3322361004644451, + "grad_norm": 1.61437020484605, + "learning_rate": 3.115962952161445e-06, + "loss": 0.9682, + "step": 3684 + }, + { + "epoch": 0.3323262839879154, + "grad_norm": 1.867411005537261, + "learning_rate": 3.1154780953796727e-06, + "loss": 1.0021, + "step": 3685 + }, + { + "epoch": 0.33241646751138565, + "grad_norm": 1.4426596549551804, + "learning_rate": 3.114993143419158e-06, + "loss": 0.8717, + "step": 3686 + }, + { + "epoch": 0.33250665103485594, + "grad_norm": 1.1679313424030207, + "learning_rate": 3.1145080963212806e-06, + "loss": 0.871, + "step": 3687 + }, + { + "epoch": 0.33259683455832617, + "grad_norm": 1.2228047826081332, + "learning_rate": 3.114022954127427e-06, + "loss": 0.9696, + "step": 3688 + }, + { + "epoch": 0.33268701808179646, + "grad_norm": 1.370876066745744, + "learning_rate": 3.1135377168789923e-06, + "loss": 1.1674, + "step": 3689 + }, + { + "epoch": 0.3327772016052667, + "grad_norm": 1.3378552063200628, + "learning_rate": 3.1130523846173803e-06, + "loss": 1.0239, + "step": 3690 + }, + { + "epoch": 0.332867385128737, + "grad_norm": 1.567991144602935, + "learning_rate": 3.1125669573840006e-06, + "loss": 0.9173, + "step": 3691 + }, + { + "epoch": 0.3329575686522072, + "grad_norm": 1.5889273586966144, + "learning_rate": 3.112081435220274e-06, + "loss": 0.9085, + "step": 3692 + }, + { + "epoch": 0.3330477521756775, + "grad_norm": 1.4696022158517261, + "learning_rate": 3.111595818167627e-06, + "loss": 0.9467, + "step": 3693 + }, + { + "epoch": 0.3331379356991478, + "grad_norm": 1.5498466846566348, + "learning_rate": 3.1111101062674953e-06, + "loss": 1.0198, + "step": 3694 + }, + { + "epoch": 0.333228119222618, + "grad_norm": 1.24875640997325, + "learning_rate": 3.1106242995613233e-06, + "loss": 0.9472, + "step": 3695 + }, + { + "epoch": 0.3333183027460883, + "grad_norm": 1.3236324846735337, + "learning_rate": 3.1101383980905616e-06, + "loss": 0.9613, + "step": 3696 + }, + { + "epoch": 0.33340848626955855, + "grad_norm": 0.8053253386558039, + "learning_rate": 3.109652401896671e-06, + "loss": 0.7809, + "step": 3697 + }, + { + "epoch": 0.33349866979302883, + "grad_norm": 1.2819028912155217, + "learning_rate": 3.109166311021119e-06, + "loss": 1.0078, + "step": 3698 + }, + { + "epoch": 0.33358885331649907, + "grad_norm": 1.708036637321988, + 
"learning_rate": 3.1086801255053807e-06, + "loss": 1.0055, + "step": 3699 + }, + { + "epoch": 0.33367903683996936, + "grad_norm": 1.263144004033282, + "learning_rate": 3.108193845390942e-06, + "loss": 1.0042, + "step": 3700 + }, + { + "epoch": 0.3337692203634396, + "grad_norm": 1.2114111189471917, + "learning_rate": 3.1077074707192933e-06, + "loss": 0.9388, + "step": 3701 + }, + { + "epoch": 0.3338594038869099, + "grad_norm": 1.415320832372637, + "learning_rate": 3.1072210015319353e-06, + "loss": 0.9822, + "step": 3702 + }, + { + "epoch": 0.3339495874103801, + "grad_norm": 1.4925722754532802, + "learning_rate": 3.106734437870376e-06, + "loss": 0.9197, + "step": 3703 + }, + { + "epoch": 0.3340397709338504, + "grad_norm": 1.6187024967484718, + "learning_rate": 3.1062477797761327e-06, + "loss": 0.9032, + "step": 3704 + }, + { + "epoch": 0.33412995445732063, + "grad_norm": 1.698841202107312, + "learning_rate": 3.105761027290729e-06, + "loss": 1.0086, + "step": 3705 + }, + { + "epoch": 0.3342201379807909, + "grad_norm": 1.543099776873394, + "learning_rate": 3.105274180455697e-06, + "loss": 0.9289, + "step": 3706 + }, + { + "epoch": 0.33431032150426115, + "grad_norm": 1.367465966339894, + "learning_rate": 3.1047872393125775e-06, + "loss": 0.9884, + "step": 3707 + }, + { + "epoch": 0.33440050502773144, + "grad_norm": 0.7590091341999451, + "learning_rate": 3.1043002039029186e-06, + "loss": 0.8172, + "step": 3708 + }, + { + "epoch": 0.3344906885512017, + "grad_norm": 1.3401837153965492, + "learning_rate": 3.1038130742682782e-06, + "loss": 0.9513, + "step": 3709 + }, + { + "epoch": 0.33458087207467196, + "grad_norm": 1.372457841875378, + "learning_rate": 3.103325850450219e-06, + "loss": 0.9385, + "step": 3710 + }, + { + "epoch": 0.3346710555981422, + "grad_norm": 1.455667875452037, + "learning_rate": 3.1028385324903154e-06, + "loss": 1.0047, + "step": 3711 + }, + { + "epoch": 0.3347612391216125, + "grad_norm": 1.3095080924845526, + "learning_rate": 3.1023511204301465e-06, + "loss": 0.9648, + "step": 3712 + }, + { + "epoch": 0.3348514226450827, + "grad_norm": 1.264492375822602, + "learning_rate": 3.1018636143113022e-06, + "loss": 0.9298, + "step": 3713 + }, + { + "epoch": 0.334941606168553, + "grad_norm": 1.3176635714481, + "learning_rate": 3.1013760141753787e-06, + "loss": 0.8711, + "step": 3714 + }, + { + "epoch": 0.33503178969202324, + "grad_norm": 1.505033947127717, + "learning_rate": 3.100888320063981e-06, + "loss": 0.9322, + "step": 3715 + }, + { + "epoch": 0.33512197321549353, + "grad_norm": 1.2774225613140366, + "learning_rate": 3.100400532018721e-06, + "loss": 0.9638, + "step": 3716 + }, + { + "epoch": 0.3352121567389638, + "grad_norm": 1.4865513156536694, + "learning_rate": 3.0999126500812204e-06, + "loss": 1.0265, + "step": 3717 + }, + { + "epoch": 0.33530234026243405, + "grad_norm": 0.8395057885366314, + "learning_rate": 3.0994246742931076e-06, + "loss": 0.8663, + "step": 3718 + }, + { + "epoch": 0.33539252378590434, + "grad_norm": 1.3753281981914276, + "learning_rate": 3.098936604696019e-06, + "loss": 0.9038, + "step": 3719 + }, + { + "epoch": 0.3354827073093746, + "grad_norm": 1.569295525401421, + "learning_rate": 3.0984484413316e-06, + "loss": 0.9994, + "step": 3720 + }, + { + "epoch": 0.33557289083284486, + "grad_norm": 1.5306575173864387, + "learning_rate": 3.0979601842415033e-06, + "loss": 0.9466, + "step": 3721 + }, + { + "epoch": 0.3356630743563151, + "grad_norm": 1.3529495002107597, + "learning_rate": 3.0974718334673896e-06, + "loss": 0.8808, + "step": 3722 + }, + { + "epoch": 
0.3357532578797854, + "grad_norm": 1.311611874141616, + "learning_rate": 3.0969833890509282e-06, + "loss": 0.9581, + "step": 3723 + }, + { + "epoch": 0.3358434414032556, + "grad_norm": 1.4334890550848212, + "learning_rate": 3.096494851033795e-06, + "loss": 0.958, + "step": 3724 + }, + { + "epoch": 0.3359336249267259, + "grad_norm": 1.5362028868662136, + "learning_rate": 3.0960062194576747e-06, + "loss": 0.891, + "step": 3725 + }, + { + "epoch": 0.33602380845019614, + "grad_norm": 1.440853436743282, + "learning_rate": 3.0955174943642606e-06, + "loss": 0.9102, + "step": 3726 + }, + { + "epoch": 0.3361139919736664, + "grad_norm": 1.2922543398857487, + "learning_rate": 3.0950286757952534e-06, + "loss": 0.8838, + "step": 3727 + }, + { + "epoch": 0.33620417549713666, + "grad_norm": 1.3694262675965518, + "learning_rate": 3.0945397637923617e-06, + "loss": 1.0272, + "step": 3728 + }, + { + "epoch": 0.33629435902060695, + "grad_norm": 1.7149831936738449, + "learning_rate": 3.0940507583973025e-06, + "loss": 0.9671, + "step": 3729 + }, + { + "epoch": 0.3363845425440772, + "grad_norm": 1.3492847720251577, + "learning_rate": 3.093561659651799e-06, + "loss": 0.9335, + "step": 3730 + }, + { + "epoch": 0.33647472606754747, + "grad_norm": 1.3480570321597158, + "learning_rate": 3.093072467597586e-06, + "loss": 0.8934, + "step": 3731 + }, + { + "epoch": 0.3365649095910177, + "grad_norm": 0.942990849069266, + "learning_rate": 3.092583182276402e-06, + "loss": 0.8286, + "step": 3732 + }, + { + "epoch": 0.336655093114488, + "grad_norm": 0.7042187554846324, + "learning_rate": 3.092093803729997e-06, + "loss": 0.8265, + "step": 3733 + }, + { + "epoch": 0.3367452766379582, + "grad_norm": 1.4974612368245919, + "learning_rate": 3.0916043320001264e-06, + "loss": 0.9786, + "step": 3734 + }, + { + "epoch": 0.3368354601614285, + "grad_norm": 1.5857042413562807, + "learning_rate": 3.0911147671285557e-06, + "loss": 0.9732, + "step": 3735 + }, + { + "epoch": 0.33692564368489875, + "grad_norm": 1.4871490256792885, + "learning_rate": 3.0906251091570565e-06, + "loss": 0.9885, + "step": 3736 + }, + { + "epoch": 0.33701582720836903, + "grad_norm": 2.3304278358297763, + "learning_rate": 3.0901353581274094e-06, + "loss": 0.9772, + "step": 3737 + }, + { + "epoch": 0.33710601073183927, + "grad_norm": 1.2729155049836414, + "learning_rate": 3.089645514081402e-06, + "loss": 0.8433, + "step": 3738 + }, + { + "epoch": 0.33719619425530956, + "grad_norm": 2.1871907696820085, + "learning_rate": 3.0891555770608323e-06, + "loss": 0.8886, + "step": 3739 + }, + { + "epoch": 0.33728637777877984, + "grad_norm": 1.5628018659822187, + "learning_rate": 3.088665547107503e-06, + "loss": 0.9912, + "step": 3740 + }, + { + "epoch": 0.3373765613022501, + "grad_norm": 1.3061057508482943, + "learning_rate": 3.0881754242632254e-06, + "loss": 1.0434, + "step": 3741 + }, + { + "epoch": 0.33746674482572037, + "grad_norm": 1.2498528870798098, + "learning_rate": 3.0876852085698213e-06, + "loss": 1.0069, + "step": 3742 + }, + { + "epoch": 0.3375569283491906, + "grad_norm": 1.581157452434462, + "learning_rate": 3.087194900069117e-06, + "loss": 0.9875, + "step": 3743 + }, + { + "epoch": 0.3376471118726609, + "grad_norm": 1.4394751538023527, + "learning_rate": 3.08670449880295e-06, + "loss": 1.0156, + "step": 3744 + }, + { + "epoch": 0.3377372953961311, + "grad_norm": 1.4688966556134697, + "learning_rate": 3.086214004813163e-06, + "loss": 0.9879, + "step": 3745 + }, + { + "epoch": 0.3378274789196014, + "grad_norm": 1.3712925780370386, + "learning_rate": 
3.0857234181416074e-06, + "loss": 1.0299, + "step": 3746 + }, + { + "epoch": 0.33791766244307164, + "grad_norm": 1.422062368937907, + "learning_rate": 3.085232738830143e-06, + "loss": 1.0112, + "step": 3747 + }, + { + "epoch": 0.33800784596654193, + "grad_norm": 1.5792360077516348, + "learning_rate": 3.084741966920638e-06, + "loss": 1.0254, + "step": 3748 + }, + { + "epoch": 0.33809802949001216, + "grad_norm": 1.3077117224801262, + "learning_rate": 3.084251102454966e-06, + "loss": 0.9209, + "step": 3749 + }, + { + "epoch": 0.33818821301348245, + "grad_norm": 0.8274201686532859, + "learning_rate": 3.083760145475013e-06, + "loss": 0.8353, + "step": 3750 + }, + { + "epoch": 0.3382783965369527, + "grad_norm": 1.3894164588086255, + "learning_rate": 3.0832690960226678e-06, + "loss": 0.9107, + "step": 3751 + }, + { + "epoch": 0.338368580060423, + "grad_norm": 1.610361482070343, + "learning_rate": 3.08277795413983e-06, + "loss": 0.9356, + "step": 3752 + }, + { + "epoch": 0.3384587635838932, + "grad_norm": 1.414211286441444, + "learning_rate": 3.0822867198684073e-06, + "loss": 1.0128, + "step": 3753 + }, + { + "epoch": 0.3385489471073635, + "grad_norm": 1.6081326328160197, + "learning_rate": 3.081795393250314e-06, + "loss": 0.969, + "step": 3754 + }, + { + "epoch": 0.33863913063083373, + "grad_norm": 3.0047379273752033, + "learning_rate": 3.081303974327473e-06, + "loss": 0.9302, + "step": 3755 + }, + { + "epoch": 0.338729314154304, + "grad_norm": 1.4433076717629796, + "learning_rate": 3.080812463141814e-06, + "loss": 0.9598, + "step": 3756 + }, + { + "epoch": 0.33881949767777425, + "grad_norm": 1.2480276760003626, + "learning_rate": 3.080320859735276e-06, + "loss": 0.8321, + "step": 3757 + }, + { + "epoch": 0.33890968120124454, + "grad_norm": 1.584325939374434, + "learning_rate": 3.079829164149806e-06, + "loss": 1.0131, + "step": 3758 + }, + { + "epoch": 0.3389998647247148, + "grad_norm": 1.6999542398464793, + "learning_rate": 3.0793373764273573e-06, + "loss": 0.9504, + "step": 3759 + }, + { + "epoch": 0.33909004824818506, + "grad_norm": 1.5253696806540673, + "learning_rate": 3.078845496609892e-06, + "loss": 0.9909, + "step": 3760 + }, + { + "epoch": 0.3391802317716553, + "grad_norm": 1.5025059906495768, + "learning_rate": 3.078353524739381e-06, + "loss": 1.0491, + "step": 3761 + }, + { + "epoch": 0.3392704152951256, + "grad_norm": 1.3548424232889076, + "learning_rate": 3.077861460857801e-06, + "loss": 1.0439, + "step": 3762 + }, + { + "epoch": 0.3393605988185958, + "grad_norm": 1.2696969731076295, + "learning_rate": 3.077369305007138e-06, + "loss": 0.9828, + "step": 3763 + }, + { + "epoch": 0.3394507823420661, + "grad_norm": 1.3025890538090006, + "learning_rate": 3.0768770572293852e-06, + "loss": 1.0025, + "step": 3764 + }, + { + "epoch": 0.3395409658655364, + "grad_norm": 1.5600070903078975, + "learning_rate": 3.0763847175665437e-06, + "loss": 1.0421, + "step": 3765 + }, + { + "epoch": 0.3396311493890066, + "grad_norm": 1.529465082693445, + "learning_rate": 3.0758922860606237e-06, + "loss": 0.9078, + "step": 3766 + }, + { + "epoch": 0.3397213329124769, + "grad_norm": 1.3925269760861017, + "learning_rate": 3.0753997627536404e-06, + "loss": 0.8993, + "step": 3767 + }, + { + "epoch": 0.33981151643594715, + "grad_norm": 1.5908562063121818, + "learning_rate": 3.0749071476876203e-06, + "loss": 0.9223, + "step": 3768 + }, + { + "epoch": 0.33990169995941744, + "grad_norm": 1.3708477179904026, + "learning_rate": 3.0744144409045952e-06, + "loss": 0.9337, + "step": 3769 + }, + { + "epoch": 
0.33999188348288767, + "grad_norm": 1.639254333499674, + "learning_rate": 3.0739216424466056e-06, + "loss": 0.9805, + "step": 3770 + }, + { + "epoch": 0.34008206700635796, + "grad_norm": 1.7659673443299069, + "learning_rate": 3.0734287523557002e-06, + "loss": 0.9418, + "step": 3771 + }, + { + "epoch": 0.3401722505298282, + "grad_norm": 1.6781388351466828, + "learning_rate": 3.0729357706739348e-06, + "loss": 0.9191, + "step": 3772 + }, + { + "epoch": 0.3402624340532985, + "grad_norm": 1.489453912550195, + "learning_rate": 3.0724426974433737e-06, + "loss": 0.9556, + "step": 3773 + }, + { + "epoch": 0.3403526175767687, + "grad_norm": 1.1932311043736767, + "learning_rate": 3.0719495327060874e-06, + "loss": 0.9348, + "step": 3774 + }, + { + "epoch": 0.340442801100239, + "grad_norm": 1.4049456162974487, + "learning_rate": 3.071456276504157e-06, + "loss": 0.9677, + "step": 3775 + }, + { + "epoch": 0.34053298462370923, + "grad_norm": 1.4416543390412166, + "learning_rate": 3.070962928879669e-06, + "loss": 0.9625, + "step": 3776 + }, + { + "epoch": 0.3406231681471795, + "grad_norm": 1.5327134437712626, + "learning_rate": 3.0704694898747185e-06, + "loss": 0.9829, + "step": 3777 + }, + { + "epoch": 0.34071335167064976, + "grad_norm": 1.4442534819738124, + "learning_rate": 3.069975959531408e-06, + "loss": 1.0914, + "step": 3778 + }, + { + "epoch": 0.34080353519412004, + "grad_norm": 0.7990886011735383, + "learning_rate": 3.06948233789185e-06, + "loss": 0.8684, + "step": 3779 + }, + { + "epoch": 0.3408937187175903, + "grad_norm": 1.3985248730179343, + "learning_rate": 3.0689886249981614e-06, + "loss": 0.98, + "step": 3780 + }, + { + "epoch": 0.34098390224106057, + "grad_norm": 1.4250328294418984, + "learning_rate": 3.0684948208924693e-06, + "loss": 0.9856, + "step": 3781 + }, + { + "epoch": 0.3410740857645308, + "grad_norm": 1.654327860680518, + "learning_rate": 3.068000925616907e-06, + "loss": 0.9898, + "step": 3782 + }, + { + "epoch": 0.3411642692880011, + "grad_norm": 1.7032345377842988, + "learning_rate": 3.067506939213617e-06, + "loss": 0.9617, + "step": 3783 + }, + { + "epoch": 0.3412544528114713, + "grad_norm": 1.2764221506462747, + "learning_rate": 3.0670128617247493e-06, + "loss": 0.9685, + "step": 3784 + }, + { + "epoch": 0.3413446363349416, + "grad_norm": 1.2310027886087176, + "learning_rate": 3.06651869319246e-06, + "loss": 0.881, + "step": 3785 + }, + { + "epoch": 0.34143481985841184, + "grad_norm": 1.4874427015023757, + "learning_rate": 3.0660244336589154e-06, + "loss": 0.9632, + "step": 3786 + }, + { + "epoch": 0.34152500338188213, + "grad_norm": 1.3471647389045869, + "learning_rate": 3.065530083166288e-06, + "loss": 0.979, + "step": 3787 + }, + { + "epoch": 0.3416151869053524, + "grad_norm": 1.2722654507209112, + "learning_rate": 3.0650356417567586e-06, + "loss": 0.926, + "step": 3788 + }, + { + "epoch": 0.34170537042882265, + "grad_norm": 1.4406602371110007, + "learning_rate": 3.0645411094725156e-06, + "loss": 1.0084, + "step": 3789 + }, + { + "epoch": 0.34179555395229294, + "grad_norm": 1.4231635323100664, + "learning_rate": 3.0640464863557556e-06, + "loss": 0.96, + "step": 3790 + }, + { + "epoch": 0.3418857374757632, + "grad_norm": 1.4821079811418203, + "learning_rate": 3.063551772448682e-06, + "loss": 0.9975, + "step": 3791 + }, + { + "epoch": 0.34197592099923346, + "grad_norm": 1.6468396148644202, + "learning_rate": 3.0630569677935075e-06, + "loss": 0.894, + "step": 3792 + }, + { + "epoch": 0.3420661045227037, + "grad_norm": 1.589532470222679, + "learning_rate": 
3.06256207243245e-06, + "loss": 1.0442, + "step": 3793 + }, + { + "epoch": 0.342156288046174, + "grad_norm": 0.7922120641628115, + "learning_rate": 3.0620670864077385e-06, + "loss": 0.8093, + "step": 3794 + }, + { + "epoch": 0.3422464715696442, + "grad_norm": 0.6524368493809303, + "learning_rate": 3.0615720097616063e-06, + "loss": 0.8021, + "step": 3795 + }, + { + "epoch": 0.3423366550931145, + "grad_norm": 1.5207896378741557, + "learning_rate": 3.0610768425362967e-06, + "loss": 0.9232, + "step": 3796 + }, + { + "epoch": 0.34242683861658474, + "grad_norm": 1.497163793304097, + "learning_rate": 3.0605815847740603e-06, + "loss": 0.946, + "step": 3797 + }, + { + "epoch": 0.342517022140055, + "grad_norm": 1.863414383776371, + "learning_rate": 3.0600862365171553e-06, + "loss": 0.9328, + "step": 3798 + }, + { + "epoch": 0.34260720566352526, + "grad_norm": 1.4910259419364869, + "learning_rate": 3.0595907978078474e-06, + "loss": 0.9437, + "step": 3799 + }, + { + "epoch": 0.34269738918699555, + "grad_norm": 1.3985405995814613, + "learning_rate": 3.05909526868841e-06, + "loss": 0.9385, + "step": 3800 + }, + { + "epoch": 0.3427875727104658, + "grad_norm": 1.6696762885935124, + "learning_rate": 3.0585996492011243e-06, + "loss": 0.9719, + "step": 3801 + }, + { + "epoch": 0.34287775623393607, + "grad_norm": 1.7322351813115369, + "learning_rate": 3.05810393938828e-06, + "loss": 1.0057, + "step": 3802 + }, + { + "epoch": 0.3429679397574063, + "grad_norm": 1.5087729443920619, + "learning_rate": 3.0576081392921723e-06, + "loss": 0.8835, + "step": 3803 + }, + { + "epoch": 0.3430581232808766, + "grad_norm": 1.4311713351158042, + "learning_rate": 3.057112248955107e-06, + "loss": 1.015, + "step": 3804 + }, + { + "epoch": 0.3431483068043468, + "grad_norm": 1.6761831944158327, + "learning_rate": 3.0566162684193963e-06, + "loss": 0.9863, + "step": 3805 + }, + { + "epoch": 0.3432384903278171, + "grad_norm": 1.2201722966928228, + "learning_rate": 3.056120197727359e-06, + "loss": 0.9121, + "step": 3806 + }, + { + "epoch": 0.34332867385128735, + "grad_norm": 1.5858274525012, + "learning_rate": 3.0556240369213236e-06, + "loss": 0.9561, + "step": 3807 + }, + { + "epoch": 0.34341885737475764, + "grad_norm": 1.8553734042114476, + "learning_rate": 3.055127786043624e-06, + "loss": 0.8728, + "step": 3808 + }, + { + "epoch": 0.34350904089822787, + "grad_norm": 1.4229669255791275, + "learning_rate": 3.054631445136604e-06, + "loss": 1.0504, + "step": 3809 + }, + { + "epoch": 0.34359922442169816, + "grad_norm": 2.606105103341357, + "learning_rate": 3.0541350142426147e-06, + "loss": 0.977, + "step": 3810 + }, + { + "epoch": 0.3436894079451684, + "grad_norm": 1.249782447956871, + "learning_rate": 3.053638493404012e-06, + "loss": 0.9187, + "step": 3811 + }, + { + "epoch": 0.3437795914686387, + "grad_norm": 1.3235717813404446, + "learning_rate": 3.0531418826631643e-06, + "loss": 0.9005, + "step": 3812 + }, + { + "epoch": 0.34386977499210897, + "grad_norm": 1.8094108169515686, + "learning_rate": 3.052645182062444e-06, + "loss": 1.0506, + "step": 3813 + }, + { + "epoch": 0.3439599585155792, + "grad_norm": 1.4903839556216916, + "learning_rate": 3.0521483916442324e-06, + "loss": 0.9991, + "step": 3814 + }, + { + "epoch": 0.3440501420390495, + "grad_norm": 1.3256332809427, + "learning_rate": 3.0516515114509183e-06, + "loss": 0.9582, + "step": 3815 + }, + { + "epoch": 0.3441403255625197, + "grad_norm": 1.2690965113512453, + "learning_rate": 3.0511545415249e-06, + "loss": 0.9147, + "step": 3816 + }, + { + "epoch": 0.34423050908599, + 
"grad_norm": 1.4424750441271723, + "learning_rate": 3.050657481908579e-06, + "loss": 0.9932, + "step": 3817 + }, + { + "epoch": 0.34432069260946024, + "grad_norm": 1.503733122137138, + "learning_rate": 3.0501603326443677e-06, + "loss": 1.0702, + "step": 3818 + }, + { + "epoch": 0.34441087613293053, + "grad_norm": 1.3253816432856544, + "learning_rate": 3.049663093774687e-06, + "loss": 0.9548, + "step": 3819 + }, + { + "epoch": 0.34450105965640077, + "grad_norm": 1.6165778443754755, + "learning_rate": 3.0491657653419643e-06, + "loss": 0.8651, + "step": 3820 + }, + { + "epoch": 0.34459124317987105, + "grad_norm": 1.4302581315378704, + "learning_rate": 3.0486683473886325e-06, + "loss": 1.0157, + "step": 3821 + }, + { + "epoch": 0.3446814267033413, + "grad_norm": 3.8958360940580055, + "learning_rate": 3.0481708399571355e-06, + "loss": 1.0215, + "step": 3822 + }, + { + "epoch": 0.3447716102268116, + "grad_norm": 1.432409561391417, + "learning_rate": 3.047673243089922e-06, + "loss": 0.9853, + "step": 3823 + }, + { + "epoch": 0.3448617937502818, + "grad_norm": 1.5904945335018423, + "learning_rate": 3.047175556829451e-06, + "loss": 0.9945, + "step": 3824 + }, + { + "epoch": 0.3449519772737521, + "grad_norm": 1.330468522417337, + "learning_rate": 3.046677781218188e-06, + "loss": 1.0084, + "step": 3825 + }, + { + "epoch": 0.34504216079722233, + "grad_norm": 1.2992869549050832, + "learning_rate": 3.0461799162986043e-06, + "loss": 0.9388, + "step": 3826 + }, + { + "epoch": 0.3451323443206926, + "grad_norm": 1.4786199535413074, + "learning_rate": 3.045681962113183e-06, + "loss": 0.8889, + "step": 3827 + }, + { + "epoch": 0.34522252784416285, + "grad_norm": 1.4806981374535537, + "learning_rate": 3.0451839187044095e-06, + "loss": 0.9133, + "step": 3828 + }, + { + "epoch": 0.34531271136763314, + "grad_norm": 1.4270911483365036, + "learning_rate": 3.0446857861147816e-06, + "loss": 0.9772, + "step": 3829 + }, + { + "epoch": 0.3454028948911034, + "grad_norm": 1.4722408447447355, + "learning_rate": 3.044187564386802e-06, + "loss": 0.9336, + "step": 3830 + }, + { + "epoch": 0.34549307841457366, + "grad_norm": 1.6778703663325845, + "learning_rate": 3.0436892535629818e-06, + "loss": 1.0295, + "step": 3831 + }, + { + "epoch": 0.3455832619380439, + "grad_norm": 1.519354018047266, + "learning_rate": 3.0431908536858393e-06, + "loss": 1.0414, + "step": 3832 + }, + { + "epoch": 0.3456734454615142, + "grad_norm": 1.700967314341642, + "learning_rate": 3.0426923647979016e-06, + "loss": 0.9519, + "step": 3833 + }, + { + "epoch": 0.3457636289849844, + "grad_norm": 1.494110226789427, + "learning_rate": 3.0421937869417016e-06, + "loss": 0.9065, + "step": 3834 + }, + { + "epoch": 0.3458538125084547, + "grad_norm": 1.479716328679452, + "learning_rate": 3.041695120159782e-06, + "loss": 1.0315, + "step": 3835 + }, + { + "epoch": 0.345943996031925, + "grad_norm": 1.3819840099138383, + "learning_rate": 3.04119636449469e-06, + "loss": 0.9066, + "step": 3836 + }, + { + "epoch": 0.3460341795553952, + "grad_norm": 1.2599369376159681, + "learning_rate": 3.040697519988983e-06, + "loss": 0.9929, + "step": 3837 + }, + { + "epoch": 0.3461243630788655, + "grad_norm": 0.7160642034073039, + "learning_rate": 3.040198586685226e-06, + "loss": 0.8083, + "step": 3838 + }, + { + "epoch": 0.34621454660233575, + "grad_norm": 1.448261942506869, + "learning_rate": 3.039699564625989e-06, + "loss": 0.9221, + "step": 3839 + }, + { + "epoch": 0.34630473012580604, + "grad_norm": 0.9290189823822855, + "learning_rate": 3.039200453853853e-06, + "loss": 0.8199, 
+ "step": 3840 + }, + { + "epoch": 0.34639491364927627, + "grad_norm": 1.573283104391101, + "learning_rate": 3.038701254411404e-06, + "loss": 1.0105, + "step": 3841 + }, + { + "epoch": 0.34648509717274656, + "grad_norm": 1.5254058641754382, + "learning_rate": 3.0382019663412367e-06, + "loss": 0.9826, + "step": 3842 + }, + { + "epoch": 0.3465752806962168, + "grad_norm": 1.6932632788612398, + "learning_rate": 3.0377025896859532e-06, + "loss": 1.0095, + "step": 3843 + }, + { + "epoch": 0.3466654642196871, + "grad_norm": 1.343466617954592, + "learning_rate": 3.0372031244881627e-06, + "loss": 1.067, + "step": 3844 + }, + { + "epoch": 0.3467556477431573, + "grad_norm": 1.393926152614356, + "learning_rate": 3.0367035707904826e-06, + "loss": 0.93, + "step": 3845 + }, + { + "epoch": 0.3468458312666276, + "grad_norm": 1.620948435693362, + "learning_rate": 3.036203928635537e-06, + "loss": 0.8847, + "step": 3846 + }, + { + "epoch": 0.34693601479009784, + "grad_norm": 2.1929731292191645, + "learning_rate": 3.035704198065959e-06, + "loss": 1.0306, + "step": 3847 + }, + { + "epoch": 0.3470261983135681, + "grad_norm": 1.514667956031555, + "learning_rate": 3.0352043791243886e-06, + "loss": 0.9478, + "step": 3848 + }, + { + "epoch": 0.34711638183703836, + "grad_norm": 0.7272332489612775, + "learning_rate": 3.034704471853472e-06, + "loss": 0.8114, + "step": 3849 + }, + { + "epoch": 0.34720656536050865, + "grad_norm": 1.3007974709539005, + "learning_rate": 3.0342044762958646e-06, + "loss": 1.0176, + "step": 3850 + }, + { + "epoch": 0.3472967488839789, + "grad_norm": 1.4809004423484062, + "learning_rate": 3.0337043924942286e-06, + "loss": 1.0064, + "step": 3851 + }, + { + "epoch": 0.34738693240744917, + "grad_norm": 1.2752668662437523, + "learning_rate": 3.0332042204912343e-06, + "loss": 0.9497, + "step": 3852 + }, + { + "epoch": 0.3474771159309194, + "grad_norm": 1.0757615541249432, + "learning_rate": 3.0327039603295587e-06, + "loss": 0.8533, + "step": 3853 + }, + { + "epoch": 0.3475672994543897, + "grad_norm": 1.7054417103446904, + "learning_rate": 3.032203612051887e-06, + "loss": 0.9338, + "step": 3854 + }, + { + "epoch": 0.3476574829778599, + "grad_norm": 1.6638661300059367, + "learning_rate": 3.0317031757009116e-06, + "loss": 0.955, + "step": 3855 + }, + { + "epoch": 0.3477476665013302, + "grad_norm": 1.5677975685179608, + "learning_rate": 3.0312026513193326e-06, + "loss": 0.9304, + "step": 3856 + }, + { + "epoch": 0.34783785002480044, + "grad_norm": 1.3724830305374123, + "learning_rate": 3.0307020389498573e-06, + "loss": 0.9358, + "step": 3857 + }, + { + "epoch": 0.34792803354827073, + "grad_norm": 1.2546488146679067, + "learning_rate": 3.0302013386352004e-06, + "loss": 0.8993, + "step": 3858 + }, + { + "epoch": 0.348018217071741, + "grad_norm": 1.3357907487899883, + "learning_rate": 3.0297005504180854e-06, + "loss": 0.9572, + "step": 3859 + }, + { + "epoch": 0.34810840059521125, + "grad_norm": 0.9911235601689184, + "learning_rate": 3.0291996743412417e-06, + "loss": 0.7458, + "step": 3860 + }, + { + "epoch": 0.34819858411868154, + "grad_norm": 1.8989711686406967, + "learning_rate": 3.0286987104474063e-06, + "loss": 1.054, + "step": 3861 + }, + { + "epoch": 0.3482887676421518, + "grad_norm": 1.1777893357239624, + "learning_rate": 3.028197658779325e-06, + "loss": 0.942, + "step": 3862 + }, + { + "epoch": 0.34837895116562206, + "grad_norm": 1.6969682193673294, + "learning_rate": 3.0276965193797503e-06, + "loss": 0.9875, + "step": 3863 + }, + { + "epoch": 0.3484691346890923, + "grad_norm": 
1.3231269138702548, + "learning_rate": 3.0271952922914423e-06, + "loss": 0.9325, + "step": 3864 + }, + { + "epoch": 0.3485593182125626, + "grad_norm": 1.4133068194166585, + "learning_rate": 3.0266939775571675e-06, + "loss": 0.9523, + "step": 3865 + }, + { + "epoch": 0.3486495017360328, + "grad_norm": 1.4402153243565612, + "learning_rate": 3.026192575219701e-06, + "loss": 0.9541, + "step": 3866 + }, + { + "epoch": 0.3487396852595031, + "grad_norm": 1.322736737888653, + "learning_rate": 3.025691085321826e-06, + "loss": 0.8637, + "step": 3867 + }, + { + "epoch": 0.34882986878297334, + "grad_norm": 1.5940893971204886, + "learning_rate": 3.025189507906332e-06, + "loss": 0.9439, + "step": 3868 + }, + { + "epoch": 0.34892005230644363, + "grad_norm": 1.480837974705647, + "learning_rate": 3.0246878430160166e-06, + "loss": 0.9862, + "step": 3869 + }, + { + "epoch": 0.34901023582991386, + "grad_norm": 1.4250753248952723, + "learning_rate": 3.024186090693684e-06, + "loss": 0.9073, + "step": 3870 + }, + { + "epoch": 0.34910041935338415, + "grad_norm": 1.5057054730449835, + "learning_rate": 3.023684250982147e-06, + "loss": 0.9861, + "step": 3871 + }, + { + "epoch": 0.3491906028768544, + "grad_norm": 1.501096642480396, + "learning_rate": 3.0231823239242252e-06, + "loss": 0.8834, + "step": 3872 + }, + { + "epoch": 0.34928078640032467, + "grad_norm": 1.6377955424801642, + "learning_rate": 3.0226803095627457e-06, + "loss": 0.9176, + "step": 3873 + }, + { + "epoch": 0.3493709699237949, + "grad_norm": 1.6892254097122459, + "learning_rate": 3.022178207940543e-06, + "loss": 0.9435, + "step": 3874 + }, + { + "epoch": 0.3494611534472652, + "grad_norm": 1.6560966132771162, + "learning_rate": 3.02167601910046e-06, + "loss": 0.9755, + "step": 3875 + }, + { + "epoch": 0.3495513369707354, + "grad_norm": 1.3771559978402557, + "learning_rate": 3.021173743085345e-06, + "loss": 0.9855, + "step": 3876 + }, + { + "epoch": 0.3496415204942057, + "grad_norm": 0.9459424717712306, + "learning_rate": 3.0206713799380557e-06, + "loss": 0.7962, + "step": 3877 + }, + { + "epoch": 0.34973170401767595, + "grad_norm": 1.5689783454455875, + "learning_rate": 3.0201689297014565e-06, + "loss": 0.9648, + "step": 3878 + }, + { + "epoch": 0.34982188754114624, + "grad_norm": 1.3856743546618966, + "learning_rate": 3.0196663924184187e-06, + "loss": 1.0088, + "step": 3879 + }, + { + "epoch": 0.34991207106461647, + "grad_norm": 1.69126225446616, + "learning_rate": 3.019163768131822e-06, + "loss": 0.991, + "step": 3880 + }, + { + "epoch": 0.35000225458808676, + "grad_norm": 1.5682208806476425, + "learning_rate": 3.0186610568845533e-06, + "loss": 0.8834, + "step": 3881 + }, + { + "epoch": 0.350092438111557, + "grad_norm": 1.5885581927664532, + "learning_rate": 3.018158258719507e-06, + "loss": 0.8942, + "step": 3882 + }, + { + "epoch": 0.3501826216350273, + "grad_norm": 1.3082020187487158, + "learning_rate": 3.0176553736795827e-06, + "loss": 0.9887, + "step": 3883 + }, + { + "epoch": 0.35027280515849757, + "grad_norm": 1.236210098779813, + "learning_rate": 3.017152401807691e-06, + "loss": 0.7279, + "step": 3884 + }, + { + "epoch": 0.3503629886819678, + "grad_norm": 1.3100613236087846, + "learning_rate": 3.0166493431467476e-06, + "loss": 0.9369, + "step": 3885 + }, + { + "epoch": 0.3504531722054381, + "grad_norm": 1.3473031728660287, + "learning_rate": 3.016146197739677e-06, + "loss": 0.9361, + "step": 3886 + }, + { + "epoch": 0.3505433557289083, + "grad_norm": 1.6456444386436337, + "learning_rate": 3.0156429656294097e-06, + "loss": 0.8911, + "step": 
3887 + }, + { + "epoch": 0.3506335392523786, + "grad_norm": 1.4877227776661368, + "learning_rate": 3.0151396468588844e-06, + "loss": 0.9035, + "step": 3888 + }, + { + "epoch": 0.35072372277584885, + "grad_norm": 1.724766828838346, + "learning_rate": 3.014636241471047e-06, + "loss": 0.9167, + "step": 3889 + }, + { + "epoch": 0.35081390629931913, + "grad_norm": 1.5237418628753872, + "learning_rate": 3.0141327495088514e-06, + "loss": 0.9603, + "step": 3890 + }, + { + "epoch": 0.35090408982278937, + "grad_norm": 1.5448549740818505, + "learning_rate": 3.0136291710152566e-06, + "loss": 0.9251, + "step": 3891 + }, + { + "epoch": 0.35099427334625966, + "grad_norm": 1.561219720844619, + "learning_rate": 3.0131255060332325e-06, + "loss": 0.9959, + "step": 3892 + }, + { + "epoch": 0.3510844568697299, + "grad_norm": 1.6031145716396775, + "learning_rate": 3.012621754605754e-06, + "loss": 0.9764, + "step": 3893 + }, + { + "epoch": 0.3511746403932002, + "grad_norm": 1.4759402301027278, + "learning_rate": 3.0121179167758035e-06, + "loss": 0.9927, + "step": 3894 + }, + { + "epoch": 0.3512648239166704, + "grad_norm": 1.7134090322007753, + "learning_rate": 3.0116139925863717e-06, + "loss": 1.0361, + "step": 3895 + }, + { + "epoch": 0.3513550074401407, + "grad_norm": 1.3927998627073734, + "learning_rate": 3.011109982080456e-06, + "loss": 0.9946, + "step": 3896 + }, + { + "epoch": 0.35144519096361093, + "grad_norm": 1.904977092054506, + "learning_rate": 3.0106058853010614e-06, + "loss": 0.9055, + "step": 3897 + }, + { + "epoch": 0.3515353744870812, + "grad_norm": 1.3963144905954132, + "learning_rate": 3.010101702291201e-06, + "loss": 0.9518, + "step": 3898 + }, + { + "epoch": 0.35162555801055145, + "grad_norm": 1.657055281159541, + "learning_rate": 3.009597433093893e-06, + "loss": 0.9527, + "step": 3899 + }, + { + "epoch": 0.35171574153402174, + "grad_norm": 2.453389439027663, + "learning_rate": 3.009093077752165e-06, + "loss": 0.9736, + "step": 3900 + }, + { + "epoch": 0.351805925057492, + "grad_norm": 1.4752978202026457, + "learning_rate": 3.008588636309052e-06, + "loss": 0.9942, + "step": 3901 + }, + { + "epoch": 0.35189610858096226, + "grad_norm": 1.4728875016594294, + "learning_rate": 3.0080841088075947e-06, + "loss": 0.9277, + "step": 3902 + }, + { + "epoch": 0.3519862921044325, + "grad_norm": 1.5845385615627106, + "learning_rate": 3.0075794952908436e-06, + "loss": 0.9997, + "step": 3903 + }, + { + "epoch": 0.3520764756279028, + "grad_norm": 1.3113999525210098, + "learning_rate": 3.0070747958018528e-06, + "loss": 0.9699, + "step": 3904 + }, + { + "epoch": 0.352166659151373, + "grad_norm": 1.203213576982373, + "learning_rate": 3.0065700103836894e-06, + "loss": 0.9734, + "step": 3905 + }, + { + "epoch": 0.3522568426748433, + "grad_norm": 1.4884575366446182, + "learning_rate": 3.0060651390794214e-06, + "loss": 0.9817, + "step": 3906 + }, + { + "epoch": 0.3523470261983136, + "grad_norm": 1.2463352842238296, + "learning_rate": 3.005560181932128e-06, + "loss": 0.9358, + "step": 3907 + }, + { + "epoch": 0.35243720972178383, + "grad_norm": 1.5199379658588714, + "learning_rate": 3.005055138984896e-06, + "loss": 0.9064, + "step": 3908 + }, + { + "epoch": 0.3525273932452541, + "grad_norm": 1.297043065590687, + "learning_rate": 3.0045500102808174e-06, + "loss": 0.9409, + "step": 3909 + }, + { + "epoch": 0.35261757676872435, + "grad_norm": 2.2064561482801888, + "learning_rate": 3.0040447958629927e-06, + "loss": 1.0469, + "step": 3910 + }, + { + "epoch": 0.35270776029219464, + "grad_norm": 1.4427524472686684, + 
"learning_rate": 3.00353949577453e-06, + "loss": 0.9867, + "step": 3911 + }, + { + "epoch": 0.35279794381566487, + "grad_norm": 1.8043980882447215, + "learning_rate": 3.003034110058544e-06, + "loss": 0.7983, + "step": 3912 + }, + { + "epoch": 0.35288812733913516, + "grad_norm": 1.358554241533858, + "learning_rate": 3.002528638758157e-06, + "loss": 0.944, + "step": 3913 + }, + { + "epoch": 0.3529783108626054, + "grad_norm": 1.7288413910005789, + "learning_rate": 3.0020230819164985e-06, + "loss": 0.9668, + "step": 3914 + }, + { + "epoch": 0.3530684943860757, + "grad_norm": 1.4476763390898604, + "learning_rate": 3.0015174395767064e-06, + "loss": 0.9643, + "step": 3915 + }, + { + "epoch": 0.3531586779095459, + "grad_norm": 1.7303997224081789, + "learning_rate": 3.001011711781923e-06, + "loss": 0.9933, + "step": 3916 + }, + { + "epoch": 0.3532488614330162, + "grad_norm": 1.4168924918354955, + "learning_rate": 3.0005058985753017e-06, + "loss": 0.9162, + "step": 3917 + }, + { + "epoch": 0.35333904495648644, + "grad_norm": 1.3276238337372395, + "learning_rate": 3e-06, + "loss": 0.9454, + "step": 3918 + }, + { + "epoch": 0.3534292284799567, + "grad_norm": 0.950597946313734, + "learning_rate": 2.9994940160991843e-06, + "loss": 0.8212, + "step": 3919 + }, + { + "epoch": 0.35351941200342696, + "grad_norm": 0.6852094031565561, + "learning_rate": 2.9989879469160285e-06, + "loss": 0.7769, + "step": 3920 + }, + { + "epoch": 0.35360959552689725, + "grad_norm": 1.957131510772274, + "learning_rate": 2.9984817924937124e-06, + "loss": 0.9759, + "step": 3921 + }, + { + "epoch": 0.3536997790503675, + "grad_norm": 1.7349134417002656, + "learning_rate": 2.997975552875424e-06, + "loss": 0.9041, + "step": 3922 + }, + { + "epoch": 0.35378996257383777, + "grad_norm": 1.5073031024547379, + "learning_rate": 2.997469228104358e-06, + "loss": 0.9429, + "step": 3923 + }, + { + "epoch": 0.353880146097308, + "grad_norm": 1.2522966743707153, + "learning_rate": 2.996962818223718e-06, + "loss": 0.8438, + "step": 3924 + }, + { + "epoch": 0.3539703296207783, + "grad_norm": 1.5206595892055117, + "learning_rate": 2.9964563232767135e-06, + "loss": 0.901, + "step": 3925 + }, + { + "epoch": 0.3540605131442485, + "grad_norm": 1.644005915805169, + "learning_rate": 2.9959497433065617e-06, + "loss": 0.9577, + "step": 3926 + }, + { + "epoch": 0.3541506966677188, + "grad_norm": 1.4329430041268816, + "learning_rate": 2.9954430783564848e-06, + "loss": 0.8797, + "step": 3927 + }, + { + "epoch": 0.35424088019118904, + "grad_norm": 1.7041657094167744, + "learning_rate": 2.994936328469716e-06, + "loss": 0.9793, + "step": 3928 + }, + { + "epoch": 0.35433106371465933, + "grad_norm": 1.2782147905292929, + "learning_rate": 2.994429493689494e-06, + "loss": 0.955, + "step": 3929 + }, + { + "epoch": 0.35442124723812957, + "grad_norm": 1.3009892661071893, + "learning_rate": 2.9939225740590642e-06, + "loss": 0.9825, + "step": 3930 + }, + { + "epoch": 0.35451143076159986, + "grad_norm": 1.4138444764049167, + "learning_rate": 2.99341556962168e-06, + "loss": 0.9197, + "step": 3931 + }, + { + "epoch": 0.35460161428507014, + "grad_norm": 1.3080328806935255, + "learning_rate": 2.992908480420602e-06, + "loss": 1.0, + "step": 3932 + }, + { + "epoch": 0.3546917978085404, + "grad_norm": 1.609065794638563, + "learning_rate": 2.9924013064990974e-06, + "loss": 0.95, + "step": 3933 + }, + { + "epoch": 0.35478198133201067, + "grad_norm": 0.875259395024843, + "learning_rate": 2.991894047900441e-06, + "loss": 0.8306, + "step": 3934 + }, + { + "epoch": 0.3548721648554809, + 
"grad_norm": 1.3823634566192384, + "learning_rate": 2.991386704667916e-06, + "loss": 0.9665, + "step": 3935 + }, + { + "epoch": 0.3549623483789512, + "grad_norm": 1.742456047675012, + "learning_rate": 2.9908792768448097e-06, + "loss": 1.0171, + "step": 3936 + }, + { + "epoch": 0.3550525319024214, + "grad_norm": 1.541170023059762, + "learning_rate": 2.990371764474421e-06, + "loss": 0.9553, + "step": 3937 + }, + { + "epoch": 0.3551427154258917, + "grad_norm": 3.2140605060458083, + "learning_rate": 2.9898641676000518e-06, + "loss": 0.9338, + "step": 3938 + }, + { + "epoch": 0.35523289894936194, + "grad_norm": 1.4632406592658334, + "learning_rate": 2.9893564862650138e-06, + "loss": 0.9563, + "step": 3939 + }, + { + "epoch": 0.35532308247283223, + "grad_norm": 1.692993969414792, + "learning_rate": 2.9888487205126254e-06, + "loss": 1.0092, + "step": 3940 + }, + { + "epoch": 0.35541326599630246, + "grad_norm": 1.3541773379956883, + "learning_rate": 2.9883408703862115e-06, + "loss": 0.949, + "step": 3941 + }, + { + "epoch": 0.35550344951977275, + "grad_norm": 1.9545172649996005, + "learning_rate": 2.987832935929105e-06, + "loss": 1.0407, + "step": 3942 + }, + { + "epoch": 0.355593633043243, + "grad_norm": 1.4610351693047756, + "learning_rate": 2.9873249171846454e-06, + "loss": 1.0203, + "step": 3943 + }, + { + "epoch": 0.3556838165667133, + "grad_norm": 2.70043423304421, + "learning_rate": 2.98681681419618e-06, + "loss": 0.9501, + "step": 3944 + }, + { + "epoch": 0.3557740000901835, + "grad_norm": 1.5120495180518705, + "learning_rate": 2.9863086270070627e-06, + "loss": 1.0052, + "step": 3945 + }, + { + "epoch": 0.3558641836136538, + "grad_norm": 1.541613947336385, + "learning_rate": 2.985800355660655e-06, + "loss": 0.967, + "step": 3946 + }, + { + "epoch": 0.35595436713712403, + "grad_norm": 0.7496080964093935, + "learning_rate": 2.9852920002003252e-06, + "loss": 0.825, + "step": 3947 + }, + { + "epoch": 0.3560445506605943, + "grad_norm": 1.6550004672139134, + "learning_rate": 2.9847835606694494e-06, + "loss": 0.993, + "step": 3948 + }, + { + "epoch": 0.35613473418406455, + "grad_norm": 1.6310970658561768, + "learning_rate": 2.98427503711141e-06, + "loss": 0.8855, + "step": 3949 + }, + { + "epoch": 0.35622491770753484, + "grad_norm": 1.6507424966981596, + "learning_rate": 2.9837664295695973e-06, + "loss": 0.9945, + "step": 3950 + }, + { + "epoch": 0.35631510123100507, + "grad_norm": 1.316037768853914, + "learning_rate": 2.983257738087408e-06, + "loss": 0.9525, + "step": 3951 + }, + { + "epoch": 0.35640528475447536, + "grad_norm": 1.4395057948089596, + "learning_rate": 2.982748962708247e-06, + "loss": 0.9866, + "step": 3952 + }, + { + "epoch": 0.3564954682779456, + "grad_norm": 1.3050950835655277, + "learning_rate": 2.982240103475526e-06, + "loss": 1.0079, + "step": 3953 + }, + { + "epoch": 0.3565856518014159, + "grad_norm": 1.8660948042014966, + "learning_rate": 2.981731160432663e-06, + "loss": 0.9601, + "step": 3954 + }, + { + "epoch": 0.35667583532488617, + "grad_norm": 1.4859580181963556, + "learning_rate": 2.981222133623084e-06, + "loss": 0.8645, + "step": 3955 + }, + { + "epoch": 0.3567660188483564, + "grad_norm": 1.4279778269053414, + "learning_rate": 2.980713023090222e-06, + "loss": 0.9674, + "step": 3956 + }, + { + "epoch": 0.3568562023718267, + "grad_norm": 1.5048343321400866, + "learning_rate": 2.980203828877518e-06, + "loss": 0.9361, + "step": 3957 + }, + { + "epoch": 0.3569463858952969, + "grad_norm": 1.70701923846246, + "learning_rate": 2.9796945510284182e-06, + "loss": 0.9959, + 
"step": 3958 + }, + { + "epoch": 0.3570365694187672, + "grad_norm": 1.449884308112972, + "learning_rate": 2.9791851895863774e-06, + "loss": 1.0111, + "step": 3959 + }, + { + "epoch": 0.35712675294223745, + "grad_norm": 1.3970607608451047, + "learning_rate": 2.978675744594857e-06, + "loss": 0.9927, + "step": 3960 + }, + { + "epoch": 0.35721693646570774, + "grad_norm": 1.3550964635739093, + "learning_rate": 2.978166216097326e-06, + "loss": 0.9139, + "step": 3961 + }, + { + "epoch": 0.35730711998917797, + "grad_norm": 1.4306049009205308, + "learning_rate": 2.9776566041372596e-06, + "loss": 0.9492, + "step": 3962 + }, + { + "epoch": 0.35739730351264826, + "grad_norm": 1.3563460004028611, + "learning_rate": 2.977146908758141e-06, + "loss": 1.0227, + "step": 3963 + }, + { + "epoch": 0.3574874870361185, + "grad_norm": 1.4463056296791243, + "learning_rate": 2.9766371300034604e-06, + "loss": 0.9753, + "step": 3964 + }, + { + "epoch": 0.3575776705595888, + "grad_norm": 1.2445542921865447, + "learning_rate": 2.9761272679167142e-06, + "loss": 1.0047, + "step": 3965 + }, + { + "epoch": 0.357667854083059, + "grad_norm": 1.5785333841519482, + "learning_rate": 2.9756173225414072e-06, + "loss": 0.8108, + "step": 3966 + }, + { + "epoch": 0.3577580376065293, + "grad_norm": 1.487651862078, + "learning_rate": 2.975107293921051e-06, + "loss": 1.1339, + "step": 3967 + }, + { + "epoch": 0.35784822112999953, + "grad_norm": 1.4869470899053876, + "learning_rate": 2.9745971820991643e-06, + "loss": 1.006, + "step": 3968 + }, + { + "epoch": 0.3579384046534698, + "grad_norm": 1.4246876324057178, + "learning_rate": 2.9740869871192715e-06, + "loss": 0.9934, + "step": 3969 + }, + { + "epoch": 0.35802858817694005, + "grad_norm": 1.2763438379535057, + "learning_rate": 2.9735767090249065e-06, + "loss": 1.0173, + "step": 3970 + }, + { + "epoch": 0.35811877170041034, + "grad_norm": 1.4911887936050277, + "learning_rate": 2.973066347859608e-06, + "loss": 0.8737, + "step": 3971 + }, + { + "epoch": 0.3582089552238806, + "grad_norm": 1.2365179172332996, + "learning_rate": 2.972555903666923e-06, + "loss": 0.9743, + "step": 3972 + }, + { + "epoch": 0.35829913874735086, + "grad_norm": 1.7385681601353808, + "learning_rate": 2.972045376490406e-06, + "loss": 0.8681, + "step": 3973 + }, + { + "epoch": 0.3583893222708211, + "grad_norm": 1.4208480983351037, + "learning_rate": 2.9715347663736177e-06, + "loss": 0.884, + "step": 3974 + }, + { + "epoch": 0.3584795057942914, + "grad_norm": 1.3199302074737223, + "learning_rate": 2.9710240733601266e-06, + "loss": 0.9525, + "step": 3975 + }, + { + "epoch": 0.3585696893177616, + "grad_norm": 1.577696109316189, + "learning_rate": 2.970513297493507e-06, + "loss": 1.0406, + "step": 3976 + }, + { + "epoch": 0.3586598728412319, + "grad_norm": 1.2095396279745985, + "learning_rate": 2.9700024388173416e-06, + "loss": 1.0072, + "step": 3977 + }, + { + "epoch": 0.35875005636470214, + "grad_norm": 1.6961425296300137, + "learning_rate": 2.969491497375219e-06, + "loss": 1.0088, + "step": 3978 + }, + { + "epoch": 0.35884023988817243, + "grad_norm": 1.359006064324957, + "learning_rate": 2.9689804732107364e-06, + "loss": 0.9815, + "step": 3979 + }, + { + "epoch": 0.3589304234116427, + "grad_norm": 1.5471811473256034, + "learning_rate": 2.9684693663674968e-06, + "loss": 0.9201, + "step": 3980 + }, + { + "epoch": 0.35902060693511295, + "grad_norm": 1.3666336681677593, + "learning_rate": 2.9679581768891115e-06, + "loss": 1.0492, + "step": 3981 + }, + { + "epoch": 0.35911079045858324, + "grad_norm": 1.501969236924919, + 
"learning_rate": 2.967446904819197e-06, + "loss": 0.8833, + "step": 3982 + }, + { + "epoch": 0.3592009739820535, + "grad_norm": 1.3899712472452947, + "learning_rate": 2.966935550201378e-06, + "loss": 0.9816, + "step": 3983 + }, + { + "epoch": 0.35929115750552376, + "grad_norm": 1.3575537308795689, + "learning_rate": 2.966424113079286e-06, + "loss": 0.957, + "step": 3984 + }, + { + "epoch": 0.359381341028994, + "grad_norm": 1.359755495686269, + "learning_rate": 2.9659125934965596e-06, + "loss": 1.0085, + "step": 3985 + }, + { + "epoch": 0.3594715245524643, + "grad_norm": 1.9853566662694175, + "learning_rate": 2.9654009914968457e-06, + "loss": 1.0257, + "step": 3986 + }, + { + "epoch": 0.3595617080759345, + "grad_norm": 2.8038378095205805, + "learning_rate": 2.9648893071237956e-06, + "loss": 0.8446, + "step": 3987 + }, + { + "epoch": 0.3596518915994048, + "grad_norm": 1.6327320425844776, + "learning_rate": 2.964377540421069e-06, + "loss": 0.9377, + "step": 3988 + }, + { + "epoch": 0.35974207512287504, + "grad_norm": 1.7325602479397617, + "learning_rate": 2.963865691432334e-06, + "loss": 0.9879, + "step": 3989 + }, + { + "epoch": 0.3598322586463453, + "grad_norm": 1.3621251579313283, + "learning_rate": 2.963353760201263e-06, + "loss": 0.8786, + "step": 3990 + }, + { + "epoch": 0.35992244216981556, + "grad_norm": 2.9708626708677186, + "learning_rate": 2.962841746771537e-06, + "loss": 0.9445, + "step": 3991 + }, + { + "epoch": 0.36001262569328585, + "grad_norm": 1.37619815988638, + "learning_rate": 2.9623296511868445e-06, + "loss": 0.9596, + "step": 3992 + }, + { + "epoch": 0.3601028092167561, + "grad_norm": 1.2449197530354328, + "learning_rate": 2.96181747349088e-06, + "loss": 0.8847, + "step": 3993 + }, + { + "epoch": 0.36019299274022637, + "grad_norm": 1.4556048163279547, + "learning_rate": 2.961305213727345e-06, + "loss": 0.9807, + "step": 3994 + }, + { + "epoch": 0.3602831762636966, + "grad_norm": 0.7234939256583137, + "learning_rate": 2.960792871939949e-06, + "loss": 0.8224, + "step": 3995 + }, + { + "epoch": 0.3603733597871669, + "grad_norm": 3.2267291970407364, + "learning_rate": 2.9602804481724064e-06, + "loss": 1.0475, + "step": 3996 + }, + { + "epoch": 0.3604635433106371, + "grad_norm": 1.686276168676744, + "learning_rate": 2.9597679424684427e-06, + "loss": 0.9568, + "step": 3997 + }, + { + "epoch": 0.3605537268341074, + "grad_norm": 1.5262752610224204, + "learning_rate": 2.9592553548717848e-06, + "loss": 0.9702, + "step": 3998 + }, + { + "epoch": 0.36064391035757765, + "grad_norm": 1.498075522257053, + "learning_rate": 2.958742685426171e-06, + "loss": 1.0437, + "step": 3999 + }, + { + "epoch": 0.36073409388104793, + "grad_norm": 1.460607970909319, + "learning_rate": 2.9582299341753446e-06, + "loss": 1.0279, + "step": 4000 + }, + { + "epoch": 0.36082427740451817, + "grad_norm": 1.572621787347886, + "learning_rate": 2.957717101163057e-06, + "loss": 0.9054, + "step": 4001 + }, + { + "epoch": 0.36091446092798846, + "grad_norm": 1.3350502324744116, + "learning_rate": 2.9572041864330655e-06, + "loss": 0.9612, + "step": 4002 + }, + { + "epoch": 0.36100464445145874, + "grad_norm": 1.631361028758608, + "learning_rate": 2.9566911900291346e-06, + "loss": 0.9803, + "step": 4003 + }, + { + "epoch": 0.361094827974929, + "grad_norm": 1.6340773214724753, + "learning_rate": 2.9561781119950368e-06, + "loss": 0.8778, + "step": 4004 + }, + { + "epoch": 0.36118501149839927, + "grad_norm": 1.2551189512136116, + "learning_rate": 2.9556649523745493e-06, + "loss": 0.9121, + "step": 4005 + }, + { + "epoch": 
0.3612751950218695, + "grad_norm": 1.4258091754007556, + "learning_rate": 2.955151711211459e-06, + "loss": 1.021, + "step": 4006 + }, + { + "epoch": 0.3613653785453398, + "grad_norm": 1.2091811702462238, + "learning_rate": 2.9546383885495583e-06, + "loss": 0.9301, + "step": 4007 + }, + { + "epoch": 0.36145556206881, + "grad_norm": 1.5211597336229759, + "learning_rate": 2.9541249844326464e-06, + "loss": 0.9094, + "step": 4008 + }, + { + "epoch": 0.3615457455922803, + "grad_norm": 1.4322254604874818, + "learning_rate": 2.9536114989045295e-06, + "loss": 0.9536, + "step": 4009 + }, + { + "epoch": 0.36163592911575054, + "grad_norm": 1.2818556958869511, + "learning_rate": 2.9530979320090216e-06, + "loss": 1.0245, + "step": 4010 + }, + { + "epoch": 0.36172611263922083, + "grad_norm": 1.433817999240803, + "learning_rate": 2.9525842837899422e-06, + "loss": 1.0387, + "step": 4011 + }, + { + "epoch": 0.36181629616269106, + "grad_norm": 1.4244916544569692, + "learning_rate": 2.95207055429112e-06, + "loss": 1.0434, + "step": 4012 + }, + { + "epoch": 0.36190647968616135, + "grad_norm": 1.519498378525597, + "learning_rate": 2.951556743556388e-06, + "loss": 0.9526, + "step": 4013 + }, + { + "epoch": 0.3619966632096316, + "grad_norm": 1.858455526785035, + "learning_rate": 2.951042851629588e-06, + "loss": 1.009, + "step": 4014 + }, + { + "epoch": 0.3620868467331019, + "grad_norm": 1.7519610180459042, + "learning_rate": 2.950528878554568e-06, + "loss": 0.918, + "step": 4015 + }, + { + "epoch": 0.3621770302565721, + "grad_norm": 1.2508951319948745, + "learning_rate": 2.950014824375183e-06, + "loss": 0.9949, + "step": 4016 + }, + { + "epoch": 0.3622672137800424, + "grad_norm": 0.7062336759453028, + "learning_rate": 2.949500689135295e-06, + "loss": 0.7619, + "step": 4017 + }, + { + "epoch": 0.36235739730351263, + "grad_norm": 1.2800374224526927, + "learning_rate": 2.9489864728787722e-06, + "loss": 1.0418, + "step": 4018 + }, + { + "epoch": 0.3624475808269829, + "grad_norm": 1.3993879530095503, + "learning_rate": 2.9484721756494915e-06, + "loss": 1.0143, + "step": 4019 + }, + { + "epoch": 0.36253776435045315, + "grad_norm": 1.4867862591873866, + "learning_rate": 2.9479577974913343e-06, + "loss": 0.9632, + "step": 4020 + }, + { + "epoch": 0.36262794787392344, + "grad_norm": 1.4014873386672606, + "learning_rate": 2.9474433384481908e-06, + "loss": 0.9582, + "step": 4021 + }, + { + "epoch": 0.3627181313973937, + "grad_norm": 1.2543103764133154, + "learning_rate": 2.9469287985639577e-06, + "loss": 0.9536, + "step": 4022 + }, + { + "epoch": 0.36280831492086396, + "grad_norm": 1.372291324504051, + "learning_rate": 2.9464141778825384e-06, + "loss": 1.0112, + "step": 4023 + }, + { + "epoch": 0.3628984984443342, + "grad_norm": 1.3928664926767063, + "learning_rate": 2.9458994764478427e-06, + "loss": 0.9985, + "step": 4024 + }, + { + "epoch": 0.3629886819678045, + "grad_norm": 1.5413639269672283, + "learning_rate": 2.9453846943037883e-06, + "loss": 1.0033, + "step": 4025 + }, + { + "epoch": 0.36307886549127477, + "grad_norm": 1.3447334771404467, + "learning_rate": 2.9448698314942987e-06, + "loss": 0.9145, + "step": 4026 + }, + { + "epoch": 0.363169049014745, + "grad_norm": 0.8288340502043625, + "learning_rate": 2.944354888063305e-06, + "loss": 0.8688, + "step": 4027 + }, + { + "epoch": 0.3632592325382153, + "grad_norm": 1.3603963851390528, + "learning_rate": 2.9438398640547453e-06, + "loss": 0.9325, + "step": 4028 + }, + { + "epoch": 0.3633494160616855, + "grad_norm": 1.6434353690488106, + "learning_rate": 
2.943324759512564e-06, + "loss": 0.9052, + "step": 4029 + }, + { + "epoch": 0.3634395995851558, + "grad_norm": 1.4836122661316975, + "learning_rate": 2.9428095744807134e-06, + "loss": 0.993, + "step": 4030 + }, + { + "epoch": 0.36352978310862605, + "grad_norm": 1.4913872775054529, + "learning_rate": 2.942294309003151e-06, + "loss": 0.919, + "step": 4031 + }, + { + "epoch": 0.36361996663209634, + "grad_norm": 1.5311670086227296, + "learning_rate": 2.941778963123843e-06, + "loss": 0.983, + "step": 4032 + }, + { + "epoch": 0.36371015015556657, + "grad_norm": 1.4723352542807377, + "learning_rate": 2.94126353688676e-06, + "loss": 0.9619, + "step": 4033 + }, + { + "epoch": 0.36380033367903686, + "grad_norm": 1.6959166263429548, + "learning_rate": 2.9407480303358825e-06, + "loss": 0.9107, + "step": 4034 + }, + { + "epoch": 0.3638905172025071, + "grad_norm": 1.2795914405475661, + "learning_rate": 2.940232443515195e-06, + "loss": 0.9729, + "step": 4035 + }, + { + "epoch": 0.3639807007259774, + "grad_norm": 1.4430374798312176, + "learning_rate": 2.9397167764686916e-06, + "loss": 0.9814, + "step": 4036 + }, + { + "epoch": 0.3640708842494476, + "grad_norm": 1.4940858120307374, + "learning_rate": 2.9392010292403714e-06, + "loss": 0.9367, + "step": 4037 + }, + { + "epoch": 0.3641610677729179, + "grad_norm": 1.470775992524823, + "learning_rate": 2.9386852018742404e-06, + "loss": 0.9552, + "step": 4038 + }, + { + "epoch": 0.36425125129638813, + "grad_norm": 1.493462941187916, + "learning_rate": 2.938169294414312e-06, + "loss": 0.9373, + "step": 4039 + }, + { + "epoch": 0.3643414348198584, + "grad_norm": 1.3480676437719115, + "learning_rate": 2.9376533069046067e-06, + "loss": 0.9931, + "step": 4040 + }, + { + "epoch": 0.36443161834332866, + "grad_norm": 1.0041036448877272, + "learning_rate": 2.9371372393891514e-06, + "loss": 0.8756, + "step": 4041 + }, + { + "epoch": 0.36452180186679894, + "grad_norm": 1.1568686917801247, + "learning_rate": 2.936621091911979e-06, + "loss": 1.0369, + "step": 4042 + }, + { + "epoch": 0.3646119853902692, + "grad_norm": 1.618211873279992, + "learning_rate": 2.936104864517131e-06, + "loss": 0.8845, + "step": 4043 + }, + { + "epoch": 0.36470216891373947, + "grad_norm": 1.4904878533248145, + "learning_rate": 2.9355885572486535e-06, + "loss": 0.9181, + "step": 4044 + }, + { + "epoch": 0.3647923524372097, + "grad_norm": 1.5136251725406156, + "learning_rate": 2.9350721701506026e-06, + "loss": 0.9978, + "step": 4045 + }, + { + "epoch": 0.36488253596068, + "grad_norm": 1.2987365690386798, + "learning_rate": 2.9345557032670375e-06, + "loss": 0.9538, + "step": 4046 + }, + { + "epoch": 0.3649727194841502, + "grad_norm": 1.6885298659304306, + "learning_rate": 2.934039156642027e-06, + "loss": 1.0083, + "step": 4047 + }, + { + "epoch": 0.3650629030076205, + "grad_norm": 1.6703749727961241, + "learning_rate": 2.9335225303196454e-06, + "loss": 0.8234, + "step": 4048 + }, + { + "epoch": 0.36515308653109074, + "grad_norm": 1.5052150827364312, + "learning_rate": 2.933005824343974e-06, + "loss": 0.9312, + "step": 4049 + }, + { + "epoch": 0.36524327005456103, + "grad_norm": 1.5079802582862627, + "learning_rate": 2.932489038759101e-06, + "loss": 0.9324, + "step": 4050 + }, + { + "epoch": 0.3653334535780313, + "grad_norm": 1.229658120178803, + "learning_rate": 2.9319721736091215e-06, + "loss": 0.8516, + "step": 4051 + }, + { + "epoch": 0.36542363710150155, + "grad_norm": 1.233697632950773, + "learning_rate": 2.9314552289381377e-06, + "loss": 0.8683, + "step": 4052 + }, + { + "epoch": 
0.36551382062497184, + "grad_norm": 1.594885253485811, + "learning_rate": 2.9309382047902574e-06, + "loss": 0.9297, + "step": 4053 + }, + { + "epoch": 0.3656040041484421, + "grad_norm": 1.2376897223335668, + "learning_rate": 2.9304211012095963e-06, + "loss": 1.0087, + "step": 4054 + }, + { + "epoch": 0.36569418767191236, + "grad_norm": 1.5681441029874694, + "learning_rate": 2.929903918240277e-06, + "loss": 1.0225, + "step": 4055 + }, + { + "epoch": 0.3657843711953826, + "grad_norm": 0.744922217462518, + "learning_rate": 2.9293866559264273e-06, + "loss": 0.7968, + "step": 4056 + }, + { + "epoch": 0.3658745547188529, + "grad_norm": 1.5819347210857764, + "learning_rate": 2.928869314312184e-06, + "loss": 0.9632, + "step": 4057 + }, + { + "epoch": 0.3659647382423231, + "grad_norm": 2.0669238878990974, + "learning_rate": 2.9283518934416892e-06, + "loss": 0.9673, + "step": 4058 + }, + { + "epoch": 0.3660549217657934, + "grad_norm": 1.4578803403387453, + "learning_rate": 2.927834393359092e-06, + "loss": 0.9742, + "step": 4059 + }, + { + "epoch": 0.36614510528926364, + "grad_norm": 1.463216870017433, + "learning_rate": 2.927316814108548e-06, + "loss": 1.0296, + "step": 4060 + }, + { + "epoch": 0.36623528881273393, + "grad_norm": 1.5418224084217118, + "learning_rate": 2.92679915573422e-06, + "loss": 0.8608, + "step": 4061 + }, + { + "epoch": 0.36632547233620416, + "grad_norm": 1.3876268878687161, + "learning_rate": 2.926281418280278e-06, + "loss": 0.9029, + "step": 4062 + }, + { + "epoch": 0.36641565585967445, + "grad_norm": 1.3086107850745212, + "learning_rate": 2.925763601790899e-06, + "loss": 0.9614, + "step": 4063 + }, + { + "epoch": 0.3665058393831447, + "grad_norm": 1.3878991489379482, + "learning_rate": 2.9252457063102635e-06, + "loss": 0.9181, + "step": 4064 + }, + { + "epoch": 0.36659602290661497, + "grad_norm": 1.5362393584166065, + "learning_rate": 2.9247277318825626e-06, + "loss": 0.9258, + "step": 4065 + }, + { + "epoch": 0.3666862064300852, + "grad_norm": 1.249653100515422, + "learning_rate": 2.924209678551993e-06, + "loss": 1.0528, + "step": 4066 + }, + { + "epoch": 0.3667763899535555, + "grad_norm": 1.5742777394490093, + "learning_rate": 2.923691546362757e-06, + "loss": 0.968, + "step": 4067 + }, + { + "epoch": 0.3668665734770257, + "grad_norm": 1.4886695175132212, + "learning_rate": 2.9231733353590663e-06, + "loss": 0.9243, + "step": 4068 + }, + { + "epoch": 0.366956757000496, + "grad_norm": 1.5138982788090265, + "learning_rate": 2.922655045585136e-06, + "loss": 1.025, + "step": 4069 + }, + { + "epoch": 0.36704694052396625, + "grad_norm": 0.8722199144015655, + "learning_rate": 2.92213667708519e-06, + "loss": 0.8826, + "step": 4070 + }, + { + "epoch": 0.36713712404743654, + "grad_norm": 1.6739965507672323, + "learning_rate": 2.921618229903457e-06, + "loss": 0.9719, + "step": 4071 + }, + { + "epoch": 0.36722730757090677, + "grad_norm": 1.463749672802531, + "learning_rate": 2.9210997040841752e-06, + "loss": 0.8796, + "step": 4072 + }, + { + "epoch": 0.36731749109437706, + "grad_norm": 24.402532277265177, + "learning_rate": 2.9205810996715885e-06, + "loss": 0.9261, + "step": 4073 + }, + { + "epoch": 0.36740767461784735, + "grad_norm": 1.4207021046703787, + "learning_rate": 2.9200624167099456e-06, + "loss": 0.9248, + "step": 4074 + }, + { + "epoch": 0.3674978581413176, + "grad_norm": 1.3612601702461704, + "learning_rate": 2.919543655243505e-06, + "loss": 0.8734, + "step": 4075 + }, + { + "epoch": 0.36758804166478787, + "grad_norm": 1.5451799603931609, + "learning_rate": 
2.919024815316529e-06, + "loss": 0.9913, + "step": 4076 + }, + { + "epoch": 0.3676782251882581, + "grad_norm": 1.81637678225005, + "learning_rate": 2.9185058969732877e-06, + "loss": 1.0833, + "step": 4077 + }, + { + "epoch": 0.3677684087117284, + "grad_norm": 1.523174321843935, + "learning_rate": 2.917986900258059e-06, + "loss": 0.9929, + "step": 4078 + }, + { + "epoch": 0.3678585922351986, + "grad_norm": 1.3711138775426805, + "learning_rate": 2.917467825215126e-06, + "loss": 0.954, + "step": 4079 + }, + { + "epoch": 0.3679487757586689, + "grad_norm": 1.431033642003534, + "learning_rate": 2.9169486718887803e-06, + "loss": 0.9126, + "step": 4080 + }, + { + "epoch": 0.36803895928213914, + "grad_norm": 1.2928025522049884, + "learning_rate": 2.9164294403233173e-06, + "loss": 1.012, + "step": 4081 + }, + { + "epoch": 0.36812914280560943, + "grad_norm": 1.733973001166516, + "learning_rate": 2.915910130563041e-06, + "loss": 0.9002, + "step": 4082 + }, + { + "epoch": 0.36821932632907967, + "grad_norm": 1.490329004485492, + "learning_rate": 2.915390742652262e-06, + "loss": 0.8994, + "step": 4083 + }, + { + "epoch": 0.36830950985254995, + "grad_norm": 2.1862161411043055, + "learning_rate": 2.914871276635298e-06, + "loss": 0.8915, + "step": 4084 + }, + { + "epoch": 0.3683996933760202, + "grad_norm": 1.8386883843715873, + "learning_rate": 2.914351732556472e-06, + "loss": 0.9358, + "step": 4085 + }, + { + "epoch": 0.3684898768994905, + "grad_norm": 1.2557868995933372, + "learning_rate": 2.9138321104601144e-06, + "loss": 0.8796, + "step": 4086 + }, + { + "epoch": 0.3685800604229607, + "grad_norm": 1.4555932688417754, + "learning_rate": 2.9133124103905623e-06, + "loss": 0.8903, + "step": 4087 + }, + { + "epoch": 0.368670243946431, + "grad_norm": 1.806504494976889, + "learning_rate": 2.9127926323921596e-06, + "loss": 0.9427, + "step": 4088 + }, + { + "epoch": 0.36876042746990123, + "grad_norm": 1.6438774204545255, + "learning_rate": 2.912272776509256e-06, + "loss": 0.9786, + "step": 4089 + }, + { + "epoch": 0.3688506109933715, + "grad_norm": 1.612784468524186, + "learning_rate": 2.911752842786209e-06, + "loss": 0.9256, + "step": 4090 + }, + { + "epoch": 0.36894079451684175, + "grad_norm": 1.5341030711196049, + "learning_rate": 2.911232831267383e-06, + "loss": 0.9041, + "step": 4091 + }, + { + "epoch": 0.36903097804031204, + "grad_norm": 1.6159050320631763, + "learning_rate": 2.910712741997146e-06, + "loss": 0.9864, + "step": 4092 + }, + { + "epoch": 0.3691211615637823, + "grad_norm": 1.367210605971045, + "learning_rate": 2.910192575019877e-06, + "loss": 0.9489, + "step": 4093 + }, + { + "epoch": 0.36921134508725256, + "grad_norm": 1.2605711261696781, + "learning_rate": 2.9096723303799583e-06, + "loss": 0.9919, + "step": 4094 + }, + { + "epoch": 0.3693015286107228, + "grad_norm": 1.6414958277593243, + "learning_rate": 2.9091520081217805e-06, + "loss": 0.9665, + "step": 4095 + }, + { + "epoch": 0.3693917121341931, + "grad_norm": 1.507612620333314, + "learning_rate": 2.908631608289741e-06, + "loss": 0.9016, + "step": 4096 + }, + { + "epoch": 0.3694818956576633, + "grad_norm": 1.518329841673961, + "learning_rate": 2.9081111309282423e-06, + "loss": 0.9459, + "step": 4097 + }, + { + "epoch": 0.3695720791811336, + "grad_norm": 0.7524243748325968, + "learning_rate": 2.9075905760816942e-06, + "loss": 0.8529, + "step": 4098 + }, + { + "epoch": 0.3696622627046039, + "grad_norm": 1.540913819019105, + "learning_rate": 2.907069943794514e-06, + "loss": 0.9822, + "step": 4099 + }, + { + "epoch": 0.3697524462280741, + 
"grad_norm": 1.6487038202882534, + "learning_rate": 2.906549234111125e-06, + "loss": 0.9358, + "step": 4100 + }, + { + "epoch": 0.3698426297515444, + "grad_norm": 1.6047096406919121, + "learning_rate": 2.906028447075956e-06, + "loss": 1.0158, + "step": 4101 + }, + { + "epoch": 0.36993281327501465, + "grad_norm": 1.5676814570187854, + "learning_rate": 2.905507582733445e-06, + "loss": 1.0417, + "step": 4102 + }, + { + "epoch": 0.37002299679848494, + "grad_norm": 1.3540104653673848, + "learning_rate": 2.904986641128033e-06, + "loss": 1.0717, + "step": 4103 + }, + { + "epoch": 0.37011318032195517, + "grad_norm": 1.2824600832194621, + "learning_rate": 2.9044656223041716e-06, + "loss": 0.943, + "step": 4104 + }, + { + "epoch": 0.37020336384542546, + "grad_norm": 1.4716068647221938, + "learning_rate": 2.9039445263063157e-06, + "loss": 0.9074, + "step": 4105 + }, + { + "epoch": 0.3702935473688957, + "grad_norm": 1.806019279237669, + "learning_rate": 2.903423353178929e-06, + "loss": 0.8587, + "step": 4106 + }, + { + "epoch": 0.370383730892366, + "grad_norm": 1.4009739127905856, + "learning_rate": 2.9029021029664802e-06, + "loss": 0.9574, + "step": 4107 + }, + { + "epoch": 0.3704739144158362, + "grad_norm": 1.3215151844949218, + "learning_rate": 2.9023807757134455e-06, + "loss": 1.0088, + "step": 4108 + }, + { + "epoch": 0.3705640979393065, + "grad_norm": 1.4687899320328002, + "learning_rate": 2.901859371464307e-06, + "loss": 1.0201, + "step": 4109 + }, + { + "epoch": 0.37065428146277674, + "grad_norm": 2.5301296439144476, + "learning_rate": 2.9013378902635535e-06, + "loss": 0.9953, + "step": 4110 + }, + { + "epoch": 0.370744464986247, + "grad_norm": 1.5302273975255498, + "learning_rate": 2.9008163321556823e-06, + "loss": 0.9323, + "step": 4111 + }, + { + "epoch": 0.37083464850971726, + "grad_norm": 1.511167046074241, + "learning_rate": 2.900294697185194e-06, + "loss": 0.9648, + "step": 4112 + }, + { + "epoch": 0.37092483203318755, + "grad_norm": 1.2677726890175056, + "learning_rate": 2.899772985396599e-06, + "loss": 0.9795, + "step": 4113 + }, + { + "epoch": 0.3710150155566578, + "grad_norm": 1.4972534465817005, + "learning_rate": 2.8992511968344104e-06, + "loss": 0.9511, + "step": 4114 + }, + { + "epoch": 0.37110519908012807, + "grad_norm": 2.8939674641771322, + "learning_rate": 2.8987293315431523e-06, + "loss": 0.9013, + "step": 4115 + }, + { + "epoch": 0.3711953826035983, + "grad_norm": 1.2013573480090578, + "learning_rate": 2.898207389567351e-06, + "loss": 0.8934, + "step": 4116 + }, + { + "epoch": 0.3712855661270686, + "grad_norm": 1.4453796371125611, + "learning_rate": 2.897685370951543e-06, + "loss": 0.9507, + "step": 4117 + }, + { + "epoch": 0.3713757496505388, + "grad_norm": 2.104526514184953, + "learning_rate": 2.89716327574027e-06, + "loss": 0.8559, + "step": 4118 + }, + { + "epoch": 0.3714659331740091, + "grad_norm": 1.3184505628909318, + "learning_rate": 2.8966411039780787e-06, + "loss": 1.0385, + "step": 4119 + }, + { + "epoch": 0.37155611669747934, + "grad_norm": 1.4021727901373244, + "learning_rate": 2.8961188557095248e-06, + "loss": 0.9629, + "step": 4120 + }, + { + "epoch": 0.37164630022094963, + "grad_norm": 1.5367310190414043, + "learning_rate": 2.895596530979168e-06, + "loss": 0.8981, + "step": 4121 + }, + { + "epoch": 0.3717364837444199, + "grad_norm": 1.4776463642981645, + "learning_rate": 2.895074129831578e-06, + "loss": 0.9711, + "step": 4122 + }, + { + "epoch": 0.37182666726789015, + "grad_norm": 1.3960380641289432, + "learning_rate": 2.8945516523113275e-06, + "loss": 
0.8587, + "step": 4123 + }, + { + "epoch": 0.37191685079136044, + "grad_norm": 0.7799657565938684, + "learning_rate": 2.894029098462998e-06, + "loss": 0.7792, + "step": 4124 + }, + { + "epoch": 0.3720070343148307, + "grad_norm": 1.3374256969027167, + "learning_rate": 2.8935064683311756e-06, + "loss": 0.9808, + "step": 4125 + }, + { + "epoch": 0.37209721783830096, + "grad_norm": 0.7124368974281706, + "learning_rate": 2.8929837619604544e-06, + "loss": 0.8828, + "step": 4126 + }, + { + "epoch": 0.3721874013617712, + "grad_norm": 1.5063591312113582, + "learning_rate": 2.8924609793954346e-06, + "loss": 1.0044, + "step": 4127 + }, + { + "epoch": 0.3722775848852415, + "grad_norm": 1.5968678961370013, + "learning_rate": 2.891938120680724e-06, + "loss": 0.9178, + "step": 4128 + }, + { + "epoch": 0.3723677684087117, + "grad_norm": 1.2138440597184557, + "learning_rate": 2.8914151858609343e-06, + "loss": 0.9445, + "step": 4129 + }, + { + "epoch": 0.372457951932182, + "grad_norm": 1.4952929871181866, + "learning_rate": 2.8908921749806858e-06, + "loss": 0.9804, + "step": 4130 + }, + { + "epoch": 0.37254813545565224, + "grad_norm": 1.3736150442379316, + "learning_rate": 2.890369088084605e-06, + "loss": 0.9657, + "step": 4131 + }, + { + "epoch": 0.37263831897912253, + "grad_norm": 1.342856398360501, + "learning_rate": 2.889845925217323e-06, + "loss": 0.9845, + "step": 4132 + }, + { + "epoch": 0.37272850250259276, + "grad_norm": 1.466676750292938, + "learning_rate": 2.8893226864234813e-06, + "loss": 0.9676, + "step": 4133 + }, + { + "epoch": 0.37281868602606305, + "grad_norm": 1.4144681907089554, + "learning_rate": 2.8887993717477236e-06, + "loss": 0.9747, + "step": 4134 + }, + { + "epoch": 0.3729088695495333, + "grad_norm": 1.513820793455783, + "learning_rate": 2.8882759812347035e-06, + "loss": 0.8437, + "step": 4135 + }, + { + "epoch": 0.3729990530730036, + "grad_norm": 1.4874462278247795, + "learning_rate": 2.887752514929078e-06, + "loss": 0.9556, + "step": 4136 + }, + { + "epoch": 0.3730892365964738, + "grad_norm": 1.5889651708512893, + "learning_rate": 2.887228972875513e-06, + "loss": 0.9071, + "step": 4137 + }, + { + "epoch": 0.3731794201199441, + "grad_norm": 1.5465056623866418, + "learning_rate": 2.88670535511868e-06, + "loss": 0.9628, + "step": 4138 + }, + { + "epoch": 0.3732696036434143, + "grad_norm": 1.5777609376874273, + "learning_rate": 2.886181661703257e-06, + "loss": 0.8947, + "step": 4139 + }, + { + "epoch": 0.3733597871668846, + "grad_norm": 1.5294573664339701, + "learning_rate": 2.8856578926739285e-06, + "loss": 0.9191, + "step": 4140 + }, + { + "epoch": 0.37344997069035485, + "grad_norm": 1.5651249008794101, + "learning_rate": 2.8851340480753846e-06, + "loss": 0.9191, + "step": 4141 + }, + { + "epoch": 0.37354015421382514, + "grad_norm": 1.5979196196142944, + "learning_rate": 2.8846101279523232e-06, + "loss": 0.9681, + "step": 4142 + }, + { + "epoch": 0.37363033773729537, + "grad_norm": 1.4456183857319116, + "learning_rate": 2.8840861323494487e-06, + "loss": 0.9352, + "step": 4143 + }, + { + "epoch": 0.37372052126076566, + "grad_norm": 1.5720561960188197, + "learning_rate": 2.88356206131147e-06, + "loss": 1.0194, + "step": 4144 + }, + { + "epoch": 0.37381070478423595, + "grad_norm": 1.6122190659522275, + "learning_rate": 2.883037914883104e-06, + "loss": 0.9485, + "step": 4145 + }, + { + "epoch": 0.3739008883077062, + "grad_norm": 2.4379546890679666, + "learning_rate": 2.882513693109075e-06, + "loss": 0.9259, + "step": 4146 + }, + { + "epoch": 0.37399107183117647, + "grad_norm": 
1.262843478095909, + "learning_rate": 2.8819893960341106e-06, + "loss": 0.9383, + "step": 4147 + }, + { + "epoch": 0.3740812553546467, + "grad_norm": 1.7900417929304393, + "learning_rate": 2.881465023702948e-06, + "loss": 1.0081, + "step": 4148 + }, + { + "epoch": 0.374171438878117, + "grad_norm": 1.3493270910043909, + "learning_rate": 2.8809405761603294e-06, + "loss": 1.0402, + "step": 4149 + }, + { + "epoch": 0.3742616224015872, + "grad_norm": 1.7083150195869146, + "learning_rate": 2.880416053451003e-06, + "loss": 0.8805, + "step": 4150 + }, + { + "epoch": 0.3743518059250575, + "grad_norm": 1.5427415125144668, + "learning_rate": 2.879891455619725e-06, + "loss": 1.0492, + "step": 4151 + }, + { + "epoch": 0.37444198944852775, + "grad_norm": 1.2280881589843164, + "learning_rate": 2.879366782711256e-06, + "loss": 0.9602, + "step": 4152 + }, + { + "epoch": 0.37453217297199803, + "grad_norm": 1.2806092497646915, + "learning_rate": 2.8788420347703643e-06, + "loss": 0.981, + "step": 4153 + }, + { + "epoch": 0.37462235649546827, + "grad_norm": 0.7347699584508668, + "learning_rate": 2.8783172118418244e-06, + "loss": 0.8266, + "step": 4154 + }, + { + "epoch": 0.37471254001893856, + "grad_norm": 1.3340622081921585, + "learning_rate": 2.877792313970417e-06, + "loss": 0.9656, + "step": 4155 + }, + { + "epoch": 0.3748027235424088, + "grad_norm": 1.2528552823410972, + "learning_rate": 2.8772673412009293e-06, + "loss": 1.0107, + "step": 4156 + }, + { + "epoch": 0.3748929070658791, + "grad_norm": 1.5550837179628134, + "learning_rate": 2.8767422935781545e-06, + "loss": 1.0115, + "step": 4157 + }, + { + "epoch": 0.3749830905893493, + "grad_norm": 1.2708466273795436, + "learning_rate": 2.8762171711468935e-06, + "loss": 0.9966, + "step": 4158 + }, + { + "epoch": 0.3750732741128196, + "grad_norm": 1.3390124886965222, + "learning_rate": 2.875691973951952e-06, + "loss": 0.9589, + "step": 4159 + }, + { + "epoch": 0.37516345763628983, + "grad_norm": 0.6928516732008412, + "learning_rate": 2.8751667020381425e-06, + "loss": 0.8751, + "step": 4160 + }, + { + "epoch": 0.3752536411597601, + "grad_norm": 1.4630509862874097, + "learning_rate": 2.8746413554502837e-06, + "loss": 1.0249, + "step": 4161 + }, + { + "epoch": 0.37534382468323035, + "grad_norm": 1.3751108818428683, + "learning_rate": 2.8741159342332027e-06, + "loss": 0.9024, + "step": 4162 + }, + { + "epoch": 0.37543400820670064, + "grad_norm": 1.7293039366993903, + "learning_rate": 2.87359043843173e-06, + "loss": 1.0127, + "step": 4163 + }, + { + "epoch": 0.3755241917301709, + "grad_norm": 1.2896003756940262, + "learning_rate": 2.873064868090704e-06, + "loss": 0.9959, + "step": 4164 + }, + { + "epoch": 0.37561437525364116, + "grad_norm": 1.4241074025230325, + "learning_rate": 2.8725392232549697e-06, + "loss": 1.0528, + "step": 4165 + }, + { + "epoch": 0.3757045587771114, + "grad_norm": 1.766168696320112, + "learning_rate": 2.872013503969378e-06, + "loss": 1.0323, + "step": 4166 + }, + { + "epoch": 0.3757947423005817, + "grad_norm": 1.4006422766420428, + "learning_rate": 2.8714877102787853e-06, + "loss": 0.9748, + "step": 4167 + }, + { + "epoch": 0.3758849258240519, + "grad_norm": 1.4936402124895556, + "learning_rate": 2.8709618422280564e-06, + "loss": 0.9662, + "step": 4168 + }, + { + "epoch": 0.3759751093475222, + "grad_norm": 1.2433912574546617, + "learning_rate": 2.8704358998620605e-06, + "loss": 0.9406, + "step": 4169 + }, + { + "epoch": 0.3760652928709925, + "grad_norm": 1.4184897230258082, + "learning_rate": 2.8699098832256735e-06, + "loss": 0.9853, + 
"step": 4170 + }, + { + "epoch": 0.37615547639446273, + "grad_norm": 1.5447501027986486, + "learning_rate": 2.86938379236378e-06, + "loss": 0.8764, + "step": 4171 + }, + { + "epoch": 0.376245659917933, + "grad_norm": 2.2181735297274194, + "learning_rate": 2.868857627321266e-06, + "loss": 0.8885, + "step": 4172 + }, + { + "epoch": 0.37633584344140325, + "grad_norm": 1.2007379507921256, + "learning_rate": 2.8683313881430296e-06, + "loss": 0.9621, + "step": 4173 + }, + { + "epoch": 0.37642602696487354, + "grad_norm": 1.4014376207256447, + "learning_rate": 2.8678050748739706e-06, + "loss": 0.9548, + "step": 4174 + }, + { + "epoch": 0.37651621048834377, + "grad_norm": 1.5314520877687776, + "learning_rate": 2.8672786875589976e-06, + "loss": 0.9273, + "step": 4175 + }, + { + "epoch": 0.37660639401181406, + "grad_norm": 1.4201289764448013, + "learning_rate": 2.866752226243025e-06, + "loss": 0.9173, + "step": 4176 + }, + { + "epoch": 0.3766965775352843, + "grad_norm": 1.4795204688664498, + "learning_rate": 2.8662256909709733e-06, + "loss": 1.0038, + "step": 4177 + }, + { + "epoch": 0.3767867610587546, + "grad_norm": 1.4552563039764033, + "learning_rate": 2.865699081787769e-06, + "loss": 1.0001, + "step": 4178 + }, + { + "epoch": 0.3768769445822248, + "grad_norm": 1.3920412031685592, + "learning_rate": 2.8651723987383465e-06, + "loss": 0.9469, + "step": 4179 + }, + { + "epoch": 0.3769671281056951, + "grad_norm": 1.4061658198274314, + "learning_rate": 2.8646456418676437e-06, + "loss": 0.9295, + "step": 4180 + }, + { + "epoch": 0.37705731162916534, + "grad_norm": 0.764966019393091, + "learning_rate": 2.8641188112206067e-06, + "loss": 0.8266, + "step": 4181 + }, + { + "epoch": 0.3771474951526356, + "grad_norm": 1.8377784751540716, + "learning_rate": 2.863591906842189e-06, + "loss": 0.9912, + "step": 4182 + }, + { + "epoch": 0.37723767867610586, + "grad_norm": 1.3822461277643072, + "learning_rate": 2.863064928777347e-06, + "loss": 0.9905, + "step": 4183 + }, + { + "epoch": 0.37732786219957615, + "grad_norm": 1.7178036945707895, + "learning_rate": 2.862537877071047e-06, + "loss": 0.9433, + "step": 4184 + }, + { + "epoch": 0.3774180457230464, + "grad_norm": 0.7191246548961313, + "learning_rate": 2.8620107517682597e-06, + "loss": 0.7703, + "step": 4185 + }, + { + "epoch": 0.37750822924651667, + "grad_norm": 1.2949689623526603, + "learning_rate": 2.8614835529139618e-06, + "loss": 0.8991, + "step": 4186 + }, + { + "epoch": 0.3775984127699869, + "grad_norm": 1.475719069730453, + "learning_rate": 2.8609562805531367e-06, + "loss": 0.9159, + "step": 4187 + }, + { + "epoch": 0.3776885962934572, + "grad_norm": 6.143782485767369, + "learning_rate": 2.8604289347307746e-06, + "loss": 0.9966, + "step": 4188 + }, + { + "epoch": 0.3777787798169274, + "grad_norm": 1.4909240806485797, + "learning_rate": 2.859901515491871e-06, + "loss": 0.9587, + "step": 4189 + }, + { + "epoch": 0.3778689633403977, + "grad_norm": 1.4592659102842513, + "learning_rate": 2.8593740228814298e-06, + "loss": 0.8535, + "step": 4190 + }, + { + "epoch": 0.37795914686386795, + "grad_norm": 1.8160966506773368, + "learning_rate": 2.8588464569444574e-06, + "loss": 0.9488, + "step": 4191 + }, + { + "epoch": 0.37804933038733823, + "grad_norm": 1.3362971485409256, + "learning_rate": 2.8583188177259697e-06, + "loss": 0.9468, + "step": 4192 + }, + { + "epoch": 0.3781395139108085, + "grad_norm": 1.6139023441943658, + "learning_rate": 2.857791105270988e-06, + "loss": 0.9443, + "step": 4193 + }, + { + "epoch": 0.37822969743427876, + "grad_norm": 
1.5244036618549344, + "learning_rate": 2.857263319624539e-06, + "loss": 0.984, + "step": 4194 + }, + { + "epoch": 0.37831988095774904, + "grad_norm": 1.4430008831484096, + "learning_rate": 2.856735460831657e-06, + "loss": 0.9413, + "step": 4195 + }, + { + "epoch": 0.3784100644812193, + "grad_norm": 1.3554235885088446, + "learning_rate": 2.856207528937382e-06, + "loss": 0.9761, + "step": 4196 + }, + { + "epoch": 0.37850024800468957, + "grad_norm": 1.404386514871991, + "learning_rate": 2.855679523986759e-06, + "loss": 0.9456, + "step": 4197 + }, + { + "epoch": 0.3785904315281598, + "grad_norm": 2.102987494090032, + "learning_rate": 2.8551514460248406e-06, + "loss": 1.008, + "step": 4198 + }, + { + "epoch": 0.3786806150516301, + "grad_norm": 1.3579747672623075, + "learning_rate": 2.8546232950966868e-06, + "loss": 0.964, + "step": 4199 + }, + { + "epoch": 0.3787707985751003, + "grad_norm": 1.887299568700693, + "learning_rate": 2.85409507124736e-06, + "loss": 0.9139, + "step": 4200 + }, + { + "epoch": 0.3788609820985706, + "grad_norm": 1.3745776308019844, + "learning_rate": 2.8535667745219324e-06, + "loss": 0.9549, + "step": 4201 + }, + { + "epoch": 0.37895116562204084, + "grad_norm": 1.4057743539669658, + "learning_rate": 2.853038404965481e-06, + "loss": 0.9493, + "step": 4202 + }, + { + "epoch": 0.37904134914551113, + "grad_norm": 2.4904022040566, + "learning_rate": 2.8525099626230894e-06, + "loss": 1.0378, + "step": 4203 + }, + { + "epoch": 0.37913153266898136, + "grad_norm": 1.6570895963951247, + "learning_rate": 2.8519814475398472e-06, + "loss": 0.8979, + "step": 4204 + }, + { + "epoch": 0.37922171619245165, + "grad_norm": 1.1313390686209732, + "learning_rate": 2.8514528597608502e-06, + "loss": 0.9357, + "step": 4205 + }, + { + "epoch": 0.3793118997159219, + "grad_norm": 1.2160259517203007, + "learning_rate": 2.8509241993312004e-06, + "loss": 0.9965, + "step": 4206 + }, + { + "epoch": 0.3794020832393922, + "grad_norm": 1.9209581374572007, + "learning_rate": 2.850395466296006e-06, + "loss": 1.0018, + "step": 4207 + }, + { + "epoch": 0.3794922667628624, + "grad_norm": 1.5305920958652475, + "learning_rate": 2.849866660700381e-06, + "loss": 0.9864, + "step": 4208 + }, + { + "epoch": 0.3795824502863327, + "grad_norm": 1.5271552474439303, + "learning_rate": 2.8493377825894464e-06, + "loss": 1.0195, + "step": 4209 + }, + { + "epoch": 0.37967263380980293, + "grad_norm": 1.759322539338457, + "learning_rate": 2.848808832008329e-06, + "loss": 1.0228, + "step": 4210 + }, + { + "epoch": 0.3797628173332732, + "grad_norm": 1.3564488717809, + "learning_rate": 2.848279809002162e-06, + "loss": 0.9953, + "step": 4211 + }, + { + "epoch": 0.37985300085674345, + "grad_norm": 1.457949678727388, + "learning_rate": 2.8477507136160842e-06, + "loss": 0.9704, + "step": 4212 + }, + { + "epoch": 0.37994318438021374, + "grad_norm": 0.7695255182507827, + "learning_rate": 2.847221545895241e-06, + "loss": 0.8174, + "step": 4213 + }, + { + "epoch": 0.38003336790368397, + "grad_norm": 1.2407397104267464, + "learning_rate": 2.846692305884785e-06, + "loss": 0.9571, + "step": 4214 + }, + { + "epoch": 0.38012355142715426, + "grad_norm": 1.323652973671641, + "learning_rate": 2.8461629936298718e-06, + "loss": 1.003, + "step": 4215 + }, + { + "epoch": 0.3802137349506245, + "grad_norm": 2.1194799195544483, + "learning_rate": 2.845633609175666e-06, + "loss": 1.0145, + "step": 4216 + }, + { + "epoch": 0.3803039184740948, + "grad_norm": 1.4873366674941266, + "learning_rate": 2.8451041525673383e-06, + "loss": 0.9949, + "step": 4217 + }, 
+ { + "epoch": 0.38039410199756507, + "grad_norm": 1.7669313037646683, + "learning_rate": 2.8445746238500647e-06, + "loss": 0.9937, + "step": 4218 + }, + { + "epoch": 0.3804842855210353, + "grad_norm": 1.6190306254571631, + "learning_rate": 2.844045023069027e-06, + "loss": 0.9489, + "step": 4219 + }, + { + "epoch": 0.3805744690445056, + "grad_norm": 1.4460179437339165, + "learning_rate": 2.8435153502694136e-06, + "loss": 1.003, + "step": 4220 + }, + { + "epoch": 0.3806646525679758, + "grad_norm": 1.849381784295791, + "learning_rate": 2.84298560549642e-06, + "loss": 0.9025, + "step": 4221 + }, + { + "epoch": 0.3807548360914461, + "grad_norm": 1.7477851202852923, + "learning_rate": 2.8424557887952462e-06, + "loss": 0.9807, + "step": 4222 + }, + { + "epoch": 0.38084501961491635, + "grad_norm": 1.6189183357523267, + "learning_rate": 2.841925900211099e-06, + "loss": 0.8917, + "step": 4223 + }, + { + "epoch": 0.38093520313838664, + "grad_norm": 1.3120479713601902, + "learning_rate": 2.841395939789192e-06, + "loss": 1.0119, + "step": 4224 + }, + { + "epoch": 0.38102538666185687, + "grad_norm": 1.5113745009109372, + "learning_rate": 2.8408659075747435e-06, + "loss": 0.9883, + "step": 4225 + }, + { + "epoch": 0.38111557018532716, + "grad_norm": 1.4056982865486414, + "learning_rate": 2.8403358036129796e-06, + "loss": 0.9675, + "step": 4226 + }, + { + "epoch": 0.3812057537087974, + "grad_norm": 1.4013380944824156, + "learning_rate": 2.839805627949132e-06, + "loss": 1.0109, + "step": 4227 + }, + { + "epoch": 0.3812959372322677, + "grad_norm": 1.5958257882493472, + "learning_rate": 2.8392753806284367e-06, + "loss": 1.0205, + "step": 4228 + }, + { + "epoch": 0.3813861207557379, + "grad_norm": 1.3701200587160312, + "learning_rate": 2.838745061696139e-06, + "loss": 0.9148, + "step": 4229 + }, + { + "epoch": 0.3814763042792082, + "grad_norm": 1.4649093409794405, + "learning_rate": 2.838214671197487e-06, + "loss": 0.8991, + "step": 4230 + }, + { + "epoch": 0.38156648780267843, + "grad_norm": 1.289951780660005, + "learning_rate": 2.8376842091777377e-06, + "loss": 1.0664, + "step": 4231 + }, + { + "epoch": 0.3816566713261487, + "grad_norm": 1.335795389391291, + "learning_rate": 2.8371536756821524e-06, + "loss": 0.8703, + "step": 4232 + }, + { + "epoch": 0.38174685484961896, + "grad_norm": 1.3304969251497527, + "learning_rate": 2.836623070756e-06, + "loss": 0.9465, + "step": 4233 + }, + { + "epoch": 0.38183703837308924, + "grad_norm": 1.4381142837940812, + "learning_rate": 2.8360923944445542e-06, + "loss": 0.9893, + "step": 4234 + }, + { + "epoch": 0.3819272218965595, + "grad_norm": 1.3760256843110403, + "learning_rate": 2.8355616467930947e-06, + "loss": 0.9526, + "step": 4235 + }, + { + "epoch": 0.38201740542002977, + "grad_norm": 1.5869012920159233, + "learning_rate": 2.8350308278469085e-06, + "loss": 0.9402, + "step": 4236 + }, + { + "epoch": 0.3821075889435, + "grad_norm": 1.3314032542527736, + "learning_rate": 2.8344999376512877e-06, + "loss": 0.9999, + "step": 4237 + }, + { + "epoch": 0.3821977724669703, + "grad_norm": 1.6353250546723386, + "learning_rate": 2.8339689762515307e-06, + "loss": 0.9533, + "step": 4238 + }, + { + "epoch": 0.3822879559904405, + "grad_norm": 1.333504809836834, + "learning_rate": 2.8334379436929424e-06, + "loss": 0.8954, + "step": 4239 + }, + { + "epoch": 0.3823781395139108, + "grad_norm": 1.3510174837597626, + "learning_rate": 2.832906840020833e-06, + "loss": 0.9342, + "step": 4240 + }, + { + "epoch": 0.3824683230373811, + "grad_norm": 1.3216900833688376, + "learning_rate": 
2.83237566528052e-06, + "loss": 0.9745, + "step": 4241 + }, + { + "epoch": 0.38255850656085133, + "grad_norm": 1.3033293547817557, + "learning_rate": 2.831844419517325e-06, + "loss": 0.9507, + "step": 4242 + }, + { + "epoch": 0.3826486900843216, + "grad_norm": 1.3502061244995778, + "learning_rate": 2.8313131027765774e-06, + "loss": 1.004, + "step": 4243 + }, + { + "epoch": 0.38273887360779185, + "grad_norm": 1.346003660795043, + "learning_rate": 2.8307817151036124e-06, + "loss": 0.9401, + "step": 4244 + }, + { + "epoch": 0.38282905713126214, + "grad_norm": 1.4564646419446177, + "learning_rate": 2.8302502565437704e-06, + "loss": 0.9426, + "step": 4245 + }, + { + "epoch": 0.3829192406547324, + "grad_norm": 1.68662359297011, + "learning_rate": 2.829718727142398e-06, + "loss": 0.896, + "step": 4246 + }, + { + "epoch": 0.38300942417820266, + "grad_norm": 1.3420745251396242, + "learning_rate": 2.829187126944849e-06, + "loss": 0.9821, + "step": 4247 + }, + { + "epoch": 0.3830996077016729, + "grad_norm": 1.6300086080873821, + "learning_rate": 2.8286554559964826e-06, + "loss": 0.9121, + "step": 4248 + }, + { + "epoch": 0.3831897912251432, + "grad_norm": 1.5366394798262517, + "learning_rate": 2.8281237143426637e-06, + "loss": 0.9886, + "step": 4249 + }, + { + "epoch": 0.3832799747486134, + "grad_norm": 1.3104278919768155, + "learning_rate": 2.8275919020287626e-06, + "loss": 0.8765, + "step": 4250 + }, + { + "epoch": 0.3833701582720837, + "grad_norm": 1.401469943967838, + "learning_rate": 2.827060019100158e-06, + "loss": 0.9867, + "step": 4251 + }, + { + "epoch": 0.38346034179555394, + "grad_norm": 1.5362889428165238, + "learning_rate": 2.8265280656022315e-06, + "loss": 0.956, + "step": 4252 + }, + { + "epoch": 0.3835505253190242, + "grad_norm": 1.4751627104934628, + "learning_rate": 2.825996041580373e-06, + "loss": 0.8534, + "step": 4253 + }, + { + "epoch": 0.38364070884249446, + "grad_norm": 1.5077365994497556, + "learning_rate": 2.825463947079978e-06, + "loss": 0.9826, + "step": 4254 + }, + { + "epoch": 0.38373089236596475, + "grad_norm": 1.2971374981423187, + "learning_rate": 2.8249317821464483e-06, + "loss": 0.966, + "step": 4255 + }, + { + "epoch": 0.383821075889435, + "grad_norm": 5.346249180013947, + "learning_rate": 2.824399546825189e-06, + "loss": 0.8975, + "step": 4256 + }, + { + "epoch": 0.38391125941290527, + "grad_norm": 1.2560039810767527, + "learning_rate": 2.823867241161616e-06, + "loss": 0.9227, + "step": 4257 + }, + { + "epoch": 0.3840014429363755, + "grad_norm": 1.5629406879761847, + "learning_rate": 2.8233348652011456e-06, + "loss": 1.0128, + "step": 4258 + }, + { + "epoch": 0.3840916264598458, + "grad_norm": 1.563159116962191, + "learning_rate": 2.8228024189892057e-06, + "loss": 0.8794, + "step": 4259 + }, + { + "epoch": 0.384181809983316, + "grad_norm": 1.2092386449785846, + "learning_rate": 2.822269902571226e-06, + "loss": 0.9682, + "step": 4260 + }, + { + "epoch": 0.3842719935067863, + "grad_norm": 1.3528680835001816, + "learning_rate": 2.8217373159926446e-06, + "loss": 0.9444, + "step": 4261 + }, + { + "epoch": 0.38436217703025655, + "grad_norm": 1.148205065398878, + "learning_rate": 2.8212046592989046e-06, + "loss": 0.9021, + "step": 4262 + }, + { + "epoch": 0.38445236055372684, + "grad_norm": 1.4254748707633307, + "learning_rate": 2.820671932535455e-06, + "loss": 0.972, + "step": 4263 + }, + { + "epoch": 0.3845425440771971, + "grad_norm": 1.7969913610846169, + "learning_rate": 2.8201391357477506e-06, + "loss": 0.9475, + "step": 4264 + }, + { + "epoch": 0.38463272760066736, 
+ "grad_norm": 1.3132865456333083, + "learning_rate": 2.8196062689812525e-06, + "loss": 0.9363, + "step": 4265 + }, + { + "epoch": 0.38472291112413765, + "grad_norm": 1.3691532499027859, + "learning_rate": 2.819073332281429e-06, + "loss": 0.9084, + "step": 4266 + }, + { + "epoch": 0.3848130946476079, + "grad_norm": 0.948938901488038, + "learning_rate": 2.8185403256937524e-06, + "loss": 0.7692, + "step": 4267 + }, + { + "epoch": 0.38490327817107817, + "grad_norm": 1.5463732281293054, + "learning_rate": 2.8180072492637016e-06, + "loss": 0.922, + "step": 4268 + }, + { + "epoch": 0.3849934616945484, + "grad_norm": 1.431402959521055, + "learning_rate": 2.817474103036762e-06, + "loss": 0.9497, + "step": 4269 + }, + { + "epoch": 0.3850836452180187, + "grad_norm": 2.353347253118886, + "learning_rate": 2.816940887058425e-06, + "loss": 0.9831, + "step": 4270 + }, + { + "epoch": 0.3851738287414889, + "grad_norm": 1.4920967433951429, + "learning_rate": 2.816407601374186e-06, + "loss": 1.0077, + "step": 4271 + }, + { + "epoch": 0.3852640122649592, + "grad_norm": 1.435037826278789, + "learning_rate": 2.815874246029549e-06, + "loss": 0.9016, + "step": 4272 + }, + { + "epoch": 0.38535419578842944, + "grad_norm": 1.2792922158192828, + "learning_rate": 2.815340821070023e-06, + "loss": 0.9163, + "step": 4273 + }, + { + "epoch": 0.38544437931189973, + "grad_norm": 1.543745400734664, + "learning_rate": 2.814807326541122e-06, + "loss": 0.942, + "step": 4274 + }, + { + "epoch": 0.38553456283536996, + "grad_norm": 1.357000227833267, + "learning_rate": 2.8142737624883676e-06, + "loss": 0.8982, + "step": 4275 + }, + { + "epoch": 0.38562474635884025, + "grad_norm": 1.36654102845054, + "learning_rate": 2.8137401289572854e-06, + "loss": 0.9729, + "step": 4276 + }, + { + "epoch": 0.3857149298823105, + "grad_norm": 1.4430463190714273, + "learning_rate": 2.8132064259934086e-06, + "loss": 1.0037, + "step": 4277 + }, + { + "epoch": 0.3858051134057808, + "grad_norm": 1.5642374678236703, + "learning_rate": 2.812672653642276e-06, + "loss": 0.96, + "step": 4278 + }, + { + "epoch": 0.385895296929251, + "grad_norm": 1.3580989768093292, + "learning_rate": 2.812138811949431e-06, + "loss": 0.977, + "step": 4279 + }, + { + "epoch": 0.3859854804527213, + "grad_norm": 1.5421716876702507, + "learning_rate": 2.8116049009604247e-06, + "loss": 1.0289, + "step": 4280 + }, + { + "epoch": 0.38607566397619153, + "grad_norm": 1.5566272998272987, + "learning_rate": 2.8110709207208132e-06, + "loss": 0.8432, + "step": 4281 + }, + { + "epoch": 0.3861658474996618, + "grad_norm": 1.599255570442978, + "learning_rate": 2.810536871276158e-06, + "loss": 0.9927, + "step": 4282 + }, + { + "epoch": 0.38625603102313205, + "grad_norm": 0.7619312870178171, + "learning_rate": 2.8100027526720283e-06, + "loss": 0.876, + "step": 4283 + }, + { + "epoch": 0.38634621454660234, + "grad_norm": 1.3799187938321247, + "learning_rate": 2.8094685649539974e-06, + "loss": 0.8931, + "step": 4284 + }, + { + "epoch": 0.3864363980700726, + "grad_norm": 0.8277317768972469, + "learning_rate": 2.8089343081676455e-06, + "loss": 0.8552, + "step": 4285 + }, + { + "epoch": 0.38652658159354286, + "grad_norm": 1.5513472208915922, + "learning_rate": 2.8083999823585577e-06, + "loss": 0.9546, + "step": 4286 + }, + { + "epoch": 0.3866167651170131, + "grad_norm": 1.2290122479355368, + "learning_rate": 2.8078655875723254e-06, + "loss": 0.9922, + "step": 4287 + }, + { + "epoch": 0.3867069486404834, + "grad_norm": 2.0594873099606406, + "learning_rate": 2.807331123854547e-06, + "loss": 1.0361, + 
"step": 4288 + }, + { + "epoch": 0.38679713216395367, + "grad_norm": 1.489880761105402, + "learning_rate": 2.806796591250826e-06, + "loss": 0.8336, + "step": 4289 + }, + { + "epoch": 0.3868873156874239, + "grad_norm": 1.5886250541771647, + "learning_rate": 2.8062619898067707e-06, + "loss": 1.0338, + "step": 4290 + }, + { + "epoch": 0.3869774992108942, + "grad_norm": 1.4760388449904618, + "learning_rate": 2.8057273195679963e-06, + "loss": 0.9771, + "step": 4291 + }, + { + "epoch": 0.3870676827343644, + "grad_norm": 1.4606529407497604, + "learning_rate": 2.8051925805801253e-06, + "loss": 0.9719, + "step": 4292 + }, + { + "epoch": 0.3871578662578347, + "grad_norm": 1.3564272962435204, + "learning_rate": 2.804657772888783e-06, + "loss": 0.9961, + "step": 4293 + }, + { + "epoch": 0.38724804978130495, + "grad_norm": 1.533144499028426, + "learning_rate": 2.804122896539602e-06, + "loss": 0.9628, + "step": 4294 + }, + { + "epoch": 0.38733823330477524, + "grad_norm": 1.3662057021656302, + "learning_rate": 2.8035879515782225e-06, + "loss": 0.9101, + "step": 4295 + }, + { + "epoch": 0.38742841682824547, + "grad_norm": 1.59930744978067, + "learning_rate": 2.803052938050288e-06, + "loss": 1.0109, + "step": 4296 + }, + { + "epoch": 0.38751860035171576, + "grad_norm": 1.4283420923618053, + "learning_rate": 2.802517856001449e-06, + "loss": 1.0102, + "step": 4297 + }, + { + "epoch": 0.387608783875186, + "grad_norm": 1.498292825693719, + "learning_rate": 2.801982705477361e-06, + "loss": 0.9237, + "step": 4298 + }, + { + "epoch": 0.3876989673986563, + "grad_norm": 1.3272849174874384, + "learning_rate": 2.8014474865236867e-06, + "loss": 0.9461, + "step": 4299 + }, + { + "epoch": 0.3877891509221265, + "grad_norm": 1.36410261621727, + "learning_rate": 2.800912199186094e-06, + "loss": 0.9495, + "step": 4300 + }, + { + "epoch": 0.3878793344455968, + "grad_norm": 1.3954296050103594, + "learning_rate": 2.800376843510256e-06, + "loss": 0.9714, + "step": 4301 + }, + { + "epoch": 0.38796951796906703, + "grad_norm": 1.3891515748307002, + "learning_rate": 2.799841419541852e-06, + "loss": 0.9821, + "step": 4302 + }, + { + "epoch": 0.3880597014925373, + "grad_norm": 0.7471039409174558, + "learning_rate": 2.799305927326568e-06, + "loss": 0.8247, + "step": 4303 + }, + { + "epoch": 0.38814988501600756, + "grad_norm": 0.7635901522019884, + "learning_rate": 2.7987703669100955e-06, + "loss": 0.819, + "step": 4304 + }, + { + "epoch": 0.38824006853947785, + "grad_norm": 1.549336334398507, + "learning_rate": 2.79823473833813e-06, + "loss": 0.966, + "step": 4305 + }, + { + "epoch": 0.3883302520629481, + "grad_norm": 1.5446247726629192, + "learning_rate": 2.797699041656376e-06, + "loss": 0.9107, + "step": 4306 + }, + { + "epoch": 0.38842043558641837, + "grad_norm": 1.9531657710588315, + "learning_rate": 2.7971632769105412e-06, + "loss": 0.9785, + "step": 4307 + }, + { + "epoch": 0.3885106191098886, + "grad_norm": 1.7982102450204844, + "learning_rate": 2.79662744414634e-06, + "loss": 0.9259, + "step": 4308 + }, + { + "epoch": 0.3886008026333589, + "grad_norm": 1.4959374886523316, + "learning_rate": 2.7960915434094923e-06, + "loss": 0.9099, + "step": 4309 + }, + { + "epoch": 0.3886909861568291, + "grad_norm": 1.6161476514705455, + "learning_rate": 2.7955555747457256e-06, + "loss": 0.9369, + "step": 4310 + }, + { + "epoch": 0.3887811696802994, + "grad_norm": 1.36383535060355, + "learning_rate": 2.79501953820077e-06, + "loss": 0.959, + "step": 4311 + }, + { + "epoch": 0.3888713532037697, + "grad_norm": 1.4258631852527375, + 
"learning_rate": 2.7944834338203637e-06, + "loss": 0.9781, + "step": 4312 + }, + { + "epoch": 0.38896153672723993, + "grad_norm": 1.4214164078185862, + "learning_rate": 2.79394726165025e-06, + "loss": 0.9189, + "step": 4313 + }, + { + "epoch": 0.3890517202507102, + "grad_norm": 1.846442955332098, + "learning_rate": 2.793411021736178e-06, + "loss": 0.938, + "step": 4314 + }, + { + "epoch": 0.38914190377418045, + "grad_norm": 1.4628113341902382, + "learning_rate": 2.7928747141239027e-06, + "loss": 1.0578, + "step": 4315 + }, + { + "epoch": 0.38923208729765074, + "grad_norm": 0.7524355047094774, + "learning_rate": 2.7923383388591856e-06, + "loss": 0.8299, + "step": 4316 + }, + { + "epoch": 0.389322270821121, + "grad_norm": 1.4627092195497404, + "learning_rate": 2.7918018959877923e-06, + "loss": 0.924, + "step": 4317 + }, + { + "epoch": 0.38941245434459126, + "grad_norm": 1.2712421332972061, + "learning_rate": 2.791265385555495e-06, + "loss": 0.9826, + "step": 4318 + }, + { + "epoch": 0.3895026378680615, + "grad_norm": 1.4236165374817848, + "learning_rate": 2.790728807608072e-06, + "loss": 1.018, + "step": 4319 + }, + { + "epoch": 0.3895928213915318, + "grad_norm": 1.581064338489692, + "learning_rate": 2.790192162191307e-06, + "loss": 0.9535, + "step": 4320 + }, + { + "epoch": 0.389683004915002, + "grad_norm": 1.5940297947041075, + "learning_rate": 2.78965544935099e-06, + "loss": 0.984, + "step": 4321 + }, + { + "epoch": 0.3897731884384723, + "grad_norm": 1.5153926395961985, + "learning_rate": 2.789118669132916e-06, + "loss": 0.9406, + "step": 4322 + }, + { + "epoch": 0.38986337196194254, + "grad_norm": 1.7314497188677715, + "learning_rate": 2.7885818215828856e-06, + "loss": 1.0259, + "step": 4323 + }, + { + "epoch": 0.38995355548541283, + "grad_norm": 1.6286963457944514, + "learning_rate": 2.7880449067467064e-06, + "loss": 0.9167, + "step": 4324 + }, + { + "epoch": 0.39004373900888306, + "grad_norm": 1.5436956697239541, + "learning_rate": 2.78750792467019e-06, + "loss": 0.9102, + "step": 4325 + }, + { + "epoch": 0.39013392253235335, + "grad_norm": 1.5177112653499323, + "learning_rate": 2.786970875399156e-06, + "loss": 0.8238, + "step": 4326 + }, + { + "epoch": 0.3902241060558236, + "grad_norm": 1.2252476441882794, + "learning_rate": 2.7864337589794267e-06, + "loss": 0.8847, + "step": 4327 + }, + { + "epoch": 0.39031428957929387, + "grad_norm": 1.4461925404838047, + "learning_rate": 2.7858965754568335e-06, + "loss": 0.9799, + "step": 4328 + }, + { + "epoch": 0.3904044731027641, + "grad_norm": 1.4121649218090426, + "learning_rate": 2.785359324877211e-06, + "loss": 0.8958, + "step": 4329 + }, + { + "epoch": 0.3904946566262344, + "grad_norm": 1.4144751015309363, + "learning_rate": 2.7848220072864e-06, + "loss": 0.978, + "step": 4330 + }, + { + "epoch": 0.3905848401497046, + "grad_norm": 1.4456823752067811, + "learning_rate": 2.784284622730248e-06, + "loss": 0.9494, + "step": 4331 + }, + { + "epoch": 0.3906750236731749, + "grad_norm": 1.6217176592586389, + "learning_rate": 2.7837471712546073e-06, + "loss": 0.9716, + "step": 4332 + }, + { + "epoch": 0.39076520719664515, + "grad_norm": 1.2002004277218852, + "learning_rate": 2.783209652905337e-06, + "loss": 0.9348, + "step": 4333 + }, + { + "epoch": 0.39085539072011544, + "grad_norm": 1.3823715627654978, + "learning_rate": 2.7826720677283e-06, + "loss": 0.9252, + "step": 4334 + }, + { + "epoch": 0.39094557424358567, + "grad_norm": 1.3785020273585973, + "learning_rate": 2.782134415769367e-06, + "loss": 0.9274, + "step": 4335 + }, + { + "epoch": 
0.39103575776705596, + "grad_norm": 1.2914822149348089, + "learning_rate": 2.7815966970744126e-06, + "loss": 1.006, + "step": 4336 + }, + { + "epoch": 0.39112594129052625, + "grad_norm": 1.6026567396141065, + "learning_rate": 2.7810589116893184e-06, + "loss": 1.0227, + "step": 4337 + }, + { + "epoch": 0.3912161248139965, + "grad_norm": 1.351271978476568, + "learning_rate": 2.780521059659972e-06, + "loss": 0.9557, + "step": 4338 + }, + { + "epoch": 0.39130630833746677, + "grad_norm": 1.4577347844088153, + "learning_rate": 2.7799831410322637e-06, + "loss": 0.9473, + "step": 4339 + }, + { + "epoch": 0.391396491860937, + "grad_norm": 1.2515777167872746, + "learning_rate": 2.779445155852094e-06, + "loss": 0.9122, + "step": 4340 + }, + { + "epoch": 0.3914866753844073, + "grad_norm": 0.7721806341936649, + "learning_rate": 2.7789071041653655e-06, + "loss": 0.8872, + "step": 4341 + }, + { + "epoch": 0.3915768589078775, + "grad_norm": 1.5031238452803841, + "learning_rate": 2.7783689860179875e-06, + "loss": 1.0056, + "step": 4342 + }, + { + "epoch": 0.3916670424313478, + "grad_norm": 1.5497142282201417, + "learning_rate": 2.7778308014558767e-06, + "loss": 0.9024, + "step": 4343 + }, + { + "epoch": 0.39175722595481804, + "grad_norm": 1.5152037516894918, + "learning_rate": 2.7772925505249524e-06, + "loss": 1.0315, + "step": 4344 + }, + { + "epoch": 0.39184740947828833, + "grad_norm": 1.1964379790125337, + "learning_rate": 2.7767542332711417e-06, + "loss": 0.9282, + "step": 4345 + }, + { + "epoch": 0.39193759300175857, + "grad_norm": 1.3411414532142394, + "learning_rate": 2.776215849740377e-06, + "loss": 0.9281, + "step": 4346 + }, + { + "epoch": 0.39202777652522885, + "grad_norm": 1.3689904232595786, + "learning_rate": 2.775677399978596e-06, + "loss": 0.97, + "step": 4347 + }, + { + "epoch": 0.3921179600486991, + "grad_norm": 0.7440128166597081, + "learning_rate": 2.775138884031742e-06, + "loss": 0.7979, + "step": 4348 + }, + { + "epoch": 0.3922081435721694, + "grad_norm": 1.2845638962292956, + "learning_rate": 2.774600301945764e-06, + "loss": 0.9195, + "step": 4349 + }, + { + "epoch": 0.3922983270956396, + "grad_norm": 1.3010173045011078, + "learning_rate": 2.774061653766618e-06, + "loss": 0.9796, + "step": 4350 + }, + { + "epoch": 0.3923885106191099, + "grad_norm": 1.4346034680424444, + "learning_rate": 2.773522939540263e-06, + "loss": 0.9438, + "step": 4351 + }, + { + "epoch": 0.39247869414258013, + "grad_norm": 1.5958445380127115, + "learning_rate": 2.7729841593126663e-06, + "loss": 0.8796, + "step": 4352 + }, + { + "epoch": 0.3925688776660504, + "grad_norm": 1.3003481582119873, + "learning_rate": 2.7724453131297988e-06, + "loss": 1.0197, + "step": 4353 + }, + { + "epoch": 0.39265906118952065, + "grad_norm": 1.371604802454264, + "learning_rate": 2.771906401037637e-06, + "loss": 0.9849, + "step": 4354 + }, + { + "epoch": 0.39274924471299094, + "grad_norm": 1.5138781991439276, + "learning_rate": 2.7713674230821664e-06, + "loss": 0.9162, + "step": 4355 + }, + { + "epoch": 0.3928394282364612, + "grad_norm": 1.3130322694059746, + "learning_rate": 2.7708283793093724e-06, + "loss": 0.9109, + "step": 4356 + }, + { + "epoch": 0.39292961175993146, + "grad_norm": 1.4773749482861438, + "learning_rate": 2.7702892697652514e-06, + "loss": 0.9368, + "step": 4357 + }, + { + "epoch": 0.3930197952834017, + "grad_norm": 1.361569660930526, + "learning_rate": 2.7697500944958024e-06, + "loss": 0.9314, + "step": 4358 + }, + { + "epoch": 0.393109978806872, + "grad_norm": 1.536683310724069, + "learning_rate": 
2.7692108535470312e-06, + "loss": 1.0433, + "step": 4359 + }, + { + "epoch": 0.3932001623303423, + "grad_norm": 1.732421875, + "learning_rate": 2.768671546964948e-06, + "loss": 1.0018, + "step": 4360 + }, + { + "epoch": 0.3932903458538125, + "grad_norm": 1.56398763410748, + "learning_rate": 2.7681321747955713e-06, + "loss": 0.9811, + "step": 4361 + }, + { + "epoch": 0.3933805293772828, + "grad_norm": 1.2711124374244531, + "learning_rate": 2.767592737084921e-06, + "loss": 0.9728, + "step": 4362 + }, + { + "epoch": 0.39347071290075303, + "grad_norm": 1.5699468538072545, + "learning_rate": 2.767053233879026e-06, + "loss": 1.0459, + "step": 4363 + }, + { + "epoch": 0.3935608964242233, + "grad_norm": 1.4557566449660442, + "learning_rate": 2.76651366522392e-06, + "loss": 1.0297, + "step": 4364 + }, + { + "epoch": 0.39365107994769355, + "grad_norm": 1.6518858507357836, + "learning_rate": 2.7659740311656413e-06, + "loss": 0.9261, + "step": 4365 + }, + { + "epoch": 0.39374126347116384, + "grad_norm": 1.8867182445327997, + "learning_rate": 2.7654343317502352e-06, + "loss": 0.9856, + "step": 4366 + }, + { + "epoch": 0.39383144699463407, + "grad_norm": 1.3152488351949796, + "learning_rate": 2.7648945670237502e-06, + "loss": 0.9323, + "step": 4367 + }, + { + "epoch": 0.39392163051810436, + "grad_norm": 1.6434933973411276, + "learning_rate": 2.7643547370322446e-06, + "loss": 1.0051, + "step": 4368 + }, + { + "epoch": 0.3940118140415746, + "grad_norm": 1.6240445042182552, + "learning_rate": 2.7638148418217775e-06, + "loss": 0.9913, + "step": 4369 + }, + { + "epoch": 0.3941019975650449, + "grad_norm": 1.7092592552373032, + "learning_rate": 2.7632748814384163e-06, + "loss": 0.999, + "step": 4370 + }, + { + "epoch": 0.3941921810885151, + "grad_norm": 1.1878902898183976, + "learning_rate": 2.7627348559282335e-06, + "loss": 0.9849, + "step": 4371 + }, + { + "epoch": 0.3942823646119854, + "grad_norm": 1.3615563090570857, + "learning_rate": 2.7621947653373075e-06, + "loss": 0.8944, + "step": 4372 + }, + { + "epoch": 0.39437254813545564, + "grad_norm": 0.8393750679430788, + "learning_rate": 2.7616546097117213e-06, + "loss": 0.8168, + "step": 4373 + }, + { + "epoch": 0.3944627316589259, + "grad_norm": 1.6113531307245323, + "learning_rate": 2.761114389097564e-06, + "loss": 0.9591, + "step": 4374 + }, + { + "epoch": 0.39455291518239616, + "grad_norm": 3.60203698067583, + "learning_rate": 2.7605741035409305e-06, + "loss": 0.9958, + "step": 4375 + }, + { + "epoch": 0.39464309870586645, + "grad_norm": 1.4538381323827931, + "learning_rate": 2.76003375308792e-06, + "loss": 0.9361, + "step": 4376 + }, + { + "epoch": 0.3947332822293367, + "grad_norm": 1.2143986122556907, + "learning_rate": 2.75949333778464e-06, + "loss": 0.9267, + "step": 4377 + }, + { + "epoch": 0.39482346575280697, + "grad_norm": 1.7840254058696732, + "learning_rate": 2.7589528576772e-06, + "loss": 0.8388, + "step": 4378 + }, + { + "epoch": 0.3949136492762772, + "grad_norm": 1.5945411663130227, + "learning_rate": 2.758412312811717e-06, + "loss": 0.9452, + "step": 4379 + }, + { + "epoch": 0.3950038327997475, + "grad_norm": 1.098173515103557, + "learning_rate": 2.7578717032343146e-06, + "loss": 0.843, + "step": 4380 + }, + { + "epoch": 0.3950940163232177, + "grad_norm": 1.3708686752556052, + "learning_rate": 2.757331028991119e-06, + "loss": 0.986, + "step": 4381 + }, + { + "epoch": 0.395184199846688, + "grad_norm": 1.6886543987780842, + "learning_rate": 2.7567902901282642e-06, + "loss": 0.9736, + "step": 4382 + }, + { + "epoch": 0.3952743833701583, + 
"grad_norm": 1.9635236488008043, + "learning_rate": 2.7562494866918892e-06, + "loss": 0.9485, + "step": 4383 + }, + { + "epoch": 0.39536456689362853, + "grad_norm": 1.2999534378516315, + "learning_rate": 2.7557086187281378e-06, + "loss": 0.9309, + "step": 4384 + }, + { + "epoch": 0.3954547504170988, + "grad_norm": 1.7646869429338676, + "learning_rate": 2.75516768628316e-06, + "loss": 1.0497, + "step": 4385 + }, + { + "epoch": 0.39554493394056905, + "grad_norm": 1.3589330809919218, + "learning_rate": 2.7546266894031114e-06, + "loss": 1.031, + "step": 4386 + }, + { + "epoch": 0.39563511746403934, + "grad_norm": 1.7388802514286583, + "learning_rate": 2.7540856281341526e-06, + "loss": 0.903, + "step": 4387 + }, + { + "epoch": 0.3957253009875096, + "grad_norm": 1.4744227815512496, + "learning_rate": 2.7535445025224506e-06, + "loss": 0.9207, + "step": 4388 + }, + { + "epoch": 0.39581548451097986, + "grad_norm": 1.8174230867296637, + "learning_rate": 2.753003312614176e-06, + "loss": 0.9517, + "step": 4389 + }, + { + "epoch": 0.3959056680344501, + "grad_norm": 1.588092786393922, + "learning_rate": 2.7524620584555065e-06, + "loss": 0.9551, + "step": 4390 + }, + { + "epoch": 0.3959958515579204, + "grad_norm": 1.6469086703309492, + "learning_rate": 2.7519207400926253e-06, + "loss": 0.9882, + "step": 4391 + }, + { + "epoch": 0.3960860350813906, + "grad_norm": 1.3536702297586194, + "learning_rate": 2.751379357571721e-06, + "loss": 0.9372, + "step": 4392 + }, + { + "epoch": 0.3961762186048609, + "grad_norm": 1.6150920025628737, + "learning_rate": 2.7508379109389865e-06, + "loss": 1.0227, + "step": 4393 + }, + { + "epoch": 0.39626640212833114, + "grad_norm": 1.6972187768859912, + "learning_rate": 2.750296400240622e-06, + "loss": 0.9644, + "step": 4394 + }, + { + "epoch": 0.39635658565180143, + "grad_norm": 1.2429089640319977, + "learning_rate": 2.7497548255228305e-06, + "loss": 0.9855, + "step": 4395 + }, + { + "epoch": 0.39644676917527166, + "grad_norm": 1.536827672328161, + "learning_rate": 2.749213186831824e-06, + "loss": 0.9109, + "step": 4396 + }, + { + "epoch": 0.39653695269874195, + "grad_norm": 1.410527484246045, + "learning_rate": 2.7486714842138173e-06, + "loss": 0.9957, + "step": 4397 + }, + { + "epoch": 0.3966271362222122, + "grad_norm": 1.8342331787602528, + "learning_rate": 2.748129717715031e-06, + "loss": 0.9084, + "step": 4398 + }, + { + "epoch": 0.3967173197456825, + "grad_norm": 2.5370324103330493, + "learning_rate": 2.747587887381692e-06, + "loss": 0.9753, + "step": 4399 + }, + { + "epoch": 0.3968075032691527, + "grad_norm": 1.3336570416333358, + "learning_rate": 2.7470459932600328e-06, + "loss": 0.9094, + "step": 4400 + }, + { + "epoch": 0.396897686792623, + "grad_norm": 1.4075181541152937, + "learning_rate": 2.7465040353962897e-06, + "loss": 0.9552, + "step": 4401 + }, + { + "epoch": 0.3969878703160932, + "grad_norm": 1.6760440789255344, + "learning_rate": 2.745962013836706e-06, + "loss": 0.9441, + "step": 4402 + }, + { + "epoch": 0.3970780538395635, + "grad_norm": 1.6292799559844018, + "learning_rate": 2.74541992862753e-06, + "loss": 0.9311, + "step": 4403 + }, + { + "epoch": 0.39716823736303375, + "grad_norm": 1.6583735689556849, + "learning_rate": 2.744877779815016e-06, + "loss": 0.9571, + "step": 4404 + }, + { + "epoch": 0.39725842088650404, + "grad_norm": 1.2576472961736433, + "learning_rate": 2.7443355674454234e-06, + "loss": 0.9516, + "step": 4405 + }, + { + "epoch": 0.39734860440997427, + "grad_norm": 1.7509783326207589, + "learning_rate": 2.743793291565015e-06, + "loss": 
0.9954, + "step": 4406 + }, + { + "epoch": 0.39743878793344456, + "grad_norm": 2.0206748926508564, + "learning_rate": 2.7432509522200617e-06, + "loss": 0.8802, + "step": 4407 + }, + { + "epoch": 0.39752897145691485, + "grad_norm": 1.82451579842767, + "learning_rate": 2.7427085494568383e-06, + "loss": 0.9002, + "step": 4408 + }, + { + "epoch": 0.3976191549803851, + "grad_norm": 0.7564587643838458, + "learning_rate": 2.742166083321628e-06, + "loss": 0.8335, + "step": 4409 + }, + { + "epoch": 0.39770933850385537, + "grad_norm": 1.4720515216176209, + "learning_rate": 2.7416235538607137e-06, + "loss": 0.9983, + "step": 4410 + }, + { + "epoch": 0.3977995220273256, + "grad_norm": 1.3863000801884948, + "learning_rate": 2.7410809611203894e-06, + "loss": 0.9368, + "step": 4411 + }, + { + "epoch": 0.3978897055507959, + "grad_norm": 1.3861157892887639, + "learning_rate": 2.7405383051469507e-06, + "loss": 0.9101, + "step": 4412 + }, + { + "epoch": 0.3979798890742661, + "grad_norm": 1.3875154167899664, + "learning_rate": 2.7399955859867e-06, + "loss": 0.7645, + "step": 4413 + }, + { + "epoch": 0.3980700725977364, + "grad_norm": 1.0151630818373591, + "learning_rate": 2.7394528036859465e-06, + "loss": 0.7747, + "step": 4414 + }, + { + "epoch": 0.39816025612120665, + "grad_norm": 1.3946795558339928, + "learning_rate": 2.738909958291002e-06, + "loss": 0.9531, + "step": 4415 + }, + { + "epoch": 0.39825043964467693, + "grad_norm": 1.311475990101811, + "learning_rate": 2.7383670498481863e-06, + "loss": 0.8665, + "step": 4416 + }, + { + "epoch": 0.39834062316814717, + "grad_norm": 0.7658998813363589, + "learning_rate": 2.737824078403822e-06, + "loss": 0.737, + "step": 4417 + }, + { + "epoch": 0.39843080669161746, + "grad_norm": 1.6897355565722139, + "learning_rate": 2.737281044004239e-06, + "loss": 0.9309, + "step": 4418 + }, + { + "epoch": 0.3985209902150877, + "grad_norm": 1.4089172922636424, + "learning_rate": 2.736737946695772e-06, + "loss": 0.942, + "step": 4419 + }, + { + "epoch": 0.398611173738558, + "grad_norm": 1.4577755088242523, + "learning_rate": 2.736194786524761e-06, + "loss": 0.9466, + "step": 4420 + }, + { + "epoch": 0.3987013572620282, + "grad_norm": 1.4309560848777836, + "learning_rate": 2.7356515635375517e-06, + "loss": 0.9544, + "step": 4421 + }, + { + "epoch": 0.3987915407854985, + "grad_norm": 1.893172636241934, + "learning_rate": 2.735108277780495e-06, + "loss": 0.8738, + "step": 4422 + }, + { + "epoch": 0.39888172430896873, + "grad_norm": 1.3255647553347336, + "learning_rate": 2.7345649292999456e-06, + "loss": 0.9451, + "step": 4423 + }, + { + "epoch": 0.398971907832439, + "grad_norm": 1.5125476230658776, + "learning_rate": 2.734021518142267e-06, + "loss": 0.9567, + "step": 4424 + }, + { + "epoch": 0.39906209135590925, + "grad_norm": 1.705445554805334, + "learning_rate": 2.733478044353825e-06, + "loss": 0.7904, + "step": 4425 + }, + { + "epoch": 0.39915227487937954, + "grad_norm": 1.534424818671369, + "learning_rate": 2.7329345079809917e-06, + "loss": 0.958, + "step": 4426 + }, + { + "epoch": 0.3992424584028498, + "grad_norm": 1.433130839186633, + "learning_rate": 2.7323909090701447e-06, + "loss": 0.8855, + "step": 4427 + }, + { + "epoch": 0.39933264192632006, + "grad_norm": 1.9066332447461714, + "learning_rate": 2.731847247667667e-06, + "loss": 0.9967, + "step": 4428 + }, + { + "epoch": 0.3994228254497903, + "grad_norm": 1.5484025137112, + "learning_rate": 2.731303523819947e-06, + "loss": 0.9299, + "step": 4429 + }, + { + "epoch": 0.3995130089732606, + "grad_norm": 1.4622309054091431, + 
"learning_rate": 2.7307597375733783e-06, + "loss": 1.0108, + "step": 4430 + }, + { + "epoch": 0.3996031924967309, + "grad_norm": 1.5851437691788952, + "learning_rate": 2.7302158889743587e-06, + "loss": 0.9574, + "step": 4431 + }, + { + "epoch": 0.3996933760202011, + "grad_norm": 1.44389645052506, + "learning_rate": 2.7296719780692937e-06, + "loss": 0.9107, + "step": 4432 + }, + { + "epoch": 0.3997835595436714, + "grad_norm": 1.4465449671528499, + "learning_rate": 2.7291280049045916e-06, + "loss": 0.8639, + "step": 4433 + }, + { + "epoch": 0.39987374306714163, + "grad_norm": 2.118245103480806, + "learning_rate": 2.7285839695266683e-06, + "loss": 0.8958, + "step": 4434 + }, + { + "epoch": 0.3999639265906119, + "grad_norm": 2.121952002677313, + "learning_rate": 2.7280398719819423e-06, + "loss": 0.9009, + "step": 4435 + }, + { + "epoch": 0.40005411011408215, + "grad_norm": 1.5923818063516384, + "learning_rate": 2.727495712316841e-06, + "loss": 1.02, + "step": 4436 + }, + { + "epoch": 0.40014429363755244, + "grad_norm": 1.7606517326321538, + "learning_rate": 2.7269514905777945e-06, + "loss": 0.9007, + "step": 4437 + }, + { + "epoch": 0.4002344771610227, + "grad_norm": 1.4454925424969955, + "learning_rate": 2.7264072068112377e-06, + "loss": 1.0766, + "step": 4438 + }, + { + "epoch": 0.40032466068449296, + "grad_norm": 3.0235143055388938, + "learning_rate": 2.7258628610636133e-06, + "loss": 0.921, + "step": 4439 + }, + { + "epoch": 0.4004148442079632, + "grad_norm": 1.4394385493665034, + "learning_rate": 2.7253184533813667e-06, + "loss": 1.0329, + "step": 4440 + }, + { + "epoch": 0.4005050277314335, + "grad_norm": 1.460182770171357, + "learning_rate": 2.72477398381095e-06, + "loss": 0.9602, + "step": 4441 + }, + { + "epoch": 0.4005952112549037, + "grad_norm": 1.2684568113373385, + "learning_rate": 2.724229452398821e-06, + "loss": 0.9641, + "step": 4442 + }, + { + "epoch": 0.400685394778374, + "grad_norm": 1.5476278919623996, + "learning_rate": 2.7236848591914422e-06, + "loss": 0.9691, + "step": 4443 + }, + { + "epoch": 0.40077557830184424, + "grad_norm": 1.5935858847697701, + "learning_rate": 2.7231402042352803e-06, + "loss": 0.9388, + "step": 4444 + }, + { + "epoch": 0.4008657618253145, + "grad_norm": 1.251090289506906, + "learning_rate": 2.722595487576809e-06, + "loss": 0.9878, + "step": 4445 + }, + { + "epoch": 0.40095594534878476, + "grad_norm": 1.4065958233631772, + "learning_rate": 2.722050709262506e-06, + "loss": 0.9989, + "step": 4446 + }, + { + "epoch": 0.40104612887225505, + "grad_norm": 1.3790708881246683, + "learning_rate": 2.7215058693388557e-06, + "loss": 0.9359, + "step": 4447 + }, + { + "epoch": 0.4011363123957253, + "grad_norm": 1.6031887822541082, + "learning_rate": 2.720960967852346e-06, + "loss": 0.858, + "step": 4448 + }, + { + "epoch": 0.40122649591919557, + "grad_norm": 1.468993876886558, + "learning_rate": 2.720416004849471e-06, + "loss": 0.8881, + "step": 4449 + }, + { + "epoch": 0.4013166794426658, + "grad_norm": 1.3146380312865795, + "learning_rate": 2.7198709803767304e-06, + "loss": 0.904, + "step": 4450 + }, + { + "epoch": 0.4014068629661361, + "grad_norm": 1.3258469286033185, + "learning_rate": 2.7193258944806286e-06, + "loss": 0.9198, + "step": 4451 + }, + { + "epoch": 0.4014970464896063, + "grad_norm": 1.5362744323674793, + "learning_rate": 2.718780747207675e-06, + "loss": 0.9562, + "step": 4452 + }, + { + "epoch": 0.4015872300130766, + "grad_norm": 1.6439694421299638, + "learning_rate": 2.7182355386043847e-06, + "loss": 1.0161, + "step": 4453 + }, + { + "epoch": 
0.40167741353654685, + "grad_norm": 1.2486042813651896, + "learning_rate": 2.717690268717278e-06, + "loss": 1.0099, + "step": 4454 + }, + { + "epoch": 0.40176759706001713, + "grad_norm": 1.2402185153698073, + "learning_rate": 2.7171449375928803e-06, + "loss": 0.9783, + "step": 4455 + }, + { + "epoch": 0.4018577805834874, + "grad_norm": 1.7833181722240143, + "learning_rate": 2.716599545277722e-06, + "loss": 0.9736, + "step": 4456 + }, + { + "epoch": 0.40194796410695766, + "grad_norm": 1.361791589291287, + "learning_rate": 2.7160540918183394e-06, + "loss": 0.9159, + "step": 4457 + }, + { + "epoch": 0.40203814763042794, + "grad_norm": 1.3682628653095885, + "learning_rate": 2.715508577261273e-06, + "loss": 0.8759, + "step": 4458 + }, + { + "epoch": 0.4021283311538982, + "grad_norm": 2.0354742874324363, + "learning_rate": 2.7149630016530702e-06, + "loss": 0.9352, + "step": 4459 + }, + { + "epoch": 0.40221851467736847, + "grad_norm": 1.2473688089500161, + "learning_rate": 2.7144173650402815e-06, + "loss": 0.9673, + "step": 4460 + }, + { + "epoch": 0.4023086982008387, + "grad_norm": 1.3990228376290073, + "learning_rate": 2.7138716674694636e-06, + "loss": 0.9767, + "step": 4461 + }, + { + "epoch": 0.402398881724309, + "grad_norm": 1.4330867525662427, + "learning_rate": 2.7133259089871795e-06, + "loss": 0.9045, + "step": 4462 + }, + { + "epoch": 0.4024890652477792, + "grad_norm": 1.4971238218176892, + "learning_rate": 2.712780089639995e-06, + "loss": 0.9518, + "step": 4463 + }, + { + "epoch": 0.4025792487712495, + "grad_norm": 2.2374255290504923, + "learning_rate": 2.712234209474483e-06, + "loss": 0.825, + "step": 4464 + }, + { + "epoch": 0.40266943229471974, + "grad_norm": 1.4238729168998316, + "learning_rate": 2.7116882685372218e-06, + "loss": 0.9938, + "step": 4465 + }, + { + "epoch": 0.40275961581819003, + "grad_norm": 1.4546599126344897, + "learning_rate": 2.7111422668747927e-06, + "loss": 0.9277, + "step": 4466 + }, + { + "epoch": 0.40284979934166026, + "grad_norm": 1.3786810672159797, + "learning_rate": 2.7105962045337846e-06, + "loss": 0.9534, + "step": 4467 + }, + { + "epoch": 0.40293998286513055, + "grad_norm": 1.3352066182322782, + "learning_rate": 2.7100500815607898e-06, + "loss": 0.9453, + "step": 4468 + }, + { + "epoch": 0.4030301663886008, + "grad_norm": 1.2065114009838616, + "learning_rate": 2.709503898002407e-06, + "loss": 0.9783, + "step": 4469 + }, + { + "epoch": 0.4031203499120711, + "grad_norm": 1.7921771793351005, + "learning_rate": 2.708957653905239e-06, + "loss": 0.949, + "step": 4470 + }, + { + "epoch": 0.4032105334355413, + "grad_norm": 1.3124834241274492, + "learning_rate": 2.7084113493158956e-06, + "loss": 0.966, + "step": 4471 + }, + { + "epoch": 0.4033007169590116, + "grad_norm": 2.339375291396787, + "learning_rate": 2.7078649842809888e-06, + "loss": 0.9668, + "step": 4472 + }, + { + "epoch": 0.40339090048248183, + "grad_norm": 1.3193371121212851, + "learning_rate": 2.707318558847139e-06, + "loss": 0.9988, + "step": 4473 + }, + { + "epoch": 0.4034810840059521, + "grad_norm": 0.7883609636907133, + "learning_rate": 2.7067720730609697e-06, + "loss": 0.8178, + "step": 4474 + }, + { + "epoch": 0.40357126752942235, + "grad_norm": 1.4105147225775534, + "learning_rate": 2.70622552696911e-06, + "loss": 0.9278, + "step": 4475 + }, + { + "epoch": 0.40366145105289264, + "grad_norm": 1.3748930542710143, + "learning_rate": 2.7056789206181943e-06, + "loss": 0.9028, + "step": 4476 + }, + { + "epoch": 0.40375163457636287, + "grad_norm": 1.3743379039190324, + "learning_rate": 
2.7051322540548615e-06, + "loss": 1.0037, + "step": 4477 + }, + { + "epoch": 0.40384181809983316, + "grad_norm": 1.5562968143637652, + "learning_rate": 2.704585527325757e-06, + "loss": 0.9164, + "step": 4478 + }, + { + "epoch": 0.40393200162330345, + "grad_norm": 1.515255597903383, + "learning_rate": 2.7040387404775303e-06, + "loss": 0.9128, + "step": 4479 + }, + { + "epoch": 0.4040221851467737, + "grad_norm": 1.4795126532722276, + "learning_rate": 2.703491893556837e-06, + "loss": 0.9714, + "step": 4480 + }, + { + "epoch": 0.40411236867024397, + "grad_norm": 1.1799179256312242, + "learning_rate": 2.702944986610335e-06, + "loss": 0.9262, + "step": 4481 + }, + { + "epoch": 0.4042025521937142, + "grad_norm": 1.8406640429152805, + "learning_rate": 2.7023980196846917e-06, + "loss": 0.9712, + "step": 4482 + }, + { + "epoch": 0.4042927357171845, + "grad_norm": 0.7536706506895741, + "learning_rate": 2.7018509928265763e-06, + "loss": 0.8384, + "step": 4483 + }, + { + "epoch": 0.4043829192406547, + "grad_norm": 1.5385206788873569, + "learning_rate": 2.7013039060826635e-06, + "loss": 0.9758, + "step": 4484 + }, + { + "epoch": 0.404473102764125, + "grad_norm": 1.5797760135895194, + "learning_rate": 2.7007567594996347e-06, + "loss": 1.0214, + "step": 4485 + }, + { + "epoch": 0.40456328628759525, + "grad_norm": 1.5453368737561732, + "learning_rate": 2.7002095531241757e-06, + "loss": 0.8568, + "step": 4486 + }, + { + "epoch": 0.40465346981106554, + "grad_norm": 2.2819126159693686, + "learning_rate": 2.6996622870029767e-06, + "loss": 0.9338, + "step": 4487 + }, + { + "epoch": 0.40474365333453577, + "grad_norm": 1.7453070795014305, + "learning_rate": 2.6991149611827335e-06, + "loss": 1.0653, + "step": 4488 + }, + { + "epoch": 0.40483383685800606, + "grad_norm": 1.7015671911253774, + "learning_rate": 2.6985675757101466e-06, + "loss": 0.9921, + "step": 4489 + }, + { + "epoch": 0.4049240203814763, + "grad_norm": 1.4214938983980843, + "learning_rate": 2.698020130631922e-06, + "loss": 0.9102, + "step": 4490 + }, + { + "epoch": 0.4050142039049466, + "grad_norm": 1.424303851825884, + "learning_rate": 2.6974726259947713e-06, + "loss": 0.8876, + "step": 4491 + }, + { + "epoch": 0.4051043874284168, + "grad_norm": 1.7783582537739322, + "learning_rate": 2.6969250618454106e-06, + "loss": 0.9358, + "step": 4492 + }, + { + "epoch": 0.4051945709518871, + "grad_norm": 1.212416895741807, + "learning_rate": 2.696377438230561e-06, + "loss": 1.0247, + "step": 4493 + }, + { + "epoch": 0.40528475447535733, + "grad_norm": 1.673563024833424, + "learning_rate": 2.6958297551969484e-06, + "loss": 0.8911, + "step": 4494 + }, + { + "epoch": 0.4053749379988276, + "grad_norm": 2.094382233318336, + "learning_rate": 2.695282012791304e-06, + "loss": 0.9169, + "step": 4495 + }, + { + "epoch": 0.40546512152229786, + "grad_norm": 1.501266580532983, + "learning_rate": 2.6947342110603646e-06, + "loss": 1.0038, + "step": 4496 + }, + { + "epoch": 0.40555530504576814, + "grad_norm": 1.3967575979714035, + "learning_rate": 2.6941863500508717e-06, + "loss": 1.0075, + "step": 4497 + }, + { + "epoch": 0.4056454885692384, + "grad_norm": 1.7106281854434697, + "learning_rate": 2.693638429809572e-06, + "loss": 0.9999, + "step": 4498 + }, + { + "epoch": 0.40573567209270867, + "grad_norm": 1.6171911119798508, + "learning_rate": 2.6930904503832167e-06, + "loss": 0.9388, + "step": 4499 + }, + { + "epoch": 0.4058258556161789, + "grad_norm": 1.3398748455387386, + "learning_rate": 2.692542411818562e-06, + "loss": 0.9305, + "step": 4500 + }, + { + "epoch": 
0.4059160391396492, + "grad_norm": 1.7015007743463764, + "learning_rate": 2.69199431416237e-06, + "loss": 0.9633, + "step": 4501 + }, + { + "epoch": 0.4060062226631194, + "grad_norm": 1.4250518187085826, + "learning_rate": 2.691446157461408e-06, + "loss": 0.9124, + "step": 4502 + }, + { + "epoch": 0.4060964061865897, + "grad_norm": 1.4243102964440861, + "learning_rate": 2.690897941762447e-06, + "loss": 1.0117, + "step": 4503 + }, + { + "epoch": 0.40618658971006, + "grad_norm": 1.3332192948369785, + "learning_rate": 2.6903496671122642e-06, + "loss": 0.9315, + "step": 4504 + }, + { + "epoch": 0.40627677323353023, + "grad_norm": 0.7540949451913997, + "learning_rate": 2.689801333557641e-06, + "loss": 0.7639, + "step": 4505 + }, + { + "epoch": 0.4063669567570005, + "grad_norm": 1.91310431961801, + "learning_rate": 2.689252941145365e-06, + "loss": 0.9583, + "step": 4506 + }, + { + "epoch": 0.40645714028047075, + "grad_norm": 1.3675196216995709, + "learning_rate": 2.6887044899222277e-06, + "loss": 0.9489, + "step": 4507 + }, + { + "epoch": 0.40654732380394104, + "grad_norm": 1.4607642575935438, + "learning_rate": 2.688155979935025e-06, + "loss": 1.0019, + "step": 4508 + }, + { + "epoch": 0.4066375073274113, + "grad_norm": 1.3545024406633606, + "learning_rate": 2.68760741123056e-06, + "loss": 0.9107, + "step": 4509 + }, + { + "epoch": 0.40672769085088156, + "grad_norm": 1.6582927702091406, + "learning_rate": 2.6870587838556394e-06, + "loss": 0.967, + "step": 4510 + }, + { + "epoch": 0.4068178743743518, + "grad_norm": 1.5108838991995222, + "learning_rate": 2.686510097857075e-06, + "loss": 0.9664, + "step": 4511 + }, + { + "epoch": 0.4069080578978221, + "grad_norm": 1.3199446126125862, + "learning_rate": 2.685961353281683e-06, + "loss": 0.9495, + "step": 4512 + }, + { + "epoch": 0.4069982414212923, + "grad_norm": 1.5437999948908063, + "learning_rate": 2.6854125501762863e-06, + "loss": 0.9344, + "step": 4513 + }, + { + "epoch": 0.4070884249447626, + "grad_norm": 1.3437935134584664, + "learning_rate": 2.684863688587712e-06, + "loss": 0.9541, + "step": 4514 + }, + { + "epoch": 0.40717860846823284, + "grad_norm": 1.7545272305566797, + "learning_rate": 2.6843147685627916e-06, + "loss": 0.9228, + "step": 4515 + }, + { + "epoch": 0.4072687919917031, + "grad_norm": 1.6615334935263457, + "learning_rate": 2.683765790148361e-06, + "loss": 0.9263, + "step": 4516 + }, + { + "epoch": 0.40735897551517336, + "grad_norm": 1.5213956794790755, + "learning_rate": 2.6832167533912637e-06, + "loss": 1.0031, + "step": 4517 + }, + { + "epoch": 0.40744915903864365, + "grad_norm": 2.0201647593091607, + "learning_rate": 2.682667658338345e-06, + "loss": 1.0278, + "step": 4518 + }, + { + "epoch": 0.4075393425621139, + "grad_norm": 1.3796877713057358, + "learning_rate": 2.682118505036458e-06, + "loss": 0.9213, + "step": 4519 + }, + { + "epoch": 0.40762952608558417, + "grad_norm": 0.7244596506575837, + "learning_rate": 2.681569293532459e-06, + "loss": 0.781, + "step": 4520 + }, + { + "epoch": 0.4077197096090544, + "grad_norm": 0.7165750269370884, + "learning_rate": 2.6810200238732102e-06, + "loss": 0.8118, + "step": 4521 + }, + { + "epoch": 0.4078098931325247, + "grad_norm": 1.3581751647476135, + "learning_rate": 2.6804706961055776e-06, + "loss": 0.9973, + "step": 4522 + }, + { + "epoch": 0.4079000766559949, + "grad_norm": 1.403403049437738, + "learning_rate": 2.6799213102764326e-06, + "loss": 0.9821, + "step": 4523 + }, + { + "epoch": 0.4079902601794652, + "grad_norm": 2.2706864219063005, + "learning_rate": 2.679371866432653e-06, 
+ "loss": 1.0461, + "step": 4524 + }, + { + "epoch": 0.40808044370293545, + "grad_norm": 1.5960052839158245, + "learning_rate": 2.6788223646211194e-06, + "loss": 0.9253, + "step": 4525 + }, + { + "epoch": 0.40817062722640574, + "grad_norm": 1.3067237360000592, + "learning_rate": 2.6782728048887183e-06, + "loss": 0.8416, + "step": 4526 + }, + { + "epoch": 0.408260810749876, + "grad_norm": 1.0845483361038557, + "learning_rate": 2.6777231872823416e-06, + "loss": 0.9363, + "step": 4527 + }, + { + "epoch": 0.40835099427334626, + "grad_norm": 1.516439710480793, + "learning_rate": 2.6771735118488864e-06, + "loss": 0.9502, + "step": 4528 + }, + { + "epoch": 0.40844117779681655, + "grad_norm": 1.435337762458749, + "learning_rate": 2.6766237786352523e-06, + "loss": 0.9894, + "step": 4529 + }, + { + "epoch": 0.4085313613202868, + "grad_norm": 1.4120108964488796, + "learning_rate": 2.676073987688347e-06, + "loss": 0.8847, + "step": 4530 + }, + { + "epoch": 0.40862154484375707, + "grad_norm": 1.2972134527681023, + "learning_rate": 2.6755241390550818e-06, + "loss": 0.9609, + "step": 4531 + }, + { + "epoch": 0.4087117283672273, + "grad_norm": 1.2789814567474371, + "learning_rate": 2.6749742327823716e-06, + "loss": 0.9515, + "step": 4532 + }, + { + "epoch": 0.4088019118906976, + "grad_norm": 1.384971743202474, + "learning_rate": 2.674424268917138e-06, + "loss": 0.9708, + "step": 4533 + }, + { + "epoch": 0.4088920954141678, + "grad_norm": 1.4507025759497318, + "learning_rate": 2.6738742475063074e-06, + "loss": 1.0207, + "step": 4534 + }, + { + "epoch": 0.4089822789376381, + "grad_norm": 1.701354830481005, + "learning_rate": 2.6733241685968104e-06, + "loss": 0.9905, + "step": 4535 + }, + { + "epoch": 0.40907246246110834, + "grad_norm": 2.457451376603733, + "learning_rate": 2.6727740322355826e-06, + "loss": 0.9652, + "step": 4536 + }, + { + "epoch": 0.40916264598457863, + "grad_norm": 1.6639708014640762, + "learning_rate": 2.6722238384695644e-06, + "loss": 0.9395, + "step": 4537 + }, + { + "epoch": 0.40925282950804887, + "grad_norm": 1.468863219097628, + "learning_rate": 2.671673587345702e-06, + "loss": 0.962, + "step": 4538 + }, + { + "epoch": 0.40934301303151915, + "grad_norm": 0.8306746908441187, + "learning_rate": 2.6711232789109455e-06, + "loss": 0.7977, + "step": 4539 + }, + { + "epoch": 0.4094331965549894, + "grad_norm": 1.5961737808276655, + "learning_rate": 2.6705729132122497e-06, + "loss": 0.9544, + "step": 4540 + }, + { + "epoch": 0.4095233800784597, + "grad_norm": 1.650036005147488, + "learning_rate": 2.670022490296576e-06, + "loss": 1.0272, + "step": 4541 + }, + { + "epoch": 0.4096135636019299, + "grad_norm": 1.4912970317576983, + "learning_rate": 2.669472010210889e-06, + "loss": 1.0102, + "step": 4542 + }, + { + "epoch": 0.4097037471254002, + "grad_norm": 1.7573668868769414, + "learning_rate": 2.668921473002159e-06, + "loss": 0.9285, + "step": 4543 + }, + { + "epoch": 0.40979393064887043, + "grad_norm": 1.370600815545294, + "learning_rate": 2.6683708787173596e-06, + "loss": 1.0079, + "step": 4544 + }, + { + "epoch": 0.4098841141723407, + "grad_norm": 1.4383278826230506, + "learning_rate": 2.6678202274034718e-06, + "loss": 0.9752, + "step": 4545 + }, + { + "epoch": 0.40997429769581095, + "grad_norm": 1.590233082631391, + "learning_rate": 2.66726951910748e-06, + "loss": 1.0352, + "step": 4546 + }, + { + "epoch": 0.41006448121928124, + "grad_norm": 1.4141924466443507, + "learning_rate": 2.6667187538763737e-06, + "loss": 0.9545, + "step": 4547 + }, + { + "epoch": 0.4101546647427515, + "grad_norm": 
1.461870762369147, + "learning_rate": 2.6661679317571473e-06, + "loss": 0.9389, + "step": 4548 + }, + { + "epoch": 0.41024484826622176, + "grad_norm": 1.4232033194601321, + "learning_rate": 2.665617052796799e-06, + "loss": 0.9481, + "step": 4549 + }, + { + "epoch": 0.41033503178969205, + "grad_norm": 1.5210459399436518, + "learning_rate": 2.6650661170423346e-06, + "loss": 0.8412, + "step": 4550 + }, + { + "epoch": 0.4104252153131623, + "grad_norm": 1.6502101099734894, + "learning_rate": 2.6645151245407614e-06, + "loss": 1.0291, + "step": 4551 + }, + { + "epoch": 0.4105153988366326, + "grad_norm": 1.4845144909016212, + "learning_rate": 2.6639640753390936e-06, + "loss": 0.9517, + "step": 4552 + }, + { + "epoch": 0.4106055823601028, + "grad_norm": 1.4785642426936811, + "learning_rate": 2.66341296948435e-06, + "loss": 0.8643, + "step": 4553 + }, + { + "epoch": 0.4106957658835731, + "grad_norm": 1.184645534809559, + "learning_rate": 2.6628618070235534e-06, + "loss": 0.961, + "step": 4554 + }, + { + "epoch": 0.4107859494070433, + "grad_norm": 1.5400536922904113, + "learning_rate": 2.662310588003733e-06, + "loss": 0.9619, + "step": 4555 + }, + { + "epoch": 0.4108761329305136, + "grad_norm": 1.6312349720697485, + "learning_rate": 2.6617593124719205e-06, + "loss": 0.9335, + "step": 4556 + }, + { + "epoch": 0.41096631645398385, + "grad_norm": 1.3342774992080484, + "learning_rate": 2.661207980475155e-06, + "loss": 0.9604, + "step": 4557 + }, + { + "epoch": 0.41105649997745414, + "grad_norm": 1.7520331425079518, + "learning_rate": 2.6606565920604793e-06, + "loss": 0.911, + "step": 4558 + }, + { + "epoch": 0.41114668350092437, + "grad_norm": 1.3437471611525245, + "learning_rate": 2.66010514727494e-06, + "loss": 1.03, + "step": 4559 + }, + { + "epoch": 0.41123686702439466, + "grad_norm": 1.78700384443368, + "learning_rate": 2.659553646165589e-06, + "loss": 0.8466, + "step": 4560 + }, + { + "epoch": 0.4113270505478649, + "grad_norm": 1.3907441934659446, + "learning_rate": 2.659002088779485e-06, + "loss": 0.9294, + "step": 4561 + }, + { + "epoch": 0.4114172340713352, + "grad_norm": 0.7542393398220684, + "learning_rate": 2.6584504751636888e-06, + "loss": 0.7505, + "step": 4562 + }, + { + "epoch": 0.4115074175948054, + "grad_norm": 1.5558839727550462, + "learning_rate": 2.657898805365268e-06, + "loss": 0.9239, + "step": 4563 + }, + { + "epoch": 0.4115976011182757, + "grad_norm": 1.3536206489540263, + "learning_rate": 2.657347079431293e-06, + "loss": 0.9253, + "step": 4564 + }, + { + "epoch": 0.41168778464174594, + "grad_norm": 1.2172865762407732, + "learning_rate": 2.6567952974088403e-06, + "loss": 0.922, + "step": 4565 + }, + { + "epoch": 0.4117779681652162, + "grad_norm": 1.2889905042483514, + "learning_rate": 2.6562434593449917e-06, + "loss": 1.0087, + "step": 4566 + }, + { + "epoch": 0.41186815168868646, + "grad_norm": 1.682205583793018, + "learning_rate": 2.6556915652868325e-06, + "loss": 0.9749, + "step": 4567 + }, + { + "epoch": 0.41195833521215675, + "grad_norm": 1.7379523834141057, + "learning_rate": 2.6551396152814534e-06, + "loss": 0.9564, + "step": 4568 + }, + { + "epoch": 0.412048518735627, + "grad_norm": 1.408233176663349, + "learning_rate": 2.65458760937595e-06, + "loss": 0.9417, + "step": 4569 + }, + { + "epoch": 0.41213870225909727, + "grad_norm": 1.1451357654389878, + "learning_rate": 2.654035547617423e-06, + "loss": 0.9011, + "step": 4570 + }, + { + "epoch": 0.4122288857825675, + "grad_norm": 1.5795758065450471, + "learning_rate": 2.653483430052976e-06, + "loss": 0.8598, + "step": 4571 + 
}, + { + "epoch": 0.4123190693060378, + "grad_norm": 1.5987659881245708, + "learning_rate": 2.6529312567297197e-06, + "loss": 0.9351, + "step": 4572 + }, + { + "epoch": 0.412409252829508, + "grad_norm": 1.534534901291349, + "learning_rate": 2.652379027694768e-06, + "loss": 1.0237, + "step": 4573 + }, + { + "epoch": 0.4124994363529783, + "grad_norm": 1.6473753341024935, + "learning_rate": 2.651826742995241e-06, + "loss": 1.0064, + "step": 4574 + }, + { + "epoch": 0.4125896198764486, + "grad_norm": 1.3087584890037458, + "learning_rate": 2.651274402678262e-06, + "loss": 0.9453, + "step": 4575 + }, + { + "epoch": 0.41267980339991883, + "grad_norm": 1.3229549983120736, + "learning_rate": 2.6507220067909597e-06, + "loss": 0.9774, + "step": 4576 + }, + { + "epoch": 0.4127699869233891, + "grad_norm": 1.7178294404200614, + "learning_rate": 2.650169555380468e-06, + "loss": 0.9104, + "step": 4577 + }, + { + "epoch": 0.41286017044685935, + "grad_norm": 1.3150668249084967, + "learning_rate": 2.6496170484939253e-06, + "loss": 0.9466, + "step": 4578 + }, + { + "epoch": 0.41295035397032964, + "grad_norm": 1.4752441656750093, + "learning_rate": 2.6490644861784735e-06, + "loss": 0.9647, + "step": 4579 + }, + { + "epoch": 0.4130405374937999, + "grad_norm": 1.3847063963884976, + "learning_rate": 2.648511868481261e-06, + "loss": 0.9731, + "step": 4580 + }, + { + "epoch": 0.41313072101727016, + "grad_norm": 2.772226590433631, + "learning_rate": 2.6479591954494397e-06, + "loss": 0.9478, + "step": 4581 + }, + { + "epoch": 0.4132209045407404, + "grad_norm": 1.4011608437071204, + "learning_rate": 2.647406467130167e-06, + "loss": 0.8519, + "step": 4582 + }, + { + "epoch": 0.4133110880642107, + "grad_norm": 1.539979922671541, + "learning_rate": 2.646853683570605e-06, + "loss": 0.9716, + "step": 4583 + }, + { + "epoch": 0.4134012715876809, + "grad_norm": 1.3721905964430063, + "learning_rate": 2.6463008448179196e-06, + "loss": 0.8944, + "step": 4584 + }, + { + "epoch": 0.4134914551111512, + "grad_norm": 1.1485581626563173, + "learning_rate": 2.6457479509192828e-06, + "loss": 0.9066, + "step": 4585 + }, + { + "epoch": 0.41358163863462144, + "grad_norm": 1.612250564563266, + "learning_rate": 2.645195001921871e-06, + "loss": 0.9397, + "step": 4586 + }, + { + "epoch": 0.41367182215809173, + "grad_norm": 1.5584436788947058, + "learning_rate": 2.644641997872863e-06, + "loss": 0.9226, + "step": 4587 + }, + { + "epoch": 0.41376200568156196, + "grad_norm": 1.7864038961437223, + "learning_rate": 2.644088938819445e-06, + "loss": 0.9571, + "step": 4588 + }, + { + "epoch": 0.41385218920503225, + "grad_norm": 1.5224395613426775, + "learning_rate": 2.6435358248088077e-06, + "loss": 1.0269, + "step": 4589 + }, + { + "epoch": 0.4139423727285025, + "grad_norm": 1.569714332638295, + "learning_rate": 2.642982655888146e-06, + "loss": 0.9956, + "step": 4590 + }, + { + "epoch": 0.41403255625197277, + "grad_norm": 1.5471435467897505, + "learning_rate": 2.6424294321046585e-06, + "loss": 0.9179, + "step": 4591 + }, + { + "epoch": 0.414122739775443, + "grad_norm": 0.8680402669619905, + "learning_rate": 2.641876153505549e-06, + "loss": 0.7941, + "step": 4592 + }, + { + "epoch": 0.4142129232989133, + "grad_norm": 1.9197691879018959, + "learning_rate": 2.641322820138027e-06, + "loss": 0.9618, + "step": 4593 + }, + { + "epoch": 0.4143031068223835, + "grad_norm": 1.6553578403075793, + "learning_rate": 2.640769432049306e-06, + "loss": 0.9093, + "step": 4594 + }, + { + "epoch": 0.4143932903458538, + "grad_norm": 1.3597276164514924, + "learning_rate": 
2.6402159892866038e-06, + "loss": 0.8754, + "step": 4595 + }, + { + "epoch": 0.41448347386932405, + "grad_norm": 1.1825068049809666, + "learning_rate": 2.639662491897143e-06, + "loss": 0.9417, + "step": 4596 + }, + { + "epoch": 0.41457365739279434, + "grad_norm": 1.3850705519929691, + "learning_rate": 2.639108939928152e-06, + "loss": 0.964, + "step": 4597 + }, + { + "epoch": 0.4146638409162646, + "grad_norm": 1.6124822748673782, + "learning_rate": 2.638555333426862e-06, + "loss": 0.959, + "step": 4598 + }, + { + "epoch": 0.41475402443973486, + "grad_norm": 1.254746295316205, + "learning_rate": 2.6380016724405093e-06, + "loss": 0.9875, + "step": 4599 + }, + { + "epoch": 0.41484420796320515, + "grad_norm": 1.4399764249779188, + "learning_rate": 2.637447957016336e-06, + "loss": 0.9083, + "step": 4600 + }, + { + "epoch": 0.4149343914866754, + "grad_norm": 1.599548189625573, + "learning_rate": 2.636894187201589e-06, + "loss": 1.0107, + "step": 4601 + }, + { + "epoch": 0.41502457501014567, + "grad_norm": 1.4680454918259709, + "learning_rate": 2.6363403630435176e-06, + "loss": 0.8553, + "step": 4602 + }, + { + "epoch": 0.4151147585336159, + "grad_norm": 1.5478339257423832, + "learning_rate": 2.635786484589378e-06, + "loss": 0.9419, + "step": 4603 + }, + { + "epoch": 0.4152049420570862, + "grad_norm": 1.6359800433408327, + "learning_rate": 2.63523255188643e-06, + "loss": 0.8623, + "step": 4604 + }, + { + "epoch": 0.4152951255805564, + "grad_norm": 1.2458196834892383, + "learning_rate": 2.6346785649819375e-06, + "loss": 0.9755, + "step": 4605 + }, + { + "epoch": 0.4153853091040267, + "grad_norm": 1.4299661333568734, + "learning_rate": 2.6341245239231706e-06, + "loss": 0.9776, + "step": 4606 + }, + { + "epoch": 0.41547549262749695, + "grad_norm": 1.0359208647944373, + "learning_rate": 2.6335704287574024e-06, + "loss": 0.9595, + "step": 4607 + }, + { + "epoch": 0.41556567615096723, + "grad_norm": 1.6195790763598175, + "learning_rate": 2.6330162795319124e-06, + "loss": 1.0149, + "step": 4608 + }, + { + "epoch": 0.41565585967443747, + "grad_norm": 1.6112417116980384, + "learning_rate": 2.632462076293983e-06, + "loss": 1.0076, + "step": 4609 + }, + { + "epoch": 0.41574604319790776, + "grad_norm": 1.326372493800128, + "learning_rate": 2.6319078190909017e-06, + "loss": 0.9531, + "step": 4610 + }, + { + "epoch": 0.415836226721378, + "grad_norm": 1.683405181874796, + "learning_rate": 2.6313535079699606e-06, + "loss": 0.8704, + "step": 4611 + }, + { + "epoch": 0.4159264102448483, + "grad_norm": 1.539675750161103, + "learning_rate": 2.6307991429784572e-06, + "loss": 0.9339, + "step": 4612 + }, + { + "epoch": 0.4160165937683185, + "grad_norm": 1.514909398356711, + "learning_rate": 2.6302447241636924e-06, + "loss": 1.0028, + "step": 4613 + }, + { + "epoch": 0.4161067772917888, + "grad_norm": 1.456459321789394, + "learning_rate": 2.629690251572973e-06, + "loss": 1.0316, + "step": 4614 + }, + { + "epoch": 0.41619696081525903, + "grad_norm": 1.7574685671560746, + "learning_rate": 2.629135725253609e-06, + "loss": 0.9261, + "step": 4615 + }, + { + "epoch": 0.4162871443387293, + "grad_norm": 1.5473991816007362, + "learning_rate": 2.6285811452529162e-06, + "loss": 0.9435, + "step": 4616 + }, + { + "epoch": 0.41637732786219955, + "grad_norm": 1.2582345100569603, + "learning_rate": 2.6280265116182136e-06, + "loss": 0.9682, + "step": 4617 + }, + { + "epoch": 0.41646751138566984, + "grad_norm": 1.5027478797807525, + "learning_rate": 2.6274718243968266e-06, + "loss": 0.9528, + "step": 4618 + }, + { + "epoch": 
0.4165576949091401, + "grad_norm": 1.500622540670125, + "learning_rate": 2.626917083636084e-06, + "loss": 0.8738, + "step": 4619 + }, + { + "epoch": 0.41664787843261036, + "grad_norm": 1.3005193498221315, + "learning_rate": 2.6263622893833183e-06, + "loss": 0.8822, + "step": 4620 + }, + { + "epoch": 0.4167380619560806, + "grad_norm": 1.4635033743884274, + "learning_rate": 2.625807441685869e-06, + "loss": 0.8935, + "step": 4621 + }, + { + "epoch": 0.4168282454795509, + "grad_norm": 0.7089918151969499, + "learning_rate": 2.625252540591078e-06, + "loss": 0.7725, + "step": 4622 + }, + { + "epoch": 0.4169184290030212, + "grad_norm": 1.5110659902667778, + "learning_rate": 2.6246975861462927e-06, + "loss": 1.0208, + "step": 4623 + }, + { + "epoch": 0.4170086125264914, + "grad_norm": 1.43958231166393, + "learning_rate": 2.624142578398864e-06, + "loss": 0.9896, + "step": 4624 + }, + { + "epoch": 0.4170987960499617, + "grad_norm": 1.2586395672083874, + "learning_rate": 2.6235875173961498e-06, + "loss": 0.9386, + "step": 4625 + }, + { + "epoch": 0.41718897957343193, + "grad_norm": 1.2997133837702362, + "learning_rate": 2.62303240318551e-06, + "loss": 0.8816, + "step": 4626 + }, + { + "epoch": 0.4172791630969022, + "grad_norm": 1.7532823978152086, + "learning_rate": 2.62247723581431e-06, + "loss": 1.0217, + "step": 4627 + }, + { + "epoch": 0.41736934662037245, + "grad_norm": 1.7399520559392327, + "learning_rate": 2.62192201532992e-06, + "loss": 0.9536, + "step": 4628 + }, + { + "epoch": 0.41745953014384274, + "grad_norm": 1.6745828108997516, + "learning_rate": 2.6213667417797145e-06, + "loss": 0.9689, + "step": 4629 + }, + { + "epoch": 0.41754971366731297, + "grad_norm": 1.3001891383717432, + "learning_rate": 2.6208114152110725e-06, + "loss": 0.9784, + "step": 4630 + }, + { + "epoch": 0.41763989719078326, + "grad_norm": 1.535760736238216, + "learning_rate": 2.6202560356713774e-06, + "loss": 0.9689, + "step": 4631 + }, + { + "epoch": 0.4177300807142535, + "grad_norm": 0.7170656204548078, + "learning_rate": 2.619700603208017e-06, + "loss": 0.8691, + "step": 4632 + }, + { + "epoch": 0.4178202642377238, + "grad_norm": 1.2245804145953862, + "learning_rate": 2.6191451178683842e-06, + "loss": 0.8707, + "step": 4633 + }, + { + "epoch": 0.417910447761194, + "grad_norm": 1.4483230990072768, + "learning_rate": 2.6185895796998764e-06, + "loss": 0.9735, + "step": 4634 + }, + { + "epoch": 0.4180006312846643, + "grad_norm": 2.1953618054266304, + "learning_rate": 2.6180339887498946e-06, + "loss": 1.0371, + "step": 4635 + }, + { + "epoch": 0.41809081480813454, + "grad_norm": 1.6838357490392082, + "learning_rate": 2.617478345065846e-06, + "loss": 0.9013, + "step": 4636 + }, + { + "epoch": 0.4181809983316048, + "grad_norm": 1.6507880640859645, + "learning_rate": 2.616922648695139e-06, + "loss": 0.9644, + "step": 4637 + }, + { + "epoch": 0.41827118185507506, + "grad_norm": 3.7791824362922446, + "learning_rate": 2.61636689968519e-06, + "loss": 0.8454, + "step": 4638 + }, + { + "epoch": 0.41836136537854535, + "grad_norm": 2.0107287887712277, + "learning_rate": 2.6158110980834186e-06, + "loss": 1.0241, + "step": 4639 + }, + { + "epoch": 0.4184515489020156, + "grad_norm": 1.5696605638596148, + "learning_rate": 2.615255243937249e-06, + "loss": 1.0051, + "step": 4640 + }, + { + "epoch": 0.41854173242548587, + "grad_norm": 2.117655234383403, + "learning_rate": 2.61469933729411e-06, + "loss": 0.974, + "step": 4641 + }, + { + "epoch": 0.4186319159489561, + "grad_norm": 1.5214966760023039, + "learning_rate": 2.614143378201433e-06, 
+ "loss": 0.9258, + "step": 4642 + }, + { + "epoch": 0.4187220994724264, + "grad_norm": 1.285713932816896, + "learning_rate": 2.6135873667066567e-06, + "loss": 0.9282, + "step": 4643 + }, + { + "epoch": 0.4188122829958966, + "grad_norm": 1.4795330381891239, + "learning_rate": 2.613031302857224e-06, + "loss": 0.9087, + "step": 4644 + }, + { + "epoch": 0.4189024665193669, + "grad_norm": 1.529730529491677, + "learning_rate": 2.6124751867005792e-06, + "loss": 0.9735, + "step": 4645 + }, + { + "epoch": 0.4189926500428372, + "grad_norm": 1.2585422933941393, + "learning_rate": 2.611919018284175e-06, + "loss": 0.9998, + "step": 4646 + }, + { + "epoch": 0.41908283356630743, + "grad_norm": 1.6981141457265185, + "learning_rate": 2.611362797655466e-06, + "loss": 0.9973, + "step": 4647 + }, + { + "epoch": 0.4191730170897777, + "grad_norm": 1.2694412434289717, + "learning_rate": 2.6108065248619124e-06, + "loss": 1.019, + "step": 4648 + }, + { + "epoch": 0.41926320061324795, + "grad_norm": 1.2120008660398354, + "learning_rate": 2.610250199950978e-06, + "loss": 0.8882, + "step": 4649 + }, + { + "epoch": 0.41935338413671824, + "grad_norm": 1.5962183667949095, + "learning_rate": 2.609693822970131e-06, + "loss": 0.9271, + "step": 4650 + }, + { + "epoch": 0.4194435676601885, + "grad_norm": 1.8937207562797882, + "learning_rate": 2.609137393966846e-06, + "loss": 0.9132, + "step": 4651 + }, + { + "epoch": 0.41953375118365877, + "grad_norm": 1.4126427198567348, + "learning_rate": 2.6085809129886e-06, + "loss": 0.9076, + "step": 4652 + }, + { + "epoch": 0.419623934707129, + "grad_norm": 1.7558525857592602, + "learning_rate": 2.608024380082874e-06, + "loss": 0.9682, + "step": 4653 + }, + { + "epoch": 0.4197141182305993, + "grad_norm": 1.5121401808056223, + "learning_rate": 2.6074677952971554e-06, + "loss": 0.959, + "step": 4654 + }, + { + "epoch": 0.4198043017540695, + "grad_norm": 2.783261032840562, + "learning_rate": 2.606911158678935e-06, + "loss": 0.8797, + "step": 4655 + }, + { + "epoch": 0.4198944852775398, + "grad_norm": 1.545743200685421, + "learning_rate": 2.606354470275708e-06, + "loss": 0.9503, + "step": 4656 + }, + { + "epoch": 0.41998466880101004, + "grad_norm": 1.6792213680296264, + "learning_rate": 2.6057977301349744e-06, + "loss": 0.9229, + "step": 4657 + }, + { + "epoch": 0.42007485232448033, + "grad_norm": 1.4447776010823796, + "learning_rate": 2.6052409383042383e-06, + "loss": 0.9177, + "step": 4658 + }, + { + "epoch": 0.42016503584795056, + "grad_norm": 1.340594867999011, + "learning_rate": 2.6046840948310074e-06, + "loss": 0.9281, + "step": 4659 + }, + { + "epoch": 0.42025521937142085, + "grad_norm": 1.3563051309235528, + "learning_rate": 2.6041271997627962e-06, + "loss": 0.9545, + "step": 4660 + }, + { + "epoch": 0.4203454028948911, + "grad_norm": 1.6726495250012594, + "learning_rate": 2.6035702531471202e-06, + "loss": 0.9393, + "step": 4661 + }, + { + "epoch": 0.4204355864183614, + "grad_norm": 1.2794336143972447, + "learning_rate": 2.6030132550315035e-06, + "loss": 0.9542, + "step": 4662 + }, + { + "epoch": 0.4205257699418316, + "grad_norm": 0.6898033614124012, + "learning_rate": 2.60245620546347e-06, + "loss": 0.8323, + "step": 4663 + }, + { + "epoch": 0.4206159534653019, + "grad_norm": 1.4947847461429473, + "learning_rate": 2.6018991044905517e-06, + "loss": 0.9221, + "step": 4664 + }, + { + "epoch": 0.42070613698877213, + "grad_norm": 1.3405251508044413, + "learning_rate": 2.6013419521602825e-06, + "loss": 0.9912, + "step": 4665 + }, + { + "epoch": 0.4207963205122424, + "grad_norm": 
1.595021582071785, + "learning_rate": 2.600784748520202e-06, + "loss": 1.0799, + "step": 4666 + }, + { + "epoch": 0.42088650403571265, + "grad_norm": 1.597955950768454, + "learning_rate": 2.6002274936178544e-06, + "loss": 0.8804, + "step": 4667 + }, + { + "epoch": 0.42097668755918294, + "grad_norm": 1.5008073859096076, + "learning_rate": 2.5996701875007873e-06, + "loss": 0.9616, + "step": 4668 + }, + { + "epoch": 0.4210668710826532, + "grad_norm": 1.3916102573056224, + "learning_rate": 2.5991128302165533e-06, + "loss": 0.9418, + "step": 4669 + }, + { + "epoch": 0.42115705460612346, + "grad_norm": 1.352024197962868, + "learning_rate": 2.5985554218127094e-06, + "loss": 1.0221, + "step": 4670 + }, + { + "epoch": 0.42124723812959375, + "grad_norm": 1.5976034216319326, + "learning_rate": 2.597997962336816e-06, + "loss": 0.8826, + "step": 4671 + }, + { + "epoch": 0.421337421653064, + "grad_norm": 1.7955198402296548, + "learning_rate": 2.5974404518364393e-06, + "loss": 0.9437, + "step": 4672 + }, + { + "epoch": 0.42142760517653427, + "grad_norm": 1.552436387360573, + "learning_rate": 2.596882890359149e-06, + "loss": 1.0581, + "step": 4673 + }, + { + "epoch": 0.4215177887000045, + "grad_norm": 1.7736680473337079, + "learning_rate": 2.5963252779525196e-06, + "loss": 0.872, + "step": 4674 + }, + { + "epoch": 0.4216079722234748, + "grad_norm": 1.6545444559976497, + "learning_rate": 2.595767614664129e-06, + "loss": 0.9508, + "step": 4675 + }, + { + "epoch": 0.421698155746945, + "grad_norm": 1.5256043502991636, + "learning_rate": 2.5952099005415607e-06, + "loss": 0.9232, + "step": 4676 + }, + { + "epoch": 0.4217883392704153, + "grad_norm": 1.5918706397066882, + "learning_rate": 2.594652135632402e-06, + "loss": 0.9095, + "step": 4677 + }, + { + "epoch": 0.42187852279388555, + "grad_norm": 1.2180634545582771, + "learning_rate": 2.594094319984244e-06, + "loss": 0.9042, + "step": 4678 + }, + { + "epoch": 0.42196870631735584, + "grad_norm": 1.3970905402217888, + "learning_rate": 2.5935364536446825e-06, + "loss": 1.0064, + "step": 4679 + }, + { + "epoch": 0.42205888984082607, + "grad_norm": 2.0397786603789645, + "learning_rate": 2.5929785366613185e-06, + "loss": 0.9463, + "step": 4680 + }, + { + "epoch": 0.42214907336429636, + "grad_norm": 1.5383162649963575, + "learning_rate": 2.592420569081756e-06, + "loss": 0.9914, + "step": 4681 + }, + { + "epoch": 0.4222392568877666, + "grad_norm": 1.4999175049031506, + "learning_rate": 2.5918625509536037e-06, + "loss": 0.9091, + "step": 4682 + }, + { + "epoch": 0.4223294404112369, + "grad_norm": 0.7502978448735416, + "learning_rate": 2.591304482324475e-06, + "loss": 0.7956, + "step": 4683 + }, + { + "epoch": 0.4224196239347071, + "grad_norm": 1.4363898261177093, + "learning_rate": 2.5907463632419878e-06, + "loss": 0.9632, + "step": 4684 + }, + { + "epoch": 0.4225098074581774, + "grad_norm": 1.4351430727250416, + "learning_rate": 2.5901881937537632e-06, + "loss": 0.918, + "step": 4685 + }, + { + "epoch": 0.42259999098164763, + "grad_norm": 0.8678791964777139, + "learning_rate": 2.589629973907428e-06, + "loss": 0.8562, + "step": 4686 + }, + { + "epoch": 0.4226901745051179, + "grad_norm": 1.5063258140130362, + "learning_rate": 2.589071703750612e-06, + "loss": 0.9025, + "step": 4687 + }, + { + "epoch": 0.42278035802858815, + "grad_norm": 1.3172018663521157, + "learning_rate": 2.5885133833309504e-06, + "loss": 0.9923, + "step": 4688 + }, + { + "epoch": 0.42287054155205844, + "grad_norm": 1.4305086539685734, + "learning_rate": 2.5879550126960814e-06, + "loss": 1.031, + 
"step": 4689 + }, + { + "epoch": 0.4229607250755287, + "grad_norm": 1.424382775466673, + "learning_rate": 2.5873965918936494e-06, + "loss": 1.0079, + "step": 4690 + }, + { + "epoch": 0.42305090859899896, + "grad_norm": 1.7168258300006005, + "learning_rate": 2.586838120971301e-06, + "loss": 0.9699, + "step": 4691 + }, + { + "epoch": 0.4231410921224692, + "grad_norm": 1.2974475837513022, + "learning_rate": 2.586279599976689e-06, + "loss": 1.0052, + "step": 4692 + }, + { + "epoch": 0.4232312756459395, + "grad_norm": 1.4217888942259547, + "learning_rate": 2.585721028957468e-06, + "loss": 0.9511, + "step": 4693 + }, + { + "epoch": 0.4233214591694098, + "grad_norm": 1.2943811452766691, + "learning_rate": 2.585162407961299e-06, + "loss": 0.9624, + "step": 4694 + }, + { + "epoch": 0.42341164269288, + "grad_norm": 1.9237170192179218, + "learning_rate": 2.584603737035847e-06, + "loss": 0.8873, + "step": 4695 + }, + { + "epoch": 0.4235018262163503, + "grad_norm": 1.6926761643703052, + "learning_rate": 2.5840450162287806e-06, + "loss": 0.9454, + "step": 4696 + }, + { + "epoch": 0.42359200973982053, + "grad_norm": 1.4825436186525087, + "learning_rate": 2.583486245587774e-06, + "loss": 0.8678, + "step": 4697 + }, + { + "epoch": 0.4236821932632908, + "grad_norm": 1.4041140122922287, + "learning_rate": 2.5829274251605023e-06, + "loss": 1.0524, + "step": 4698 + }, + { + "epoch": 0.42377237678676105, + "grad_norm": 1.3672720310781936, + "learning_rate": 2.582368554994649e-06, + "loss": 0.9275, + "step": 4699 + }, + { + "epoch": 0.42386256031023134, + "grad_norm": 0.8383722943782304, + "learning_rate": 2.5818096351378994e-06, + "loss": 0.854, + "step": 4700 + }, + { + "epoch": 0.4239527438337016, + "grad_norm": 1.4966659845946753, + "learning_rate": 2.5812506656379435e-06, + "loss": 1.003, + "step": 4701 + }, + { + "epoch": 0.42404292735717186, + "grad_norm": 1.4354861707243796, + "learning_rate": 2.580691646542476e-06, + "loss": 0.8968, + "step": 4702 + }, + { + "epoch": 0.4241331108806421, + "grad_norm": 1.4977846475642371, + "learning_rate": 2.5801325778991958e-06, + "loss": 0.9954, + "step": 4703 + }, + { + "epoch": 0.4242232944041124, + "grad_norm": 2.0249235962947587, + "learning_rate": 2.5795734597558043e-06, + "loss": 0.8036, + "step": 4704 + }, + { + "epoch": 0.4243134779275826, + "grad_norm": 1.5377775143397723, + "learning_rate": 2.579014292160011e-06, + "loss": 0.9529, + "step": 4705 + }, + { + "epoch": 0.4244036614510529, + "grad_norm": 1.186703314313522, + "learning_rate": 2.5784550751595236e-06, + "loss": 0.9382, + "step": 4706 + }, + { + "epoch": 0.42449384497452314, + "grad_norm": 1.2683551211413546, + "learning_rate": 2.577895808802061e-06, + "loss": 1.0109, + "step": 4707 + }, + { + "epoch": 0.4245840284979934, + "grad_norm": 1.5562922950726952, + "learning_rate": 2.577336493135341e-06, + "loss": 0.8717, + "step": 4708 + }, + { + "epoch": 0.42467421202146366, + "grad_norm": 1.1334085409137122, + "learning_rate": 2.576777128207088e-06, + "loss": 1.0401, + "step": 4709 + }, + { + "epoch": 0.42476439554493395, + "grad_norm": 1.3349859853430361, + "learning_rate": 2.5762177140650306e-06, + "loss": 0.9156, + "step": 4710 + }, + { + "epoch": 0.4248545790684042, + "grad_norm": 1.6331677210140292, + "learning_rate": 2.5756582507569003e-06, + "loss": 0.964, + "step": 4711 + }, + { + "epoch": 0.42494476259187447, + "grad_norm": 1.2946197939918436, + "learning_rate": 2.5750987383304335e-06, + "loss": 0.9718, + "step": 4712 + }, + { + "epoch": 0.4250349461153447, + "grad_norm": 1.7155581574195167, + 
"learning_rate": 2.574539176833372e-06, + "loss": 0.937, + "step": 4713 + }, + { + "epoch": 0.425125129638815, + "grad_norm": 1.383929862623887, + "learning_rate": 2.5739795663134594e-06, + "loss": 0.8922, + "step": 4714 + }, + { + "epoch": 0.4252153131622852, + "grad_norm": 1.3520123830008128, + "learning_rate": 2.5734199068184454e-06, + "loss": 1.041, + "step": 4715 + }, + { + "epoch": 0.4253054966857555, + "grad_norm": 1.8534363226210682, + "learning_rate": 2.572860198396083e-06, + "loss": 0.8724, + "step": 4716 + }, + { + "epoch": 0.4253956802092258, + "grad_norm": 1.3102267879270737, + "learning_rate": 2.57230044109413e-06, + "loss": 0.9814, + "step": 4717 + }, + { + "epoch": 0.42548586373269603, + "grad_norm": 1.5630704220010438, + "learning_rate": 2.5717406349603483e-06, + "loss": 0.9312, + "step": 4718 + }, + { + "epoch": 0.4255760472561663, + "grad_norm": 1.4185229182739978, + "learning_rate": 2.5711807800425026e-06, + "loss": 0.9773, + "step": 4719 + }, + { + "epoch": 0.42566623077963656, + "grad_norm": 1.8277159665267673, + "learning_rate": 2.5706208763883633e-06, + "loss": 0.795, + "step": 4720 + }, + { + "epoch": 0.42575641430310684, + "grad_norm": 1.5134941144361889, + "learning_rate": 2.570060924045704e-06, + "loss": 0.9044, + "step": 4721 + }, + { + "epoch": 0.4258465978265771, + "grad_norm": 1.417126870672198, + "learning_rate": 2.569500923062304e-06, + "loss": 0.9771, + "step": 4722 + }, + { + "epoch": 0.42593678135004737, + "grad_norm": 1.3525295436262514, + "learning_rate": 2.5689408734859445e-06, + "loss": 0.8696, + "step": 4723 + }, + { + "epoch": 0.4260269648735176, + "grad_norm": 1.5155281743848004, + "learning_rate": 2.5683807753644127e-06, + "loss": 0.9065, + "step": 4724 + }, + { + "epoch": 0.4261171483969879, + "grad_norm": 1.3863567040862772, + "learning_rate": 2.5678206287454996e-06, + "loss": 0.9274, + "step": 4725 + }, + { + "epoch": 0.4262073319204581, + "grad_norm": 1.5914154157341827, + "learning_rate": 2.567260433676999e-06, + "loss": 0.9755, + "step": 4726 + }, + { + "epoch": 0.4262975154439284, + "grad_norm": 1.4926934308277366, + "learning_rate": 2.5667001902067107e-06, + "loss": 0.9292, + "step": 4727 + }, + { + "epoch": 0.42638769896739864, + "grad_norm": 1.5288315962867485, + "learning_rate": 2.566139898382437e-06, + "loss": 0.9974, + "step": 4728 + }, + { + "epoch": 0.42647788249086893, + "grad_norm": 1.378008715163245, + "learning_rate": 2.5655795582519853e-06, + "loss": 0.9618, + "step": 4729 + }, + { + "epoch": 0.42656806601433916, + "grad_norm": 1.428933700539787, + "learning_rate": 2.565019169863168e-06, + "loss": 1.0147, + "step": 4730 + }, + { + "epoch": 0.42665824953780945, + "grad_norm": 0.6573122147271775, + "learning_rate": 2.5644587332637994e-06, + "loss": 0.7662, + "step": 4731 + }, + { + "epoch": 0.4267484330612797, + "grad_norm": 1.3130459785197348, + "learning_rate": 2.5638982485016994e-06, + "loss": 0.979, + "step": 4732 + }, + { + "epoch": 0.42683861658475, + "grad_norm": 0.9681079952157692, + "learning_rate": 2.5633377156246917e-06, + "loss": 0.794, + "step": 4733 + }, + { + "epoch": 0.4269288001082202, + "grad_norm": 1.6542270470015947, + "learning_rate": 2.562777134680603e-06, + "loss": 0.9352, + "step": 4734 + }, + { + "epoch": 0.4270189836316905, + "grad_norm": 1.649555712908113, + "learning_rate": 2.562216505717267e-06, + "loss": 0.957, + "step": 4735 + }, + { + "epoch": 0.42710916715516073, + "grad_norm": 1.3874859903676475, + "learning_rate": 2.561655828782518e-06, + "loss": 0.9777, + "step": 4736 + }, + { + "epoch": 
0.427199350678631, + "grad_norm": 1.5663397339660792, + "learning_rate": 2.561095103924197e-06, + "loss": 0.9302, + "step": 4737 + }, + { + "epoch": 0.42728953420210125, + "grad_norm": 1.5672622586767455, + "learning_rate": 2.560534331190148e-06, + "loss": 0.9036, + "step": 4738 + }, + { + "epoch": 0.42737971772557154, + "grad_norm": 0.6724152499901127, + "learning_rate": 2.559973510628218e-06, + "loss": 0.7691, + "step": 4739 + }, + { + "epoch": 0.4274699012490418, + "grad_norm": 22.47780820214163, + "learning_rate": 2.5594126422862615e-06, + "loss": 0.8992, + "step": 4740 + }, + { + "epoch": 0.42756008477251206, + "grad_norm": 1.8478238632598531, + "learning_rate": 2.558851726212134e-06, + "loss": 0.9062, + "step": 4741 + }, + { + "epoch": 0.42765026829598235, + "grad_norm": 1.6414534883814569, + "learning_rate": 2.5582907624536953e-06, + "loss": 0.9274, + "step": 4742 + }, + { + "epoch": 0.4277404518194526, + "grad_norm": 1.7298035731735313, + "learning_rate": 2.557729751058811e-06, + "loss": 0.9654, + "step": 4743 + }, + { + "epoch": 0.42783063534292287, + "grad_norm": 1.474202525621175, + "learning_rate": 2.557168692075348e-06, + "loss": 0.9233, + "step": 4744 + }, + { + "epoch": 0.4279208188663931, + "grad_norm": 1.2482906575074662, + "learning_rate": 2.556607585551181e-06, + "loss": 0.8931, + "step": 4745 + }, + { + "epoch": 0.4280110023898634, + "grad_norm": 1.1030222101923757, + "learning_rate": 2.5560464315341844e-06, + "loss": 0.9013, + "step": 4746 + }, + { + "epoch": 0.4281011859133336, + "grad_norm": 1.5656731238364332, + "learning_rate": 2.555485230072242e-06, + "loss": 0.9461, + "step": 4747 + }, + { + "epoch": 0.4281913694368039, + "grad_norm": 1.7636657917513443, + "learning_rate": 2.5549239812132354e-06, + "loss": 1.0043, + "step": 4748 + }, + { + "epoch": 0.42828155296027415, + "grad_norm": 1.4039928972086335, + "learning_rate": 2.5543626850050556e-06, + "loss": 1.0121, + "step": 4749 + }, + { + "epoch": 0.42837173648374444, + "grad_norm": 1.3103149936614995, + "learning_rate": 2.5538013414955944e-06, + "loss": 1.0027, + "step": 4750 + }, + { + "epoch": 0.42846192000721467, + "grad_norm": 1.3341121584455544, + "learning_rate": 2.5532399507327494e-06, + "loss": 0.9011, + "step": 4751 + }, + { + "epoch": 0.42855210353068496, + "grad_norm": 0.8400460642718183, + "learning_rate": 2.552678512764421e-06, + "loss": 0.8108, + "step": 4752 + }, + { + "epoch": 0.4286422870541552, + "grad_norm": 1.645900129419446, + "learning_rate": 2.5521170276385147e-06, + "loss": 1.0009, + "step": 4753 + }, + { + "epoch": 0.4287324705776255, + "grad_norm": 1.5151438785129334, + "learning_rate": 2.5515554954029394e-06, + "loss": 1.0, + "step": 4754 + }, + { + "epoch": 0.4288226541010957, + "grad_norm": 2.4238185836272215, + "learning_rate": 2.550993916105608e-06, + "loss": 0.9696, + "step": 4755 + }, + { + "epoch": 0.428912837624566, + "grad_norm": 1.362228643010869, + "learning_rate": 2.550432289794437e-06, + "loss": 0.9108, + "step": 4756 + }, + { + "epoch": 0.42900302114803623, + "grad_norm": 1.5358365713586617, + "learning_rate": 2.5498706165173483e-06, + "loss": 0.9586, + "step": 4757 + }, + { + "epoch": 0.4290932046715065, + "grad_norm": 1.2632051575909946, + "learning_rate": 2.5493088963222668e-06, + "loss": 0.9256, + "step": 4758 + }, + { + "epoch": 0.42918338819497676, + "grad_norm": 1.52510511630131, + "learning_rate": 2.548747129257121e-06, + "loss": 0.9957, + "step": 4759 + }, + { + "epoch": 0.42927357171844704, + "grad_norm": 1.5354996702342298, + "learning_rate": 
2.548185315369845e-06, + "loss": 0.8764, + "step": 4760 + }, + { + "epoch": 0.4293637552419173, + "grad_norm": 2.1398749603747453, + "learning_rate": 2.5476234547083746e-06, + "loss": 1.0354, + "step": 4761 + }, + { + "epoch": 0.42945393876538757, + "grad_norm": 1.4697955042489084, + "learning_rate": 2.547061547320652e-06, + "loss": 1.0183, + "step": 4762 + }, + { + "epoch": 0.4295441222888578, + "grad_norm": 1.1981401594354129, + "learning_rate": 2.5464995932546217e-06, + "loss": 0.9343, + "step": 4763 + }, + { + "epoch": 0.4296343058123281, + "grad_norm": 1.6164012232204945, + "learning_rate": 2.545937592558232e-06, + "loss": 0.9108, + "step": 4764 + }, + { + "epoch": 0.4297244893357984, + "grad_norm": 1.4386679838585208, + "learning_rate": 2.5453755452794374e-06, + "loss": 0.8993, + "step": 4765 + }, + { + "epoch": 0.4298146728592686, + "grad_norm": 1.4215683658836904, + "learning_rate": 2.5448134514661938e-06, + "loss": 0.9896, + "step": 4766 + }, + { + "epoch": 0.4299048563827389, + "grad_norm": 1.653697512162051, + "learning_rate": 2.5442513111664623e-06, + "loss": 0.9517, + "step": 4767 + }, + { + "epoch": 0.42999503990620913, + "grad_norm": 1.62735614158392, + "learning_rate": 2.5436891244282084e-06, + "loss": 0.9395, + "step": 4768 + }, + { + "epoch": 0.4300852234296794, + "grad_norm": 1.4524483694857206, + "learning_rate": 2.5431268912994004e-06, + "loss": 1.0459, + "step": 4769 + }, + { + "epoch": 0.43017540695314965, + "grad_norm": 1.5242187062023476, + "learning_rate": 2.5425646118280108e-06, + "loss": 0.9216, + "step": 4770 + }, + { + "epoch": 0.43026559047661994, + "grad_norm": 0.9951474650290669, + "learning_rate": 2.5420022860620172e-06, + "loss": 0.8014, + "step": 4771 + }, + { + "epoch": 0.4303557740000902, + "grad_norm": 1.7563537648455072, + "learning_rate": 2.5414399140493995e-06, + "loss": 0.8774, + "step": 4772 + }, + { + "epoch": 0.43044595752356046, + "grad_norm": 1.69455276552983, + "learning_rate": 2.5408774958381436e-06, + "loss": 0.883, + "step": 4773 + }, + { + "epoch": 0.4305361410470307, + "grad_norm": 1.90386820345413, + "learning_rate": 2.540315031476237e-06, + "loss": 0.9921, + "step": 4774 + }, + { + "epoch": 0.430626324570501, + "grad_norm": 1.5518273349406086, + "learning_rate": 2.5397525210116737e-06, + "loss": 0.9546, + "step": 4775 + }, + { + "epoch": 0.4307165080939712, + "grad_norm": 1.2959438336146085, + "learning_rate": 2.539189964492448e-06, + "loss": 0.9015, + "step": 4776 + }, + { + "epoch": 0.4308066916174415, + "grad_norm": 1.7849790789876798, + "learning_rate": 2.5386273619665613e-06, + "loss": 0.9973, + "step": 4777 + }, + { + "epoch": 0.43089687514091174, + "grad_norm": 0.7710275190639834, + "learning_rate": 2.5380647134820186e-06, + "loss": 0.8045, + "step": 4778 + }, + { + "epoch": 0.43098705866438203, + "grad_norm": 1.2763650393595638, + "learning_rate": 2.5375020190868277e-06, + "loss": 1.0049, + "step": 4779 + }, + { + "epoch": 0.43107724218785226, + "grad_norm": 1.4814254367126225, + "learning_rate": 2.536939278829001e-06, + "loss": 0.9425, + "step": 4780 + }, + { + "epoch": 0.43116742571132255, + "grad_norm": 1.6594100890966574, + "learning_rate": 2.5363764927565536e-06, + "loss": 1.0125, + "step": 4781 + }, + { + "epoch": 0.4312576092347928, + "grad_norm": 2.3513262557177463, + "learning_rate": 2.5358136609175064e-06, + "loss": 0.8963, + "step": 4782 + }, + { + "epoch": 0.43134779275826307, + "grad_norm": 1.2529621313329151, + "learning_rate": 2.535250783359884e-06, + "loss": 0.9517, + "step": 4783 + }, + { + "epoch": 
0.4314379762817333, + "grad_norm": 1.3880559357384812, + "learning_rate": 2.5346878601317124e-06, + "loss": 0.843, + "step": 4784 + }, + { + "epoch": 0.4315281598052036, + "grad_norm": 1.2521213174518069, + "learning_rate": 2.534124891281025e-06, + "loss": 0.78, + "step": 4785 + }, + { + "epoch": 0.4316183433286738, + "grad_norm": 1.516111630056468, + "learning_rate": 2.533561876855857e-06, + "loss": 0.9615, + "step": 4786 + }, + { + "epoch": 0.4317085268521441, + "grad_norm": 1.5341533461943373, + "learning_rate": 2.532998816904247e-06, + "loss": 0.9012, + "step": 4787 + }, + { + "epoch": 0.4317987103756144, + "grad_norm": 1.948730958132299, + "learning_rate": 2.53243571147424e-06, + "loss": 0.8432, + "step": 4788 + }, + { + "epoch": 0.43188889389908464, + "grad_norm": 1.8641742349856676, + "learning_rate": 2.5318725606138815e-06, + "loss": 0.8602, + "step": 4789 + }, + { + "epoch": 0.4319790774225549, + "grad_norm": 2.0529721137827184, + "learning_rate": 2.5313093643712235e-06, + "loss": 0.9557, + "step": 4790 + }, + { + "epoch": 0.43206926094602516, + "grad_norm": 1.3487384376348464, + "learning_rate": 2.530746122794321e-06, + "loss": 0.9967, + "step": 4791 + }, + { + "epoch": 0.43215944446949545, + "grad_norm": 1.250893226009675, + "learning_rate": 2.5301828359312323e-06, + "loss": 0.8772, + "step": 4792 + }, + { + "epoch": 0.4322496279929657, + "grad_norm": 1.479577271687108, + "learning_rate": 2.529619503830021e-06, + "loss": 0.905, + "step": 4793 + }, + { + "epoch": 0.43233981151643597, + "grad_norm": 1.792612942232592, + "learning_rate": 2.529056126538753e-06, + "loss": 0.8787, + "step": 4794 + }, + { + "epoch": 0.4324299950399062, + "grad_norm": 1.472725864050092, + "learning_rate": 2.5284927041054995e-06, + "loss": 0.9705, + "step": 4795 + }, + { + "epoch": 0.4325201785633765, + "grad_norm": 0.7772008870026007, + "learning_rate": 2.5279292365783348e-06, + "loss": 0.769, + "step": 4796 + }, + { + "epoch": 0.4326103620868467, + "grad_norm": 1.494042726189192, + "learning_rate": 2.527365724005336e-06, + "loss": 1.0456, + "step": 4797 + }, + { + "epoch": 0.432700545610317, + "grad_norm": 1.5883207407262372, + "learning_rate": 2.526802166434586e-06, + "loss": 1.0044, + "step": 4798 + }, + { + "epoch": 0.43279072913378724, + "grad_norm": 1.3187378742691056, + "learning_rate": 2.5262385639141708e-06, + "loss": 0.9166, + "step": 4799 + }, + { + "epoch": 0.43288091265725753, + "grad_norm": 1.4432633178297354, + "learning_rate": 2.525674916492179e-06, + "loss": 1.0181, + "step": 4800 + }, + { + "epoch": 0.43297109618072777, + "grad_norm": 1.2217292562136184, + "learning_rate": 2.5251112242167056e-06, + "loss": 0.9573, + "step": 4801 + }, + { + "epoch": 0.43306127970419805, + "grad_norm": 1.5796192761941084, + "learning_rate": 2.5245474871358464e-06, + "loss": 0.9117, + "step": 4802 + }, + { + "epoch": 0.4331514632276683, + "grad_norm": 1.5213961496103086, + "learning_rate": 2.5239837052977037e-06, + "loss": 0.868, + "step": 4803 + }, + { + "epoch": 0.4332416467511386, + "grad_norm": 1.39481212020741, + "learning_rate": 2.523419878750381e-06, + "loss": 1.0594, + "step": 4804 + }, + { + "epoch": 0.4333318302746088, + "grad_norm": 1.561512367920231, + "learning_rate": 2.522856007541989e-06, + "loss": 0.9039, + "step": 4805 + }, + { + "epoch": 0.4334220137980791, + "grad_norm": 1.3631622787198503, + "learning_rate": 2.5222920917206397e-06, + "loss": 0.8692, + "step": 4806 + }, + { + "epoch": 0.43351219732154933, + "grad_norm": 0.8144759579758051, + "learning_rate": 2.5217281313344493e-06, + 
"loss": 0.7781, + "step": 4807 + }, + { + "epoch": 0.4336023808450196, + "grad_norm": 1.2739897858466245, + "learning_rate": 2.5211641264315372e-06, + "loss": 0.9924, + "step": 4808 + }, + { + "epoch": 0.43369256436848985, + "grad_norm": 1.3463262214441323, + "learning_rate": 2.5206000770600286e-06, + "loss": 0.9258, + "step": 4809 + }, + { + "epoch": 0.43378274789196014, + "grad_norm": 1.369300604383307, + "learning_rate": 2.520035983268051e-06, + "loss": 0.905, + "step": 4810 + }, + { + "epoch": 0.4338729314154304, + "grad_norm": 1.4295349039657947, + "learning_rate": 2.5194718451037357e-06, + "loss": 0.9517, + "step": 4811 + }, + { + "epoch": 0.43396311493890066, + "grad_norm": 1.3966820210194768, + "learning_rate": 2.518907662615218e-06, + "loss": 0.977, + "step": 4812 + }, + { + "epoch": 0.43405329846237095, + "grad_norm": 1.4004248451297518, + "learning_rate": 2.5183434358506373e-06, + "loss": 0.9056, + "step": 4813 + }, + { + "epoch": 0.4341434819858412, + "grad_norm": 1.5181644833779007, + "learning_rate": 2.5177791648581368e-06, + "loss": 0.935, + "step": 4814 + }, + { + "epoch": 0.4342336655093115, + "grad_norm": 1.3570691425696937, + "learning_rate": 2.517214849685863e-06, + "loss": 0.9536, + "step": 4815 + }, + { + "epoch": 0.4343238490327817, + "grad_norm": 1.4310209799027669, + "learning_rate": 2.5166504903819663e-06, + "loss": 0.9695, + "step": 4816 + }, + { + "epoch": 0.434414032556252, + "grad_norm": 1.605008618578246, + "learning_rate": 2.5160860869946014e-06, + "loss": 1.011, + "step": 4817 + }, + { + "epoch": 0.4345042160797222, + "grad_norm": 1.2300329011688116, + "learning_rate": 2.5155216395719253e-06, + "loss": 1.0279, + "step": 4818 + }, + { + "epoch": 0.4345943996031925, + "grad_norm": 1.2137735933786318, + "learning_rate": 2.5149571481621e-06, + "loss": 0.9063, + "step": 4819 + }, + { + "epoch": 0.43468458312666275, + "grad_norm": 0.708674297360164, + "learning_rate": 2.514392612813292e-06, + "loss": 0.7709, + "step": 4820 + }, + { + "epoch": 0.43477476665013304, + "grad_norm": 1.5063327782389246, + "learning_rate": 2.5138280335736695e-06, + "loss": 1.002, + "step": 4821 + }, + { + "epoch": 0.43486495017360327, + "grad_norm": 1.3603424926367824, + "learning_rate": 2.5132634104914064e-06, + "loss": 1.0283, + "step": 4822 + }, + { + "epoch": 0.43495513369707356, + "grad_norm": 1.2795556658419438, + "learning_rate": 2.5126987436146794e-06, + "loss": 0.9675, + "step": 4823 + }, + { + "epoch": 0.4350453172205438, + "grad_norm": 1.4827341748404352, + "learning_rate": 2.5121340329916675e-06, + "loss": 0.932, + "step": 4824 + }, + { + "epoch": 0.4351355007440141, + "grad_norm": 1.2834228141767392, + "learning_rate": 2.5115692786705566e-06, + "loss": 0.8672, + "step": 4825 + }, + { + "epoch": 0.4352256842674843, + "grad_norm": 1.4362689012523775, + "learning_rate": 2.511004480699534e-06, + "loss": 0.9824, + "step": 4826 + }, + { + "epoch": 0.4353158677909546, + "grad_norm": 0.8943490921522621, + "learning_rate": 2.510439639126791e-06, + "loss": 0.8618, + "step": 4827 + }, + { + "epoch": 0.43540605131442484, + "grad_norm": 1.3993244022628382, + "learning_rate": 2.509874754000524e-06, + "loss": 0.9495, + "step": 4828 + }, + { + "epoch": 0.4354962348378951, + "grad_norm": 1.6868409176606254, + "learning_rate": 2.509309825368932e-06, + "loss": 0.9387, + "step": 4829 + }, + { + "epoch": 0.43558641836136536, + "grad_norm": 1.463417029814969, + "learning_rate": 2.5087448532802173e-06, + "loss": 0.9459, + "step": 4830 + }, + { + "epoch": 0.43567660188483565, + "grad_norm": 
2.2204601520621243, + "learning_rate": 2.508179837782586e-06, + "loss": 0.9282, + "step": 4831 + }, + { + "epoch": 0.4357667854083059, + "grad_norm": 23.158553571318418, + "learning_rate": 2.5076147789242493e-06, + "loss": 0.9209, + "step": 4832 + }, + { + "epoch": 0.43585696893177617, + "grad_norm": 1.4827800011565553, + "learning_rate": 2.5070496767534202e-06, + "loss": 0.9649, + "step": 4833 + }, + { + "epoch": 0.4359471524552464, + "grad_norm": 1.8323574141835972, + "learning_rate": 2.506484531318317e-06, + "loss": 0.9825, + "step": 4834 + }, + { + "epoch": 0.4360373359787167, + "grad_norm": 1.1474179263880406, + "learning_rate": 2.5059193426671613e-06, + "loss": 0.858, + "step": 4835 + }, + { + "epoch": 0.436127519502187, + "grad_norm": 1.6320845884205162, + "learning_rate": 2.5053541108481772e-06, + "loss": 1.0811, + "step": 4836 + }, + { + "epoch": 0.4362177030256572, + "grad_norm": 1.5896131643526752, + "learning_rate": 2.5047888359095935e-06, + "loss": 0.9338, + "step": 4837 + }, + { + "epoch": 0.4363078865491275, + "grad_norm": 1.4633560153416936, + "learning_rate": 2.5042235178996436e-06, + "loss": 0.9318, + "step": 4838 + }, + { + "epoch": 0.43639807007259773, + "grad_norm": 1.5777190035557371, + "learning_rate": 2.5036581568665627e-06, + "loss": 1.0108, + "step": 4839 + }, + { + "epoch": 0.436488253596068, + "grad_norm": 1.4501850371847673, + "learning_rate": 2.503092752858591e-06, + "loss": 1.0119, + "step": 4840 + }, + { + "epoch": 0.43657843711953825, + "grad_norm": 0.9572415801938506, + "learning_rate": 2.502527305923971e-06, + "loss": 0.8587, + "step": 4841 + }, + { + "epoch": 0.43666862064300854, + "grad_norm": 1.9614613961634642, + "learning_rate": 2.5019618161109506e-06, + "loss": 0.9375, + "step": 4842 + }, + { + "epoch": 0.4367588041664788, + "grad_norm": 1.5667767550294802, + "learning_rate": 2.5013962834677804e-06, + "loss": 0.9606, + "step": 4843 + }, + { + "epoch": 0.43684898768994906, + "grad_norm": 1.6186856319635143, + "learning_rate": 2.500830708042715e-06, + "loss": 0.9345, + "step": 4844 + }, + { + "epoch": 0.4369391712134193, + "grad_norm": 1.3497196489211747, + "learning_rate": 2.500265089884011e-06, + "loss": 1.0143, + "step": 4845 + }, + { + "epoch": 0.4370293547368896, + "grad_norm": 1.394392375282084, + "learning_rate": 2.499699429039932e-06, + "loss": 0.949, + "step": 4846 + }, + { + "epoch": 0.4371195382603598, + "grad_norm": 1.5450469495682775, + "learning_rate": 2.4991337255587425e-06, + "loss": 0.9062, + "step": 4847 + }, + { + "epoch": 0.4372097217838301, + "grad_norm": 1.8240456345814164, + "learning_rate": 2.4985679794887106e-06, + "loss": 0.8171, + "step": 4848 + }, + { + "epoch": 0.43729990530730034, + "grad_norm": 2.446503857835464, + "learning_rate": 2.49800219087811e-06, + "loss": 0.9833, + "step": 4849 + }, + { + "epoch": 0.43739008883077063, + "grad_norm": 1.674503577387153, + "learning_rate": 2.4974363597752163e-06, + "loss": 0.9778, + "step": 4850 + }, + { + "epoch": 0.43748027235424086, + "grad_norm": 2.018732322402889, + "learning_rate": 2.4968704862283097e-06, + "loss": 1.0059, + "step": 4851 + }, + { + "epoch": 0.43757045587771115, + "grad_norm": 1.636622460202716, + "learning_rate": 2.4963045702856737e-06, + "loss": 1.0165, + "step": 4852 + }, + { + "epoch": 0.4376606394011814, + "grad_norm": 1.297078266070588, + "learning_rate": 2.4957386119955946e-06, + "loss": 0.8942, + "step": 4853 + }, + { + "epoch": 0.4377508229246517, + "grad_norm": 0.8978209992593, + "learning_rate": 2.495172611406364e-06, + "loss": 0.7813, + "step": 4854 
+ }, + { + "epoch": 0.4378410064481219, + "grad_norm": 1.85388102813681, + "learning_rate": 2.4946065685662757e-06, + "loss": 0.974, + "step": 4855 + }, + { + "epoch": 0.4379311899715922, + "grad_norm": 1.5490039855482176, + "learning_rate": 2.4940404835236283e-06, + "loss": 0.9491, + "step": 4856 + }, + { + "epoch": 0.4380213734950624, + "grad_norm": 1.3221642865293748, + "learning_rate": 2.4934743563267223e-06, + "loss": 0.9471, + "step": 4857 + }, + { + "epoch": 0.4381115570185327, + "grad_norm": 1.6007544735427919, + "learning_rate": 2.4929081870238635e-06, + "loss": 0.9072, + "step": 4858 + }, + { + "epoch": 0.43820174054200295, + "grad_norm": 1.3924665366214934, + "learning_rate": 2.49234197566336e-06, + "loss": 0.9869, + "step": 4859 + }, + { + "epoch": 0.43829192406547324, + "grad_norm": 1.491708408957983, + "learning_rate": 2.4917757222935247e-06, + "loss": 0.9575, + "step": 4860 + }, + { + "epoch": 0.4383821075889435, + "grad_norm": 1.6773519202194618, + "learning_rate": 2.4912094269626725e-06, + "loss": 0.9627, + "step": 4861 + }, + { + "epoch": 0.43847229111241376, + "grad_norm": 1.6194163272933018, + "learning_rate": 2.4906430897191245e-06, + "loss": 0.9936, + "step": 4862 + }, + { + "epoch": 0.43856247463588405, + "grad_norm": 1.3680607214161546, + "learning_rate": 2.490076710611202e-06, + "loss": 0.99, + "step": 4863 + }, + { + "epoch": 0.4386526581593543, + "grad_norm": 1.6605290252436895, + "learning_rate": 2.4895102896872326e-06, + "loss": 0.9952, + "step": 4864 + }, + { + "epoch": 0.43874284168282457, + "grad_norm": 1.5511415000675508, + "learning_rate": 2.4889438269955457e-06, + "loss": 0.8811, + "step": 4865 + }, + { + "epoch": 0.4388330252062948, + "grad_norm": 1.364546399186945, + "learning_rate": 2.4883773225844755e-06, + "loss": 0.9315, + "step": 4866 + }, + { + "epoch": 0.4389232087297651, + "grad_norm": 1.4539383283598897, + "learning_rate": 2.48781077650236e-06, + "loss": 0.9355, + "step": 4867 + }, + { + "epoch": 0.4390133922532353, + "grad_norm": 1.4025112466480196, + "learning_rate": 2.4872441887975386e-06, + "loss": 0.9145, + "step": 4868 + }, + { + "epoch": 0.4391035757767056, + "grad_norm": 1.1836347415485728, + "learning_rate": 2.486677559518356e-06, + "loss": 0.8968, + "step": 4869 + }, + { + "epoch": 0.43919375930017585, + "grad_norm": 1.0914016035413854, + "learning_rate": 2.4861108887131614e-06, + "loss": 0.9341, + "step": 4870 + }, + { + "epoch": 0.43928394282364613, + "grad_norm": 1.3157554851709747, + "learning_rate": 2.485544176430305e-06, + "loss": 0.9167, + "step": 4871 + }, + { + "epoch": 0.43937412634711637, + "grad_norm": 1.5181585942175657, + "learning_rate": 2.4849774227181425e-06, + "loss": 1.0262, + "step": 4872 + }, + { + "epoch": 0.43946430987058666, + "grad_norm": 1.2234738087534063, + "learning_rate": 2.484410627625032e-06, + "loss": 0.9484, + "step": 4873 + }, + { + "epoch": 0.4395544933940569, + "grad_norm": 1.5329871153069166, + "learning_rate": 2.4838437911993356e-06, + "loss": 0.9936, + "step": 4874 + }, + { + "epoch": 0.4396446769175272, + "grad_norm": 1.7651660288887407, + "learning_rate": 2.483276913489419e-06, + "loss": 0.9857, + "step": 4875 + }, + { + "epoch": 0.4397348604409974, + "grad_norm": 1.6316494975032587, + "learning_rate": 2.4827099945436516e-06, + "loss": 0.8526, + "step": 4876 + }, + { + "epoch": 0.4398250439644677, + "grad_norm": 1.5251956282664307, + "learning_rate": 2.482143034410405e-06, + "loss": 1.0272, + "step": 4877 + }, + { + "epoch": 0.43991522748793793, + "grad_norm": 1.478950063758719, + 
"learning_rate": 2.4815760331380573e-06, + "loss": 0.9812, + "step": 4878 + }, + { + "epoch": 0.4400054110114082, + "grad_norm": 1.2488371208279765, + "learning_rate": 2.481008990774987e-06, + "loss": 0.9102, + "step": 4879 + }, + { + "epoch": 0.44009559453487845, + "grad_norm": 1.5559872508862835, + "learning_rate": 2.480441907369577e-06, + "loss": 0.9521, + "step": 4880 + }, + { + "epoch": 0.44018577805834874, + "grad_norm": 1.598132521843244, + "learning_rate": 2.479874782970214e-06, + "loss": 0.8912, + "step": 4881 + }, + { + "epoch": 0.440275961581819, + "grad_norm": 1.6752184654220086, + "learning_rate": 2.4793076176252887e-06, + "loss": 0.9721, + "step": 4882 + }, + { + "epoch": 0.44036614510528926, + "grad_norm": 1.9217599353615338, + "learning_rate": 2.478740411383195e-06, + "loss": 0.8345, + "step": 4883 + }, + { + "epoch": 0.44045632862875955, + "grad_norm": 1.318836447969311, + "learning_rate": 2.4781731642923296e-06, + "loss": 0.9394, + "step": 4884 + }, + { + "epoch": 0.4405465121522298, + "grad_norm": 1.2600037815400553, + "learning_rate": 2.477605876401093e-06, + "loss": 0.934, + "step": 4885 + }, + { + "epoch": 0.4406366956757001, + "grad_norm": 1.2775152715378597, + "learning_rate": 2.4770385477578894e-06, + "loss": 0.8973, + "step": 4886 + }, + { + "epoch": 0.4407268791991703, + "grad_norm": 1.1525215141125342, + "learning_rate": 2.476471178411127e-06, + "loss": 0.903, + "step": 4887 + }, + { + "epoch": 0.4408170627226406, + "grad_norm": 1.3771614945122315, + "learning_rate": 2.475903768409216e-06, + "loss": 0.9722, + "step": 4888 + }, + { + "epoch": 0.44090724624611083, + "grad_norm": 1.5356603673169953, + "learning_rate": 2.475336317800572e-06, + "loss": 0.9154, + "step": 4889 + }, + { + "epoch": 0.4409974297695811, + "grad_norm": 1.3871110817832404, + "learning_rate": 2.4747688266336118e-06, + "loss": 0.937, + "step": 4890 + }, + { + "epoch": 0.44108761329305135, + "grad_norm": 2.688547329537656, + "learning_rate": 2.4742012949567574e-06, + "loss": 1.0502, + "step": 4891 + }, + { + "epoch": 0.44117779681652164, + "grad_norm": 1.6646650056373662, + "learning_rate": 2.4736337228184338e-06, + "loss": 0.9245, + "step": 4892 + }, + { + "epoch": 0.44126798033999187, + "grad_norm": 1.5083718324809612, + "learning_rate": 2.4730661102670692e-06, + "loss": 1.0452, + "step": 4893 + }, + { + "epoch": 0.44135816386346216, + "grad_norm": 1.4949587508159665, + "learning_rate": 2.472498457351096e-06, + "loss": 0.95, + "step": 4894 + }, + { + "epoch": 0.4414483473869324, + "grad_norm": 1.8182706719058266, + "learning_rate": 2.4719307641189495e-06, + "loss": 0.9323, + "step": 4895 + }, + { + "epoch": 0.4415385309104027, + "grad_norm": 1.6726777475784935, + "learning_rate": 2.4713630306190673e-06, + "loss": 0.9073, + "step": 4896 + }, + { + "epoch": 0.4416287144338729, + "grad_norm": 1.719623066497268, + "learning_rate": 2.4707952568998923e-06, + "loss": 1.0224, + "step": 4897 + }, + { + "epoch": 0.4417188979573432, + "grad_norm": 1.7734564557511856, + "learning_rate": 2.4702274430098703e-06, + "loss": 0.9134, + "step": 4898 + }, + { + "epoch": 0.44180908148081344, + "grad_norm": 1.5771124584711893, + "learning_rate": 2.4696595889974497e-06, + "loss": 0.9009, + "step": 4899 + }, + { + "epoch": 0.4418992650042837, + "grad_norm": 1.3754397469361508, + "learning_rate": 2.469091694911084e-06, + "loss": 1.0057, + "step": 4900 + }, + { + "epoch": 0.44198944852775396, + "grad_norm": 1.421957579509816, + "learning_rate": 2.4685237607992276e-06, + "loss": 0.9428, + "step": 4901 + }, + { + 
"epoch": 0.44207963205122425, + "grad_norm": 1.4174578458246796, + "learning_rate": 2.4679557867103416e-06, + "loss": 1.0098, + "step": 4902 + }, + { + "epoch": 0.4421698155746945, + "grad_norm": 1.2143655798797492, + "learning_rate": 2.4673877726928865e-06, + "loss": 0.9542, + "step": 4903 + }, + { + "epoch": 0.44225999909816477, + "grad_norm": 1.7367073391613719, + "learning_rate": 2.46681971879533e-06, + "loss": 0.8357, + "step": 4904 + }, + { + "epoch": 0.442350182621635, + "grad_norm": 1.602849973172481, + "learning_rate": 2.4662516250661407e-06, + "loss": 0.8694, + "step": 4905 + }, + { + "epoch": 0.4424403661451053, + "grad_norm": 1.555437070254758, + "learning_rate": 2.465683491553792e-06, + "loss": 0.9252, + "step": 4906 + }, + { + "epoch": 0.4425305496685755, + "grad_norm": 1.4478088265563223, + "learning_rate": 2.4651153183067604e-06, + "loss": 0.9554, + "step": 4907 + }, + { + "epoch": 0.4426207331920458, + "grad_norm": 1.6340423770671524, + "learning_rate": 2.4645471053735245e-06, + "loss": 0.8846, + "step": 4908 + }, + { + "epoch": 0.4427109167155161, + "grad_norm": 1.3931689192915746, + "learning_rate": 2.4639788528025684e-06, + "loss": 0.8824, + "step": 4909 + }, + { + "epoch": 0.44280110023898633, + "grad_norm": 1.1411733485006232, + "learning_rate": 2.463410560642378e-06, + "loss": 0.9507, + "step": 4910 + }, + { + "epoch": 0.4428912837624566, + "grad_norm": 0.7474633871954892, + "learning_rate": 2.4628422289414448e-06, + "loss": 0.8194, + "step": 4911 + }, + { + "epoch": 0.44298146728592686, + "grad_norm": 0.7305863637636895, + "learning_rate": 2.4622738577482592e-06, + "loss": 0.8017, + "step": 4912 + }, + { + "epoch": 0.44307165080939714, + "grad_norm": 1.2799133213318317, + "learning_rate": 2.461705447111319e-06, + "loss": 0.9343, + "step": 4913 + }, + { + "epoch": 0.4431618343328674, + "grad_norm": 1.4186908155019626, + "learning_rate": 2.4611369970791246e-06, + "loss": 0.9115, + "step": 4914 + }, + { + "epoch": 0.44325201785633767, + "grad_norm": 1.4448724850156578, + "learning_rate": 2.460568507700179e-06, + "loss": 0.953, + "step": 4915 + }, + { + "epoch": 0.4433422013798079, + "grad_norm": 1.4816351249063728, + "learning_rate": 2.4599999790229887e-06, + "loss": 0.9867, + "step": 4916 + }, + { + "epoch": 0.4434323849032782, + "grad_norm": 1.5211029945880237, + "learning_rate": 2.459431411096064e-06, + "loss": 0.9015, + "step": 4917 + }, + { + "epoch": 0.4435225684267484, + "grad_norm": 1.4412938516104423, + "learning_rate": 2.458862803967918e-06, + "loss": 0.9105, + "step": 4918 + }, + { + "epoch": 0.4436127519502187, + "grad_norm": 1.4250778344296635, + "learning_rate": 2.4582941576870667e-06, + "loss": 0.9596, + "step": 4919 + }, + { + "epoch": 0.44370293547368894, + "grad_norm": 1.4631329527992796, + "learning_rate": 2.4577254723020315e-06, + "loss": 0.9229, + "step": 4920 + }, + { + "epoch": 0.44379311899715923, + "grad_norm": 1.4678889246295337, + "learning_rate": 2.457156747861335e-06, + "loss": 0.97, + "step": 4921 + }, + { + "epoch": 0.44388330252062946, + "grad_norm": 1.5322314639769834, + "learning_rate": 2.456587984413504e-06, + "loss": 1.0067, + "step": 4922 + }, + { + "epoch": 0.44397348604409975, + "grad_norm": 1.929413934382145, + "learning_rate": 2.4560191820070683e-06, + "loss": 0.8794, + "step": 4923 + }, + { + "epoch": 0.44406366956757, + "grad_norm": 1.7622605397849116, + "learning_rate": 2.4554503406905617e-06, + "loss": 0.9388, + "step": 4924 + }, + { + "epoch": 0.4441538530910403, + "grad_norm": 1.324197414530143, + "learning_rate": 
2.454881460512521e-06, + "loss": 0.9864, + "step": 4925 + }, + { + "epoch": 0.4442440366145105, + "grad_norm": 1.1887498101074838, + "learning_rate": 2.4543125415214856e-06, + "loss": 0.9218, + "step": 4926 + }, + { + "epoch": 0.4443342201379808, + "grad_norm": 1.821079141827436, + "learning_rate": 2.4537435837659996e-06, + "loss": 0.9723, + "step": 4927 + }, + { + "epoch": 0.44442440366145103, + "grad_norm": 1.5647585757572584, + "learning_rate": 2.4531745872946085e-06, + "loss": 0.8987, + "step": 4928 + }, + { + "epoch": 0.4445145871849213, + "grad_norm": 1.781144992343237, + "learning_rate": 2.4526055521558632e-06, + "loss": 1.0084, + "step": 4929 + }, + { + "epoch": 0.44460477070839155, + "grad_norm": 1.3824911229019334, + "learning_rate": 2.4520364783983164e-06, + "loss": 0.956, + "step": 4930 + }, + { + "epoch": 0.44469495423186184, + "grad_norm": 1.2966486262183607, + "learning_rate": 2.451467366070525e-06, + "loss": 0.9327, + "step": 4931 + }, + { + "epoch": 0.4447851377553321, + "grad_norm": 1.6085263579500246, + "learning_rate": 2.450898215221048e-06, + "loss": 1.017, + "step": 4932 + }, + { + "epoch": 0.44487532127880236, + "grad_norm": 1.6142554493706236, + "learning_rate": 2.4503290258984498e-06, + "loss": 1.0898, + "step": 4933 + }, + { + "epoch": 0.44496550480227265, + "grad_norm": 1.7696279200406997, + "learning_rate": 2.4497597981512952e-06, + "loss": 0.8193, + "step": 4934 + }, + { + "epoch": 0.4450556883257429, + "grad_norm": 0.7368021686207256, + "learning_rate": 2.4491905320281555e-06, + "loss": 0.824, + "step": 4935 + }, + { + "epoch": 0.44514587184921317, + "grad_norm": 1.3886765105737502, + "learning_rate": 2.448621227577602e-06, + "loss": 0.9294, + "step": 4936 + }, + { + "epoch": 0.4452360553726834, + "grad_norm": 1.5472164355218694, + "learning_rate": 2.4480518848482123e-06, + "loss": 0.8971, + "step": 4937 + }, + { + "epoch": 0.4453262388961537, + "grad_norm": 1.3297565088171968, + "learning_rate": 2.447482503888565e-06, + "loss": 0.9552, + "step": 4938 + }, + { + "epoch": 0.4454164224196239, + "grad_norm": 1.8324287812886135, + "learning_rate": 2.4469130847472434e-06, + "loss": 0.9282, + "step": 4939 + }, + { + "epoch": 0.4455066059430942, + "grad_norm": 1.562764641285086, + "learning_rate": 2.4463436274728326e-06, + "loss": 0.9803, + "step": 4940 + }, + { + "epoch": 0.44559678946656445, + "grad_norm": 1.4335397819318534, + "learning_rate": 2.4457741321139227e-06, + "loss": 0.7909, + "step": 4941 + }, + { + "epoch": 0.44568697299003474, + "grad_norm": 1.3684835340061894, + "learning_rate": 2.4452045987191063e-06, + "loss": 0.9689, + "step": 4942 + }, + { + "epoch": 0.44577715651350497, + "grad_norm": 1.2567809714603873, + "learning_rate": 2.4446350273369776e-06, + "loss": 0.917, + "step": 4943 + }, + { + "epoch": 0.44586734003697526, + "grad_norm": 1.3909230287922247, + "learning_rate": 2.4440654180161374e-06, + "loss": 0.9898, + "step": 4944 + }, + { + "epoch": 0.4459575235604455, + "grad_norm": 1.4042686066945758, + "learning_rate": 2.4434957708051875e-06, + "loss": 0.896, + "step": 4945 + }, + { + "epoch": 0.4460477070839158, + "grad_norm": 1.5391756587375975, + "learning_rate": 2.4429260857527324e-06, + "loss": 0.89, + "step": 4946 + }, + { + "epoch": 0.446137890607386, + "grad_norm": 1.6067806183832885, + "learning_rate": 2.4423563629073815e-06, + "loss": 0.8539, + "step": 4947 + }, + { + "epoch": 0.4462280741308563, + "grad_norm": 1.3265615797994452, + "learning_rate": 2.4417866023177466e-06, + "loss": 0.9396, + "step": 4948 + }, + { + "epoch": 
0.44631825765432653, + "grad_norm": 1.6148112802696886, + "learning_rate": 2.441216804032443e-06, + "loss": 0.9713, + "step": 4949 + }, + { + "epoch": 0.4464084411777968, + "grad_norm": 1.3887407631828717, + "learning_rate": 2.440646968100089e-06, + "loss": 0.9645, + "step": 4950 + }, + { + "epoch": 0.44649862470126706, + "grad_norm": 0.8683025991915592, + "learning_rate": 2.4400770945693055e-06, + "loss": 0.8247, + "step": 4951 + }, + { + "epoch": 0.44658880822473734, + "grad_norm": 1.9155024503305367, + "learning_rate": 2.4395071834887177e-06, + "loss": 0.9756, + "step": 4952 + }, + { + "epoch": 0.4466789917482076, + "grad_norm": 0.8430002908400716, + "learning_rate": 2.438937234906954e-06, + "loss": 0.8786, + "step": 4953 + }, + { + "epoch": 0.44676917527167787, + "grad_norm": 1.4635447528123275, + "learning_rate": 2.4383672488726447e-06, + "loss": 0.9456, + "step": 4954 + }, + { + "epoch": 0.44685935879514815, + "grad_norm": 1.677528732895124, + "learning_rate": 2.4377972254344256e-06, + "loss": 0.8693, + "step": 4955 + }, + { + "epoch": 0.4469495423186184, + "grad_norm": 1.6280288345726408, + "learning_rate": 2.437227164640932e-06, + "loss": 0.8453, + "step": 4956 + }, + { + "epoch": 0.4470397258420887, + "grad_norm": 1.5424334792719996, + "learning_rate": 2.436657066540807e-06, + "loss": 0.8861, + "step": 4957 + }, + { + "epoch": 0.4471299093655589, + "grad_norm": 1.7492753299560009, + "learning_rate": 2.4360869311826927e-06, + "loss": 0.8264, + "step": 4958 + }, + { + "epoch": 0.4472200928890292, + "grad_norm": 1.903695568125606, + "learning_rate": 2.4355167586152367e-06, + "loss": 1.036, + "step": 4959 + }, + { + "epoch": 0.44731027641249943, + "grad_norm": 1.6545288211575202, + "learning_rate": 2.4349465488870896e-06, + "loss": 0.912, + "step": 4960 + }, + { + "epoch": 0.4474004599359697, + "grad_norm": 1.3840668156570122, + "learning_rate": 2.434376302046905e-06, + "loss": 1.0265, + "step": 4961 + }, + { + "epoch": 0.44749064345943995, + "grad_norm": 1.737955127084107, + "learning_rate": 2.433806018143339e-06, + "loss": 0.9687, + "step": 4962 + }, + { + "epoch": 0.44758082698291024, + "grad_norm": 0.698802525758846, + "learning_rate": 2.433235697225051e-06, + "loss": 0.8245, + "step": 4963 + }, + { + "epoch": 0.4476710105063805, + "grad_norm": 1.581955896160655, + "learning_rate": 2.4326653393407048e-06, + "loss": 0.9593, + "step": 4964 + }, + { + "epoch": 0.44776119402985076, + "grad_norm": 1.7123246513865926, + "learning_rate": 2.432094944538966e-06, + "loss": 0.9106, + "step": 4965 + }, + { + "epoch": 0.447851377553321, + "grad_norm": 1.3222140100680013, + "learning_rate": 2.4315245128685047e-06, + "loss": 0.9155, + "step": 4966 + }, + { + "epoch": 0.4479415610767913, + "grad_norm": 1.4005395020072584, + "learning_rate": 2.4309540443779925e-06, + "loss": 0.9566, + "step": 4967 + }, + { + "epoch": 0.4480317446002615, + "grad_norm": 1.5606535873880758, + "learning_rate": 2.4303835391161047e-06, + "loss": 0.8832, + "step": 4968 + }, + { + "epoch": 0.4481219281237318, + "grad_norm": 1.3371648671623355, + "learning_rate": 2.42981299713152e-06, + "loss": 0.8956, + "step": 4969 + }, + { + "epoch": 0.44821211164720204, + "grad_norm": 1.5859172049290255, + "learning_rate": 2.4292424184729204e-06, + "loss": 0.9853, + "step": 4970 + }, + { + "epoch": 0.4483022951706723, + "grad_norm": 1.344153144155091, + "learning_rate": 2.4286718031889913e-06, + "loss": 0.9383, + "step": 4971 + }, + { + "epoch": 0.44839247869414256, + "grad_norm": 1.2586983351346936, + "learning_rate": 
2.4281011513284202e-06, + "loss": 0.9261, + "step": 4972 + }, + { + "epoch": 0.44848266221761285, + "grad_norm": 1.5314286575413947, + "learning_rate": 2.4275304629398985e-06, + "loss": 0.8855, + "step": 4973 + }, + { + "epoch": 0.4485728457410831, + "grad_norm": 1.2418384661433817, + "learning_rate": 2.4269597380721194e-06, + "loss": 1.0234, + "step": 4974 + }, + { + "epoch": 0.44866302926455337, + "grad_norm": 1.2570096409412015, + "learning_rate": 2.426388976773782e-06, + "loss": 0.9436, + "step": 4975 + }, + { + "epoch": 0.4487532127880236, + "grad_norm": 1.57686171627006, + "learning_rate": 2.425818179093586e-06, + "loss": 1.0443, + "step": 4976 + }, + { + "epoch": 0.4488433963114939, + "grad_norm": 1.4134626433459505, + "learning_rate": 2.4252473450802346e-06, + "loss": 0.8595, + "step": 4977 + }, + { + "epoch": 0.4489335798349641, + "grad_norm": 1.5154258361208954, + "learning_rate": 2.4246764747824355e-06, + "loss": 0.981, + "step": 4978 + }, + { + "epoch": 0.4490237633584344, + "grad_norm": 1.4976040460425928, + "learning_rate": 2.424105568248897e-06, + "loss": 0.9559, + "step": 4979 + }, + { + "epoch": 0.4491139468819047, + "grad_norm": 1.690461985402386, + "learning_rate": 2.4235346255283337e-06, + "loss": 0.8913, + "step": 4980 + }, + { + "epoch": 0.44920413040537494, + "grad_norm": 1.6372489707665114, + "learning_rate": 2.42296364666946e-06, + "loss": 0.9578, + "step": 4981 + }, + { + "epoch": 0.4492943139288452, + "grad_norm": 1.5267052449983889, + "learning_rate": 2.4223926317209965e-06, + "loss": 0.898, + "step": 4982 + }, + { + "epoch": 0.44938449745231546, + "grad_norm": 1.2073228601085335, + "learning_rate": 2.4218215807316647e-06, + "loss": 0.9522, + "step": 4983 + }, + { + "epoch": 0.44947468097578575, + "grad_norm": 1.4555374137501715, + "learning_rate": 2.4212504937501894e-06, + "loss": 1.0309, + "step": 4984 + }, + { + "epoch": 0.449564864499256, + "grad_norm": 1.2088068768631977, + "learning_rate": 2.4206793708253e-06, + "loss": 1.0019, + "step": 4985 + }, + { + "epoch": 0.44965504802272627, + "grad_norm": 1.7457074926479192, + "learning_rate": 2.420108212005726e-06, + "loss": 0.9041, + "step": 4986 + }, + { + "epoch": 0.4497452315461965, + "grad_norm": 1.8220279452738273, + "learning_rate": 2.4195370173402034e-06, + "loss": 0.9508, + "step": 4987 + }, + { + "epoch": 0.4498354150696668, + "grad_norm": 1.421384454304062, + "learning_rate": 2.4189657868774696e-06, + "loss": 1.0653, + "step": 4988 + }, + { + "epoch": 0.449925598593137, + "grad_norm": 1.6014474967046337, + "learning_rate": 2.418394520666264e-06, + "loss": 0.8794, + "step": 4989 + }, + { + "epoch": 0.4500157821166073, + "grad_norm": 1.522763381876662, + "learning_rate": 2.4178232187553307e-06, + "loss": 0.9251, + "step": 4990 + }, + { + "epoch": 0.45010596564007754, + "grad_norm": 1.3164810023605016, + "learning_rate": 2.417251881193417e-06, + "loss": 0.9711, + "step": 4991 + }, + { + "epoch": 0.45019614916354783, + "grad_norm": 1.5458494697955594, + "learning_rate": 2.4166805080292723e-06, + "loss": 1.0035, + "step": 4992 + }, + { + "epoch": 0.45028633268701806, + "grad_norm": 1.366832752525511, + "learning_rate": 2.4161090993116485e-06, + "loss": 0.9032, + "step": 4993 + }, + { + "epoch": 0.45037651621048835, + "grad_norm": 2.183417516158035, + "learning_rate": 2.4155376550893026e-06, + "loss": 0.8298, + "step": 4994 + }, + { + "epoch": 0.4504666997339586, + "grad_norm": 1.5015919502855473, + "learning_rate": 2.4149661754109926e-06, + "loss": 0.9579, + "step": 4995 + }, + { + "epoch": 
0.4505568832574289, + "grad_norm": 1.4471522813241864, + "learning_rate": 2.41439466032548e-06, + "loss": 1.0017, + "step": 4996 + }, + { + "epoch": 0.4506470667808991, + "grad_norm": 1.445686003391124, + "learning_rate": 2.41382310988153e-06, + "loss": 0.8903, + "step": 4997 + }, + { + "epoch": 0.4507372503043694, + "grad_norm": 1.638980295394489, + "learning_rate": 2.413251524127911e-06, + "loss": 1.0041, + "step": 4998 + }, + { + "epoch": 0.45082743382783963, + "grad_norm": 1.650257497982241, + "learning_rate": 2.412679903113393e-06, + "loss": 0.9145, + "step": 4999 + }, + { + "epoch": 0.4509176173513099, + "grad_norm": 1.7895444529006455, + "learning_rate": 2.4121082468867505e-06, + "loss": 0.9589, + "step": 5000 + }, + { + "epoch": 0.45100780087478015, + "grad_norm": 1.843709718943093, + "learning_rate": 2.4115365554967597e-06, + "loss": 0.8864, + "step": 5001 + }, + { + "epoch": 0.45109798439825044, + "grad_norm": 1.4698121308925909, + "learning_rate": 2.4109648289922006e-06, + "loss": 0.954, + "step": 5002 + }, + { + "epoch": 0.45118816792172073, + "grad_norm": 1.4246211937136142, + "learning_rate": 2.4103930674218565e-06, + "loss": 0.9565, + "step": 5003 + }, + { + "epoch": 0.45127835144519096, + "grad_norm": 1.629004752201949, + "learning_rate": 2.409821270834513e-06, + "loss": 0.9249, + "step": 5004 + }, + { + "epoch": 0.45136853496866125, + "grad_norm": 1.847453325995758, + "learning_rate": 2.409249439278959e-06, + "loss": 1.0043, + "step": 5005 + }, + { + "epoch": 0.4514587184921315, + "grad_norm": 0.6881115534245591, + "learning_rate": 2.408677572803986e-06, + "loss": 0.7769, + "step": 5006 + }, + { + "epoch": 0.45154890201560177, + "grad_norm": 1.4995397020447385, + "learning_rate": 2.408105671458389e-06, + "loss": 0.9525, + "step": 5007 + }, + { + "epoch": 0.451639085539072, + "grad_norm": 1.3194402488284858, + "learning_rate": 2.4075337352909663e-06, + "loss": 0.8869, + "step": 5008 + }, + { + "epoch": 0.4517292690625423, + "grad_norm": 1.2928517913933415, + "learning_rate": 2.4069617643505177e-06, + "loss": 1.0757, + "step": 5009 + }, + { + "epoch": 0.4518194525860125, + "grad_norm": 1.3394163912644668, + "learning_rate": 2.406389758685848e-06, + "loss": 1.036, + "step": 5010 + }, + { + "epoch": 0.4519096361094828, + "grad_norm": 1.2904733508705035, + "learning_rate": 2.405817718345763e-06, + "loss": 0.9899, + "step": 5011 + }, + { + "epoch": 0.45199981963295305, + "grad_norm": 1.362859141411734, + "learning_rate": 2.4052456433790726e-06, + "loss": 0.8946, + "step": 5012 + }, + { + "epoch": 0.45209000315642334, + "grad_norm": 1.4147028869237044, + "learning_rate": 2.4046735338345897e-06, + "loss": 1.0012, + "step": 5013 + }, + { + "epoch": 0.45218018667989357, + "grad_norm": 1.454003427446358, + "learning_rate": 2.404101389761129e-06, + "loss": 0.9666, + "step": 5014 + }, + { + "epoch": 0.45227037020336386, + "grad_norm": 0.7113024435167885, + "learning_rate": 2.4035292112075097e-06, + "loss": 0.7754, + "step": 5015 + }, + { + "epoch": 0.4523605537268341, + "grad_norm": 1.3404054493069621, + "learning_rate": 2.4029569982225534e-06, + "loss": 1.0078, + "step": 5016 + }, + { + "epoch": 0.4524507372503044, + "grad_norm": 0.7518172896918641, + "learning_rate": 2.402384750855084e-06, + "loss": 0.8607, + "step": 5017 + }, + { + "epoch": 0.4525409207737746, + "grad_norm": 1.3496504030740268, + "learning_rate": 2.4018124691539286e-06, + "loss": 0.9483, + "step": 5018 + }, + { + "epoch": 0.4526311042972449, + "grad_norm": 1.5543146501029508, + "learning_rate": 
2.4012401531679178e-06, + "loss": 0.9358, + "step": 5019 + }, + { + "epoch": 0.45272128782071513, + "grad_norm": 1.5149099491915505, + "learning_rate": 2.4006678029458847e-06, + "loss": 0.9516, + "step": 5020 + }, + { + "epoch": 0.4528114713441854, + "grad_norm": 1.3105892168871365, + "learning_rate": 2.400095418536666e-06, + "loss": 0.8503, + "step": 5021 + }, + { + "epoch": 0.45290165486765566, + "grad_norm": 1.7164938250196435, + "learning_rate": 2.3995229999890996e-06, + "loss": 0.9231, + "step": 5022 + }, + { + "epoch": 0.45299183839112594, + "grad_norm": 1.5477528248771777, + "learning_rate": 2.398950547352028e-06, + "loss": 0.8895, + "step": 5023 + }, + { + "epoch": 0.4530820219145962, + "grad_norm": 2.094446891841701, + "learning_rate": 2.398378060674295e-06, + "loss": 0.8663, + "step": 5024 + }, + { + "epoch": 0.45317220543806647, + "grad_norm": 1.3105630660442655, + "learning_rate": 2.39780554000475e-06, + "loss": 0.9496, + "step": 5025 + }, + { + "epoch": 0.4532623889615367, + "grad_norm": 1.4411139463208722, + "learning_rate": 2.3972329853922434e-06, + "loss": 0.9807, + "step": 5026 + }, + { + "epoch": 0.453352572485007, + "grad_norm": 0.6840733181101064, + "learning_rate": 2.3966603968856278e-06, + "loss": 0.8033, + "step": 5027 + }, + { + "epoch": 0.4534427560084773, + "grad_norm": 1.490900253377549, + "learning_rate": 2.39608777453376e-06, + "loss": 1.024, + "step": 5028 + }, + { + "epoch": 0.4535329395319475, + "grad_norm": 1.5443466022123702, + "learning_rate": 2.3955151183854993e-06, + "loss": 0.9724, + "step": 5029 + }, + { + "epoch": 0.4536231230554178, + "grad_norm": 1.4024440123862745, + "learning_rate": 2.3949424284897073e-06, + "loss": 0.9076, + "step": 5030 + }, + { + "epoch": 0.45371330657888803, + "grad_norm": 1.5103509121317138, + "learning_rate": 2.39436970489525e-06, + "loss": 0.8139, + "step": 5031 + }, + { + "epoch": 0.4538034901023583, + "grad_norm": 1.474681645321324, + "learning_rate": 2.3937969476509955e-06, + "loss": 0.9547, + "step": 5032 + }, + { + "epoch": 0.45389367362582855, + "grad_norm": 1.3691033155552015, + "learning_rate": 2.393224156805813e-06, + "loss": 1.011, + "step": 5033 + }, + { + "epoch": 0.45398385714929884, + "grad_norm": 1.4900013352714547, + "learning_rate": 2.392651332408578e-06, + "loss": 0.9211, + "step": 5034 + }, + { + "epoch": 0.4540740406727691, + "grad_norm": 1.7576369134309155, + "learning_rate": 2.3920784745081655e-06, + "loss": 0.9324, + "step": 5035 + }, + { + "epoch": 0.45416422419623936, + "grad_norm": 1.5713565069865432, + "learning_rate": 2.391505583153456e-06, + "loss": 0.9849, + "step": 5036 + }, + { + "epoch": 0.4542544077197096, + "grad_norm": 1.484942759300111, + "learning_rate": 2.3909326583933315e-06, + "loss": 0.9562, + "step": 5037 + }, + { + "epoch": 0.4543445912431799, + "grad_norm": 1.2861970437940204, + "learning_rate": 2.3903597002766777e-06, + "loss": 0.9107, + "step": 5038 + }, + { + "epoch": 0.4544347747666501, + "grad_norm": 1.334044693885866, + "learning_rate": 2.389786708852381e-06, + "loss": 0.9131, + "step": 5039 + }, + { + "epoch": 0.4545249582901204, + "grad_norm": 0.9102748089630923, + "learning_rate": 2.389213684169333e-06, + "loss": 0.9573, + "step": 5040 + }, + { + "epoch": 0.45461514181359064, + "grad_norm": 1.399914137727721, + "learning_rate": 2.388640626276428e-06, + "loss": 0.9096, + "step": 5041 + }, + { + "epoch": 0.45470532533706093, + "grad_norm": 0.8276869496994188, + "learning_rate": 2.388067535222561e-06, + "loss": 0.8108, + "step": 5042 + }, + { + "epoch": 
0.45479550886053116, + "grad_norm": 1.6634022293780386, + "learning_rate": 2.3874944110566332e-06, + "loss": 0.9642, + "step": 5043 + }, + { + "epoch": 0.45488569238400145, + "grad_norm": 0.8228269640806296, + "learning_rate": 2.3869212538275447e-06, + "loss": 0.9314, + "step": 5044 + }, + { + "epoch": 0.4549758759074717, + "grad_norm": 1.2422007434067033, + "learning_rate": 2.386348063584202e-06, + "loss": 0.9635, + "step": 5045 + }, + { + "epoch": 0.45506605943094197, + "grad_norm": 1.6202981122370417, + "learning_rate": 2.385774840375511e-06, + "loss": 1.0014, + "step": 5046 + }, + { + "epoch": 0.4551562429544122, + "grad_norm": 1.4669988216351209, + "learning_rate": 2.385201584250385e-06, + "loss": 0.9072, + "step": 5047 + }, + { + "epoch": 0.4552464264778825, + "grad_norm": 0.8083974258995317, + "learning_rate": 2.3846282952577346e-06, + "loss": 0.8679, + "step": 5048 + }, + { + "epoch": 0.4553366100013527, + "grad_norm": 1.2754200673465617, + "learning_rate": 2.3840549734464785e-06, + "loss": 1.0313, + "step": 5049 + }, + { + "epoch": 0.455426793524823, + "grad_norm": 1.600698619551143, + "learning_rate": 2.3834816188655336e-06, + "loss": 0.8863, + "step": 5050 + }, + { + "epoch": 0.4555169770482933, + "grad_norm": 1.5019362191983776, + "learning_rate": 2.3829082315638224e-06, + "loss": 0.9003, + "step": 5051 + }, + { + "epoch": 0.45560716057176354, + "grad_norm": 1.5571060155057426, + "learning_rate": 2.3823348115902695e-06, + "loss": 0.8119, + "step": 5052 + }, + { + "epoch": 0.4556973440952338, + "grad_norm": 1.4746954684356475, + "learning_rate": 2.3817613589938026e-06, + "loss": 0.9598, + "step": 5053 + }, + { + "epoch": 0.45578752761870406, + "grad_norm": 1.575050834562005, + "learning_rate": 2.3811878738233517e-06, + "loss": 0.9892, + "step": 5054 + }, + { + "epoch": 0.45587771114217435, + "grad_norm": 1.707422626244723, + "learning_rate": 2.380614356127849e-06, + "loss": 0.8967, + "step": 5055 + }, + { + "epoch": 0.4559678946656446, + "grad_norm": 1.4708172473982417, + "learning_rate": 2.3800408059562318e-06, + "loss": 0.9402, + "step": 5056 + }, + { + "epoch": 0.45605807818911487, + "grad_norm": 1.7537794173499868, + "learning_rate": 2.3794672233574365e-06, + "loss": 0.954, + "step": 5057 + }, + { + "epoch": 0.4561482617125851, + "grad_norm": 1.5761284615400522, + "learning_rate": 2.3788936083804058e-06, + "loss": 0.9656, + "step": 5058 + }, + { + "epoch": 0.4562384452360554, + "grad_norm": 1.3478485412519574, + "learning_rate": 2.378319961074083e-06, + "loss": 0.947, + "step": 5059 + }, + { + "epoch": 0.4563286287595256, + "grad_norm": 1.4141616786508404, + "learning_rate": 2.377746281487415e-06, + "loss": 0.9934, + "step": 5060 + }, + { + "epoch": 0.4564188122829959, + "grad_norm": 1.6546237807212096, + "learning_rate": 2.377172569669352e-06, + "loss": 1.0349, + "step": 5061 + }, + { + "epoch": 0.45650899580646614, + "grad_norm": 1.5253176311460541, + "learning_rate": 2.376598825668845e-06, + "loss": 0.8022, + "step": 5062 + }, + { + "epoch": 0.45659917932993643, + "grad_norm": 1.5426406704705076, + "learning_rate": 2.3760250495348495e-06, + "loss": 0.9351, + "step": 5063 + }, + { + "epoch": 0.45668936285340667, + "grad_norm": 1.454170671182418, + "learning_rate": 2.3754512413163236e-06, + "loss": 0.9867, + "step": 5064 + }, + { + "epoch": 0.45677954637687695, + "grad_norm": 1.2873026076172556, + "learning_rate": 2.3748774010622285e-06, + "loss": 0.9304, + "step": 5065 + }, + { + "epoch": 0.4568697299003472, + "grad_norm": 1.605197410208017, + "learning_rate": 
2.3743035288215254e-06, + "loss": 0.9617, + "step": 5066 + }, + { + "epoch": 0.4569599134238175, + "grad_norm": 1.4037820140146737, + "learning_rate": 2.3737296246431815e-06, + "loss": 0.9572, + "step": 5067 + }, + { + "epoch": 0.4570500969472877, + "grad_norm": 1.3399629678875675, + "learning_rate": 2.3731556885761656e-06, + "loss": 0.9182, + "step": 5068 + }, + { + "epoch": 0.457140280470758, + "grad_norm": 0.6517043034871374, + "learning_rate": 2.372581720669449e-06, + "loss": 0.788, + "step": 5069 + }, + { + "epoch": 0.45723046399422823, + "grad_norm": 1.6598949271579548, + "learning_rate": 2.3720077209720046e-06, + "loss": 0.9736, + "step": 5070 + }, + { + "epoch": 0.4573206475176985, + "grad_norm": 0.737275826602977, + "learning_rate": 2.3714336895328112e-06, + "loss": 0.8059, + "step": 5071 + }, + { + "epoch": 0.45741083104116875, + "grad_norm": 1.6374174461230226, + "learning_rate": 2.370859626400847e-06, + "loss": 0.9034, + "step": 5072 + }, + { + "epoch": 0.45750101456463904, + "grad_norm": 1.3552348789876725, + "learning_rate": 2.3702855316250943e-06, + "loss": 0.9275, + "step": 5073 + }, + { + "epoch": 0.45759119808810933, + "grad_norm": 1.511555349898132, + "learning_rate": 2.369711405254539e-06, + "loss": 0.9478, + "step": 5074 + }, + { + "epoch": 0.45768138161157956, + "grad_norm": 1.7363569602674556, + "learning_rate": 2.3691372473381673e-06, + "loss": 0.9982, + "step": 5075 + }, + { + "epoch": 0.45777156513504985, + "grad_norm": 1.5174212478992277, + "learning_rate": 2.3685630579249708e-06, + "loss": 1.0075, + "step": 5076 + }, + { + "epoch": 0.4578617486585201, + "grad_norm": 1.3953031225626236, + "learning_rate": 2.367988837063942e-06, + "loss": 0.9212, + "step": 5077 + }, + { + "epoch": 0.4579519321819904, + "grad_norm": 1.499418702341354, + "learning_rate": 2.367414584804076e-06, + "loss": 1.0241, + "step": 5078 + }, + { + "epoch": 0.4580421157054606, + "grad_norm": 1.3011622515287886, + "learning_rate": 2.366840301194372e-06, + "loss": 0.9807, + "step": 5079 + }, + { + "epoch": 0.4581322992289309, + "grad_norm": 1.3042967433877966, + "learning_rate": 2.3662659862838308e-06, + "loss": 0.9381, + "step": 5080 + }, + { + "epoch": 0.45822248275240113, + "grad_norm": 1.406415124311485, + "learning_rate": 2.365691640121456e-06, + "loss": 1.0018, + "step": 5081 + }, + { + "epoch": 0.4583126662758714, + "grad_norm": 1.778716711937824, + "learning_rate": 2.365117262756254e-06, + "loss": 0.8876, + "step": 5082 + }, + { + "epoch": 0.45840284979934165, + "grad_norm": 1.6682153023739061, + "learning_rate": 2.3645428542372342e-06, + "loss": 0.9432, + "step": 5083 + }, + { + "epoch": 0.45849303332281194, + "grad_norm": 0.7853569799016747, + "learning_rate": 2.3639684146134083e-06, + "loss": 0.7571, + "step": 5084 + }, + { + "epoch": 0.45858321684628217, + "grad_norm": 2.10340431931899, + "learning_rate": 2.3633939439337897e-06, + "loss": 0.9143, + "step": 5085 + }, + { + "epoch": 0.45867340036975246, + "grad_norm": 1.6977856435324477, + "learning_rate": 2.362819442247396e-06, + "loss": 0.914, + "step": 5086 + }, + { + "epoch": 0.4587635838932227, + "grad_norm": 0.7372681059193805, + "learning_rate": 2.3622449096032477e-06, + "loss": 0.7592, + "step": 5087 + }, + { + "epoch": 0.458853767416693, + "grad_norm": 1.527464488688995, + "learning_rate": 2.361670346050366e-06, + "loss": 0.9826, + "step": 5088 + }, + { + "epoch": 0.4589439509401632, + "grad_norm": 1.7080342643525963, + "learning_rate": 2.3610957516377757e-06, + "loss": 0.8716, + "step": 5089 + }, + { + "epoch": 
0.4590341344636335, + "grad_norm": 1.4601522365002608, + "learning_rate": 2.3605211264145048e-06, + "loss": 0.9439, + "step": 5090 + }, + { + "epoch": 0.45912431798710374, + "grad_norm": 1.5203048972462136, + "learning_rate": 2.3599464704295836e-06, + "loss": 0.9173, + "step": 5091 + }, + { + "epoch": 0.459214501510574, + "grad_norm": 1.7637126322247663, + "learning_rate": 2.359371783732045e-06, + "loss": 0.8345, + "step": 5092 + }, + { + "epoch": 0.45930468503404426, + "grad_norm": 1.4877821518467547, + "learning_rate": 2.358797066370924e-06, + "loss": 0.9326, + "step": 5093 + }, + { + "epoch": 0.45939486855751455, + "grad_norm": 1.3401775333599841, + "learning_rate": 2.3582223183952594e-06, + "loss": 0.9524, + "step": 5094 + }, + { + "epoch": 0.4594850520809848, + "grad_norm": 1.6507079773007893, + "learning_rate": 2.357647539854091e-06, + "loss": 0.9814, + "step": 5095 + }, + { + "epoch": 0.45957523560445507, + "grad_norm": 1.6426983765481984, + "learning_rate": 2.3570727307964624e-06, + "loss": 0.9561, + "step": 5096 + }, + { + "epoch": 0.4596654191279253, + "grad_norm": 1.440948248156948, + "learning_rate": 2.35649789127142e-06, + "loss": 0.9933, + "step": 5097 + }, + { + "epoch": 0.4597556026513956, + "grad_norm": 1.4948984655137734, + "learning_rate": 2.3559230213280115e-06, + "loss": 0.9178, + "step": 5098 + }, + { + "epoch": 0.4598457861748659, + "grad_norm": 1.2884023189622107, + "learning_rate": 2.3553481210152886e-06, + "loss": 0.8652, + "step": 5099 + }, + { + "epoch": 0.4599359696983361, + "grad_norm": 0.817622287539876, + "learning_rate": 2.3547731903823043e-06, + "loss": 0.8524, + "step": 5100 + }, + { + "epoch": 0.4600261532218064, + "grad_norm": 1.2978149592603405, + "learning_rate": 2.3541982294781155e-06, + "loss": 0.9204, + "step": 5101 + }, + { + "epoch": 0.46011633674527663, + "grad_norm": 1.345797996332653, + "learning_rate": 2.3536232383517804e-06, + "loss": 0.9421, + "step": 5102 + }, + { + "epoch": 0.4602065202687469, + "grad_norm": 1.2234134462216004, + "learning_rate": 2.3530482170523602e-06, + "loss": 0.9495, + "step": 5103 + }, + { + "epoch": 0.46029670379221715, + "grad_norm": 1.264172792279451, + "learning_rate": 2.3524731656289206e-06, + "loss": 1.0335, + "step": 5104 + }, + { + "epoch": 0.46038688731568744, + "grad_norm": 1.4202782185092475, + "learning_rate": 2.351898084130526e-06, + "loss": 1.0321, + "step": 5105 + }, + { + "epoch": 0.4604770708391577, + "grad_norm": 13.315633915341126, + "learning_rate": 2.351322972606247e-06, + "loss": 1.0109, + "step": 5106 + }, + { + "epoch": 0.46056725436262796, + "grad_norm": 1.7043649202166944, + "learning_rate": 2.350747831105155e-06, + "loss": 1.0951, + "step": 5107 + }, + { + "epoch": 0.4606574378860982, + "grad_norm": 1.995507379990869, + "learning_rate": 2.350172659676323e-06, + "loss": 1.0275, + "step": 5108 + }, + { + "epoch": 0.4607476214095685, + "grad_norm": 1.3521081341240577, + "learning_rate": 2.3495974583688306e-06, + "loss": 0.9836, + "step": 5109 + }, + { + "epoch": 0.4608378049330387, + "grad_norm": 1.2969992244926964, + "learning_rate": 2.3490222272317543e-06, + "loss": 1.0198, + "step": 5110 + }, + { + "epoch": 0.460927988456509, + "grad_norm": 1.5059241294544343, + "learning_rate": 2.348446966314177e-06, + "loss": 0.9955, + "step": 5111 + }, + { + "epoch": 0.46101817197997924, + "grad_norm": 1.316598668736853, + "learning_rate": 2.3478716756651837e-06, + "loss": 0.9251, + "step": 5112 + }, + { + "epoch": 0.46110835550344953, + "grad_norm": 1.4501953947022284, + "learning_rate": 
2.347296355333861e-06, + "loss": 0.9379, + "step": 5113 + }, + { + "epoch": 0.46119853902691976, + "grad_norm": 1.4373649036116902, + "learning_rate": 2.3467210053692972e-06, + "loss": 0.954, + "step": 5114 + }, + { + "epoch": 0.46128872255039005, + "grad_norm": 1.7532673035119566, + "learning_rate": 2.3461456258205866e-06, + "loss": 0.9551, + "step": 5115 + }, + { + "epoch": 0.4613789060738603, + "grad_norm": 1.6600302255705284, + "learning_rate": 2.345570216736822e-06, + "loss": 0.8908, + "step": 5116 + }, + { + "epoch": 0.4614690895973306, + "grad_norm": 1.502831964618598, + "learning_rate": 2.3449947781671013e-06, + "loss": 0.9553, + "step": 5117 + }, + { + "epoch": 0.4615592731208008, + "grad_norm": 1.5261078734460558, + "learning_rate": 2.3444193101605237e-06, + "loss": 0.977, + "step": 5118 + }, + { + "epoch": 0.4616494566442711, + "grad_norm": 1.4616353209442787, + "learning_rate": 2.3438438127661913e-06, + "loss": 0.9634, + "step": 5119 + }, + { + "epoch": 0.4617396401677413, + "grad_norm": 1.6838873587287828, + "learning_rate": 2.3432682860332096e-06, + "loss": 0.7898, + "step": 5120 + }, + { + "epoch": 0.4618298236912116, + "grad_norm": 1.3552553739991817, + "learning_rate": 2.342692730010684e-06, + "loss": 0.8969, + "step": 5121 + }, + { + "epoch": 0.4619200072146819, + "grad_norm": 2.025202978816621, + "learning_rate": 2.342117144747726e-06, + "loss": 0.9543, + "step": 5122 + }, + { + "epoch": 0.46201019073815214, + "grad_norm": 1.4838435777755228, + "learning_rate": 2.3415415302934457e-06, + "loss": 0.9411, + "step": 5123 + }, + { + "epoch": 0.4621003742616224, + "grad_norm": 1.466814673426261, + "learning_rate": 2.340965886696959e-06, + "loss": 0.9744, + "step": 5124 + }, + { + "epoch": 0.46219055778509266, + "grad_norm": 1.3454375205614786, + "learning_rate": 2.340390214007384e-06, + "loss": 0.9033, + "step": 5125 + }, + { + "epoch": 0.46228074130856295, + "grad_norm": 1.5529135546675685, + "learning_rate": 2.339814512273838e-06, + "loss": 0.834, + "step": 5126 + }, + { + "epoch": 0.4623709248320332, + "grad_norm": 1.3872258508326112, + "learning_rate": 2.3392387815454447e-06, + "loss": 1.0502, + "step": 5127 + }, + { + "epoch": 0.46246110835550347, + "grad_norm": 1.5516085399853892, + "learning_rate": 2.3386630218713273e-06, + "loss": 0.9478, + "step": 5128 + }, + { + "epoch": 0.4625512918789737, + "grad_norm": 1.4932696827169751, + "learning_rate": 2.3380872333006135e-06, + "loss": 0.9619, + "step": 5129 + }, + { + "epoch": 0.462641475402444, + "grad_norm": 13.427849573369485, + "learning_rate": 2.3375114158824335e-06, + "loss": 0.9941, + "step": 5130 + }, + { + "epoch": 0.4627316589259142, + "grad_norm": 1.4393804938161394, + "learning_rate": 2.3369355696659184e-06, + "loss": 0.9857, + "step": 5131 + }, + { + "epoch": 0.4628218424493845, + "grad_norm": 1.3946778890837164, + "learning_rate": 2.336359694700202e-06, + "loss": 0.934, + "step": 5132 + }, + { + "epoch": 0.46291202597285475, + "grad_norm": 1.4100542110816474, + "learning_rate": 2.335783791034422e-06, + "loss": 0.9528, + "step": 5133 + }, + { + "epoch": 0.46300220949632503, + "grad_norm": 1.44331940013571, + "learning_rate": 2.3352078587177173e-06, + "loss": 0.9324, + "step": 5134 + }, + { + "epoch": 0.46309239301979527, + "grad_norm": 1.3881331846226905, + "learning_rate": 2.33463189779923e-06, + "loss": 0.9533, + "step": 5135 + }, + { + "epoch": 0.46318257654326556, + "grad_norm": 1.2960320916205648, + "learning_rate": 2.334055908328104e-06, + "loss": 1.0222, + "step": 5136 + }, + { + "epoch": 
0.4632727600667358, + "grad_norm": 1.518947695200129, + "learning_rate": 2.3334798903534866e-06, + "loss": 0.8668, + "step": 5137 + }, + { + "epoch": 0.4633629435902061, + "grad_norm": 1.5778615750564664, + "learning_rate": 2.3329038439245252e-06, + "loss": 0.8502, + "step": 5138 + }, + { + "epoch": 0.4634531271136763, + "grad_norm": 1.6254953216372765, + "learning_rate": 2.3323277690903724e-06, + "loss": 0.9492, + "step": 5139 + }, + { + "epoch": 0.4635433106371466, + "grad_norm": 1.2917541146158402, + "learning_rate": 2.3317516659001827e-06, + "loss": 0.9272, + "step": 5140 + }, + { + "epoch": 0.46363349416061683, + "grad_norm": 1.348119506824746, + "learning_rate": 2.331175534403111e-06, + "loss": 0.959, + "step": 5141 + }, + { + "epoch": 0.4637236776840871, + "grad_norm": 1.3471164672785214, + "learning_rate": 2.3305993746483167e-06, + "loss": 1.0271, + "step": 5142 + }, + { + "epoch": 0.46381386120755735, + "grad_norm": 1.8471696466916778, + "learning_rate": 2.3300231866849606e-06, + "loss": 0.925, + "step": 5143 + }, + { + "epoch": 0.46390404473102764, + "grad_norm": 1.5062845028293113, + "learning_rate": 2.3294469705622067e-06, + "loss": 0.9817, + "step": 5144 + }, + { + "epoch": 0.4639942282544979, + "grad_norm": 1.3937917934019026, + "learning_rate": 2.3288707263292203e-06, + "loss": 0.923, + "step": 5145 + }, + { + "epoch": 0.46408441177796816, + "grad_norm": 1.2586324163709572, + "learning_rate": 2.3282944540351707e-06, + "loss": 0.9679, + "step": 5146 + }, + { + "epoch": 0.46417459530143845, + "grad_norm": 2.378096820198154, + "learning_rate": 2.327718153729228e-06, + "loss": 0.9254, + "step": 5147 + }, + { + "epoch": 0.4642647788249087, + "grad_norm": 1.6206200332653762, + "learning_rate": 2.327141825460566e-06, + "loss": 0.9914, + "step": 5148 + }, + { + "epoch": 0.464354962348379, + "grad_norm": 0.802462158083353, + "learning_rate": 2.326565469278358e-06, + "loss": 0.8216, + "step": 5149 + }, + { + "epoch": 0.4644451458718492, + "grad_norm": 0.7620679788379231, + "learning_rate": 2.3259890852317846e-06, + "loss": 0.8152, + "step": 5150 + }, + { + "epoch": 0.4645353293953195, + "grad_norm": 2.7373597339943205, + "learning_rate": 2.3254126733700246e-06, + "loss": 0.9504, + "step": 5151 + }, + { + "epoch": 0.46462551291878973, + "grad_norm": 1.547893073551215, + "learning_rate": 2.324836233742262e-06, + "loss": 0.9756, + "step": 5152 + }, + { + "epoch": 0.46471569644226, + "grad_norm": 1.6679562427489576, + "learning_rate": 2.3242597663976793e-06, + "loss": 0.9879, + "step": 5153 + }, + { + "epoch": 0.46480587996573025, + "grad_norm": 1.5579160206335905, + "learning_rate": 2.3236832713854663e-06, + "loss": 0.9255, + "step": 5154 + }, + { + "epoch": 0.46489606348920054, + "grad_norm": 1.5171178956963558, + "learning_rate": 2.323106748754812e-06, + "loss": 0.9444, + "step": 5155 + }, + { + "epoch": 0.4649862470126708, + "grad_norm": 1.3232145299519655, + "learning_rate": 2.3225301985549077e-06, + "loss": 1.0394, + "step": 5156 + }, + { + "epoch": 0.46507643053614106, + "grad_norm": 1.5265732014182385, + "learning_rate": 2.321953620834948e-06, + "loss": 0.8831, + "step": 5157 + }, + { + "epoch": 0.4651666140596113, + "grad_norm": 1.2851024952693357, + "learning_rate": 2.3213770156441314e-06, + "loss": 0.9224, + "step": 5158 + }, + { + "epoch": 0.4652567975830816, + "grad_norm": 1.689658867981619, + "learning_rate": 2.3208003830316554e-06, + "loss": 0.9946, + "step": 5159 + }, + { + "epoch": 0.4653469811065518, + "grad_norm": 1.4304865704212388, + "learning_rate": 
2.3202237230467215e-06, + "loss": 1.035, + "step": 5160 + }, + { + "epoch": 0.4654371646300221, + "grad_norm": 1.2315670379810604, + "learning_rate": 2.3196470357385338e-06, + "loss": 0.9021, + "step": 5161 + }, + { + "epoch": 0.46552734815349234, + "grad_norm": 1.304242497921807, + "learning_rate": 2.319070321156299e-06, + "loss": 0.9371, + "step": 5162 + }, + { + "epoch": 0.4656175316769626, + "grad_norm": 1.445874244191759, + "learning_rate": 2.318493579349224e-06, + "loss": 1.0815, + "step": 5163 + }, + { + "epoch": 0.46570771520043286, + "grad_norm": 1.9325186158303898, + "learning_rate": 2.317916810366522e-06, + "loss": 0.9424, + "step": 5164 + }, + { + "epoch": 0.46579789872390315, + "grad_norm": 1.3833885070797667, + "learning_rate": 2.317340014257404e-06, + "loss": 0.89, + "step": 5165 + }, + { + "epoch": 0.4658880822473734, + "grad_norm": 1.3105046681914423, + "learning_rate": 2.316763191071086e-06, + "loss": 0.8732, + "step": 5166 + }, + { + "epoch": 0.46597826577084367, + "grad_norm": 1.3793239663228398, + "learning_rate": 2.316186340856787e-06, + "loss": 0.9067, + "step": 5167 + }, + { + "epoch": 0.4660684492943139, + "grad_norm": 1.6003712759787283, + "learning_rate": 2.315609463663725e-06, + "loss": 0.8966, + "step": 5168 + }, + { + "epoch": 0.4661586328177842, + "grad_norm": 1.5448282746367201, + "learning_rate": 2.315032559541123e-06, + "loss": 0.9258, + "step": 5169 + }, + { + "epoch": 0.4662488163412545, + "grad_norm": 1.5460741686629118, + "learning_rate": 2.314455628538207e-06, + "loss": 0.9725, + "step": 5170 + }, + { + "epoch": 0.4663389998647247, + "grad_norm": 1.215628514615742, + "learning_rate": 2.3138786707042023e-06, + "loss": 0.9172, + "step": 5171 + }, + { + "epoch": 0.466429183388195, + "grad_norm": 1.3245548058184071, + "learning_rate": 2.3133016860883387e-06, + "loss": 0.9576, + "step": 5172 + }, + { + "epoch": 0.46651936691166523, + "grad_norm": 1.7271896535115614, + "learning_rate": 2.3127246747398475e-06, + "loss": 0.9329, + "step": 5173 + }, + { + "epoch": 0.4666095504351355, + "grad_norm": 1.1969562771842008, + "learning_rate": 2.312147636707963e-06, + "loss": 0.9424, + "step": 5174 + }, + { + "epoch": 0.46669973395860576, + "grad_norm": 1.5821328212957315, + "learning_rate": 2.3115705720419214e-06, + "loss": 0.9378, + "step": 5175 + }, + { + "epoch": 0.46678991748207604, + "grad_norm": 1.4532483776588119, + "learning_rate": 2.31099348079096e-06, + "loss": 0.9714, + "step": 5176 + }, + { + "epoch": 0.4668801010055463, + "grad_norm": 1.182325836001568, + "learning_rate": 2.31041636300432e-06, + "loss": 0.9847, + "step": 5177 + }, + { + "epoch": 0.46697028452901657, + "grad_norm": 1.541330902377085, + "learning_rate": 2.3098392187312445e-06, + "loss": 1.0282, + "step": 5178 + }, + { + "epoch": 0.4670604680524868, + "grad_norm": 1.453229920896095, + "learning_rate": 2.309262048020978e-06, + "loss": 0.9282, + "step": 5179 + }, + { + "epoch": 0.4671506515759571, + "grad_norm": 2.747320690603895, + "learning_rate": 2.308684850922769e-06, + "loss": 0.9817, + "step": 5180 + }, + { + "epoch": 0.4672408350994273, + "grad_norm": 1.4476065909714977, + "learning_rate": 2.3081076274858664e-06, + "loss": 0.9533, + "step": 5181 + }, + { + "epoch": 0.4673310186228976, + "grad_norm": 1.694742906928395, + "learning_rate": 2.307530377759522e-06, + "loss": 0.9836, + "step": 5182 + }, + { + "epoch": 0.46742120214636784, + "grad_norm": 1.7588392183818748, + "learning_rate": 2.30695310179299e-06, + "loss": 0.8865, + "step": 5183 + }, + { + "epoch": 0.46751138566983813, + 
"grad_norm": 1.344520082045017, + "learning_rate": 2.3063757996355267e-06, + "loss": 0.9395, + "step": 5184 + }, + { + "epoch": 0.46760156919330836, + "grad_norm": 1.5925077196829396, + "learning_rate": 2.3057984713363903e-06, + "loss": 0.9806, + "step": 5185 + }, + { + "epoch": 0.46769175271677865, + "grad_norm": 0.6600275534961629, + "learning_rate": 2.3052211169448436e-06, + "loss": 0.7968, + "step": 5186 + }, + { + "epoch": 0.4677819362402489, + "grad_norm": 1.546218038982639, + "learning_rate": 2.3046437365101474e-06, + "loss": 0.8939, + "step": 5187 + }, + { + "epoch": 0.4678721197637192, + "grad_norm": 1.4971019246346426, + "learning_rate": 2.3040663300815673e-06, + "loss": 0.9334, + "step": 5188 + }, + { + "epoch": 0.4679623032871894, + "grad_norm": 0.6827113096210915, + "learning_rate": 2.3034888977083723e-06, + "loss": 0.7839, + "step": 5189 + }, + { + "epoch": 0.4680524868106597, + "grad_norm": 1.4875213204787299, + "learning_rate": 2.30291143943983e-06, + "loss": 0.868, + "step": 5190 + }, + { + "epoch": 0.46814267033412993, + "grad_norm": 1.4398665643168882, + "learning_rate": 2.3023339553252145e-06, + "loss": 0.9098, + "step": 5191 + }, + { + "epoch": 0.4682328538576002, + "grad_norm": 1.41092717873483, + "learning_rate": 2.301756445413799e-06, + "loss": 0.9849, + "step": 5192 + }, + { + "epoch": 0.4683230373810705, + "grad_norm": 1.5209899804251141, + "learning_rate": 2.3011789097548585e-06, + "loss": 0.9477, + "step": 5193 + }, + { + "epoch": 0.46841322090454074, + "grad_norm": 1.3259775640191984, + "learning_rate": 2.3006013483976738e-06, + "loss": 0.9437, + "step": 5194 + }, + { + "epoch": 0.468503404428011, + "grad_norm": 1.6479751878740871, + "learning_rate": 2.300023761391524e-06, + "loss": 0.9487, + "step": 5195 + }, + { + "epoch": 0.46859358795148126, + "grad_norm": 1.6373051068754683, + "learning_rate": 2.299446148785693e-06, + "loss": 0.9987, + "step": 5196 + }, + { + "epoch": 0.46868377147495155, + "grad_norm": 1.6185647011529187, + "learning_rate": 2.2988685106294654e-06, + "loss": 0.9271, + "step": 5197 + }, + { + "epoch": 0.4687739549984218, + "grad_norm": 1.418793073496058, + "learning_rate": 2.2982908469721284e-06, + "loss": 0.8503, + "step": 5198 + }, + { + "epoch": 0.46886413852189207, + "grad_norm": 1.6729924398184157, + "learning_rate": 2.2977131578629714e-06, + "loss": 1.0212, + "step": 5199 + }, + { + "epoch": 0.4689543220453623, + "grad_norm": 2.2570242908406146, + "learning_rate": 2.297135443351286e-06, + "loss": 0.9749, + "step": 5200 + }, + { + "epoch": 0.4690445055688326, + "grad_norm": 1.2300428349704418, + "learning_rate": 2.296557703486367e-06, + "loss": 0.9618, + "step": 5201 + }, + { + "epoch": 0.4691346890923028, + "grad_norm": 6.526405691683701, + "learning_rate": 2.295979938317509e-06, + "loss": 0.9746, + "step": 5202 + }, + { + "epoch": 0.4692248726157731, + "grad_norm": 1.4479715990948436, + "learning_rate": 2.295402147894011e-06, + "loss": 0.946, + "step": 5203 + }, + { + "epoch": 0.46931505613924335, + "grad_norm": 1.2192812030953646, + "learning_rate": 2.2948243322651723e-06, + "loss": 0.8694, + "step": 5204 + }, + { + "epoch": 0.46940523966271364, + "grad_norm": 1.46789452820426, + "learning_rate": 2.2942464914802962e-06, + "loss": 0.9479, + "step": 5205 + }, + { + "epoch": 0.46949542318618387, + "grad_norm": 1.513074794032478, + "learning_rate": 2.293668625588687e-06, + "loss": 0.9248, + "step": 5206 + }, + { + "epoch": 0.46958560670965416, + "grad_norm": 1.5697354447342642, + "learning_rate": 2.293090734639651e-06, + "loss": 0.984, 
+ "step": 5207 + }, + { + "epoch": 0.4696757902331244, + "grad_norm": 1.5655393413326457, + "learning_rate": 2.2925128186824983e-06, + "loss": 1.0716, + "step": 5208 + }, + { + "epoch": 0.4697659737565947, + "grad_norm": 3.7255692987832907, + "learning_rate": 2.2919348777665384e-06, + "loss": 1.0448, + "step": 5209 + }, + { + "epoch": 0.4698561572800649, + "grad_norm": 1.5096142533824668, + "learning_rate": 2.2913569119410856e-06, + "loss": 0.9532, + "step": 5210 + }, + { + "epoch": 0.4699463408035352, + "grad_norm": 1.3768829111364604, + "learning_rate": 2.290778921255454e-06, + "loss": 0.9723, + "step": 5211 + }, + { + "epoch": 0.47003652432700543, + "grad_norm": 1.309499626328574, + "learning_rate": 2.2902009057589613e-06, + "loss": 0.984, + "step": 5212 + }, + { + "epoch": 0.4701267078504757, + "grad_norm": 1.1640247108418968, + "learning_rate": 2.2896228655009276e-06, + "loss": 0.9099, + "step": 5213 + }, + { + "epoch": 0.47021689137394596, + "grad_norm": 1.727086397666038, + "learning_rate": 2.289044800530674e-06, + "loss": 0.9159, + "step": 5214 + }, + { + "epoch": 0.47030707489741624, + "grad_norm": 1.7876518051669692, + "learning_rate": 2.2884667108975245e-06, + "loss": 0.9157, + "step": 5215 + }, + { + "epoch": 0.4703972584208865, + "grad_norm": 1.46269145266716, + "learning_rate": 2.287888596650804e-06, + "loss": 0.987, + "step": 5216 + }, + { + "epoch": 0.47048744194435677, + "grad_norm": 1.5088478447287124, + "learning_rate": 2.287310457839841e-06, + "loss": 0.9515, + "step": 5217 + }, + { + "epoch": 0.47057762546782705, + "grad_norm": 1.6559338537817825, + "learning_rate": 2.286732294513966e-06, + "loss": 1.0222, + "step": 5218 + }, + { + "epoch": 0.4706678089912973, + "grad_norm": 1.5304044607776452, + "learning_rate": 2.2861541067225106e-06, + "loss": 0.9573, + "step": 5219 + }, + { + "epoch": 0.4707579925147676, + "grad_norm": 1.8897453342404873, + "learning_rate": 2.2855758945148095e-06, + "loss": 0.9397, + "step": 5220 + }, + { + "epoch": 0.4708481760382378, + "grad_norm": 1.3262802326952334, + "learning_rate": 2.2849976579401977e-06, + "loss": 1.0065, + "step": 5221 + }, + { + "epoch": 0.4709383595617081, + "grad_norm": 1.856580477494103, + "learning_rate": 2.284419397048014e-06, + "loss": 0.945, + "step": 5222 + }, + { + "epoch": 0.47102854308517833, + "grad_norm": 1.3499766400753328, + "learning_rate": 2.2838411118875997e-06, + "loss": 0.9604, + "step": 5223 + }, + { + "epoch": 0.4711187266086486, + "grad_norm": 1.3414440329667703, + "learning_rate": 2.283262802508296e-06, + "loss": 0.8782, + "step": 5224 + }, + { + "epoch": 0.47120891013211885, + "grad_norm": 1.110031148799957, + "learning_rate": 2.2826844689594492e-06, + "loss": 0.9288, + "step": 5225 + }, + { + "epoch": 0.47129909365558914, + "grad_norm": 1.38352801243775, + "learning_rate": 2.282106111290404e-06, + "loss": 1.0303, + "step": 5226 + }, + { + "epoch": 0.4713892771790594, + "grad_norm": 0.6789662272097912, + "learning_rate": 2.2815277295505098e-06, + "loss": 0.8236, + "step": 5227 + }, + { + "epoch": 0.47147946070252966, + "grad_norm": 1.5199185934352426, + "learning_rate": 2.2809493237891174e-06, + "loss": 0.9043, + "step": 5228 + }, + { + "epoch": 0.4715696442259999, + "grad_norm": 1.3416391693677776, + "learning_rate": 2.2803708940555796e-06, + "loss": 0.9791, + "step": 5229 + }, + { + "epoch": 0.4716598277494702, + "grad_norm": 1.2500655633898297, + "learning_rate": 2.2797924403992514e-06, + "loss": 1.0269, + "step": 5230 + }, + { + "epoch": 0.4717500112729404, + "grad_norm": 1.5492950928499722, + 
"learning_rate": 2.2792139628694892e-06, + "loss": 0.9675, + "step": 5231 + }, + { + "epoch": 0.4718401947964107, + "grad_norm": 1.2552104121569545, + "learning_rate": 2.2786354615156524e-06, + "loss": 0.9198, + "step": 5232 + }, + { + "epoch": 0.47193037831988094, + "grad_norm": 1.6258404099104946, + "learning_rate": 2.2780569363871016e-06, + "loss": 1.0123, + "step": 5233 + }, + { + "epoch": 0.4720205618433512, + "grad_norm": 0.6502768385502542, + "learning_rate": 2.277478387533199e-06, + "loss": 0.7886, + "step": 5234 + }, + { + "epoch": 0.47211074536682146, + "grad_norm": 1.7262754590078968, + "learning_rate": 2.276899815003311e-06, + "loss": 0.9912, + "step": 5235 + }, + { + "epoch": 0.47220092889029175, + "grad_norm": 1.3984686139444182, + "learning_rate": 2.2763212188468045e-06, + "loss": 0.9391, + "step": 5236 + }, + { + "epoch": 0.472291112413762, + "grad_norm": 1.2364599751196708, + "learning_rate": 2.2757425991130473e-06, + "loss": 0.9663, + "step": 5237 + }, + { + "epoch": 0.47238129593723227, + "grad_norm": 0.8515786685633548, + "learning_rate": 2.2751639558514117e-06, + "loss": 0.8286, + "step": 5238 + }, + { + "epoch": 0.4724714794607025, + "grad_norm": 1.2126405123276918, + "learning_rate": 2.2745852891112697e-06, + "loss": 0.9889, + "step": 5239 + }, + { + "epoch": 0.4725616629841728, + "grad_norm": 1.683008220487069, + "learning_rate": 2.274006598941997e-06, + "loss": 0.962, + "step": 5240 + }, + { + "epoch": 0.4726518465076431, + "grad_norm": 1.6737599665008167, + "learning_rate": 2.27342788539297e-06, + "loss": 0.9368, + "step": 5241 + }, + { + "epoch": 0.4727420300311133, + "grad_norm": 1.406365877251589, + "learning_rate": 2.2728491485135684e-06, + "loss": 0.9137, + "step": 5242 + }, + { + "epoch": 0.4728322135545836, + "grad_norm": 1.3642320787519036, + "learning_rate": 2.272270388353173e-06, + "loss": 1.0054, + "step": 5243 + }, + { + "epoch": 0.47292239707805384, + "grad_norm": 1.6256409628090602, + "learning_rate": 2.2716916049611666e-06, + "loss": 0.9268, + "step": 5244 + }, + { + "epoch": 0.4730125806015241, + "grad_norm": 1.3245217755847232, + "learning_rate": 2.2711127983869346e-06, + "loss": 0.9366, + "step": 5245 + }, + { + "epoch": 0.47310276412499436, + "grad_norm": 1.2412520431086849, + "learning_rate": 2.270533968679864e-06, + "loss": 0.9607, + "step": 5246 + }, + { + "epoch": 0.47319294764846465, + "grad_norm": 1.277156851157841, + "learning_rate": 2.269955115889343e-06, + "loss": 0.9562, + "step": 5247 + }, + { + "epoch": 0.4732831311719349, + "grad_norm": 1.5883732774214359, + "learning_rate": 2.269376240064763e-06, + "loss": 0.9116, + "step": 5248 + }, + { + "epoch": 0.47337331469540517, + "grad_norm": 1.4927016565777391, + "learning_rate": 2.268797341255517e-06, + "loss": 0.9692, + "step": 5249 + }, + { + "epoch": 0.4734634982188754, + "grad_norm": 1.4696225760036685, + "learning_rate": 2.268218419511e-06, + "loss": 0.8667, + "step": 5250 + }, + { + "epoch": 0.4735536817423457, + "grad_norm": 1.4272555319297495, + "learning_rate": 2.267639474880608e-06, + "loss": 0.8743, + "step": 5251 + }, + { + "epoch": 0.4736438652658159, + "grad_norm": 1.2245818261267822, + "learning_rate": 2.2670605074137407e-06, + "loss": 0.9272, + "step": 5252 + }, + { + "epoch": 0.4737340487892862, + "grad_norm": 1.5659181210048259, + "learning_rate": 2.2664815171597983e-06, + "loss": 0.9374, + "step": 5253 + }, + { + "epoch": 0.47382423231275644, + "grad_norm": 1.455662634278281, + "learning_rate": 2.265902504168183e-06, + "loss": 0.9008, + "step": 5254 + }, + { + "epoch": 
0.47391441583622673, + "grad_norm": 1.4285447186289124, + "learning_rate": 2.2653234684883007e-06, + "loss": 0.9103, + "step": 5255 + }, + { + "epoch": 0.47400459935969697, + "grad_norm": 1.657196494230392, + "learning_rate": 2.264744410169556e-06, + "loss": 0.8729, + "step": 5256 + }, + { + "epoch": 0.47409478288316725, + "grad_norm": 1.2938236971149928, + "learning_rate": 2.264165329261359e-06, + "loss": 0.931, + "step": 5257 + }, + { + "epoch": 0.4741849664066375, + "grad_norm": 0.649979768034703, + "learning_rate": 2.26358622581312e-06, + "loss": 0.8021, + "step": 5258 + }, + { + "epoch": 0.4742751499301078, + "grad_norm": 1.417910487972088, + "learning_rate": 2.2630070998742504e-06, + "loss": 1.0168, + "step": 5259 + }, + { + "epoch": 0.474365333453578, + "grad_norm": 1.2398273911625821, + "learning_rate": 2.262427951494165e-06, + "loss": 0.9728, + "step": 5260 + }, + { + "epoch": 0.4744555169770483, + "grad_norm": 0.7107297153536942, + "learning_rate": 2.2618487807222794e-06, + "loss": 0.8388, + "step": 5261 + }, + { + "epoch": 0.47454570050051853, + "grad_norm": 1.4607318590648024, + "learning_rate": 2.261269587608012e-06, + "loss": 1.0326, + "step": 5262 + }, + { + "epoch": 0.4746358840239888, + "grad_norm": 1.181350697660666, + "learning_rate": 2.260690372200783e-06, + "loss": 0.95, + "step": 5263 + }, + { + "epoch": 0.47472606754745905, + "grad_norm": 1.280788059232807, + "learning_rate": 2.2601111345500138e-06, + "loss": 1.0343, + "step": 5264 + }, + { + "epoch": 0.47481625107092934, + "grad_norm": 1.4626216055368786, + "learning_rate": 2.2595318747051286e-06, + "loss": 0.9412, + "step": 5265 + }, + { + "epoch": 0.47490643459439963, + "grad_norm": 2.6060331039375457, + "learning_rate": 2.258952592715553e-06, + "loss": 0.9805, + "step": 5266 + }, + { + "epoch": 0.47499661811786986, + "grad_norm": 0.7824648757217676, + "learning_rate": 2.2583732886307142e-06, + "loss": 0.8516, + "step": 5267 + }, + { + "epoch": 0.47508680164134015, + "grad_norm": 1.4739900816968043, + "learning_rate": 2.2577939625000414e-06, + "loss": 0.9024, + "step": 5268 + }, + { + "epoch": 0.4751769851648104, + "grad_norm": 0.7477508038688989, + "learning_rate": 2.257214614372967e-06, + "loss": 0.8081, + "step": 5269 + }, + { + "epoch": 0.4752671686882807, + "grad_norm": 1.4184074458846068, + "learning_rate": 2.2566352442989227e-06, + "loss": 0.9775, + "step": 5270 + }, + { + "epoch": 0.4753573522117509, + "grad_norm": 1.2972074794861845, + "learning_rate": 2.256055852327344e-06, + "loss": 0.9464, + "step": 5271 + }, + { + "epoch": 0.4754475357352212, + "grad_norm": 1.460526025933951, + "learning_rate": 2.2554764385076685e-06, + "loss": 0.8905, + "step": 5272 + }, + { + "epoch": 0.4755377192586914, + "grad_norm": 1.290510901333015, + "learning_rate": 2.2548970028893348e-06, + "loss": 0.9665, + "step": 5273 + }, + { + "epoch": 0.4756279027821617, + "grad_norm": 1.5915032800562139, + "learning_rate": 2.254317545521783e-06, + "loss": 0.9694, + "step": 5274 + }, + { + "epoch": 0.47571808630563195, + "grad_norm": 1.4353824443456331, + "learning_rate": 2.253738066454457e-06, + "loss": 0.9832, + "step": 5275 + }, + { + "epoch": 0.47580826982910224, + "grad_norm": 1.2521238404046122, + "learning_rate": 2.2531585657367986e-06, + "loss": 0.9222, + "step": 5276 + }, + { + "epoch": 0.47589845335257247, + "grad_norm": 1.3808007126667088, + "learning_rate": 2.252579043418256e-06, + "loss": 0.9001, + "step": 5277 + }, + { + "epoch": 0.47598863687604276, + "grad_norm": 1.3550373457234892, + "learning_rate": 
2.251999499548277e-06, + "loss": 0.9223, + "step": 5278 + }, + { + "epoch": 0.476078820399513, + "grad_norm": 1.552978419672857, + "learning_rate": 2.251419934176311e-06, + "loss": 0.9169, + "step": 5279 + }, + { + "epoch": 0.4761690039229833, + "grad_norm": 1.2126319597148942, + "learning_rate": 2.25084034735181e-06, + "loss": 1.0067, + "step": 5280 + }, + { + "epoch": 0.4762591874464535, + "grad_norm": 1.3521343631058726, + "learning_rate": 2.2502607391242274e-06, + "loss": 1.0017, + "step": 5281 + }, + { + "epoch": 0.4763493709699238, + "grad_norm": 0.7005857818067349, + "learning_rate": 2.2496811095430182e-06, + "loss": 0.8217, + "step": 5282 + }, + { + "epoch": 0.47643955449339404, + "grad_norm": 1.762369378302058, + "learning_rate": 2.249101458657641e-06, + "loss": 0.9482, + "step": 5283 + }, + { + "epoch": 0.4765297380168643, + "grad_norm": 1.5836131534601534, + "learning_rate": 2.2485217865175526e-06, + "loss": 1.0192, + "step": 5284 + }, + { + "epoch": 0.47661992154033456, + "grad_norm": 1.5514745435725452, + "learning_rate": 2.2479420931722156e-06, + "loss": 1.0805, + "step": 5285 + }, + { + "epoch": 0.47671010506380485, + "grad_norm": 0.7246773988612789, + "learning_rate": 2.2473623786710923e-06, + "loss": 0.8346, + "step": 5286 + }, + { + "epoch": 0.4768002885872751, + "grad_norm": 1.6199898879595096, + "learning_rate": 2.2467826430636465e-06, + "loss": 0.8665, + "step": 5287 + }, + { + "epoch": 0.47689047211074537, + "grad_norm": 1.2195447628975893, + "learning_rate": 2.246202886399345e-06, + "loss": 0.9741, + "step": 5288 + }, + { + "epoch": 0.47698065563421566, + "grad_norm": 1.3415242324869843, + "learning_rate": 2.2456231087276556e-06, + "loss": 0.9524, + "step": 5289 + }, + { + "epoch": 0.4770708391576859, + "grad_norm": 1.3548685113515861, + "learning_rate": 2.245043310098048e-06, + "loss": 0.8778, + "step": 5290 + }, + { + "epoch": 0.4771610226811562, + "grad_norm": 1.6570574393713993, + "learning_rate": 2.244463490559995e-06, + "loss": 0.9939, + "step": 5291 + }, + { + "epoch": 0.4772512062046264, + "grad_norm": 1.3945901468272188, + "learning_rate": 2.2438836501629683e-06, + "loss": 0.9604, + "step": 5292 + }, + { + "epoch": 0.4773413897280967, + "grad_norm": 1.2278691517922562, + "learning_rate": 2.2433037889564437e-06, + "loss": 0.9549, + "step": 5293 + }, + { + "epoch": 0.47743157325156693, + "grad_norm": 4.8836169259243505, + "learning_rate": 2.242723906989899e-06, + "loss": 0.8885, + "step": 5294 + }, + { + "epoch": 0.4775217567750372, + "grad_norm": 1.1661698895872783, + "learning_rate": 2.2421440043128114e-06, + "loss": 0.8765, + "step": 5295 + }, + { + "epoch": 0.47761194029850745, + "grad_norm": 1.4532238506206872, + "learning_rate": 2.241564080974662e-06, + "loss": 0.8607, + "step": 5296 + }, + { + "epoch": 0.47770212382197774, + "grad_norm": 9.145435182656744, + "learning_rate": 2.2409841370249343e-06, + "loss": 0.9074, + "step": 5297 + }, + { + "epoch": 0.477792307345448, + "grad_norm": 1.6299746003541535, + "learning_rate": 2.2404041725131106e-06, + "loss": 1.045, + "step": 5298 + }, + { + "epoch": 0.47788249086891826, + "grad_norm": 1.5083851887902642, + "learning_rate": 2.239824187488677e-06, + "loss": 0.9317, + "step": 5299 + }, + { + "epoch": 0.4779726743923885, + "grad_norm": 0.7868661433684248, + "learning_rate": 2.239244182001122e-06, + "loss": 0.8661, + "step": 5300 + }, + { + "epoch": 0.4780628579158588, + "grad_norm": 1.2584357287363905, + "learning_rate": 2.2386641560999336e-06, + "loss": 0.9668, + "step": 5301 + }, + { + "epoch": 
0.478153041439329, + "grad_norm": 1.5637790022858449, + "learning_rate": 2.238084109834604e-06, + "loss": 0.9852, + "step": 5302 + }, + { + "epoch": 0.4782432249627993, + "grad_norm": 1.4321607032781056, + "learning_rate": 2.237504043254625e-06, + "loss": 0.9479, + "step": 5303 + }, + { + "epoch": 0.47833340848626954, + "grad_norm": 1.2940471105298408, + "learning_rate": 2.2369239564094915e-06, + "loss": 0.9233, + "step": 5304 + }, + { + "epoch": 0.47842359200973983, + "grad_norm": 1.322864301150739, + "learning_rate": 2.2363438493486995e-06, + "loss": 0.8756, + "step": 5305 + }, + { + "epoch": 0.47851377553321006, + "grad_norm": 1.5236576801525472, + "learning_rate": 2.235763722121747e-06, + "loss": 0.8358, + "step": 5306 + }, + { + "epoch": 0.47860395905668035, + "grad_norm": 1.4473334951612984, + "learning_rate": 2.2351835747781346e-06, + "loss": 0.9749, + "step": 5307 + }, + { + "epoch": 0.4786941425801506, + "grad_norm": 1.4002046554707077, + "learning_rate": 2.234603407367362e-06, + "loss": 0.9915, + "step": 5308 + }, + { + "epoch": 0.47878432610362087, + "grad_norm": 1.3484645023051407, + "learning_rate": 2.2340232199389337e-06, + "loss": 0.9685, + "step": 5309 + }, + { + "epoch": 0.4788745096270911, + "grad_norm": 1.386422138997954, + "learning_rate": 2.2334430125423538e-06, + "loss": 0.8314, + "step": 5310 + }, + { + "epoch": 0.4789646931505614, + "grad_norm": 1.4526459919609476, + "learning_rate": 2.232862785227128e-06, + "loss": 0.9993, + "step": 5311 + }, + { + "epoch": 0.4790548766740317, + "grad_norm": 1.4068178937550055, + "learning_rate": 2.232282538042766e-06, + "loss": 0.9782, + "step": 5312 + }, + { + "epoch": 0.4791450601975019, + "grad_norm": 1.6042839189094036, + "learning_rate": 2.231702271038777e-06, + "loss": 0.9927, + "step": 5313 + }, + { + "epoch": 0.4792352437209722, + "grad_norm": 1.6186616232765683, + "learning_rate": 2.231121984264673e-06, + "loss": 0.9295, + "step": 5314 + }, + { + "epoch": 0.47932542724444244, + "grad_norm": 1.4273337912460038, + "learning_rate": 2.2305416777699665e-06, + "loss": 0.9755, + "step": 5315 + }, + { + "epoch": 0.4794156107679127, + "grad_norm": 1.4401801458898662, + "learning_rate": 2.229961351604173e-06, + "loss": 0.9597, + "step": 5316 + }, + { + "epoch": 0.47950579429138296, + "grad_norm": 1.509269603263878, + "learning_rate": 2.2293810058168085e-06, + "loss": 0.9087, + "step": 5317 + }, + { + "epoch": 0.47959597781485325, + "grad_norm": 1.3179967310840317, + "learning_rate": 2.2288006404573922e-06, + "loss": 0.968, + "step": 5318 + }, + { + "epoch": 0.4796861613383235, + "grad_norm": 1.5110789283117843, + "learning_rate": 2.228220255575444e-06, + "loss": 0.9407, + "step": 5319 + }, + { + "epoch": 0.47977634486179377, + "grad_norm": 1.7126258636777585, + "learning_rate": 2.2276398512204847e-06, + "loss": 0.903, + "step": 5320 + }, + { + "epoch": 0.479866528385264, + "grad_norm": 1.5372733933844331, + "learning_rate": 2.2270594274420382e-06, + "loss": 0.9558, + "step": 5321 + }, + { + "epoch": 0.4799567119087343, + "grad_norm": 0.7116877610864493, + "learning_rate": 2.22647898428963e-06, + "loss": 0.8333, + "step": 5322 + }, + { + "epoch": 0.4800468954322045, + "grad_norm": 1.1950523741669037, + "learning_rate": 2.225898521812785e-06, + "loss": 0.9992, + "step": 5323 + }, + { + "epoch": 0.4801370789556748, + "grad_norm": 1.4218541238111608, + "learning_rate": 2.2253180400610337e-06, + "loss": 0.9966, + "step": 5324 + }, + { + "epoch": 0.48022726247914505, + "grad_norm": 1.5842270671093028, + "learning_rate": 
2.2247375390839037e-06, + "loss": 0.9306, + "step": 5325 + }, + { + "epoch": 0.48031744600261533, + "grad_norm": 1.419038899453726, + "learning_rate": 2.224157018930928e-06, + "loss": 0.9212, + "step": 5326 + }, + { + "epoch": 0.48040762952608557, + "grad_norm": 1.7090008369742855, + "learning_rate": 2.2235764796516395e-06, + "loss": 0.9497, + "step": 5327 + }, + { + "epoch": 0.48049781304955586, + "grad_norm": 1.3532416877033218, + "learning_rate": 2.222995921295573e-06, + "loss": 0.956, + "step": 5328 + }, + { + "epoch": 0.4805879965730261, + "grad_norm": 1.5956733168545822, + "learning_rate": 2.222415343912265e-06, + "loss": 0.9261, + "step": 5329 + }, + { + "epoch": 0.4806781800964964, + "grad_norm": 2.0841095304938304, + "learning_rate": 2.221834747551254e-06, + "loss": 1.0177, + "step": 5330 + }, + { + "epoch": 0.4807683636199666, + "grad_norm": 1.3241205319689604, + "learning_rate": 2.221254132262078e-06, + "loss": 0.9197, + "step": 5331 + }, + { + "epoch": 0.4808585471434369, + "grad_norm": 1.4870583623939895, + "learning_rate": 2.2206734980942802e-06, + "loss": 0.9907, + "step": 5332 + }, + { + "epoch": 0.48094873066690713, + "grad_norm": 1.6808976183456223, + "learning_rate": 2.2200928450974024e-06, + "loss": 0.9238, + "step": 5333 + }, + { + "epoch": 0.4810389141903774, + "grad_norm": 1.4796512329426423, + "learning_rate": 2.21951217332099e-06, + "loss": 0.9743, + "step": 5334 + }, + { + "epoch": 0.48112909771384765, + "grad_norm": 1.8742514705624205, + "learning_rate": 2.2189314828145883e-06, + "loss": 0.871, + "step": 5335 + }, + { + "epoch": 0.48121928123731794, + "grad_norm": 1.298481830524153, + "learning_rate": 2.2183507736277453e-06, + "loss": 0.9359, + "step": 5336 + }, + { + "epoch": 0.48130946476078823, + "grad_norm": 1.4144326247252235, + "learning_rate": 2.2177700458100107e-06, + "loss": 0.9637, + "step": 5337 + }, + { + "epoch": 0.48139964828425846, + "grad_norm": 1.4629565480024975, + "learning_rate": 2.2171892994109346e-06, + "loss": 0.9311, + "step": 5338 + }, + { + "epoch": 0.48148983180772875, + "grad_norm": 1.528809295527426, + "learning_rate": 2.21660853448007e-06, + "loss": 1.0128, + "step": 5339 + }, + { + "epoch": 0.481580015331199, + "grad_norm": 1.9316551894515384, + "learning_rate": 2.2160277510669703e-06, + "loss": 0.9094, + "step": 5340 + }, + { + "epoch": 0.4816701988546693, + "grad_norm": 1.2207299313462945, + "learning_rate": 2.215446949221193e-06, + "loss": 0.947, + "step": 5341 + }, + { + "epoch": 0.4817603823781395, + "grad_norm": 1.596929781397757, + "learning_rate": 2.2148661289922924e-06, + "loss": 0.956, + "step": 5342 + }, + { + "epoch": 0.4818505659016098, + "grad_norm": 1.7610136596132588, + "learning_rate": 2.21428529042983e-06, + "loss": 1.0109, + "step": 5343 + }, + { + "epoch": 0.48194074942508003, + "grad_norm": 1.5216879947138393, + "learning_rate": 2.2137044335833647e-06, + "loss": 1.0016, + "step": 5344 + }, + { + "epoch": 0.4820309329485503, + "grad_norm": 1.5792583512771858, + "learning_rate": 2.213123558502459e-06, + "loss": 0.9353, + "step": 5345 + }, + { + "epoch": 0.48212111647202055, + "grad_norm": 1.9937202690562503, + "learning_rate": 2.2125426652366763e-06, + "loss": 0.9671, + "step": 5346 + }, + { + "epoch": 0.48221129999549084, + "grad_norm": 1.4808473933204858, + "learning_rate": 2.211961753835581e-06, + "loss": 1.0444, + "step": 5347 + }, + { + "epoch": 0.48230148351896107, + "grad_norm": 3.139272977647219, + "learning_rate": 2.21138082434874e-06, + "loss": 0.9444, + "step": 5348 + }, + { + "epoch": 
0.48239166704243136, + "grad_norm": 1.6431047863041586, + "learning_rate": 2.210799876825722e-06, + "loss": 0.9855, + "step": 5349 + }, + { + "epoch": 0.4824818505659016, + "grad_norm": 1.395499697235195, + "learning_rate": 2.210218911316096e-06, + "loss": 0.922, + "step": 5350 + }, + { + "epoch": 0.4825720340893719, + "grad_norm": 1.479958076269892, + "learning_rate": 2.2096379278694336e-06, + "loss": 0.9017, + "step": 5351 + }, + { + "epoch": 0.4826622176128421, + "grad_norm": 1.374489082434609, + "learning_rate": 2.2090569265353074e-06, + "loss": 0.8938, + "step": 5352 + }, + { + "epoch": 0.4827524011363124, + "grad_norm": 1.985043030279638, + "learning_rate": 2.2084759073632912e-06, + "loss": 0.9448, + "step": 5353 + }, + { + "epoch": 0.48284258465978264, + "grad_norm": 1.433509429198097, + "learning_rate": 2.2078948704029606e-06, + "loss": 0.9087, + "step": 5354 + }, + { + "epoch": 0.4829327681832529, + "grad_norm": 1.4461618762917154, + "learning_rate": 2.2073138157038935e-06, + "loss": 0.8676, + "step": 5355 + }, + { + "epoch": 0.48302295170672316, + "grad_norm": 1.4921169563914565, + "learning_rate": 2.2067327433156687e-06, + "loss": 0.9436, + "step": 5356 + }, + { + "epoch": 0.48311313523019345, + "grad_norm": 1.6172747749698893, + "learning_rate": 2.2061516532878667e-06, + "loss": 0.9703, + "step": 5357 + }, + { + "epoch": 0.4832033187536637, + "grad_norm": 1.324412463937798, + "learning_rate": 2.2055705456700686e-06, + "loss": 0.9967, + "step": 5358 + }, + { + "epoch": 0.48329350227713397, + "grad_norm": 1.347754477639067, + "learning_rate": 2.204989420511858e-06, + "loss": 0.8513, + "step": 5359 + }, + { + "epoch": 0.48338368580060426, + "grad_norm": 1.2473908851010393, + "learning_rate": 2.20440827786282e-06, + "loss": 0.9, + "step": 5360 + }, + { + "epoch": 0.4834738693240745, + "grad_norm": 1.3462160680350062, + "learning_rate": 2.20382711777254e-06, + "loss": 0.9526, + "step": 5361 + }, + { + "epoch": 0.4835640528475448, + "grad_norm": 1.3504111617526466, + "learning_rate": 2.203245940290607e-06, + "loss": 0.9598, + "step": 5362 + }, + { + "epoch": 0.483654236371015, + "grad_norm": 1.5284972067156355, + "learning_rate": 2.2026647454666097e-06, + "loss": 0.9405, + "step": 5363 + }, + { + "epoch": 0.4837444198944853, + "grad_norm": 1.6825242329591064, + "learning_rate": 2.2020835333501384e-06, + "loss": 0.9869, + "step": 5364 + }, + { + "epoch": 0.48383460341795553, + "grad_norm": 1.4125955043002318, + "learning_rate": 2.2015023039907863e-06, + "loss": 1.0069, + "step": 5365 + }, + { + "epoch": 0.4839247869414258, + "grad_norm": 1.3032963354236717, + "learning_rate": 2.2009210574381464e-06, + "loss": 0.9313, + "step": 5366 + }, + { + "epoch": 0.48401497046489605, + "grad_norm": 1.4940909184184912, + "learning_rate": 2.2003397937418134e-06, + "loss": 0.8988, + "step": 5367 + }, + { + "epoch": 0.48410515398836634, + "grad_norm": 0.6977471791097272, + "learning_rate": 2.1997585129513852e-06, + "loss": 0.8345, + "step": 5368 + }, + { + "epoch": 0.4841953375118366, + "grad_norm": 1.4025561668586528, + "learning_rate": 2.1991772151164595e-06, + "loss": 0.974, + "step": 5369 + }, + { + "epoch": 0.48428552103530687, + "grad_norm": 1.4630502529678708, + "learning_rate": 2.1985959002866346e-06, + "loss": 0.9947, + "step": 5370 + }, + { + "epoch": 0.4843757045587771, + "grad_norm": 1.8123538517401165, + "learning_rate": 2.198014568511513e-06, + "loss": 1.0158, + "step": 5371 + }, + { + "epoch": 0.4844658880822474, + "grad_norm": 0.7653934362129557, + "learning_rate": 
2.1974332198406965e-06, + "loss": 0.8134, + "step": 5372 + }, + { + "epoch": 0.4845560716057176, + "grad_norm": 1.4283470164874736, + "learning_rate": 2.196851854323789e-06, + "loss": 0.9271, + "step": 5373 + }, + { + "epoch": 0.4846462551291879, + "grad_norm": 1.566738026934926, + "learning_rate": 2.196270472010396e-06, + "loss": 0.9261, + "step": 5374 + }, + { + "epoch": 0.48473643865265814, + "grad_norm": 1.4354206470864321, + "learning_rate": 2.195689072950124e-06, + "loss": 0.9472, + "step": 5375 + }, + { + "epoch": 0.48482662217612843, + "grad_norm": 0.7146474016197185, + "learning_rate": 2.195107657192581e-06, + "loss": 0.7807, + "step": 5376 + }, + { + "epoch": 0.48491680569959866, + "grad_norm": 0.645984652440253, + "learning_rate": 2.194526224787378e-06, + "loss": 0.7501, + "step": 5377 + }, + { + "epoch": 0.48500698922306895, + "grad_norm": 1.511767246148596, + "learning_rate": 2.1939447757841236e-06, + "loss": 0.9153, + "step": 5378 + }, + { + "epoch": 0.4850971727465392, + "grad_norm": 1.5961178412278767, + "learning_rate": 2.193363310232432e-06, + "loss": 1.0107, + "step": 5379 + }, + { + "epoch": 0.4851873562700095, + "grad_norm": 1.5285464183533692, + "learning_rate": 2.192781828181917e-06, + "loss": 1.0518, + "step": 5380 + }, + { + "epoch": 0.4852775397934797, + "grad_norm": 1.2775896868045251, + "learning_rate": 2.192200329682193e-06, + "loss": 0.9681, + "step": 5381 + }, + { + "epoch": 0.48536772331695, + "grad_norm": 1.4305585698627865, + "learning_rate": 2.1916188147828767e-06, + "loss": 0.9467, + "step": 5382 + }, + { + "epoch": 0.48545790684042023, + "grad_norm": 1.3890939921358947, + "learning_rate": 2.191037283533587e-06, + "loss": 0.9627, + "step": 5383 + }, + { + "epoch": 0.4855480903638905, + "grad_norm": 1.9693279326349138, + "learning_rate": 2.1904557359839428e-06, + "loss": 0.9548, + "step": 5384 + }, + { + "epoch": 0.4856382738873608, + "grad_norm": 1.956958518132951, + "learning_rate": 2.189874172183565e-06, + "loss": 0.931, + "step": 5385 + }, + { + "epoch": 0.48572845741083104, + "grad_norm": 1.3200267505102625, + "learning_rate": 2.1892925921820763e-06, + "loss": 0.9134, + "step": 5386 + }, + { + "epoch": 0.4858186409343013, + "grad_norm": 1.4753576136121076, + "learning_rate": 2.1887109960290994e-06, + "loss": 0.9422, + "step": 5387 + }, + { + "epoch": 0.48590882445777156, + "grad_norm": 2.130408752514246, + "learning_rate": 2.18812938377426e-06, + "loss": 1.0416, + "step": 5388 + }, + { + "epoch": 0.48599900798124185, + "grad_norm": 1.3608636706852895, + "learning_rate": 2.187547755467184e-06, + "loss": 0.9887, + "step": 5389 + }, + { + "epoch": 0.4860891915047121, + "grad_norm": 1.452492689156969, + "learning_rate": 2.1869661111574994e-06, + "loss": 0.8473, + "step": 5390 + }, + { + "epoch": 0.48617937502818237, + "grad_norm": 1.6597882749359167, + "learning_rate": 2.1863844508948353e-06, + "loss": 0.9983, + "step": 5391 + }, + { + "epoch": 0.4862695585516526, + "grad_norm": 1.4210638729937264, + "learning_rate": 2.185802774728823e-06, + "loss": 0.9626, + "step": 5392 + }, + { + "epoch": 0.4863597420751229, + "grad_norm": 1.2317783713026103, + "learning_rate": 2.1852210827090927e-06, + "loss": 0.9029, + "step": 5393 + }, + { + "epoch": 0.4864499255985931, + "grad_norm": 1.4084807610326875, + "learning_rate": 2.184639374885278e-06, + "loss": 0.8893, + "step": 5394 + }, + { + "epoch": 0.4865401091220634, + "grad_norm": 1.7891601136076765, + "learning_rate": 2.184057651307014e-06, + "loss": 1.0495, + "step": 5395 + }, + { + "epoch": 0.48663029264553365, 
+ "grad_norm": 1.902501845791256, + "learning_rate": 2.183475912023937e-06, + "loss": 1.0214, + "step": 5396 + }, + { + "epoch": 0.48672047616900393, + "grad_norm": 1.448679450936456, + "learning_rate": 2.1828941570856826e-06, + "loss": 0.9574, + "step": 5397 + }, + { + "epoch": 0.48681065969247417, + "grad_norm": 0.7185367184855066, + "learning_rate": 2.1823123865418903e-06, + "loss": 0.8232, + "step": 5398 + }, + { + "epoch": 0.48690084321594446, + "grad_norm": 1.7319252652789954, + "learning_rate": 2.1817306004422e-06, + "loss": 0.9185, + "step": 5399 + }, + { + "epoch": 0.4869910267394147, + "grad_norm": 1.4166985489492712, + "learning_rate": 2.1811487988362527e-06, + "loss": 0.9795, + "step": 5400 + }, + { + "epoch": 0.487081210262885, + "grad_norm": 1.5513983971055751, + "learning_rate": 2.1805669817736917e-06, + "loss": 1.0172, + "step": 5401 + }, + { + "epoch": 0.4871713937863552, + "grad_norm": 1.4801055543219932, + "learning_rate": 2.17998514930416e-06, + "loss": 0.7681, + "step": 5402 + }, + { + "epoch": 0.4872615773098255, + "grad_norm": 1.740780454641773, + "learning_rate": 2.1794033014773025e-06, + "loss": 0.9515, + "step": 5403 + }, + { + "epoch": 0.48735176083329573, + "grad_norm": 1.3781898085391597, + "learning_rate": 2.178821438342766e-06, + "loss": 1.0208, + "step": 5404 + }, + { + "epoch": 0.487441944356766, + "grad_norm": 1.9546232046748595, + "learning_rate": 2.1782395599501996e-06, + "loss": 0.9414, + "step": 5405 + }, + { + "epoch": 0.48753212788023625, + "grad_norm": 0.7028690508068829, + "learning_rate": 2.1776576663492498e-06, + "loss": 0.8174, + "step": 5406 + }, + { + "epoch": 0.48762231140370654, + "grad_norm": 1.7530052402546403, + "learning_rate": 2.177075757589569e-06, + "loss": 1.0288, + "step": 5407 + }, + { + "epoch": 0.48771249492717683, + "grad_norm": 1.2552258924255923, + "learning_rate": 2.176493833720808e-06, + "loss": 0.8711, + "step": 5408 + }, + { + "epoch": 0.48780267845064706, + "grad_norm": 1.3937069035183596, + "learning_rate": 2.1759118947926195e-06, + "loss": 0.9124, + "step": 5409 + }, + { + "epoch": 0.48789286197411735, + "grad_norm": 1.3375065384464342, + "learning_rate": 2.1753299408546587e-06, + "loss": 0.9591, + "step": 5410 + }, + { + "epoch": 0.4879830454975876, + "grad_norm": 1.1737852168055898, + "learning_rate": 2.1747479719565803e-06, + "loss": 0.9588, + "step": 5411 + }, + { + "epoch": 0.4880732290210579, + "grad_norm": 1.1766765139026343, + "learning_rate": 2.174165988148042e-06, + "loss": 1.0088, + "step": 5412 + }, + { + "epoch": 0.4881634125445281, + "grad_norm": 1.5781113274614635, + "learning_rate": 2.1735839894787003e-06, + "loss": 0.9154, + "step": 5413 + }, + { + "epoch": 0.4882535960679984, + "grad_norm": 0.8243361836883014, + "learning_rate": 2.1730019759982163e-06, + "loss": 0.8626, + "step": 5414 + }, + { + "epoch": 0.48834377959146863, + "grad_norm": 1.394628654946344, + "learning_rate": 2.172419947756249e-06, + "loss": 1.0002, + "step": 5415 + }, + { + "epoch": 0.4884339631149389, + "grad_norm": 1.4734288624968446, + "learning_rate": 2.171837904802461e-06, + "loss": 0.9922, + "step": 5416 + }, + { + "epoch": 0.48852414663840915, + "grad_norm": 1.289088254729105, + "learning_rate": 2.171255847186516e-06, + "loss": 0.9371, + "step": 5417 + }, + { + "epoch": 0.48861433016187944, + "grad_norm": 1.612309123725726, + "learning_rate": 2.1706737749580783e-06, + "loss": 0.9295, + "step": 5418 + }, + { + "epoch": 0.4887045136853497, + "grad_norm": 1.209718693828924, + "learning_rate": 2.1700916881668127e-06, + "loss": 
1.0205, + "step": 5419 + }, + { + "epoch": 0.48879469720881996, + "grad_norm": 1.3057155556179214, + "learning_rate": 2.1695095868623862e-06, + "loss": 1.035, + "step": 5420 + }, + { + "epoch": 0.4888848807322902, + "grad_norm": 1.371328697832538, + "learning_rate": 2.168927471094467e-06, + "loss": 0.8895, + "step": 5421 + }, + { + "epoch": 0.4889750642557605, + "grad_norm": 1.768366348121399, + "learning_rate": 2.168345340912725e-06, + "loss": 0.9716, + "step": 5422 + }, + { + "epoch": 0.4890652477792307, + "grad_norm": 1.2887980796806762, + "learning_rate": 2.1677631963668298e-06, + "loss": 0.9656, + "step": 5423 + }, + { + "epoch": 0.489155431302701, + "grad_norm": 1.4228901854583358, + "learning_rate": 2.167181037506453e-06, + "loss": 0.9041, + "step": 5424 + }, + { + "epoch": 0.48924561482617124, + "grad_norm": 1.47956703928329, + "learning_rate": 2.1665988643812693e-06, + "loss": 0.8859, + "step": 5425 + }, + { + "epoch": 0.4893357983496415, + "grad_norm": 1.7651002492342474, + "learning_rate": 2.166016677040951e-06, + "loss": 0.9427, + "step": 5426 + }, + { + "epoch": 0.48942598187311176, + "grad_norm": 1.358992204810797, + "learning_rate": 2.165434475535175e-06, + "loss": 0.9371, + "step": 5427 + }, + { + "epoch": 0.48951616539658205, + "grad_norm": 1.347269550073336, + "learning_rate": 2.1648522599136173e-06, + "loss": 0.9892, + "step": 5428 + }, + { + "epoch": 0.4896063489200523, + "grad_norm": 1.3035746871101277, + "learning_rate": 2.164270030225956e-06, + "loss": 0.9219, + "step": 5429 + }, + { + "epoch": 0.48969653244352257, + "grad_norm": 1.245459798910633, + "learning_rate": 2.16368778652187e-06, + "loss": 0.8672, + "step": 5430 + }, + { + "epoch": 0.4897867159669928, + "grad_norm": 1.5287166581218024, + "learning_rate": 2.163105528851039e-06, + "loss": 0.9878, + "step": 5431 + }, + { + "epoch": 0.4898768994904631, + "grad_norm": 1.4177300538644488, + "learning_rate": 2.1625232572631448e-06, + "loss": 0.9533, + "step": 5432 + }, + { + "epoch": 0.4899670830139334, + "grad_norm": 0.8288785636801085, + "learning_rate": 2.161940971807871e-06, + "loss": 0.8416, + "step": 5433 + }, + { + "epoch": 0.4900572665374036, + "grad_norm": 1.6812272931802614, + "learning_rate": 2.1613586725348994e-06, + "loss": 0.9383, + "step": 5434 + }, + { + "epoch": 0.4901474500608739, + "grad_norm": 1.5035165257003993, + "learning_rate": 2.1607763594939176e-06, + "loss": 1.0366, + "step": 5435 + }, + { + "epoch": 0.49023763358434413, + "grad_norm": 1.5179251294978242, + "learning_rate": 2.1601940327346093e-06, + "loss": 0.8658, + "step": 5436 + }, + { + "epoch": 0.4903278171078144, + "grad_norm": 1.2743021364134406, + "learning_rate": 2.159611692306663e-06, + "loss": 1.0228, + "step": 5437 + }, + { + "epoch": 0.49041800063128466, + "grad_norm": 1.615741101341527, + "learning_rate": 2.1590293382597667e-06, + "loss": 0.9082, + "step": 5438 + }, + { + "epoch": 0.49050818415475494, + "grad_norm": 1.5967070877570588, + "learning_rate": 2.1584469706436102e-06, + "loss": 0.884, + "step": 5439 + }, + { + "epoch": 0.4905983676782252, + "grad_norm": 1.3254609259936936, + "learning_rate": 2.1578645895078855e-06, + "loss": 0.8652, + "step": 5440 + }, + { + "epoch": 0.49068855120169547, + "grad_norm": 1.3885126250602773, + "learning_rate": 2.157282194902283e-06, + "loss": 1.0073, + "step": 5441 + }, + { + "epoch": 0.4907787347251657, + "grad_norm": 1.5934184608436737, + "learning_rate": 2.1566997868764965e-06, + "loss": 0.8862, + "step": 5442 + }, + { + "epoch": 0.490868918248636, + "grad_norm": 
1.7588971669780176, + "learning_rate": 2.15611736548022e-06, + "loss": 0.9115, + "step": 5443 + }, + { + "epoch": 0.4909591017721062, + "grad_norm": 1.480363102266056, + "learning_rate": 2.155534930763149e-06, + "loss": 0.9634, + "step": 5444 + }, + { + "epoch": 0.4910492852955765, + "grad_norm": 1.5693853117719114, + "learning_rate": 2.1549524827749804e-06, + "loss": 0.8417, + "step": 5445 + }, + { + "epoch": 0.49113946881904674, + "grad_norm": 1.2030269037854846, + "learning_rate": 2.1543700215654115e-06, + "loss": 0.8053, + "step": 5446 + }, + { + "epoch": 0.49122965234251703, + "grad_norm": 1.3738702554455107, + "learning_rate": 2.153787547184141e-06, + "loss": 0.9327, + "step": 5447 + }, + { + "epoch": 0.49131983586598726, + "grad_norm": 1.5056722997295837, + "learning_rate": 2.1532050596808695e-06, + "loss": 0.8765, + "step": 5448 + }, + { + "epoch": 0.49141001938945755, + "grad_norm": 1.5038662991852685, + "learning_rate": 2.152622559105297e-06, + "loss": 0.9954, + "step": 5449 + }, + { + "epoch": 0.4915002029129278, + "grad_norm": 1.5904408676202353, + "learning_rate": 2.152040045507126e-06, + "loss": 0.9953, + "step": 5450 + }, + { + "epoch": 0.4915903864363981, + "grad_norm": 1.939534810815184, + "learning_rate": 2.1514575189360607e-06, + "loss": 0.9653, + "step": 5451 + }, + { + "epoch": 0.4916805699598683, + "grad_norm": 1.3005963443237203, + "learning_rate": 2.1508749794418043e-06, + "loss": 0.9067, + "step": 5452 + }, + { + "epoch": 0.4917707534833386, + "grad_norm": 1.2952878501028346, + "learning_rate": 2.1502924270740626e-06, + "loss": 0.9211, + "step": 5453 + }, + { + "epoch": 0.49186093700680883, + "grad_norm": 1.8808425791654997, + "learning_rate": 2.1497098618825427e-06, + "loss": 0.9492, + "step": 5454 + }, + { + "epoch": 0.4919511205302791, + "grad_norm": 1.5219399941604779, + "learning_rate": 2.1491272839169516e-06, + "loss": 0.9858, + "step": 5455 + }, + { + "epoch": 0.4920413040537494, + "grad_norm": 1.355372796523806, + "learning_rate": 2.1485446932269986e-06, + "loss": 1.0575, + "step": 5456 + }, + { + "epoch": 0.49213148757721964, + "grad_norm": 1.4373206151076396, + "learning_rate": 2.147962089862393e-06, + "loss": 0.9815, + "step": 5457 + }, + { + "epoch": 0.49222167110068993, + "grad_norm": 1.770398733626239, + "learning_rate": 2.1473794738728462e-06, + "loss": 0.8556, + "step": 5458 + }, + { + "epoch": 0.49231185462416016, + "grad_norm": 1.408981806343176, + "learning_rate": 2.14679684530807e-06, + "loss": 0.9811, + "step": 5459 + }, + { + "epoch": 0.49240203814763045, + "grad_norm": 1.5139591461273247, + "learning_rate": 2.1462142042177774e-06, + "loss": 0.9322, + "step": 5460 + }, + { + "epoch": 0.4924922216711007, + "grad_norm": 1.3586308865245489, + "learning_rate": 2.145631550651683e-06, + "loss": 0.948, + "step": 5461 + }, + { + "epoch": 0.49258240519457097, + "grad_norm": 1.320960658862893, + "learning_rate": 2.1450488846595016e-06, + "loss": 1.0482, + "step": 5462 + }, + { + "epoch": 0.4926725887180412, + "grad_norm": 1.5210367702476204, + "learning_rate": 2.14446620629095e-06, + "loss": 0.9323, + "step": 5463 + }, + { + "epoch": 0.4927627722415115, + "grad_norm": 1.2751470686669995, + "learning_rate": 2.1438835155957445e-06, + "loss": 1.0183, + "step": 5464 + }, + { + "epoch": 0.4928529557649817, + "grad_norm": 1.3857148741059324, + "learning_rate": 2.143300812623604e-06, + "loss": 0.9839, + "step": 5465 + }, + { + "epoch": 0.492943139288452, + "grad_norm": 1.794053225524032, + "learning_rate": 2.1427180974242485e-06, + "loss": 0.9296, + "step": 
5466 + }, + { + "epoch": 0.49303332281192225, + "grad_norm": 1.4166207119089083, + "learning_rate": 2.142135370047398e-06, + "loss": 0.973, + "step": 5467 + }, + { + "epoch": 0.49312350633539254, + "grad_norm": 1.5655388844576104, + "learning_rate": 2.1415526305427735e-06, + "loss": 0.9783, + "step": 5468 + }, + { + "epoch": 0.49321368985886277, + "grad_norm": 1.2210564430087762, + "learning_rate": 2.140969878960098e-06, + "loss": 0.871, + "step": 5469 + }, + { + "epoch": 0.49330387338233306, + "grad_norm": 1.401661528934372, + "learning_rate": 2.1403871153490956e-06, + "loss": 1.0017, + "step": 5470 + }, + { + "epoch": 0.4933940569058033, + "grad_norm": 1.3501606986863532, + "learning_rate": 2.13980433975949e-06, + "loss": 1.0708, + "step": 5471 + }, + { + "epoch": 0.4934842404292736, + "grad_norm": 1.2202923136859727, + "learning_rate": 2.1392215522410076e-06, + "loss": 0.9896, + "step": 5472 + }, + { + "epoch": 0.4935744239527438, + "grad_norm": 1.910328613492857, + "learning_rate": 2.1386387528433743e-06, + "loss": 1.065, + "step": 5473 + }, + { + "epoch": 0.4936646074762141, + "grad_norm": 1.6734075206988348, + "learning_rate": 2.1380559416163186e-06, + "loss": 0.9799, + "step": 5474 + }, + { + "epoch": 0.49375479099968433, + "grad_norm": 1.3167974869809558, + "learning_rate": 2.1374731186095685e-06, + "loss": 0.9996, + "step": 5475 + }, + { + "epoch": 0.4938449745231546, + "grad_norm": 1.2547860074832704, + "learning_rate": 2.136890283872854e-06, + "loss": 0.8962, + "step": 5476 + }, + { + "epoch": 0.49393515804662486, + "grad_norm": 1.5353248251263614, + "learning_rate": 2.136307437455906e-06, + "loss": 0.93, + "step": 5477 + }, + { + "epoch": 0.49402534157009514, + "grad_norm": 1.4721689404536593, + "learning_rate": 2.135724579408456e-06, + "loss": 0.9028, + "step": 5478 + }, + { + "epoch": 0.49411552509356543, + "grad_norm": 1.390303713941604, + "learning_rate": 2.1351417097802356e-06, + "loss": 1.04, + "step": 5479 + }, + { + "epoch": 0.49420570861703567, + "grad_norm": 1.7937960575584004, + "learning_rate": 2.1345588286209798e-06, + "loss": 0.8288, + "step": 5480 + }, + { + "epoch": 0.49429589214050595, + "grad_norm": 1.8816676161059454, + "learning_rate": 2.1339759359804227e-06, + "loss": 0.7979, + "step": 5481 + }, + { + "epoch": 0.4943860756639762, + "grad_norm": 1.2770935186796193, + "learning_rate": 2.1333930319082997e-06, + "loss": 0.9278, + "step": 5482 + }, + { + "epoch": 0.4944762591874465, + "grad_norm": 1.4140313919607872, + "learning_rate": 2.132810116454348e-06, + "loss": 0.9559, + "step": 5483 + }, + { + "epoch": 0.4945664427109167, + "grad_norm": 1.9350890879859726, + "learning_rate": 2.132227189668305e-06, + "loss": 0.9439, + "step": 5484 + }, + { + "epoch": 0.494656626234387, + "grad_norm": 1.4137588933771221, + "learning_rate": 2.1316442515999096e-06, + "loss": 0.8943, + "step": 5485 + }, + { + "epoch": 0.49474680975785723, + "grad_norm": 1.2716944179698273, + "learning_rate": 2.1310613022989e-06, + "loss": 0.9814, + "step": 5486 + }, + { + "epoch": 0.4948369932813275, + "grad_norm": 1.5562779711312322, + "learning_rate": 2.130478341815017e-06, + "loss": 0.8023, + "step": 5487 + }, + { + "epoch": 0.49492717680479775, + "grad_norm": 1.369234786557327, + "learning_rate": 2.1298953701980033e-06, + "loss": 0.8529, + "step": 5488 + }, + { + "epoch": 0.49501736032826804, + "grad_norm": 1.8364401920466218, + "learning_rate": 2.1293123874976003e-06, + "loss": 0.9647, + "step": 5489 + }, + { + "epoch": 0.4951075438517383, + "grad_norm": 1.4268704369263574, + 
"learning_rate": 2.1287293937635513e-06, + "loss": 0.8953, + "step": 5490 + }, + { + "epoch": 0.49519772737520856, + "grad_norm": 1.503113059394978, + "learning_rate": 2.1281463890456005e-06, + "loss": 0.9987, + "step": 5491 + }, + { + "epoch": 0.4952879108986788, + "grad_norm": 1.390142935875301, + "learning_rate": 2.127563373393493e-06, + "loss": 0.9446, + "step": 5492 + }, + { + "epoch": 0.4953780944221491, + "grad_norm": 1.52413986838788, + "learning_rate": 2.1269803468569756e-06, + "loss": 0.9418, + "step": 5493 + }, + { + "epoch": 0.4954682779456193, + "grad_norm": 1.2683963809005945, + "learning_rate": 2.126397309485794e-06, + "loss": 0.9821, + "step": 5494 + }, + { + "epoch": 0.4955584614690896, + "grad_norm": 1.42790711658595, + "learning_rate": 2.1258142613296983e-06, + "loss": 0.9032, + "step": 5495 + }, + { + "epoch": 0.49564864499255984, + "grad_norm": 1.663086439256934, + "learning_rate": 2.125231202438435e-06, + "loss": 0.9265, + "step": 5496 + }, + { + "epoch": 0.49573882851603013, + "grad_norm": 1.3656392077093182, + "learning_rate": 2.1246481328617553e-06, + "loss": 0.9797, + "step": 5497 + }, + { + "epoch": 0.49582901203950036, + "grad_norm": 3.270027369113442, + "learning_rate": 2.1240650526494096e-06, + "loss": 0.93, + "step": 5498 + }, + { + "epoch": 0.49591919556297065, + "grad_norm": 1.5750434173170325, + "learning_rate": 2.1234819618511493e-06, + "loss": 0.9083, + "step": 5499 + }, + { + "epoch": 0.4960093790864409, + "grad_norm": 0.7218976549829393, + "learning_rate": 2.122898860516728e-06, + "loss": 0.8338, + "step": 5500 + }, + { + "epoch": 0.49609956260991117, + "grad_norm": 1.307540788761912, + "learning_rate": 2.1223157486958976e-06, + "loss": 0.9376, + "step": 5501 + }, + { + "epoch": 0.4961897461333814, + "grad_norm": 1.2197842733200923, + "learning_rate": 2.1217326264384127e-06, + "loss": 0.8368, + "step": 5502 + }, + { + "epoch": 0.4962799296568517, + "grad_norm": 1.8962682567464417, + "learning_rate": 2.1211494937940296e-06, + "loss": 0.9717, + "step": 5503 + }, + { + "epoch": 0.496370113180322, + "grad_norm": 1.2273419084541206, + "learning_rate": 2.1205663508125034e-06, + "loss": 0.9601, + "step": 5504 + }, + { + "epoch": 0.4964602967037922, + "grad_norm": 1.3582494175427158, + "learning_rate": 2.1199831975435914e-06, + "loss": 0.9127, + "step": 5505 + }, + { + "epoch": 0.4965504802272625, + "grad_norm": 1.268614781507593, + "learning_rate": 2.1194000340370517e-06, + "loss": 0.9927, + "step": 5506 + }, + { + "epoch": 0.49664066375073274, + "grad_norm": 1.6247718357415586, + "learning_rate": 2.1188168603426423e-06, + "loss": 0.8926, + "step": 5507 + }, + { + "epoch": 0.496730847274203, + "grad_norm": 2.187420652857796, + "learning_rate": 2.118233676510123e-06, + "loss": 0.918, + "step": 5508 + }, + { + "epoch": 0.49682103079767326, + "grad_norm": 1.4673651801961234, + "learning_rate": 2.117650482589255e-06, + "loss": 0.9612, + "step": 5509 + }, + { + "epoch": 0.49691121432114355, + "grad_norm": 1.5793200962681506, + "learning_rate": 2.1170672786297988e-06, + "loss": 0.9838, + "step": 5510 + }, + { + "epoch": 0.4970013978446138, + "grad_norm": 1.194568876443559, + "learning_rate": 2.1164840646815174e-06, + "loss": 0.915, + "step": 5511 + }, + { + "epoch": 0.49709158136808407, + "grad_norm": 1.4005586956898046, + "learning_rate": 2.1159008407941726e-06, + "loss": 1.0274, + "step": 5512 + }, + { + "epoch": 0.4971817648915543, + "grad_norm": 1.5946575647437777, + "learning_rate": 2.1153176070175293e-06, + "loss": 0.9439, + "step": 5513 + }, + { + "epoch": 
0.4972719484150246, + "grad_norm": 1.8596398942251824, + "learning_rate": 2.114734363401352e-06, + "loss": 0.8317, + "step": 5514 + }, + { + "epoch": 0.4973621319384948, + "grad_norm": 1.5351648694444329, + "learning_rate": 2.1141511099954056e-06, + "loss": 0.9461, + "step": 5515 + }, + { + "epoch": 0.4974523154619651, + "grad_norm": 1.3312740467030941, + "learning_rate": 2.1135678468494576e-06, + "loss": 0.9321, + "step": 5516 + }, + { + "epoch": 0.49754249898543534, + "grad_norm": 1.9107017440282428, + "learning_rate": 2.112984574013275e-06, + "loss": 0.9869, + "step": 5517 + }, + { + "epoch": 0.49763268250890563, + "grad_norm": 1.3345045915698304, + "learning_rate": 2.112401291536625e-06, + "loss": 0.9081, + "step": 5518 + }, + { + "epoch": 0.49772286603237587, + "grad_norm": 1.3611898475156263, + "learning_rate": 2.111817999469278e-06, + "loss": 0.99, + "step": 5519 + }, + { + "epoch": 0.49781304955584615, + "grad_norm": 1.432029515175359, + "learning_rate": 2.1112346978610016e-06, + "loss": 0.8957, + "step": 5520 + }, + { + "epoch": 0.4979032330793164, + "grad_norm": 1.434822657589111, + "learning_rate": 2.1106513867615678e-06, + "loss": 0.9066, + "step": 5521 + }, + { + "epoch": 0.4979934166027867, + "grad_norm": 1.2070873951122105, + "learning_rate": 2.110068066220748e-06, + "loss": 0.9451, + "step": 5522 + }, + { + "epoch": 0.4980836001262569, + "grad_norm": 1.2963776611471483, + "learning_rate": 2.109484736288313e-06, + "loss": 1.0157, + "step": 5523 + }, + { + "epoch": 0.4981737836497272, + "grad_norm": 1.597145577218079, + "learning_rate": 2.108901397014037e-06, + "loss": 0.9604, + "step": 5524 + }, + { + "epoch": 0.49826396717319743, + "grad_norm": 1.3303040075983164, + "learning_rate": 2.1083180484476934e-06, + "loss": 0.9544, + "step": 5525 + }, + { + "epoch": 0.4983541506966677, + "grad_norm": 1.5109033874637352, + "learning_rate": 2.1077346906390567e-06, + "loss": 0.9665, + "step": 5526 + }, + { + "epoch": 0.498444334220138, + "grad_norm": 1.4999372946030407, + "learning_rate": 2.107151323637902e-06, + "loss": 0.9664, + "step": 5527 + }, + { + "epoch": 0.49853451774360824, + "grad_norm": 1.4540490934539716, + "learning_rate": 2.106567947494006e-06, + "loss": 0.9227, + "step": 5528 + }, + { + "epoch": 0.49862470126707853, + "grad_norm": 1.755790802462098, + "learning_rate": 2.1059845622571447e-06, + "loss": 1.0135, + "step": 5529 + }, + { + "epoch": 0.49871488479054876, + "grad_norm": 1.377896898556771, + "learning_rate": 2.1054011679770956e-06, + "loss": 0.9885, + "step": 5530 + }, + { + "epoch": 0.49880506831401905, + "grad_norm": 1.525229002221079, + "learning_rate": 2.104817764703638e-06, + "loss": 0.9495, + "step": 5531 + }, + { + "epoch": 0.4988952518374893, + "grad_norm": 1.6374341179793024, + "learning_rate": 2.1042343524865516e-06, + "loss": 0.9563, + "step": 5532 + }, + { + "epoch": 0.4989854353609596, + "grad_norm": 1.7660889817671037, + "learning_rate": 2.103650931375615e-06, + "loss": 1.0567, + "step": 5533 + }, + { + "epoch": 0.4990756188844298, + "grad_norm": 1.9043915240901403, + "learning_rate": 2.1030675014206094e-06, + "loss": 0.8913, + "step": 5534 + }, + { + "epoch": 0.4991658024079001, + "grad_norm": 2.093658217510797, + "learning_rate": 2.1024840626713166e-06, + "loss": 0.8849, + "step": 5535 + }, + { + "epoch": 0.4992559859313703, + "grad_norm": 1.6011021394380454, + "learning_rate": 2.1019006151775177e-06, + "loss": 1.0512, + "step": 5536 + }, + { + "epoch": 0.4993461694548406, + "grad_norm": 1.583995438053806, + "learning_rate": 
2.101317158988997e-06, + "loss": 0.9422, + "step": 5537 + }, + { + "epoch": 0.49943635297831085, + "grad_norm": 1.7661490548802967, + "learning_rate": 2.1007336941555374e-06, + "loss": 0.9178, + "step": 5538 + }, + { + "epoch": 0.49952653650178114, + "grad_norm": 1.3618564538274964, + "learning_rate": 2.1001502207269238e-06, + "loss": 0.9095, + "step": 5539 + }, + { + "epoch": 0.49961672002525137, + "grad_norm": 1.4518730759330807, + "learning_rate": 2.0995667387529407e-06, + "loss": 0.9047, + "step": 5540 + }, + { + "epoch": 0.49970690354872166, + "grad_norm": 1.315077430780663, + "learning_rate": 2.098983248283375e-06, + "loss": 0.8947, + "step": 5541 + }, + { + "epoch": 0.4997970870721919, + "grad_norm": 1.5798481513343028, + "learning_rate": 2.098399749368012e-06, + "loss": 0.9555, + "step": 5542 + }, + { + "epoch": 0.4998872705956622, + "grad_norm": 0.7084849887137963, + "learning_rate": 2.09781624205664e-06, + "loss": 0.8026, + "step": 5543 + }, + { + "epoch": 0.4999774541191324, + "grad_norm": 1.5081074480536452, + "learning_rate": 2.0972327263990477e-06, + "loss": 0.9966, + "step": 5544 + }, + { + "epoch": 0.5000676376426026, + "grad_norm": 1.5314284240157294, + "learning_rate": 2.0966492024450226e-06, + "loss": 0.8828, + "step": 5545 + }, + { + "epoch": 0.500157821166073, + "grad_norm": 1.6008795794125652, + "learning_rate": 2.0960656702443545e-06, + "loss": 1.0845, + "step": 5546 + }, + { + "epoch": 0.5002480046895432, + "grad_norm": 1.3789836224253567, + "learning_rate": 2.0954821298468343e-06, + "loss": 0.9436, + "step": 5547 + }, + { + "epoch": 0.5003381882130135, + "grad_norm": 1.2915694856489373, + "learning_rate": 2.0948985813022513e-06, + "loss": 1.0054, + "step": 5548 + }, + { + "epoch": 0.5004283717364837, + "grad_norm": 1.2522901060688756, + "learning_rate": 2.094315024660399e-06, + "loss": 0.989, + "step": 5549 + }, + { + "epoch": 0.500518555259954, + "grad_norm": 1.3870658762413788, + "learning_rate": 2.0937314599710676e-06, + "loss": 0.9747, + "step": 5550 + }, + { + "epoch": 0.5006087387834243, + "grad_norm": 1.3840937739973405, + "learning_rate": 2.0931478872840526e-06, + "loss": 0.9378, + "step": 5551 + }, + { + "epoch": 0.5006989223068945, + "grad_norm": 1.417104326223718, + "learning_rate": 2.092564306649145e-06, + "loss": 0.927, + "step": 5552 + }, + { + "epoch": 0.5007891058303648, + "grad_norm": 1.3121789812146265, + "learning_rate": 2.091980718116141e-06, + "loss": 0.9843, + "step": 5553 + }, + { + "epoch": 0.5008792893538351, + "grad_norm": 1.3111548569802307, + "learning_rate": 2.091397121734835e-06, + "loss": 0.9888, + "step": 5554 + }, + { + "epoch": 0.5009694728773053, + "grad_norm": 1.4311091957629032, + "learning_rate": 2.090813517555022e-06, + "loss": 0.9727, + "step": 5555 + }, + { + "epoch": 0.5010596564007755, + "grad_norm": 1.490605019489468, + "learning_rate": 2.0902299056265e-06, + "loss": 0.8986, + "step": 5556 + }, + { + "epoch": 0.5011498399242459, + "grad_norm": 1.5333398843017179, + "learning_rate": 2.0896462859990643e-06, + "loss": 0.9315, + "step": 5557 + }, + { + "epoch": 0.5012400234477161, + "grad_norm": 1.361589666607305, + "learning_rate": 2.089062658722513e-06, + "loss": 1.0147, + "step": 5558 + }, + { + "epoch": 0.5013302069711864, + "grad_norm": 1.4492554364354966, + "learning_rate": 2.0884790238466452e-06, + "loss": 0.9876, + "step": 5559 + }, + { + "epoch": 0.5014203904946566, + "grad_norm": 2.5724609115492845, + "learning_rate": 2.087895381421259e-06, + "loss": 0.9904, + "step": 5560 + }, + { + "epoch": 0.5015105740181269, + 
"grad_norm": 1.4698211335333626, + "learning_rate": 2.087311731496154e-06, + "loss": 0.9378, + "step": 5561 + }, + { + "epoch": 0.5016007575415972, + "grad_norm": 1.2824830890306496, + "learning_rate": 2.08672807412113e-06, + "loss": 0.9365, + "step": 5562 + }, + { + "epoch": 0.5016909410650674, + "grad_norm": 1.4594042070557915, + "learning_rate": 2.08614440934599e-06, + "loss": 1.039, + "step": 5563 + }, + { + "epoch": 0.5017811245885376, + "grad_norm": 1.4943456571435143, + "learning_rate": 2.0855607372205337e-06, + "loss": 0.8639, + "step": 5564 + }, + { + "epoch": 0.501871308112008, + "grad_norm": 1.5281090733608942, + "learning_rate": 2.0849770577945623e-06, + "loss": 0.9635, + "step": 5565 + }, + { + "epoch": 0.5019614916354782, + "grad_norm": 1.2594684575618644, + "learning_rate": 2.084393371117881e-06, + "loss": 0.9826, + "step": 5566 + }, + { + "epoch": 0.5020516751589484, + "grad_norm": 1.3740115080466722, + "learning_rate": 2.0838096772402902e-06, + "loss": 0.8817, + "step": 5567 + }, + { + "epoch": 0.5021418586824187, + "grad_norm": 1.5395455159182876, + "learning_rate": 2.0832259762115973e-06, + "loss": 0.8399, + "step": 5568 + }, + { + "epoch": 0.502232042205889, + "grad_norm": 1.1608858222806113, + "learning_rate": 2.082642268081605e-06, + "loss": 0.9725, + "step": 5569 + }, + { + "epoch": 0.5023222257293593, + "grad_norm": 1.577889226542733, + "learning_rate": 2.082058552900118e-06, + "loss": 0.9033, + "step": 5570 + }, + { + "epoch": 0.5024124092528295, + "grad_norm": 1.5947231238959407, + "learning_rate": 2.081474830716944e-06, + "loss": 0.9291, + "step": 5571 + }, + { + "epoch": 0.5025025927762997, + "grad_norm": 1.9791854522884333, + "learning_rate": 2.080891101581887e-06, + "loss": 0.9711, + "step": 5572 + }, + { + "epoch": 0.5025927762997701, + "grad_norm": 1.4917366185471146, + "learning_rate": 2.080307365544755e-06, + "loss": 0.9928, + "step": 5573 + }, + { + "epoch": 0.5026829598232403, + "grad_norm": 1.4903526009275634, + "learning_rate": 2.0797236226553567e-06, + "loss": 0.9844, + "step": 5574 + }, + { + "epoch": 0.5027731433467105, + "grad_norm": 1.4908705087330067, + "learning_rate": 2.079139872963499e-06, + "loss": 0.8805, + "step": 5575 + }, + { + "epoch": 0.5028633268701809, + "grad_norm": 1.8787747850642353, + "learning_rate": 2.078556116518991e-06, + "loss": 0.8466, + "step": 5576 + }, + { + "epoch": 0.5029535103936511, + "grad_norm": 1.2643679274598334, + "learning_rate": 2.077972353371642e-06, + "loss": 0.9209, + "step": 5577 + }, + { + "epoch": 0.5030436939171213, + "grad_norm": 1.3913402646679143, + "learning_rate": 2.077388583571262e-06, + "loss": 0.8928, + "step": 5578 + }, + { + "epoch": 0.5031338774405916, + "grad_norm": 0.8012333407879556, + "learning_rate": 2.0768048071676608e-06, + "loss": 0.7637, + "step": 5579 + }, + { + "epoch": 0.5032240609640619, + "grad_norm": 1.6024817085114862, + "learning_rate": 2.0762210242106505e-06, + "loss": 0.9105, + "step": 5580 + }, + { + "epoch": 0.5033142444875321, + "grad_norm": 2.7142725062228203, + "learning_rate": 2.0756372347500424e-06, + "loss": 0.9625, + "step": 5581 + }, + { + "epoch": 0.5034044280110024, + "grad_norm": 1.427966723863788, + "learning_rate": 2.0750534388356473e-06, + "loss": 1.0523, + "step": 5582 + }, + { + "epoch": 0.5034946115344726, + "grad_norm": 1.5971246781717205, + "learning_rate": 2.07446963651728e-06, + "loss": 0.901, + "step": 5583 + }, + { + "epoch": 0.503584795057943, + "grad_norm": 1.4752167720236369, + "learning_rate": 2.0738858278447516e-06, + "loss": 0.8893, + "step": 
5584 + }, + { + "epoch": 0.5036749785814132, + "grad_norm": 1.4006377657811144, + "learning_rate": 2.073302012867878e-06, + "loss": 1.0384, + "step": 5585 + }, + { + "epoch": 0.5037651621048834, + "grad_norm": 1.295784733260572, + "learning_rate": 2.0727181916364725e-06, + "loss": 0.9856, + "step": 5586 + }, + { + "epoch": 0.5038553456283537, + "grad_norm": 1.293824434212363, + "learning_rate": 2.0721343642003493e-06, + "loss": 0.9792, + "step": 5587 + }, + { + "epoch": 0.503945529151824, + "grad_norm": 1.6293683391474405, + "learning_rate": 2.0715505306093247e-06, + "loss": 0.9825, + "step": 5588 + }, + { + "epoch": 0.5040357126752942, + "grad_norm": 1.6938376625714124, + "learning_rate": 2.070966690913214e-06, + "loss": 0.9792, + "step": 5589 + }, + { + "epoch": 0.5041258961987645, + "grad_norm": 1.5806776496079822, + "learning_rate": 2.0703828451618346e-06, + "loss": 0.8333, + "step": 5590 + }, + { + "epoch": 0.5042160797222347, + "grad_norm": 1.2877337919387928, + "learning_rate": 2.069798993405002e-06, + "loss": 0.9371, + "step": 5591 + }, + { + "epoch": 0.504306263245705, + "grad_norm": 1.5043201223839062, + "learning_rate": 2.0692151356925345e-06, + "loss": 0.9903, + "step": 5592 + }, + { + "epoch": 0.5043964467691753, + "grad_norm": 1.4061143597778374, + "learning_rate": 2.068631272074251e-06, + "loss": 0.9679, + "step": 5593 + }, + { + "epoch": 0.5044866302926455, + "grad_norm": 1.2572903704485128, + "learning_rate": 2.0680474025999676e-06, + "loss": 1.0781, + "step": 5594 + }, + { + "epoch": 0.5045768138161157, + "grad_norm": 0.900299067610559, + "learning_rate": 2.0674635273195055e-06, + "loss": 0.843, + "step": 5595 + }, + { + "epoch": 0.5046669973395861, + "grad_norm": 1.8495398954495734, + "learning_rate": 2.066879646282682e-06, + "loss": 0.9456, + "step": 5596 + }, + { + "epoch": 0.5047571808630563, + "grad_norm": 1.9102869281651647, + "learning_rate": 2.0662957595393194e-06, + "loss": 0.9161, + "step": 5597 + }, + { + "epoch": 0.5048473643865266, + "grad_norm": 1.4346098663922537, + "learning_rate": 2.0657118671392373e-06, + "loss": 0.9761, + "step": 5598 + }, + { + "epoch": 0.5049375479099969, + "grad_norm": 2.1792599932183383, + "learning_rate": 2.0651279691322558e-06, + "loss": 1.0047, + "step": 5599 + }, + { + "epoch": 0.5050277314334671, + "grad_norm": 1.3669238026945931, + "learning_rate": 2.0645440655681973e-06, + "loss": 0.9091, + "step": 5600 + }, + { + "epoch": 0.5051179149569374, + "grad_norm": 1.7492569980980204, + "learning_rate": 2.0639601564968826e-06, + "loss": 1.0177, + "step": 5601 + }, + { + "epoch": 0.5052080984804076, + "grad_norm": 0.7802294359464306, + "learning_rate": 2.0633762419681355e-06, + "loss": 0.8924, + "step": 5602 + }, + { + "epoch": 0.5052982820038779, + "grad_norm": 1.1860532983026058, + "learning_rate": 2.062792322031777e-06, + "loss": 0.9617, + "step": 5603 + }, + { + "epoch": 0.5053884655273482, + "grad_norm": 1.569326138421297, + "learning_rate": 2.062208396737632e-06, + "loss": 0.9104, + "step": 5604 + }, + { + "epoch": 0.5054786490508184, + "grad_norm": 1.2816057409150774, + "learning_rate": 2.0616244661355235e-06, + "loss": 0.9694, + "step": 5605 + }, + { + "epoch": 0.5055688325742886, + "grad_norm": 1.7116458071162255, + "learning_rate": 2.0610405302752752e-06, + "loss": 0.9821, + "step": 5606 + }, + { + "epoch": 0.505659016097759, + "grad_norm": 1.3454425266002032, + "learning_rate": 2.060456589206713e-06, + "loss": 0.9421, + "step": 5607 + }, + { + "epoch": 0.5057491996212292, + "grad_norm": 1.573838332335547, + "learning_rate": 
2.0598726429796614e-06, + "loss": 0.8973, + "step": 5608 + }, + { + "epoch": 0.5058393831446995, + "grad_norm": 1.442623296588751, + "learning_rate": 2.059288691643945e-06, + "loss": 0.912, + "step": 5609 + }, + { + "epoch": 0.5059295666681697, + "grad_norm": 1.4441256212028277, + "learning_rate": 2.0587047352493913e-06, + "loss": 1.0297, + "step": 5610 + }, + { + "epoch": 0.50601975019164, + "grad_norm": 1.491505172270382, + "learning_rate": 2.0581207738458248e-06, + "loss": 0.9273, + "step": 5611 + }, + { + "epoch": 0.5061099337151103, + "grad_norm": 2.594882947453763, + "learning_rate": 2.0575368074830743e-06, + "loss": 0.9038, + "step": 5612 + }, + { + "epoch": 0.5062001172385805, + "grad_norm": 1.5192648058294294, + "learning_rate": 2.0569528362109667e-06, + "loss": 0.8653, + "step": 5613 + }, + { + "epoch": 0.5062903007620507, + "grad_norm": 1.354195252141262, + "learning_rate": 2.056368860079327e-06, + "loss": 0.9517, + "step": 5614 + }, + { + "epoch": 0.5063804842855211, + "grad_norm": 1.7765805796271115, + "learning_rate": 2.0557848791379874e-06, + "loss": 0.8864, + "step": 5615 + }, + { + "epoch": 0.5064706678089913, + "grad_norm": 1.5787863525892896, + "learning_rate": 2.0552008934367734e-06, + "loss": 0.8505, + "step": 5616 + }, + { + "epoch": 0.5065608513324615, + "grad_norm": 1.9367780878687573, + "learning_rate": 2.0546169030255154e-06, + "loss": 1.0242, + "step": 5617 + }, + { + "epoch": 0.5066510348559318, + "grad_norm": 1.6011045964352848, + "learning_rate": 2.054032907954041e-06, + "loss": 1.0271, + "step": 5618 + }, + { + "epoch": 0.5067412183794021, + "grad_norm": 1.507564702203307, + "learning_rate": 2.053448908272182e-06, + "loss": 0.9123, + "step": 5619 + }, + { + "epoch": 0.5068314019028723, + "grad_norm": 1.4179855639929424, + "learning_rate": 2.0528649040297673e-06, + "loss": 0.9785, + "step": 5620 + }, + { + "epoch": 0.5069215854263426, + "grad_norm": 1.3165597797821142, + "learning_rate": 2.0522808952766266e-06, + "loss": 0.9543, + "step": 5621 + }, + { + "epoch": 0.5070117689498129, + "grad_norm": 1.2667283793942492, + "learning_rate": 2.0516968820625925e-06, + "loss": 0.9971, + "step": 5622 + }, + { + "epoch": 0.5071019524732832, + "grad_norm": 1.4534905906926456, + "learning_rate": 2.051112864437495e-06, + "loss": 0.9358, + "step": 5623 + }, + { + "epoch": 0.5071921359967534, + "grad_norm": 1.7394108563043462, + "learning_rate": 2.050528842451166e-06, + "loss": 0.9206, + "step": 5624 + }, + { + "epoch": 0.5072823195202236, + "grad_norm": 1.4072713745521548, + "learning_rate": 2.049944816153438e-06, + "loss": 0.8334, + "step": 5625 + }, + { + "epoch": 0.507372503043694, + "grad_norm": 1.4051278617482923, + "learning_rate": 2.049360785594142e-06, + "loss": 0.9976, + "step": 5626 + }, + { + "epoch": 0.5074626865671642, + "grad_norm": 0.7378634930979553, + "learning_rate": 2.048776750823113e-06, + "loss": 0.7519, + "step": 5627 + }, + { + "epoch": 0.5075528700906344, + "grad_norm": 0.7036513583636748, + "learning_rate": 2.0481927118901817e-06, + "loss": 0.7968, + "step": 5628 + }, + { + "epoch": 0.5076430536141047, + "grad_norm": 1.63703933525947, + "learning_rate": 2.0476086688451824e-06, + "loss": 0.8933, + "step": 5629 + }, + { + "epoch": 0.507733237137575, + "grad_norm": 2.65159153559921, + "learning_rate": 2.04702462173795e-06, + "loss": 0.9441, + "step": 5630 + }, + { + "epoch": 0.5078234206610452, + "grad_norm": 1.7129666910713264, + "learning_rate": 2.0464405706183167e-06, + "loss": 0.9231, + "step": 5631 + }, + { + "epoch": 0.5079136041845155, + 
"grad_norm": 1.3978166534000522, + "learning_rate": 2.045856515536118e-06, + "loss": 0.9629, + "step": 5632 + }, + { + "epoch": 0.5080037877079857, + "grad_norm": 1.390789022309319, + "learning_rate": 2.045272456541188e-06, + "loss": 0.9666, + "step": 5633 + }, + { + "epoch": 0.508093971231456, + "grad_norm": 1.260056714870504, + "learning_rate": 2.0446883936833635e-06, + "loss": 1.0136, + "step": 5634 + }, + { + "epoch": 0.5081841547549263, + "grad_norm": 1.3838802461053514, + "learning_rate": 2.0441043270124782e-06, + "loss": 0.9218, + "step": 5635 + }, + { + "epoch": 0.5082743382783965, + "grad_norm": 1.477628857694624, + "learning_rate": 2.0435202565783683e-06, + "loss": 0.9781, + "step": 5636 + }, + { + "epoch": 0.5083645218018668, + "grad_norm": 1.8812801726193324, + "learning_rate": 2.042936182430871e-06, + "loss": 0.9107, + "step": 5637 + }, + { + "epoch": 0.5084547053253371, + "grad_norm": 1.4358105890853055, + "learning_rate": 2.0423521046198206e-06, + "loss": 0.9503, + "step": 5638 + }, + { + "epoch": 0.5085448888488073, + "grad_norm": 1.4453679254575307, + "learning_rate": 2.041768023195056e-06, + "loss": 0.8665, + "step": 5639 + }, + { + "epoch": 0.5086350723722776, + "grad_norm": 1.8475714698357741, + "learning_rate": 2.0411839382064126e-06, + "loss": 1.0201, + "step": 5640 + }, + { + "epoch": 0.5087252558957478, + "grad_norm": 1.4425798306118065, + "learning_rate": 2.040599849703729e-06, + "loss": 0.842, + "step": 5641 + }, + { + "epoch": 0.5088154394192181, + "grad_norm": 3.2214283586416816, + "learning_rate": 2.040015757736843e-06, + "loss": 0.8856, + "step": 5642 + }, + { + "epoch": 0.5089056229426884, + "grad_norm": 1.446789950349016, + "learning_rate": 2.039431662355591e-06, + "loss": 0.8735, + "step": 5643 + }, + { + "epoch": 0.5089958064661586, + "grad_norm": 1.4095437365428043, + "learning_rate": 2.0388475636098126e-06, + "loss": 0.9617, + "step": 5644 + }, + { + "epoch": 0.5090859899896288, + "grad_norm": 1.2269848624821396, + "learning_rate": 2.038263461549346e-06, + "loss": 0.9875, + "step": 5645 + }, + { + "epoch": 0.5091761735130992, + "grad_norm": 1.5860084555401721, + "learning_rate": 2.0376793562240297e-06, + "loss": 0.9473, + "step": 5646 + }, + { + "epoch": 0.5092663570365694, + "grad_norm": 1.3405598320049426, + "learning_rate": 2.037095247683703e-06, + "loss": 0.9542, + "step": 5647 + }, + { + "epoch": 0.5093565405600397, + "grad_norm": 1.469630363078843, + "learning_rate": 2.0365111359782046e-06, + "loss": 0.9049, + "step": 5648 + }, + { + "epoch": 0.50944672408351, + "grad_norm": 1.3510813656015799, + "learning_rate": 2.0359270211573757e-06, + "loss": 0.9746, + "step": 5649 + }, + { + "epoch": 0.5095369076069802, + "grad_norm": 1.7817865868520324, + "learning_rate": 2.0353429032710545e-06, + "loss": 1.0123, + "step": 5650 + }, + { + "epoch": 0.5096270911304505, + "grad_norm": 1.292902089039952, + "learning_rate": 2.0347587823690825e-06, + "loss": 0.9296, + "step": 5651 + }, + { + "epoch": 0.5097172746539207, + "grad_norm": 1.4370161154313899, + "learning_rate": 2.034174658501299e-06, + "loss": 0.9414, + "step": 5652 + }, + { + "epoch": 0.509807458177391, + "grad_norm": 1.4310235623139917, + "learning_rate": 2.0335905317175453e-06, + "loss": 0.9867, + "step": 5653 + }, + { + "epoch": 0.5098976417008613, + "grad_norm": 2.261704940621972, + "learning_rate": 2.033006402067663e-06, + "loss": 0.9445, + "step": 5654 + }, + { + "epoch": 0.5099878252243315, + "grad_norm": 1.826953871463593, + "learning_rate": 2.0324222696014912e-06, + "loss": 0.861, + "step": 
5655 + }, + { + "epoch": 0.5100780087478017, + "grad_norm": 1.4557401853450338, + "learning_rate": 2.0318381343688733e-06, + "loss": 0.9815, + "step": 5656 + }, + { + "epoch": 0.5101681922712721, + "grad_norm": 1.3314913553293208, + "learning_rate": 2.0312539964196505e-06, + "loss": 0.9262, + "step": 5657 + }, + { + "epoch": 0.5102583757947423, + "grad_norm": 1.5576402237961497, + "learning_rate": 2.030669855803664e-06, + "loss": 0.9314, + "step": 5658 + }, + { + "epoch": 0.5103485593182125, + "grad_norm": 1.4279976952905515, + "learning_rate": 2.0300857125707563e-06, + "loss": 0.9938, + "step": 5659 + }, + { + "epoch": 0.5104387428416828, + "grad_norm": 2.008781229095689, + "learning_rate": 2.0295015667707697e-06, + "loss": 0.9777, + "step": 5660 + }, + { + "epoch": 0.5105289263651531, + "grad_norm": 1.389548796072511, + "learning_rate": 2.0289174184535472e-06, + "loss": 0.9534, + "step": 5661 + }, + { + "epoch": 0.5106191098886234, + "grad_norm": 1.3091540147852756, + "learning_rate": 2.02833326766893e-06, + "loss": 0.9194, + "step": 5662 + }, + { + "epoch": 0.5107092934120936, + "grad_norm": 1.4691750743248178, + "learning_rate": 2.027749114466763e-06, + "loss": 1.0188, + "step": 5663 + }, + { + "epoch": 0.5107994769355638, + "grad_norm": 1.3843218222709854, + "learning_rate": 2.027164958896889e-06, + "loss": 0.9923, + "step": 5664 + }, + { + "epoch": 0.5108896604590342, + "grad_norm": 1.9150647089212074, + "learning_rate": 2.02658080100915e-06, + "loss": 1.0648, + "step": 5665 + }, + { + "epoch": 0.5109798439825044, + "grad_norm": 1.1718688964684802, + "learning_rate": 2.0259966408533915e-06, + "loss": 1.018, + "step": 5666 + }, + { + "epoch": 0.5110700275059746, + "grad_norm": 1.9164022042083644, + "learning_rate": 2.025412478479455e-06, + "loss": 1.0407, + "step": 5667 + }, + { + "epoch": 0.5111602110294449, + "grad_norm": 1.5899781866196754, + "learning_rate": 2.0248283139371862e-06, + "loss": 0.9953, + "step": 5668 + }, + { + "epoch": 0.5112503945529152, + "grad_norm": 1.4490148190042673, + "learning_rate": 2.024244147276429e-06, + "loss": 0.9196, + "step": 5669 + }, + { + "epoch": 0.5113405780763854, + "grad_norm": 1.2020387576260614, + "learning_rate": 2.023659978547027e-06, + "loss": 1.0173, + "step": 5670 + }, + { + "epoch": 0.5114307615998557, + "grad_norm": 1.227547996810168, + "learning_rate": 2.023075807798826e-06, + "loss": 0.9431, + "step": 5671 + }, + { + "epoch": 0.511520945123326, + "grad_norm": 1.3037893893179213, + "learning_rate": 2.0224916350816696e-06, + "loss": 0.8782, + "step": 5672 + }, + { + "epoch": 0.5116111286467963, + "grad_norm": 1.380054935591975, + "learning_rate": 2.0219074604454026e-06, + "loss": 0.9631, + "step": 5673 + }, + { + "epoch": 0.5117013121702665, + "grad_norm": 1.5871089874274615, + "learning_rate": 2.02132328393987e-06, + "loss": 0.9544, + "step": 5674 + }, + { + "epoch": 0.5117914956937367, + "grad_norm": 5.02059984521632, + "learning_rate": 2.0207391056149174e-06, + "loss": 0.9015, + "step": 5675 + }, + { + "epoch": 0.5118816792172071, + "grad_norm": 1.6727314831893687, + "learning_rate": 2.020154925520391e-06, + "loss": 0.9207, + "step": 5676 + }, + { + "epoch": 0.5119718627406773, + "grad_norm": 1.3535430597164357, + "learning_rate": 2.0195707437061332e-06, + "loss": 1.0512, + "step": 5677 + }, + { + "epoch": 0.5120620462641475, + "grad_norm": 1.716433750326427, + "learning_rate": 2.0189865602219934e-06, + "loss": 0.9587, + "step": 5678 + }, + { + "epoch": 0.5121522297876178, + "grad_norm": 1.5630954370826071, + "learning_rate": 
2.0184023751178154e-06, + "loss": 1.0266, + "step": 5679 + }, + { + "epoch": 0.5122424133110881, + "grad_norm": 1.2999193698380447, + "learning_rate": 2.017818188443444e-06, + "loss": 0.9032, + "step": 5680 + }, + { + "epoch": 0.5123325968345583, + "grad_norm": 1.6304470614648174, + "learning_rate": 2.017234000248728e-06, + "loss": 0.9664, + "step": 5681 + }, + { + "epoch": 0.5124227803580286, + "grad_norm": 1.360545388986771, + "learning_rate": 2.0166498105835108e-06, + "loss": 0.8892, + "step": 5682 + }, + { + "epoch": 0.5125129638814988, + "grad_norm": 1.190674303691019, + "learning_rate": 2.0160656194976407e-06, + "loss": 0.8901, + "step": 5683 + }, + { + "epoch": 0.5126031474049692, + "grad_norm": 1.573339325494724, + "learning_rate": 2.0154814270409634e-06, + "loss": 0.8982, + "step": 5684 + }, + { + "epoch": 0.5126933309284394, + "grad_norm": 1.8167480147227417, + "learning_rate": 2.0148972332633247e-06, + "loss": 0.9093, + "step": 5685 + }, + { + "epoch": 0.5127835144519096, + "grad_norm": 1.5636702923239008, + "learning_rate": 2.0143130382145733e-06, + "loss": 1.0125, + "step": 5686 + }, + { + "epoch": 0.5128736979753798, + "grad_norm": 1.9012143897057978, + "learning_rate": 2.0137288419445533e-06, + "loss": 0.9268, + "step": 5687 + }, + { + "epoch": 0.5129638814988502, + "grad_norm": 1.4962405936982015, + "learning_rate": 2.0131446445031134e-06, + "loss": 0.991, + "step": 5688 + }, + { + "epoch": 0.5130540650223204, + "grad_norm": 1.5524499788687713, + "learning_rate": 2.0125604459400994e-06, + "loss": 1.0282, + "step": 5689 + }, + { + "epoch": 0.5131442485457907, + "grad_norm": 1.625439657742832, + "learning_rate": 2.0119762463053596e-06, + "loss": 1.0139, + "step": 5690 + }, + { + "epoch": 0.5132344320692609, + "grad_norm": 1.5525434268645806, + "learning_rate": 2.0113920456487406e-06, + "loss": 0.9445, + "step": 5691 + }, + { + "epoch": 0.5133246155927312, + "grad_norm": 1.3147608041241408, + "learning_rate": 2.010807844020088e-06, + "loss": 0.9448, + "step": 5692 + }, + { + "epoch": 0.5134147991162015, + "grad_norm": 1.3917289809402027, + "learning_rate": 2.0102236414692524e-06, + "loss": 0.9999, + "step": 5693 + }, + { + "epoch": 0.5135049826396717, + "grad_norm": 1.2911678345675242, + "learning_rate": 2.0096394380460777e-06, + "loss": 0.9392, + "step": 5694 + }, + { + "epoch": 0.513595166163142, + "grad_norm": 1.9362713394318978, + "learning_rate": 2.0090552338004136e-06, + "loss": 0.9756, + "step": 5695 + }, + { + "epoch": 0.5136853496866123, + "grad_norm": 1.4927633083288834, + "learning_rate": 2.0084710287821077e-06, + "loss": 0.9476, + "step": 5696 + }, + { + "epoch": 0.5137755332100825, + "grad_norm": 1.4963322939824872, + "learning_rate": 2.007886823041006e-06, + "loss": 0.9983, + "step": 5697 + }, + { + "epoch": 0.5138657167335527, + "grad_norm": 1.60326075880687, + "learning_rate": 2.0073026166269577e-06, + "loss": 0.9702, + "step": 5698 + }, + { + "epoch": 0.5139559002570231, + "grad_norm": 1.2519602663785694, + "learning_rate": 2.0067184095898093e-06, + "loss": 1.0074, + "step": 5699 + }, + { + "epoch": 0.5140460837804933, + "grad_norm": 1.5081558231977517, + "learning_rate": 2.0061342019794094e-06, + "loss": 0.9678, + "step": 5700 + }, + { + "epoch": 0.5141362673039636, + "grad_norm": 1.2895661728332928, + "learning_rate": 2.0055499938456058e-06, + "loss": 1.0262, + "step": 5701 + }, + { + "epoch": 0.5142264508274338, + "grad_norm": 1.454909428795877, + "learning_rate": 2.0049657852382464e-06, + "loss": 0.9223, + "step": 5702 + }, + { + "epoch": 
0.5143166343509041, + "grad_norm": 1.9862028099297542, + "learning_rate": 2.0043815762071782e-06, + "loss": 0.8855, + "step": 5703 + }, + { + "epoch": 0.5144068178743744, + "grad_norm": 1.4528613928143135, + "learning_rate": 2.0037973668022492e-06, + "loss": 0.9194, + "step": 5704 + }, + { + "epoch": 0.5144970013978446, + "grad_norm": 1.6062306176585857, + "learning_rate": 2.003213157073309e-06, + "loss": 0.8393, + "step": 5705 + }, + { + "epoch": 0.5145871849213148, + "grad_norm": 1.4053187359685726, + "learning_rate": 2.002628947070204e-06, + "loss": 0.945, + "step": 5706 + }, + { + "epoch": 0.5146773684447852, + "grad_norm": 1.4329430041268816, + "learning_rate": 2.002044736842783e-06, + "loss": 1.0139, + "step": 5707 + }, + { + "epoch": 0.5147675519682554, + "grad_norm": 1.4290172902923848, + "learning_rate": 2.001460526440894e-06, + "loss": 0.9424, + "step": 5708 + }, + { + "epoch": 0.5148577354917256, + "grad_norm": 0.672086771037641, + "learning_rate": 2.0008763159143843e-06, + "loss": 0.791, + "step": 5709 + }, + { + "epoch": 0.5149479190151959, + "grad_norm": 1.3374936593876932, + "learning_rate": 2.000292105313103e-06, + "loss": 0.9745, + "step": 5710 + }, + { + "epoch": 0.5150381025386662, + "grad_norm": 1.3342108918166435, + "learning_rate": 1.999707894686897e-06, + "loss": 0.9953, + "step": 5711 + }, + { + "epoch": 0.5151282860621365, + "grad_norm": 1.3719320315799932, + "learning_rate": 1.9991236840856155e-06, + "loss": 0.9465, + "step": 5712 + }, + { + "epoch": 0.5152184695856067, + "grad_norm": 1.4651027603043216, + "learning_rate": 1.9985394735591065e-06, + "loss": 0.9222, + "step": 5713 + }, + { + "epoch": 0.5153086531090769, + "grad_norm": 1.5041458533512255, + "learning_rate": 1.997955263157217e-06, + "loss": 0.9976, + "step": 5714 + }, + { + "epoch": 0.5153988366325473, + "grad_norm": 1.4070534847860126, + "learning_rate": 1.997371052929796e-06, + "loss": 0.9783, + "step": 5715 + }, + { + "epoch": 0.5154890201560175, + "grad_norm": 1.5119933044144058, + "learning_rate": 1.996786842926691e-06, + "loss": 0.9038, + "step": 5716 + }, + { + "epoch": 0.5155792036794877, + "grad_norm": 1.7905688212105144, + "learning_rate": 1.9962026331977506e-06, + "loss": 0.9632, + "step": 5717 + }, + { + "epoch": 0.5156693872029581, + "grad_norm": 1.4925008714376236, + "learning_rate": 1.9956184237928224e-06, + "loss": 0.9662, + "step": 5718 + }, + { + "epoch": 0.5157595707264283, + "grad_norm": 1.479821055909608, + "learning_rate": 1.995034214761754e-06, + "loss": 0.9371, + "step": 5719 + }, + { + "epoch": 0.5158497542498985, + "grad_norm": 1.1481320760028149, + "learning_rate": 1.9944500061543945e-06, + "loss": 0.9265, + "step": 5720 + }, + { + "epoch": 0.5159399377733688, + "grad_norm": 1.701629401959354, + "learning_rate": 1.99386579802059e-06, + "loss": 0.9728, + "step": 5721 + }, + { + "epoch": 0.5160301212968391, + "grad_norm": 1.740085034539749, + "learning_rate": 1.993281590410191e-06, + "loss": 0.9316, + "step": 5722 + }, + { + "epoch": 0.5161203048203094, + "grad_norm": 1.5230317969406244, + "learning_rate": 1.992697383373043e-06, + "loss": 0.9026, + "step": 5723 + }, + { + "epoch": 0.5162104883437796, + "grad_norm": 0.8657719511487272, + "learning_rate": 1.9921131769589937e-06, + "loss": 0.8932, + "step": 5724 + }, + { + "epoch": 0.5163006718672498, + "grad_norm": 1.6104848053578693, + "learning_rate": 1.991528971217893e-06, + "loss": 0.9524, + "step": 5725 + }, + { + "epoch": 0.5163908553907202, + "grad_norm": 1.4072861562866184, + "learning_rate": 1.9909447661995858e-06, + 
"loss": 0.946, + "step": 5726 + }, + { + "epoch": 0.5164810389141904, + "grad_norm": 1.6635142394758649, + "learning_rate": 1.990360561953922e-06, + "loss": 0.9663, + "step": 5727 + }, + { + "epoch": 0.5165712224376606, + "grad_norm": 1.2291182115131278, + "learning_rate": 1.9897763585307483e-06, + "loss": 0.976, + "step": 5728 + }, + { + "epoch": 0.5166614059611309, + "grad_norm": 1.4039573630409645, + "learning_rate": 1.989192155979912e-06, + "loss": 0.9872, + "step": 5729 + }, + { + "epoch": 0.5167515894846012, + "grad_norm": 1.357657300992812, + "learning_rate": 1.98860795435126e-06, + "loss": 0.9833, + "step": 5730 + }, + { + "epoch": 0.5168417730080714, + "grad_norm": 1.3864835297864668, + "learning_rate": 1.9880237536946406e-06, + "loss": 0.9579, + "step": 5731 + }, + { + "epoch": 0.5169319565315417, + "grad_norm": 1.6363033231784827, + "learning_rate": 1.987439554059901e-06, + "loss": 0.9155, + "step": 5732 + }, + { + "epoch": 0.5170221400550119, + "grad_norm": 1.4210326665550317, + "learning_rate": 1.9868553554968864e-06, + "loss": 0.925, + "step": 5733 + }, + { + "epoch": 0.5171123235784822, + "grad_norm": 1.5589180898087724, + "learning_rate": 1.986271158055447e-06, + "loss": 0.9047, + "step": 5734 + }, + { + "epoch": 0.5172025071019525, + "grad_norm": 1.429678411402763, + "learning_rate": 1.9856869617854273e-06, + "loss": 0.9455, + "step": 5735 + }, + { + "epoch": 0.5172926906254227, + "grad_norm": 1.5025589097037517, + "learning_rate": 1.9851027667366746e-06, + "loss": 0.9244, + "step": 5736 + }, + { + "epoch": 0.517382874148893, + "grad_norm": 1.338324884258946, + "learning_rate": 1.984518572959037e-06, + "loss": 1.0485, + "step": 5737 + }, + { + "epoch": 0.5174730576723633, + "grad_norm": 1.5891179107937579, + "learning_rate": 1.9839343805023587e-06, + "loss": 0.8687, + "step": 5738 + }, + { + "epoch": 0.5175632411958335, + "grad_norm": 1.617140027515029, + "learning_rate": 1.9833501894164886e-06, + "loss": 1.0392, + "step": 5739 + }, + { + "epoch": 0.5176534247193038, + "grad_norm": 1.3821018785335162, + "learning_rate": 1.982765999751273e-06, + "loss": 0.9891, + "step": 5740 + }, + { + "epoch": 0.5177436082427741, + "grad_norm": 1.7344303981422131, + "learning_rate": 1.9821818115565553e-06, + "loss": 0.9809, + "step": 5741 + }, + { + "epoch": 0.5178337917662443, + "grad_norm": 1.4293939674763612, + "learning_rate": 1.9815976248821853e-06, + "loss": 0.8628, + "step": 5742 + }, + { + "epoch": 0.5179239752897146, + "grad_norm": 1.21094153618909, + "learning_rate": 1.981013439778007e-06, + "loss": 0.9637, + "step": 5743 + }, + { + "epoch": 0.5180141588131848, + "grad_norm": 1.516112101826232, + "learning_rate": 1.9804292562938666e-06, + "loss": 0.9603, + "step": 5744 + }, + { + "epoch": 0.5181043423366551, + "grad_norm": 1.4871927120264097, + "learning_rate": 1.97984507447961e-06, + "loss": 0.9937, + "step": 5745 + }, + { + "epoch": 0.5181945258601254, + "grad_norm": 1.5115391824219935, + "learning_rate": 1.9792608943850824e-06, + "loss": 0.8939, + "step": 5746 + }, + { + "epoch": 0.5182847093835956, + "grad_norm": 1.4083260364832502, + "learning_rate": 1.9786767160601305e-06, + "loss": 0.9316, + "step": 5747 + }, + { + "epoch": 0.5183748929070658, + "grad_norm": 1.4289165982413439, + "learning_rate": 1.9780925395545977e-06, + "loss": 0.879, + "step": 5748 + }, + { + "epoch": 0.5184650764305362, + "grad_norm": 1.0402799658645505, + "learning_rate": 1.9775083649183306e-06, + "loss": 0.8022, + "step": 5749 + }, + { + "epoch": 0.5185552599540064, + "grad_norm": 
1.4714167308727784, + "learning_rate": 1.976924192201174e-06, + "loss": 0.9937, + "step": 5750 + }, + { + "epoch": 0.5186454434774767, + "grad_norm": 1.413239255231672, + "learning_rate": 1.9763400214529723e-06, + "loss": 0.8842, + "step": 5751 + }, + { + "epoch": 0.5187356270009469, + "grad_norm": 1.4146381702758886, + "learning_rate": 1.9757558527235713e-06, + "loss": 0.8226, + "step": 5752 + }, + { + "epoch": 0.5188258105244172, + "grad_norm": 1.4595854515577942, + "learning_rate": 1.9751716860628136e-06, + "loss": 0.9442, + "step": 5753 + }, + { + "epoch": 0.5189159940478875, + "grad_norm": 1.5410145419930446, + "learning_rate": 1.974587521520545e-06, + "loss": 0.975, + "step": 5754 + }, + { + "epoch": 0.5190061775713577, + "grad_norm": 1.3408493850123375, + "learning_rate": 1.9740033591466088e-06, + "loss": 0.9895, + "step": 5755 + }, + { + "epoch": 0.5190963610948279, + "grad_norm": 1.3416974559749866, + "learning_rate": 1.97341919899085e-06, + "loss": 1.057, + "step": 5756 + }, + { + "epoch": 0.5191865446182983, + "grad_norm": 1.5038894454215352, + "learning_rate": 1.9728350411031114e-06, + "loss": 0.9772, + "step": 5757 + }, + { + "epoch": 0.5192767281417685, + "grad_norm": 1.5821956596186713, + "learning_rate": 1.9722508855332367e-06, + "loss": 1.0221, + "step": 5758 + }, + { + "epoch": 0.5193669116652387, + "grad_norm": 1.2928403577414975, + "learning_rate": 1.97166673233107e-06, + "loss": 0.8859, + "step": 5759 + }, + { + "epoch": 0.519457095188709, + "grad_norm": 1.308310042414924, + "learning_rate": 1.971082581546453e-06, + "loss": 0.9938, + "step": 5760 + }, + { + "epoch": 0.5195472787121793, + "grad_norm": 1.4520487747932789, + "learning_rate": 1.9704984332292306e-06, + "loss": 0.9759, + "step": 5761 + }, + { + "epoch": 0.5196374622356495, + "grad_norm": 1.3979774869298531, + "learning_rate": 1.9699142874292444e-06, + "loss": 1.0391, + "step": 5762 + }, + { + "epoch": 0.5197276457591198, + "grad_norm": 1.773218754793061, + "learning_rate": 1.969330144196336e-06, + "loss": 1.0433, + "step": 5763 + }, + { + "epoch": 0.51981782928259, + "grad_norm": 2.3888733873184713, + "learning_rate": 1.9687460035803497e-06, + "loss": 0.9044, + "step": 5764 + }, + { + "epoch": 0.5199080128060604, + "grad_norm": 2.3645813951750445, + "learning_rate": 1.9681618656311265e-06, + "loss": 0.9263, + "step": 5765 + }, + { + "epoch": 0.5199981963295306, + "grad_norm": 1.3602546388995354, + "learning_rate": 1.9675777303985086e-06, + "loss": 0.8639, + "step": 5766 + }, + { + "epoch": 0.5200883798530008, + "grad_norm": 1.5309089748001137, + "learning_rate": 1.9669935979323376e-06, + "loss": 0.9845, + "step": 5767 + }, + { + "epoch": 0.5201785633764712, + "grad_norm": 1.4360877023867773, + "learning_rate": 1.9664094682824545e-06, + "loss": 1.0114, + "step": 5768 + }, + { + "epoch": 0.5202687468999414, + "grad_norm": 1.3383078711111702, + "learning_rate": 1.965825341498701e-06, + "loss": 0.9137, + "step": 5769 + }, + { + "epoch": 0.5203589304234116, + "grad_norm": 1.2755513345548488, + "learning_rate": 1.9652412176309177e-06, + "loss": 1.0236, + "step": 5770 + }, + { + "epoch": 0.5204491139468819, + "grad_norm": 1.176764143930711, + "learning_rate": 1.9646570967289453e-06, + "loss": 0.8185, + "step": 5771 + }, + { + "epoch": 0.5205392974703522, + "grad_norm": 1.3505773757806625, + "learning_rate": 1.9640729788426246e-06, + "loss": 0.9875, + "step": 5772 + }, + { + "epoch": 0.5206294809938224, + "grad_norm": 1.2973777072436097, + "learning_rate": 1.963488864021795e-06, + "loss": 0.9863, + "step": 5773 + }, 
+ { + "epoch": 0.5207196645172927, + "grad_norm": 1.4231872372091985, + "learning_rate": 1.962904752316298e-06, + "loss": 0.8444, + "step": 5774 + }, + { + "epoch": 0.5208098480407629, + "grad_norm": 1.610831703241783, + "learning_rate": 1.9623206437759706e-06, + "loss": 0.8769, + "step": 5775 + }, + { + "epoch": 0.5209000315642333, + "grad_norm": 1.4037767489594595, + "learning_rate": 1.9617365384506545e-06, + "loss": 0.91, + "step": 5776 + }, + { + "epoch": 0.5209902150877035, + "grad_norm": 1.1529426699344767, + "learning_rate": 1.9611524363901872e-06, + "loss": 0.9361, + "step": 5777 + }, + { + "epoch": 0.5210803986111737, + "grad_norm": 1.3882442626147606, + "learning_rate": 1.960568337644409e-06, + "loss": 0.9453, + "step": 5778 + }, + { + "epoch": 0.521170582134644, + "grad_norm": 1.3067414796506567, + "learning_rate": 1.9599842422631576e-06, + "loss": 0.9349, + "step": 5779 + }, + { + "epoch": 0.5212607656581143, + "grad_norm": 1.4204553077002127, + "learning_rate": 1.9594001502962703e-06, + "loss": 0.9428, + "step": 5780 + }, + { + "epoch": 0.5213509491815845, + "grad_norm": 1.1928043354605489, + "learning_rate": 1.9588160617935868e-06, + "loss": 1.0105, + "step": 5781 + }, + { + "epoch": 0.5214411327050548, + "grad_norm": 1.660467859082904, + "learning_rate": 1.958231976804944e-06, + "loss": 1.04, + "step": 5782 + }, + { + "epoch": 0.521531316228525, + "grad_norm": 1.6518089926121424, + "learning_rate": 1.957647895380179e-06, + "loss": 0.8786, + "step": 5783 + }, + { + "epoch": 0.5216214997519953, + "grad_norm": 1.3032437405358173, + "learning_rate": 1.9570638175691297e-06, + "loss": 0.8755, + "step": 5784 + }, + { + "epoch": 0.5217116832754656, + "grad_norm": 1.6176784341492736, + "learning_rate": 1.956479743421632e-06, + "loss": 0.9879, + "step": 5785 + }, + { + "epoch": 0.5218018667989358, + "grad_norm": 1.4296568987130163, + "learning_rate": 1.955895672987522e-06, + "loss": 0.8896, + "step": 5786 + }, + { + "epoch": 0.521892050322406, + "grad_norm": 1.337894456398483, + "learning_rate": 1.9553116063166367e-06, + "loss": 0.8493, + "step": 5787 + }, + { + "epoch": 0.5219822338458764, + "grad_norm": 1.3433486427794663, + "learning_rate": 1.954727543458812e-06, + "loss": 0.8614, + "step": 5788 + }, + { + "epoch": 0.5220724173693466, + "grad_norm": 1.4547716061117968, + "learning_rate": 1.954143484463883e-06, + "loss": 0.9762, + "step": 5789 + }, + { + "epoch": 0.5221626008928169, + "grad_norm": 1.6636349840769613, + "learning_rate": 1.9535594293816836e-06, + "loss": 0.9421, + "step": 5790 + }, + { + "epoch": 0.5222527844162872, + "grad_norm": 2.6338414748156787, + "learning_rate": 1.952975378262051e-06, + "loss": 0.899, + "step": 5791 + }, + { + "epoch": 0.5223429679397574, + "grad_norm": 1.6133590568640808, + "learning_rate": 1.952391331154817e-06, + "loss": 0.9019, + "step": 5792 + }, + { + "epoch": 0.5224331514632277, + "grad_norm": 1.5702355968566055, + "learning_rate": 1.9518072881098185e-06, + "loss": 1.0062, + "step": 5793 + }, + { + "epoch": 0.5225233349866979, + "grad_norm": 1.424736831858544, + "learning_rate": 1.9512232491768867e-06, + "loss": 0.9387, + "step": 5794 + }, + { + "epoch": 0.5226135185101682, + "grad_norm": 1.256776560793917, + "learning_rate": 1.9506392144058573e-06, + "loss": 0.9435, + "step": 5795 + }, + { + "epoch": 0.5227037020336385, + "grad_norm": 1.4961520748870967, + "learning_rate": 1.9500551838465623e-06, + "loss": 0.9424, + "step": 5796 + }, + { + "epoch": 0.5227938855571087, + "grad_norm": 1.4771251918240265, + "learning_rate": 
1.9494711575488337e-06, + "loss": 0.9568, + "step": 5797 + }, + { + "epoch": 0.5228840690805789, + "grad_norm": 2.0172245273397733, + "learning_rate": 1.948887135562505e-06, + "loss": 0.9029, + "step": 5798 + }, + { + "epoch": 0.5229742526040493, + "grad_norm": 1.3774384204787518, + "learning_rate": 1.9483031179374074e-06, + "loss": 0.9998, + "step": 5799 + }, + { + "epoch": 0.5230644361275195, + "grad_norm": 1.4975878871544024, + "learning_rate": 1.9477191047233736e-06, + "loss": 1.0052, + "step": 5800 + }, + { + "epoch": 0.5231546196509897, + "grad_norm": 1.6537586403964863, + "learning_rate": 1.9471350959702334e-06, + "loss": 0.8305, + "step": 5801 + }, + { + "epoch": 0.52324480317446, + "grad_norm": 1.5783158743972607, + "learning_rate": 1.9465510917278184e-06, + "loss": 0.9518, + "step": 5802 + }, + { + "epoch": 0.5233349866979303, + "grad_norm": 1.4297055102605347, + "learning_rate": 1.9459670920459593e-06, + "loss": 0.9422, + "step": 5803 + }, + { + "epoch": 0.5234251702214006, + "grad_norm": 1.5652053300996374, + "learning_rate": 1.945383096974485e-06, + "loss": 0.8686, + "step": 5804 + }, + { + "epoch": 0.5235153537448708, + "grad_norm": 2.2674018995099585, + "learning_rate": 1.944799106563227e-06, + "loss": 0.8684, + "step": 5805 + }, + { + "epoch": 0.523605537268341, + "grad_norm": 1.430164778625738, + "learning_rate": 1.9442151208620133e-06, + "loss": 0.9762, + "step": 5806 + }, + { + "epoch": 0.5236957207918114, + "grad_norm": 1.4317011592708158, + "learning_rate": 1.943631139920672e-06, + "loss": 0.9954, + "step": 5807 + }, + { + "epoch": 0.5237859043152816, + "grad_norm": 1.2714049611876783, + "learning_rate": 1.943047163789034e-06, + "loss": 0.8983, + "step": 5808 + }, + { + "epoch": 0.5238760878387518, + "grad_norm": 1.3388005413270876, + "learning_rate": 1.942463192516925e-06, + "loss": 0.9099, + "step": 5809 + }, + { + "epoch": 0.5239662713622221, + "grad_norm": 1.3460651680274782, + "learning_rate": 1.9418792261541746e-06, + "loss": 0.9636, + "step": 5810 + }, + { + "epoch": 0.5240564548856924, + "grad_norm": 1.4924344163092236, + "learning_rate": 1.9412952647506094e-06, + "loss": 0.9046, + "step": 5811 + }, + { + "epoch": 0.5241466384091626, + "grad_norm": 1.3493546108809802, + "learning_rate": 1.9407113083560552e-06, + "loss": 0.9251, + "step": 5812 + }, + { + "epoch": 0.5242368219326329, + "grad_norm": 1.648270946603919, + "learning_rate": 1.940127357020339e-06, + "loss": 0.933, + "step": 5813 + }, + { + "epoch": 0.5243270054561032, + "grad_norm": 1.65156114590823, + "learning_rate": 1.939543410793287e-06, + "loss": 0.9556, + "step": 5814 + }, + { + "epoch": 0.5244171889795735, + "grad_norm": 1.4522894641401858, + "learning_rate": 1.9389594697247246e-06, + "loss": 0.9856, + "step": 5815 + }, + { + "epoch": 0.5245073725030437, + "grad_norm": 1.5348543836133428, + "learning_rate": 1.9383755338644763e-06, + "loss": 1.0194, + "step": 5816 + }, + { + "epoch": 0.5245975560265139, + "grad_norm": 3.413348160304212, + "learning_rate": 1.937791603262368e-06, + "loss": 0.9747, + "step": 5817 + }, + { + "epoch": 0.5246877395499843, + "grad_norm": 1.439539913216429, + "learning_rate": 1.9372076779682235e-06, + "loss": 0.8937, + "step": 5818 + }, + { + "epoch": 0.5247779230734545, + "grad_norm": 1.6004666184457061, + "learning_rate": 1.9366237580318648e-06, + "loss": 0.8892, + "step": 5819 + }, + { + "epoch": 0.5248681065969247, + "grad_norm": 1.2206950195043396, + "learning_rate": 1.9360398435031176e-06, + "loss": 0.9493, + "step": 5820 + }, + { + "epoch": 0.524958290120395, + 
"grad_norm": 1.6069355967256205, + "learning_rate": 1.9354559344318025e-06, + "loss": 0.9643, + "step": 5821 + }, + { + "epoch": 0.5250484736438653, + "grad_norm": 1.8859578685403287, + "learning_rate": 1.934872030867744e-06, + "loss": 1.0006, + "step": 5822 + }, + { + "epoch": 0.5251386571673355, + "grad_norm": 1.5143928631946364, + "learning_rate": 1.934288132860763e-06, + "loss": 0.9535, + "step": 5823 + }, + { + "epoch": 0.5252288406908058, + "grad_norm": 1.3127417568804491, + "learning_rate": 1.93370424046068e-06, + "loss": 0.9489, + "step": 5824 + }, + { + "epoch": 0.525319024214276, + "grad_norm": 1.8487818315820728, + "learning_rate": 1.9331203537173177e-06, + "loss": 1.0082, + "step": 5825 + }, + { + "epoch": 0.5254092077377464, + "grad_norm": 1.5424965437691174, + "learning_rate": 1.9325364726804947e-06, + "loss": 1.059, + "step": 5826 + }, + { + "epoch": 0.5254993912612166, + "grad_norm": 1.5290202040350984, + "learning_rate": 1.9319525974000327e-06, + "loss": 0.8757, + "step": 5827 + }, + { + "epoch": 0.5255895747846868, + "grad_norm": 1.9914196853513717, + "learning_rate": 1.93136872792575e-06, + "loss": 1.0051, + "step": 5828 + }, + { + "epoch": 0.525679758308157, + "grad_norm": 1.3848810188196408, + "learning_rate": 1.9307848643074653e-06, + "loss": 0.8971, + "step": 5829 + }, + { + "epoch": 0.5257699418316274, + "grad_norm": 1.3287244902321504, + "learning_rate": 1.9302010065949985e-06, + "loss": 0.984, + "step": 5830 + }, + { + "epoch": 0.5258601253550976, + "grad_norm": 1.4362701462410887, + "learning_rate": 1.9296171548381657e-06, + "loss": 0.9074, + "step": 5831 + }, + { + "epoch": 0.5259503088785679, + "grad_norm": 1.370730403564569, + "learning_rate": 1.9290333090867862e-06, + "loss": 0.9358, + "step": 5832 + }, + { + "epoch": 0.5260404924020381, + "grad_norm": 1.5638957084264655, + "learning_rate": 1.928449469390676e-06, + "loss": 0.9501, + "step": 5833 + }, + { + "epoch": 0.5261306759255084, + "grad_norm": 1.4467798156408178, + "learning_rate": 1.927865635799651e-06, + "loss": 0.919, + "step": 5834 + }, + { + "epoch": 0.5262208594489787, + "grad_norm": 1.2677831733739988, + "learning_rate": 1.927281808363528e-06, + "loss": 0.8791, + "step": 5835 + }, + { + "epoch": 0.5263110429724489, + "grad_norm": 1.757402160221924, + "learning_rate": 1.9266979871321216e-06, + "loss": 1.0229, + "step": 5836 + }, + { + "epoch": 0.5264012264959192, + "grad_norm": 1.417498802305654, + "learning_rate": 1.9261141721552482e-06, + "loss": 0.8988, + "step": 5837 + }, + { + "epoch": 0.5264914100193895, + "grad_norm": 1.346232184289556, + "learning_rate": 1.9255303634827204e-06, + "loss": 1.0145, + "step": 5838 + }, + { + "epoch": 0.5265815935428597, + "grad_norm": 1.4232117793212744, + "learning_rate": 1.924946561164352e-06, + "loss": 1.011, + "step": 5839 + }, + { + "epoch": 0.52667177706633, + "grad_norm": 1.4065133166152317, + "learning_rate": 1.9243627652499582e-06, + "loss": 0.8943, + "step": 5840 + }, + { + "epoch": 0.5267619605898003, + "grad_norm": 1.666092185511888, + "learning_rate": 1.9237789757893493e-06, + "loss": 1.011, + "step": 5841 + }, + { + "epoch": 0.5268521441132705, + "grad_norm": 1.4492978796730016, + "learning_rate": 1.9231951928323395e-06, + "loss": 0.9817, + "step": 5842 + }, + { + "epoch": 0.5269423276367408, + "grad_norm": 1.6516585135362922, + "learning_rate": 1.922611416428738e-06, + "loss": 0.9034, + "step": 5843 + }, + { + "epoch": 0.527032511160211, + "grad_norm": 1.4563706769590132, + "learning_rate": 1.922027646628358e-06, + "loss": 0.9648, + "step": 5844 
+ }, + { + "epoch": 0.5271226946836813, + "grad_norm": 0.7522934576953607, + "learning_rate": 1.9214438834810092e-06, + "loss": 0.8469, + "step": 5845 + }, + { + "epoch": 0.5272128782071516, + "grad_norm": 1.3450808477388023, + "learning_rate": 1.9208601270365008e-06, + "loss": 0.8759, + "step": 5846 + }, + { + "epoch": 0.5273030617306218, + "grad_norm": 1.6350356304007732, + "learning_rate": 1.9202763773446435e-06, + "loss": 1.0356, + "step": 5847 + }, + { + "epoch": 0.527393245254092, + "grad_norm": 1.4448554888735508, + "learning_rate": 1.9196926344552444e-06, + "loss": 0.8471, + "step": 5848 + }, + { + "epoch": 0.5274834287775624, + "grad_norm": 1.400364746535646, + "learning_rate": 1.919108898418113e-06, + "loss": 0.9729, + "step": 5849 + }, + { + "epoch": 0.5275736123010326, + "grad_norm": 1.2229374571315905, + "learning_rate": 1.918525169283057e-06, + "loss": 0.839, + "step": 5850 + }, + { + "epoch": 0.5276637958245028, + "grad_norm": 1.4999689257899882, + "learning_rate": 1.9179414470998817e-06, + "loss": 0.9763, + "step": 5851 + }, + { + "epoch": 0.5277539793479731, + "grad_norm": 1.9366536291452885, + "learning_rate": 1.917357731918395e-06, + "loss": 0.9062, + "step": 5852 + }, + { + "epoch": 0.5278441628714434, + "grad_norm": 1.3248582944129, + "learning_rate": 1.9167740237884025e-06, + "loss": 1.0773, + "step": 5853 + }, + { + "epoch": 0.5279343463949137, + "grad_norm": 2.4497773478017324, + "learning_rate": 1.916190322759709e-06, + "loss": 0.9843, + "step": 5854 + }, + { + "epoch": 0.5280245299183839, + "grad_norm": 1.43472793990739, + "learning_rate": 1.91560662888212e-06, + "loss": 1.0477, + "step": 5855 + }, + { + "epoch": 0.5281147134418541, + "grad_norm": 1.2623504380120856, + "learning_rate": 1.915022942205438e-06, + "loss": 0.9881, + "step": 5856 + }, + { + "epoch": 0.5282048969653245, + "grad_norm": 1.4893850003321765, + "learning_rate": 1.914439262779468e-06, + "loss": 0.9437, + "step": 5857 + }, + { + "epoch": 0.5282950804887947, + "grad_norm": 2.102583172907298, + "learning_rate": 1.9138555906540103e-06, + "loss": 0.8883, + "step": 5858 + }, + { + "epoch": 0.5283852640122649, + "grad_norm": 1.4136812739443223, + "learning_rate": 1.91327192587887e-06, + "loss": 0.861, + "step": 5859 + }, + { + "epoch": 0.5284754475357353, + "grad_norm": 1.5852670990834326, + "learning_rate": 1.912688268503846e-06, + "loss": 0.9236, + "step": 5860 + }, + { + "epoch": 0.5285656310592055, + "grad_norm": 1.5016006036185108, + "learning_rate": 1.912104618578741e-06, + "loss": 0.9015, + "step": 5861 + }, + { + "epoch": 0.5286558145826757, + "grad_norm": 1.4332311519898595, + "learning_rate": 1.9115209761533554e-06, + "loss": 0.9257, + "step": 5862 + }, + { + "epoch": 0.528745998106146, + "grad_norm": 1.763389928170336, + "learning_rate": 1.9109373412774863e-06, + "loss": 0.8604, + "step": 5863 + }, + { + "epoch": 0.5288361816296163, + "grad_norm": 1.2947454317921603, + "learning_rate": 1.910353714000936e-06, + "loss": 0.947, + "step": 5864 + }, + { + "epoch": 0.5289263651530866, + "grad_norm": 1.6944117812185666, + "learning_rate": 1.9097700943734997e-06, + "loss": 0.9028, + "step": 5865 + }, + { + "epoch": 0.5290165486765568, + "grad_norm": 1.3936541280573986, + "learning_rate": 1.909186482444977e-06, + "loss": 0.8484, + "step": 5866 + }, + { + "epoch": 0.529106732200027, + "grad_norm": 1.518307308151243, + "learning_rate": 1.9086028782651652e-06, + "loss": 0.8501, + "step": 5867 + }, + { + "epoch": 0.5291969157234974, + "grad_norm": 1.4611187990676917, + "learning_rate": 
1.908019281883859e-06, + "loss": 0.9448, + "step": 5868 + }, + { + "epoch": 0.5292870992469676, + "grad_norm": 1.2381092517665788, + "learning_rate": 1.9074356933508545e-06, + "loss": 0.9384, + "step": 5869 + }, + { + "epoch": 0.5293772827704378, + "grad_norm": 1.886351113209932, + "learning_rate": 1.9068521127159477e-06, + "loss": 0.9831, + "step": 5870 + }, + { + "epoch": 0.5294674662939081, + "grad_norm": 1.346907742272704, + "learning_rate": 1.9062685400289322e-06, + "loss": 0.8993, + "step": 5871 + }, + { + "epoch": 0.5295576498173784, + "grad_norm": 1.2634371461379512, + "learning_rate": 1.9056849753396018e-06, + "loss": 0.9203, + "step": 5872 + }, + { + "epoch": 0.5296478333408486, + "grad_norm": 1.7131899286190093, + "learning_rate": 1.9051014186977485e-06, + "loss": 0.9813, + "step": 5873 + }, + { + "epoch": 0.5297380168643189, + "grad_norm": 1.262743931990057, + "learning_rate": 1.9045178701531664e-06, + "loss": 0.9438, + "step": 5874 + }, + { + "epoch": 0.5298282003877891, + "grad_norm": 1.2254509718545457, + "learning_rate": 1.903934329755645e-06, + "loss": 0.9344, + "step": 5875 + }, + { + "epoch": 0.5299183839112594, + "grad_norm": 1.3393341518907211, + "learning_rate": 1.9033507975549775e-06, + "loss": 0.9848, + "step": 5876 + }, + { + "epoch": 0.5300085674347297, + "grad_norm": 1.3591403977977785, + "learning_rate": 1.9027672736009525e-06, + "loss": 0.935, + "step": 5877 + }, + { + "epoch": 0.5300987509581999, + "grad_norm": 1.2927788540245013, + "learning_rate": 1.9021837579433593e-06, + "loss": 0.8941, + "step": 5878 + }, + { + "epoch": 0.5301889344816701, + "grad_norm": 1.3619272673172005, + "learning_rate": 1.901600250631988e-06, + "loss": 0.927, + "step": 5879 + }, + { + "epoch": 0.5302791180051405, + "grad_norm": 1.3976519205571132, + "learning_rate": 1.901016751716625e-06, + "loss": 0.9332, + "step": 5880 + }, + { + "epoch": 0.5303693015286107, + "grad_norm": 1.3957136681727274, + "learning_rate": 1.9004332612470593e-06, + "loss": 0.914, + "step": 5881 + }, + { + "epoch": 0.530459485052081, + "grad_norm": 1.1149517175458747, + "learning_rate": 1.8998497792730763e-06, + "loss": 0.973, + "step": 5882 + }, + { + "epoch": 0.5305496685755512, + "grad_norm": 1.453667980186209, + "learning_rate": 1.8992663058444629e-06, + "loss": 0.9956, + "step": 5883 + }, + { + "epoch": 0.5306398520990215, + "grad_norm": 1.2512178210713758, + "learning_rate": 1.8986828410110032e-06, + "loss": 0.9817, + "step": 5884 + }, + { + "epoch": 0.5307300356224918, + "grad_norm": 1.560632963515417, + "learning_rate": 1.8980993848224823e-06, + "loss": 0.9348, + "step": 5885 + }, + { + "epoch": 0.530820219145962, + "grad_norm": 1.35713146588557, + "learning_rate": 1.8975159373286843e-06, + "loss": 0.9017, + "step": 5886 + }, + { + "epoch": 0.5309104026694323, + "grad_norm": 1.326438775824824, + "learning_rate": 1.8969324985793904e-06, + "loss": 0.975, + "step": 5887 + }, + { + "epoch": 0.5310005861929026, + "grad_norm": 1.5202193479629906, + "learning_rate": 1.8963490686243851e-06, + "loss": 0.9393, + "step": 5888 + }, + { + "epoch": 0.5310907697163728, + "grad_norm": 1.6173122192057072, + "learning_rate": 1.8957656475134486e-06, + "loss": 0.8776, + "step": 5889 + }, + { + "epoch": 0.531180953239843, + "grad_norm": 1.5024582270243811, + "learning_rate": 1.895182235296361e-06, + "loss": 0.9259, + "step": 5890 + }, + { + "epoch": 0.5312711367633134, + "grad_norm": 1.4064491978599472, + "learning_rate": 1.8945988320229042e-06, + "loss": 0.8673, + "step": 5891 + }, + { + "epoch": 0.5313613202867836, + 
"grad_norm": 1.5160164868205648, + "learning_rate": 1.8940154377428553e-06, + "loss": 1.0344, + "step": 5892 + }, + { + "epoch": 0.5314515038102539, + "grad_norm": 1.3688924131814237, + "learning_rate": 1.8934320525059944e-06, + "loss": 0.9717, + "step": 5893 + }, + { + "epoch": 0.5315416873337241, + "grad_norm": 2.9197154504793064, + "learning_rate": 1.8928486763620984e-06, + "loss": 0.8349, + "step": 5894 + }, + { + "epoch": 0.5316318708571944, + "grad_norm": 1.4734613054603656, + "learning_rate": 1.892265309360943e-06, + "loss": 0.9616, + "step": 5895 + }, + { + "epoch": 0.5317220543806647, + "grad_norm": 1.5345587501901106, + "learning_rate": 1.8916819515523067e-06, + "loss": 0.932, + "step": 5896 + }, + { + "epoch": 0.5318122379041349, + "grad_norm": 1.3478176738976688, + "learning_rate": 1.891098602985963e-06, + "loss": 0.8661, + "step": 5897 + }, + { + "epoch": 0.5319024214276051, + "grad_norm": 1.3930031233572466, + "learning_rate": 1.8905152637116872e-06, + "loss": 0.9622, + "step": 5898 + }, + { + "epoch": 0.5319926049510755, + "grad_norm": 1.512909255342743, + "learning_rate": 1.8899319337792527e-06, + "loss": 0.9797, + "step": 5899 + }, + { + "epoch": 0.5320827884745457, + "grad_norm": 1.1634498878739612, + "learning_rate": 1.8893486132384325e-06, + "loss": 0.9813, + "step": 5900 + }, + { + "epoch": 0.5321729719980159, + "grad_norm": 1.4888492811843412, + "learning_rate": 1.888765302138999e-06, + "loss": 0.9201, + "step": 5901 + }, + { + "epoch": 0.5322631555214862, + "grad_norm": 1.456461531702385, + "learning_rate": 1.8881820005307224e-06, + "loss": 0.8896, + "step": 5902 + }, + { + "epoch": 0.5323533390449565, + "grad_norm": 1.2488028992988855, + "learning_rate": 1.8875987084633748e-06, + "loss": 0.8823, + "step": 5903 + }, + { + "epoch": 0.5324435225684268, + "grad_norm": 1.67934211906042, + "learning_rate": 1.8870154259867246e-06, + "loss": 0.9763, + "step": 5904 + }, + { + "epoch": 0.532533706091897, + "grad_norm": 1.4203937066660939, + "learning_rate": 1.886432153150542e-06, + "loss": 0.8765, + "step": 5905 + }, + { + "epoch": 0.5326238896153672, + "grad_norm": 1.4402298921288215, + "learning_rate": 1.8858488900045944e-06, + "loss": 0.9256, + "step": 5906 + }, + { + "epoch": 0.5327140731388376, + "grad_norm": 2.8043721056235236, + "learning_rate": 1.885265636598648e-06, + "loss": 0.8933, + "step": 5907 + }, + { + "epoch": 0.5328042566623078, + "grad_norm": 1.5301565820691077, + "learning_rate": 1.884682392982471e-06, + "loss": 0.9771, + "step": 5908 + }, + { + "epoch": 0.532894440185778, + "grad_norm": 1.7745412341131983, + "learning_rate": 1.8840991592058274e-06, + "loss": 1.0075, + "step": 5909 + }, + { + "epoch": 0.5329846237092484, + "grad_norm": 1.2308951471639522, + "learning_rate": 1.8835159353184828e-06, + "loss": 0.9614, + "step": 5910 + }, + { + "epoch": 0.5330748072327186, + "grad_norm": 1.4543086327859545, + "learning_rate": 1.8829327213702013e-06, + "loss": 1.0119, + "step": 5911 + }, + { + "epoch": 0.5331649907561888, + "grad_norm": 1.394481198564003, + "learning_rate": 1.8823495174107452e-06, + "loss": 1.0107, + "step": 5912 + }, + { + "epoch": 0.5332551742796591, + "grad_norm": 1.6180662264856334, + "learning_rate": 1.8817663234898773e-06, + "loss": 0.9174, + "step": 5913 + }, + { + "epoch": 0.5333453578031294, + "grad_norm": 1.4590966361491708, + "learning_rate": 1.881183139657358e-06, + "loss": 0.9511, + "step": 5914 + }, + { + "epoch": 0.5334355413265996, + "grad_norm": 1.304866070433707, + "learning_rate": 1.8805999659629488e-06, + "loss": 0.9541, + 
"step": 5915 + }, + { + "epoch": 0.5335257248500699, + "grad_norm": 0.683181158621081, + "learning_rate": 1.880016802456409e-06, + "loss": 0.7638, + "step": 5916 + }, + { + "epoch": 0.5336159083735401, + "grad_norm": 1.7281611019730072, + "learning_rate": 1.8794336491874964e-06, + "loss": 0.8865, + "step": 5917 + }, + { + "epoch": 0.5337060918970105, + "grad_norm": 3.626630087719432, + "learning_rate": 1.8788505062059708e-06, + "loss": 0.9106, + "step": 5918 + }, + { + "epoch": 0.5337962754204807, + "grad_norm": 1.3037308252082112, + "learning_rate": 1.8782673735615869e-06, + "loss": 0.9332, + "step": 5919 + }, + { + "epoch": 0.5338864589439509, + "grad_norm": 1.4952594233453362, + "learning_rate": 1.8776842513041026e-06, + "loss": 0.9033, + "step": 5920 + }, + { + "epoch": 0.5339766424674212, + "grad_norm": 1.6606084948126163, + "learning_rate": 1.8771011394832727e-06, + "loss": 0.9113, + "step": 5921 + }, + { + "epoch": 0.5340668259908915, + "grad_norm": 1.4513066718234713, + "learning_rate": 1.8765180381488501e-06, + "loss": 0.9259, + "step": 5922 + }, + { + "epoch": 0.5341570095143617, + "grad_norm": 0.7864525944245655, + "learning_rate": 1.8759349473505905e-06, + "loss": 0.9021, + "step": 5923 + }, + { + "epoch": 0.534247193037832, + "grad_norm": 1.5209673295584716, + "learning_rate": 1.8753518671382447e-06, + "loss": 0.8758, + "step": 5924 + }, + { + "epoch": 0.5343373765613022, + "grad_norm": 1.2272953832506717, + "learning_rate": 1.8747687975615649e-06, + "loss": 0.9242, + "step": 5925 + }, + { + "epoch": 0.5344275600847725, + "grad_norm": 1.4333768675351104, + "learning_rate": 1.874185738670302e-06, + "loss": 0.8422, + "step": 5926 + }, + { + "epoch": 0.5345177436082428, + "grad_norm": 1.319736106604322, + "learning_rate": 1.8736026905142057e-06, + "loss": 0.923, + "step": 5927 + }, + { + "epoch": 0.534607927131713, + "grad_norm": 1.3427263397280003, + "learning_rate": 1.873019653143025e-06, + "loss": 0.9176, + "step": 5928 + }, + { + "epoch": 0.5346981106551832, + "grad_norm": 1.3256590445758745, + "learning_rate": 1.8724366266065069e-06, + "loss": 0.9542, + "step": 5929 + }, + { + "epoch": 0.5347882941786536, + "grad_norm": 1.5666187170820838, + "learning_rate": 1.8718536109543998e-06, + "loss": 1.0096, + "step": 5930 + }, + { + "epoch": 0.5348784777021238, + "grad_norm": 1.386042728321413, + "learning_rate": 1.8712706062364485e-06, + "loss": 0.9526, + "step": 5931 + }, + { + "epoch": 0.534968661225594, + "grad_norm": 1.428477875166675, + "learning_rate": 1.8706876125024e-06, + "loss": 0.8351, + "step": 5932 + }, + { + "epoch": 0.5350588447490644, + "grad_norm": 1.559478890851281, + "learning_rate": 1.870104629801997e-06, + "loss": 0.9811, + "step": 5933 + }, + { + "epoch": 0.5351490282725346, + "grad_norm": 1.993988898144027, + "learning_rate": 1.8695216581849823e-06, + "loss": 0.9581, + "step": 5934 + }, + { + "epoch": 0.5352392117960049, + "grad_norm": 1.3361419610506442, + "learning_rate": 1.8689386977011003e-06, + "loss": 0.9128, + "step": 5935 + }, + { + "epoch": 0.5353293953194751, + "grad_norm": 0.7568601936637211, + "learning_rate": 1.8683557484000903e-06, + "loss": 0.7968, + "step": 5936 + }, + { + "epoch": 0.5354195788429454, + "grad_norm": 1.4485361800173773, + "learning_rate": 1.8677728103316947e-06, + "loss": 0.9728, + "step": 5937 + }, + { + "epoch": 0.5355097623664157, + "grad_norm": 1.2140517029349664, + "learning_rate": 1.8671898835456518e-06, + "loss": 0.8993, + "step": 5938 + }, + { + "epoch": 0.5355999458898859, + "grad_norm": 1.41534332100328, + 
"learning_rate": 1.8666069680917003e-06, + "loss": 1.0429, + "step": 5939 + }, + { + "epoch": 0.5356901294133561, + "grad_norm": 1.610175146799935, + "learning_rate": 1.8660240640195775e-06, + "loss": 0.9013, + "step": 5940 + }, + { + "epoch": 0.5357803129368265, + "grad_norm": 1.2499908446930346, + "learning_rate": 1.8654411713790203e-06, + "loss": 0.9788, + "step": 5941 + }, + { + "epoch": 0.5358704964602967, + "grad_norm": 1.601307992949608, + "learning_rate": 1.8648582902197648e-06, + "loss": 0.9655, + "step": 5942 + }, + { + "epoch": 0.535960679983767, + "grad_norm": 1.6863919611643337, + "learning_rate": 1.8642754205915452e-06, + "loss": 0.9507, + "step": 5943 + }, + { + "epoch": 0.5360508635072372, + "grad_norm": 1.5750957914014163, + "learning_rate": 1.8636925625440943e-06, + "loss": 0.8946, + "step": 5944 + }, + { + "epoch": 0.5361410470307075, + "grad_norm": 1.6731262515964322, + "learning_rate": 1.863109716127146e-06, + "loss": 0.8572, + "step": 5945 + }, + { + "epoch": 0.5362312305541778, + "grad_norm": 1.259714950606895, + "learning_rate": 1.8625268813904311e-06, + "loss": 0.9862, + "step": 5946 + }, + { + "epoch": 0.536321414077648, + "grad_norm": 2.1609947963794935, + "learning_rate": 1.8619440583836814e-06, + "loss": 0.9656, + "step": 5947 + }, + { + "epoch": 0.5364115976011182, + "grad_norm": 1.4544809229580922, + "learning_rate": 1.8613612471566249e-06, + "loss": 0.9462, + "step": 5948 + }, + { + "epoch": 0.5365017811245886, + "grad_norm": 1.5325815289266038, + "learning_rate": 1.8607784477589922e-06, + "loss": 0.89, + "step": 5949 + }, + { + "epoch": 0.5365919646480588, + "grad_norm": 1.3917075240634398, + "learning_rate": 1.8601956602405103e-06, + "loss": 0.966, + "step": 5950 + }, + { + "epoch": 0.536682148171529, + "grad_norm": 1.3639900529112805, + "learning_rate": 1.8596128846509043e-06, + "loss": 0.9854, + "step": 5951 + }, + { + "epoch": 0.5367723316949993, + "grad_norm": 1.3444064998438938, + "learning_rate": 1.859030121039902e-06, + "loss": 0.8966, + "step": 5952 + }, + { + "epoch": 0.5368625152184696, + "grad_norm": 1.3349166003261608, + "learning_rate": 1.8584473694572268e-06, + "loss": 0.8928, + "step": 5953 + }, + { + "epoch": 0.5369526987419398, + "grad_norm": 1.480042086595676, + "learning_rate": 1.8578646299526026e-06, + "loss": 0.9686, + "step": 5954 + }, + { + "epoch": 0.5370428822654101, + "grad_norm": 1.3145321599770592, + "learning_rate": 1.8572819025757518e-06, + "loss": 0.9105, + "step": 5955 + }, + { + "epoch": 0.5371330657888804, + "grad_norm": 1.4329769460310398, + "learning_rate": 1.8566991873763959e-06, + "loss": 0.9614, + "step": 5956 + }, + { + "epoch": 0.5372232493123507, + "grad_norm": 1.6258058750500222, + "learning_rate": 1.856116484404256e-06, + "loss": 0.8913, + "step": 5957 + }, + { + "epoch": 0.5373134328358209, + "grad_norm": 1.4700178801818022, + "learning_rate": 1.8555337937090502e-06, + "loss": 0.9054, + "step": 5958 + }, + { + "epoch": 0.5374036163592911, + "grad_norm": 1.6181227333993018, + "learning_rate": 1.8549511153404984e-06, + "loss": 0.9202, + "step": 5959 + }, + { + "epoch": 0.5374937998827615, + "grad_norm": 1.491393192447914, + "learning_rate": 1.854368449348317e-06, + "loss": 1.0525, + "step": 5960 + }, + { + "epoch": 0.5375839834062317, + "grad_norm": 1.443895707477398, + "learning_rate": 1.853785795782222e-06, + "loss": 0.908, + "step": 5961 + }, + { + "epoch": 0.5376741669297019, + "grad_norm": 1.4508807992958135, + "learning_rate": 1.85320315469193e-06, + "loss": 0.8076, + "step": 5962 + }, + { + "epoch": 
0.5377643504531722, + "grad_norm": 0.8036999614795781, + "learning_rate": 1.8526205261271534e-06, + "loss": 0.8392, + "step": 5963 + }, + { + "epoch": 0.5378545339766425, + "grad_norm": 1.3135740789921446, + "learning_rate": 1.852037910137607e-06, + "loss": 0.8575, + "step": 5964 + }, + { + "epoch": 0.5379447175001127, + "grad_norm": 1.2250889473366469, + "learning_rate": 1.851455306773002e-06, + "loss": 1.0309, + "step": 5965 + }, + { + "epoch": 0.538034901023583, + "grad_norm": 1.227557319499277, + "learning_rate": 1.8508727160830483e-06, + "loss": 0.9181, + "step": 5966 + }, + { + "epoch": 0.5381250845470532, + "grad_norm": 1.3838229178605967, + "learning_rate": 1.8502901381174575e-06, + "loss": 1.0114, + "step": 5967 + }, + { + "epoch": 0.5382152680705236, + "grad_norm": 1.5603369427200313, + "learning_rate": 1.8497075729259372e-06, + "loss": 0.847, + "step": 5968 + }, + { + "epoch": 0.5383054515939938, + "grad_norm": 0.8515918271675532, + "learning_rate": 1.8491250205581963e-06, + "loss": 0.8841, + "step": 5969 + }, + { + "epoch": 0.538395635117464, + "grad_norm": 1.486302940766847, + "learning_rate": 1.8485424810639393e-06, + "loss": 0.9418, + "step": 5970 + }, + { + "epoch": 0.5384858186409343, + "grad_norm": 1.71276270389815, + "learning_rate": 1.847959954492874e-06, + "loss": 0.9969, + "step": 5971 + }, + { + "epoch": 0.5385760021644046, + "grad_norm": 1.5905536688078634, + "learning_rate": 1.8473774408947035e-06, + "loss": 1.0353, + "step": 5972 + }, + { + "epoch": 0.5386661856878748, + "grad_norm": 0.8419338389270273, + "learning_rate": 1.8467949403191308e-06, + "loss": 0.7939, + "step": 5973 + }, + { + "epoch": 0.5387563692113451, + "grad_norm": 1.406712307701795, + "learning_rate": 1.8462124528158592e-06, + "loss": 0.9935, + "step": 5974 + }, + { + "epoch": 0.5388465527348153, + "grad_norm": 1.353748428118384, + "learning_rate": 1.8456299784345881e-06, + "loss": 0.9264, + "step": 5975 + }, + { + "epoch": 0.5389367362582856, + "grad_norm": 1.2959485249157323, + "learning_rate": 1.8450475172250194e-06, + "loss": 0.9668, + "step": 5976 + }, + { + "epoch": 0.5390269197817559, + "grad_norm": 1.4063086603433608, + "learning_rate": 1.844465069236851e-06, + "loss": 0.9225, + "step": 5977 + }, + { + "epoch": 0.5391171033052261, + "grad_norm": 1.4097315182977026, + "learning_rate": 1.8438826345197796e-06, + "loss": 0.8577, + "step": 5978 + }, + { + "epoch": 0.5392072868286965, + "grad_norm": 1.9700327130811157, + "learning_rate": 1.8433002131235036e-06, + "loss": 0.9408, + "step": 5979 + }, + { + "epoch": 0.5392974703521667, + "grad_norm": 1.4359437562559234, + "learning_rate": 1.8427178050977167e-06, + "loss": 0.9673, + "step": 5980 + }, + { + "epoch": 0.5393876538756369, + "grad_norm": 1.2940545262724625, + "learning_rate": 1.8421354104921143e-06, + "loss": 1.0236, + "step": 5981 + }, + { + "epoch": 0.5394778373991072, + "grad_norm": 0.7087544049861131, + "learning_rate": 1.8415530293563894e-06, + "loss": 0.7824, + "step": 5982 + }, + { + "epoch": 0.5395680209225775, + "grad_norm": 1.320278641306102, + "learning_rate": 1.8409706617402333e-06, + "loss": 0.916, + "step": 5983 + }, + { + "epoch": 0.5396582044460477, + "grad_norm": 1.2784785091934645, + "learning_rate": 1.8403883076933378e-06, + "loss": 0.9615, + "step": 5984 + }, + { + "epoch": 0.539748387969518, + "grad_norm": 1.5038563905848217, + "learning_rate": 1.839805967265391e-06, + "loss": 0.9325, + "step": 5985 + }, + { + "epoch": 0.5398385714929882, + "grad_norm": 1.4031052500942693, + "learning_rate": 1.839223640506083e-06, + 
"loss": 1.0021, + "step": 5986 + }, + { + "epoch": 0.5399287550164585, + "grad_norm": 1.600616547883585, + "learning_rate": 1.8386413274650998e-06, + "loss": 0.9416, + "step": 5987 + }, + { + "epoch": 0.5400189385399288, + "grad_norm": 1.3645277036242722, + "learning_rate": 1.8380590281921294e-06, + "loss": 0.9871, + "step": 5988 + }, + { + "epoch": 0.540109122063399, + "grad_norm": 1.342286355193967, + "learning_rate": 1.8374767427368552e-06, + "loss": 0.8534, + "step": 5989 + }, + { + "epoch": 0.5401993055868692, + "grad_norm": 5.454246952096423, + "learning_rate": 1.8368944711489608e-06, + "loss": 0.9722, + "step": 5990 + }, + { + "epoch": 0.5402894891103396, + "grad_norm": 1.6220046092672273, + "learning_rate": 1.8363122134781304e-06, + "loss": 0.9697, + "step": 5991 + }, + { + "epoch": 0.5403796726338098, + "grad_norm": 1.5051342199046498, + "learning_rate": 1.835729969774044e-06, + "loss": 0.9672, + "step": 5992 + }, + { + "epoch": 0.54046985615728, + "grad_norm": 1.2828061492799894, + "learning_rate": 1.8351477400863823e-06, + "loss": 0.9495, + "step": 5993 + }, + { + "epoch": 0.5405600396807503, + "grad_norm": 1.4297465327848338, + "learning_rate": 1.8345655244648249e-06, + "loss": 0.926, + "step": 5994 + }, + { + "epoch": 0.5406502232042206, + "grad_norm": 1.1672190425752227, + "learning_rate": 1.8339833229590486e-06, + "loss": 0.9341, + "step": 5995 + }, + { + "epoch": 0.5407404067276909, + "grad_norm": 1.3273524112879107, + "learning_rate": 1.833401135618731e-06, + "loss": 1.0208, + "step": 5996 + }, + { + "epoch": 0.5408305902511611, + "grad_norm": 1.360595812644386, + "learning_rate": 1.8328189624935466e-06, + "loss": 0.8733, + "step": 5997 + }, + { + "epoch": 0.5409207737746313, + "grad_norm": 1.3477236081304091, + "learning_rate": 1.832236803633171e-06, + "loss": 0.9687, + "step": 5998 + }, + { + "epoch": 0.5410109572981017, + "grad_norm": 1.8564431934454964, + "learning_rate": 1.831654659087276e-06, + "loss": 0.9123, + "step": 5999 + }, + { + "epoch": 0.5411011408215719, + "grad_norm": 1.3952361390341752, + "learning_rate": 1.831072528905533e-06, + "loss": 0.9588, + "step": 6000 + }, + { + "epoch": 0.5411913243450421, + "grad_norm": 1.8163364848203314, + "learning_rate": 1.8304904131376142e-06, + "loss": 0.9958, + "step": 6001 + }, + { + "epoch": 0.5412815078685124, + "grad_norm": 1.4745071072515477, + "learning_rate": 1.8299083118331874e-06, + "loss": 0.9528, + "step": 6002 + }, + { + "epoch": 0.5413716913919827, + "grad_norm": 1.4838972427080332, + "learning_rate": 1.8293262250419217e-06, + "loss": 1.0822, + "step": 6003 + }, + { + "epoch": 0.541461874915453, + "grad_norm": 1.6019811315723091, + "learning_rate": 1.828744152813484e-06, + "loss": 1.0553, + "step": 6004 + }, + { + "epoch": 0.5415520584389232, + "grad_norm": 1.4815775159112285, + "learning_rate": 1.8281620951975382e-06, + "loss": 0.8974, + "step": 6005 + }, + { + "epoch": 0.5416422419623935, + "grad_norm": 1.460728431470611, + "learning_rate": 1.827580052243751e-06, + "loss": 0.916, + "step": 6006 + }, + { + "epoch": 0.5417324254858638, + "grad_norm": 1.2702234816475633, + "learning_rate": 1.826998024001784e-06, + "loss": 0.9773, + "step": 6007 + }, + { + "epoch": 0.541822609009334, + "grad_norm": 1.968350748420612, + "learning_rate": 1.8264160105212995e-06, + "loss": 0.9699, + "step": 6008 + }, + { + "epoch": 0.5419127925328042, + "grad_norm": 1.5867887570720662, + "learning_rate": 1.8258340118519582e-06, + "loss": 0.9101, + "step": 6009 + }, + { + "epoch": 0.5420029760562746, + "grad_norm": 
1.9445261764000392, + "learning_rate": 1.82525202804342e-06, + "loss": 0.9289, + "step": 6010 + }, + { + "epoch": 0.5420931595797448, + "grad_norm": 1.5360784890863453, + "learning_rate": 1.8246700591453415e-06, + "loss": 0.9223, + "step": 6011 + }, + { + "epoch": 0.542183343103215, + "grad_norm": 1.5508166868835687, + "learning_rate": 1.8240881052073801e-06, + "loss": 0.9418, + "step": 6012 + }, + { + "epoch": 0.5422735266266853, + "grad_norm": 1.4606278851292476, + "learning_rate": 1.8235061662791923e-06, + "loss": 0.9159, + "step": 6013 + }, + { + "epoch": 0.5423637101501556, + "grad_norm": 1.492110405173631, + "learning_rate": 1.8229242424104309e-06, + "loss": 1.0133, + "step": 6014 + }, + { + "epoch": 0.5424538936736258, + "grad_norm": 0.6698383690751615, + "learning_rate": 1.8223423336507503e-06, + "loss": 0.7852, + "step": 6015 + }, + { + "epoch": 0.5425440771970961, + "grad_norm": 2.447114420708012, + "learning_rate": 1.8217604400498012e-06, + "loss": 0.9319, + "step": 6016 + }, + { + "epoch": 0.5426342607205663, + "grad_norm": 1.2665720092808002, + "learning_rate": 1.8211785616572333e-06, + "loss": 0.9868, + "step": 6017 + }, + { + "epoch": 0.5427244442440367, + "grad_norm": 1.4746889206608038, + "learning_rate": 1.8205966985226975e-06, + "loss": 1.0156, + "step": 6018 + }, + { + "epoch": 0.5428146277675069, + "grad_norm": 1.2193019301881094, + "learning_rate": 1.8200148506958397e-06, + "loss": 0.9527, + "step": 6019 + }, + { + "epoch": 0.5429048112909771, + "grad_norm": 0.7411328099907805, + "learning_rate": 1.819433018226308e-06, + "loss": 0.8345, + "step": 6020 + }, + { + "epoch": 0.5429949948144474, + "grad_norm": 3.8184320545352826, + "learning_rate": 1.8188512011637471e-06, + "loss": 0.7666, + "step": 6021 + }, + { + "epoch": 0.5430851783379177, + "grad_norm": 2.395972261685404, + "learning_rate": 1.8182693995578e-06, + "loss": 0.9255, + "step": 6022 + }, + { + "epoch": 0.5431753618613879, + "grad_norm": 1.1524680894914803, + "learning_rate": 1.8176876134581098e-06, + "loss": 0.8226, + "step": 6023 + }, + { + "epoch": 0.5432655453848582, + "grad_norm": 1.4644094408245176, + "learning_rate": 1.8171058429143176e-06, + "loss": 0.8887, + "step": 6024 + }, + { + "epoch": 0.5433557289083284, + "grad_norm": 1.292477609525313, + "learning_rate": 1.8165240879760637e-06, + "loss": 0.9823, + "step": 6025 + }, + { + "epoch": 0.5434459124317987, + "grad_norm": 1.2607493263710305, + "learning_rate": 1.8159423486929862e-06, + "loss": 0.9722, + "step": 6026 + }, + { + "epoch": 0.543536095955269, + "grad_norm": 1.304894573659414, + "learning_rate": 1.815360625114722e-06, + "loss": 0.9403, + "step": 6027 + }, + { + "epoch": 0.5436262794787392, + "grad_norm": 1.519127407197937, + "learning_rate": 1.814778917290908e-06, + "loss": 0.9335, + "step": 6028 + }, + { + "epoch": 0.5437164630022095, + "grad_norm": 1.5226619213423356, + "learning_rate": 1.8141972252711773e-06, + "loss": 0.9575, + "step": 6029 + }, + { + "epoch": 0.5438066465256798, + "grad_norm": 1.5674000771780847, + "learning_rate": 1.8136155491051645e-06, + "loss": 0.9381, + "step": 6030 + }, + { + "epoch": 0.54389683004915, + "grad_norm": 0.8046707966376413, + "learning_rate": 1.8130338888424998e-06, + "loss": 0.8482, + "step": 6031 + }, + { + "epoch": 0.5439870135726202, + "grad_norm": 2.255278858612065, + "learning_rate": 1.812452244532816e-06, + "loss": 0.9633, + "step": 6032 + }, + { + "epoch": 0.5440771970960906, + "grad_norm": 1.414034131856223, + "learning_rate": 1.8118706162257405e-06, + "loss": 0.8882, + "step": 6033 + }, + 
{ + "epoch": 0.5441673806195608, + "grad_norm": 1.3091159974087976, + "learning_rate": 1.8112890039709002e-06, + "loss": 1.0242, + "step": 6034 + }, + { + "epoch": 0.5442575641430311, + "grad_norm": 1.4399010881255716, + "learning_rate": 1.8107074078179238e-06, + "loss": 0.945, + "step": 6035 + }, + { + "epoch": 0.5443477476665013, + "grad_norm": 1.5455673547616287, + "learning_rate": 1.8101258278164348e-06, + "loss": 0.9024, + "step": 6036 + }, + { + "epoch": 0.5444379311899716, + "grad_norm": 1.5223484156773082, + "learning_rate": 1.8095442640160575e-06, + "loss": 1.0467, + "step": 6037 + }, + { + "epoch": 0.5445281147134419, + "grad_norm": 2.872743301379808, + "learning_rate": 1.8089627164664132e-06, + "loss": 0.9101, + "step": 6038 + }, + { + "epoch": 0.5446182982369121, + "grad_norm": 1.1752972794926397, + "learning_rate": 1.8083811852171233e-06, + "loss": 0.8631, + "step": 6039 + }, + { + "epoch": 0.5447084817603823, + "grad_norm": 1.3991638513612095, + "learning_rate": 1.8077996703178078e-06, + "loss": 0.9444, + "step": 6040 + }, + { + "epoch": 0.5447986652838527, + "grad_norm": 1.3657239655483981, + "learning_rate": 1.8072181718180833e-06, + "loss": 0.9028, + "step": 6041 + }, + { + "epoch": 0.5448888488073229, + "grad_norm": 1.4615941331469002, + "learning_rate": 1.806636689767568e-06, + "loss": 1.0292, + "step": 6042 + }, + { + "epoch": 0.5449790323307931, + "grad_norm": 1.2872997368920083, + "learning_rate": 1.8060552242158769e-06, + "loss": 0.8828, + "step": 6043 + }, + { + "epoch": 0.5450692158542634, + "grad_norm": 1.408564464330245, + "learning_rate": 1.8054737752126224e-06, + "loss": 0.9195, + "step": 6044 + }, + { + "epoch": 0.5451593993777337, + "grad_norm": 2.134089943210377, + "learning_rate": 1.804892342807419e-06, + "loss": 0.9379, + "step": 6045 + }, + { + "epoch": 0.545249582901204, + "grad_norm": 1.8511484705292083, + "learning_rate": 1.8043109270498756e-06, + "loss": 1.0103, + "step": 6046 + }, + { + "epoch": 0.5453397664246742, + "grad_norm": 1.240630607688395, + "learning_rate": 1.803729527989604e-06, + "loss": 0.9551, + "step": 6047 + }, + { + "epoch": 0.5454299499481444, + "grad_norm": 1.354569898382465, + "learning_rate": 1.8031481456762112e-06, + "loss": 0.9382, + "step": 6048 + }, + { + "epoch": 0.5455201334716148, + "grad_norm": 1.5183683912805925, + "learning_rate": 1.8025667801593033e-06, + "loss": 0.8703, + "step": 6049 + }, + { + "epoch": 0.545610316995085, + "grad_norm": 2.614953023805863, + "learning_rate": 1.8019854314884871e-06, + "loss": 0.8897, + "step": 6050 + }, + { + "epoch": 0.5457005005185552, + "grad_norm": 1.344934961242563, + "learning_rate": 1.8014040997133652e-06, + "loss": 0.9978, + "step": 6051 + }, + { + "epoch": 0.5457906840420256, + "grad_norm": 0.9066940403644089, + "learning_rate": 1.8008227848835414e-06, + "loss": 0.8441, + "step": 6052 + }, + { + "epoch": 0.5458808675654958, + "grad_norm": 2.0706434453205422, + "learning_rate": 1.8002414870486144e-06, + "loss": 0.8933, + "step": 6053 + }, + { + "epoch": 0.545971051088966, + "grad_norm": 1.1586395522136017, + "learning_rate": 1.7996602062581864e-06, + "loss": 0.9021, + "step": 6054 + }, + { + "epoch": 0.5460612346124363, + "grad_norm": 1.4883755231020044, + "learning_rate": 1.7990789425618544e-06, + "loss": 0.9941, + "step": 6055 + }, + { + "epoch": 0.5461514181359066, + "grad_norm": 1.219914295907102, + "learning_rate": 1.7984976960092137e-06, + "loss": 0.9236, + "step": 6056 + }, + { + "epoch": 0.5462416016593769, + "grad_norm": 1.595769388230777, + "learning_rate": 
1.7979164666498617e-06, + "loss": 0.9076, + "step": 6057 + }, + { + "epoch": 0.5463317851828471, + "grad_norm": 0.715296450826937, + "learning_rate": 1.7973352545333901e-06, + "loss": 0.7633, + "step": 6058 + }, + { + "epoch": 0.5464219687063173, + "grad_norm": 2.0435711937009597, + "learning_rate": 1.796754059709393e-06, + "loss": 0.8634, + "step": 6059 + }, + { + "epoch": 0.5465121522297877, + "grad_norm": 1.3537855003119206, + "learning_rate": 1.7961728822274603e-06, + "loss": 1.0019, + "step": 6060 + }, + { + "epoch": 0.5466023357532579, + "grad_norm": 1.3816123107476541, + "learning_rate": 1.7955917221371802e-06, + "loss": 0.8962, + "step": 6061 + }, + { + "epoch": 0.5466925192767281, + "grad_norm": 1.6575778460741741, + "learning_rate": 1.7950105794881422e-06, + "loss": 0.8594, + "step": 6062 + }, + { + "epoch": 0.5467827028001984, + "grad_norm": 1.233991877335906, + "learning_rate": 1.7944294543299317e-06, + "loss": 0.9294, + "step": 6063 + }, + { + "epoch": 0.5468728863236687, + "grad_norm": 1.7836036524788073, + "learning_rate": 1.7938483467121333e-06, + "loss": 0.9951, + "step": 6064 + }, + { + "epoch": 0.5469630698471389, + "grad_norm": 1.5041307157973598, + "learning_rate": 1.7932672566843313e-06, + "loss": 0.9217, + "step": 6065 + }, + { + "epoch": 0.5470532533706092, + "grad_norm": 1.3920110588566192, + "learning_rate": 1.7926861842961065e-06, + "loss": 0.8662, + "step": 6066 + }, + { + "epoch": 0.5471434368940794, + "grad_norm": 1.3256693858584865, + "learning_rate": 1.7921051295970399e-06, + "loss": 0.866, + "step": 6067 + }, + { + "epoch": 0.5472336204175497, + "grad_norm": 1.4146598270878914, + "learning_rate": 1.7915240926367092e-06, + "loss": 0.9074, + "step": 6068 + }, + { + "epoch": 0.54732380394102, + "grad_norm": 1.2436398347151378, + "learning_rate": 1.7909430734646932e-06, + "loss": 0.9915, + "step": 6069 + }, + { + "epoch": 0.5474139874644902, + "grad_norm": 0.8275299093367338, + "learning_rate": 1.790362072130567e-06, + "loss": 0.8345, + "step": 6070 + }, + { + "epoch": 0.5475041709879604, + "grad_norm": 1.5690531829028576, + "learning_rate": 1.7897810886839037e-06, + "loss": 0.9386, + "step": 6071 + }, + { + "epoch": 0.5475943545114308, + "grad_norm": 1.6225830590570143, + "learning_rate": 1.7892001231742782e-06, + "loss": 0.957, + "step": 6072 + }, + { + "epoch": 0.547684538034901, + "grad_norm": 1.480900281352714, + "learning_rate": 1.7886191756512594e-06, + "loss": 0.9546, + "step": 6073 + }, + { + "epoch": 0.5477747215583713, + "grad_norm": 1.310652249643724, + "learning_rate": 1.7880382461644192e-06, + "loss": 1.0227, + "step": 6074 + }, + { + "epoch": 0.5478649050818416, + "grad_norm": 1.4592174665553854, + "learning_rate": 1.7874573347633235e-06, + "loss": 0.9264, + "step": 6075 + }, + { + "epoch": 0.5479550886053118, + "grad_norm": 1.4681073669959674, + "learning_rate": 1.7868764414975408e-06, + "loss": 1.0141, + "step": 6076 + }, + { + "epoch": 0.5480452721287821, + "grad_norm": 1.641765960685483, + "learning_rate": 1.7862955664166353e-06, + "loss": 0.883, + "step": 6077 + }, + { + "epoch": 0.5481354556522523, + "grad_norm": 3.1088796249747284, + "learning_rate": 1.78571470957017e-06, + "loss": 1.0268, + "step": 6078 + }, + { + "epoch": 0.5482256391757226, + "grad_norm": 1.4626389657535537, + "learning_rate": 1.7851338710077074e-06, + "loss": 1.0065, + "step": 6079 + }, + { + "epoch": 0.5483158226991929, + "grad_norm": 1.5085130556355246, + "learning_rate": 1.7845530507788076e-06, + "loss": 1.0456, + "step": 6080 + }, + { + "epoch": 0.5484060062226631, 
+ "grad_norm": 1.5114922563289508, + "learning_rate": 1.7839722489330298e-06, + "loss": 0.9159, + "step": 6081 + }, + { + "epoch": 0.5484961897461333, + "grad_norm": 1.4078415659148982, + "learning_rate": 1.7833914655199308e-06, + "loss": 0.9168, + "step": 6082 + }, + { + "epoch": 0.5485863732696037, + "grad_norm": 1.5006289752874187, + "learning_rate": 1.7828107005890658e-06, + "loss": 0.9984, + "step": 6083 + }, + { + "epoch": 0.5486765567930739, + "grad_norm": 1.541871813278709, + "learning_rate": 1.7822299541899898e-06, + "loss": 0.8599, + "step": 6084 + }, + { + "epoch": 0.5487667403165442, + "grad_norm": 1.3742871170430493, + "learning_rate": 1.7816492263722545e-06, + "loss": 0.945, + "step": 6085 + }, + { + "epoch": 0.5488569238400144, + "grad_norm": 1.496347191760383, + "learning_rate": 1.781068517185412e-06, + "loss": 0.9694, + "step": 6086 + }, + { + "epoch": 0.5489471073634847, + "grad_norm": 1.3386208873218606, + "learning_rate": 1.7804878266790104e-06, + "loss": 1.0111, + "step": 6087 + }, + { + "epoch": 0.549037290886955, + "grad_norm": 1.2525613292366748, + "learning_rate": 1.779907154902597e-06, + "loss": 0.8772, + "step": 6088 + }, + { + "epoch": 0.5491274744104252, + "grad_norm": 1.2656023470888185, + "learning_rate": 1.7793265019057198e-06, + "loss": 0.871, + "step": 6089 + }, + { + "epoch": 0.5492176579338954, + "grad_norm": 0.9229599339064278, + "learning_rate": 1.7787458677379212e-06, + "loss": 0.8229, + "step": 6090 + }, + { + "epoch": 0.5493078414573658, + "grad_norm": 1.689729700988399, + "learning_rate": 1.7781652524487463e-06, + "loss": 1.0046, + "step": 6091 + }, + { + "epoch": 0.549398024980836, + "grad_norm": 1.372564803645126, + "learning_rate": 1.777584656087735e-06, + "loss": 0.8941, + "step": 6092 + }, + { + "epoch": 0.5494882085043062, + "grad_norm": 1.4901879779597478, + "learning_rate": 1.777004078704427e-06, + "loss": 0.8657, + "step": 6093 + }, + { + "epoch": 0.5495783920277765, + "grad_norm": 1.479094015495362, + "learning_rate": 1.7764235203483603e-06, + "loss": 0.9646, + "step": 6094 + }, + { + "epoch": 0.5496685755512468, + "grad_norm": 1.5769126692244164, + "learning_rate": 1.775842981069072e-06, + "loss": 0.9137, + "step": 6095 + }, + { + "epoch": 0.549758759074717, + "grad_norm": 1.761161428462099, + "learning_rate": 1.7752624609160966e-06, + "loss": 0.9487, + "step": 6096 + }, + { + "epoch": 0.5498489425981873, + "grad_norm": 1.7062026048722, + "learning_rate": 1.7746819599389665e-06, + "loss": 0.9853, + "step": 6097 + }, + { + "epoch": 0.5499391261216575, + "grad_norm": 1.2948341397444043, + "learning_rate": 1.774101478187215e-06, + "loss": 0.9441, + "step": 6098 + }, + { + "epoch": 0.5500293096451279, + "grad_norm": 1.624899274198525, + "learning_rate": 1.773521015710371e-06, + "loss": 1.0121, + "step": 6099 + }, + { + "epoch": 0.5501194931685981, + "grad_norm": 1.6236145275476321, + "learning_rate": 1.7729405725579614e-06, + "loss": 1.0232, + "step": 6100 + }, + { + "epoch": 0.5502096766920683, + "grad_norm": 1.2467818318600927, + "learning_rate": 1.7723601487795151e-06, + "loss": 0.9189, + "step": 6101 + }, + { + "epoch": 0.5502998602155387, + "grad_norm": 1.2373917329995747, + "learning_rate": 1.7717797444245557e-06, + "loss": 0.8974, + "step": 6102 + }, + { + "epoch": 0.5503900437390089, + "grad_norm": 1.2033461330363573, + "learning_rate": 1.7711993595426076e-06, + "loss": 0.8751, + "step": 6103 + }, + { + "epoch": 0.5504802272624791, + "grad_norm": 1.594026952877025, + "learning_rate": 1.7706189941831915e-06, + "loss": 1.0542, + 
"step": 6104 + }, + { + "epoch": 0.5505704107859494, + "grad_norm": 1.5453436621753311, + "learning_rate": 1.770038648395827e-06, + "loss": 0.9065, + "step": 6105 + }, + { + "epoch": 0.5506605943094197, + "grad_norm": 0.674353773226689, + "learning_rate": 1.7694583222300336e-06, + "loss": 0.8235, + "step": 6106 + }, + { + "epoch": 0.55075077783289, + "grad_norm": 1.456343255720318, + "learning_rate": 1.7688780157353272e-06, + "loss": 0.9249, + "step": 6107 + }, + { + "epoch": 0.5508409613563602, + "grad_norm": 1.4414575252355184, + "learning_rate": 1.768297728961223e-06, + "loss": 0.8891, + "step": 6108 + }, + { + "epoch": 0.5509311448798304, + "grad_norm": 1.4012738665301603, + "learning_rate": 1.7677174619572342e-06, + "loss": 0.9505, + "step": 6109 + }, + { + "epoch": 0.5510213284033008, + "grad_norm": 1.4770859693639107, + "learning_rate": 1.7671372147728717e-06, + "loss": 0.9603, + "step": 6110 + }, + { + "epoch": 0.551111511926771, + "grad_norm": 1.6577634554267866, + "learning_rate": 1.7665569874576471e-06, + "loss": 1.0174, + "step": 6111 + }, + { + "epoch": 0.5512016954502412, + "grad_norm": 1.6840666706361305, + "learning_rate": 1.7659767800610664e-06, + "loss": 0.9418, + "step": 6112 + }, + { + "epoch": 0.5512918789737115, + "grad_norm": 1.5484333088314794, + "learning_rate": 1.7653965926326379e-06, + "loss": 0.8532, + "step": 6113 + }, + { + "epoch": 0.5513820624971818, + "grad_norm": 1.1491254445779207, + "learning_rate": 1.764816425221866e-06, + "loss": 0.9036, + "step": 6114 + }, + { + "epoch": 0.551472246020652, + "grad_norm": 1.1699958994377857, + "learning_rate": 1.7642362778782524e-06, + "loss": 0.8631, + "step": 6115 + }, + { + "epoch": 0.5515624295441223, + "grad_norm": 1.5777910086848612, + "learning_rate": 1.7636561506513005e-06, + "loss": 0.9233, + "step": 6116 + }, + { + "epoch": 0.5516526130675925, + "grad_norm": 1.1368597932860736, + "learning_rate": 1.7630760435905083e-06, + "loss": 0.919, + "step": 6117 + }, + { + "epoch": 0.5517427965910628, + "grad_norm": 1.6010142806951304, + "learning_rate": 1.762495956745375e-06, + "loss": 0.9032, + "step": 6118 + }, + { + "epoch": 0.5518329801145331, + "grad_norm": 2.433580915928488, + "learning_rate": 1.7619158901653962e-06, + "loss": 0.9925, + "step": 6119 + }, + { + "epoch": 0.5519231636380033, + "grad_norm": 1.9683230103564697, + "learning_rate": 1.761335843900066e-06, + "loss": 0.941, + "step": 6120 + }, + { + "epoch": 0.5520133471614735, + "grad_norm": 1.2592605406286124, + "learning_rate": 1.7607558179988785e-06, + "loss": 0.9294, + "step": 6121 + }, + { + "epoch": 0.5521035306849439, + "grad_norm": 1.3748490944329395, + "learning_rate": 1.760175812511323e-06, + "loss": 0.8775, + "step": 6122 + }, + { + "epoch": 0.5521937142084141, + "grad_norm": 1.453506501683567, + "learning_rate": 1.75959582748689e-06, + "loss": 0.9, + "step": 6123 + }, + { + "epoch": 0.5522838977318844, + "grad_norm": 1.4614632216480785, + "learning_rate": 1.7590158629750657e-06, + "loss": 0.978, + "step": 6124 + }, + { + "epoch": 0.5523740812553547, + "grad_norm": 1.3472586667264679, + "learning_rate": 1.7584359190253376e-06, + "loss": 1.0021, + "step": 6125 + }, + { + "epoch": 0.5524642647788249, + "grad_norm": 1.7483687290988206, + "learning_rate": 1.7578559956871892e-06, + "loss": 1.0052, + "step": 6126 + }, + { + "epoch": 0.5525544483022952, + "grad_norm": 1.256053096845566, + "learning_rate": 1.7572760930101012e-06, + "loss": 0.9414, + "step": 6127 + }, + { + "epoch": 0.5526446318257654, + "grad_norm": 0.7601168882052903, + 
"learning_rate": 1.7566962110435563e-06, + "loss": 0.8777, + "step": 6128 + }, + { + "epoch": 0.5527348153492357, + "grad_norm": 1.193806865595962, + "learning_rate": 1.7561163498370313e-06, + "loss": 0.963, + "step": 6129 + }, + { + "epoch": 0.552824998872706, + "grad_norm": 1.3217693620320283, + "learning_rate": 1.755536509440005e-06, + "loss": 0.9088, + "step": 6130 + }, + { + "epoch": 0.5529151823961762, + "grad_norm": 1.8004642629596468, + "learning_rate": 1.7549566899019519e-06, + "loss": 0.9463, + "step": 6131 + }, + { + "epoch": 0.5530053659196464, + "grad_norm": 1.3197940508252142, + "learning_rate": 1.754376891272344e-06, + "loss": 0.9591, + "step": 6132 + }, + { + "epoch": 0.5530955494431168, + "grad_norm": 1.9317681836476637, + "learning_rate": 1.753797113600655e-06, + "loss": 0.8981, + "step": 6133 + }, + { + "epoch": 0.553185732966587, + "grad_norm": 1.4429855172216306, + "learning_rate": 1.7532173569363535e-06, + "loss": 1.0775, + "step": 6134 + }, + { + "epoch": 0.5532759164900573, + "grad_norm": 1.7306756784961714, + "learning_rate": 1.7526376213289077e-06, + "loss": 0.7685, + "step": 6135 + }, + { + "epoch": 0.5533661000135275, + "grad_norm": 1.3751660593500223, + "learning_rate": 1.7520579068277844e-06, + "loss": 0.9249, + "step": 6136 + }, + { + "epoch": 0.5534562835369978, + "grad_norm": 1.3724491560226593, + "learning_rate": 1.7514782134824472e-06, + "loss": 0.978, + "step": 6137 + }, + { + "epoch": 0.5535464670604681, + "grad_norm": 1.3441005959130807, + "learning_rate": 1.7508985413423599e-06, + "loss": 0.9444, + "step": 6138 + }, + { + "epoch": 0.5536366505839383, + "grad_norm": 1.3380447633038053, + "learning_rate": 1.7503188904569814e-06, + "loss": 1.0117, + "step": 6139 + }, + { + "epoch": 0.5537268341074085, + "grad_norm": 1.5074780345221122, + "learning_rate": 1.7497392608757728e-06, + "loss": 0.9133, + "step": 6140 + }, + { + "epoch": 0.5538170176308789, + "grad_norm": 1.2121425908092647, + "learning_rate": 1.7491596526481897e-06, + "loss": 0.9644, + "step": 6141 + }, + { + "epoch": 0.5539072011543491, + "grad_norm": 1.3067171675952898, + "learning_rate": 1.7485800658236888e-06, + "loss": 0.8745, + "step": 6142 + }, + { + "epoch": 0.5539973846778193, + "grad_norm": 1.3739935052210133, + "learning_rate": 1.7480005004517232e-06, + "loss": 1.0074, + "step": 6143 + }, + { + "epoch": 0.5540875682012896, + "grad_norm": 1.9007515951619296, + "learning_rate": 1.7474209565817435e-06, + "loss": 1.0099, + "step": 6144 + }, + { + "epoch": 0.5541777517247599, + "grad_norm": 1.480493389317145, + "learning_rate": 1.7468414342632014e-06, + "loss": 0.9462, + "step": 6145 + }, + { + "epoch": 0.5542679352482301, + "grad_norm": 1.483776654564046, + "learning_rate": 1.746261933545543e-06, + "loss": 0.9995, + "step": 6146 + }, + { + "epoch": 0.5543581187717004, + "grad_norm": 1.3289102364760073, + "learning_rate": 1.7456824544782165e-06, + "loss": 0.9448, + "step": 6147 + }, + { + "epoch": 0.5544483022951707, + "grad_norm": 1.3193534663551498, + "learning_rate": 1.7451029971106653e-06, + "loss": 0.9488, + "step": 6148 + }, + { + "epoch": 0.554538485818641, + "grad_norm": 1.2695473537890891, + "learning_rate": 1.7445235614923313e-06, + "loss": 0.9731, + "step": 6149 + }, + { + "epoch": 0.5546286693421112, + "grad_norm": 1.2828103310570171, + "learning_rate": 1.7439441476726556e-06, + "loss": 0.9393, + "step": 6150 + }, + { + "epoch": 0.5547188528655814, + "grad_norm": 1.463178904182859, + "learning_rate": 1.7433647557010776e-06, + "loss": 0.9537, + "step": 6151 + }, + { + "epoch": 
0.5548090363890518, + "grad_norm": 1.4442066807971534, + "learning_rate": 1.7427853856270338e-06, + "loss": 0.9107, + "step": 6152 + }, + { + "epoch": 0.554899219912522, + "grad_norm": 1.4397623255407925, + "learning_rate": 1.7422060374999593e-06, + "loss": 0.9045, + "step": 6153 + }, + { + "epoch": 0.5549894034359922, + "grad_norm": 1.534706030290821, + "learning_rate": 1.7416267113692862e-06, + "loss": 0.9036, + "step": 6154 + }, + { + "epoch": 0.5550795869594625, + "grad_norm": 1.2752329333086354, + "learning_rate": 1.7410474072844475e-06, + "loss": 0.9763, + "step": 6155 + }, + { + "epoch": 0.5551697704829328, + "grad_norm": 1.3139222930970322, + "learning_rate": 1.740468125294871e-06, + "loss": 0.934, + "step": 6156 + }, + { + "epoch": 0.555259954006403, + "grad_norm": 2.3066458964469296, + "learning_rate": 1.739888865449986e-06, + "loss": 0.8584, + "step": 6157 + }, + { + "epoch": 0.5553501375298733, + "grad_norm": 1.5873270946867672, + "learning_rate": 1.7393096277992174e-06, + "loss": 0.9723, + "step": 6158 + }, + { + "epoch": 0.5554403210533435, + "grad_norm": 1.3213080089758558, + "learning_rate": 1.738730412391988e-06, + "loss": 0.9582, + "step": 6159 + }, + { + "epoch": 0.5555305045768139, + "grad_norm": 1.3698874707194317, + "learning_rate": 1.738151219277721e-06, + "loss": 0.9952, + "step": 6160 + }, + { + "epoch": 0.5556206881002841, + "grad_norm": 1.4338182486642568, + "learning_rate": 1.7375720485058349e-06, + "loss": 0.8993, + "step": 6161 + }, + { + "epoch": 0.5557108716237543, + "grad_norm": 1.7581069360612593, + "learning_rate": 1.7369929001257498e-06, + "loss": 0.9636, + "step": 6162 + }, + { + "epoch": 0.5558010551472246, + "grad_norm": 1.5162393956705915, + "learning_rate": 1.73641377418688e-06, + "loss": 0.8589, + "step": 6163 + }, + { + "epoch": 0.5558912386706949, + "grad_norm": 1.4641141574991943, + "learning_rate": 1.7358346707386408e-06, + "loss": 0.8772, + "step": 6164 + }, + { + "epoch": 0.5559814221941651, + "grad_norm": 1.47477686859192, + "learning_rate": 1.7352555898304439e-06, + "loss": 0.9894, + "step": 6165 + }, + { + "epoch": 0.5560716057176354, + "grad_norm": 1.435574278277989, + "learning_rate": 1.7346765315116996e-06, + "loss": 0.8646, + "step": 6166 + }, + { + "epoch": 0.5561617892411056, + "grad_norm": 1.532614819828799, + "learning_rate": 1.734097495831817e-06, + "loss": 0.9077, + "step": 6167 + }, + { + "epoch": 0.5562519727645759, + "grad_norm": 1.432461741204507, + "learning_rate": 1.7335184828402015e-06, + "loss": 0.957, + "step": 6168 + }, + { + "epoch": 0.5563421562880462, + "grad_norm": 1.3407587869396416, + "learning_rate": 1.7329394925862595e-06, + "loss": 0.9197, + "step": 6169 + }, + { + "epoch": 0.5564323398115164, + "grad_norm": 1.4306857266917743, + "learning_rate": 1.7323605251193922e-06, + "loss": 0.9897, + "step": 6170 + }, + { + "epoch": 0.5565225233349868, + "grad_norm": 1.5059005395232858, + "learning_rate": 1.7317815804890001e-06, + "loss": 0.9661, + "step": 6171 + }, + { + "epoch": 0.556612706858457, + "grad_norm": 1.445770768379385, + "learning_rate": 1.731202658744483e-06, + "loss": 0.9031, + "step": 6172 + }, + { + "epoch": 0.5567028903819272, + "grad_norm": 1.1999361458319757, + "learning_rate": 1.7306237599352365e-06, + "loss": 1.0038, + "step": 6173 + }, + { + "epoch": 0.5567930739053975, + "grad_norm": 1.198968270948617, + "learning_rate": 1.730044884110657e-06, + "loss": 0.9274, + "step": 6174 + }, + { + "epoch": 0.5568832574288678, + "grad_norm": 1.5937806668790506, + "learning_rate": 1.7294660313201366e-06, + 
"loss": 0.9044, + "step": 6175 + }, + { + "epoch": 0.556973440952338, + "grad_norm": 1.8218517426303502, + "learning_rate": 1.7288872016130652e-06, + "loss": 0.9137, + "step": 6176 + }, + { + "epoch": 0.5570636244758083, + "grad_norm": 1.497144365065511, + "learning_rate": 1.7283083950388334e-06, + "loss": 0.8566, + "step": 6177 + }, + { + "epoch": 0.5571538079992785, + "grad_norm": 1.3805490708961052, + "learning_rate": 1.727729611646827e-06, + "loss": 0.8482, + "step": 6178 + }, + { + "epoch": 0.5572439915227488, + "grad_norm": 1.3971472814031856, + "learning_rate": 1.7271508514864318e-06, + "loss": 1.0016, + "step": 6179 + }, + { + "epoch": 0.5573341750462191, + "grad_norm": 1.50514808015003, + "learning_rate": 1.7265721146070302e-06, + "loss": 0.9497, + "step": 6180 + }, + { + "epoch": 0.5574243585696893, + "grad_norm": 1.3974580798063243, + "learning_rate": 1.7259934010580035e-06, + "loss": 0.9453, + "step": 6181 + }, + { + "epoch": 0.5575145420931595, + "grad_norm": 1.3622895489470248, + "learning_rate": 1.725414710888731e-06, + "loss": 0.8924, + "step": 6182 + }, + { + "epoch": 0.5576047256166299, + "grad_norm": 1.2560247190800766, + "learning_rate": 1.7248360441485885e-06, + "loss": 0.9307, + "step": 6183 + }, + { + "epoch": 0.5576949091401001, + "grad_norm": 1.4003003888073597, + "learning_rate": 1.7242574008869528e-06, + "loss": 1.0165, + "step": 6184 + }, + { + "epoch": 0.5577850926635703, + "grad_norm": 0.702211974005656, + "learning_rate": 1.7236787811531951e-06, + "loss": 0.8704, + "step": 6185 + }, + { + "epoch": 0.5578752761870406, + "grad_norm": 1.4718841228485855, + "learning_rate": 1.7231001849966887e-06, + "loss": 0.9168, + "step": 6186 + }, + { + "epoch": 0.5579654597105109, + "grad_norm": 0.6838204362215398, + "learning_rate": 1.722521612466801e-06, + "loss": 0.8354, + "step": 6187 + }, + { + "epoch": 0.5580556432339812, + "grad_norm": 1.448623740723415, + "learning_rate": 1.7219430636128984e-06, + "loss": 0.8897, + "step": 6188 + }, + { + "epoch": 0.5581458267574514, + "grad_norm": 1.321693420564297, + "learning_rate": 1.7213645384843479e-06, + "loss": 0.8912, + "step": 6189 + }, + { + "epoch": 0.5582360102809216, + "grad_norm": 1.4137527801054612, + "learning_rate": 1.7207860371305108e-06, + "loss": 0.9278, + "step": 6190 + }, + { + "epoch": 0.558326193804392, + "grad_norm": 1.5941303771628312, + "learning_rate": 1.7202075596007487e-06, + "loss": 0.907, + "step": 6191 + }, + { + "epoch": 0.5584163773278622, + "grad_norm": 1.458231413776592, + "learning_rate": 1.7196291059444206e-06, + "loss": 0.9079, + "step": 6192 + }, + { + "epoch": 0.5585065608513324, + "grad_norm": 1.4915528071048796, + "learning_rate": 1.7190506762108828e-06, + "loss": 0.9333, + "step": 6193 + }, + { + "epoch": 0.5585967443748028, + "grad_norm": 1.6263676536745384, + "learning_rate": 1.7184722704494907e-06, + "loss": 0.9426, + "step": 6194 + }, + { + "epoch": 0.558686927898273, + "grad_norm": 1.3804316310069549, + "learning_rate": 1.717893888709596e-06, + "loss": 0.9548, + "step": 6195 + }, + { + "epoch": 0.5587771114217432, + "grad_norm": 1.4428477122418137, + "learning_rate": 1.7173155310405515e-06, + "loss": 0.964, + "step": 6196 + }, + { + "epoch": 0.5588672949452135, + "grad_norm": 1.537042289152113, + "learning_rate": 1.7167371974917043e-06, + "loss": 1.0192, + "step": 6197 + }, + { + "epoch": 0.5589574784686838, + "grad_norm": 1.570261332878682, + "learning_rate": 1.7161588881124003e-06, + "loss": 0.9677, + "step": 6198 + }, + { + "epoch": 0.559047661992154, + "grad_norm": 
2.230299427132592, + "learning_rate": 1.7155806029519861e-06, + "loss": 0.9354, + "step": 6199 + }, + { + "epoch": 0.5591378455156243, + "grad_norm": 1.4953884125417094, + "learning_rate": 1.7150023420598023e-06, + "loss": 0.9941, + "step": 6200 + }, + { + "epoch": 0.5592280290390945, + "grad_norm": 1.1962723711367447, + "learning_rate": 1.714424105485191e-06, + "loss": 0.9953, + "step": 6201 + }, + { + "epoch": 0.5593182125625649, + "grad_norm": 2.0068361751064336, + "learning_rate": 1.7138458932774896e-06, + "loss": 0.8989, + "step": 6202 + }, + { + "epoch": 0.5594083960860351, + "grad_norm": 0.7137983839534594, + "learning_rate": 1.7132677054860335e-06, + "loss": 0.8356, + "step": 6203 + }, + { + "epoch": 0.5594985796095053, + "grad_norm": 1.334278571332026, + "learning_rate": 1.7126895421601586e-06, + "loss": 0.9691, + "step": 6204 + }, + { + "epoch": 0.5595887631329756, + "grad_norm": 1.286485813274658, + "learning_rate": 1.712111403349196e-06, + "loss": 0.8871, + "step": 6205 + }, + { + "epoch": 0.5596789466564459, + "grad_norm": 1.2371036935331925, + "learning_rate": 1.7115332891024757e-06, + "loss": 0.9733, + "step": 6206 + }, + { + "epoch": 0.5597691301799161, + "grad_norm": 0.772736896745653, + "learning_rate": 1.7109551994693257e-06, + "loss": 0.8934, + "step": 6207 + }, + { + "epoch": 0.5598593137033864, + "grad_norm": 1.2486490101496859, + "learning_rate": 1.7103771344990725e-06, + "loss": 0.8801, + "step": 6208 + }, + { + "epoch": 0.5599494972268566, + "grad_norm": 1.6424386308838095, + "learning_rate": 1.709799094241039e-06, + "loss": 0.8642, + "step": 6209 + }, + { + "epoch": 0.560039680750327, + "grad_norm": 1.4318506937615612, + "learning_rate": 1.709221078744546e-06, + "loss": 0.9777, + "step": 6210 + }, + { + "epoch": 0.5601298642737972, + "grad_norm": 1.2554199494121554, + "learning_rate": 1.7086430880589148e-06, + "loss": 0.9531, + "step": 6211 + }, + { + "epoch": 0.5602200477972674, + "grad_norm": 1.597544398041566, + "learning_rate": 1.7080651222334612e-06, + "loss": 0.9493, + "step": 6212 + }, + { + "epoch": 0.5603102313207377, + "grad_norm": 1.595036155984467, + "learning_rate": 1.7074871813175018e-06, + "loss": 0.9937, + "step": 6213 + }, + { + "epoch": 0.560400414844208, + "grad_norm": 2.1255731370718016, + "learning_rate": 1.706909265360349e-06, + "loss": 0.8556, + "step": 6214 + }, + { + "epoch": 0.5604905983676782, + "grad_norm": 0.7897803940534982, + "learning_rate": 1.7063313744113128e-06, + "loss": 0.8088, + "step": 6215 + }, + { + "epoch": 0.5605807818911485, + "grad_norm": 0.6791656068976936, + "learning_rate": 1.7057535085197042e-06, + "loss": 0.7885, + "step": 6216 + }, + { + "epoch": 0.5606709654146187, + "grad_norm": 1.605537802888841, + "learning_rate": 1.705175667734828e-06, + "loss": 0.8629, + "step": 6217 + }, + { + "epoch": 0.560761148938089, + "grad_norm": 1.6542274073187495, + "learning_rate": 1.7045978521059894e-06, + "loss": 0.912, + "step": 6218 + }, + { + "epoch": 0.5608513324615593, + "grad_norm": 1.2385475522161622, + "learning_rate": 1.7040200616824914e-06, + "loss": 0.9394, + "step": 6219 + }, + { + "epoch": 0.5609415159850295, + "grad_norm": 1.5552580280204067, + "learning_rate": 1.7034422965136333e-06, + "loss": 0.8914, + "step": 6220 + }, + { + "epoch": 0.5610316995084998, + "grad_norm": 1.37472795916431, + "learning_rate": 1.7028645566487137e-06, + "loss": 0.9854, + "step": 6221 + }, + { + "epoch": 0.5611218830319701, + "grad_norm": 1.4445699072823568, + "learning_rate": 1.7022868421370284e-06, + "loss": 0.9217, + "step": 6222 + }, 
+ { + "epoch": 0.5612120665554403, + "grad_norm": 1.3799545582601096, + "learning_rate": 1.701709153027872e-06, + "loss": 0.9857, + "step": 6223 + }, + { + "epoch": 0.5613022500789105, + "grad_norm": 1.4293794560665298, + "learning_rate": 1.7011314893705353e-06, + "loss": 0.9071, + "step": 6224 + }, + { + "epoch": 0.5613924336023809, + "grad_norm": 1.3640668294341955, + "learning_rate": 1.700553851214307e-06, + "loss": 0.9454, + "step": 6225 + }, + { + "epoch": 0.5614826171258511, + "grad_norm": 1.3513324078636846, + "learning_rate": 1.699976238608476e-06, + "loss": 0.8133, + "step": 6226 + }, + { + "epoch": 0.5615728006493214, + "grad_norm": 1.6018253761966803, + "learning_rate": 1.699398651602326e-06, + "loss": 0.8874, + "step": 6227 + }, + { + "epoch": 0.5616629841727916, + "grad_norm": 1.2949578695918815, + "learning_rate": 1.6988210902451413e-06, + "loss": 0.9892, + "step": 6228 + }, + { + "epoch": 0.5617531676962619, + "grad_norm": 1.536061415616702, + "learning_rate": 1.6982435545862011e-06, + "loss": 0.9199, + "step": 6229 + }, + { + "epoch": 0.5618433512197322, + "grad_norm": 1.4460244564557727, + "learning_rate": 1.6976660446747853e-06, + "loss": 0.9447, + "step": 6230 + }, + { + "epoch": 0.5619335347432024, + "grad_norm": 1.6387840478125817, + "learning_rate": 1.6970885605601696e-06, + "loss": 0.8683, + "step": 6231 + }, + { + "epoch": 0.5620237182666726, + "grad_norm": 1.437995410635977, + "learning_rate": 1.6965111022916282e-06, + "loss": 1.0496, + "step": 6232 + }, + { + "epoch": 0.562113901790143, + "grad_norm": 1.437061408762239, + "learning_rate": 1.6959336699184323e-06, + "loss": 0.9142, + "step": 6233 + }, + { + "epoch": 0.5622040853136132, + "grad_norm": 1.5342732382163342, + "learning_rate": 1.6953562634898529e-06, + "loss": 0.9289, + "step": 6234 + }, + { + "epoch": 0.5622942688370834, + "grad_norm": 1.640362527832578, + "learning_rate": 1.6947788830551569e-06, + "loss": 0.912, + "step": 6235 + }, + { + "epoch": 0.5623844523605537, + "grad_norm": 0.7873901169423864, + "learning_rate": 1.6942015286636093e-06, + "loss": 0.8625, + "step": 6236 + }, + { + "epoch": 0.562474635884024, + "grad_norm": 1.3423330687696375, + "learning_rate": 1.6936242003644735e-06, + "loss": 0.9224, + "step": 6237 + }, + { + "epoch": 0.5625648194074943, + "grad_norm": 1.4252321623024025, + "learning_rate": 1.6930468982070106e-06, + "loss": 1.0095, + "step": 6238 + }, + { + "epoch": 0.5626550029309645, + "grad_norm": 0.6797623647916998, + "learning_rate": 1.692469622240478e-06, + "loss": 0.7608, + "step": 6239 + }, + { + "epoch": 0.5627451864544347, + "grad_norm": 1.5932776555238426, + "learning_rate": 1.6918923725141339e-06, + "loss": 0.898, + "step": 6240 + }, + { + "epoch": 0.5628353699779051, + "grad_norm": 1.2908047548598363, + "learning_rate": 1.6913151490772312e-06, + "loss": 0.9911, + "step": 6241 + }, + { + "epoch": 0.5629255535013753, + "grad_norm": 1.3191421106639083, + "learning_rate": 1.6907379519790215e-06, + "loss": 0.983, + "step": 6242 + }, + { + "epoch": 0.5630157370248455, + "grad_norm": 1.2327432591244902, + "learning_rate": 1.6901607812687558e-06, + "loss": 0.8463, + "step": 6243 + }, + { + "epoch": 0.5631059205483159, + "grad_norm": 1.2925218345671947, + "learning_rate": 1.6895836369956794e-06, + "loss": 0.8973, + "step": 6244 + }, + { + "epoch": 0.5631961040717861, + "grad_norm": 0.8080472343183639, + "learning_rate": 1.6890065192090402e-06, + "loss": 0.7815, + "step": 6245 + }, + { + "epoch": 0.5632862875952563, + "grad_norm": 2.075698123828084, + "learning_rate": 
1.6884294279580793e-06, + "loss": 0.847, + "step": 6246 + }, + { + "epoch": 0.5633764711187266, + "grad_norm": 1.4244910686583823, + "learning_rate": 1.6878523632920371e-06, + "loss": 0.966, + "step": 6247 + }, + { + "epoch": 0.5634666546421969, + "grad_norm": 1.7199275751168888, + "learning_rate": 1.6872753252601525e-06, + "loss": 1.0036, + "step": 6248 + }, + { + "epoch": 0.5635568381656672, + "grad_norm": 1.5195130491760216, + "learning_rate": 1.6866983139116616e-06, + "loss": 0.8828, + "step": 6249 + }, + { + "epoch": 0.5636470216891374, + "grad_norm": 1.57100711471724, + "learning_rate": 1.6861213292957981e-06, + "loss": 0.9099, + "step": 6250 + }, + { + "epoch": 0.5637372052126076, + "grad_norm": 1.3185046758760497, + "learning_rate": 1.685544371461793e-06, + "loss": 0.9719, + "step": 6251 + }, + { + "epoch": 0.563827388736078, + "grad_norm": 1.5181315823094665, + "learning_rate": 1.6849674404588767e-06, + "loss": 0.9607, + "step": 6252 + }, + { + "epoch": 0.5639175722595482, + "grad_norm": 1.551112449504842, + "learning_rate": 1.6843905363362758e-06, + "loss": 0.9714, + "step": 6253 + }, + { + "epoch": 0.5640077557830184, + "grad_norm": 1.4639401509918482, + "learning_rate": 1.6838136591432136e-06, + "loss": 0.8844, + "step": 6254 + }, + { + "epoch": 0.5640979393064887, + "grad_norm": 1.4843665674873556, + "learning_rate": 1.6832368089289139e-06, + "loss": 0.9637, + "step": 6255 + }, + { + "epoch": 0.564188122829959, + "grad_norm": 1.43773939378382, + "learning_rate": 1.682659985742596e-06, + "loss": 0.9547, + "step": 6256 + }, + { + "epoch": 0.5642783063534292, + "grad_norm": 1.5440243743593307, + "learning_rate": 1.6820831896334782e-06, + "loss": 0.9874, + "step": 6257 + }, + { + "epoch": 0.5643684898768995, + "grad_norm": 1.693519241548064, + "learning_rate": 1.681506420650776e-06, + "loss": 0.9601, + "step": 6258 + }, + { + "epoch": 0.5644586734003697, + "grad_norm": 1.282846107925486, + "learning_rate": 1.680929678843701e-06, + "loss": 0.9315, + "step": 6259 + }, + { + "epoch": 0.56454885692384, + "grad_norm": 1.3287302321132572, + "learning_rate": 1.6803529642614662e-06, + "loss": 0.9918, + "step": 6260 + }, + { + "epoch": 0.5646390404473103, + "grad_norm": 1.8793189216165835, + "learning_rate": 1.6797762769532785e-06, + "loss": 0.9522, + "step": 6261 + }, + { + "epoch": 0.5647292239707805, + "grad_norm": 1.3452211798710807, + "learning_rate": 1.679199616968345e-06, + "loss": 0.9719, + "step": 6262 + }, + { + "epoch": 0.5648194074942507, + "grad_norm": 1.247631069399815, + "learning_rate": 1.6786229843558689e-06, + "loss": 0.9691, + "step": 6263 + }, + { + "epoch": 0.5649095910177211, + "grad_norm": 1.640309839404567, + "learning_rate": 1.6780463791650514e-06, + "loss": 0.9222, + "step": 6264 + }, + { + "epoch": 0.5649997745411913, + "grad_norm": 1.6262291880983137, + "learning_rate": 1.6774698014450928e-06, + "loss": 0.8849, + "step": 6265 + }, + { + "epoch": 0.5650899580646616, + "grad_norm": 0.75131999522806, + "learning_rate": 1.6768932512451883e-06, + "loss": 0.8036, + "step": 6266 + }, + { + "epoch": 0.5651801415881319, + "grad_norm": 1.3621875124618694, + "learning_rate": 1.676316728614534e-06, + "loss": 0.897, + "step": 6267 + }, + { + "epoch": 0.5652703251116021, + "grad_norm": 0.6041590646287428, + "learning_rate": 1.675740233602321e-06, + "loss": 0.8027, + "step": 6268 + }, + { + "epoch": 0.5653605086350724, + "grad_norm": 1.2784175733805057, + "learning_rate": 1.6751637662577385e-06, + "loss": 0.984, + "step": 6269 + }, + { + "epoch": 0.5654506921585426, + 
"grad_norm": 0.7107149551344634, + "learning_rate": 1.6745873266299753e-06, + "loss": 0.7377, + "step": 6270 + }, + { + "epoch": 0.565540875682013, + "grad_norm": 1.4611611424588211, + "learning_rate": 1.6740109147682148e-06, + "loss": 0.9054, + "step": 6271 + }, + { + "epoch": 0.5656310592054832, + "grad_norm": 1.4476670340451845, + "learning_rate": 1.6734345307216418e-06, + "loss": 0.9617, + "step": 6272 + }, + { + "epoch": 0.5657212427289534, + "grad_norm": 1.7353825434901569, + "learning_rate": 1.6728581745394346e-06, + "loss": 0.9083, + "step": 6273 + }, + { + "epoch": 0.5658114262524236, + "grad_norm": 1.2713891153351806, + "learning_rate": 1.672281846270772e-06, + "loss": 1.0191, + "step": 6274 + }, + { + "epoch": 0.565901609775894, + "grad_norm": 1.7516104917051305, + "learning_rate": 1.6717055459648295e-06, + "loss": 0.9409, + "step": 6275 + }, + { + "epoch": 0.5659917932993642, + "grad_norm": 1.50504907559702, + "learning_rate": 1.6711292736707793e-06, + "loss": 0.8957, + "step": 6276 + }, + { + "epoch": 0.5660819768228345, + "grad_norm": 1.7465347996042442, + "learning_rate": 1.6705530294377938e-06, + "loss": 1.0143, + "step": 6277 + }, + { + "epoch": 0.5661721603463047, + "grad_norm": 1.5612578222782816, + "learning_rate": 1.6699768133150395e-06, + "loss": 0.9435, + "step": 6278 + }, + { + "epoch": 0.566262343869775, + "grad_norm": 1.474559818470889, + "learning_rate": 1.6694006253516837e-06, + "loss": 0.9361, + "step": 6279 + }, + { + "epoch": 0.5663525273932453, + "grad_norm": 1.9653365804489276, + "learning_rate": 1.6688244655968896e-06, + "loss": 0.9493, + "step": 6280 + }, + { + "epoch": 0.5664427109167155, + "grad_norm": 1.477798832398404, + "learning_rate": 1.6682483340998175e-06, + "loss": 0.865, + "step": 6281 + }, + { + "epoch": 0.5665328944401857, + "grad_norm": 1.5578785261512023, + "learning_rate": 1.6676722309096276e-06, + "loss": 0.8987, + "step": 6282 + }, + { + "epoch": 0.5666230779636561, + "grad_norm": 0.7758822864149152, + "learning_rate": 1.6670961560754744e-06, + "loss": 0.8618, + "step": 6283 + }, + { + "epoch": 0.5667132614871263, + "grad_norm": 1.618694101200631, + "learning_rate": 1.6665201096465138e-06, + "loss": 0.9372, + "step": 6284 + }, + { + "epoch": 0.5668034450105965, + "grad_norm": 1.409786947400403, + "learning_rate": 1.6659440916718961e-06, + "loss": 0.9873, + "step": 6285 + }, + { + "epoch": 0.5668936285340668, + "grad_norm": 1.5279270629002049, + "learning_rate": 1.6653681022007696e-06, + "loss": 0.9577, + "step": 6286 + }, + { + "epoch": 0.5669838120575371, + "grad_norm": 1.8586271689490386, + "learning_rate": 1.6647921412822825e-06, + "loss": 0.8685, + "step": 6287 + }, + { + "epoch": 0.5670739955810074, + "grad_norm": 1.5233791242321264, + "learning_rate": 1.6642162089655782e-06, + "loss": 0.9393, + "step": 6288 + }, + { + "epoch": 0.5671641791044776, + "grad_norm": 1.4734948803437669, + "learning_rate": 1.663640305299798e-06, + "loss": 0.9037, + "step": 6289 + }, + { + "epoch": 0.5672543626279479, + "grad_norm": 0.7604303271664787, + "learning_rate": 1.6630644303340824e-06, + "loss": 0.836, + "step": 6290 + }, + { + "epoch": 0.5673445461514182, + "grad_norm": 1.3847316635551399, + "learning_rate": 1.662488584117567e-06, + "loss": 0.9508, + "step": 6291 + }, + { + "epoch": 0.5674347296748884, + "grad_norm": 2.027108651055689, + "learning_rate": 1.6619127666993867e-06, + "loss": 0.9933, + "step": 6292 + }, + { + "epoch": 0.5675249131983586, + "grad_norm": 2.051125575632599, + "learning_rate": 1.6613369781286727e-06, + "loss": 0.9469, + 
"step": 6293 + }, + { + "epoch": 0.567615096721829, + "grad_norm": 1.7044445140890554, + "learning_rate": 1.6607612184545562e-06, + "loss": 0.874, + "step": 6294 + }, + { + "epoch": 0.5677052802452992, + "grad_norm": 1.4583566482133448, + "learning_rate": 1.6601854877261617e-06, + "loss": 1.0007, + "step": 6295 + }, + { + "epoch": 0.5677954637687694, + "grad_norm": 1.3616906535784057, + "learning_rate": 1.6596097859926163e-06, + "loss": 0.9332, + "step": 6296 + }, + { + "epoch": 0.5678856472922397, + "grad_norm": 1.4702995729435542, + "learning_rate": 1.6590341133030407e-06, + "loss": 1.0174, + "step": 6297 + }, + { + "epoch": 0.56797583081571, + "grad_norm": 1.355293152618055, + "learning_rate": 1.658458469706554e-06, + "loss": 1.0174, + "step": 6298 + }, + { + "epoch": 0.5680660143391802, + "grad_norm": 1.4177661257065384, + "learning_rate": 1.6578828552522746e-06, + "loss": 0.8989, + "step": 6299 + }, + { + "epoch": 0.5681561978626505, + "grad_norm": 1.3854328479634535, + "learning_rate": 1.6573072699893156e-06, + "loss": 0.9889, + "step": 6300 + }, + { + "epoch": 0.5682463813861207, + "grad_norm": 1.5454381569590105, + "learning_rate": 1.6567317139667906e-06, + "loss": 0.9099, + "step": 6301 + }, + { + "epoch": 0.5683365649095911, + "grad_norm": 1.7134150155829146, + "learning_rate": 1.6561561872338087e-06, + "loss": 0.9573, + "step": 6302 + }, + { + "epoch": 0.5684267484330613, + "grad_norm": 1.3499184901744052, + "learning_rate": 1.6555806898394764e-06, + "loss": 0.8549, + "step": 6303 + }, + { + "epoch": 0.5685169319565315, + "grad_norm": 1.4537396515390186, + "learning_rate": 1.6550052218328987e-06, + "loss": 0.9132, + "step": 6304 + }, + { + "epoch": 0.5686071154800018, + "grad_norm": 1.3945035958419838, + "learning_rate": 1.6544297832631777e-06, + "loss": 0.915, + "step": 6305 + }, + { + "epoch": 0.5686972990034721, + "grad_norm": 1.6813173415300537, + "learning_rate": 1.6538543741794135e-06, + "loss": 0.9435, + "step": 6306 + }, + { + "epoch": 0.5687874825269423, + "grad_norm": 1.332724695566901, + "learning_rate": 1.6532789946307028e-06, + "loss": 0.9099, + "step": 6307 + }, + { + "epoch": 0.5688776660504126, + "grad_norm": 0.7056156339268751, + "learning_rate": 1.6527036446661393e-06, + "loss": 0.8647, + "step": 6308 + }, + { + "epoch": 0.5689678495738828, + "grad_norm": 1.3501170814041075, + "learning_rate": 1.6521283243348165e-06, + "loss": 0.9765, + "step": 6309 + }, + { + "epoch": 0.5690580330973531, + "grad_norm": 1.3674667945052275, + "learning_rate": 1.6515530336858227e-06, + "loss": 0.936, + "step": 6310 + }, + { + "epoch": 0.5691482166208234, + "grad_norm": 1.4034963987091578, + "learning_rate": 1.6509777727682457e-06, + "loss": 0.9849, + "step": 6311 + }, + { + "epoch": 0.5692384001442936, + "grad_norm": 1.378363137635988, + "learning_rate": 1.65040254163117e-06, + "loss": 0.977, + "step": 6312 + }, + { + "epoch": 0.569328583667764, + "grad_norm": 1.9873563342682357, + "learning_rate": 1.649827340323676e-06, + "loss": 0.8411, + "step": 6313 + }, + { + "epoch": 0.5694187671912342, + "grad_norm": 1.5689185488913757, + "learning_rate": 1.6492521688948454e-06, + "loss": 0.9073, + "step": 6314 + }, + { + "epoch": 0.5695089507147044, + "grad_norm": 1.5538398310379418, + "learning_rate": 1.6486770273937526e-06, + "loss": 0.9621, + "step": 6315 + }, + { + "epoch": 0.5695991342381747, + "grad_norm": 1.303986183856836, + "learning_rate": 1.6481019158694738e-06, + "loss": 0.9128, + "step": 6316 + }, + { + "epoch": 0.569689317761645, + "grad_norm": 0.8795009720185912, + 
"learning_rate": 1.6475268343710792e-06, + "loss": 0.8923, + "step": 6317 + }, + { + "epoch": 0.5697795012851152, + "grad_norm": 1.803127853417742, + "learning_rate": 1.6469517829476396e-06, + "loss": 0.8717, + "step": 6318 + }, + { + "epoch": 0.5698696848085855, + "grad_norm": 1.4816615953276402, + "learning_rate": 1.64637676164822e-06, + "loss": 0.9212, + "step": 6319 + }, + { + "epoch": 0.5699598683320557, + "grad_norm": 1.2729338603671687, + "learning_rate": 1.6458017705218848e-06, + "loss": 0.9711, + "step": 6320 + }, + { + "epoch": 0.570050051855526, + "grad_norm": 1.6213541947693726, + "learning_rate": 1.645226809617696e-06, + "loss": 0.954, + "step": 6321 + }, + { + "epoch": 0.5701402353789963, + "grad_norm": 1.5523836327523248, + "learning_rate": 1.6446518789847112e-06, + "loss": 0.9738, + "step": 6322 + }, + { + "epoch": 0.5702304189024665, + "grad_norm": 1.5866274530843658, + "learning_rate": 1.6440769786719883e-06, + "loss": 0.9608, + "step": 6323 + }, + { + "epoch": 0.5703206024259367, + "grad_norm": 1.1728690953655427, + "learning_rate": 1.6435021087285803e-06, + "loss": 0.98, + "step": 6324 + }, + { + "epoch": 0.5704107859494071, + "grad_norm": 1.555428946353166, + "learning_rate": 1.642927269203537e-06, + "loss": 0.9719, + "step": 6325 + }, + { + "epoch": 0.5705009694728773, + "grad_norm": 1.5335727124707241, + "learning_rate": 1.642352460145909e-06, + "loss": 0.9917, + "step": 6326 + }, + { + "epoch": 0.5705911529963476, + "grad_norm": 1.2821537180166191, + "learning_rate": 1.6417776816047402e-06, + "loss": 0.9462, + "step": 6327 + }, + { + "epoch": 0.5706813365198178, + "grad_norm": 1.4860517969307163, + "learning_rate": 1.6412029336290755e-06, + "loss": 0.9284, + "step": 6328 + }, + { + "epoch": 0.5707715200432881, + "grad_norm": 1.8360907957319292, + "learning_rate": 1.6406282162679551e-06, + "loss": 0.9348, + "step": 6329 + }, + { + "epoch": 0.5708617035667584, + "grad_norm": 1.4577123773175356, + "learning_rate": 1.6400535295704162e-06, + "loss": 0.9129, + "step": 6330 + }, + { + "epoch": 0.5709518870902286, + "grad_norm": 1.3166497794810723, + "learning_rate": 1.6394788735854955e-06, + "loss": 0.8766, + "step": 6331 + }, + { + "epoch": 0.5710420706136988, + "grad_norm": 1.4740092490021217, + "learning_rate": 1.6389042483622246e-06, + "loss": 1.0022, + "step": 6332 + }, + { + "epoch": 0.5711322541371692, + "grad_norm": 1.4767435760074172, + "learning_rate": 1.638329653949635e-06, + "loss": 0.9363, + "step": 6333 + }, + { + "epoch": 0.5712224376606394, + "grad_norm": 1.4349049073455669, + "learning_rate": 1.637755090396753e-06, + "loss": 0.9048, + "step": 6334 + }, + { + "epoch": 0.5713126211841096, + "grad_norm": 0.7305587060382569, + "learning_rate": 1.6371805577526039e-06, + "loss": 0.8537, + "step": 6335 + }, + { + "epoch": 0.5714028047075799, + "grad_norm": 1.5649690575023971, + "learning_rate": 1.636606056066211e-06, + "loss": 0.8863, + "step": 6336 + }, + { + "epoch": 0.5714929882310502, + "grad_norm": 1.471620311691034, + "learning_rate": 1.636031585386592e-06, + "loss": 0.8653, + "step": 6337 + }, + { + "epoch": 0.5715831717545204, + "grad_norm": 1.2006869197766814, + "learning_rate": 1.635457145762766e-06, + "loss": 0.9991, + "step": 6338 + }, + { + "epoch": 0.5716733552779907, + "grad_norm": 1.7236161444617235, + "learning_rate": 1.6348827372437456e-06, + "loss": 0.9677, + "step": 6339 + }, + { + "epoch": 0.571763538801461, + "grad_norm": 1.174589649538449, + "learning_rate": 1.634308359878544e-06, + "loss": 0.9389, + "step": 6340 + }, + { + "epoch": 
0.5718537223249313, + "grad_norm": 1.4260007924402718, + "learning_rate": 1.6337340137161695e-06, + "loss": 1.0206, + "step": 6341 + }, + { + "epoch": 0.5719439058484015, + "grad_norm": 1.406150475265053, + "learning_rate": 1.6331596988056277e-06, + "loss": 0.9429, + "step": 6342 + }, + { + "epoch": 0.5720340893718717, + "grad_norm": 1.3141448295648008, + "learning_rate": 1.632585415195924e-06, + "loss": 0.9698, + "step": 6343 + }, + { + "epoch": 0.5721242728953421, + "grad_norm": 1.1895240299031289, + "learning_rate": 1.6320111629360583e-06, + "loss": 0.9712, + "step": 6344 + }, + { + "epoch": 0.5722144564188123, + "grad_norm": 1.6994620313451803, + "learning_rate": 1.631436942075029e-06, + "loss": 0.9159, + "step": 6345 + }, + { + "epoch": 0.5723046399422825, + "grad_norm": 1.4201176441513501, + "learning_rate": 1.630862752661833e-06, + "loss": 0.917, + "step": 6346 + }, + { + "epoch": 0.5723948234657528, + "grad_norm": 1.3924477022578816, + "learning_rate": 1.6302885947454612e-06, + "loss": 0.8729, + "step": 6347 + }, + { + "epoch": 0.5724850069892231, + "grad_norm": 1.4945180378881748, + "learning_rate": 1.6297144683749057e-06, + "loss": 0.9591, + "step": 6348 + }, + { + "epoch": 0.5725751905126933, + "grad_norm": 1.525179839864812, + "learning_rate": 1.629140373599153e-06, + "loss": 0.9464, + "step": 6349 + }, + { + "epoch": 0.5726653740361636, + "grad_norm": 1.4082653439303991, + "learning_rate": 1.628566310467189e-06, + "loss": 0.9542, + "step": 6350 + }, + { + "epoch": 0.5727555575596338, + "grad_norm": 1.6062307660921702, + "learning_rate": 1.6279922790279957e-06, + "loss": 0.9094, + "step": 6351 + }, + { + "epoch": 0.5728457410831042, + "grad_norm": 1.420593354812111, + "learning_rate": 1.6274182793305512e-06, + "loss": 0.9602, + "step": 6352 + }, + { + "epoch": 0.5729359246065744, + "grad_norm": 1.7843089022979122, + "learning_rate": 1.626844311423835e-06, + "loss": 0.9636, + "step": 6353 + }, + { + "epoch": 0.5730261081300446, + "grad_norm": 1.5925213434774281, + "learning_rate": 1.6262703753568181e-06, + "loss": 1.0266, + "step": 6354 + }, + { + "epoch": 0.5731162916535149, + "grad_norm": 1.7965084199567405, + "learning_rate": 1.6256964711784747e-06, + "loss": 1.0159, + "step": 6355 + }, + { + "epoch": 0.5732064751769852, + "grad_norm": 1.4729341199273367, + "learning_rate": 1.6251225989377723e-06, + "loss": 0.9608, + "step": 6356 + }, + { + "epoch": 0.5732966587004554, + "grad_norm": 1.5571447534912082, + "learning_rate": 1.624548758683676e-06, + "loss": 0.9501, + "step": 6357 + }, + { + "epoch": 0.5733868422239257, + "grad_norm": 1.4573405655934186, + "learning_rate": 1.6239749504651505e-06, + "loss": 1.039, + "step": 6358 + }, + { + "epoch": 0.5734770257473959, + "grad_norm": 1.4318930701285717, + "learning_rate": 1.6234011743311552e-06, + "loss": 0.9188, + "step": 6359 + }, + { + "epoch": 0.5735672092708662, + "grad_norm": 1.481167107689159, + "learning_rate": 1.6228274303306483e-06, + "loss": 0.8813, + "step": 6360 + }, + { + "epoch": 0.5736573927943365, + "grad_norm": 1.3202987760473863, + "learning_rate": 1.6222537185125847e-06, + "loss": 1.0259, + "step": 6361 + }, + { + "epoch": 0.5737475763178067, + "grad_norm": 1.2348747207636652, + "learning_rate": 1.6216800389259172e-06, + "loss": 0.9673, + "step": 6362 + }, + { + "epoch": 0.573837759841277, + "grad_norm": 1.3697413107702057, + "learning_rate": 1.6211063916195949e-06, + "loss": 0.9378, + "step": 6363 + }, + { + "epoch": 0.5739279433647473, + "grad_norm": 1.6402474831886094, + "learning_rate": 
1.6205327766425633e-06, + "loss": 0.9458, + "step": 6364 + }, + { + "epoch": 0.5740181268882175, + "grad_norm": 1.249291553010066, + "learning_rate": 1.6199591940437689e-06, + "loss": 0.9581, + "step": 6365 + }, + { + "epoch": 0.5741083104116878, + "grad_norm": 1.2966733109239394, + "learning_rate": 1.6193856438721505e-06, + "loss": 0.9116, + "step": 6366 + }, + { + "epoch": 0.5741984939351581, + "grad_norm": 1.701507920579974, + "learning_rate": 1.6188121261766483e-06, + "loss": 0.9519, + "step": 6367 + }, + { + "epoch": 0.5742886774586283, + "grad_norm": 1.2667428248917272, + "learning_rate": 1.6182386410061976e-06, + "loss": 0.8748, + "step": 6368 + }, + { + "epoch": 0.5743788609820986, + "grad_norm": 1.6219802822273914, + "learning_rate": 1.61766518840973e-06, + "loss": 0.8586, + "step": 6369 + }, + { + "epoch": 0.5744690445055688, + "grad_norm": 1.3935947211970352, + "learning_rate": 1.6170917684361779e-06, + "loss": 0.8234, + "step": 6370 + }, + { + "epoch": 0.5745592280290391, + "grad_norm": 1.4118368431902997, + "learning_rate": 1.6165183811344662e-06, + "loss": 1.0257, + "step": 6371 + }, + { + "epoch": 0.5746494115525094, + "grad_norm": 1.6142468091493938, + "learning_rate": 1.6159450265535218e-06, + "loss": 0.9359, + "step": 6372 + }, + { + "epoch": 0.5747395950759796, + "grad_norm": 1.3065434577975765, + "learning_rate": 1.6153717047422652e-06, + "loss": 1.0286, + "step": 6373 + }, + { + "epoch": 0.5748297785994498, + "grad_norm": 2.0135107262673575, + "learning_rate": 1.6147984157496155e-06, + "loss": 0.8317, + "step": 6374 + }, + { + "epoch": 0.5749199621229202, + "grad_norm": 1.3254846244418126, + "learning_rate": 1.6142251596244886e-06, + "loss": 0.8775, + "step": 6375 + }, + { + "epoch": 0.5750101456463904, + "grad_norm": 1.3772367144802677, + "learning_rate": 1.6136519364157983e-06, + "loss": 0.9987, + "step": 6376 + }, + { + "epoch": 0.5751003291698606, + "grad_norm": 1.5258603123867107, + "learning_rate": 1.6130787461724555e-06, + "loss": 0.8878, + "step": 6377 + }, + { + "epoch": 0.5751905126933309, + "grad_norm": 1.3651968278210387, + "learning_rate": 1.6125055889433679e-06, + "loss": 1.0195, + "step": 6378 + }, + { + "epoch": 0.5752806962168012, + "grad_norm": 1.212516542695447, + "learning_rate": 1.6119324647774386e-06, + "loss": 0.9751, + "step": 6379 + }, + { + "epoch": 0.5753708797402715, + "grad_norm": 1.5876360286794822, + "learning_rate": 1.6113593737235724e-06, + "loss": 0.9619, + "step": 6380 + }, + { + "epoch": 0.5754610632637417, + "grad_norm": 1.6206162818118097, + "learning_rate": 1.6107863158306665e-06, + "loss": 0.8947, + "step": 6381 + }, + { + "epoch": 0.5755512467872119, + "grad_norm": 1.5548881516294857, + "learning_rate": 1.610213291147619e-06, + "loss": 0.8391, + "step": 6382 + }, + { + "epoch": 0.5756414303106823, + "grad_norm": 1.784486073044942, + "learning_rate": 1.609640299723322e-06, + "loss": 0.972, + "step": 6383 + }, + { + "epoch": 0.5757316138341525, + "grad_norm": 1.2695148173515816, + "learning_rate": 1.609067341606668e-06, + "loss": 0.9303, + "step": 6384 + }, + { + "epoch": 0.5758217973576227, + "grad_norm": 1.579192904989968, + "learning_rate": 1.6084944168465438e-06, + "loss": 0.939, + "step": 6385 + }, + { + "epoch": 0.5759119808810931, + "grad_norm": 1.2773977379078423, + "learning_rate": 1.6079215254918339e-06, + "loss": 0.9688, + "step": 6386 + }, + { + "epoch": 0.5760021644045633, + "grad_norm": 1.8582719367908598, + "learning_rate": 1.6073486675914222e-06, + "loss": 0.8523, + "step": 6387 + }, + { + "epoch": 
0.5760923479280335, + "grad_norm": 1.569265747399692, + "learning_rate": 1.606775843194187e-06, + "loss": 0.9694, + "step": 6388 + }, + { + "epoch": 0.5761825314515038, + "grad_norm": 1.9619741549291585, + "learning_rate": 1.6062030523490053e-06, + "loss": 0.9406, + "step": 6389 + }, + { + "epoch": 0.5762727149749741, + "grad_norm": 1.5463391541518587, + "learning_rate": 1.60563029510475e-06, + "loss": 0.8704, + "step": 6390 + }, + { + "epoch": 0.5763628984984444, + "grad_norm": 0.8124466658473629, + "learning_rate": 1.6050575715102927e-06, + "loss": 0.788, + "step": 6391 + }, + { + "epoch": 0.5764530820219146, + "grad_norm": 1.56887075559753, + "learning_rate": 1.6044848816145014e-06, + "loss": 0.9975, + "step": 6392 + }, + { + "epoch": 0.5765432655453848, + "grad_norm": 1.420704034569538, + "learning_rate": 1.60391222546624e-06, + "loss": 0.9685, + "step": 6393 + }, + { + "epoch": 0.5766334490688552, + "grad_norm": 1.466374119206916, + "learning_rate": 1.6033396031143725e-06, + "loss": 0.9372, + "step": 6394 + }, + { + "epoch": 0.5767236325923254, + "grad_norm": 1.5027201148859561, + "learning_rate": 1.602767014607757e-06, + "loss": 0.9459, + "step": 6395 + }, + { + "epoch": 0.5768138161157956, + "grad_norm": 1.6734647946308936, + "learning_rate": 1.6021944599952493e-06, + "loss": 0.9612, + "step": 6396 + }, + { + "epoch": 0.5769039996392659, + "grad_norm": 1.6068706842080391, + "learning_rate": 1.6016219393257048e-06, + "loss": 0.9627, + "step": 6397 + }, + { + "epoch": 0.5769941831627362, + "grad_norm": 1.3230414544471185, + "learning_rate": 1.6010494526479722e-06, + "loss": 0.9015, + "step": 6398 + }, + { + "epoch": 0.5770843666862064, + "grad_norm": 2.2731509732725717, + "learning_rate": 1.6004770000109006e-06, + "loss": 0.8983, + "step": 6399 + }, + { + "epoch": 0.5771745502096767, + "grad_norm": 1.297634499842415, + "learning_rate": 1.5999045814633348e-06, + "loss": 0.9113, + "step": 6400 + }, + { + "epoch": 0.5772647337331469, + "grad_norm": 1.7520014353271598, + "learning_rate": 1.5993321970541151e-06, + "loss": 0.9548, + "step": 6401 + }, + { + "epoch": 0.5773549172566173, + "grad_norm": 1.6262873172216918, + "learning_rate": 1.5987598468320825e-06, + "loss": 0.9631, + "step": 6402 + }, + { + "epoch": 0.5774451007800875, + "grad_norm": 1.2846020370261755, + "learning_rate": 1.5981875308460717e-06, + "loss": 0.9389, + "step": 6403 + }, + { + "epoch": 0.5775352843035577, + "grad_norm": 1.6246360224459746, + "learning_rate": 1.5976152491449169e-06, + "loss": 0.9344, + "step": 6404 + }, + { + "epoch": 0.577625467827028, + "grad_norm": 1.4369880759873357, + "learning_rate": 1.5970430017774468e-06, + "loss": 1.0137, + "step": 6405 + }, + { + "epoch": 0.5777156513504983, + "grad_norm": 1.3408844578924088, + "learning_rate": 1.5964707887924904e-06, + "loss": 0.9635, + "step": 6406 + }, + { + "epoch": 0.5778058348739685, + "grad_norm": 1.2335538909637982, + "learning_rate": 1.5958986102388714e-06, + "loss": 0.8355, + "step": 6407 + }, + { + "epoch": 0.5778960183974388, + "grad_norm": 1.3636462753109075, + "learning_rate": 1.5953264661654104e-06, + "loss": 0.904, + "step": 6408 + }, + { + "epoch": 0.5779862019209091, + "grad_norm": 1.4499506580244605, + "learning_rate": 1.5947543566209276e-06, + "loss": 0.9889, + "step": 6409 + }, + { + "epoch": 0.5780763854443793, + "grad_norm": 1.364270307829096, + "learning_rate": 1.5941822816542367e-06, + "loss": 0.9235, + "step": 6410 + }, + { + "epoch": 0.5781665689678496, + "grad_norm": 1.3357928013656297, + "learning_rate": 1.5936102413141519e-06, 
+ "loss": 0.9046, + "step": 6411 + }, + { + "epoch": 0.5782567524913198, + "grad_norm": 1.7522445997605862, + "learning_rate": 1.5930382356494823e-06, + "loss": 0.8745, + "step": 6412 + }, + { + "epoch": 0.5783469360147901, + "grad_norm": 1.3196718817438648, + "learning_rate": 1.5924662647090335e-06, + "loss": 0.9966, + "step": 6413 + }, + { + "epoch": 0.5784371195382604, + "grad_norm": 1.354543672558146, + "learning_rate": 1.5918943285416108e-06, + "loss": 0.9614, + "step": 6414 + }, + { + "epoch": 0.5785273030617306, + "grad_norm": 1.1826043352709201, + "learning_rate": 1.5913224271960139e-06, + "loss": 0.8819, + "step": 6415 + }, + { + "epoch": 0.5786174865852008, + "grad_norm": 1.2962620217217389, + "learning_rate": 1.590750560721041e-06, + "loss": 0.9567, + "step": 6416 + }, + { + "epoch": 0.5787076701086712, + "grad_norm": 1.2476239032409941, + "learning_rate": 1.5901787291654874e-06, + "loss": 0.9165, + "step": 6417 + }, + { + "epoch": 0.5787978536321414, + "grad_norm": 1.305586225519142, + "learning_rate": 1.5896069325781435e-06, + "loss": 0.8187, + "step": 6418 + }, + { + "epoch": 0.5788880371556117, + "grad_norm": 1.347872730504039, + "learning_rate": 1.5890351710077998e-06, + "loss": 0.9775, + "step": 6419 + }, + { + "epoch": 0.5789782206790819, + "grad_norm": 1.2713199632235332, + "learning_rate": 1.5884634445032406e-06, + "loss": 0.9996, + "step": 6420 + }, + { + "epoch": 0.5790684042025522, + "grad_norm": 0.6546290451210363, + "learning_rate": 1.5878917531132501e-06, + "loss": 0.8183, + "step": 6421 + }, + { + "epoch": 0.5791585877260225, + "grad_norm": 1.7701862798610561, + "learning_rate": 1.5873200968866077e-06, + "loss": 0.8173, + "step": 6422 + }, + { + "epoch": 0.5792487712494927, + "grad_norm": 1.929265829494184, + "learning_rate": 1.586748475872089e-06, + "loss": 0.9195, + "step": 6423 + }, + { + "epoch": 0.5793389547729629, + "grad_norm": 1.404912821484876, + "learning_rate": 1.58617689011847e-06, + "loss": 0.808, + "step": 6424 + }, + { + "epoch": 0.5794291382964333, + "grad_norm": 1.4340477829903493, + "learning_rate": 1.5856053396745198e-06, + "loss": 0.9863, + "step": 6425 + }, + { + "epoch": 0.5795193218199035, + "grad_norm": 1.2774415051679162, + "learning_rate": 1.5850338245890078e-06, + "loss": 1.0146, + "step": 6426 + }, + { + "epoch": 0.5796095053433737, + "grad_norm": 1.4531176987331273, + "learning_rate": 1.5844623449106974e-06, + "loss": 0.9279, + "step": 6427 + }, + { + "epoch": 0.579699688866844, + "grad_norm": 1.433988012830359, + "learning_rate": 1.583890900688351e-06, + "loss": 0.9889, + "step": 6428 + }, + { + "epoch": 0.5797898723903143, + "grad_norm": 1.783134668045228, + "learning_rate": 1.583319491970728e-06, + "loss": 1.0158, + "step": 6429 + }, + { + "epoch": 0.5798800559137846, + "grad_norm": 1.2297517170794234, + "learning_rate": 1.5827481188065828e-06, + "loss": 0.9616, + "step": 6430 + }, + { + "epoch": 0.5799702394372548, + "grad_norm": 1.335562714769813, + "learning_rate": 1.5821767812446689e-06, + "loss": 0.9147, + "step": 6431 + }, + { + "epoch": 0.5800604229607251, + "grad_norm": 1.5159308528027016, + "learning_rate": 1.581605479333736e-06, + "loss": 1.063, + "step": 6432 + }, + { + "epoch": 0.5801506064841954, + "grad_norm": 1.548843595372178, + "learning_rate": 1.5810342131225308e-06, + "loss": 0.9577, + "step": 6433 + }, + { + "epoch": 0.5802407900076656, + "grad_norm": 1.6032996456205093, + "learning_rate": 1.580462982659797e-06, + "loss": 0.981, + "step": 6434 + }, + { + "epoch": 0.5803309735311358, + "grad_norm": 
1.5708531450123497, + "learning_rate": 1.5798917879942736e-06, + "loss": 1.0102, + "step": 6435 + }, + { + "epoch": 0.5804211570546062, + "grad_norm": 1.2824463724892563, + "learning_rate": 1.5793206291747006e-06, + "loss": 0.8902, + "step": 6436 + }, + { + "epoch": 0.5805113405780764, + "grad_norm": 1.193856942544405, + "learning_rate": 1.57874950624981e-06, + "loss": 0.8841, + "step": 6437 + }, + { + "epoch": 0.5806015241015466, + "grad_norm": 1.4873699291645173, + "learning_rate": 1.5781784192683351e-06, + "loss": 0.9433, + "step": 6438 + }, + { + "epoch": 0.5806917076250169, + "grad_norm": 1.1253815639651443, + "learning_rate": 1.5776073682790033e-06, + "loss": 0.9104, + "step": 6439 + }, + { + "epoch": 0.5807818911484872, + "grad_norm": 1.5226742128317703, + "learning_rate": 1.5770363533305393e-06, + "loss": 0.9661, + "step": 6440 + }, + { + "epoch": 0.5808720746719575, + "grad_norm": 1.3991777815537294, + "learning_rate": 1.5764653744716665e-06, + "loss": 0.9374, + "step": 6441 + }, + { + "epoch": 0.5809622581954277, + "grad_norm": 1.6112346830150506, + "learning_rate": 1.575894431751103e-06, + "loss": 0.9651, + "step": 6442 + }, + { + "epoch": 0.5810524417188979, + "grad_norm": 1.413697717308678, + "learning_rate": 1.575323525217565e-06, + "loss": 0.9635, + "step": 6443 + }, + { + "epoch": 0.5811426252423683, + "grad_norm": 1.4673905269725172, + "learning_rate": 1.574752654919766e-06, + "loss": 1.0219, + "step": 6444 + }, + { + "epoch": 0.5812328087658385, + "grad_norm": 1.3404243034466992, + "learning_rate": 1.5741818209064146e-06, + "loss": 0.9208, + "step": 6445 + }, + { + "epoch": 0.5813229922893087, + "grad_norm": 1.6163821219058083, + "learning_rate": 1.5736110232262183e-06, + "loss": 0.9547, + "step": 6446 + }, + { + "epoch": 0.581413175812779, + "grad_norm": 1.6853426812919678, + "learning_rate": 1.5730402619278804e-06, + "loss": 0.9906, + "step": 6447 + }, + { + "epoch": 0.5815033593362493, + "grad_norm": 0.7312585161805449, + "learning_rate": 1.5724695370601024e-06, + "loss": 0.7818, + "step": 6448 + }, + { + "epoch": 0.5815935428597195, + "grad_norm": 2.2674735059105107, + "learning_rate": 1.5718988486715798e-06, + "loss": 0.8741, + "step": 6449 + }, + { + "epoch": 0.5816837263831898, + "grad_norm": 1.3098013245279252, + "learning_rate": 1.5713281968110087e-06, + "loss": 0.935, + "step": 6450 + }, + { + "epoch": 0.58177390990666, + "grad_norm": 1.5273087831675116, + "learning_rate": 1.5707575815270796e-06, + "loss": 0.9322, + "step": 6451 + }, + { + "epoch": 0.5818640934301303, + "grad_norm": 1.6143306247284908, + "learning_rate": 1.57018700286848e-06, + "loss": 0.9414, + "step": 6452 + }, + { + "epoch": 0.5819542769536006, + "grad_norm": 1.4200744966656933, + "learning_rate": 1.5696164608838956e-06, + "loss": 0.9632, + "step": 6453 + }, + { + "epoch": 0.5820444604770708, + "grad_norm": 2.3648798304345697, + "learning_rate": 1.5690459556220073e-06, + "loss": 0.9574, + "step": 6454 + }, + { + "epoch": 0.582134644000541, + "grad_norm": 1.513511678770971, + "learning_rate": 1.5684754871314949e-06, + "loss": 1.0069, + "step": 6455 + }, + { + "epoch": 0.5822248275240114, + "grad_norm": 1.4209003671324956, + "learning_rate": 1.5679050554610335e-06, + "loss": 0.9447, + "step": 6456 + }, + { + "epoch": 0.5823150110474816, + "grad_norm": 1.278287299934211, + "learning_rate": 1.567334660659295e-06, + "loss": 0.9491, + "step": 6457 + }, + { + "epoch": 0.5824051945709519, + "grad_norm": 1.3370301535493672, + "learning_rate": 1.5667643027749488e-06, + "loss": 0.9585, + "step": 6458 + 
}, + { + "epoch": 0.5824953780944222, + "grad_norm": 1.649682898828442, + "learning_rate": 1.5661939818566614e-06, + "loss": 1.0183, + "step": 6459 + }, + { + "epoch": 0.5825855616178924, + "grad_norm": 1.498592113358351, + "learning_rate": 1.5656236979530956e-06, + "loss": 0.8114, + "step": 6460 + }, + { + "epoch": 0.5826757451413627, + "grad_norm": 1.5658385658413587, + "learning_rate": 1.5650534511129106e-06, + "loss": 0.8909, + "step": 6461 + }, + { + "epoch": 0.5827659286648329, + "grad_norm": 1.5445841771011923, + "learning_rate": 1.5644832413847635e-06, + "loss": 0.9263, + "step": 6462 + }, + { + "epoch": 0.5828561121883032, + "grad_norm": 1.222347866569764, + "learning_rate": 1.5639130688173082e-06, + "loss": 0.9907, + "step": 6463 + }, + { + "epoch": 0.5829462957117735, + "grad_norm": 1.31298828125, + "learning_rate": 1.5633429334591932e-06, + "loss": 0.9188, + "step": 6464 + }, + { + "epoch": 0.5830364792352437, + "grad_norm": 1.5309297654968224, + "learning_rate": 1.562772835359068e-06, + "loss": 0.975, + "step": 6465 + }, + { + "epoch": 0.5831266627587139, + "grad_norm": 2.0702273945140273, + "learning_rate": 1.5622027745655753e-06, + "loss": 0.9151, + "step": 6466 + }, + { + "epoch": 0.5832168462821843, + "grad_norm": 1.3983143587079188, + "learning_rate": 1.561632751127355e-06, + "loss": 0.9035, + "step": 6467 + }, + { + "epoch": 0.5833070298056545, + "grad_norm": 1.5656577436179202, + "learning_rate": 1.561062765093046e-06, + "loss": 0.9939, + "step": 6468 + }, + { + "epoch": 0.5833972133291248, + "grad_norm": 1.628904493591679, + "learning_rate": 1.5604928165112817e-06, + "loss": 0.9446, + "step": 6469 + }, + { + "epoch": 0.583487396852595, + "grad_norm": 1.4325496186988358, + "learning_rate": 1.5599229054306945e-06, + "loss": 0.9358, + "step": 6470 + }, + { + "epoch": 0.5835775803760653, + "grad_norm": 1.4082660634529711, + "learning_rate": 1.5593530318999111e-06, + "loss": 0.9469, + "step": 6471 + }, + { + "epoch": 0.5836677638995356, + "grad_norm": 1.354182707862722, + "learning_rate": 1.5587831959675572e-06, + "loss": 0.9738, + "step": 6472 + }, + { + "epoch": 0.5837579474230058, + "grad_norm": 1.3472011074015173, + "learning_rate": 1.5582133976822534e-06, + "loss": 0.9283, + "step": 6473 + }, + { + "epoch": 0.583848130946476, + "grad_norm": 1.5468002455402157, + "learning_rate": 1.5576436370926185e-06, + "loss": 0.9021, + "step": 6474 + }, + { + "epoch": 0.5839383144699464, + "grad_norm": 1.1713401082327297, + "learning_rate": 1.5570739142472679e-06, + "loss": 0.9287, + "step": 6475 + }, + { + "epoch": 0.5840284979934166, + "grad_norm": 1.2502278120348602, + "learning_rate": 1.5565042291948127e-06, + "loss": 1.01, + "step": 6476 + }, + { + "epoch": 0.5841186815168868, + "grad_norm": 1.2978594157581955, + "learning_rate": 1.5559345819838624e-06, + "loss": 0.9845, + "step": 6477 + }, + { + "epoch": 0.5842088650403571, + "grad_norm": 1.1919952437542747, + "learning_rate": 1.5553649726630226e-06, + "loss": 0.9236, + "step": 6478 + }, + { + "epoch": 0.5842990485638274, + "grad_norm": 1.315879693828222, + "learning_rate": 1.5547954012808942e-06, + "loss": 0.8765, + "step": 6479 + }, + { + "epoch": 0.5843892320872976, + "grad_norm": 1.672638620696155, + "learning_rate": 1.5542258678860776e-06, + "loss": 0.9163, + "step": 6480 + }, + { + "epoch": 0.5844794156107679, + "grad_norm": 1.381440036525761, + "learning_rate": 1.553656372527167e-06, + "loss": 1.0613, + "step": 6481 + }, + { + "epoch": 0.5845695991342382, + "grad_norm": 1.7573145860565391, + "learning_rate": 
1.5530869152527568e-06, + "loss": 1.0458, + "step": 6482 + }, + { + "epoch": 0.5846597826577085, + "grad_norm": 1.8941729334559605, + "learning_rate": 1.5525174961114353e-06, + "loss": 0.9629, + "step": 6483 + }, + { + "epoch": 0.5847499661811787, + "grad_norm": 1.3675079406145099, + "learning_rate": 1.5519481151517875e-06, + "loss": 0.9893, + "step": 6484 + }, + { + "epoch": 0.5848401497046489, + "grad_norm": 1.5047819684282218, + "learning_rate": 1.551378772422398e-06, + "loss": 0.9506, + "step": 6485 + }, + { + "epoch": 0.5849303332281193, + "grad_norm": 1.2533818274274273, + "learning_rate": 1.5508094679718447e-06, + "loss": 0.9547, + "step": 6486 + }, + { + "epoch": 0.5850205167515895, + "grad_norm": 1.7666550728879333, + "learning_rate": 1.5502402018487048e-06, + "loss": 0.8546, + "step": 6487 + }, + { + "epoch": 0.5851107002750597, + "grad_norm": 1.4423164450551036, + "learning_rate": 1.54967097410155e-06, + "loss": 0.9457, + "step": 6488 + }, + { + "epoch": 0.58520088379853, + "grad_norm": 1.2853505645687795, + "learning_rate": 1.5491017847789519e-06, + "loss": 0.8746, + "step": 6489 + }, + { + "epoch": 0.5852910673220003, + "grad_norm": 1.4543498629769387, + "learning_rate": 1.5485326339294755e-06, + "loss": 0.8926, + "step": 6490 + }, + { + "epoch": 0.5853812508454705, + "grad_norm": 1.4175888687613845, + "learning_rate": 1.5479635216016832e-06, + "loss": 0.9111, + "step": 6491 + }, + { + "epoch": 0.5854714343689408, + "grad_norm": 1.4185425829407057, + "learning_rate": 1.547394447844137e-06, + "loss": 0.8593, + "step": 6492 + }, + { + "epoch": 0.585561617892411, + "grad_norm": 1.4458660818269993, + "learning_rate": 1.546825412705391e-06, + "loss": 0.8661, + "step": 6493 + }, + { + "epoch": 0.5856518014158814, + "grad_norm": 1.4791892770821091, + "learning_rate": 1.5462564162340007e-06, + "loss": 0.976, + "step": 6494 + }, + { + "epoch": 0.5857419849393516, + "grad_norm": 1.6100181109434508, + "learning_rate": 1.5456874584785144e-06, + "loss": 0.946, + "step": 6495 + }, + { + "epoch": 0.5858321684628218, + "grad_norm": 1.6906969375958965, + "learning_rate": 1.5451185394874785e-06, + "loss": 0.9177, + "step": 6496 + }, + { + "epoch": 0.5859223519862921, + "grad_norm": 1.494029321451739, + "learning_rate": 1.5445496593094381e-06, + "loss": 0.9586, + "step": 6497 + }, + { + "epoch": 0.5860125355097624, + "grad_norm": 1.53687739287908, + "learning_rate": 1.5439808179929316e-06, + "loss": 0.9557, + "step": 6498 + }, + { + "epoch": 0.5861027190332326, + "grad_norm": 1.4282401841873156, + "learning_rate": 1.543412015586496e-06, + "loss": 0.9084, + "step": 6499 + }, + { + "epoch": 0.5861929025567029, + "grad_norm": 1.3888575862959822, + "learning_rate": 1.5428432521386655e-06, + "loss": 0.9128, + "step": 6500 + }, + { + "epoch": 0.5862830860801731, + "grad_norm": 1.453543079899141, + "learning_rate": 1.5422745276979688e-06, + "loss": 0.9832, + "step": 6501 + }, + { + "epoch": 0.5863732696036434, + "grad_norm": 1.3320705821922791, + "learning_rate": 1.5417058423129336e-06, + "loss": 0.9353, + "step": 6502 + }, + { + "epoch": 0.5864634531271137, + "grad_norm": 1.2900288052263014, + "learning_rate": 1.5411371960320822e-06, + "loss": 0.9813, + "step": 6503 + }, + { + "epoch": 0.5865536366505839, + "grad_norm": 1.3437570527357385, + "learning_rate": 1.5405685889039363e-06, + "loss": 0.981, + "step": 6504 + }, + { + "epoch": 0.5866438201740543, + "grad_norm": 1.465267028688815, + "learning_rate": 1.5400000209770118e-06, + "loss": 0.9401, + "step": 6505 + }, + { + "epoch": 0.5867340036975245, + 
"grad_norm": 1.420648905548339, + "learning_rate": 1.5394314922998208e-06, + "loss": 0.9537, + "step": 6506 + }, + { + "epoch": 0.5868241872209947, + "grad_norm": 1.9897829391718915, + "learning_rate": 1.5388630029208756e-06, + "loss": 0.9962, + "step": 6507 + }, + { + "epoch": 0.586914370744465, + "grad_norm": 1.3705786365020267, + "learning_rate": 1.5382945528886806e-06, + "loss": 0.9676, + "step": 6508 + }, + { + "epoch": 0.5870045542679353, + "grad_norm": 1.7536877196517204, + "learning_rate": 1.5377261422517412e-06, + "loss": 0.9296, + "step": 6509 + }, + { + "epoch": 0.5870947377914055, + "grad_norm": 1.3964768629605646, + "learning_rate": 1.5371577710585553e-06, + "loss": 0.9128, + "step": 6510 + }, + { + "epoch": 0.5871849213148758, + "grad_norm": 0.8204327086518503, + "learning_rate": 1.536589439357621e-06, + "loss": 0.8141, + "step": 6511 + }, + { + "epoch": 0.587275104838346, + "grad_norm": 1.3262622560874988, + "learning_rate": 1.5360211471974315e-06, + "loss": 0.9523, + "step": 6512 + }, + { + "epoch": 0.5873652883618163, + "grad_norm": 1.3915401833366203, + "learning_rate": 1.5354528946264753e-06, + "loss": 0.9971, + "step": 6513 + }, + { + "epoch": 0.5874554718852866, + "grad_norm": 1.1793728844684819, + "learning_rate": 1.5348846816932399e-06, + "loss": 0.8803, + "step": 6514 + }, + { + "epoch": 0.5875456554087568, + "grad_norm": 0.679717951125352, + "learning_rate": 1.5343165084462077e-06, + "loss": 0.7896, + "step": 6515 + }, + { + "epoch": 0.587635838932227, + "grad_norm": 1.6238168664192785, + "learning_rate": 1.5337483749338595e-06, + "loss": 0.9015, + "step": 6516 + }, + { + "epoch": 0.5877260224556974, + "grad_norm": 2.096431779708134, + "learning_rate": 1.5331802812046708e-06, + "loss": 0.9188, + "step": 6517 + }, + { + "epoch": 0.5878162059791676, + "grad_norm": 1.3893789216336714, + "learning_rate": 1.5326122273071133e-06, + "loss": 0.8022, + "step": 6518 + }, + { + "epoch": 0.5879063895026378, + "grad_norm": 1.3750716537532244, + "learning_rate": 1.532044213289659e-06, + "loss": 0.9871, + "step": 6519 + }, + { + "epoch": 0.5879965730261081, + "grad_norm": 1.4575921582093745, + "learning_rate": 1.5314762392007718e-06, + "loss": 0.8365, + "step": 6520 + }, + { + "epoch": 0.5880867565495784, + "grad_norm": 1.4315014780658495, + "learning_rate": 1.530908305088916e-06, + "loss": 1.0011, + "step": 6521 + }, + { + "epoch": 0.5881769400730487, + "grad_norm": 1.5196083657277684, + "learning_rate": 1.5303404110025501e-06, + "loss": 0.9373, + "step": 6522 + }, + { + "epoch": 0.5882671235965189, + "grad_norm": 1.2424774311403026, + "learning_rate": 1.5297725569901293e-06, + "loss": 0.9076, + "step": 6523 + }, + { + "epoch": 0.5883573071199891, + "grad_norm": 1.418155037909414, + "learning_rate": 1.5292047431001077e-06, + "loss": 0.8429, + "step": 6524 + }, + { + "epoch": 0.5884474906434595, + "grad_norm": 0.779731648328546, + "learning_rate": 1.5286369693809321e-06, + "loss": 0.8303, + "step": 6525 + }, + { + "epoch": 0.5885376741669297, + "grad_norm": 1.781526778284055, + "learning_rate": 1.5280692358810506e-06, + "loss": 0.9509, + "step": 6526 + }, + { + "epoch": 0.5886278576903999, + "grad_norm": 1.676671714105207, + "learning_rate": 1.527501542648904e-06, + "loss": 0.8968, + "step": 6527 + }, + { + "epoch": 0.5887180412138703, + "grad_norm": 1.72944690164719, + "learning_rate": 1.5269338897329308e-06, + "loss": 0.8553, + "step": 6528 + }, + { + "epoch": 0.5888082247373405, + "grad_norm": 1.4179643783291769, + "learning_rate": 1.5263662771815662e-06, + "loss": 0.9676, + 
"step": 6529 + }, + { + "epoch": 0.5888984082608107, + "grad_norm": 1.4699114813070948, + "learning_rate": 1.5257987050432429e-06, + "loss": 1.0188, + "step": 6530 + }, + { + "epoch": 0.588988591784281, + "grad_norm": 1.4556754914990164, + "learning_rate": 1.5252311733663887e-06, + "loss": 0.849, + "step": 6531 + }, + { + "epoch": 0.5890787753077513, + "grad_norm": 1.503212350063944, + "learning_rate": 1.5246636821994281e-06, + "loss": 0.8919, + "step": 6532 + }, + { + "epoch": 0.5891689588312216, + "grad_norm": 1.363937001610926, + "learning_rate": 1.524096231590784e-06, + "loss": 0.992, + "step": 6533 + }, + { + "epoch": 0.5892591423546918, + "grad_norm": 1.572905870952147, + "learning_rate": 1.5235288215888736e-06, + "loss": 0.9856, + "step": 6534 + }, + { + "epoch": 0.589349325878162, + "grad_norm": 1.5087015959932446, + "learning_rate": 1.5229614522421102e-06, + "loss": 0.9471, + "step": 6535 + }, + { + "epoch": 0.5894395094016324, + "grad_norm": 1.4424476893045393, + "learning_rate": 1.5223941235989071e-06, + "loss": 0.7813, + "step": 6536 + }, + { + "epoch": 0.5895296929251026, + "grad_norm": 1.313194636042246, + "learning_rate": 1.52182683570767e-06, + "loss": 0.9131, + "step": 6537 + }, + { + "epoch": 0.5896198764485728, + "grad_norm": 1.4729838120813434, + "learning_rate": 1.5212595886168046e-06, + "loss": 0.9589, + "step": 6538 + }, + { + "epoch": 0.5897100599720431, + "grad_norm": 1.407506000377271, + "learning_rate": 1.520692382374711e-06, + "loss": 0.9536, + "step": 6539 + }, + { + "epoch": 0.5898002434955134, + "grad_norm": 1.5882024517486675, + "learning_rate": 1.5201252170297854e-06, + "loss": 0.9675, + "step": 6540 + }, + { + "epoch": 0.5898904270189836, + "grad_norm": 1.7434248015998908, + "learning_rate": 1.5195580926304232e-06, + "loss": 0.8361, + "step": 6541 + }, + { + "epoch": 0.5899806105424539, + "grad_norm": 1.632467863225151, + "learning_rate": 1.5189910092250131e-06, + "loss": 0.9558, + "step": 6542 + }, + { + "epoch": 0.5900707940659241, + "grad_norm": 1.6218526144081493, + "learning_rate": 1.5184239668619427e-06, + "loss": 0.9542, + "step": 6543 + }, + { + "epoch": 0.5901609775893945, + "grad_norm": 1.4311750833819086, + "learning_rate": 1.5178569655895946e-06, + "loss": 0.947, + "step": 6544 + }, + { + "epoch": 0.5902511611128647, + "grad_norm": 1.2305190787543303, + "learning_rate": 1.5172900054563487e-06, + "loss": 0.9727, + "step": 6545 + }, + { + "epoch": 0.5903413446363349, + "grad_norm": 1.4263509369955698, + "learning_rate": 1.5167230865105814e-06, + "loss": 0.9678, + "step": 6546 + }, + { + "epoch": 0.5904315281598052, + "grad_norm": 1.6129579381738364, + "learning_rate": 1.5161562088006644e-06, + "loss": 0.8618, + "step": 6547 + }, + { + "epoch": 0.5905217116832755, + "grad_norm": 1.8519537146244909, + "learning_rate": 1.5155893723749685e-06, + "loss": 0.9759, + "step": 6548 + }, + { + "epoch": 0.5906118952067457, + "grad_norm": 1.3802046673804007, + "learning_rate": 1.5150225772818582e-06, + "loss": 0.9511, + "step": 6549 + }, + { + "epoch": 0.590702078730216, + "grad_norm": 1.4162230451532156, + "learning_rate": 1.5144558235696949e-06, + "loss": 1.0003, + "step": 6550 + }, + { + "epoch": 0.5907922622536863, + "grad_norm": 1.3740440426754352, + "learning_rate": 1.5138891112868388e-06, + "loss": 0.8606, + "step": 6551 + }, + { + "epoch": 0.5908824457771565, + "grad_norm": 0.752413997361355, + "learning_rate": 1.5133224404816433e-06, + "loss": 0.8172, + "step": 6552 + }, + { + "epoch": 0.5909726293006268, + "grad_norm": 1.6842060436091875, + 
"learning_rate": 1.5127558112024617e-06, + "loss": 0.8756, + "step": 6553 + }, + { + "epoch": 0.591062812824097, + "grad_norm": 0.6815667754559086, + "learning_rate": 1.5121892234976404e-06, + "loss": 0.8192, + "step": 6554 + }, + { + "epoch": 0.5911529963475673, + "grad_norm": 1.480077364677985, + "learning_rate": 1.5116226774155243e-06, + "loss": 0.9295, + "step": 6555 + }, + { + "epoch": 0.5912431798710376, + "grad_norm": 1.708861253935505, + "learning_rate": 1.5110561730044547e-06, + "loss": 0.9112, + "step": 6556 + }, + { + "epoch": 0.5913333633945078, + "grad_norm": 1.3830252877003957, + "learning_rate": 1.510489710312768e-06, + "loss": 0.9412, + "step": 6557 + }, + { + "epoch": 0.591423546917978, + "grad_norm": 1.4354051169644544, + "learning_rate": 1.5099232893887987e-06, + "loss": 0.9398, + "step": 6558 + }, + { + "epoch": 0.5915137304414484, + "grad_norm": 1.3375017700896628, + "learning_rate": 1.5093569102808758e-06, + "loss": 0.9141, + "step": 6559 + }, + { + "epoch": 0.5916039139649186, + "grad_norm": 1.6011582771611708, + "learning_rate": 1.5087905730373275e-06, + "loss": 0.9491, + "step": 6560 + }, + { + "epoch": 0.5916940974883889, + "grad_norm": 0.6728866634431655, + "learning_rate": 1.508224277706476e-06, + "loss": 0.8319, + "step": 6561 + }, + { + "epoch": 0.5917842810118591, + "grad_norm": 2.2529671495991352, + "learning_rate": 1.5076580243366399e-06, + "loss": 0.9063, + "step": 6562 + }, + { + "epoch": 0.5918744645353294, + "grad_norm": 1.6813399592159881, + "learning_rate": 1.507091812976137e-06, + "loss": 0.8536, + "step": 6563 + }, + { + "epoch": 0.5919646480587997, + "grad_norm": 1.3934720929803726, + "learning_rate": 1.5065256436732773e-06, + "loss": 0.8957, + "step": 6564 + }, + { + "epoch": 0.5920548315822699, + "grad_norm": 1.4936143534213293, + "learning_rate": 1.5059595164763717e-06, + "loss": 0.8725, + "step": 6565 + }, + { + "epoch": 0.5921450151057401, + "grad_norm": 0.6445611657800462, + "learning_rate": 1.5053934314337243e-06, + "loss": 0.8246, + "step": 6566 + }, + { + "epoch": 0.5922351986292105, + "grad_norm": 1.3118949131138111, + "learning_rate": 1.5048273885936356e-06, + "loss": 0.936, + "step": 6567 + }, + { + "epoch": 0.5923253821526807, + "grad_norm": 1.20942224020944, + "learning_rate": 1.5042613880044053e-06, + "loss": 0.8626, + "step": 6568 + }, + { + "epoch": 0.592415565676151, + "grad_norm": 1.433761961004889, + "learning_rate": 1.5036954297143265e-06, + "loss": 0.9748, + "step": 6569 + }, + { + "epoch": 0.5925057491996212, + "grad_norm": 1.179207811659944, + "learning_rate": 1.50312951377169e-06, + "loss": 0.8968, + "step": 6570 + }, + { + "epoch": 0.5925959327230915, + "grad_norm": 0.6218566287983684, + "learning_rate": 1.502563640224784e-06, + "loss": 0.7962, + "step": 6571 + }, + { + "epoch": 0.5926861162465618, + "grad_norm": 1.5089305626153908, + "learning_rate": 1.5019978091218903e-06, + "loss": 0.9402, + "step": 6572 + }, + { + "epoch": 0.592776299770032, + "grad_norm": 1.2440209443454289, + "learning_rate": 1.50143202051129e-06, + "loss": 1.0026, + "step": 6573 + }, + { + "epoch": 0.5928664832935022, + "grad_norm": 1.4134430766833768, + "learning_rate": 1.500866274441258e-06, + "loss": 0.9993, + "step": 6574 + }, + { + "epoch": 0.5929566668169726, + "grad_norm": 1.6405107730882635, + "learning_rate": 1.5003005709600682e-06, + "loss": 0.9259, + "step": 6575 + }, + { + "epoch": 0.5930468503404428, + "grad_norm": 1.3801342302498643, + "learning_rate": 1.4997349101159885e-06, + "loss": 1.0152, + "step": 6576 + }, + { + "epoch": 
0.593137033863913, + "grad_norm": 1.564505244040547, + "learning_rate": 1.4991692919572854e-06, + "loss": 0.8887, + "step": 6577 + }, + { + "epoch": 0.5932272173873834, + "grad_norm": 1.4318143106933932, + "learning_rate": 1.4986037165322199e-06, + "loss": 0.9501, + "step": 6578 + }, + { + "epoch": 0.5933174009108536, + "grad_norm": 0.7187169938179832, + "learning_rate": 1.498038183889049e-06, + "loss": 0.8389, + "step": 6579 + }, + { + "epoch": 0.5934075844343238, + "grad_norm": 1.4677430617386622, + "learning_rate": 1.4974726940760292e-06, + "loss": 0.8652, + "step": 6580 + }, + { + "epoch": 0.5934977679577941, + "grad_norm": 1.4979071321922341, + "learning_rate": 1.496907247141409e-06, + "loss": 1.0097, + "step": 6581 + }, + { + "epoch": 0.5935879514812644, + "grad_norm": 0.8239907767511105, + "learning_rate": 1.4963418431334372e-06, + "loss": 0.9249, + "step": 6582 + }, + { + "epoch": 0.5936781350047347, + "grad_norm": 1.5410819964022062, + "learning_rate": 1.4957764821003566e-06, + "loss": 0.9251, + "step": 6583 + }, + { + "epoch": 0.5937683185282049, + "grad_norm": 1.4236508692123422, + "learning_rate": 1.4952111640904063e-06, + "loss": 0.9482, + "step": 6584 + }, + { + "epoch": 0.5938585020516751, + "grad_norm": 1.4219885979851183, + "learning_rate": 1.494645889151823e-06, + "loss": 0.9218, + "step": 6585 + }, + { + "epoch": 0.5939486855751455, + "grad_norm": 0.724606948396059, + "learning_rate": 1.494080657332839e-06, + "loss": 0.7761, + "step": 6586 + }, + { + "epoch": 0.5940388690986157, + "grad_norm": 2.0005279082716347, + "learning_rate": 1.4935154686816832e-06, + "loss": 0.9038, + "step": 6587 + }, + { + "epoch": 0.5941290526220859, + "grad_norm": 1.5748880255722375, + "learning_rate": 1.4929503232465802e-06, + "loss": 1.0094, + "step": 6588 + }, + { + "epoch": 0.5942192361455562, + "grad_norm": 1.3340495639583994, + "learning_rate": 1.492385221075751e-06, + "loss": 0.8575, + "step": 6589 + }, + { + "epoch": 0.5943094196690265, + "grad_norm": 1.4995466182899433, + "learning_rate": 1.4918201622174142e-06, + "loss": 0.9733, + "step": 6590 + }, + { + "epoch": 0.5943996031924967, + "grad_norm": 1.5079349606449473, + "learning_rate": 1.4912551467197827e-06, + "loss": 0.9664, + "step": 6591 + }, + { + "epoch": 0.594489786715967, + "grad_norm": 1.6567568453314483, + "learning_rate": 1.4906901746310678e-06, + "loss": 1.02, + "step": 6592 + }, + { + "epoch": 0.5945799702394372, + "grad_norm": 1.2370166759492525, + "learning_rate": 1.4901252459994757e-06, + "loss": 0.9043, + "step": 6593 + }, + { + "epoch": 0.5946701537629075, + "grad_norm": 1.420417038162256, + "learning_rate": 1.489560360873208e-06, + "loss": 0.9265, + "step": 6594 + }, + { + "epoch": 0.5947603372863778, + "grad_norm": 1.4217835281628495, + "learning_rate": 1.4889955193004659e-06, + "loss": 0.9195, + "step": 6595 + }, + { + "epoch": 0.594850520809848, + "grad_norm": 0.6800994282496691, + "learning_rate": 1.4884307213294428e-06, + "loss": 0.8005, + "step": 6596 + }, + { + "epoch": 0.5949407043333182, + "grad_norm": 1.5937387802626675, + "learning_rate": 1.4878659670083321e-06, + "loss": 0.9269, + "step": 6597 + }, + { + "epoch": 0.5950308878567886, + "grad_norm": 1.5398686810027913, + "learning_rate": 1.4873012563853208e-06, + "loss": 0.8671, + "step": 6598 + }, + { + "epoch": 0.5951210713802588, + "grad_norm": 2.2448540226771723, + "learning_rate": 1.4867365895085935e-06, + "loss": 0.9533, + "step": 6599 + }, + { + "epoch": 0.5952112549037291, + "grad_norm": 1.2752623325567465, + "learning_rate": 
1.4861719664263301e-06, + "loss": 0.9185, + "step": 6600 + }, + { + "epoch": 0.5953014384271994, + "grad_norm": 1.4491558215521718, + "learning_rate": 1.485607387186708e-06, + "loss": 1.0359, + "step": 6601 + }, + { + "epoch": 0.5953916219506696, + "grad_norm": 1.551301960040332, + "learning_rate": 1.4850428518379001e-06, + "loss": 0.9373, + "step": 6602 + }, + { + "epoch": 0.5954818054741399, + "grad_norm": 1.2613176115607347, + "learning_rate": 1.4844783604280746e-06, + "loss": 0.9359, + "step": 6603 + }, + { + "epoch": 0.5955719889976101, + "grad_norm": 1.8068939196382177, + "learning_rate": 1.483913913005399e-06, + "loss": 0.8869, + "step": 6604 + }, + { + "epoch": 0.5956621725210804, + "grad_norm": 1.5081176449042635, + "learning_rate": 1.483349509618034e-06, + "loss": 0.9988, + "step": 6605 + }, + { + "epoch": 0.5957523560445507, + "grad_norm": 1.4020215357651493, + "learning_rate": 1.4827851503141367e-06, + "loss": 0.8744, + "step": 6606 + }, + { + "epoch": 0.5958425395680209, + "grad_norm": 1.357490768330975, + "learning_rate": 1.482220835141863e-06, + "loss": 0.8683, + "step": 6607 + }, + { + "epoch": 0.5959327230914911, + "grad_norm": 1.452915217525867, + "learning_rate": 1.481656564149362e-06, + "loss": 1.0183, + "step": 6608 + }, + { + "epoch": 0.5960229066149615, + "grad_norm": 2.971324727711902, + "learning_rate": 1.4810923373847818e-06, + "loss": 0.9384, + "step": 6609 + }, + { + "epoch": 0.5961130901384317, + "grad_norm": 1.4981543789602032, + "learning_rate": 1.4805281548962647e-06, + "loss": 0.9463, + "step": 6610 + }, + { + "epoch": 0.596203273661902, + "grad_norm": 1.502612620241218, + "learning_rate": 1.4799640167319488e-06, + "loss": 0.885, + "step": 6611 + }, + { + "epoch": 0.5962934571853722, + "grad_norm": 1.4865791419627155, + "learning_rate": 1.4793999229399714e-06, + "loss": 0.9281, + "step": 6612 + }, + { + "epoch": 0.5963836407088425, + "grad_norm": 1.4831701910478086, + "learning_rate": 1.4788358735684626e-06, + "loss": 1.0032, + "step": 6613 + }, + { + "epoch": 0.5964738242323128, + "grad_norm": 1.3500001289226329, + "learning_rate": 1.4782718686655514e-06, + "loss": 1.0034, + "step": 6614 + }, + { + "epoch": 0.596564007755783, + "grad_norm": 3.227230270780517, + "learning_rate": 1.4777079082793605e-06, + "loss": 0.9861, + "step": 6615 + }, + { + "epoch": 0.5966541912792532, + "grad_norm": 1.4952139795069264, + "learning_rate": 1.4771439924580108e-06, + "loss": 0.9096, + "step": 6616 + }, + { + "epoch": 0.5967443748027236, + "grad_norm": 1.239381079108199, + "learning_rate": 1.4765801212496189e-06, + "loss": 0.9797, + "step": 6617 + }, + { + "epoch": 0.5968345583261938, + "grad_norm": 1.5088748648236319, + "learning_rate": 1.4760162947022968e-06, + "loss": 0.8596, + "step": 6618 + }, + { + "epoch": 0.596924741849664, + "grad_norm": 1.4353963967688277, + "learning_rate": 1.475452512864154e-06, + "loss": 0.9295, + "step": 6619 + }, + { + "epoch": 0.5970149253731343, + "grad_norm": 1.1535933104442806, + "learning_rate": 1.4748887757832945e-06, + "loss": 0.9495, + "step": 6620 + }, + { + "epoch": 0.5971051088966046, + "grad_norm": 1.435351383106959, + "learning_rate": 1.4743250835078209e-06, + "loss": 0.9724, + "step": 6621 + }, + { + "epoch": 0.5971952924200749, + "grad_norm": 1.311483034603729, + "learning_rate": 1.4737614360858297e-06, + "loss": 0.9037, + "step": 6622 + }, + { + "epoch": 0.5972854759435451, + "grad_norm": 1.5814736219456176, + "learning_rate": 1.4731978335654133e-06, + "loss": 0.897, + "step": 6623 + }, + { + "epoch": 0.5973756594670154, + 
"grad_norm": 1.664400666311507, + "learning_rate": 1.4726342759946638e-06, + "loss": 0.8833, + "step": 6624 + }, + { + "epoch": 0.5974658429904857, + "grad_norm": 1.488214286038618, + "learning_rate": 1.4720707634216653e-06, + "loss": 0.9429, + "step": 6625 + }, + { + "epoch": 0.5975560265139559, + "grad_norm": 1.6605052626014394, + "learning_rate": 1.4715072958945e-06, + "loss": 0.9503, + "step": 6626 + }, + { + "epoch": 0.5976462100374261, + "grad_norm": 1.6248749171313563, + "learning_rate": 1.470943873461247e-06, + "loss": 0.9421, + "step": 6627 + }, + { + "epoch": 0.5977363935608965, + "grad_norm": 2.295397951782993, + "learning_rate": 1.470380496169979e-06, + "loss": 0.9078, + "step": 6628 + }, + { + "epoch": 0.5978265770843667, + "grad_norm": 1.477928377586723, + "learning_rate": 1.4698171640687682e-06, + "loss": 0.9367, + "step": 6629 + }, + { + "epoch": 0.5979167606078369, + "grad_norm": 1.3468549032105979, + "learning_rate": 1.4692538772056792e-06, + "loss": 0.8833, + "step": 6630 + }, + { + "epoch": 0.5980069441313072, + "grad_norm": 1.2231276215928197, + "learning_rate": 1.4686906356287772e-06, + "loss": 0.8562, + "step": 6631 + }, + { + "epoch": 0.5980971276547775, + "grad_norm": 1.3539496712564807, + "learning_rate": 1.4681274393861194e-06, + "loss": 0.9809, + "step": 6632 + }, + { + "epoch": 0.5981873111782477, + "grad_norm": 1.5325331469337966, + "learning_rate": 1.4675642885257603e-06, + "loss": 1.0356, + "step": 6633 + }, + { + "epoch": 0.598277494701718, + "grad_norm": 1.475578585795096, + "learning_rate": 1.4670011830957529e-06, + "loss": 1.0361, + "step": 6634 + }, + { + "epoch": 0.5983676782251882, + "grad_norm": 1.8186689824152957, + "learning_rate": 1.4664381231441427e-06, + "loss": 0.9075, + "step": 6635 + }, + { + "epoch": 0.5984578617486586, + "grad_norm": 1.4453482958510657, + "learning_rate": 1.4658751087189746e-06, + "loss": 0.943, + "step": 6636 + }, + { + "epoch": 0.5985480452721288, + "grad_norm": 1.5546881900958347, + "learning_rate": 1.4653121398682874e-06, + "loss": 0.9884, + "step": 6637 + }, + { + "epoch": 0.598638228795599, + "grad_norm": 1.2168277350809287, + "learning_rate": 1.4647492166401159e-06, + "loss": 0.8833, + "step": 6638 + }, + { + "epoch": 0.5987284123190693, + "grad_norm": 1.5262391762189544, + "learning_rate": 1.4641863390824934e-06, + "loss": 0.9311, + "step": 6639 + }, + { + "epoch": 0.5988185958425396, + "grad_norm": 1.413960574359878, + "learning_rate": 1.4636235072434465e-06, + "loss": 0.9348, + "step": 6640 + }, + { + "epoch": 0.5989087793660098, + "grad_norm": 1.308524833117038, + "learning_rate": 1.4630607211709994e-06, + "loss": 0.965, + "step": 6641 + }, + { + "epoch": 0.5989989628894801, + "grad_norm": 1.313220870645714, + "learning_rate": 1.4624979809131723e-06, + "loss": 0.9497, + "step": 6642 + }, + { + "epoch": 0.5990891464129503, + "grad_norm": 1.5822688171598622, + "learning_rate": 1.4619352865179814e-06, + "loss": 0.8519, + "step": 6643 + }, + { + "epoch": 0.5991793299364206, + "grad_norm": 1.633218742100687, + "learning_rate": 1.4613726380334391e-06, + "loss": 0.9156, + "step": 6644 + }, + { + "epoch": 0.5992695134598909, + "grad_norm": 1.6454057339211448, + "learning_rate": 1.4608100355075522e-06, + "loss": 0.7976, + "step": 6645 + }, + { + "epoch": 0.5993596969833611, + "grad_norm": 0.9359673369528396, + "learning_rate": 1.460247478988327e-06, + "loss": 0.8026, + "step": 6646 + }, + { + "epoch": 0.5994498805068315, + "grad_norm": 1.3008317908930183, + "learning_rate": 1.4596849685237623e-06, + "loss": 0.9507, + 
"step": 6647 + }, + { + "epoch": 0.5995400640303017, + "grad_norm": 1.4176759864243067, + "learning_rate": 1.459122504161856e-06, + "loss": 0.8493, + "step": 6648 + }, + { + "epoch": 0.5996302475537719, + "grad_norm": 1.3499697522907372, + "learning_rate": 1.4585600859506001e-06, + "loss": 0.9116, + "step": 6649 + }, + { + "epoch": 0.5997204310772422, + "grad_norm": 1.5796659141748561, + "learning_rate": 1.4579977139379826e-06, + "loss": 1.0006, + "step": 6650 + }, + { + "epoch": 0.5998106146007125, + "grad_norm": 1.4891523079943936, + "learning_rate": 1.4574353881719895e-06, + "loss": 1.0102, + "step": 6651 + }, + { + "epoch": 0.5999007981241827, + "grad_norm": 1.7028378585800603, + "learning_rate": 1.4568731087005998e-06, + "loss": 0.9439, + "step": 6652 + }, + { + "epoch": 0.599990981647653, + "grad_norm": 1.2019822775126123, + "learning_rate": 1.4563108755717916e-06, + "loss": 0.9598, + "step": 6653 + }, + { + "epoch": 0.6000811651711232, + "grad_norm": 1.4432445681780477, + "learning_rate": 1.455748688833538e-06, + "loss": 0.9615, + "step": 6654 + }, + { + "epoch": 0.6001713486945935, + "grad_norm": 1.4717233470371507, + "learning_rate": 1.4551865485338065e-06, + "loss": 0.8515, + "step": 6655 + }, + { + "epoch": 0.6002615322180638, + "grad_norm": 1.3466197132590019, + "learning_rate": 1.4546244547205629e-06, + "loss": 1.0034, + "step": 6656 + }, + { + "epoch": 0.600351715741534, + "grad_norm": 1.3877454798285844, + "learning_rate": 1.4540624074417678e-06, + "loss": 0.9614, + "step": 6657 + }, + { + "epoch": 0.6004418992650042, + "grad_norm": 1.3401085060939912, + "learning_rate": 1.453500406745379e-06, + "loss": 0.9972, + "step": 6658 + }, + { + "epoch": 0.6005320827884746, + "grad_norm": 1.6727169448124715, + "learning_rate": 1.4529384526793486e-06, + "loss": 1.0159, + "step": 6659 + }, + { + "epoch": 0.6006222663119448, + "grad_norm": 1.581069013166646, + "learning_rate": 1.4523765452916252e-06, + "loss": 0.9948, + "step": 6660 + }, + { + "epoch": 0.600712449835415, + "grad_norm": 1.399296629850677, + "learning_rate": 1.4518146846301554e-06, + "loss": 0.9691, + "step": 6661 + }, + { + "epoch": 0.6008026333588853, + "grad_norm": 1.5248653665115253, + "learning_rate": 1.4512528707428787e-06, + "loss": 0.9181, + "step": 6662 + }, + { + "epoch": 0.6008928168823556, + "grad_norm": 1.316260717480563, + "learning_rate": 1.4506911036777335e-06, + "loss": 0.9161, + "step": 6663 + }, + { + "epoch": 0.6009830004058259, + "grad_norm": 0.7241909330389881, + "learning_rate": 1.450129383482651e-06, + "loss": 0.7898, + "step": 6664 + }, + { + "epoch": 0.6010731839292961, + "grad_norm": 1.173624030474117, + "learning_rate": 1.4495677102055629e-06, + "loss": 0.8968, + "step": 6665 + }, + { + "epoch": 0.6011633674527663, + "grad_norm": 1.3695456957232024, + "learning_rate": 1.4490060838943924e-06, + "loss": 0.9775, + "step": 6666 + }, + { + "epoch": 0.6012535509762367, + "grad_norm": 3.1318564918350296, + "learning_rate": 1.4484445045970609e-06, + "loss": 0.941, + "step": 6667 + }, + { + "epoch": 0.6013437344997069, + "grad_norm": 1.398477777508415, + "learning_rate": 1.447882972361485e-06, + "loss": 0.9384, + "step": 6668 + }, + { + "epoch": 0.6014339180231771, + "grad_norm": 1.427446286715805, + "learning_rate": 1.4473214872355785e-06, + "loss": 0.9252, + "step": 6669 + }, + { + "epoch": 0.6015241015466475, + "grad_norm": 1.5989021349380252, + "learning_rate": 1.4467600492672508e-06, + "loss": 0.9841, + "step": 6670 + }, + { + "epoch": 0.6016142850701177, + "grad_norm": 1.540273664236046, + 
"learning_rate": 1.4461986585044054e-06, + "loss": 0.9188, + "step": 6671 + }, + { + "epoch": 0.601704468593588, + "grad_norm": 1.415660397927028, + "learning_rate": 1.4456373149949446e-06, + "loss": 0.9211, + "step": 6672 + }, + { + "epoch": 0.6017946521170582, + "grad_norm": 1.5583555570163505, + "learning_rate": 1.4450760187867648e-06, + "loss": 0.9487, + "step": 6673 + }, + { + "epoch": 0.6018848356405285, + "grad_norm": 1.3759442468426224, + "learning_rate": 1.4445147699277581e-06, + "loss": 0.9309, + "step": 6674 + }, + { + "epoch": 0.6019750191639988, + "grad_norm": 1.4651903073124857, + "learning_rate": 1.4439535684658154e-06, + "loss": 0.99, + "step": 6675 + }, + { + "epoch": 0.602065202687469, + "grad_norm": 0.9140935061159944, + "learning_rate": 1.44339241444882e-06, + "loss": 0.8574, + "step": 6676 + }, + { + "epoch": 0.6021553862109392, + "grad_norm": 1.4925806616067983, + "learning_rate": 1.4428313079246518e-06, + "loss": 0.8882, + "step": 6677 + }, + { + "epoch": 0.6022455697344096, + "grad_norm": 0.6988579655732278, + "learning_rate": 1.4422702489411896e-06, + "loss": 0.8181, + "step": 6678 + }, + { + "epoch": 0.6023357532578798, + "grad_norm": 1.622471676477867, + "learning_rate": 1.4417092375463043e-06, + "loss": 0.8912, + "step": 6679 + }, + { + "epoch": 0.60242593678135, + "grad_norm": 1.361636637205198, + "learning_rate": 1.441148273787866e-06, + "loss": 0.9769, + "step": 6680 + }, + { + "epoch": 0.6025161203048203, + "grad_norm": 0.8668998034833553, + "learning_rate": 1.4405873577137383e-06, + "loss": 0.8626, + "step": 6681 + }, + { + "epoch": 0.6026063038282906, + "grad_norm": 1.8085030518564846, + "learning_rate": 1.4400264893717816e-06, + "loss": 0.9535, + "step": 6682 + }, + { + "epoch": 0.6026964873517608, + "grad_norm": 1.2800128360194092, + "learning_rate": 1.4394656688098526e-06, + "loss": 0.9885, + "step": 6683 + }, + { + "epoch": 0.6027866708752311, + "grad_norm": 1.377518644604793, + "learning_rate": 1.4389048960758032e-06, + "loss": 0.9723, + "step": 6684 + }, + { + "epoch": 0.6028768543987013, + "grad_norm": 1.9375068910537736, + "learning_rate": 1.4383441712174826e-06, + "loss": 0.9737, + "step": 6685 + }, + { + "epoch": 0.6029670379221717, + "grad_norm": 1.4714151915551679, + "learning_rate": 1.4377834942827333e-06, + "loss": 0.9996, + "step": 6686 + }, + { + "epoch": 0.6030572214456419, + "grad_norm": 1.3650183772851678, + "learning_rate": 1.437222865319397e-06, + "loss": 0.8805, + "step": 6687 + }, + { + "epoch": 0.6031474049691121, + "grad_norm": 1.6755349571795608, + "learning_rate": 1.4366622843753092e-06, + "loss": 0.873, + "step": 6688 + }, + { + "epoch": 0.6032375884925824, + "grad_norm": 1.2718715752032985, + "learning_rate": 1.4361017514983006e-06, + "loss": 0.9648, + "step": 6689 + }, + { + "epoch": 0.6033277720160527, + "grad_norm": 1.520884717656751, + "learning_rate": 1.4355412667362006e-06, + "loss": 1.0019, + "step": 6690 + }, + { + "epoch": 0.6034179555395229, + "grad_norm": 1.4382895084539775, + "learning_rate": 1.4349808301368311e-06, + "loss": 0.896, + "step": 6691 + }, + { + "epoch": 0.6035081390629932, + "grad_norm": 1.6501461051063424, + "learning_rate": 1.4344204417480139e-06, + "loss": 0.982, + "step": 6692 + }, + { + "epoch": 0.6035983225864634, + "grad_norm": 1.4939833614388018, + "learning_rate": 1.4338601016175628e-06, + "loss": 0.89, + "step": 6693 + }, + { + "epoch": 0.6036885061099337, + "grad_norm": 1.2679373257186979, + "learning_rate": 1.433299809793289e-06, + "loss": 0.939, + "step": 6694 + }, + { + "epoch": 
0.603778689633404, + "grad_norm": 2.2839615266302, + "learning_rate": 1.432739566323001e-06, + "loss": 0.807, + "step": 6695 + }, + { + "epoch": 0.6038688731568742, + "grad_norm": 1.5949700117730115, + "learning_rate": 1.4321793712545004e-06, + "loss": 0.9065, + "step": 6696 + }, + { + "epoch": 0.6039590566803446, + "grad_norm": 1.5967450143124955, + "learning_rate": 1.4316192246355873e-06, + "loss": 0.9793, + "step": 6697 + }, + { + "epoch": 0.6040492402038148, + "grad_norm": 0.7026733855435077, + "learning_rate": 1.4310591265140555e-06, + "loss": 0.8063, + "step": 6698 + }, + { + "epoch": 0.604139423727285, + "grad_norm": 1.3265476059723411, + "learning_rate": 1.4304990769376963e-06, + "loss": 0.9543, + "step": 6699 + }, + { + "epoch": 0.6042296072507553, + "grad_norm": 1.3217527671194793, + "learning_rate": 1.4299390759542962e-06, + "loss": 0.9316, + "step": 6700 + }, + { + "epoch": 0.6043197907742256, + "grad_norm": 1.4990705153438062, + "learning_rate": 1.4293791236116368e-06, + "loss": 0.9222, + "step": 6701 + }, + { + "epoch": 0.6044099742976958, + "grad_norm": 1.5001491631411104, + "learning_rate": 1.4288192199574978e-06, + "loss": 0.911, + "step": 6702 + }, + { + "epoch": 0.6045001578211661, + "grad_norm": 0.7820657667227318, + "learning_rate": 1.4282593650396524e-06, + "loss": 0.7617, + "step": 6703 + }, + { + "epoch": 0.6045903413446363, + "grad_norm": 1.2506184478542375, + "learning_rate": 1.4276995589058695e-06, + "loss": 1.0413, + "step": 6704 + }, + { + "epoch": 0.6046805248681066, + "grad_norm": 0.7150679690059605, + "learning_rate": 1.4271398016039168e-06, + "loss": 0.7875, + "step": 6705 + }, + { + "epoch": 0.6047707083915769, + "grad_norm": 2.3051245323517033, + "learning_rate": 1.4265800931815542e-06, + "loss": 0.9809, + "step": 6706 + }, + { + "epoch": 0.6048608919150471, + "grad_norm": 1.3355266987921453, + "learning_rate": 1.4260204336865406e-06, + "loss": 0.9492, + "step": 6707 + }, + { + "epoch": 0.6049510754385173, + "grad_norm": 1.4360176404437974, + "learning_rate": 1.4254608231666281e-06, + "loss": 0.9372, + "step": 6708 + }, + { + "epoch": 0.6050412589619877, + "grad_norm": 1.3240985647029688, + "learning_rate": 1.4249012616695661e-06, + "loss": 0.9477, + "step": 6709 + }, + { + "epoch": 0.6051314424854579, + "grad_norm": 1.3969467998033114, + "learning_rate": 1.4243417492431e-06, + "loss": 0.9507, + "step": 6710 + }, + { + "epoch": 0.6052216260089281, + "grad_norm": 1.5666429145852185, + "learning_rate": 1.4237822859349696e-06, + "loss": 0.9273, + "step": 6711 + }, + { + "epoch": 0.6053118095323984, + "grad_norm": 1.2417043550049836, + "learning_rate": 1.423222871792912e-06, + "loss": 0.9112, + "step": 6712 + }, + { + "epoch": 0.6054019930558687, + "grad_norm": 1.31368197852091, + "learning_rate": 1.4226635068646586e-06, + "loss": 1.0044, + "step": 6713 + }, + { + "epoch": 0.605492176579339, + "grad_norm": 0.690357209843404, + "learning_rate": 1.4221041911979393e-06, + "loss": 0.8029, + "step": 6714 + }, + { + "epoch": 0.6055823601028092, + "grad_norm": 1.4859821653823273, + "learning_rate": 1.4215449248404765e-06, + "loss": 0.9533, + "step": 6715 + }, + { + "epoch": 0.6056725436262794, + "grad_norm": 1.4364247654493656, + "learning_rate": 1.4209857078399896e-06, + "loss": 0.9596, + "step": 6716 + }, + { + "epoch": 0.6057627271497498, + "grad_norm": 1.5568888046406073, + "learning_rate": 1.4204265402441955e-06, + "loss": 0.9114, + "step": 6717 + }, + { + "epoch": 0.60585291067322, + "grad_norm": 1.309395433507729, + "learning_rate": 1.419867422100804e-06, + 
"loss": 0.8213, + "step": 6718 + }, + { + "epoch": 0.6059430941966902, + "grad_norm": 1.3476884478717088, + "learning_rate": 1.4193083534575236e-06, + "loss": 0.9291, + "step": 6719 + }, + { + "epoch": 0.6060332777201606, + "grad_norm": 1.9454519689116678, + "learning_rate": 1.4187493343620567e-06, + "loss": 0.9676, + "step": 6720 + }, + { + "epoch": 0.6061234612436308, + "grad_norm": 1.4908435621314053, + "learning_rate": 1.4181903648621006e-06, + "loss": 0.9717, + "step": 6721 + }, + { + "epoch": 0.606213644767101, + "grad_norm": 1.3886054729998296, + "learning_rate": 1.4176314450053512e-06, + "loss": 0.9702, + "step": 6722 + }, + { + "epoch": 0.6063038282905713, + "grad_norm": 1.6346784816836657, + "learning_rate": 1.4170725748394977e-06, + "loss": 0.9337, + "step": 6723 + }, + { + "epoch": 0.6063940118140416, + "grad_norm": 1.4961237095156286, + "learning_rate": 1.4165137544122266e-06, + "loss": 0.8502, + "step": 6724 + }, + { + "epoch": 0.6064841953375119, + "grad_norm": 1.9757908456975861, + "learning_rate": 1.4159549837712194e-06, + "loss": 1.0063, + "step": 6725 + }, + { + "epoch": 0.6065743788609821, + "grad_norm": 1.2408485633952147, + "learning_rate": 1.415396262964153e-06, + "loss": 0.8622, + "step": 6726 + }, + { + "epoch": 0.6066645623844523, + "grad_norm": 1.6617358064695271, + "learning_rate": 1.4148375920387016e-06, + "loss": 1.0173, + "step": 6727 + }, + { + "epoch": 0.6067547459079227, + "grad_norm": 0.733659883063379, + "learning_rate": 1.4142789710425325e-06, + "loss": 0.8108, + "step": 6728 + }, + { + "epoch": 0.6068449294313929, + "grad_norm": 1.7568562937432401, + "learning_rate": 1.4137204000233118e-06, + "loss": 0.8672, + "step": 6729 + }, + { + "epoch": 0.6069351129548631, + "grad_norm": 1.707507872221241, + "learning_rate": 1.4131618790286987e-06, + "loss": 0.9356, + "step": 6730 + }, + { + "epoch": 0.6070252964783334, + "grad_norm": 1.370432506773998, + "learning_rate": 1.4126034081063506e-06, + "loss": 0.9315, + "step": 6731 + }, + { + "epoch": 0.6071154800018037, + "grad_norm": 1.5145741388530702, + "learning_rate": 1.4120449873039186e-06, + "loss": 0.931, + "step": 6732 + }, + { + "epoch": 0.6072056635252739, + "grad_norm": 1.4500687977484845, + "learning_rate": 1.4114866166690494e-06, + "loss": 0.9761, + "step": 6733 + }, + { + "epoch": 0.6072958470487442, + "grad_norm": 1.1171531938740848, + "learning_rate": 1.4109282962493877e-06, + "loss": 0.8983, + "step": 6734 + }, + { + "epoch": 0.6073860305722144, + "grad_norm": 1.561882888999942, + "learning_rate": 1.4103700260925716e-06, + "loss": 0.9319, + "step": 6735 + }, + { + "epoch": 0.6074762140956848, + "grad_norm": 1.5575192985747366, + "learning_rate": 1.4098118062462364e-06, + "loss": 0.9798, + "step": 6736 + }, + { + "epoch": 0.607566397619155, + "grad_norm": 1.207851785314521, + "learning_rate": 1.4092536367580123e-06, + "loss": 0.9366, + "step": 6737 + }, + { + "epoch": 0.6076565811426252, + "grad_norm": 1.52053250860544, + "learning_rate": 1.4086955176755248e-06, + "loss": 0.8557, + "step": 6738 + }, + { + "epoch": 0.6077467646660955, + "grad_norm": 1.4198207897950632, + "learning_rate": 1.4081374490463964e-06, + "loss": 0.8808, + "step": 6739 + }, + { + "epoch": 0.6078369481895658, + "grad_norm": 1.7310891169632285, + "learning_rate": 1.4075794309182443e-06, + "loss": 0.9602, + "step": 6740 + }, + { + "epoch": 0.607927131713036, + "grad_norm": 1.6779090159850958, + "learning_rate": 1.407021463338682e-06, + "loss": 0.9527, + "step": 6741 + }, + { + "epoch": 0.6080173152365063, + "grad_norm": 
0.6610093783349561, + "learning_rate": 1.4064635463553177e-06, + "loss": 0.7659, + "step": 6742 + }, + { + "epoch": 0.6081074987599766, + "grad_norm": 1.4796639622847412, + "learning_rate": 1.4059056800157563e-06, + "loss": 0.965, + "step": 6743 + }, + { + "epoch": 0.6081976822834468, + "grad_norm": 1.4077291550770537, + "learning_rate": 1.4053478643675982e-06, + "loss": 0.8725, + "step": 6744 + }, + { + "epoch": 0.6082878658069171, + "grad_norm": 1.3860276770257234, + "learning_rate": 1.4047900994584389e-06, + "loss": 0.8376, + "step": 6745 + }, + { + "epoch": 0.6083780493303873, + "grad_norm": 1.6375626093419362, + "learning_rate": 1.404232385335871e-06, + "loss": 0.8524, + "step": 6746 + }, + { + "epoch": 0.6084682328538576, + "grad_norm": 1.4223017628517536, + "learning_rate": 1.4036747220474806e-06, + "loss": 0.9466, + "step": 6747 + }, + { + "epoch": 0.6085584163773279, + "grad_norm": 1.4713017637210215, + "learning_rate": 1.4031171096408506e-06, + "loss": 0.8998, + "step": 6748 + }, + { + "epoch": 0.6086485999007981, + "grad_norm": 1.4126671498066639, + "learning_rate": 1.4025595481635607e-06, + "loss": 0.9652, + "step": 6749 + }, + { + "epoch": 0.6087387834242683, + "grad_norm": 1.627715849032893, + "learning_rate": 1.4020020376631836e-06, + "loss": 0.9621, + "step": 6750 + }, + { + "epoch": 0.6088289669477387, + "grad_norm": 1.5823332321738053, + "learning_rate": 1.4014445781872908e-06, + "loss": 0.9607, + "step": 6751 + }, + { + "epoch": 0.6089191504712089, + "grad_norm": 1.3421515338950474, + "learning_rate": 1.4008871697834465e-06, + "loss": 1.001, + "step": 6752 + }, + { + "epoch": 0.6090093339946792, + "grad_norm": 1.4485380728329702, + "learning_rate": 1.400329812499213e-06, + "loss": 0.9735, + "step": 6753 + }, + { + "epoch": 0.6090995175181494, + "grad_norm": 1.2893454963537083, + "learning_rate": 1.3997725063821458e-06, + "loss": 0.953, + "step": 6754 + }, + { + "epoch": 0.6091897010416197, + "grad_norm": 1.4385628295638189, + "learning_rate": 1.3992152514797978e-06, + "loss": 0.9077, + "step": 6755 + }, + { + "epoch": 0.60927988456509, + "grad_norm": 0.6275048843512853, + "learning_rate": 1.398658047839718e-06, + "loss": 0.8235, + "step": 6756 + }, + { + "epoch": 0.6093700680885602, + "grad_norm": 0.6592324468903594, + "learning_rate": 1.3981008955094481e-06, + "loss": 0.7911, + "step": 6757 + }, + { + "epoch": 0.6094602516120304, + "grad_norm": 1.18970916447755, + "learning_rate": 1.39754379453653e-06, + "loss": 0.9157, + "step": 6758 + }, + { + "epoch": 0.6095504351355008, + "grad_norm": 2.008581585663647, + "learning_rate": 1.3969867449684972e-06, + "loss": 0.9488, + "step": 6759 + }, + { + "epoch": 0.609640618658971, + "grad_norm": 1.8926825558536398, + "learning_rate": 1.396429746852879e-06, + "loss": 0.9239, + "step": 6760 + }, + { + "epoch": 0.6097308021824412, + "grad_norm": 1.8312884616393115, + "learning_rate": 1.395872800237204e-06, + "loss": 0.9195, + "step": 6761 + }, + { + "epoch": 0.6098209857059115, + "grad_norm": 0.9311412696599941, + "learning_rate": 1.3953159051689918e-06, + "loss": 0.8401, + "step": 6762 + }, + { + "epoch": 0.6099111692293818, + "grad_norm": 1.4486053073303926, + "learning_rate": 1.3947590616957618e-06, + "loss": 0.9821, + "step": 6763 + }, + { + "epoch": 0.610001352752852, + "grad_norm": 1.5513309302109903, + "learning_rate": 1.3942022698650258e-06, + "loss": 0.9788, + "step": 6764 + }, + { + "epoch": 0.6100915362763223, + "grad_norm": 1.3849868920459938, + "learning_rate": 1.3936455297242917e-06, + "loss": 0.974, + "step": 6765 + }, 
+ { + "epoch": 0.6101817197997926, + "grad_norm": 1.4371232907491691, + "learning_rate": 1.3930888413210652e-06, + "loss": 0.951, + "step": 6766 + }, + { + "epoch": 0.6102719033232629, + "grad_norm": 1.5307836795673517, + "learning_rate": 1.392532204702845e-06, + "loss": 0.9104, + "step": 6767 + }, + { + "epoch": 0.6103620868467331, + "grad_norm": 1.7534655588692774, + "learning_rate": 1.3919756199171266e-06, + "loss": 0.8648, + "step": 6768 + }, + { + "epoch": 0.6104522703702033, + "grad_norm": 1.2216349958726906, + "learning_rate": 1.3914190870114009e-06, + "loss": 1.0149, + "step": 6769 + }, + { + "epoch": 0.6105424538936737, + "grad_norm": 1.3376977489135673, + "learning_rate": 1.3908626060331541e-06, + "loss": 0.9861, + "step": 6770 + }, + { + "epoch": 0.6106326374171439, + "grad_norm": 1.3512884314168392, + "learning_rate": 1.3903061770298693e-06, + "loss": 0.9694, + "step": 6771 + }, + { + "epoch": 0.6107228209406141, + "grad_norm": 1.396554243567408, + "learning_rate": 1.3897498000490223e-06, + "loss": 0.9107, + "step": 6772 + }, + { + "epoch": 0.6108130044640844, + "grad_norm": 2.477561484458305, + "learning_rate": 1.3891934751380879e-06, + "loss": 1.007, + "step": 6773 + }, + { + "epoch": 0.6109031879875547, + "grad_norm": 1.3352413036561062, + "learning_rate": 1.3886372023445334e-06, + "loss": 0.953, + "step": 6774 + }, + { + "epoch": 0.610993371511025, + "grad_norm": 1.2978954207312747, + "learning_rate": 1.3880809817158246e-06, + "loss": 1.0092, + "step": 6775 + }, + { + "epoch": 0.6110835550344952, + "grad_norm": 1.5529965353323465, + "learning_rate": 1.3875248132994206e-06, + "loss": 0.9336, + "step": 6776 + }, + { + "epoch": 0.6111737385579654, + "grad_norm": 1.538844369059945, + "learning_rate": 1.386968697142776e-06, + "loss": 0.8662, + "step": 6777 + }, + { + "epoch": 0.6112639220814358, + "grad_norm": 1.319543377474428, + "learning_rate": 1.386412633293343e-06, + "loss": 0.9868, + "step": 6778 + }, + { + "epoch": 0.611354105604906, + "grad_norm": 1.5325800510426866, + "learning_rate": 1.3858566217985672e-06, + "loss": 0.9144, + "step": 6779 + }, + { + "epoch": 0.6114442891283762, + "grad_norm": 1.5370958029548931, + "learning_rate": 1.3853006627058905e-06, + "loss": 1.0267, + "step": 6780 + }, + { + "epoch": 0.6115344726518465, + "grad_norm": 1.7454261954490558, + "learning_rate": 1.3847447560627512e-06, + "loss": 0.8276, + "step": 6781 + }, + { + "epoch": 0.6116246561753168, + "grad_norm": 1.4635722834777356, + "learning_rate": 1.3841889019165812e-06, + "loss": 0.9855, + "step": 6782 + }, + { + "epoch": 0.611714839698787, + "grad_norm": 1.8542945117666174, + "learning_rate": 1.3836331003148106e-06, + "loss": 0.8952, + "step": 6783 + }, + { + "epoch": 0.6118050232222573, + "grad_norm": 1.2761490393896664, + "learning_rate": 1.3830773513048612e-06, + "loss": 0.9845, + "step": 6784 + }, + { + "epoch": 0.6118952067457275, + "grad_norm": 1.5283444923706235, + "learning_rate": 1.382521654934155e-06, + "loss": 0.9619, + "step": 6785 + }, + { + "epoch": 0.6119853902691978, + "grad_norm": 1.7375510064192208, + "learning_rate": 1.3819660112501057e-06, + "loss": 0.8903, + "step": 6786 + }, + { + "epoch": 0.6120755737926681, + "grad_norm": 1.4436688123722436, + "learning_rate": 1.3814104203001234e-06, + "loss": 0.9088, + "step": 6787 + }, + { + "epoch": 0.6121657573161383, + "grad_norm": 1.4439667080821783, + "learning_rate": 1.3808548821316156e-06, + "loss": 0.9849, + "step": 6788 + }, + { + "epoch": 0.6122559408396087, + "grad_norm": 1.2401321969782317, + "learning_rate": 
1.3802993967919824e-06, + "loss": 0.8731, + "step": 6789 + }, + { + "epoch": 0.6123461243630789, + "grad_norm": 1.7091395716361615, + "learning_rate": 1.3797439643286227e-06, + "loss": 0.9029, + "step": 6790 + }, + { + "epoch": 0.6124363078865491, + "grad_norm": 1.2684889050255235, + "learning_rate": 1.3791885847889277e-06, + "loss": 0.9259, + "step": 6791 + }, + { + "epoch": 0.6125264914100194, + "grad_norm": 1.4388605396484306, + "learning_rate": 1.3786332582202853e-06, + "loss": 0.9735, + "step": 6792 + }, + { + "epoch": 0.6126166749334897, + "grad_norm": 1.3145992656937275, + "learning_rate": 1.3780779846700799e-06, + "loss": 0.9441, + "step": 6793 + }, + { + "epoch": 0.6127068584569599, + "grad_norm": 1.2825324455603844, + "learning_rate": 1.3775227641856899e-06, + "loss": 0.9527, + "step": 6794 + }, + { + "epoch": 0.6127970419804302, + "grad_norm": 1.3518354989117052, + "learning_rate": 1.37696759681449e-06, + "loss": 0.9439, + "step": 6795 + }, + { + "epoch": 0.6128872255039004, + "grad_norm": 0.7429209247523423, + "learning_rate": 1.37641248260385e-06, + "loss": 0.8446, + "step": 6796 + }, + { + "epoch": 0.6129774090273707, + "grad_norm": 1.4990066577084058, + "learning_rate": 1.375857421601136e-06, + "loss": 0.9921, + "step": 6797 + }, + { + "epoch": 0.613067592550841, + "grad_norm": 1.4717574475558244, + "learning_rate": 1.3753024138537082e-06, + "loss": 0.9498, + "step": 6798 + }, + { + "epoch": 0.6131577760743112, + "grad_norm": 1.279219530402859, + "learning_rate": 1.3747474594089221e-06, + "loss": 1.0064, + "step": 6799 + }, + { + "epoch": 0.6132479595977814, + "grad_norm": 1.6602553394141586, + "learning_rate": 1.374192558314131e-06, + "loss": 0.9202, + "step": 6800 + }, + { + "epoch": 0.6133381431212518, + "grad_norm": 1.805447703180651, + "learning_rate": 1.373637710616681e-06, + "loss": 1.0045, + "step": 6801 + }, + { + "epoch": 0.613428326644722, + "grad_norm": 1.6442639638833587, + "learning_rate": 1.373082916363916e-06, + "loss": 1.0426, + "step": 6802 + }, + { + "epoch": 0.6135185101681923, + "grad_norm": 1.357317320824462, + "learning_rate": 1.3725281756031732e-06, + "loss": 0.9842, + "step": 6803 + }, + { + "epoch": 0.6136086936916625, + "grad_norm": 1.2303149899279975, + "learning_rate": 1.3719734883817858e-06, + "loss": 0.9391, + "step": 6804 + }, + { + "epoch": 0.6136988772151328, + "grad_norm": 1.4476102966821494, + "learning_rate": 1.371418854747084e-06, + "loss": 1.0585, + "step": 6805 + }, + { + "epoch": 0.6137890607386031, + "grad_norm": 5.44868949042666, + "learning_rate": 1.3708642747463905e-06, + "loss": 0.9796, + "step": 6806 + }, + { + "epoch": 0.6138792442620733, + "grad_norm": 1.3561376410848118, + "learning_rate": 1.370309748427027e-06, + "loss": 0.979, + "step": 6807 + }, + { + "epoch": 0.6139694277855435, + "grad_norm": 1.4443029228393756, + "learning_rate": 1.3697552758363079e-06, + "loss": 1.0229, + "step": 6808 + }, + { + "epoch": 0.6140596113090139, + "grad_norm": 1.7103374944554615, + "learning_rate": 1.3692008570215432e-06, + "loss": 0.9466, + "step": 6809 + }, + { + "epoch": 0.6141497948324841, + "grad_norm": 1.3956892404151244, + "learning_rate": 1.3686464920300398e-06, + "loss": 0.8967, + "step": 6810 + }, + { + "epoch": 0.6142399783559543, + "grad_norm": 1.8773347305925725, + "learning_rate": 1.3680921809090985e-06, + "loss": 0.9204, + "step": 6811 + }, + { + "epoch": 0.6143301618794246, + "grad_norm": 1.473251910369116, + "learning_rate": 1.3675379237060175e-06, + "loss": 0.9783, + "step": 6812 + }, + { + "epoch": 0.6144203454028949, + 
"grad_norm": 1.3741268940512354, + "learning_rate": 1.366983720468088e-06, + "loss": 0.9377, + "step": 6813 + }, + { + "epoch": 0.6145105289263652, + "grad_norm": 1.1825438523175849, + "learning_rate": 1.3664295712425972e-06, + "loss": 0.9497, + "step": 6814 + }, + { + "epoch": 0.6146007124498354, + "grad_norm": 1.410788523771129, + "learning_rate": 1.3658754760768296e-06, + "loss": 0.8822, + "step": 6815 + }, + { + "epoch": 0.6146908959733057, + "grad_norm": 1.4143888823956907, + "learning_rate": 1.3653214350180621e-06, + "loss": 0.9854, + "step": 6816 + }, + { + "epoch": 0.614781079496776, + "grad_norm": 1.3710180625782997, + "learning_rate": 1.3647674481135703e-06, + "loss": 1.0157, + "step": 6817 + }, + { + "epoch": 0.6148712630202462, + "grad_norm": 1.240322464345215, + "learning_rate": 1.3642135154106217e-06, + "loss": 0.8452, + "step": 6818 + }, + { + "epoch": 0.6149614465437164, + "grad_norm": 1.3874614177528932, + "learning_rate": 1.363659636956482e-06, + "loss": 0.8926, + "step": 6819 + }, + { + "epoch": 0.6150516300671868, + "grad_norm": 1.2951515880072353, + "learning_rate": 1.3631058127984112e-06, + "loss": 0.9095, + "step": 6820 + }, + { + "epoch": 0.615141813590657, + "grad_norm": 1.5492106827503254, + "learning_rate": 1.3625520429836632e-06, + "loss": 0.9241, + "step": 6821 + }, + { + "epoch": 0.6152319971141272, + "grad_norm": 1.56641652396393, + "learning_rate": 1.361998327559491e-06, + "loss": 0.8529, + "step": 6822 + }, + { + "epoch": 0.6153221806375975, + "grad_norm": 0.6535483562438013, + "learning_rate": 1.3614446665731385e-06, + "loss": 0.8057, + "step": 6823 + }, + { + "epoch": 0.6154123641610678, + "grad_norm": 1.592659446243796, + "learning_rate": 1.3608910600718484e-06, + "loss": 0.9535, + "step": 6824 + }, + { + "epoch": 0.615502547684538, + "grad_norm": 1.457755882741971, + "learning_rate": 1.360337508102857e-06, + "loss": 0.9504, + "step": 6825 + }, + { + "epoch": 0.6155927312080083, + "grad_norm": 1.823152675791305, + "learning_rate": 1.3597840107133962e-06, + "loss": 1.0448, + "step": 6826 + }, + { + "epoch": 0.6156829147314785, + "grad_norm": 1.7621393145802229, + "learning_rate": 1.3592305679506944e-06, + "loss": 0.9493, + "step": 6827 + }, + { + "epoch": 0.6157730982549489, + "grad_norm": 1.3086025864032347, + "learning_rate": 1.3586771798619726e-06, + "loss": 0.8552, + "step": 6828 + }, + { + "epoch": 0.6158632817784191, + "grad_norm": 2.9549793698386244, + "learning_rate": 1.358123846494451e-06, + "loss": 1.005, + "step": 6829 + }, + { + "epoch": 0.6159534653018893, + "grad_norm": 1.9378783564149413, + "learning_rate": 1.3575705678953422e-06, + "loss": 0.9445, + "step": 6830 + }, + { + "epoch": 0.6160436488253596, + "grad_norm": 1.562736874745109, + "learning_rate": 1.357017344111854e-06, + "loss": 0.9806, + "step": 6831 + }, + { + "epoch": 0.6161338323488299, + "grad_norm": 1.631442284487044, + "learning_rate": 1.356464175191192e-06, + "loss": 0.9546, + "step": 6832 + }, + { + "epoch": 0.6162240158723001, + "grad_norm": 1.985459099149706, + "learning_rate": 1.3559110611805542e-06, + "loss": 0.9736, + "step": 6833 + }, + { + "epoch": 0.6163141993957704, + "grad_norm": 1.2957371695470405, + "learning_rate": 1.3553580021271372e-06, + "loss": 0.9858, + "step": 6834 + }, + { + "epoch": 0.6164043829192406, + "grad_norm": 1.4205314240892213, + "learning_rate": 1.3548049980781297e-06, + "loss": 0.89, + "step": 6835 + }, + { + "epoch": 0.616494566442711, + "grad_norm": 1.7749972437447652, + "learning_rate": 1.3542520490807166e-06, + "loss": 0.9404, + "step": 
6836 + }, + { + "epoch": 0.6165847499661812, + "grad_norm": 1.672697987719459, + "learning_rate": 1.3536991551820802e-06, + "loss": 0.9438, + "step": 6837 + }, + { + "epoch": 0.6166749334896514, + "grad_norm": 1.2188715751849426, + "learning_rate": 1.3531463164293952e-06, + "loss": 0.92, + "step": 6838 + }, + { + "epoch": 0.6167651170131218, + "grad_norm": 1.8443089947060094, + "learning_rate": 1.3525935328698332e-06, + "loss": 0.8561, + "step": 6839 + }, + { + "epoch": 0.616855300536592, + "grad_norm": 1.566220622110638, + "learning_rate": 1.3520408045505605e-06, + "loss": 0.8824, + "step": 6840 + }, + { + "epoch": 0.6169454840600622, + "grad_norm": 1.4927749675706457, + "learning_rate": 1.3514881315187396e-06, + "loss": 0.9096, + "step": 6841 + }, + { + "epoch": 0.6170356675835325, + "grad_norm": 1.2536555719994473, + "learning_rate": 1.3509355138215273e-06, + "loss": 0.9977, + "step": 6842 + }, + { + "epoch": 0.6171258511070028, + "grad_norm": 1.4417854781980648, + "learning_rate": 1.350382951506075e-06, + "loss": 1.0255, + "step": 6843 + }, + { + "epoch": 0.617216034630473, + "grad_norm": 0.6674026187126881, + "learning_rate": 1.349830444619532e-06, + "loss": 0.7884, + "step": 6844 + }, + { + "epoch": 0.6173062181539433, + "grad_norm": 1.393224237260981, + "learning_rate": 1.3492779932090397e-06, + "loss": 0.9622, + "step": 6845 + }, + { + "epoch": 0.6173964016774135, + "grad_norm": 1.2992744310000395, + "learning_rate": 1.3487255973217377e-06, + "loss": 0.9688, + "step": 6846 + }, + { + "epoch": 0.6174865852008838, + "grad_norm": 1.5522374155594547, + "learning_rate": 1.3481732570047592e-06, + "loss": 1.0193, + "step": 6847 + }, + { + "epoch": 0.6175767687243541, + "grad_norm": 1.709996909869341, + "learning_rate": 1.3476209723052314e-06, + "loss": 0.8719, + "step": 6848 + }, + { + "epoch": 0.6176669522478243, + "grad_norm": 1.5252641730457397, + "learning_rate": 1.3470687432702806e-06, + "loss": 0.9565, + "step": 6849 + }, + { + "epoch": 0.6177571357712945, + "grad_norm": 1.561919676777885, + "learning_rate": 1.346516569947024e-06, + "loss": 0.8772, + "step": 6850 + }, + { + "epoch": 0.6178473192947649, + "grad_norm": 1.681152485568539, + "learning_rate": 1.3459644523825774e-06, + "loss": 0.9347, + "step": 6851 + }, + { + "epoch": 0.6179375028182351, + "grad_norm": 1.5185332563550025, + "learning_rate": 1.34541239062405e-06, + "loss": 0.9915, + "step": 6852 + }, + { + "epoch": 0.6180276863417054, + "grad_norm": 1.294657592501664, + "learning_rate": 1.3448603847185464e-06, + "loss": 0.9454, + "step": 6853 + }, + { + "epoch": 0.6181178698651756, + "grad_norm": 1.461756267708366, + "learning_rate": 1.344308434713168e-06, + "loss": 0.8871, + "step": 6854 + }, + { + "epoch": 0.6182080533886459, + "grad_norm": 1.6555668123589353, + "learning_rate": 1.3437565406550083e-06, + "loss": 0.885, + "step": 6855 + }, + { + "epoch": 0.6182982369121162, + "grad_norm": 2.238054780004999, + "learning_rate": 1.34320470259116e-06, + "loss": 0.9342, + "step": 6856 + }, + { + "epoch": 0.6183884204355864, + "grad_norm": 1.3780908524138435, + "learning_rate": 1.3426529205687078e-06, + "loss": 0.8471, + "step": 6857 + }, + { + "epoch": 0.6184786039590566, + "grad_norm": 1.4263142464800365, + "learning_rate": 1.3421011946347323e-06, + "loss": 1.0013, + "step": 6858 + }, + { + "epoch": 0.618568787482527, + "grad_norm": 1.3456648556786321, + "learning_rate": 1.3415495248363113e-06, + "loss": 0.9071, + "step": 6859 + }, + { + "epoch": 0.6186589710059972, + "grad_norm": 1.8463052403289952, + "learning_rate": 
1.3409979112205148e-06, + "loss": 0.9694, + "step": 6860 + }, + { + "epoch": 0.6187491545294674, + "grad_norm": 1.578787334178901, + "learning_rate": 1.3404463538344107e-06, + "loss": 0.9739, + "step": 6861 + }, + { + "epoch": 0.6188393380529378, + "grad_norm": 1.7617751152681076, + "learning_rate": 1.3398948527250602e-06, + "loss": 1.0356, + "step": 6862 + }, + { + "epoch": 0.618929521576408, + "grad_norm": 0.7728793124060558, + "learning_rate": 1.3393434079395207e-06, + "loss": 0.8156, + "step": 6863 + }, + { + "epoch": 0.6190197050998782, + "grad_norm": 1.349669216391643, + "learning_rate": 1.3387920195248449e-06, + "loss": 0.9313, + "step": 6864 + }, + { + "epoch": 0.6191098886233485, + "grad_norm": 1.2412175164097135, + "learning_rate": 1.3382406875280791e-06, + "loss": 0.954, + "step": 6865 + }, + { + "epoch": 0.6192000721468188, + "grad_norm": 1.3630220882811823, + "learning_rate": 1.3376894119962672e-06, + "loss": 0.9648, + "step": 6866 + }, + { + "epoch": 0.6192902556702891, + "grad_norm": 2.6848759815408387, + "learning_rate": 1.3371381929764464e-06, + "loss": 0.9698, + "step": 6867 + }, + { + "epoch": 0.6193804391937593, + "grad_norm": 0.7257053282153273, + "learning_rate": 1.3365870305156506e-06, + "loss": 0.7926, + "step": 6868 + }, + { + "epoch": 0.6194706227172295, + "grad_norm": 1.805001830102727, + "learning_rate": 1.3360359246609073e-06, + "loss": 0.9532, + "step": 6869 + }, + { + "epoch": 0.6195608062406999, + "grad_norm": 1.6195961526543805, + "learning_rate": 1.3354848754592387e-06, + "loss": 0.9449, + "step": 6870 + }, + { + "epoch": 0.6196509897641701, + "grad_norm": 1.4504487198368452, + "learning_rate": 1.334933882957666e-06, + "loss": 1.0352, + "step": 6871 + }, + { + "epoch": 0.6197411732876403, + "grad_norm": 1.3384544799354643, + "learning_rate": 1.3343829472032004e-06, + "loss": 0.9055, + "step": 6872 + }, + { + "epoch": 0.6198313568111106, + "grad_norm": 1.5230830636119288, + "learning_rate": 1.3338320682428527e-06, + "loss": 0.9308, + "step": 6873 + }, + { + "epoch": 0.6199215403345809, + "grad_norm": 1.31897582159086, + "learning_rate": 1.3332812461236263e-06, + "loss": 0.9117, + "step": 6874 + }, + { + "epoch": 0.6200117238580511, + "grad_norm": 1.2626208219218316, + "learning_rate": 1.3327304808925192e-06, + "loss": 0.9088, + "step": 6875 + }, + { + "epoch": 0.6201019073815214, + "grad_norm": 1.5581074579860381, + "learning_rate": 1.332179772596528e-06, + "loss": 0.8049, + "step": 6876 + }, + { + "epoch": 0.6201920909049916, + "grad_norm": 1.4844638195569086, + "learning_rate": 1.3316291212826402e-06, + "loss": 0.8674, + "step": 6877 + }, + { + "epoch": 0.620282274428462, + "grad_norm": 1.5089205292642995, + "learning_rate": 1.3310785269978413e-06, + "loss": 0.9904, + "step": 6878 + }, + { + "epoch": 0.6203724579519322, + "grad_norm": 1.5728494827709614, + "learning_rate": 1.3305279897891111e-06, + "loss": 0.9878, + "step": 6879 + }, + { + "epoch": 0.6204626414754024, + "grad_norm": 1.577567804756463, + "learning_rate": 1.329977509703424e-06, + "loss": 0.8076, + "step": 6880 + }, + { + "epoch": 0.6205528249988727, + "grad_norm": 1.5706249324800088, + "learning_rate": 1.32942708678775e-06, + "loss": 1.0311, + "step": 6881 + }, + { + "epoch": 0.620643008522343, + "grad_norm": 1.427383818165798, + "learning_rate": 1.3288767210890548e-06, + "loss": 0.9523, + "step": 6882 + }, + { + "epoch": 0.6207331920458132, + "grad_norm": 1.4242260119262973, + "learning_rate": 1.3283264126542986e-06, + "loss": 0.9086, + "step": 6883 + }, + { + "epoch": 0.6208233755692835, 
+ "grad_norm": 1.3300008653336772, + "learning_rate": 1.3277761615304356e-06, + "loss": 0.9448, + "step": 6884 + }, + { + "epoch": 0.6209135590927538, + "grad_norm": 1.1855754817421031, + "learning_rate": 1.3272259677644178e-06, + "loss": 1.0369, + "step": 6885 + }, + { + "epoch": 0.621003742616224, + "grad_norm": 1.585955164017047, + "learning_rate": 1.32667583140319e-06, + "loss": 0.8174, + "step": 6886 + }, + { + "epoch": 0.6210939261396943, + "grad_norm": 1.616248332625353, + "learning_rate": 1.3261257524936924e-06, + "loss": 0.9851, + "step": 6887 + }, + { + "epoch": 0.6211841096631645, + "grad_norm": 1.3574905487911777, + "learning_rate": 1.3255757310828619e-06, + "loss": 0.981, + "step": 6888 + }, + { + "epoch": 0.6212742931866349, + "grad_norm": 1.5449411653840708, + "learning_rate": 1.3250257672176282e-06, + "loss": 0.9645, + "step": 6889 + }, + { + "epoch": 0.6213644767101051, + "grad_norm": 1.398532629773266, + "learning_rate": 1.3244758609449183e-06, + "loss": 0.851, + "step": 6890 + }, + { + "epoch": 0.6214546602335753, + "grad_norm": 1.3835185775281587, + "learning_rate": 1.323926012311653e-06, + "loss": 0.966, + "step": 6891 + }, + { + "epoch": 0.6215448437570456, + "grad_norm": 1.4044014118530816, + "learning_rate": 1.3233762213647476e-06, + "loss": 0.9679, + "step": 6892 + }, + { + "epoch": 0.6216350272805159, + "grad_norm": 1.6569219701578135, + "learning_rate": 1.3228264881511137e-06, + "loss": 0.9143, + "step": 6893 + }, + { + "epoch": 0.6217252108039861, + "grad_norm": 1.5925002340319594, + "learning_rate": 1.322276812717658e-06, + "loss": 1.0312, + "step": 6894 + }, + { + "epoch": 0.6218153943274564, + "grad_norm": 1.262428532834323, + "learning_rate": 1.3217271951112818e-06, + "loss": 0.854, + "step": 6895 + }, + { + "epoch": 0.6219055778509266, + "grad_norm": 1.42316403489153, + "learning_rate": 1.321177635378881e-06, + "loss": 0.9241, + "step": 6896 + }, + { + "epoch": 0.6219957613743969, + "grad_norm": 1.6135047591511296, + "learning_rate": 1.3206281335673475e-06, + "loss": 0.8406, + "step": 6897 + }, + { + "epoch": 0.6220859448978672, + "grad_norm": 1.77107451049424, + "learning_rate": 1.3200786897235677e-06, + "loss": 0.9151, + "step": 6898 + }, + { + "epoch": 0.6221761284213374, + "grad_norm": 1.4208789732343814, + "learning_rate": 1.3195293038944227e-06, + "loss": 0.9684, + "step": 6899 + }, + { + "epoch": 0.6222663119448076, + "grad_norm": 1.238165624376667, + "learning_rate": 1.3189799761267902e-06, + "loss": 0.9389, + "step": 6900 + }, + { + "epoch": 0.622356495468278, + "grad_norm": 1.6900044682686328, + "learning_rate": 1.3184307064675412e-06, + "loss": 0.9422, + "step": 6901 + }, + { + "epoch": 0.6224466789917482, + "grad_norm": 0.779025802696703, + "learning_rate": 1.3178814949635416e-06, + "loss": 0.8468, + "step": 6902 + }, + { + "epoch": 0.6225368625152184, + "grad_norm": 1.8077714340042905, + "learning_rate": 1.3173323416616549e-06, + "loss": 0.9008, + "step": 6903 + }, + { + "epoch": 0.6226270460386887, + "grad_norm": 1.4631194278263848, + "learning_rate": 1.3167832466087361e-06, + "loss": 0.9894, + "step": 6904 + }, + { + "epoch": 0.622717229562159, + "grad_norm": 1.3835109520062676, + "learning_rate": 1.3162342098516388e-06, + "loss": 0.8973, + "step": 6905 + }, + { + "epoch": 0.6228074130856293, + "grad_norm": 1.3800039271976041, + "learning_rate": 1.3156852314372086e-06, + "loss": 0.8668, + "step": 6906 + }, + { + "epoch": 0.6228975966090995, + "grad_norm": 0.7942277281691048, + "learning_rate": 1.3151363114122882e-06, + "loss": 0.8034, + 
"step": 6907 + }, + { + "epoch": 0.6229877801325698, + "grad_norm": 1.3713921116342105, + "learning_rate": 1.3145874498237133e-06, + "loss": 0.9508, + "step": 6908 + }, + { + "epoch": 0.6230779636560401, + "grad_norm": 1.4343640495331085, + "learning_rate": 1.3140386467183166e-06, + "loss": 0.9935, + "step": 6909 + }, + { + "epoch": 0.6231681471795103, + "grad_norm": 1.448611232375159, + "learning_rate": 1.3134899021429258e-06, + "loss": 0.9858, + "step": 6910 + }, + { + "epoch": 0.6232583307029805, + "grad_norm": 1.6197346700798685, + "learning_rate": 1.3129412161443604e-06, + "loss": 0.9288, + "step": 6911 + }, + { + "epoch": 0.6233485142264509, + "grad_norm": 1.5502968903739769, + "learning_rate": 1.3123925887694402e-06, + "loss": 0.9493, + "step": 6912 + }, + { + "epoch": 0.6234386977499211, + "grad_norm": 1.2566822731674698, + "learning_rate": 1.3118440200649756e-06, + "loss": 0.9527, + "step": 6913 + }, + { + "epoch": 0.6235288812733913, + "grad_norm": 2.183113604935493, + "learning_rate": 1.3112955100777727e-06, + "loss": 0.9092, + "step": 6914 + }, + { + "epoch": 0.6236190647968616, + "grad_norm": 1.2165732384870902, + "learning_rate": 1.3107470588546353e-06, + "loss": 0.975, + "step": 6915 + }, + { + "epoch": 0.6237092483203319, + "grad_norm": 1.5092482772294114, + "learning_rate": 1.3101986664423583e-06, + "loss": 0.9254, + "step": 6916 + }, + { + "epoch": 0.6237994318438022, + "grad_norm": 1.4424186810557953, + "learning_rate": 1.3096503328877358e-06, + "loss": 0.9155, + "step": 6917 + }, + { + "epoch": 0.6238896153672724, + "grad_norm": 1.3245515658319198, + "learning_rate": 1.309102058237553e-06, + "loss": 0.9673, + "step": 6918 + }, + { + "epoch": 0.6239797988907426, + "grad_norm": 1.090521215621634, + "learning_rate": 1.3085538425385917e-06, + "loss": 0.9859, + "step": 6919 + }, + { + "epoch": 0.624069982414213, + "grad_norm": 1.8096414583963247, + "learning_rate": 1.3080056858376298e-06, + "loss": 0.9738, + "step": 6920 + }, + { + "epoch": 0.6241601659376832, + "grad_norm": 1.7647926596735304, + "learning_rate": 1.3074575881814383e-06, + "loss": 0.9282, + "step": 6921 + }, + { + "epoch": 0.6242503494611534, + "grad_norm": 1.4559209034258938, + "learning_rate": 1.3069095496167841e-06, + "loss": 0.875, + "step": 6922 + }, + { + "epoch": 0.6243405329846237, + "grad_norm": 1.3178442732861564, + "learning_rate": 1.3063615701904285e-06, + "loss": 0.8561, + "step": 6923 + }, + { + "epoch": 0.624430716508094, + "grad_norm": 2.4181795536768567, + "learning_rate": 1.3058136499491283e-06, + "loss": 0.9709, + "step": 6924 + }, + { + "epoch": 0.6245209000315642, + "grad_norm": 1.776858353619017, + "learning_rate": 1.3052657889396357e-06, + "loss": 0.9204, + "step": 6925 + }, + { + "epoch": 0.6246110835550345, + "grad_norm": 1.3973401841855335, + "learning_rate": 1.304717987208696e-06, + "loss": 0.9316, + "step": 6926 + }, + { + "epoch": 0.6247012670785047, + "grad_norm": 1.2608153708305232, + "learning_rate": 1.304170244803052e-06, + "loss": 0.9358, + "step": 6927 + }, + { + "epoch": 0.624791450601975, + "grad_norm": 1.3547527609904528, + "learning_rate": 1.3036225617694387e-06, + "loss": 0.9139, + "step": 6928 + }, + { + "epoch": 0.6248816341254453, + "grad_norm": 1.4104586881897987, + "learning_rate": 1.3030749381545892e-06, + "loss": 0.9768, + "step": 6929 + }, + { + "epoch": 0.6249718176489155, + "grad_norm": 1.7027857031265252, + "learning_rate": 1.3025273740052285e-06, + "loss": 0.8847, + "step": 6930 + }, + { + "epoch": 0.6250620011723858, + "grad_norm": 1.1986789544684682, + 
"learning_rate": 1.3019798693680774e-06, + "loss": 0.9904, + "step": 6931 + }, + { + "epoch": 0.6251521846958561, + "grad_norm": 1.2909798436825954, + "learning_rate": 1.3014324242898536e-06, + "loss": 1.0581, + "step": 6932 + }, + { + "epoch": 0.6252423682193263, + "grad_norm": 1.6209112225460507, + "learning_rate": 1.3008850388172668e-06, + "loss": 0.9972, + "step": 6933 + }, + { + "epoch": 0.6253325517427966, + "grad_norm": 1.3128229379727052, + "learning_rate": 1.3003377129970233e-06, + "loss": 0.9009, + "step": 6934 + }, + { + "epoch": 0.6254227352662669, + "grad_norm": 1.2748490449889176, + "learning_rate": 1.2997904468758243e-06, + "loss": 0.8662, + "step": 6935 + }, + { + "epoch": 0.6255129187897371, + "grad_norm": 2.244015257730818, + "learning_rate": 1.299243240500365e-06, + "loss": 0.9593, + "step": 6936 + }, + { + "epoch": 0.6256031023132074, + "grad_norm": 1.5237942009039063, + "learning_rate": 1.2986960939173368e-06, + "loss": 0.9607, + "step": 6937 + }, + { + "epoch": 0.6256932858366776, + "grad_norm": 1.4761585308682144, + "learning_rate": 1.298149007173424e-06, + "loss": 0.9851, + "step": 6938 + }, + { + "epoch": 0.625783469360148, + "grad_norm": 1.797991397523085, + "learning_rate": 1.2976019803153087e-06, + "loss": 0.9731, + "step": 6939 + }, + { + "epoch": 0.6258736528836182, + "grad_norm": 1.473057780684446, + "learning_rate": 1.2970550133896652e-06, + "loss": 0.89, + "step": 6940 + }, + { + "epoch": 0.6259638364070884, + "grad_norm": 1.4749854006287217, + "learning_rate": 1.2965081064431634e-06, + "loss": 1.0293, + "step": 6941 + }, + { + "epoch": 0.6260540199305586, + "grad_norm": 1.5521096178257097, + "learning_rate": 1.2959612595224698e-06, + "loss": 0.9626, + "step": 6942 + }, + { + "epoch": 0.626144203454029, + "grad_norm": 1.7074455963002015, + "learning_rate": 1.2954144726742424e-06, + "loss": 0.9561, + "step": 6943 + }, + { + "epoch": 0.6262343869774992, + "grad_norm": 1.673765521842885, + "learning_rate": 1.2948677459451385e-06, + "loss": 0.9404, + "step": 6944 + }, + { + "epoch": 0.6263245705009695, + "grad_norm": 1.297333601790502, + "learning_rate": 1.2943210793818064e-06, + "loss": 0.9908, + "step": 6945 + }, + { + "epoch": 0.6264147540244397, + "grad_norm": 1.5208282818993413, + "learning_rate": 1.2937744730308899e-06, + "loss": 0.9205, + "step": 6946 + }, + { + "epoch": 0.62650493754791, + "grad_norm": 1.7476093447942904, + "learning_rate": 1.2932279269390305e-06, + "loss": 0.9381, + "step": 6947 + }, + { + "epoch": 0.6265951210713803, + "grad_norm": 1.5977088527833627, + "learning_rate": 1.292681441152861e-06, + "loss": 0.8527, + "step": 6948 + }, + { + "epoch": 0.6266853045948505, + "grad_norm": 1.6362394300196639, + "learning_rate": 1.292135015719011e-06, + "loss": 0.9534, + "step": 6949 + }, + { + "epoch": 0.6267754881183207, + "grad_norm": 3.3587076633228405, + "learning_rate": 1.2915886506841046e-06, + "loss": 0.9065, + "step": 6950 + }, + { + "epoch": 0.6268656716417911, + "grad_norm": 1.422151559615731, + "learning_rate": 1.2910423460947613e-06, + "loss": 0.879, + "step": 6951 + }, + { + "epoch": 0.6269558551652613, + "grad_norm": 1.5484330778703568, + "learning_rate": 1.290496101997594e-06, + "loss": 0.9182, + "step": 6952 + }, + { + "epoch": 0.6270460386887315, + "grad_norm": 0.7703419786116963, + "learning_rate": 1.2899499184392105e-06, + "loss": 0.8219, + "step": 6953 + }, + { + "epoch": 0.6271362222122018, + "grad_norm": 2.9887473148092973, + "learning_rate": 1.289403795466216e-06, + "loss": 0.9179, + "step": 6954 + }, + { + "epoch": 
0.6272264057356721, + "grad_norm": 1.4631068804505794, + "learning_rate": 1.288857733125207e-06, + "loss": 0.9543, + "step": 6955 + }, + { + "epoch": 0.6273165892591424, + "grad_norm": 1.4399746864796015, + "learning_rate": 1.2883117314627785e-06, + "loss": 0.9368, + "step": 6956 + }, + { + "epoch": 0.6274067727826126, + "grad_norm": 1.3758088247209987, + "learning_rate": 1.2877657905255168e-06, + "loss": 0.8807, + "step": 6957 + }, + { + "epoch": 0.6274969563060829, + "grad_norm": 1.4207931427104963, + "learning_rate": 1.2872199103600046e-06, + "loss": 1.0055, + "step": 6958 + }, + { + "epoch": 0.6275871398295532, + "grad_norm": 1.5548841649176461, + "learning_rate": 1.286674091012821e-06, + "loss": 1.0119, + "step": 6959 + }, + { + "epoch": 0.6276773233530234, + "grad_norm": 1.3114634916988346, + "learning_rate": 1.2861283325305356e-06, + "loss": 0.9967, + "step": 6960 + }, + { + "epoch": 0.6277675068764936, + "grad_norm": 1.4523057987041947, + "learning_rate": 1.2855826349597185e-06, + "loss": 0.947, + "step": 6961 + }, + { + "epoch": 0.627857690399964, + "grad_norm": 1.2865976060029973, + "learning_rate": 1.2850369983469302e-06, + "loss": 0.8078, + "step": 6962 + }, + { + "epoch": 0.6279478739234342, + "grad_norm": 1.4359205110061708, + "learning_rate": 1.2844914227387266e-06, + "loss": 0.9739, + "step": 6963 + }, + { + "epoch": 0.6280380574469044, + "grad_norm": 1.356284388093031, + "learning_rate": 1.2839459081816606e-06, + "loss": 0.9661, + "step": 6964 + }, + { + "epoch": 0.6281282409703747, + "grad_norm": 1.4509849787380844, + "learning_rate": 1.283400454722278e-06, + "loss": 1.0086, + "step": 6965 + }, + { + "epoch": 0.628218424493845, + "grad_norm": 1.3538835033832628, + "learning_rate": 1.28285506240712e-06, + "loss": 1.0104, + "step": 6966 + }, + { + "epoch": 0.6283086080173153, + "grad_norm": 1.2985696865326695, + "learning_rate": 1.2823097312827225e-06, + "loss": 0.9673, + "step": 6967 + }, + { + "epoch": 0.6283987915407855, + "grad_norm": 1.3840465750244293, + "learning_rate": 1.2817644613956153e-06, + "loss": 0.9544, + "step": 6968 + }, + { + "epoch": 0.6284889750642557, + "grad_norm": 1.488416930894889, + "learning_rate": 1.2812192527923253e-06, + "loss": 0.9997, + "step": 6969 + }, + { + "epoch": 0.6285791585877261, + "grad_norm": 1.428036179065535, + "learning_rate": 1.2806741055193712e-06, + "loss": 0.985, + "step": 6970 + }, + { + "epoch": 0.6286693421111963, + "grad_norm": 1.468046628663635, + "learning_rate": 1.2801290196232695e-06, + "loss": 0.8983, + "step": 6971 + }, + { + "epoch": 0.6287595256346665, + "grad_norm": 2.0702690839713584, + "learning_rate": 1.2795839951505282e-06, + "loss": 0.9344, + "step": 6972 + }, + { + "epoch": 0.6288497091581368, + "grad_norm": 1.5524561218842794, + "learning_rate": 1.2790390321476538e-06, + "loss": 0.9679, + "step": 6973 + }, + { + "epoch": 0.6289398926816071, + "grad_norm": 1.2829348023782503, + "learning_rate": 1.2784941306611446e-06, + "loss": 0.9226, + "step": 6974 + }, + { + "epoch": 0.6290300762050773, + "grad_norm": 5.557303412251334, + "learning_rate": 1.2779492907374935e-06, + "loss": 1.0049, + "step": 6975 + }, + { + "epoch": 0.6291202597285476, + "grad_norm": 1.39472101026412, + "learning_rate": 1.2774045124231911e-06, + "loss": 0.9336, + "step": 6976 + }, + { + "epoch": 0.6292104432520178, + "grad_norm": 1.2728157163673484, + "learning_rate": 1.2768597957647197e-06, + "loss": 0.8343, + "step": 6977 + }, + { + "epoch": 0.6293006267754881, + "grad_norm": 1.5361609823138245, + "learning_rate": 1.2763151408085582e-06, 
+ "loss": 0.9458, + "step": 6978 + }, + { + "epoch": 0.6293908102989584, + "grad_norm": 1.3856302636872053, + "learning_rate": 1.2757705476011788e-06, + "loss": 0.9156, + "step": 6979 + }, + { + "epoch": 0.6294809938224286, + "grad_norm": 1.3408821908518482, + "learning_rate": 1.27522601618905e-06, + "loss": 0.89, + "step": 6980 + }, + { + "epoch": 0.629571177345899, + "grad_norm": 0.6250632969275506, + "learning_rate": 1.2746815466186337e-06, + "loss": 0.8196, + "step": 6981 + }, + { + "epoch": 0.6296613608693692, + "grad_norm": 1.3883410782767818, + "learning_rate": 1.274137138936387e-06, + "loss": 0.9426, + "step": 6982 + }, + { + "epoch": 0.6297515443928394, + "grad_norm": 1.53198956126947, + "learning_rate": 1.2735927931887625e-06, + "loss": 0.9439, + "step": 6983 + }, + { + "epoch": 0.6298417279163097, + "grad_norm": 0.7209597908479579, + "learning_rate": 1.2730485094222061e-06, + "loss": 0.8169, + "step": 6984 + }, + { + "epoch": 0.62993191143978, + "grad_norm": 1.8685828705116951, + "learning_rate": 1.2725042876831586e-06, + "loss": 0.9833, + "step": 6985 + }, + { + "epoch": 0.6300220949632502, + "grad_norm": 2.283351609465509, + "learning_rate": 1.2719601280180573e-06, + "loss": 0.8404, + "step": 6986 + }, + { + "epoch": 0.6301122784867205, + "grad_norm": 1.668864025549327, + "learning_rate": 1.2714160304733317e-06, + "loss": 1.0061, + "step": 6987 + }, + { + "epoch": 0.6302024620101907, + "grad_norm": 1.594326139544039, + "learning_rate": 1.2708719950954082e-06, + "loss": 0.9638, + "step": 6988 + }, + { + "epoch": 0.630292645533661, + "grad_norm": 1.4609566753577392, + "learning_rate": 1.2703280219307065e-06, + "loss": 0.9564, + "step": 6989 + }, + { + "epoch": 0.6303828290571313, + "grad_norm": 1.5032592174222696, + "learning_rate": 1.2697841110256411e-06, + "loss": 0.8794, + "step": 6990 + }, + { + "epoch": 0.6304730125806015, + "grad_norm": 1.5358955602653608, + "learning_rate": 1.2692402624266221e-06, + "loss": 0.9831, + "step": 6991 + }, + { + "epoch": 0.6305631961040717, + "grad_norm": 1.6847249278002876, + "learning_rate": 1.2686964761800529e-06, + "loss": 0.8617, + "step": 6992 + }, + { + "epoch": 0.6306533796275421, + "grad_norm": 0.6542475895325245, + "learning_rate": 1.268152752332333e-06, + "loss": 0.8026, + "step": 6993 + }, + { + "epoch": 0.6307435631510123, + "grad_norm": 1.5888956978273365, + "learning_rate": 1.2676090909298549e-06, + "loss": 0.858, + "step": 6994 + }, + { + "epoch": 0.6308337466744826, + "grad_norm": 1.3588538212140329, + "learning_rate": 1.2670654920190086e-06, + "loss": 0.9923, + "step": 6995 + }, + { + "epoch": 0.6309239301979528, + "grad_norm": 1.176946980953797, + "learning_rate": 1.2665219556461754e-06, + "loss": 0.9868, + "step": 6996 + }, + { + "epoch": 0.6310141137214231, + "grad_norm": 1.594252937097642, + "learning_rate": 1.2659784818577329e-06, + "loss": 1.0365, + "step": 6997 + }, + { + "epoch": 0.6311042972448934, + "grad_norm": 1.3543043164449573, + "learning_rate": 1.2654350707000542e-06, + "loss": 0.8269, + "step": 6998 + }, + { + "epoch": 0.6311944807683636, + "grad_norm": 1.6087277045850208, + "learning_rate": 1.264891722219505e-06, + "loss": 0.9299, + "step": 6999 + }, + { + "epoch": 0.6312846642918338, + "grad_norm": 1.5474377003641144, + "learning_rate": 1.2643484364624483e-06, + "loss": 0.9381, + "step": 7000 + }, + { + "epoch": 0.6313748478153042, + "grad_norm": 1.4988777412909307, + "learning_rate": 1.2638052134752393e-06, + "loss": 0.9951, + "step": 7001 + }, + { + "epoch": 0.6314650313387744, + "grad_norm": 
1.5096293359510842, + "learning_rate": 1.2632620533042277e-06, + "loss": 1.0168, + "step": 7002 + }, + { + "epoch": 0.6315552148622446, + "grad_norm": 1.5256344335405803, + "learning_rate": 1.2627189559957612e-06, + "loss": 0.9097, + "step": 7003 + }, + { + "epoch": 0.631645398385715, + "grad_norm": 1.7047327834564963, + "learning_rate": 1.262175921596178e-06, + "loss": 0.9995, + "step": 7004 + }, + { + "epoch": 0.6317355819091852, + "grad_norm": 2.0639886974276607, + "learning_rate": 1.2616329501518137e-06, + "loss": 0.8135, + "step": 7005 + }, + { + "epoch": 0.6318257654326555, + "grad_norm": 1.484053486587615, + "learning_rate": 1.2610900417089978e-06, + "loss": 0.8639, + "step": 7006 + }, + { + "epoch": 0.6319159489561257, + "grad_norm": 1.3857517363076135, + "learning_rate": 1.2605471963140535e-06, + "loss": 0.9519, + "step": 7007 + }, + { + "epoch": 0.632006132479596, + "grad_norm": 1.4149399032231993, + "learning_rate": 1.2600044140133e-06, + "loss": 1.0145, + "step": 7008 + }, + { + "epoch": 0.6320963160030663, + "grad_norm": 1.4901505392948533, + "learning_rate": 1.2594616948530493e-06, + "loss": 0.898, + "step": 7009 + }, + { + "epoch": 0.6321864995265365, + "grad_norm": 2.2072364914247746, + "learning_rate": 1.258919038879611e-06, + "loss": 0.9722, + "step": 7010 + }, + { + "epoch": 0.6322766830500067, + "grad_norm": 1.4674828927701746, + "learning_rate": 1.2583764461392859e-06, + "loss": 0.945, + "step": 7011 + }, + { + "epoch": 0.6323668665734771, + "grad_norm": 1.29362895049136, + "learning_rate": 1.2578339166783724e-06, + "loss": 0.9054, + "step": 7012 + }, + { + "epoch": 0.6324570500969473, + "grad_norm": 1.250980135981796, + "learning_rate": 1.2572914505431613e-06, + "loss": 0.8804, + "step": 7013 + }, + { + "epoch": 0.6325472336204175, + "grad_norm": 1.3839571681781722, + "learning_rate": 1.2567490477799383e-06, + "loss": 0.8443, + "step": 7014 + }, + { + "epoch": 0.6326374171438878, + "grad_norm": 1.3713320446246342, + "learning_rate": 1.2562067084349852e-06, + "loss": 1.0306, + "step": 7015 + }, + { + "epoch": 0.6327276006673581, + "grad_norm": 1.4070841116879997, + "learning_rate": 1.2556644325545764e-06, + "loss": 0.9269, + "step": 7016 + }, + { + "epoch": 0.6328177841908283, + "grad_norm": 1.5478785178548016, + "learning_rate": 1.255122220184983e-06, + "loss": 0.9217, + "step": 7017 + }, + { + "epoch": 0.6329079677142986, + "grad_norm": 1.3577954553515428, + "learning_rate": 1.2545800713724694e-06, + "loss": 0.8581, + "step": 7018 + }, + { + "epoch": 0.6329981512377688, + "grad_norm": 1.2942039835786474, + "learning_rate": 1.254037986163294e-06, + "loss": 0.8089, + "step": 7019 + }, + { + "epoch": 0.6330883347612392, + "grad_norm": 1.384117114500481, + "learning_rate": 1.2534959646037104e-06, + "loss": 0.9047, + "step": 7020 + }, + { + "epoch": 0.6331785182847094, + "grad_norm": 1.4511201215220544, + "learning_rate": 1.2529540067399675e-06, + "loss": 0.9489, + "step": 7021 + }, + { + "epoch": 0.6332687018081796, + "grad_norm": 1.456932821349668, + "learning_rate": 1.252412112618308e-06, + "loss": 0.8826, + "step": 7022 + }, + { + "epoch": 0.6333588853316499, + "grad_norm": 1.5213977950684803, + "learning_rate": 1.2518702822849696e-06, + "loss": 0.8268, + "step": 7023 + }, + { + "epoch": 0.6334490688551202, + "grad_norm": 1.549223917819285, + "learning_rate": 1.2513285157861831e-06, + "loss": 0.8872, + "step": 7024 + }, + { + "epoch": 0.6335392523785904, + "grad_norm": 1.4563585625649844, + "learning_rate": 1.2507868131681764e-06, + "loss": 0.9334, + "step": 7025 + }, 
+ { + "epoch": 0.6336294359020607, + "grad_norm": 1.5331161960328807, + "learning_rate": 1.250245174477169e-06, + "loss": 0.9225, + "step": 7026 + }, + { + "epoch": 0.6337196194255309, + "grad_norm": 0.7523541854816003, + "learning_rate": 1.2497035997593783e-06, + "loss": 0.7677, + "step": 7027 + }, + { + "epoch": 0.6338098029490012, + "grad_norm": 1.386450986128594, + "learning_rate": 1.2491620890610135e-06, + "loss": 0.9387, + "step": 7028 + }, + { + "epoch": 0.6338999864724715, + "grad_norm": 1.319128374534794, + "learning_rate": 1.2486206424282788e-06, + "loss": 0.9286, + "step": 7029 + }, + { + "epoch": 0.6339901699959417, + "grad_norm": 1.4819242624809186, + "learning_rate": 1.2480792599073743e-06, + "loss": 0.9952, + "step": 7030 + }, + { + "epoch": 0.634080353519412, + "grad_norm": 1.6562445298590387, + "learning_rate": 1.247537941544493e-06, + "loss": 0.9864, + "step": 7031 + }, + { + "epoch": 0.6341705370428823, + "grad_norm": 1.5184287653779955, + "learning_rate": 1.2469966873858242e-06, + "loss": 0.9701, + "step": 7032 + }, + { + "epoch": 0.6342607205663525, + "grad_norm": 1.609490547846864, + "learning_rate": 1.2464554974775496e-06, + "loss": 0.8655, + "step": 7033 + }, + { + "epoch": 0.6343509040898228, + "grad_norm": 1.4608624280928977, + "learning_rate": 1.2459143718658474e-06, + "loss": 0.9359, + "step": 7034 + }, + { + "epoch": 0.6344410876132931, + "grad_norm": 1.7917333930549852, + "learning_rate": 1.2453733105968886e-06, + "loss": 0.8767, + "step": 7035 + }, + { + "epoch": 0.6345312711367633, + "grad_norm": 0.737531856883318, + "learning_rate": 1.2448323137168399e-06, + "loss": 0.8014, + "step": 7036 + }, + { + "epoch": 0.6346214546602336, + "grad_norm": 1.2689410436956825, + "learning_rate": 1.2442913812718625e-06, + "loss": 0.8559, + "step": 7037 + }, + { + "epoch": 0.6347116381837038, + "grad_norm": 1.4482461385229315, + "learning_rate": 1.2437505133081108e-06, + "loss": 0.9222, + "step": 7038 + }, + { + "epoch": 0.6348018217071741, + "grad_norm": 1.635392409547683, + "learning_rate": 1.2432097098717358e-06, + "loss": 0.9526, + "step": 7039 + }, + { + "epoch": 0.6348920052306444, + "grad_norm": 1.5675006952847, + "learning_rate": 1.2426689710088813e-06, + "loss": 0.9463, + "step": 7040 + }, + { + "epoch": 0.6349821887541146, + "grad_norm": 1.4433297243286072, + "learning_rate": 1.2421282967656855e-06, + "loss": 0.9904, + "step": 7041 + }, + { + "epoch": 0.6350723722775848, + "grad_norm": 1.4152193342942132, + "learning_rate": 1.2415876871882827e-06, + "loss": 0.9676, + "step": 7042 + }, + { + "epoch": 0.6351625558010552, + "grad_norm": 0.7246476237851965, + "learning_rate": 1.2410471423227998e-06, + "loss": 0.8553, + "step": 7043 + }, + { + "epoch": 0.6352527393245254, + "grad_norm": 1.4235011433046423, + "learning_rate": 1.24050666221536e-06, + "loss": 0.9305, + "step": 7044 + }, + { + "epoch": 0.6353429228479957, + "grad_norm": 1.3647395425547144, + "learning_rate": 1.23996624691208e-06, + "loss": 1.003, + "step": 7045 + }, + { + "epoch": 0.6354331063714659, + "grad_norm": 1.238435609264023, + "learning_rate": 1.2394258964590693e-06, + "loss": 0.9285, + "step": 7046 + }, + { + "epoch": 0.6355232898949362, + "grad_norm": 1.6273935736154375, + "learning_rate": 1.238885610902436e-06, + "loss": 0.8921, + "step": 7047 + }, + { + "epoch": 0.6356134734184065, + "grad_norm": 1.3837236322993842, + "learning_rate": 1.2383453902882787e-06, + "loss": 0.9495, + "step": 7048 + }, + { + "epoch": 0.6357036569418767, + "grad_norm": 1.303561930052609, + "learning_rate": 
1.2378052346626927e-06, + "loss": 1.0025, + "step": 7049 + }, + { + "epoch": 0.6357938404653469, + "grad_norm": 2.0217089247263957, + "learning_rate": 1.2372651440717665e-06, + "loss": 0.872, + "step": 7050 + }, + { + "epoch": 0.6358840239888173, + "grad_norm": 1.43582985093633, + "learning_rate": 1.236725118561584e-06, + "loss": 0.9742, + "step": 7051 + }, + { + "epoch": 0.6359742075122875, + "grad_norm": 1.227908276953296, + "learning_rate": 1.2361851581782232e-06, + "loss": 0.9434, + "step": 7052 + }, + { + "epoch": 0.6360643910357577, + "grad_norm": 1.3884192557534663, + "learning_rate": 1.2356452629677554e-06, + "loss": 1.0103, + "step": 7053 + }, + { + "epoch": 0.6361545745592281, + "grad_norm": 1.488142032069212, + "learning_rate": 1.2351054329762494e-06, + "loss": 0.8817, + "step": 7054 + }, + { + "epoch": 0.6362447580826983, + "grad_norm": 1.4232444456398479, + "learning_rate": 1.2345656682497648e-06, + "loss": 0.9691, + "step": 7055 + }, + { + "epoch": 0.6363349416061685, + "grad_norm": 1.292583949980028, + "learning_rate": 1.2340259688343583e-06, + "loss": 0.9196, + "step": 7056 + }, + { + "epoch": 0.6364251251296388, + "grad_norm": 1.4381587136373237, + "learning_rate": 1.2334863347760803e-06, + "loss": 0.8411, + "step": 7057 + }, + { + "epoch": 0.6365153086531091, + "grad_norm": 0.6410667245641432, + "learning_rate": 1.2329467661209734e-06, + "loss": 0.7535, + "step": 7058 + }, + { + "epoch": 0.6366054921765794, + "grad_norm": 1.4680495519564425, + "learning_rate": 1.2324072629150788e-06, + "loss": 0.98, + "step": 7059 + }, + { + "epoch": 0.6366956757000496, + "grad_norm": 1.571680336761625, + "learning_rate": 1.2318678252044287e-06, + "loss": 0.9842, + "step": 7060 + }, + { + "epoch": 0.6367858592235198, + "grad_norm": 1.6297531304900639, + "learning_rate": 1.2313284530350512e-06, + "loss": 0.9911, + "step": 7061 + }, + { + "epoch": 0.6368760427469902, + "grad_norm": 1.4581883312980157, + "learning_rate": 1.230789146452969e-06, + "loss": 0.8736, + "step": 7062 + }, + { + "epoch": 0.6369662262704604, + "grad_norm": 1.8451643869002725, + "learning_rate": 1.2302499055041974e-06, + "loss": 0.9432, + "step": 7063 + }, + { + "epoch": 0.6370564097939306, + "grad_norm": 0.6922368378632971, + "learning_rate": 1.2297107302347488e-06, + "loss": 0.8153, + "step": 7064 + }, + { + "epoch": 0.6371465933174009, + "grad_norm": 1.3638733722885776, + "learning_rate": 1.2291716206906275e-06, + "loss": 1.0067, + "step": 7065 + }, + { + "epoch": 0.6372367768408712, + "grad_norm": 1.550147861688337, + "learning_rate": 1.2286325769178345e-06, + "loss": 1.0038, + "step": 7066 + }, + { + "epoch": 0.6373269603643414, + "grad_norm": 1.513798035317042, + "learning_rate": 1.2280935989623633e-06, + "loss": 0.9632, + "step": 7067 + }, + { + "epoch": 0.6374171438878117, + "grad_norm": 1.5259793716926258, + "learning_rate": 1.2275546868702017e-06, + "loss": 0.8777, + "step": 7068 + }, + { + "epoch": 0.6375073274112819, + "grad_norm": 1.5509070049964222, + "learning_rate": 1.2270158406873341e-06, + "loss": 0.8842, + "step": 7069 + }, + { + "epoch": 0.6375975109347523, + "grad_norm": 1.4437735947831862, + "learning_rate": 1.2264770604597363e-06, + "loss": 0.988, + "step": 7070 + }, + { + "epoch": 0.6376876944582225, + "grad_norm": 2.497702591996819, + "learning_rate": 1.2259383462333819e-06, + "loss": 1.0385, + "step": 7071 + }, + { + "epoch": 0.6377778779816927, + "grad_norm": 1.5471416975594932, + "learning_rate": 1.2253996980542359e-06, + "loss": 0.8972, + "step": 7072 + }, + { + "epoch": 0.637868061505163, 
+ "grad_norm": 2.0188830163553377, + "learning_rate": 1.2248611159682578e-06, + "loss": 0.9259, + "step": 7073 + }, + { + "epoch": 0.6379582450286333, + "grad_norm": 0.7747352578606055, + "learning_rate": 1.2243226000214044e-06, + "loss": 0.7936, + "step": 7074 + }, + { + "epoch": 0.6380484285521035, + "grad_norm": 1.6061956611674448, + "learning_rate": 1.2237841502596232e-06, + "loss": 0.9398, + "step": 7075 + }, + { + "epoch": 0.6381386120755738, + "grad_norm": 1.3251157889965868, + "learning_rate": 1.2232457667288583e-06, + "loss": 0.9349, + "step": 7076 + }, + { + "epoch": 0.6382287955990441, + "grad_norm": 1.5904901863330234, + "learning_rate": 1.2227074494750476e-06, + "loss": 0.9404, + "step": 7077 + }, + { + "epoch": 0.6383189791225143, + "grad_norm": 1.502768663451987, + "learning_rate": 1.2221691985441238e-06, + "loss": 0.972, + "step": 7078 + }, + { + "epoch": 0.6384091626459846, + "grad_norm": 1.3875470333891184, + "learning_rate": 1.2216310139820128e-06, + "loss": 0.8851, + "step": 7079 + }, + { + "epoch": 0.6384993461694548, + "grad_norm": 1.2851959500659493, + "learning_rate": 1.2210928958346347e-06, + "loss": 0.984, + "step": 7080 + }, + { + "epoch": 0.6385895296929252, + "grad_norm": 1.3441742515625876, + "learning_rate": 1.2205548441479065e-06, + "loss": 0.8672, + "step": 7081 + }, + { + "epoch": 0.6386797132163954, + "grad_norm": 1.4546090208328006, + "learning_rate": 1.2200168589677357e-06, + "loss": 0.8719, + "step": 7082 + }, + { + "epoch": 0.6387698967398656, + "grad_norm": 1.25188804135931, + "learning_rate": 1.2194789403400284e-06, + "loss": 0.9043, + "step": 7083 + }, + { + "epoch": 0.6388600802633358, + "grad_norm": 1.7286453457017812, + "learning_rate": 1.2189410883106816e-06, + "loss": 0.9211, + "step": 7084 + }, + { + "epoch": 0.6389502637868062, + "grad_norm": 1.529148606168581, + "learning_rate": 1.2184033029255872e-06, + "loss": 1.009, + "step": 7085 + }, + { + "epoch": 0.6390404473102764, + "grad_norm": 0.8029522379736092, + "learning_rate": 1.2178655842306334e-06, + "loss": 0.8182, + "step": 7086 + }, + { + "epoch": 0.6391306308337467, + "grad_norm": 1.7069577129245963, + "learning_rate": 1.2173279322716999e-06, + "loss": 0.9332, + "step": 7087 + }, + { + "epoch": 0.6392208143572169, + "grad_norm": 1.4746169740594763, + "learning_rate": 1.216790347094663e-06, + "loss": 0.939, + "step": 7088 + }, + { + "epoch": 0.6393109978806872, + "grad_norm": 1.6169358209673002, + "learning_rate": 1.2162528287453927e-06, + "loss": 0.9773, + "step": 7089 + }, + { + "epoch": 0.6394011814041575, + "grad_norm": 3.8944942736616017, + "learning_rate": 1.215715377269752e-06, + "loss": 0.9397, + "step": 7090 + }, + { + "epoch": 0.6394913649276277, + "grad_norm": 1.3770949703230797, + "learning_rate": 1.2151779927136003e-06, + "loss": 0.9227, + "step": 7091 + }, + { + "epoch": 0.6395815484510979, + "grad_norm": 1.4961184507078924, + "learning_rate": 1.2146406751227893e-06, + "loss": 1.0099, + "step": 7092 + }, + { + "epoch": 0.6396717319745683, + "grad_norm": 1.4202882905246013, + "learning_rate": 1.214103424543167e-06, + "loss": 0.9761, + "step": 7093 + }, + { + "epoch": 0.6397619154980385, + "grad_norm": 1.9227164062612692, + "learning_rate": 1.2135662410205735e-06, + "loss": 0.8532, + "step": 7094 + }, + { + "epoch": 0.6398520990215087, + "grad_norm": 1.5555354733911637, + "learning_rate": 1.2130291246008444e-06, + "loss": 0.8566, + "step": 7095 + }, + { + "epoch": 0.639942282544979, + "grad_norm": 1.6862604745775662, + "learning_rate": 1.21249207532981e-06, + "loss": 0.8595, 
+ "step": 7096 + }, + { + "epoch": 0.6400324660684493, + "grad_norm": 1.5412183659656946, + "learning_rate": 1.2119550932532936e-06, + "loss": 0.8555, + "step": 7097 + }, + { + "epoch": 0.6401226495919196, + "grad_norm": 1.427848007752505, + "learning_rate": 1.2114181784171144e-06, + "loss": 0.9176, + "step": 7098 + }, + { + "epoch": 0.6402128331153898, + "grad_norm": 1.3186535768376724, + "learning_rate": 1.2108813308670837e-06, + "loss": 0.837, + "step": 7099 + }, + { + "epoch": 0.6403030166388601, + "grad_norm": 1.777241597017156, + "learning_rate": 1.2103445506490099e-06, + "loss": 0.9435, + "step": 7100 + }, + { + "epoch": 0.6403932001623304, + "grad_norm": 0.7164304456712107, + "learning_rate": 1.209807837808693e-06, + "loss": 0.8292, + "step": 7101 + }, + { + "epoch": 0.6404833836858006, + "grad_norm": 1.5378791403638359, + "learning_rate": 1.2092711923919282e-06, + "loss": 0.9797, + "step": 7102 + }, + { + "epoch": 0.6405735672092708, + "grad_norm": 1.3034561651224608, + "learning_rate": 1.2087346144445053e-06, + "loss": 0.9025, + "step": 7103 + }, + { + "epoch": 0.6406637507327412, + "grad_norm": 1.9377924175479864, + "learning_rate": 1.2081981040122081e-06, + "loss": 0.953, + "step": 7104 + }, + { + "epoch": 0.6407539342562114, + "grad_norm": 9.144822003200218, + "learning_rate": 1.2076616611408148e-06, + "loss": 0.8915, + "step": 7105 + }, + { + "epoch": 0.6408441177796816, + "grad_norm": 1.8873823748183685, + "learning_rate": 1.2071252858760972e-06, + "loss": 0.9748, + "step": 7106 + }, + { + "epoch": 0.6409343013031519, + "grad_norm": 1.436258111305008, + "learning_rate": 1.2065889782638218e-06, + "loss": 0.9254, + "step": 7107 + }, + { + "epoch": 0.6410244848266222, + "grad_norm": 1.9447037077431704, + "learning_rate": 1.2060527383497506e-06, + "loss": 0.9175, + "step": 7108 + }, + { + "epoch": 0.6411146683500925, + "grad_norm": 1.3294306900418855, + "learning_rate": 1.2055165661796363e-06, + "loss": 1.0571, + "step": 7109 + }, + { + "epoch": 0.6412048518735627, + "grad_norm": 1.3177293416435596, + "learning_rate": 1.2049804617992303e-06, + "loss": 1.008, + "step": 7110 + }, + { + "epoch": 0.6412950353970329, + "grad_norm": 0.6965100765567368, + "learning_rate": 1.204444425254275e-06, + "loss": 0.818, + "step": 7111 + }, + { + "epoch": 0.6413852189205033, + "grad_norm": 1.573063959294643, + "learning_rate": 1.203908456590507e-06, + "loss": 0.9255, + "step": 7112 + }, + { + "epoch": 0.6414754024439735, + "grad_norm": 1.4459106032568105, + "learning_rate": 1.20337255585366e-06, + "loss": 0.9066, + "step": 7113 + }, + { + "epoch": 0.6415655859674437, + "grad_norm": 0.6835956682450764, + "learning_rate": 1.2028367230894582e-06, + "loss": 0.7538, + "step": 7114 + }, + { + "epoch": 0.641655769490914, + "grad_norm": 1.5247035316833197, + "learning_rate": 1.2023009583436237e-06, + "loss": 0.8627, + "step": 7115 + }, + { + "epoch": 0.6417459530143843, + "grad_norm": 1.6432436435100046, + "learning_rate": 1.2017652616618698e-06, + "loss": 0.9969, + "step": 7116 + }, + { + "epoch": 0.6418361365378545, + "grad_norm": 1.4443917305611649, + "learning_rate": 1.2012296330899048e-06, + "loss": 0.9359, + "step": 7117 + }, + { + "epoch": 0.6419263200613248, + "grad_norm": 1.4377262932499941, + "learning_rate": 1.200694072673432e-06, + "loss": 0.9686, + "step": 7118 + }, + { + "epoch": 0.642016503584795, + "grad_norm": 1.5367878792151228, + "learning_rate": 1.200158580458148e-06, + "loss": 0.9692, + "step": 7119 + }, + { + "epoch": 0.6421066871082654, + "grad_norm": 1.787801039628502, + 
"learning_rate": 1.1996231564897448e-06, + "loss": 0.9194, + "step": 7120 + }, + { + "epoch": 0.6421968706317356, + "grad_norm": 1.4604422806014148, + "learning_rate": 1.1990878008139062e-06, + "loss": 0.9009, + "step": 7121 + }, + { + "epoch": 0.6422870541552058, + "grad_norm": 1.3941916257893252, + "learning_rate": 1.1985525134763132e-06, + "loss": 0.9703, + "step": 7122 + }, + { + "epoch": 0.6423772376786762, + "grad_norm": 1.4772237276073825, + "learning_rate": 1.1980172945226393e-06, + "loss": 0.9891, + "step": 7123 + }, + { + "epoch": 0.6424674212021464, + "grad_norm": 1.6596923179894116, + "learning_rate": 1.197482143998551e-06, + "loss": 0.947, + "step": 7124 + }, + { + "epoch": 0.6425576047256166, + "grad_norm": 1.4565552452969055, + "learning_rate": 1.196947061949712e-06, + "loss": 0.9229, + "step": 7125 + }, + { + "epoch": 0.6426477882490869, + "grad_norm": 1.439373619751428, + "learning_rate": 1.1964120484217768e-06, + "loss": 0.9969, + "step": 7126 + }, + { + "epoch": 0.6427379717725572, + "grad_norm": 0.7900829326581815, + "learning_rate": 1.1958771034603975e-06, + "loss": 0.8168, + "step": 7127 + }, + { + "epoch": 0.6428281552960274, + "grad_norm": 1.5160356732099243, + "learning_rate": 1.1953422271112175e-06, + "loss": 0.8837, + "step": 7128 + }, + { + "epoch": 0.6429183388194977, + "grad_norm": 1.641374979715374, + "learning_rate": 1.1948074194198748e-06, + "loss": 0.9306, + "step": 7129 + }, + { + "epoch": 0.6430085223429679, + "grad_norm": 1.5733109878849794, + "learning_rate": 1.1942726804320033e-06, + "loss": 0.8446, + "step": 7130 + }, + { + "epoch": 0.6430987058664382, + "grad_norm": 1.6810765400287575, + "learning_rate": 1.1937380101932295e-06, + "loss": 0.879, + "step": 7131 + }, + { + "epoch": 0.6431888893899085, + "grad_norm": 1.2590031645222253, + "learning_rate": 1.1932034087491745e-06, + "loss": 0.9328, + "step": 7132 + }, + { + "epoch": 0.6432790729133787, + "grad_norm": 1.5336795916947839, + "learning_rate": 1.1926688761454531e-06, + "loss": 0.8552, + "step": 7133 + }, + { + "epoch": 0.643369256436849, + "grad_norm": 1.033116702988592, + "learning_rate": 1.1921344124276746e-06, + "loss": 0.9569, + "step": 7134 + }, + { + "epoch": 0.6434594399603193, + "grad_norm": 1.4170899413322187, + "learning_rate": 1.1916000176414431e-06, + "loss": 0.9687, + "step": 7135 + }, + { + "epoch": 0.6435496234837895, + "grad_norm": 1.647959852396248, + "learning_rate": 1.1910656918323546e-06, + "loss": 0.9705, + "step": 7136 + }, + { + "epoch": 0.6436398070072598, + "grad_norm": 1.4074655577407575, + "learning_rate": 1.1905314350460024e-06, + "loss": 0.9652, + "step": 7137 + }, + { + "epoch": 0.64372999053073, + "grad_norm": 1.2863526500376128, + "learning_rate": 1.1899972473279717e-06, + "loss": 0.9927, + "step": 7138 + }, + { + "epoch": 0.6438201740542003, + "grad_norm": 1.3459971069429546, + "learning_rate": 1.1894631287238414e-06, + "loss": 0.9337, + "step": 7139 + }, + { + "epoch": 0.6439103575776706, + "grad_norm": 1.3179576572568716, + "learning_rate": 1.188929079279187e-06, + "loss": 0.9512, + "step": 7140 + }, + { + "epoch": 0.6440005411011408, + "grad_norm": 1.3874512792745317, + "learning_rate": 1.1883950990395751e-06, + "loss": 0.8814, + "step": 7141 + }, + { + "epoch": 0.644090724624611, + "grad_norm": 1.3764480421996923, + "learning_rate": 1.187861188050569e-06, + "loss": 0.8814, + "step": 7142 + }, + { + "epoch": 0.6441809081480814, + "grad_norm": 1.2453942323520815, + "learning_rate": 1.187327346357724e-06, + "loss": 0.9266, + "step": 7143 + }, + { + "epoch": 
0.6442710916715516, + "grad_norm": 1.9785135764349586, + "learning_rate": 1.1867935740065912e-06, + "loss": 0.9696, + "step": 7144 + }, + { + "epoch": 0.6443612751950218, + "grad_norm": 1.4602072619594206, + "learning_rate": 1.1862598710427148e-06, + "loss": 0.8265, + "step": 7145 + }, + { + "epoch": 0.6444514587184921, + "grad_norm": 1.8062312801671394, + "learning_rate": 1.1857262375116328e-06, + "loss": 0.9052, + "step": 7146 + }, + { + "epoch": 0.6445416422419624, + "grad_norm": 1.7160244919556298, + "learning_rate": 1.1851926734588783e-06, + "loss": 0.9507, + "step": 7147 + }, + { + "epoch": 0.6446318257654327, + "grad_norm": 1.4101362147717558, + "learning_rate": 1.184659178929977e-06, + "loss": 0.9671, + "step": 7148 + }, + { + "epoch": 0.6447220092889029, + "grad_norm": 1.3867686947706963, + "learning_rate": 1.1841257539704513e-06, + "loss": 0.9383, + "step": 7149 + }, + { + "epoch": 0.6448121928123732, + "grad_norm": 1.4420976505415677, + "learning_rate": 1.1835923986258146e-06, + "loss": 0.9569, + "step": 7150 + }, + { + "epoch": 0.6449023763358435, + "grad_norm": 1.299298652949105, + "learning_rate": 1.1830591129415754e-06, + "loss": 0.8987, + "step": 7151 + }, + { + "epoch": 0.6449925598593137, + "grad_norm": 1.500511082365199, + "learning_rate": 1.182525896963238e-06, + "loss": 0.8887, + "step": 7152 + }, + { + "epoch": 0.6450827433827839, + "grad_norm": 1.6018595350265405, + "learning_rate": 1.181992750736298e-06, + "loss": 0.8675, + "step": 7153 + }, + { + "epoch": 0.6451729269062543, + "grad_norm": 1.5656986303014173, + "learning_rate": 1.1814596743062474e-06, + "loss": 0.9345, + "step": 7154 + }, + { + "epoch": 0.6452631104297245, + "grad_norm": 1.3439299884283396, + "learning_rate": 1.1809266677185711e-06, + "loss": 0.965, + "step": 7155 + }, + { + "epoch": 0.6453532939531947, + "grad_norm": 0.6889292642232104, + "learning_rate": 1.180393731018747e-06, + "loss": 0.8412, + "step": 7156 + }, + { + "epoch": 0.645443477476665, + "grad_norm": 0.5909419667495243, + "learning_rate": 1.1798608642522498e-06, + "loss": 0.7662, + "step": 7157 + }, + { + "epoch": 0.6455336610001353, + "grad_norm": 1.739106194700402, + "learning_rate": 1.1793280674645454e-06, + "loss": 0.9273, + "step": 7158 + }, + { + "epoch": 0.6456238445236056, + "grad_norm": 1.452169042362853, + "learning_rate": 1.1787953407010954e-06, + "loss": 1.0141, + "step": 7159 + }, + { + "epoch": 0.6457140280470758, + "grad_norm": 1.5371275225759249, + "learning_rate": 1.1782626840073554e-06, + "loss": 0.9492, + "step": 7160 + }, + { + "epoch": 0.645804211570546, + "grad_norm": 1.5288082038735131, + "learning_rate": 1.1777300974287738e-06, + "loss": 0.8425, + "step": 7161 + }, + { + "epoch": 0.6458943950940164, + "grad_norm": 1.4360414652084297, + "learning_rate": 1.1771975810107947e-06, + "loss": 0.9657, + "step": 7162 + }, + { + "epoch": 0.6459845786174866, + "grad_norm": 1.4350952268365718, + "learning_rate": 1.1766651347988542e-06, + "loss": 1.0031, + "step": 7163 + }, + { + "epoch": 0.6460747621409568, + "grad_norm": 1.9885029547393387, + "learning_rate": 1.1761327588383848e-06, + "loss": 1.0497, + "step": 7164 + }, + { + "epoch": 0.6461649456644271, + "grad_norm": 2.81352452585158, + "learning_rate": 1.1756004531748105e-06, + "loss": 0.9551, + "step": 7165 + }, + { + "epoch": 0.6462551291878974, + "grad_norm": 1.289325941505138, + "learning_rate": 1.1750682178535521e-06, + "loss": 0.9707, + "step": 7166 + }, + { + "epoch": 0.6463453127113676, + "grad_norm": 1.4696633765746994, + "learning_rate": 
1.1745360529200218e-06, + "loss": 0.8962, + "step": 7167 + }, + { + "epoch": 0.6464354962348379, + "grad_norm": 1.4108959593207604, + "learning_rate": 1.1740039584196261e-06, + "loss": 0.9664, + "step": 7168 + }, + { + "epoch": 0.6465256797583081, + "grad_norm": 1.3827823269240769, + "learning_rate": 1.1734719343977683e-06, + "loss": 0.9676, + "step": 7169 + }, + { + "epoch": 0.6466158632817784, + "grad_norm": 2.0353951048579337, + "learning_rate": 1.1729399808998416e-06, + "loss": 0.887, + "step": 7170 + }, + { + "epoch": 0.6467060468052487, + "grad_norm": 1.300606609912884, + "learning_rate": 1.1724080979712368e-06, + "loss": 0.8269, + "step": 7171 + }, + { + "epoch": 0.6467962303287189, + "grad_norm": 1.2630915776001916, + "learning_rate": 1.1718762856573365e-06, + "loss": 1.0142, + "step": 7172 + }, + { + "epoch": 0.6468864138521893, + "grad_norm": 1.30674613218755, + "learning_rate": 1.1713445440035172e-06, + "loss": 0.9334, + "step": 7173 + }, + { + "epoch": 0.6469765973756595, + "grad_norm": 1.5583203680685669, + "learning_rate": 1.1708128730551506e-06, + "loss": 0.9805, + "step": 7174 + }, + { + "epoch": 0.6470667808991297, + "grad_norm": 1.5813392161656057, + "learning_rate": 1.1702812728576019e-06, + "loss": 0.9261, + "step": 7175 + }, + { + "epoch": 0.6471569644226, + "grad_norm": 1.4062918338910977, + "learning_rate": 1.1697497434562303e-06, + "loss": 0.9715, + "step": 7176 + }, + { + "epoch": 0.6472471479460703, + "grad_norm": 1.4137681686353694, + "learning_rate": 1.1692182848963885e-06, + "loss": 0.9805, + "step": 7177 + }, + { + "epoch": 0.6473373314695405, + "grad_norm": 1.5068801606389348, + "learning_rate": 1.1686868972234227e-06, + "loss": 0.9063, + "step": 7178 + }, + { + "epoch": 0.6474275149930108, + "grad_norm": 1.1947706901928337, + "learning_rate": 1.1681555804826755e-06, + "loss": 0.9477, + "step": 7179 + }, + { + "epoch": 0.647517698516481, + "grad_norm": 1.3468589746358257, + "learning_rate": 1.1676243347194806e-06, + "loss": 0.8828, + "step": 7180 + }, + { + "epoch": 0.6476078820399513, + "grad_norm": 1.334578017574462, + "learning_rate": 1.167093159979167e-06, + "loss": 0.9579, + "step": 7181 + }, + { + "epoch": 0.6476980655634216, + "grad_norm": 1.6285496602718874, + "learning_rate": 1.1665620563070575e-06, + "loss": 0.9686, + "step": 7182 + }, + { + "epoch": 0.6477882490868918, + "grad_norm": 1.4793412638316008, + "learning_rate": 1.1660310237484691e-06, + "loss": 1.0452, + "step": 7183 + }, + { + "epoch": 0.647878432610362, + "grad_norm": 1.3569853375738372, + "learning_rate": 1.165500062348713e-06, + "loss": 0.8887, + "step": 7184 + }, + { + "epoch": 0.6479686161338324, + "grad_norm": 1.5997326329476824, + "learning_rate": 1.164969172153091e-06, + "loss": 0.9937, + "step": 7185 + }, + { + "epoch": 0.6480587996573026, + "grad_norm": 1.368579526414245, + "learning_rate": 1.1644383532069055e-06, + "loss": 0.9598, + "step": 7186 + }, + { + "epoch": 0.6481489831807729, + "grad_norm": 1.3018535512987763, + "learning_rate": 1.1639076055554454e-06, + "loss": 0.9798, + "step": 7187 + }, + { + "epoch": 0.6482391667042431, + "grad_norm": 1.417320754947872, + "learning_rate": 1.163376929244e-06, + "loss": 0.9203, + "step": 7188 + }, + { + "epoch": 0.6483293502277134, + "grad_norm": 1.2372325704583658, + "learning_rate": 1.1628463243178472e-06, + "loss": 0.9032, + "step": 7189 + }, + { + "epoch": 0.6484195337511837, + "grad_norm": 1.5373379877841054, + "learning_rate": 1.1623157908222623e-06, + "loss": 1.0024, + "step": 7190 + }, + { + "epoch": 0.6485097172746539, + 
"grad_norm": 1.3183756961518664, + "learning_rate": 1.1617853288025129e-06, + "loss": 0.9839, + "step": 7191 + }, + { + "epoch": 0.6485999007981241, + "grad_norm": 1.261781814935971, + "learning_rate": 1.1612549383038612e-06, + "loss": 0.9185, + "step": 7192 + }, + { + "epoch": 0.6486900843215945, + "grad_norm": 1.2002247619740547, + "learning_rate": 1.1607246193715629e-06, + "loss": 0.9232, + "step": 7193 + }, + { + "epoch": 0.6487802678450647, + "grad_norm": 1.8064356012255522, + "learning_rate": 1.1601943720508684e-06, + "loss": 0.9599, + "step": 7194 + }, + { + "epoch": 0.6488704513685349, + "grad_norm": 1.382735600461524, + "learning_rate": 1.1596641963870202e-06, + "loss": 0.9848, + "step": 7195 + }, + { + "epoch": 0.6489606348920053, + "grad_norm": 1.5972920867825167, + "learning_rate": 1.1591340924252561e-06, + "loss": 0.9519, + "step": 7196 + }, + { + "epoch": 0.6490508184154755, + "grad_norm": 0.8027348576379518, + "learning_rate": 1.158604060210808e-06, + "loss": 0.8706, + "step": 7197 + }, + { + "epoch": 0.6491410019389457, + "grad_norm": 1.6738064028956245, + "learning_rate": 1.1580740997889008e-06, + "loss": 1.0068, + "step": 7198 + }, + { + "epoch": 0.649231185462416, + "grad_norm": 3.4704363048991844, + "learning_rate": 1.1575442112047544e-06, + "loss": 0.9338, + "step": 7199 + }, + { + "epoch": 0.6493213689858863, + "grad_norm": 1.6758290530769298, + "learning_rate": 1.1570143945035797e-06, + "loss": 0.8776, + "step": 7200 + }, + { + "epoch": 0.6494115525093566, + "grad_norm": 2.881087947764932, + "learning_rate": 1.1564846497305864e-06, + "loss": 0.8976, + "step": 7201 + }, + { + "epoch": 0.6495017360328268, + "grad_norm": 1.5295396711947902, + "learning_rate": 1.1559549769309726e-06, + "loss": 0.9173, + "step": 7202 + }, + { + "epoch": 0.649591919556297, + "grad_norm": 1.2544008509505316, + "learning_rate": 1.1554253761499358e-06, + "loss": 0.9289, + "step": 7203 + }, + { + "epoch": 0.6496821030797674, + "grad_norm": 1.4812008298285928, + "learning_rate": 1.1548958474326617e-06, + "loss": 0.9716, + "step": 7204 + }, + { + "epoch": 0.6497722866032376, + "grad_norm": 1.366211286521578, + "learning_rate": 1.154366390824334e-06, + "loss": 0.8711, + "step": 7205 + }, + { + "epoch": 0.6498624701267078, + "grad_norm": 0.8626467414087323, + "learning_rate": 1.1538370063701287e-06, + "loss": 0.8015, + "step": 7206 + }, + { + "epoch": 0.6499526536501781, + "grad_norm": 1.327848787195705, + "learning_rate": 1.1533076941152153e-06, + "loss": 0.9037, + "step": 7207 + }, + { + "epoch": 0.6500428371736484, + "grad_norm": 1.602355091012058, + "learning_rate": 1.1527784541047583e-06, + "loss": 0.8555, + "step": 7208 + }, + { + "epoch": 0.6501330206971186, + "grad_norm": 1.8158067595854268, + "learning_rate": 1.1522492863839152e-06, + "loss": 0.9033, + "step": 7209 + }, + { + "epoch": 0.6502232042205889, + "grad_norm": 1.6735414417516323, + "learning_rate": 1.1517201909978382e-06, + "loss": 0.9016, + "step": 7210 + }, + { + "epoch": 0.6503133877440591, + "grad_norm": 0.6636065769235817, + "learning_rate": 1.151191167991671e-06, + "loss": 0.8049, + "step": 7211 + }, + { + "epoch": 0.6504035712675295, + "grad_norm": 1.4231572500267962, + "learning_rate": 1.1506622174105536e-06, + "loss": 0.9512, + "step": 7212 + }, + { + "epoch": 0.6504937547909997, + "grad_norm": 1.6860497918972726, + "learning_rate": 1.1501333392996194e-06, + "loss": 0.9708, + "step": 7213 + }, + { + "epoch": 0.6505839383144699, + "grad_norm": 0.685811570410523, + "learning_rate": 1.1496045337039943e-06, + "loss": 0.8049, 
+ "step": 7214 + }, + { + "epoch": 0.6506741218379402, + "grad_norm": 1.52718553430951, + "learning_rate": 1.1490758006687995e-06, + "loss": 0.8965, + "step": 7215 + }, + { + "epoch": 0.6507643053614105, + "grad_norm": 1.6545845150981375, + "learning_rate": 1.1485471402391502e-06, + "loss": 0.9206, + "step": 7216 + }, + { + "epoch": 0.6508544888848807, + "grad_norm": 1.4939484915273733, + "learning_rate": 1.1480185524601522e-06, + "loss": 0.9119, + "step": 7217 + }, + { + "epoch": 0.650944672408351, + "grad_norm": 1.360233781019233, + "learning_rate": 1.1474900373769108e-06, + "loss": 0.9499, + "step": 7218 + }, + { + "epoch": 0.6510348559318213, + "grad_norm": 0.5940363344132145, + "learning_rate": 1.1469615950345184e-06, + "loss": 0.7351, + "step": 7219 + }, + { + "epoch": 0.6511250394552915, + "grad_norm": 0.7114370917243928, + "learning_rate": 1.1464332254780678e-06, + "loss": 0.7818, + "step": 7220 + }, + { + "epoch": 0.6512152229787618, + "grad_norm": 1.324916599455949, + "learning_rate": 1.1459049287526404e-06, + "loss": 0.9264, + "step": 7221 + }, + { + "epoch": 0.651305406502232, + "grad_norm": 1.706683160483609, + "learning_rate": 1.1453767049033137e-06, + "loss": 0.906, + "step": 7222 + }, + { + "epoch": 0.6513955900257024, + "grad_norm": 1.3889035059901467, + "learning_rate": 1.1448485539751586e-06, + "loss": 0.9878, + "step": 7223 + }, + { + "epoch": 0.6514857735491726, + "grad_norm": 1.5200820356115698, + "learning_rate": 1.1443204760132408e-06, + "loss": 0.9134, + "step": 7224 + }, + { + "epoch": 0.6515759570726428, + "grad_norm": 1.415367578002045, + "learning_rate": 1.1437924710626185e-06, + "loss": 0.9227, + "step": 7225 + }, + { + "epoch": 0.651666140596113, + "grad_norm": 0.6901901887265974, + "learning_rate": 1.1432645391683429e-06, + "loss": 0.7622, + "step": 7226 + }, + { + "epoch": 0.6517563241195834, + "grad_norm": 1.655801748401038, + "learning_rate": 1.1427366803754609e-06, + "loss": 0.9696, + "step": 7227 + }, + { + "epoch": 0.6518465076430536, + "grad_norm": 1.3086881688913907, + "learning_rate": 1.142208894729012e-06, + "loss": 0.9561, + "step": 7228 + }, + { + "epoch": 0.6519366911665239, + "grad_norm": 1.3379552226934723, + "learning_rate": 1.1416811822740301e-06, + "loss": 0.9838, + "step": 7229 + }, + { + "epoch": 0.6520268746899941, + "grad_norm": 1.2610424579487844, + "learning_rate": 1.1411535430555428e-06, + "loss": 0.9895, + "step": 7230 + }, + { + "epoch": 0.6521170582134644, + "grad_norm": 1.3277628236502237, + "learning_rate": 1.1406259771185705e-06, + "loss": 0.9942, + "step": 7231 + }, + { + "epoch": 0.6522072417369347, + "grad_norm": 1.1805185081553058, + "learning_rate": 1.1400984845081282e-06, + "loss": 0.8983, + "step": 7232 + }, + { + "epoch": 0.6522974252604049, + "grad_norm": 1.4372581610216517, + "learning_rate": 1.139571065269226e-06, + "loss": 1.02, + "step": 7233 + }, + { + "epoch": 0.6523876087838751, + "grad_norm": 1.3972950962916193, + "learning_rate": 1.139043719446863e-06, + "loss": 0.9849, + "step": 7234 + }, + { + "epoch": 0.6524777923073455, + "grad_norm": 1.3201428817955303, + "learning_rate": 1.1385164470860385e-06, + "loss": 1.0051, + "step": 7235 + }, + { + "epoch": 0.6525679758308157, + "grad_norm": 1.5382259825876334, + "learning_rate": 1.1379892482317403e-06, + "loss": 0.9464, + "step": 7236 + }, + { + "epoch": 0.652658159354286, + "grad_norm": 1.3524327205346272, + "learning_rate": 1.1374621229289524e-06, + "loss": 0.9274, + "step": 7237 + }, + { + "epoch": 0.6527483428777562, + "grad_norm": 1.5847418275794818, + 
"learning_rate": 1.1369350712226525e-06, + "loss": 0.9263, + "step": 7238 + }, + { + "epoch": 0.6528385264012265, + "grad_norm": 1.65246841285053, + "learning_rate": 1.136408093157811e-06, + "loss": 0.9006, + "step": 7239 + }, + { + "epoch": 0.6529287099246968, + "grad_norm": 1.4834943869664248, + "learning_rate": 1.1358811887793935e-06, + "loss": 0.9026, + "step": 7240 + }, + { + "epoch": 0.653018893448167, + "grad_norm": 1.4674346390681712, + "learning_rate": 1.135354358132356e-06, + "loss": 0.9797, + "step": 7241 + }, + { + "epoch": 0.6531090769716373, + "grad_norm": 1.3258672934705868, + "learning_rate": 1.1348276012616542e-06, + "loss": 0.9844, + "step": 7242 + }, + { + "epoch": 0.6531992604951076, + "grad_norm": 1.4321453875202348, + "learning_rate": 1.134300918212231e-06, + "loss": 0.9081, + "step": 7243 + }, + { + "epoch": 0.6532894440185778, + "grad_norm": 1.53234038164234, + "learning_rate": 1.133774309029027e-06, + "loss": 0.9426, + "step": 7244 + }, + { + "epoch": 0.653379627542048, + "grad_norm": 1.3915843868868791, + "learning_rate": 1.133247773756975e-06, + "loss": 0.9805, + "step": 7245 + }, + { + "epoch": 0.6534698110655184, + "grad_norm": 0.673927366163366, + "learning_rate": 1.1327213124410024e-06, + "loss": 0.8293, + "step": 7246 + }, + { + "epoch": 0.6535599945889886, + "grad_norm": 1.2771258153689715, + "learning_rate": 1.1321949251260292e-06, + "loss": 0.9782, + "step": 7247 + }, + { + "epoch": 0.6536501781124588, + "grad_norm": 1.4848140669509862, + "learning_rate": 1.1316686118569712e-06, + "loss": 0.8283, + "step": 7248 + }, + { + "epoch": 0.6537403616359291, + "grad_norm": 1.5169638786164619, + "learning_rate": 1.1311423726787335e-06, + "loss": 0.8451, + "step": 7249 + }, + { + "epoch": 0.6538305451593994, + "grad_norm": 1.5083345290261827, + "learning_rate": 1.130616207636221e-06, + "loss": 0.9152, + "step": 7250 + }, + { + "epoch": 0.6539207286828697, + "grad_norm": 1.3951580444830232, + "learning_rate": 1.1300901167743263e-06, + "loss": 0.9403, + "step": 7251 + }, + { + "epoch": 0.6540109122063399, + "grad_norm": 1.1355027973975422, + "learning_rate": 1.12956410013794e-06, + "loss": 0.9378, + "step": 7252 + }, + { + "epoch": 0.6541010957298101, + "grad_norm": 1.6058997993327062, + "learning_rate": 1.1290381577719436e-06, + "loss": 0.9256, + "step": 7253 + }, + { + "epoch": 0.6541912792532805, + "grad_norm": 1.7182463168001099, + "learning_rate": 1.1285122897212143e-06, + "loss": 0.8534, + "step": 7254 + }, + { + "epoch": 0.6542814627767507, + "grad_norm": 0.7942795841589642, + "learning_rate": 1.1279864960306228e-06, + "loss": 0.8582, + "step": 7255 + }, + { + "epoch": 0.6543716463002209, + "grad_norm": 1.4560388043546013, + "learning_rate": 1.1274607767450297e-06, + "loss": 1.0094, + "step": 7256 + }, + { + "epoch": 0.6544618298236912, + "grad_norm": 1.5091939339718816, + "learning_rate": 1.126935131909296e-06, + "loss": 0.9203, + "step": 7257 + }, + { + "epoch": 0.6545520133471615, + "grad_norm": 1.2470946402928804, + "learning_rate": 1.1264095615682693e-06, + "loss": 0.8853, + "step": 7258 + }, + { + "epoch": 0.6546421968706317, + "grad_norm": 1.4610080803942094, + "learning_rate": 1.1258840657667973e-06, + "loss": 0.8616, + "step": 7259 + }, + { + "epoch": 0.654732380394102, + "grad_norm": 2.6585946001516145, + "learning_rate": 1.125358644549716e-06, + "loss": 0.8703, + "step": 7260 + }, + { + "epoch": 0.6548225639175722, + "grad_norm": 1.420966895902395, + "learning_rate": 1.1248332979618578e-06, + "loss": 0.8073, + "step": 7261 + }, + { + "epoch": 
0.6549127474410426, + "grad_norm": 1.2202050741780772, + "learning_rate": 1.1243080260480482e-06, + "loss": 0.8367, + "step": 7262 + }, + { + "epoch": 0.6550029309645128, + "grad_norm": 1.6905286243207558, + "learning_rate": 1.1237828288531063e-06, + "loss": 0.9982, + "step": 7263 + }, + { + "epoch": 0.655093114487983, + "grad_norm": 1.560133782665789, + "learning_rate": 1.1232577064218449e-06, + "loss": 0.8634, + "step": 7264 + }, + { + "epoch": 0.6551832980114533, + "grad_norm": 1.4215536907306325, + "learning_rate": 1.1227326587990711e-06, + "loss": 1.009, + "step": 7265 + }, + { + "epoch": 0.6552734815349236, + "grad_norm": 1.257762315590118, + "learning_rate": 1.1222076860295832e-06, + "loss": 0.9256, + "step": 7266 + }, + { + "epoch": 0.6553636650583938, + "grad_norm": 1.5276705106975002, + "learning_rate": 1.1216827881581756e-06, + "loss": 0.955, + "step": 7267 + }, + { + "epoch": 0.6554538485818641, + "grad_norm": 1.5714693884688475, + "learning_rate": 1.1211579652296355e-06, + "loss": 0.9314, + "step": 7268 + }, + { + "epoch": 0.6555440321053344, + "grad_norm": 1.5293808252670287, + "learning_rate": 1.1206332172887438e-06, + "loss": 1.0139, + "step": 7269 + }, + { + "epoch": 0.6556342156288046, + "grad_norm": 1.5368350413100973, + "learning_rate": 1.1201085443802756e-06, + "loss": 0.9569, + "step": 7270 + }, + { + "epoch": 0.6557243991522749, + "grad_norm": 1.327070243726979, + "learning_rate": 1.1195839465489964e-06, + "loss": 0.909, + "step": 7271 + }, + { + "epoch": 0.6558145826757451, + "grad_norm": 1.545350296283424, + "learning_rate": 1.1190594238396708e-06, + "loss": 0.9766, + "step": 7272 + }, + { + "epoch": 0.6559047661992154, + "grad_norm": 1.1387724791065739, + "learning_rate": 1.1185349762970515e-06, + "loss": 0.9127, + "step": 7273 + }, + { + "epoch": 0.6559949497226857, + "grad_norm": 1.3633345454163204, + "learning_rate": 1.1180106039658896e-06, + "loss": 0.9444, + "step": 7274 + }, + { + "epoch": 0.6560851332461559, + "grad_norm": 0.7572988755531481, + "learning_rate": 1.117486306890925e-06, + "loss": 0.7891, + "step": 7275 + }, + { + "epoch": 0.6561753167696261, + "grad_norm": 1.7868828968728871, + "learning_rate": 1.116962085116896e-06, + "loss": 0.8888, + "step": 7276 + }, + { + "epoch": 0.6562655002930965, + "grad_norm": 0.7054372491066369, + "learning_rate": 1.1164379386885302e-06, + "loss": 0.7453, + "step": 7277 + }, + { + "epoch": 0.6563556838165667, + "grad_norm": 1.521552303497064, + "learning_rate": 1.1159138676505516e-06, + "loss": 0.9801, + "step": 7278 + }, + { + "epoch": 0.656445867340037, + "grad_norm": 1.3194486060272084, + "learning_rate": 1.1153898720476761e-06, + "loss": 1.0016, + "step": 7279 + }, + { + "epoch": 0.6565360508635072, + "grad_norm": 1.606114018840541, + "learning_rate": 1.114865951924615e-06, + "loss": 0.9278, + "step": 7280 + }, + { + "epoch": 0.6566262343869775, + "grad_norm": 1.4186014912723162, + "learning_rate": 1.1143421073260721e-06, + "loss": 0.8925, + "step": 7281 + }, + { + "epoch": 0.6567164179104478, + "grad_norm": 1.414982701792308, + "learning_rate": 1.1138183382967432e-06, + "loss": 0.8783, + "step": 7282 + }, + { + "epoch": 0.656806601433918, + "grad_norm": 1.5479418225852024, + "learning_rate": 1.11329464488132e-06, + "loss": 0.8532, + "step": 7283 + }, + { + "epoch": 0.6568967849573882, + "grad_norm": 1.4281647290603101, + "learning_rate": 1.112771027124487e-06, + "loss": 0.9179, + "step": 7284 + }, + { + "epoch": 0.6569869684808586, + "grad_norm": 1.3992937759059254, + "learning_rate": 1.112247485070922e-06, + 
"loss": 0.9524, + "step": 7285 + }, + { + "epoch": 0.6570771520043288, + "grad_norm": 1.3340030964868343, + "learning_rate": 1.1117240187652968e-06, + "loss": 0.9466, + "step": 7286 + }, + { + "epoch": 0.657167335527799, + "grad_norm": 1.5821457056157053, + "learning_rate": 1.1112006282522767e-06, + "loss": 1.0176, + "step": 7287 + }, + { + "epoch": 0.6572575190512693, + "grad_norm": 1.4086381343507133, + "learning_rate": 1.1106773135765183e-06, + "loss": 0.9245, + "step": 7288 + }, + { + "epoch": 0.6573477025747396, + "grad_norm": 1.2464367146407518, + "learning_rate": 1.110154074782677e-06, + "loss": 0.912, + "step": 7289 + }, + { + "epoch": 0.6574378860982099, + "grad_norm": 1.4491147726345137, + "learning_rate": 1.1096309119153948e-06, + "loss": 0.9288, + "step": 7290 + }, + { + "epoch": 0.6575280696216801, + "grad_norm": 0.7216940995817579, + "learning_rate": 1.1091078250193145e-06, + "loss": 0.8407, + "step": 7291 + }, + { + "epoch": 0.6576182531451504, + "grad_norm": 1.9054680768364112, + "learning_rate": 1.108584814139066e-06, + "loss": 0.9497, + "step": 7292 + }, + { + "epoch": 0.6577084366686207, + "grad_norm": 1.6956116021340937, + "learning_rate": 1.108061879319276e-06, + "loss": 0.9383, + "step": 7293 + }, + { + "epoch": 0.6577986201920909, + "grad_norm": 1.4250423659334484, + "learning_rate": 1.1075390206045648e-06, + "loss": 1.0001, + "step": 7294 + }, + { + "epoch": 0.6578888037155611, + "grad_norm": 1.5497511510129354, + "learning_rate": 1.1070162380395454e-06, + "loss": 0.9806, + "step": 7295 + }, + { + "epoch": 0.6579789872390315, + "grad_norm": 1.5358014097878658, + "learning_rate": 1.1064935316688253e-06, + "loss": 0.8839, + "step": 7296 + }, + { + "epoch": 0.6580691707625017, + "grad_norm": 1.5024671927375604, + "learning_rate": 1.105970901537002e-06, + "loss": 1.031, + "step": 7297 + }, + { + "epoch": 0.6581593542859719, + "grad_norm": 1.3462249231438046, + "learning_rate": 1.1054483476886727e-06, + "loss": 0.9854, + "step": 7298 + }, + { + "epoch": 0.6582495378094422, + "grad_norm": 1.3471743841734023, + "learning_rate": 1.1049258701684222e-06, + "loss": 0.8575, + "step": 7299 + }, + { + "epoch": 0.6583397213329125, + "grad_norm": 1.4199797186947836, + "learning_rate": 1.1044034690208315e-06, + "loss": 0.8871, + "step": 7300 + }, + { + "epoch": 0.6584299048563828, + "grad_norm": 1.865340975765981, + "learning_rate": 1.1038811442904755e-06, + "loss": 0.9783, + "step": 7301 + }, + { + "epoch": 0.658520088379853, + "grad_norm": 1.7710764624547428, + "learning_rate": 1.103358896021921e-06, + "loss": 0.8713, + "step": 7302 + }, + { + "epoch": 0.6586102719033232, + "grad_norm": 1.8845780827799639, + "learning_rate": 1.1028367242597298e-06, + "loss": 0.9167, + "step": 7303 + }, + { + "epoch": 0.6587004554267936, + "grad_norm": 1.681086184097001, + "learning_rate": 1.102314629048457e-06, + "loss": 0.9704, + "step": 7304 + }, + { + "epoch": 0.6587906389502638, + "grad_norm": 1.359075315918956, + "learning_rate": 1.1017926104326484e-06, + "loss": 0.9909, + "step": 7305 + }, + { + "epoch": 0.658880822473734, + "grad_norm": 1.3588309240589254, + "learning_rate": 1.1012706684568483e-06, + "loss": 0.9955, + "step": 7306 + }, + { + "epoch": 0.6589710059972043, + "grad_norm": 1.5960807960346817, + "learning_rate": 1.1007488031655894e-06, + "loss": 0.956, + "step": 7307 + }, + { + "epoch": 0.6590611895206746, + "grad_norm": 1.5152887974303513, + "learning_rate": 1.1002270146034013e-06, + "loss": 0.9039, + "step": 7308 + }, + { + "epoch": 0.6591513730441448, + "grad_norm": 
1.1067131775442807, + "learning_rate": 1.0997053028148052e-06, + "loss": 0.9678, + "step": 7309 + }, + { + "epoch": 0.6592415565676151, + "grad_norm": 1.1888289293626269, + "learning_rate": 1.0991836678443173e-06, + "loss": 0.9417, + "step": 7310 + }, + { + "epoch": 0.6593317400910853, + "grad_norm": 1.4308735248221156, + "learning_rate": 1.0986621097364465e-06, + "loss": 0.892, + "step": 7311 + }, + { + "epoch": 0.6594219236145556, + "grad_norm": 1.7264271678791465, + "learning_rate": 1.0981406285356932e-06, + "loss": 0.8588, + "step": 7312 + }, + { + "epoch": 0.6595121071380259, + "grad_norm": 1.5699975756000826, + "learning_rate": 1.0976192242865554e-06, + "loss": 0.8612, + "step": 7313 + }, + { + "epoch": 0.6596022906614961, + "grad_norm": 1.5634065668376864, + "learning_rate": 1.0970978970335202e-06, + "loss": 1.0034, + "step": 7314 + }, + { + "epoch": 0.6596924741849665, + "grad_norm": 1.516551727439025, + "learning_rate": 1.0965766468210714e-06, + "loss": 0.9628, + "step": 7315 + }, + { + "epoch": 0.6597826577084367, + "grad_norm": 1.4763431285195292, + "learning_rate": 1.0960554736936843e-06, + "loss": 0.9554, + "step": 7316 + }, + { + "epoch": 0.6598728412319069, + "grad_norm": 1.5499922044619523, + "learning_rate": 1.0955343776958283e-06, + "loss": 0.8453, + "step": 7317 + }, + { + "epoch": 0.6599630247553772, + "grad_norm": 1.4029532465496095, + "learning_rate": 1.0950133588719665e-06, + "loss": 0.959, + "step": 7318 + }, + { + "epoch": 0.6600532082788475, + "grad_norm": 1.4832774069597752, + "learning_rate": 1.0944924172665551e-06, + "loss": 0.9626, + "step": 7319 + }, + { + "epoch": 0.6601433918023177, + "grad_norm": 1.473630466547793, + "learning_rate": 1.0939715529240437e-06, + "loss": 0.9778, + "step": 7320 + }, + { + "epoch": 0.660233575325788, + "grad_norm": 1.8160945501836927, + "learning_rate": 1.0934507658888755e-06, + "loss": 0.9688, + "step": 7321 + }, + { + "epoch": 0.6603237588492582, + "grad_norm": 1.3141213801887563, + "learning_rate": 1.092930056205486e-06, + "loss": 0.9104, + "step": 7322 + }, + { + "epoch": 0.6604139423727285, + "grad_norm": 1.755318869290062, + "learning_rate": 1.092409423918306e-06, + "loss": 0.9083, + "step": 7323 + }, + { + "epoch": 0.6605041258961988, + "grad_norm": 1.2018439668883185, + "learning_rate": 1.0918888690717581e-06, + "loss": 0.8655, + "step": 7324 + }, + { + "epoch": 0.660594309419669, + "grad_norm": 1.6486927576084267, + "learning_rate": 1.091368391710259e-06, + "loss": 1.0067, + "step": 7325 + }, + { + "epoch": 0.6606844929431392, + "grad_norm": 1.4013216336917629, + "learning_rate": 1.0908479918782198e-06, + "loss": 0.8868, + "step": 7326 + }, + { + "epoch": 0.6607746764666096, + "grad_norm": 1.2772407606876723, + "learning_rate": 1.0903276696200413e-06, + "loss": 0.9678, + "step": 7327 + }, + { + "epoch": 0.6608648599900798, + "grad_norm": 1.5621428272188256, + "learning_rate": 1.0898074249801234e-06, + "loss": 0.9482, + "step": 7328 + }, + { + "epoch": 0.6609550435135501, + "grad_norm": 1.3515114747046137, + "learning_rate": 1.0892872580028533e-06, + "loss": 0.9546, + "step": 7329 + }, + { + "epoch": 0.6610452270370203, + "grad_norm": 1.6051843395911274, + "learning_rate": 1.0887671687326178e-06, + "loss": 0.9699, + "step": 7330 + }, + { + "epoch": 0.6611354105604906, + "grad_norm": 1.3332419612491246, + "learning_rate": 1.0882471572137908e-06, + "loss": 0.9024, + "step": 7331 + }, + { + "epoch": 0.6612255940839609, + "grad_norm": 1.4283975086610252, + "learning_rate": 1.087727223490744e-06, + "loss": 0.9856, + "step": 
7332 + }, + { + "epoch": 0.6613157776074311, + "grad_norm": 1.8840646972238495, + "learning_rate": 1.0872073676078405e-06, + "loss": 0.994, + "step": 7333 + }, + { + "epoch": 0.6614059611309013, + "grad_norm": 1.66121533887998, + "learning_rate": 1.0866875896094375e-06, + "loss": 0.8872, + "step": 7334 + }, + { + "epoch": 0.6614961446543717, + "grad_norm": 1.494128018992982, + "learning_rate": 1.0861678895398854e-06, + "loss": 0.9608, + "step": 7335 + }, + { + "epoch": 0.6615863281778419, + "grad_norm": 1.5501900033474192, + "learning_rate": 1.0856482674435286e-06, + "loss": 0.8961, + "step": 7336 + }, + { + "epoch": 0.6616765117013121, + "grad_norm": 0.7090932799207406, + "learning_rate": 1.0851287233647024e-06, + "loss": 0.7992, + "step": 7337 + }, + { + "epoch": 0.6617666952247825, + "grad_norm": 1.4491561505969142, + "learning_rate": 1.084609257347738e-06, + "loss": 1.0032, + "step": 7338 + }, + { + "epoch": 0.6618568787482527, + "grad_norm": 1.5132449624787407, + "learning_rate": 1.0840898694369594e-06, + "loss": 0.9195, + "step": 7339 + }, + { + "epoch": 0.661947062271723, + "grad_norm": 1.552099249158665, + "learning_rate": 1.083570559676683e-06, + "loss": 0.9351, + "step": 7340 + }, + { + "epoch": 0.6620372457951932, + "grad_norm": 1.4521036147567095, + "learning_rate": 1.08305132811122e-06, + "loss": 0.9169, + "step": 7341 + }, + { + "epoch": 0.6621274293186635, + "grad_norm": 1.3115102123920184, + "learning_rate": 1.0825321747848735e-06, + "loss": 0.9392, + "step": 7342 + }, + { + "epoch": 0.6622176128421338, + "grad_norm": 0.764703351578689, + "learning_rate": 1.0820130997419417e-06, + "loss": 0.8554, + "step": 7343 + }, + { + "epoch": 0.662307796365604, + "grad_norm": 1.6388228191197642, + "learning_rate": 1.0814941030267123e-06, + "loss": 0.9386, + "step": 7344 + }, + { + "epoch": 0.6623979798890742, + "grad_norm": 0.5810601755091409, + "learning_rate": 1.080975184683472e-06, + "loss": 0.7827, + "step": 7345 + }, + { + "epoch": 0.6624881634125446, + "grad_norm": 1.5060403321781315, + "learning_rate": 1.0804563447564948e-06, + "loss": 1.0018, + "step": 7346 + }, + { + "epoch": 0.6625783469360148, + "grad_norm": 1.539299573889657, + "learning_rate": 1.0799375832900545e-06, + "loss": 0.8887, + "step": 7347 + }, + { + "epoch": 0.662668530459485, + "grad_norm": 1.3970924600738632, + "learning_rate": 1.0794189003284118e-06, + "loss": 0.974, + "step": 7348 + }, + { + "epoch": 0.6627587139829553, + "grad_norm": 1.4659344897445643, + "learning_rate": 1.0789002959158242e-06, + "loss": 0.9583, + "step": 7349 + }, + { + "epoch": 0.6628488975064256, + "grad_norm": 1.7097335829857905, + "learning_rate": 1.0783817700965428e-06, + "loss": 0.9014, + "step": 7350 + }, + { + "epoch": 0.6629390810298958, + "grad_norm": 1.284380554038706, + "learning_rate": 1.0778633229148102e-06, + "loss": 0.9839, + "step": 7351 + }, + { + "epoch": 0.6630292645533661, + "grad_norm": 1.5727547400116153, + "learning_rate": 1.0773449544148645e-06, + "loss": 0.9461, + "step": 7352 + }, + { + "epoch": 0.6631194480768363, + "grad_norm": 1.319141884741889, + "learning_rate": 1.076826664640934e-06, + "loss": 0.9844, + "step": 7353 + }, + { + "epoch": 0.6632096316003067, + "grad_norm": 0.8012316297903925, + "learning_rate": 1.0763084536372424e-06, + "loss": 0.8716, + "step": 7354 + }, + { + "epoch": 0.6632998151237769, + "grad_norm": 1.467145652296073, + "learning_rate": 1.0757903214480068e-06, + "loss": 0.9588, + "step": 7355 + }, + { + "epoch": 0.6633899986472471, + "grad_norm": 1.6452848832438227, + "learning_rate": 
1.0752722681174376e-06, + "loss": 0.8827, + "step": 7356 + }, + { + "epoch": 0.6634801821707174, + "grad_norm": 2.5550484591851053, + "learning_rate": 1.074754293689737e-06, + "loss": 0.8597, + "step": 7357 + }, + { + "epoch": 0.6635703656941877, + "grad_norm": 1.5535521842754327, + "learning_rate": 1.0742363982091023e-06, + "loss": 0.9382, + "step": 7358 + }, + { + "epoch": 0.6636605492176579, + "grad_norm": 1.8232068802653938, + "learning_rate": 1.0737185817197215e-06, + "loss": 0.9092, + "step": 7359 + }, + { + "epoch": 0.6637507327411282, + "grad_norm": 1.5195330543801546, + "learning_rate": 1.0732008442657803e-06, + "loss": 0.8994, + "step": 7360 + }, + { + "epoch": 0.6638409162645985, + "grad_norm": 1.3239007978845942, + "learning_rate": 1.0726831858914516e-06, + "loss": 0.9233, + "step": 7361 + }, + { + "epoch": 0.6639310997880687, + "grad_norm": 2.0046148701784285, + "learning_rate": 1.0721656066409084e-06, + "loss": 0.959, + "step": 7362 + }, + { + "epoch": 0.664021283311539, + "grad_norm": 1.6816925147155448, + "learning_rate": 1.0716481065583108e-06, + "loss": 0.9472, + "step": 7363 + }, + { + "epoch": 0.6641114668350092, + "grad_norm": 0.6445223720979479, + "learning_rate": 1.071130685687816e-06, + "loss": 0.7947, + "step": 7364 + }, + { + "epoch": 0.6642016503584796, + "grad_norm": 1.3538592894477937, + "learning_rate": 1.0706133440735723e-06, + "loss": 0.8764, + "step": 7365 + }, + { + "epoch": 0.6642918338819498, + "grad_norm": 1.5168354667098365, + "learning_rate": 1.070096081759723e-06, + "loss": 0.8897, + "step": 7366 + }, + { + "epoch": 0.66438201740542, + "grad_norm": 1.634165882860638, + "learning_rate": 1.069578898790404e-06, + "loss": 1.0428, + "step": 7367 + }, + { + "epoch": 0.6644722009288903, + "grad_norm": 1.466537513609472, + "learning_rate": 1.0690617952097424e-06, + "loss": 0.9272, + "step": 7368 + }, + { + "epoch": 0.6645623844523606, + "grad_norm": 1.5022154183409238, + "learning_rate": 1.068544771061863e-06, + "loss": 0.9866, + "step": 7369 + }, + { + "epoch": 0.6646525679758308, + "grad_norm": 1.916007661171013, + "learning_rate": 1.0680278263908787e-06, + "loss": 0.9814, + "step": 7370 + }, + { + "epoch": 0.6647427514993011, + "grad_norm": 0.7837233112055996, + "learning_rate": 1.0675109612408991e-06, + "loss": 0.8458, + "step": 7371 + }, + { + "epoch": 0.6648329350227713, + "grad_norm": 1.7049332564817947, + "learning_rate": 1.0669941756560264e-06, + "loss": 0.912, + "step": 7372 + }, + { + "epoch": 0.6649231185462416, + "grad_norm": 2.78132817876429, + "learning_rate": 1.0664774696803548e-06, + "loss": 0.8975, + "step": 7373 + }, + { + "epoch": 0.6650133020697119, + "grad_norm": 1.5696258561980414, + "learning_rate": 1.065960843357973e-06, + "loss": 0.9117, + "step": 7374 + }, + { + "epoch": 0.6651034855931821, + "grad_norm": 1.4221984998310724, + "learning_rate": 1.065444296732963e-06, + "loss": 0.957, + "step": 7375 + }, + { + "epoch": 0.6651936691166523, + "grad_norm": 0.6915523353170956, + "learning_rate": 1.064927829849397e-06, + "loss": 0.8018, + "step": 7376 + }, + { + "epoch": 0.6652838526401227, + "grad_norm": 1.7305078781905305, + "learning_rate": 1.0644114427513465e-06, + "loss": 0.9393, + "step": 7377 + }, + { + "epoch": 0.6653740361635929, + "grad_norm": 1.2241213372089157, + "learning_rate": 1.0638951354828693e-06, + "loss": 0.866, + "step": 7378 + }, + { + "epoch": 0.6654642196870632, + "grad_norm": 1.5020039999968275, + "learning_rate": 1.063378908088021e-06, + "loss": 0.9951, + "step": 7379 + }, + { + "epoch": 0.6655544032105334, + 
"grad_norm": 1.4852984316701563, + "learning_rate": 1.0628627606108486e-06, + "loss": 0.9342, + "step": 7380 + }, + { + "epoch": 0.6656445867340037, + "grad_norm": 1.600181914955205, + "learning_rate": 1.062346693095393e-06, + "loss": 1.0008, + "step": 7381 + }, + { + "epoch": 0.665734770257474, + "grad_norm": 1.4213884799832355, + "learning_rate": 1.0618307055856882e-06, + "loss": 0.8831, + "step": 7382 + }, + { + "epoch": 0.6658249537809442, + "grad_norm": 1.3597457204850625, + "learning_rate": 1.061314798125759e-06, + "loss": 0.9939, + "step": 7383 + }, + { + "epoch": 0.6659151373044144, + "grad_norm": 1.191344262683675, + "learning_rate": 1.0607989707596293e-06, + "loss": 0.9011, + "step": 7384 + }, + { + "epoch": 0.6660053208278848, + "grad_norm": 1.3385451894434228, + "learning_rate": 1.0602832235313078e-06, + "loss": 1.0578, + "step": 7385 + }, + { + "epoch": 0.666095504351355, + "grad_norm": 1.5522430218296908, + "learning_rate": 1.0597675564848053e-06, + "loss": 0.9224, + "step": 7386 + }, + { + "epoch": 0.6661856878748252, + "grad_norm": 1.6716284882105372, + "learning_rate": 1.059251969664118e-06, + "loss": 1.0113, + "step": 7387 + }, + { + "epoch": 0.6662758713982956, + "grad_norm": 1.3911085252556687, + "learning_rate": 1.0587364631132402e-06, + "loss": 0.9497, + "step": 7388 + }, + { + "epoch": 0.6663660549217658, + "grad_norm": 1.330209957989104, + "learning_rate": 1.0582210368761573e-06, + "loss": 0.9828, + "step": 7389 + }, + { + "epoch": 0.666456238445236, + "grad_norm": 1.8956233386879222, + "learning_rate": 1.0577056909968485e-06, + "loss": 0.943, + "step": 7390 + }, + { + "epoch": 0.6665464219687063, + "grad_norm": 1.415859197986922, + "learning_rate": 1.0571904255192857e-06, + "loss": 0.9603, + "step": 7391 + }, + { + "epoch": 0.6666366054921766, + "grad_norm": 1.9209408854772305, + "learning_rate": 1.0566752404874354e-06, + "loss": 0.9752, + "step": 7392 + }, + { + "epoch": 0.6667267890156469, + "grad_norm": 1.595802332059899, + "learning_rate": 1.0561601359452543e-06, + "loss": 0.9348, + "step": 7393 + }, + { + "epoch": 0.6668169725391171, + "grad_norm": 1.5351894074176446, + "learning_rate": 1.0556451119366947e-06, + "loss": 0.8789, + "step": 7394 + }, + { + "epoch": 0.6669071560625873, + "grad_norm": 1.5227319110452928, + "learning_rate": 1.0551301685057011e-06, + "loss": 0.8734, + "step": 7395 + }, + { + "epoch": 0.6669973395860577, + "grad_norm": 5.653367968009324, + "learning_rate": 1.0546153056962117e-06, + "loss": 1.0325, + "step": 7396 + }, + { + "epoch": 0.6670875231095279, + "grad_norm": 1.5686753882184468, + "learning_rate": 1.0541005235521578e-06, + "loss": 0.9276, + "step": 7397 + }, + { + "epoch": 0.6671777066329981, + "grad_norm": 1.5353718768781157, + "learning_rate": 1.0535858221174614e-06, + "loss": 0.9534, + "step": 7398 + }, + { + "epoch": 0.6672678901564684, + "grad_norm": 1.5350117312054505, + "learning_rate": 1.0530712014360426e-06, + "loss": 0.9851, + "step": 7399 + }, + { + "epoch": 0.6673580736799387, + "grad_norm": 1.3404987835990625, + "learning_rate": 1.0525566615518088e-06, + "loss": 0.9862, + "step": 7400 + }, + { + "epoch": 0.667448257203409, + "grad_norm": 1.308882223188579, + "learning_rate": 1.0520422025086662e-06, + "loss": 0.9577, + "step": 7401 + }, + { + "epoch": 0.6675384407268792, + "grad_norm": 1.3816572633215463, + "learning_rate": 1.0515278243505092e-06, + "loss": 0.9913, + "step": 7402 + }, + { + "epoch": 0.6676286242503494, + "grad_norm": 1.282410398554511, + "learning_rate": 1.0510135271212278e-06, + "loss": 0.9682, + 
"step": 7403 + }, + { + "epoch": 0.6677188077738198, + "grad_norm": 1.1879860234738873, + "learning_rate": 1.0504993108647052e-06, + "loss": 0.998, + "step": 7404 + }, + { + "epoch": 0.66780899129729, + "grad_norm": 1.2647587201552477, + "learning_rate": 1.0499851756248168e-06, + "loss": 0.913, + "step": 7405 + }, + { + "epoch": 0.6678991748207602, + "grad_norm": 1.3865616400464331, + "learning_rate": 1.0494711214454316e-06, + "loss": 0.9212, + "step": 7406 + }, + { + "epoch": 0.6679893583442305, + "grad_norm": 1.3666478427117827, + "learning_rate": 1.0489571483704111e-06, + "loss": 0.8535, + "step": 7407 + }, + { + "epoch": 0.6680795418677008, + "grad_norm": 1.570904824057638, + "learning_rate": 1.048443256443612e-06, + "loss": 0.8615, + "step": 7408 + }, + { + "epoch": 0.668169725391171, + "grad_norm": 1.7227655838851212, + "learning_rate": 1.0479294457088801e-06, + "loss": 0.9638, + "step": 7409 + }, + { + "epoch": 0.6682599089146413, + "grad_norm": 1.3908593483958986, + "learning_rate": 1.0474157162100574e-06, + "loss": 1.0565, + "step": 7410 + }, + { + "epoch": 0.6683500924381116, + "grad_norm": 1.4210694095483691, + "learning_rate": 1.0469020679909786e-06, + "loss": 0.8756, + "step": 7411 + }, + { + "epoch": 0.6684402759615818, + "grad_norm": 2.3558273422044484, + "learning_rate": 1.0463885010954705e-06, + "loss": 0.8614, + "step": 7412 + }, + { + "epoch": 0.6685304594850521, + "grad_norm": 1.618499076571951, + "learning_rate": 1.0458750155673536e-06, + "loss": 1.0089, + "step": 7413 + }, + { + "epoch": 0.6686206430085223, + "grad_norm": 1.690990934414196, + "learning_rate": 1.0453616114504421e-06, + "loss": 0.8975, + "step": 7414 + }, + { + "epoch": 0.6687108265319927, + "grad_norm": 1.416260502129021, + "learning_rate": 1.0448482887885406e-06, + "loss": 0.9704, + "step": 7415 + }, + { + "epoch": 0.6688010100554629, + "grad_norm": 1.46164918588228, + "learning_rate": 1.044335047625451e-06, + "loss": 0.9466, + "step": 7416 + }, + { + "epoch": 0.6688911935789331, + "grad_norm": 1.4988084670013726, + "learning_rate": 1.0438218880049637e-06, + "loss": 0.9481, + "step": 7417 + }, + { + "epoch": 0.6689813771024034, + "grad_norm": 1.6409088615805418, + "learning_rate": 1.0433088099708653e-06, + "loss": 1.0005, + "step": 7418 + }, + { + "epoch": 0.6690715606258737, + "grad_norm": 0.7269863000153467, + "learning_rate": 1.0427958135669346e-06, + "loss": 0.7523, + "step": 7419 + }, + { + "epoch": 0.6691617441493439, + "grad_norm": 3.5281550254929464, + "learning_rate": 1.0422828988369428e-06, + "loss": 0.8771, + "step": 7420 + }, + { + "epoch": 0.6692519276728142, + "grad_norm": 1.472843148187616, + "learning_rate": 1.041770065824655e-06, + "loss": 1.0066, + "step": 7421 + }, + { + "epoch": 0.6693421111962844, + "grad_norm": 1.7576248407859614, + "learning_rate": 1.0412573145738287e-06, + "loss": 0.7969, + "step": 7422 + }, + { + "epoch": 0.6694322947197547, + "grad_norm": 1.3955881510754453, + "learning_rate": 1.040744645128216e-06, + "loss": 0.913, + "step": 7423 + }, + { + "epoch": 0.669522478243225, + "grad_norm": 1.4996202306014188, + "learning_rate": 1.040232057531558e-06, + "loss": 1.0033, + "step": 7424 + }, + { + "epoch": 0.6696126617666952, + "grad_norm": 4.151396247656168, + "learning_rate": 1.0397195518275932e-06, + "loss": 0.9109, + "step": 7425 + }, + { + "epoch": 0.6697028452901654, + "grad_norm": 1.357464072031291, + "learning_rate": 1.0392071280600512e-06, + "loss": 0.9134, + "step": 7426 + }, + { + "epoch": 0.6697930288136358, + "grad_norm": 1.4494006924410292, + 
"learning_rate": 1.0386947862726549e-06, + "loss": 0.9445, + "step": 7427 + }, + { + "epoch": 0.669883212337106, + "grad_norm": 2.044958134341561, + "learning_rate": 1.0381825265091197e-06, + "loss": 0.8619, + "step": 7428 + }, + { + "epoch": 0.6699733958605762, + "grad_norm": 1.8214700317472545, + "learning_rate": 1.037670348813155e-06, + "loss": 0.8648, + "step": 7429 + }, + { + "epoch": 0.6700635793840465, + "grad_norm": 1.6338764721368604, + "learning_rate": 1.0371582532284624e-06, + "loss": 0.9674, + "step": 7430 + }, + { + "epoch": 0.6701537629075168, + "grad_norm": 1.4161164767426104, + "learning_rate": 1.0366462397987375e-06, + "loss": 0.8764, + "step": 7431 + }, + { + "epoch": 0.6702439464309871, + "grad_norm": 1.6987110300204633, + "learning_rate": 1.0361343085676665e-06, + "loss": 0.9393, + "step": 7432 + }, + { + "epoch": 0.6703341299544573, + "grad_norm": 1.3293800258496633, + "learning_rate": 1.0356224595789309e-06, + "loss": 1.0553, + "step": 7433 + }, + { + "epoch": 0.6704243134779276, + "grad_norm": 1.3847949370602943, + "learning_rate": 1.0351106928762046e-06, + "loss": 0.9992, + "step": 7434 + }, + { + "epoch": 0.6705144970013979, + "grad_norm": 2.199869875961051, + "learning_rate": 1.034599008503154e-06, + "loss": 1.0147, + "step": 7435 + }, + { + "epoch": 0.6706046805248681, + "grad_norm": 1.2858468377192642, + "learning_rate": 1.0340874065034406e-06, + "loss": 0.9319, + "step": 7436 + }, + { + "epoch": 0.6706948640483383, + "grad_norm": 1.4919477340042333, + "learning_rate": 1.0335758869207137e-06, + "loss": 0.8665, + "step": 7437 + }, + { + "epoch": 0.6707850475718087, + "grad_norm": 3.4282566987631986, + "learning_rate": 1.0330644497986227e-06, + "loss": 0.9563, + "step": 7438 + }, + { + "epoch": 0.6708752310952789, + "grad_norm": 1.3600531892332888, + "learning_rate": 1.0325530951808029e-06, + "loss": 0.9829, + "step": 7439 + }, + { + "epoch": 0.6709654146187491, + "grad_norm": 1.5075019951549378, + "learning_rate": 1.0320418231108887e-06, + "loss": 0.9549, + "step": 7440 + }, + { + "epoch": 0.6710555981422194, + "grad_norm": 1.5580786138149103, + "learning_rate": 1.0315306336325028e-06, + "loss": 0.9836, + "step": 7441 + }, + { + "epoch": 0.6711457816656897, + "grad_norm": 1.5177298805035564, + "learning_rate": 1.0310195267892635e-06, + "loss": 0.9025, + "step": 7442 + }, + { + "epoch": 0.67123596518916, + "grad_norm": 1.7132478904694435, + "learning_rate": 1.030508502624781e-06, + "loss": 0.8863, + "step": 7443 + }, + { + "epoch": 0.6713261487126302, + "grad_norm": 1.793097464402229, + "learning_rate": 1.0299975611826587e-06, + "loss": 0.899, + "step": 7444 + }, + { + "epoch": 0.6714163322361004, + "grad_norm": 1.3522765638211047, + "learning_rate": 1.0294867025064928e-06, + "loss": 0.9515, + "step": 7445 + }, + { + "epoch": 0.6715065157595708, + "grad_norm": 1.3225957326005973, + "learning_rate": 1.028975926639874e-06, + "loss": 0.9505, + "step": 7446 + }, + { + "epoch": 0.671596699283041, + "grad_norm": 2.0741756456282183, + "learning_rate": 1.0284652336263823e-06, + "loss": 0.8633, + "step": 7447 + }, + { + "epoch": 0.6716868828065112, + "grad_norm": 3.803369940186795, + "learning_rate": 1.0279546235095938e-06, + "loss": 0.8743, + "step": 7448 + }, + { + "epoch": 0.6717770663299815, + "grad_norm": 0.7532893606942893, + "learning_rate": 1.0274440963330768e-06, + "loss": 0.8344, + "step": 7449 + }, + { + "epoch": 0.6718672498534518, + "grad_norm": 1.3544543865405032, + "learning_rate": 1.0269336521403919e-06, + "loss": 0.9425, + "step": 7450 + }, + { + "epoch": 
0.671957433376922, + "grad_norm": 1.4635685367312303, + "learning_rate": 1.0264232909750936e-06, + "loss": 1.0694, + "step": 7451 + }, + { + "epoch": 0.6720476169003923, + "grad_norm": 1.2529068050539807, + "learning_rate": 1.025913012880728e-06, + "loss": 1.0003, + "step": 7452 + }, + { + "epoch": 0.6721378004238625, + "grad_norm": 1.324923167610179, + "learning_rate": 1.0254028179008362e-06, + "loss": 0.8515, + "step": 7453 + }, + { + "epoch": 0.6722279839473329, + "grad_norm": 1.3245274006827121, + "learning_rate": 1.0248927060789483e-06, + "loss": 0.8399, + "step": 7454 + }, + { + "epoch": 0.6723181674708031, + "grad_norm": 1.3950779374887217, + "learning_rate": 1.0243826774585928e-06, + "loss": 0.8922, + "step": 7455 + }, + { + "epoch": 0.6724083509942733, + "grad_norm": 1.6236785504109787, + "learning_rate": 1.0238727320832854e-06, + "loss": 0.9333, + "step": 7456 + }, + { + "epoch": 0.6724985345177437, + "grad_norm": 1.600615281772269, + "learning_rate": 1.0233628699965403e-06, + "loss": 0.9426, + "step": 7457 + }, + { + "epoch": 0.6725887180412139, + "grad_norm": 1.3485483948337258, + "learning_rate": 1.0228530912418594e-06, + "loss": 0.9633, + "step": 7458 + }, + { + "epoch": 0.6726789015646841, + "grad_norm": 1.4934507285675873, + "learning_rate": 1.0223433958627404e-06, + "loss": 0.9117, + "step": 7459 + }, + { + "epoch": 0.6727690850881544, + "grad_norm": 1.6035472202324628, + "learning_rate": 1.021833783902674e-06, + "loss": 0.9699, + "step": 7460 + }, + { + "epoch": 0.6728592686116247, + "grad_norm": 1.358322130441786, + "learning_rate": 1.0213242554051427e-06, + "loss": 0.9453, + "step": 7461 + }, + { + "epoch": 0.6729494521350949, + "grad_norm": 1.4377773680744508, + "learning_rate": 1.0208148104136229e-06, + "loss": 0.8827, + "step": 7462 + }, + { + "epoch": 0.6730396356585652, + "grad_norm": 3.255697684572399, + "learning_rate": 1.020305448971582e-06, + "loss": 0.9109, + "step": 7463 + }, + { + "epoch": 0.6731298191820354, + "grad_norm": 1.281588207360041, + "learning_rate": 1.0197961711224824e-06, + "loss": 0.9032, + "step": 7464 + }, + { + "epoch": 0.6732200027055057, + "grad_norm": 1.4881896944494843, + "learning_rate": 1.0192869769097777e-06, + "loss": 0.9557, + "step": 7465 + }, + { + "epoch": 0.673310186228976, + "grad_norm": 1.6415731687433286, + "learning_rate": 1.018777866376916e-06, + "loss": 0.9551, + "step": 7466 + }, + { + "epoch": 0.6734003697524462, + "grad_norm": 1.6190522725441785, + "learning_rate": 1.0182688395673374e-06, + "loss": 0.8174, + "step": 7467 + }, + { + "epoch": 0.6734905532759164, + "grad_norm": 1.3222272182729953, + "learning_rate": 1.017759896524475e-06, + "loss": 0.9074, + "step": 7468 + }, + { + "epoch": 0.6735807367993868, + "grad_norm": 1.7814901758865793, + "learning_rate": 1.0172510372917528e-06, + "loss": 0.9745, + "step": 7469 + }, + { + "epoch": 0.673670920322857, + "grad_norm": 2.0172114080523667, + "learning_rate": 1.0167422619125925e-06, + "loss": 0.8394, + "step": 7470 + }, + { + "epoch": 0.6737611038463273, + "grad_norm": 1.4747171324711166, + "learning_rate": 1.0162335704304026e-06, + "loss": 0.9549, + "step": 7471 + }, + { + "epoch": 0.6738512873697975, + "grad_norm": 0.7234009076415199, + "learning_rate": 1.0157249628885903e-06, + "loss": 0.8019, + "step": 7472 + }, + { + "epoch": 0.6739414708932678, + "grad_norm": 1.218809615413319, + "learning_rate": 1.0152164393305506e-06, + "loss": 0.8563, + "step": 7473 + }, + { + "epoch": 0.6740316544167381, + "grad_norm": 1.674068473732626, + "learning_rate": 
1.0147079997996746e-06, + "loss": 0.9323, + "step": 7474 + }, + { + "epoch": 0.6741218379402083, + "grad_norm": 1.6388195457803132, + "learning_rate": 1.0141996443393446e-06, + "loss": 0.9681, + "step": 7475 + }, + { + "epoch": 0.6742120214636785, + "grad_norm": 1.4843749196905818, + "learning_rate": 1.0136913729929369e-06, + "loss": 0.9317, + "step": 7476 + }, + { + "epoch": 0.6743022049871489, + "grad_norm": 1.4445724654726426, + "learning_rate": 1.0131831858038203e-06, + "loss": 0.9082, + "step": 7477 + }, + { + "epoch": 0.6743923885106191, + "grad_norm": 1.3577349185179752, + "learning_rate": 1.0126750828153538e-06, + "loss": 1.0008, + "step": 7478 + }, + { + "epoch": 0.6744825720340893, + "grad_norm": 1.5928050212897833, + "learning_rate": 1.012167064070895e-06, + "loss": 0.9818, + "step": 7479 + }, + { + "epoch": 0.6745727555575597, + "grad_norm": 1.4094556510500487, + "learning_rate": 1.0116591296137885e-06, + "loss": 0.9667, + "step": 7480 + }, + { + "epoch": 0.6746629390810299, + "grad_norm": 1.3524938913232065, + "learning_rate": 1.0111512794873746e-06, + "loss": 0.9682, + "step": 7481 + }, + { + "epoch": 0.6747531226045002, + "grad_norm": 1.9637385572346548, + "learning_rate": 1.010643513734986e-06, + "loss": 0.9404, + "step": 7482 + }, + { + "epoch": 0.6748433061279704, + "grad_norm": 1.4260512840925674, + "learning_rate": 1.010135832399948e-06, + "loss": 0.9823, + "step": 7483 + }, + { + "epoch": 0.6749334896514407, + "grad_norm": 1.150574337550007, + "learning_rate": 1.0096282355255792e-06, + "loss": 0.9451, + "step": 7484 + }, + { + "epoch": 0.675023673174911, + "grad_norm": 1.668350426201745, + "learning_rate": 1.0091207231551905e-06, + "loss": 0.9976, + "step": 7485 + }, + { + "epoch": 0.6751138566983812, + "grad_norm": 1.3240574651302344, + "learning_rate": 1.0086132953320842e-06, + "loss": 0.902, + "step": 7486 + }, + { + "epoch": 0.6752040402218514, + "grad_norm": 1.753215084379363, + "learning_rate": 1.0081059520995591e-06, + "loss": 0.9677, + "step": 7487 + }, + { + "epoch": 0.6752942237453218, + "grad_norm": 2.125092560491311, + "learning_rate": 1.0075986935009028e-06, + "loss": 1.0013, + "step": 7488 + }, + { + "epoch": 0.675384407268792, + "grad_norm": 1.4189004490606387, + "learning_rate": 1.0070915195793982e-06, + "loss": 0.9484, + "step": 7489 + }, + { + "epoch": 0.6754745907922622, + "grad_norm": 1.7568060133942278, + "learning_rate": 1.0065844303783197e-06, + "loss": 0.9215, + "step": 7490 + }, + { + "epoch": 0.6755647743157325, + "grad_norm": 1.274558434036901, + "learning_rate": 1.0060774259409356e-06, + "loss": 0.8627, + "step": 7491 + }, + { + "epoch": 0.6756549578392028, + "grad_norm": 1.2765462176289575, + "learning_rate": 1.0055705063105065e-06, + "loss": 0.9754, + "step": 7492 + }, + { + "epoch": 0.675745141362673, + "grad_norm": 1.4327819355041544, + "learning_rate": 1.0050636715302837e-06, + "loss": 0.9368, + "step": 7493 + }, + { + "epoch": 0.6758353248861433, + "grad_norm": 1.4831011476137912, + "learning_rate": 1.0045569216435157e-06, + "loss": 0.9551, + "step": 7494 + }, + { + "epoch": 0.6759255084096135, + "grad_norm": 1.2991030294571384, + "learning_rate": 1.0040502566934384e-06, + "loss": 1.026, + "step": 7495 + }, + { + "epoch": 0.6760156919330839, + "grad_norm": 1.9045449432774604, + "learning_rate": 1.0035436767232866e-06, + "loss": 0.8805, + "step": 7496 + }, + { + "epoch": 0.6761058754565541, + "grad_norm": 1.3813557683572337, + "learning_rate": 1.0030371817762816e-06, + "loss": 0.957, + "step": 7497 + }, + { + "epoch": 
0.6761960589800243, + "grad_norm": 1.393936286759149, + "learning_rate": 1.0025307718956417e-06, + "loss": 0.9494, + "step": 7498 + }, + { + "epoch": 0.6762862425034946, + "grad_norm": 1.3067250131860413, + "learning_rate": 1.0020244471245765e-06, + "loss": 0.9622, + "step": 7499 + }, + { + "epoch": 0.6763764260269649, + "grad_norm": 1.5638412059894888, + "learning_rate": 1.001518207506288e-06, + "loss": 0.8924, + "step": 7500 + }, + { + "epoch": 0.6764666095504351, + "grad_norm": 1.4530167898270017, + "learning_rate": 1.0010120530839717e-06, + "loss": 1.0005, + "step": 7501 + }, + { + "epoch": 0.6765567930739054, + "grad_norm": 1.1348972474022725, + "learning_rate": 1.0005059839008161e-06, + "loss": 0.9633, + "step": 7502 + }, + { + "epoch": 0.6766469765973756, + "grad_norm": 1.5495919921146202, + "learning_rate": 1.0000000000000004e-06, + "loss": 0.8922, + "step": 7503 + }, + { + "epoch": 0.676737160120846, + "grad_norm": 1.4569641588734086, + "learning_rate": 9.994941014246985e-07, + "loss": 0.9916, + "step": 7504 + }, + { + "epoch": 0.6768273436443162, + "grad_norm": 1.2626256842419235, + "learning_rate": 9.989882882180766e-07, + "loss": 0.834, + "step": 7505 + }, + { + "epoch": 0.6769175271677864, + "grad_norm": 1.5465825893032317, + "learning_rate": 9.984825604232938e-07, + "loss": 0.9573, + "step": 7506 + }, + { + "epoch": 0.6770077106912568, + "grad_norm": 1.3637287531245699, + "learning_rate": 9.97976918083502e-07, + "loss": 0.8818, + "step": 7507 + }, + { + "epoch": 0.677097894214727, + "grad_norm": 1.3119891852944947, + "learning_rate": 9.974713612418427e-07, + "loss": 1.0144, + "step": 7508 + }, + { + "epoch": 0.6771880777381972, + "grad_norm": 1.3671316080930342, + "learning_rate": 9.969658899414563e-07, + "loss": 0.9512, + "step": 7509 + }, + { + "epoch": 0.6772782612616675, + "grad_norm": 1.516545438986898, + "learning_rate": 9.964605042254696e-07, + "loss": 0.8627, + "step": 7510 + }, + { + "epoch": 0.6773684447851378, + "grad_norm": 1.5082517784509049, + "learning_rate": 9.959552041370076e-07, + "loss": 0.8809, + "step": 7511 + }, + { + "epoch": 0.677458628308608, + "grad_norm": 1.3652199674836611, + "learning_rate": 9.954499897191824e-07, + "loss": 0.8586, + "step": 7512 + }, + { + "epoch": 0.6775488118320783, + "grad_norm": 1.8396380202163183, + "learning_rate": 9.949448610151043e-07, + "loss": 0.9051, + "step": 7513 + }, + { + "epoch": 0.6776389953555485, + "grad_norm": 1.3192325667313218, + "learning_rate": 9.944398180678719e-07, + "loss": 0.921, + "step": 7514 + }, + { + "epoch": 0.6777291788790188, + "grad_norm": 1.467199765480131, + "learning_rate": 9.939348609205789e-07, + "loss": 0.8994, + "step": 7515 + }, + { + "epoch": 0.6778193624024891, + "grad_norm": 1.3212157551235184, + "learning_rate": 9.93429989616311e-07, + "loss": 0.9474, + "step": 7516 + }, + { + "epoch": 0.6779095459259593, + "grad_norm": 1.5997610986690924, + "learning_rate": 9.929252041981464e-07, + "loss": 0.8664, + "step": 7517 + }, + { + "epoch": 0.6779997294494295, + "grad_norm": 1.3987395743492017, + "learning_rate": 9.924205047091572e-07, + "loss": 0.9105, + "step": 7518 + }, + { + "epoch": 0.6780899129728999, + "grad_norm": 1.4685905857013535, + "learning_rate": 9.919158911924056e-07, + "loss": 1.062, + "step": 7519 + }, + { + "epoch": 0.6781800964963701, + "grad_norm": 1.3209420232648004, + "learning_rate": 9.914113636909483e-07, + "loss": 0.9735, + "step": 7520 + }, + { + "epoch": 0.6782702800198404, + "grad_norm": 0.7987581329648025, + "learning_rate": 9.90906922247835e-07, + "loss": 
0.8822, + "step": 7521 + }, + { + "epoch": 0.6783604635433106, + "grad_norm": 1.2036654757001957, + "learning_rate": 9.904025669061072e-07, + "loss": 0.9752, + "step": 7522 + }, + { + "epoch": 0.6784506470667809, + "grad_norm": 1.1917901093007424, + "learning_rate": 9.89898297708799e-07, + "loss": 0.9349, + "step": 7523 + }, + { + "epoch": 0.6785408305902512, + "grad_norm": 1.6393311257695902, + "learning_rate": 9.893941146989388e-07, + "loss": 0.9014, + "step": 7524 + }, + { + "epoch": 0.6786310141137214, + "grad_norm": 1.5223574991671258, + "learning_rate": 9.888900179195437e-07, + "loss": 0.8977, + "step": 7525 + }, + { + "epoch": 0.6787211976371916, + "grad_norm": 1.4817956297754529, + "learning_rate": 9.883860074136285e-07, + "loss": 1.0177, + "step": 7526 + }, + { + "epoch": 0.678811381160662, + "grad_norm": 2.4603219367473406, + "learning_rate": 9.87882083224196e-07, + "loss": 0.9395, + "step": 7527 + }, + { + "epoch": 0.6789015646841322, + "grad_norm": 0.7378128018796659, + "learning_rate": 9.873782453942462e-07, + "loss": 0.8354, + "step": 7528 + }, + { + "epoch": 0.6789917482076024, + "grad_norm": 1.6082552375346448, + "learning_rate": 9.868744939667676e-07, + "loss": 0.8755, + "step": 7529 + }, + { + "epoch": 0.6790819317310728, + "grad_norm": 1.4147411425081182, + "learning_rate": 9.863708289847432e-07, + "loss": 0.8224, + "step": 7530 + }, + { + "epoch": 0.679172115254543, + "grad_norm": 1.6600379811962984, + "learning_rate": 9.85867250491149e-07, + "loss": 0.8472, + "step": 7531 + }, + { + "epoch": 0.6792622987780133, + "grad_norm": 1.4383931080732628, + "learning_rate": 9.853637585289528e-07, + "loss": 1.049, + "step": 7532 + }, + { + "epoch": 0.6793524823014835, + "grad_norm": 2.6896166782887763, + "learning_rate": 9.848603531411159e-07, + "loss": 0.9405, + "step": 7533 + }, + { + "epoch": 0.6794426658249538, + "grad_norm": 1.6944333095219417, + "learning_rate": 9.843570343705899e-07, + "loss": 0.9531, + "step": 7534 + }, + { + "epoch": 0.6795328493484241, + "grad_norm": 1.194178573210075, + "learning_rate": 9.83853802260323e-07, + "loss": 1.0045, + "step": 7535 + }, + { + "epoch": 0.6796230328718943, + "grad_norm": 1.507319948062463, + "learning_rate": 9.833506568532524e-07, + "loss": 0.8883, + "step": 7536 + }, + { + "epoch": 0.6797132163953645, + "grad_norm": 1.3460351011282128, + "learning_rate": 9.828475981923093e-07, + "loss": 0.7916, + "step": 7537 + }, + { + "epoch": 0.6798033999188349, + "grad_norm": 1.765285222941313, + "learning_rate": 9.823446263204175e-07, + "loss": 0.9213, + "step": 7538 + }, + { + "epoch": 0.6798935834423051, + "grad_norm": 1.6528095283272424, + "learning_rate": 9.818417412804937e-07, + "loss": 0.9451, + "step": 7539 + }, + { + "epoch": 0.6799837669657753, + "grad_norm": 1.4738990942527452, + "learning_rate": 9.813389431154463e-07, + "loss": 1.022, + "step": 7540 + }, + { + "epoch": 0.6800739504892456, + "grad_norm": 1.883143289086642, + "learning_rate": 9.808362318681783e-07, + "loss": 0.9695, + "step": 7541 + }, + { + "epoch": 0.6801641340127159, + "grad_norm": 1.6509693823874538, + "learning_rate": 9.803336075815807e-07, + "loss": 0.956, + "step": 7542 + }, + { + "epoch": 0.6802543175361861, + "grad_norm": 1.4855446473675942, + "learning_rate": 9.79831070298544e-07, + "loss": 0.8695, + "step": 7543 + }, + { + "epoch": 0.6803445010596564, + "grad_norm": 1.5286984106983241, + "learning_rate": 9.793286200619443e-07, + "loss": 0.9147, + "step": 7544 + }, + { + "epoch": 0.6804346845831266, + "grad_norm": 1.285175682778967, + "learning_rate": 
9.78826256914655e-07, + "loss": 0.9942, + "step": 7545 + }, + { + "epoch": 0.680524868106597, + "grad_norm": 1.730270133234407, + "learning_rate": 9.7832398089954e-07, + "loss": 0.9886, + "step": 7546 + }, + { + "epoch": 0.6806150516300672, + "grad_norm": 1.673693799534679, + "learning_rate": 9.778217920594565e-07, + "loss": 1.0442, + "step": 7547 + }, + { + "epoch": 0.6807052351535374, + "grad_norm": 1.4780544433529026, + "learning_rate": 9.773196904372547e-07, + "loss": 0.9856, + "step": 7548 + }, + { + "epoch": 0.6807954186770077, + "grad_norm": 1.6114102429359254, + "learning_rate": 9.768176760757742e-07, + "loss": 1.0328, + "step": 7549 + }, + { + "epoch": 0.680885602200478, + "grad_norm": 1.2853063246460157, + "learning_rate": 9.76315749017853e-07, + "loss": 0.9197, + "step": 7550 + }, + { + "epoch": 0.6809757857239482, + "grad_norm": 1.8215256606333003, + "learning_rate": 9.758139093063161e-07, + "loss": 0.948, + "step": 7551 + }, + { + "epoch": 0.6810659692474185, + "grad_norm": 1.2635149850987006, + "learning_rate": 9.753121569839834e-07, + "loss": 1.0091, + "step": 7552 + }, + { + "epoch": 0.6811561527708888, + "grad_norm": 0.8028950774004492, + "learning_rate": 9.748104920936678e-07, + "loss": 0.7746, + "step": 7553 + }, + { + "epoch": 0.681246336294359, + "grad_norm": 1.4144279050014021, + "learning_rate": 9.743089146781738e-07, + "loss": 0.9224, + "step": 7554 + }, + { + "epoch": 0.6813365198178293, + "grad_norm": 1.3709291103881096, + "learning_rate": 9.738074247802988e-07, + "loss": 0.8744, + "step": 7555 + }, + { + "epoch": 0.6814267033412995, + "grad_norm": 1.526877251528541, + "learning_rate": 9.733060224428325e-07, + "loss": 0.9622, + "step": 7556 + }, + { + "epoch": 0.6815168868647699, + "grad_norm": 1.557085268085609, + "learning_rate": 9.728047077085577e-07, + "loss": 0.9531, + "step": 7557 + }, + { + "epoch": 0.6816070703882401, + "grad_norm": 1.7328135704539427, + "learning_rate": 9.723034806202497e-07, + "loss": 0.8409, + "step": 7558 + }, + { + "epoch": 0.6816972539117103, + "grad_norm": 1.5675617626865732, + "learning_rate": 9.718023412206748e-07, + "loss": 0.9555, + "step": 7559 + }, + { + "epoch": 0.6817874374351806, + "grad_norm": 1.300382031646964, + "learning_rate": 9.713012895525935e-07, + "loss": 0.9639, + "step": 7560 + }, + { + "epoch": 0.6818776209586509, + "grad_norm": 1.4531996512977776, + "learning_rate": 9.708003256587584e-07, + "loss": 0.9261, + "step": 7561 + }, + { + "epoch": 0.6819678044821211, + "grad_norm": 1.358892684084553, + "learning_rate": 9.702994495819147e-07, + "loss": 0.9903, + "step": 7562 + }, + { + "epoch": 0.6820579880055914, + "grad_norm": 1.5287057408860665, + "learning_rate": 9.697986613647999e-07, + "loss": 0.8473, + "step": 7563 + }, + { + "epoch": 0.6821481715290616, + "grad_norm": 0.7331240937080212, + "learning_rate": 9.692979610501425e-07, + "loss": 0.8366, + "step": 7564 + }, + { + "epoch": 0.6822383550525319, + "grad_norm": 1.3931710584650154, + "learning_rate": 9.68797348680668e-07, + "loss": 1.0157, + "step": 7565 + }, + { + "epoch": 0.6823285385760022, + "grad_norm": 1.283469069515013, + "learning_rate": 9.682968242990878e-07, + "loss": 0.9055, + "step": 7566 + }, + { + "epoch": 0.6824187220994724, + "grad_norm": 1.2771043932341462, + "learning_rate": 9.677963879481132e-07, + "loss": 0.9936, + "step": 7567 + }, + { + "epoch": 0.6825089056229426, + "grad_norm": 1.6495700940805862, + "learning_rate": 9.672960396704416e-07, + "loss": 0.9258, + "step": 7568 + }, + { + "epoch": 0.682599089146413, + "grad_norm": 
1.4115415832442635, + "learning_rate": 9.667957795087657e-07, + "loss": 0.9416, + "step": 7569 + }, + { + "epoch": 0.6826892726698832, + "grad_norm": 1.7004370520274237, + "learning_rate": 9.662956075057712e-07, + "loss": 1.0028, + "step": 7570 + }, + { + "epoch": 0.6827794561933535, + "grad_norm": 1.9988367751073943, + "learning_rate": 9.657955237041354e-07, + "loss": 1.0027, + "step": 7571 + }, + { + "epoch": 0.6828696397168237, + "grad_norm": 1.29529558085584, + "learning_rate": 9.652955281465278e-07, + "loss": 0.923, + "step": 7572 + }, + { + "epoch": 0.682959823240294, + "grad_norm": 1.5577533339549083, + "learning_rate": 9.64795620875612e-07, + "loss": 0.9489, + "step": 7573 + }, + { + "epoch": 0.6830500067637643, + "grad_norm": 1.3834965194062794, + "learning_rate": 9.64295801934041e-07, + "loss": 0.8866, + "step": 7574 + }, + { + "epoch": 0.6831401902872345, + "grad_norm": 0.6919897171480301, + "learning_rate": 9.63796071364463e-07, + "loss": 0.8061, + "step": 7575 + }, + { + "epoch": 0.6832303738107048, + "grad_norm": 1.4871785241241102, + "learning_rate": 9.632964292095179e-07, + "loss": 1.0291, + "step": 7576 + }, + { + "epoch": 0.6833205573341751, + "grad_norm": 1.2215842035121633, + "learning_rate": 9.627968755118374e-07, + "loss": 0.9287, + "step": 7577 + }, + { + "epoch": 0.6834107408576453, + "grad_norm": 0.9756795019240185, + "learning_rate": 9.622974103140468e-07, + "loss": 0.824, + "step": 7578 + }, + { + "epoch": 0.6835009243811155, + "grad_norm": 1.374886768187, + "learning_rate": 9.617980336587632e-07, + "loss": 1.0026, + "step": 7579 + }, + { + "epoch": 0.6835911079045859, + "grad_norm": 1.517804888619213, + "learning_rate": 9.612987455885964e-07, + "loss": 0.8214, + "step": 7580 + }, + { + "epoch": 0.6836812914280561, + "grad_norm": 1.4423863663368117, + "learning_rate": 9.607995461461467e-07, + "loss": 0.9423, + "step": 7581 + }, + { + "epoch": 0.6837714749515263, + "grad_norm": 1.3705884214182984, + "learning_rate": 9.603004353740111e-07, + "loss": 0.84, + "step": 7582 + }, + { + "epoch": 0.6838616584749966, + "grad_norm": 0.709982261704773, + "learning_rate": 9.598014133147738e-07, + "loss": 0.8478, + "step": 7583 + }, + { + "epoch": 0.6839518419984669, + "grad_norm": 1.242494221372618, + "learning_rate": 9.59302480011017e-07, + "loss": 0.9107, + "step": 7584 + }, + { + "epoch": 0.6840420255219372, + "grad_norm": 0.7104654107100279, + "learning_rate": 9.588036355053102e-07, + "loss": 0.8125, + "step": 7585 + }, + { + "epoch": 0.6841322090454074, + "grad_norm": 1.8178164022796282, + "learning_rate": 9.583048798402182e-07, + "loss": 0.8121, + "step": 7586 + }, + { + "epoch": 0.6842223925688776, + "grad_norm": 1.5011366670059676, + "learning_rate": 9.57806213058298e-07, + "loss": 0.9447, + "step": 7587 + }, + { + "epoch": 0.684312576092348, + "grad_norm": 0.7471334193883193, + "learning_rate": 9.57307635202098e-07, + "loss": 0.8433, + "step": 7588 + }, + { + "epoch": 0.6844027596158182, + "grad_norm": 1.367640000801043, + "learning_rate": 9.568091463141607e-07, + "loss": 1.0458, + "step": 7589 + }, + { + "epoch": 0.6844929431392884, + "grad_norm": 1.24694608519186, + "learning_rate": 9.563107464370187e-07, + "loss": 0.9146, + "step": 7590 + }, + { + "epoch": 0.6845831266627587, + "grad_norm": 1.641394516450394, + "learning_rate": 9.558124356131982e-07, + "loss": 0.9637, + "step": 7591 + }, + { + "epoch": 0.684673310186229, + "grad_norm": 1.7506140585260874, + "learning_rate": 9.553142138852187e-07, + "loss": 0.8783, + "step": 7592 + }, + { + "epoch": 
0.6847634937096992, + "grad_norm": 1.7883903873416482, + "learning_rate": 9.548160812955905e-07, + "loss": 0.8538, + "step": 7593 + }, + { + "epoch": 0.6848536772331695, + "grad_norm": 1.4118486641246735, + "learning_rate": 9.543180378868175e-07, + "loss": 0.7686, + "step": 7594 + }, + { + "epoch": 0.6849438607566397, + "grad_norm": 1.6063892853309254, + "learning_rate": 9.538200837013962e-07, + "loss": 0.9752, + "step": 7595 + }, + { + "epoch": 0.68503404428011, + "grad_norm": 0.7363303915540381, + "learning_rate": 9.533222187818122e-07, + "loss": 0.8541, + "step": 7596 + }, + { + "epoch": 0.6851242278035803, + "grad_norm": 1.480042972585206, + "learning_rate": 9.528244431705492e-07, + "loss": 0.9519, + "step": 7597 + }, + { + "epoch": 0.6852144113270505, + "grad_norm": 1.3250439078775256, + "learning_rate": 9.523267569100774e-07, + "loss": 0.9516, + "step": 7598 + }, + { + "epoch": 0.6853045948505209, + "grad_norm": 1.4278601135744242, + "learning_rate": 9.518291600428652e-07, + "loss": 0.9041, + "step": 7599 + }, + { + "epoch": 0.6853947783739911, + "grad_norm": 1.644461514768122, + "learning_rate": 9.513316526113677e-07, + "loss": 0.9631, + "step": 7600 + }, + { + "epoch": 0.6854849618974613, + "grad_norm": 1.3744897762735693, + "learning_rate": 9.50834234658036e-07, + "loss": 0.9678, + "step": 7601 + }, + { + "epoch": 0.6855751454209316, + "grad_norm": 1.3797346441869722, + "learning_rate": 9.503369062253123e-07, + "loss": 0.9522, + "step": 7602 + }, + { + "epoch": 0.6856653289444019, + "grad_norm": 0.7547748286107914, + "learning_rate": 9.498396673556317e-07, + "loss": 0.8116, + "step": 7603 + }, + { + "epoch": 0.6857555124678721, + "grad_norm": 1.368982455576569, + "learning_rate": 9.493425180914219e-07, + "loss": 0.9642, + "step": 7604 + }, + { + "epoch": 0.6858456959913424, + "grad_norm": 1.4507624791851188, + "learning_rate": 9.488454584751e-07, + "loss": 0.8727, + "step": 7605 + }, + { + "epoch": 0.6859358795148126, + "grad_norm": 1.408285871341766, + "learning_rate": 9.483484885490813e-07, + "loss": 0.9488, + "step": 7606 + }, + { + "epoch": 0.686026063038283, + "grad_norm": 1.5149536218877948, + "learning_rate": 9.478516083557675e-07, + "loss": 0.9617, + "step": 7607 + }, + { + "epoch": 0.6861162465617532, + "grad_norm": 1.7022716939381064, + "learning_rate": 9.473548179375561e-07, + "loss": 0.9606, + "step": 7608 + }, + { + "epoch": 0.6862064300852234, + "grad_norm": 0.8088270947178886, + "learning_rate": 9.468581173368358e-07, + "loss": 0.8277, + "step": 7609 + }, + { + "epoch": 0.6862966136086937, + "grad_norm": 1.3552480732366294, + "learning_rate": 9.463615065959878e-07, + "loss": 0.9518, + "step": 7610 + }, + { + "epoch": 0.686386797132164, + "grad_norm": 1.5738148514260544, + "learning_rate": 9.458649857573857e-07, + "loss": 0.9967, + "step": 7611 + }, + { + "epoch": 0.6864769806556342, + "grad_norm": 1.3510068511986824, + "learning_rate": 9.453685548633963e-07, + "loss": 0.9543, + "step": 7612 + }, + { + "epoch": 0.6865671641791045, + "grad_norm": 1.8180335845992601, + "learning_rate": 9.448722139563756e-07, + "loss": 0.8979, + "step": 7613 + }, + { + "epoch": 0.6866573477025747, + "grad_norm": 1.4884399970188056, + "learning_rate": 9.443759630786769e-07, + "loss": 0.8416, + "step": 7614 + }, + { + "epoch": 0.686747531226045, + "grad_norm": 1.5813303960935858, + "learning_rate": 9.438798022726408e-07, + "loss": 1.0067, + "step": 7615 + }, + { + "epoch": 0.6868377147495153, + "grad_norm": 1.4735487603649136, + "learning_rate": 9.433837315806037e-07, + "loss": 0.898, + 
"step": 7616 + }, + { + "epoch": 0.6869278982729855, + "grad_norm": 1.6238111401941282, + "learning_rate": 9.428877510448925e-07, + "loss": 0.8948, + "step": 7617 + }, + { + "epoch": 0.6870180817964557, + "grad_norm": 1.2557296567283296, + "learning_rate": 9.423918607078272e-07, + "loss": 0.8692, + "step": 7618 + }, + { + "epoch": 0.6871082653199261, + "grad_norm": 1.5366506510056488, + "learning_rate": 9.418960606117208e-07, + "loss": 0.9562, + "step": 7619 + }, + { + "epoch": 0.6871984488433963, + "grad_norm": 1.419620277245779, + "learning_rate": 9.414003507988752e-07, + "loss": 0.8875, + "step": 7620 + }, + { + "epoch": 0.6872886323668665, + "grad_norm": 1.359106190729795, + "learning_rate": 9.409047313115904e-07, + "loss": 0.8716, + "step": 7621 + }, + { + "epoch": 0.6873788158903368, + "grad_norm": 1.779926678508269, + "learning_rate": 9.404092021921521e-07, + "loss": 0.8838, + "step": 7622 + }, + { + "epoch": 0.6874689994138071, + "grad_norm": 2.167966330587051, + "learning_rate": 9.399137634828447e-07, + "loss": 0.9264, + "step": 7623 + }, + { + "epoch": 0.6875591829372774, + "grad_norm": 1.373324023040829, + "learning_rate": 9.394184152259396e-07, + "loss": 0.9636, + "step": 7624 + }, + { + "epoch": 0.6876493664607476, + "grad_norm": 1.4019740900537816, + "learning_rate": 9.389231574637033e-07, + "loss": 1.0423, + "step": 7625 + }, + { + "epoch": 0.6877395499842179, + "grad_norm": 1.4458845501420496, + "learning_rate": 9.384279902383938e-07, + "loss": 1.0328, + "step": 7626 + }, + { + "epoch": 0.6878297335076882, + "grad_norm": 1.30413605691659, + "learning_rate": 9.379329135922615e-07, + "loss": 0.9519, + "step": 7627 + }, + { + "epoch": 0.6879199170311584, + "grad_norm": 1.5745352528642245, + "learning_rate": 9.374379275675495e-07, + "loss": 0.9001, + "step": 7628 + }, + { + "epoch": 0.6880101005546286, + "grad_norm": 1.8220697524603195, + "learning_rate": 9.369430322064931e-07, + "loss": 0.9447, + "step": 7629 + }, + { + "epoch": 0.688100284078099, + "grad_norm": 1.5660826236954175, + "learning_rate": 9.364482275513179e-07, + "loss": 1.0225, + "step": 7630 + }, + { + "epoch": 0.6881904676015692, + "grad_norm": 1.683007512176122, + "learning_rate": 9.359535136442444e-07, + "loss": 1.0078, + "step": 7631 + }, + { + "epoch": 0.6882806511250394, + "grad_norm": 1.3858010707654544, + "learning_rate": 9.354588905274843e-07, + "loss": 0.8695, + "step": 7632 + }, + { + "epoch": 0.6883708346485097, + "grad_norm": 1.3976294884377203, + "learning_rate": 9.349643582432414e-07, + "loss": 0.9726, + "step": 7633 + }, + { + "epoch": 0.68846101817198, + "grad_norm": 1.349206580022673, + "learning_rate": 9.344699168337127e-07, + "loss": 0.9372, + "step": 7634 + }, + { + "epoch": 0.6885512016954503, + "grad_norm": 1.49265829119108, + "learning_rate": 9.339755663410845e-07, + "loss": 0.9051, + "step": 7635 + }, + { + "epoch": 0.6886413852189205, + "grad_norm": 1.5048058927707595, + "learning_rate": 9.334813068075405e-07, + "loss": 0.9675, + "step": 7636 + }, + { + "epoch": 0.6887315687423907, + "grad_norm": 1.3887493471406764, + "learning_rate": 9.329871382752506e-07, + "loss": 0.9492, + "step": 7637 + }, + { + "epoch": 0.6888217522658611, + "grad_norm": 1.5423307620894748, + "learning_rate": 9.32493060786383e-07, + "loss": 0.9291, + "step": 7638 + }, + { + "epoch": 0.6889119357893313, + "grad_norm": 1.239137996956761, + "learning_rate": 9.31999074383093e-07, + "loss": 1.0071, + "step": 7639 + }, + { + "epoch": 0.6890021193128015, + "grad_norm": 1.5862293444672184, + "learning_rate": 
9.315051791075308e-07, + "loss": 0.9396, + "step": 7640 + }, + { + "epoch": 0.6890923028362718, + "grad_norm": 1.1643720893598246, + "learning_rate": 9.310113750018382e-07, + "loss": 0.9331, + "step": 7641 + }, + { + "epoch": 0.6891824863597421, + "grad_norm": 1.7389916500661047, + "learning_rate": 9.305176621081496e-07, + "loss": 1.0153, + "step": 7642 + }, + { + "epoch": 0.6892726698832123, + "grad_norm": 1.4590646091036608, + "learning_rate": 9.300240404685911e-07, + "loss": 0.9759, + "step": 7643 + }, + { + "epoch": 0.6893628534066826, + "grad_norm": 1.5805259796054103, + "learning_rate": 9.295305101252812e-07, + "loss": 0.9491, + "step": 7644 + }, + { + "epoch": 0.6894530369301528, + "grad_norm": 1.929738156043745, + "learning_rate": 9.290370711203314e-07, + "loss": 0.9241, + "step": 7645 + }, + { + "epoch": 0.6895432204536232, + "grad_norm": 1.5019778880842238, + "learning_rate": 9.285437234958433e-07, + "loss": 0.9406, + "step": 7646 + }, + { + "epoch": 0.6896334039770934, + "grad_norm": 1.5137102282187427, + "learning_rate": 9.280504672939124e-07, + "loss": 0.9341, + "step": 7647 + }, + { + "epoch": 0.6897235875005636, + "grad_norm": 1.3552946039293234, + "learning_rate": 9.275573025566266e-07, + "loss": 0.9764, + "step": 7648 + }, + { + "epoch": 0.689813771024034, + "grad_norm": 1.1985182813253545, + "learning_rate": 9.27064229326065e-07, + "loss": 0.9792, + "step": 7649 + }, + { + "epoch": 0.6899039545475042, + "grad_norm": 1.3765427864040054, + "learning_rate": 9.265712476442995e-07, + "loss": 0.9085, + "step": 7650 + }, + { + "epoch": 0.6899941380709744, + "grad_norm": 1.2788224367002048, + "learning_rate": 9.260783575533949e-07, + "loss": 0.8741, + "step": 7651 + }, + { + "epoch": 0.6900843215944447, + "grad_norm": 2.387228454006837, + "learning_rate": 9.255855590954045e-07, + "loss": 0.8996, + "step": 7652 + }, + { + "epoch": 0.690174505117915, + "grad_norm": 1.3812315357061564, + "learning_rate": 9.250928523123802e-07, + "loss": 1.0094, + "step": 7653 + }, + { + "epoch": 0.6902646886413852, + "grad_norm": 1.2898465170966353, + "learning_rate": 9.24600237246359e-07, + "loss": 0.9524, + "step": 7654 + }, + { + "epoch": 0.6903548721648555, + "grad_norm": 1.5956619612241436, + "learning_rate": 9.241077139393769e-07, + "loss": 0.951, + "step": 7655 + }, + { + "epoch": 0.6904450556883257, + "grad_norm": 1.579178939744803, + "learning_rate": 9.236152824334564e-07, + "loss": 0.9719, + "step": 7656 + }, + { + "epoch": 0.690535239211796, + "grad_norm": 1.4525797651650219, + "learning_rate": 9.231229427706151e-07, + "loss": 0.8392, + "step": 7657 + }, + { + "epoch": 0.6906254227352663, + "grad_norm": 1.595109845627159, + "learning_rate": 9.226306949928622e-07, + "loss": 0.9476, + "step": 7658 + }, + { + "epoch": 0.6907156062587365, + "grad_norm": 1.6140720747509527, + "learning_rate": 9.221385391421988e-07, + "loss": 1.0182, + "step": 7659 + }, + { + "epoch": 0.6908057897822067, + "grad_norm": 1.2902064478338036, + "learning_rate": 9.216464752606192e-07, + "loss": 0.9532, + "step": 7660 + }, + { + "epoch": 0.6908959733056771, + "grad_norm": 1.2768788556205508, + "learning_rate": 9.211545033901078e-07, + "loss": 1.0226, + "step": 7661 + }, + { + "epoch": 0.6909861568291473, + "grad_norm": 1.3662830521895002, + "learning_rate": 9.206626235726426e-07, + "loss": 0.9738, + "step": 7662 + }, + { + "epoch": 0.6910763403526176, + "grad_norm": 1.6732199419719591, + "learning_rate": 9.20170835850194e-07, + "loss": 0.8767, + "step": 7663 + }, + { + "epoch": 0.6911665238760878, + "grad_norm": 
0.7406368319053204, + "learning_rate": 9.196791402647237e-07, + "loss": 0.8242, + "step": 7664 + }, + { + "epoch": 0.6912567073995581, + "grad_norm": 1.328657155852883, + "learning_rate": 9.191875368581861e-07, + "loss": 0.8687, + "step": 7665 + }, + { + "epoch": 0.6913468909230284, + "grad_norm": 1.4819849951186537, + "learning_rate": 9.186960256725271e-07, + "loss": 0.9561, + "step": 7666 + }, + { + "epoch": 0.6914370744464986, + "grad_norm": 1.4338041146001044, + "learning_rate": 9.182046067496856e-07, + "loss": 1.0025, + "step": 7667 + }, + { + "epoch": 0.6915272579699688, + "grad_norm": 1.509420061722214, + "learning_rate": 9.177132801315927e-07, + "loss": 1.0238, + "step": 7668 + }, + { + "epoch": 0.6916174414934392, + "grad_norm": 1.322649585867067, + "learning_rate": 9.172220458601692e-07, + "loss": 0.9157, + "step": 7669 + }, + { + "epoch": 0.6917076250169094, + "grad_norm": 1.6766754112369533, + "learning_rate": 9.167309039773324e-07, + "loss": 0.9121, + "step": 7670 + }, + { + "epoch": 0.6917978085403796, + "grad_norm": 1.162714337514168, + "learning_rate": 9.162398545249872e-07, + "loss": 0.8427, + "step": 7671 + }, + { + "epoch": 0.69188799206385, + "grad_norm": 1.2250031198734337, + "learning_rate": 9.157488975450334e-07, + "loss": 0.9914, + "step": 7672 + }, + { + "epoch": 0.6919781755873202, + "grad_norm": 1.3315329416678747, + "learning_rate": 9.15258033079362e-07, + "loss": 0.7775, + "step": 7673 + }, + { + "epoch": 0.6920683591107905, + "grad_norm": 1.3689209330528618, + "learning_rate": 9.147672611698567e-07, + "loss": 0.9359, + "step": 7674 + }, + { + "epoch": 0.6921585426342607, + "grad_norm": 1.5055696874254803, + "learning_rate": 9.142765818583933e-07, + "loss": 0.9351, + "step": 7675 + }, + { + "epoch": 0.692248726157731, + "grad_norm": 1.2210291556799067, + "learning_rate": 9.13785995186837e-07, + "loss": 0.9189, + "step": 7676 + }, + { + "epoch": 0.6923389096812013, + "grad_norm": 1.352950337261884, + "learning_rate": 9.132955011970502e-07, + "loss": 0.908, + "step": 7677 + }, + { + "epoch": 0.6924290932046715, + "grad_norm": 2.206222304907893, + "learning_rate": 9.128050999308827e-07, + "loss": 0.933, + "step": 7678 + }, + { + "epoch": 0.6925192767281417, + "grad_norm": 1.5813790189556132, + "learning_rate": 9.123147914301789e-07, + "loss": 1.0355, + "step": 7679 + }, + { + "epoch": 0.6926094602516121, + "grad_norm": 1.4361012329132397, + "learning_rate": 9.118245757367745e-07, + "loss": 0.9886, + "step": 7680 + }, + { + "epoch": 0.6926996437750823, + "grad_norm": 0.691579872357879, + "learning_rate": 9.113344528924973e-07, + "loss": 0.8149, + "step": 7681 + }, + { + "epoch": 0.6927898272985525, + "grad_norm": 1.3814915527819995, + "learning_rate": 9.108444229391676e-07, + "loss": 0.9222, + "step": 7682 + }, + { + "epoch": 0.6928800108220228, + "grad_norm": 1.6473482700572815, + "learning_rate": 9.103544859185979e-07, + "loss": 0.9143, + "step": 7683 + }, + { + "epoch": 0.6929701943454931, + "grad_norm": 1.4996243642309783, + "learning_rate": 9.098646418725902e-07, + "loss": 1.0257, + "step": 7684 + }, + { + "epoch": 0.6930603778689634, + "grad_norm": 0.651762011968603, + "learning_rate": 9.093748908429437e-07, + "loss": 0.8281, + "step": 7685 + }, + { + "epoch": 0.6931505613924336, + "grad_norm": 1.367624049653669, + "learning_rate": 9.088852328714444e-07, + "loss": 0.9329, + "step": 7686 + }, + { + "epoch": 0.6932407449159038, + "grad_norm": 1.2601238362447236, + "learning_rate": 9.083956679998735e-07, + "loss": 0.8555, + "step": 7687 + }, + { + "epoch": 
0.6933309284393742, + "grad_norm": 1.2131827919355598, + "learning_rate": 9.079061962700032e-07, + "loss": 0.9106, + "step": 7688 + }, + { + "epoch": 0.6934211119628444, + "grad_norm": 1.3491905876538197, + "learning_rate": 9.074168177235979e-07, + "loss": 0.9631, + "step": 7689 + }, + { + "epoch": 0.6935112954863146, + "grad_norm": 1.470401727966771, + "learning_rate": 9.069275324024151e-07, + "loss": 0.9559, + "step": 7690 + }, + { + "epoch": 0.6936014790097849, + "grad_norm": 1.717190537870877, + "learning_rate": 9.064383403482005e-07, + "loss": 0.8734, + "step": 7691 + }, + { + "epoch": 0.6936916625332552, + "grad_norm": 1.5256449039321984, + "learning_rate": 9.059492416026983e-07, + "loss": 0.9088, + "step": 7692 + }, + { + "epoch": 0.6937818460567254, + "grad_norm": 1.241181355986008, + "learning_rate": 9.054602362076378e-07, + "loss": 0.9398, + "step": 7693 + }, + { + "epoch": 0.6938720295801957, + "grad_norm": 1.6164510035783963, + "learning_rate": 9.049713242047468e-07, + "loss": 0.9322, + "step": 7694 + }, + { + "epoch": 0.693962213103666, + "grad_norm": 1.5971193040870262, + "learning_rate": 9.044825056357395e-07, + "loss": 0.9182, + "step": 7695 + }, + { + "epoch": 0.6940523966271362, + "grad_norm": 1.36478518189495, + "learning_rate": 9.039937805423255e-07, + "loss": 0.8973, + "step": 7696 + }, + { + "epoch": 0.6941425801506065, + "grad_norm": 0.6429754495561385, + "learning_rate": 9.035051489662051e-07, + "loss": 0.7572, + "step": 7697 + }, + { + "epoch": 0.6942327636740767, + "grad_norm": 1.4624018546535076, + "learning_rate": 9.030166109490718e-07, + "loss": 0.8974, + "step": 7698 + }, + { + "epoch": 0.6943229471975471, + "grad_norm": 1.9088720205459988, + "learning_rate": 9.025281665326099e-07, + "loss": 0.9608, + "step": 7699 + }, + { + "epoch": 0.6944131307210173, + "grad_norm": 1.5082281458868105, + "learning_rate": 9.020398157584967e-07, + "loss": 0.947, + "step": 7700 + }, + { + "epoch": 0.6945033142444875, + "grad_norm": 1.566242009634961, + "learning_rate": 9.015515586684002e-07, + "loss": 0.8466, + "step": 7701 + }, + { + "epoch": 0.6945934977679578, + "grad_norm": 0.7408110453324972, + "learning_rate": 9.010633953039812e-07, + "loss": 0.8088, + "step": 7702 + }, + { + "epoch": 0.6946836812914281, + "grad_norm": 1.3487297316080953, + "learning_rate": 9.005753257068929e-07, + "loss": 0.9567, + "step": 7703 + }, + { + "epoch": 0.6947738648148983, + "grad_norm": 14.365470049264943, + "learning_rate": 9.000873499187797e-07, + "loss": 0.9579, + "step": 7704 + }, + { + "epoch": 0.6948640483383686, + "grad_norm": 1.5068991469193305, + "learning_rate": 8.995994679812797e-07, + "loss": 0.9804, + "step": 7705 + }, + { + "epoch": 0.6949542318618388, + "grad_norm": 1.2695197002178946, + "learning_rate": 8.991116799360192e-07, + "loss": 0.9732, + "step": 7706 + }, + { + "epoch": 0.6950444153853091, + "grad_norm": 1.630751116237466, + "learning_rate": 8.986239858246217e-07, + "loss": 0.9351, + "step": 7707 + }, + { + "epoch": 0.6951345989087794, + "grad_norm": 1.5590470116251778, + "learning_rate": 8.981363856886972e-07, + "loss": 1.017, + "step": 7708 + }, + { + "epoch": 0.6952247824322496, + "grad_norm": 1.6605930606410821, + "learning_rate": 8.976488795698533e-07, + "loss": 0.9245, + "step": 7709 + }, + { + "epoch": 0.6953149659557198, + "grad_norm": 1.479817350303337, + "learning_rate": 8.971614675096841e-07, + "loss": 0.9595, + "step": 7710 + }, + { + "epoch": 0.6954051494791902, + "grad_norm": 1.6913659353054904, + "learning_rate": 8.966741495497807e-07, + "loss": 1.0052, 
+ "step": 7711 + }, + { + "epoch": 0.6954953330026604, + "grad_norm": 1.2666751131395175, + "learning_rate": 8.961869257317218e-07, + "loss": 0.9134, + "step": 7712 + }, + { + "epoch": 0.6955855165261307, + "grad_norm": 1.6593534076672352, + "learning_rate": 8.956997960970809e-07, + "loss": 0.9297, + "step": 7713 + }, + { + "epoch": 0.6956757000496009, + "grad_norm": 1.4956987223008704, + "learning_rate": 8.952127606874224e-07, + "loss": 0.8507, + "step": 7714 + }, + { + "epoch": 0.6957658835730712, + "grad_norm": 1.3213454500220854, + "learning_rate": 8.947258195443028e-07, + "loss": 0.9423, + "step": 7715 + }, + { + "epoch": 0.6958560670965415, + "grad_norm": 1.4034302309400546, + "learning_rate": 8.942389727092716e-07, + "loss": 0.9096, + "step": 7716 + }, + { + "epoch": 0.6959462506200117, + "grad_norm": 0.9771168775568579, + "learning_rate": 8.937522202238677e-07, + "loss": 0.8083, + "step": 7717 + }, + { + "epoch": 0.696036434143482, + "grad_norm": 1.3954185419913891, + "learning_rate": 8.932655621296239e-07, + "loss": 0.9562, + "step": 7718 + }, + { + "epoch": 0.6961266176669523, + "grad_norm": 1.1625499755617867, + "learning_rate": 8.927789984680649e-07, + "loss": 0.9387, + "step": 7719 + }, + { + "epoch": 0.6962168011904225, + "grad_norm": 1.2847598777728277, + "learning_rate": 8.922925292807068e-07, + "loss": 0.8105, + "step": 7720 + }, + { + "epoch": 0.6963069847138927, + "grad_norm": 1.4576081879966802, + "learning_rate": 8.91806154609058e-07, + "loss": 1.0185, + "step": 7721 + }, + { + "epoch": 0.6963971682373631, + "grad_norm": 1.3677686491559276, + "learning_rate": 8.913198744946195e-07, + "loss": 0.9156, + "step": 7722 + }, + { + "epoch": 0.6964873517608333, + "grad_norm": 1.3219410259810667, + "learning_rate": 8.908336889788807e-07, + "loss": 0.9632, + "step": 7723 + }, + { + "epoch": 0.6965775352843036, + "grad_norm": 1.4576566034268132, + "learning_rate": 8.903475981033293e-07, + "loss": 0.8222, + "step": 7724 + }, + { + "epoch": 0.6966677188077738, + "grad_norm": 0.8210337057036631, + "learning_rate": 8.898616019094376e-07, + "loss": 0.8853, + "step": 7725 + }, + { + "epoch": 0.6967579023312441, + "grad_norm": 1.5594262980430722, + "learning_rate": 8.89375700438677e-07, + "loss": 0.9778, + "step": 7726 + }, + { + "epoch": 0.6968480858547144, + "grad_norm": 1.6816129781862832, + "learning_rate": 8.888898937325047e-07, + "loss": 0.9054, + "step": 7727 + }, + { + "epoch": 0.6969382693781846, + "grad_norm": 3.5655257108406224, + "learning_rate": 8.884041818323733e-07, + "loss": 0.9148, + "step": 7728 + }, + { + "epoch": 0.6970284529016548, + "grad_norm": 1.515361409026385, + "learning_rate": 8.879185647797262e-07, + "loss": 0.9711, + "step": 7729 + }, + { + "epoch": 0.6971186364251252, + "grad_norm": 1.5094169026436428, + "learning_rate": 8.874330426159993e-07, + "loss": 0.9114, + "step": 7730 + }, + { + "epoch": 0.6972088199485954, + "grad_norm": 1.432203569881836, + "learning_rate": 8.869476153826205e-07, + "loss": 0.9661, + "step": 7731 + }, + { + "epoch": 0.6972990034720656, + "grad_norm": 1.4361228150842438, + "learning_rate": 8.864622831210071e-07, + "loss": 0.9418, + "step": 7732 + }, + { + "epoch": 0.6973891869955359, + "grad_norm": 1.6566369666284249, + "learning_rate": 8.85977045872573e-07, + "loss": 0.9519, + "step": 7733 + }, + { + "epoch": 0.6974793705190062, + "grad_norm": 1.71184024771338, + "learning_rate": 8.854919036787194e-07, + "loss": 0.925, + "step": 7734 + }, + { + "epoch": 0.6975695540424764, + "grad_norm": 1.7558816435253648, + "learning_rate": 
8.850068565808417e-07, + "loss": 0.9139, + "step": 7735 + }, + { + "epoch": 0.6976597375659467, + "grad_norm": 1.7468043850460442, + "learning_rate": 8.845219046203271e-07, + "loss": 0.9923, + "step": 7736 + }, + { + "epoch": 0.6977499210894169, + "grad_norm": 1.7290296232157978, + "learning_rate": 8.840370478385544e-07, + "loss": 0.9669, + "step": 7737 + }, + { + "epoch": 0.6978401046128873, + "grad_norm": 1.5467036759262123, + "learning_rate": 8.83552286276894e-07, + "loss": 0.9616, + "step": 7738 + }, + { + "epoch": 0.6979302881363575, + "grad_norm": 1.7207302823126505, + "learning_rate": 8.830676199767095e-07, + "loss": 0.9585, + "step": 7739 + }, + { + "epoch": 0.6980204716598277, + "grad_norm": 1.3908113076210868, + "learning_rate": 8.825830489793527e-07, + "loss": 0.9264, + "step": 7740 + }, + { + "epoch": 0.698110655183298, + "grad_norm": 2.9571618639622774, + "learning_rate": 8.820985733261732e-07, + "loss": 0.8966, + "step": 7741 + }, + { + "epoch": 0.6982008387067683, + "grad_norm": 1.5040234012305043, + "learning_rate": 8.816141930585066e-07, + "loss": 0.8442, + "step": 7742 + }, + { + "epoch": 0.6982910222302385, + "grad_norm": 0.6967552304518114, + "learning_rate": 8.811299082176837e-07, + "loss": 0.8099, + "step": 7743 + }, + { + "epoch": 0.6983812057537088, + "grad_norm": 1.3497246390777706, + "learning_rate": 8.806457188450265e-07, + "loss": 0.9094, + "step": 7744 + }, + { + "epoch": 0.6984713892771791, + "grad_norm": 1.3457567622287512, + "learning_rate": 8.801616249818487e-07, + "loss": 0.9727, + "step": 7745 + }, + { + "epoch": 0.6985615728006493, + "grad_norm": 1.4583069481052224, + "learning_rate": 8.796776266694564e-07, + "loss": 0.9063, + "step": 7746 + }, + { + "epoch": 0.6986517563241196, + "grad_norm": 1.2772464540155706, + "learning_rate": 8.79193723949145e-07, + "loss": 0.8879, + "step": 7747 + }, + { + "epoch": 0.6987419398475898, + "grad_norm": 1.4751082429123992, + "learning_rate": 8.787099168622063e-07, + "loss": 0.8408, + "step": 7748 + }, + { + "epoch": 0.6988321233710602, + "grad_norm": 1.6069868572151065, + "learning_rate": 8.782262054499199e-07, + "loss": 0.8737, + "step": 7749 + }, + { + "epoch": 0.6989223068945304, + "grad_norm": 1.2421768955761732, + "learning_rate": 8.777425897535588e-07, + "loss": 0.9722, + "step": 7750 + }, + { + "epoch": 0.6990124904180006, + "grad_norm": 1.4407890673240082, + "learning_rate": 8.77259069814388e-07, + "loss": 0.9444, + "step": 7751 + }, + { + "epoch": 0.6991026739414709, + "grad_norm": 1.2805816023451582, + "learning_rate": 8.767756456736641e-07, + "loss": 0.9465, + "step": 7752 + }, + { + "epoch": 0.6991928574649412, + "grad_norm": 1.271600111515032, + "learning_rate": 8.762923173726358e-07, + "loss": 0.866, + "step": 7753 + }, + { + "epoch": 0.6992830409884114, + "grad_norm": 1.4565720230770067, + "learning_rate": 8.758090849525428e-07, + "loss": 1.04, + "step": 7754 + }, + { + "epoch": 0.6993732245118817, + "grad_norm": 1.539170004870821, + "learning_rate": 8.753259484546174e-07, + "loss": 0.8824, + "step": 7755 + }, + { + "epoch": 0.6994634080353519, + "grad_norm": 1.8035529727881785, + "learning_rate": 8.748429079200841e-07, + "loss": 0.9033, + "step": 7756 + }, + { + "epoch": 0.6995535915588222, + "grad_norm": 1.7237707153137822, + "learning_rate": 8.743599633901575e-07, + "loss": 0.9982, + "step": 7757 + }, + { + "epoch": 0.6996437750822925, + "grad_norm": 1.4558530241856131, + "learning_rate": 8.738771149060453e-07, + "loss": 0.9492, + "step": 7758 + }, + { + "epoch": 0.6997339586057627, + "grad_norm": 
1.4176395759287266, + "learning_rate": 8.73394362508947e-07, + "loss": 0.9054, + "step": 7759 + }, + { + "epoch": 0.6998241421292329, + "grad_norm": 1.4983688863850884, + "learning_rate": 8.72911706240054e-07, + "loss": 0.9052, + "step": 7760 + }, + { + "epoch": 0.6999143256527033, + "grad_norm": 1.33774137012462, + "learning_rate": 8.724291461405493e-07, + "loss": 0.8181, + "step": 7761 + }, + { + "epoch": 0.7000045091761735, + "grad_norm": 1.5293828518646058, + "learning_rate": 8.71946682251606e-07, + "loss": 0.91, + "step": 7762 + }, + { + "epoch": 0.7000946926996438, + "grad_norm": 1.4312838621173367, + "learning_rate": 8.714643146143932e-07, + "loss": 0.9369, + "step": 7763 + }, + { + "epoch": 0.700184876223114, + "grad_norm": 1.3340656931542476, + "learning_rate": 8.709820432700663e-07, + "loss": 0.8976, + "step": 7764 + }, + { + "epoch": 0.7002750597465843, + "grad_norm": 1.2527792551145187, + "learning_rate": 8.704998682597784e-07, + "loss": 0.8576, + "step": 7765 + }, + { + "epoch": 0.7003652432700546, + "grad_norm": 1.4366710594770948, + "learning_rate": 8.700177896246688e-07, + "loss": 0.9596, + "step": 7766 + }, + { + "epoch": 0.7004554267935248, + "grad_norm": 1.5344732963945715, + "learning_rate": 8.695358074058721e-07, + "loss": 0.8652, + "step": 7767 + }, + { + "epoch": 0.7005456103169951, + "grad_norm": 1.5246403567122555, + "learning_rate": 8.690539216445136e-07, + "loss": 0.9802, + "step": 7768 + }, + { + "epoch": 0.7006357938404654, + "grad_norm": 1.7167975693702378, + "learning_rate": 8.685721323817106e-07, + "loss": 0.8423, + "step": 7769 + }, + { + "epoch": 0.7007259773639356, + "grad_norm": 1.404049274544457, + "learning_rate": 8.680904396585718e-07, + "loss": 0.9805, + "step": 7770 + }, + { + "epoch": 0.7008161608874058, + "grad_norm": 1.4294132323962812, + "learning_rate": 8.676088435161988e-07, + "loss": 0.8885, + "step": 7771 + }, + { + "epoch": 0.7009063444108762, + "grad_norm": 1.6735204282527587, + "learning_rate": 8.671273439956824e-07, + "loss": 0.9542, + "step": 7772 + }, + { + "epoch": 0.7009965279343464, + "grad_norm": 1.4305153206328507, + "learning_rate": 8.666459411381075e-07, + "loss": 0.9418, + "step": 7773 + }, + { + "epoch": 0.7010867114578166, + "grad_norm": 1.3380063640346198, + "learning_rate": 8.661646349845501e-07, + "loss": 0.9658, + "step": 7774 + }, + { + "epoch": 0.7011768949812869, + "grad_norm": 1.4536850373045413, + "learning_rate": 8.656834255760783e-07, + "loss": 0.9926, + "step": 7775 + }, + { + "epoch": 0.7012670785047572, + "grad_norm": 1.9306461563013209, + "learning_rate": 8.652023129537509e-07, + "loss": 0.9933, + "step": 7776 + }, + { + "epoch": 0.7013572620282275, + "grad_norm": 1.4118511549517918, + "learning_rate": 8.647212971586195e-07, + "loss": 0.9672, + "step": 7777 + }, + { + "epoch": 0.7014474455516977, + "grad_norm": 1.4639299721515533, + "learning_rate": 8.642403782317275e-07, + "loss": 0.9008, + "step": 7778 + }, + { + "epoch": 0.7015376290751679, + "grad_norm": 1.3615455836868682, + "learning_rate": 8.637595562141075e-07, + "loss": 0.9441, + "step": 7779 + }, + { + "epoch": 0.7016278125986383, + "grad_norm": 1.296681860824122, + "learning_rate": 8.632788311467889e-07, + "loss": 0.9061, + "step": 7780 + }, + { + "epoch": 0.7017179961221085, + "grad_norm": 1.2446553888567682, + "learning_rate": 8.627982030707867e-07, + "loss": 0.93, + "step": 7781 + }, + { + "epoch": 0.7018081796455787, + "grad_norm": 1.482716889107708, + "learning_rate": 8.623176720271139e-07, + "loss": 1.0076, + "step": 7782 + }, + { + "epoch": 
0.701898363169049, + "grad_norm": 1.8633399170410638, + "learning_rate": 8.618372380567696e-07, + "loss": 1.0106, + "step": 7783 + }, + { + "epoch": 0.7019885466925193, + "grad_norm": 1.6490137620655836, + "learning_rate": 8.613569012007478e-07, + "loss": 0.8957, + "step": 7784 + }, + { + "epoch": 0.7020787302159895, + "grad_norm": 1.64464976411496, + "learning_rate": 8.608766615000338e-07, + "loss": 0.9801, + "step": 7785 + }, + { + "epoch": 0.7021689137394598, + "grad_norm": 1.662938987883529, + "learning_rate": 8.603965189956039e-07, + "loss": 0.9613, + "step": 7786 + }, + { + "epoch": 0.70225909726293, + "grad_norm": 1.6194932505321802, + "learning_rate": 8.599164737284276e-07, + "loss": 0.9364, + "step": 7787 + }, + { + "epoch": 0.7023492807864004, + "grad_norm": 1.5297832860366, + "learning_rate": 8.594365257394634e-07, + "loss": 0.8943, + "step": 7788 + }, + { + "epoch": 0.7024394643098706, + "grad_norm": 1.670449620783995, + "learning_rate": 8.589566750696637e-07, + "loss": 0.8826, + "step": 7789 + }, + { + "epoch": 0.7025296478333408, + "grad_norm": 1.5578318480855982, + "learning_rate": 8.584769217599721e-07, + "loss": 0.8804, + "step": 7790 + }, + { + "epoch": 0.7026198313568112, + "grad_norm": 1.9066583164850313, + "learning_rate": 8.579972658513239e-07, + "loss": 0.8864, + "step": 7791 + }, + { + "epoch": 0.7027100148802814, + "grad_norm": 1.616479248640514, + "learning_rate": 8.57517707384646e-07, + "loss": 0.9454, + "step": 7792 + }, + { + "epoch": 0.7028001984037516, + "grad_norm": 1.3822174084899164, + "learning_rate": 8.570382464008574e-07, + "loss": 0.9728, + "step": 7793 + }, + { + "epoch": 0.7028903819272219, + "grad_norm": 1.5398113926662387, + "learning_rate": 8.565588829408665e-07, + "loss": 0.9076, + "step": 7794 + }, + { + "epoch": 0.7029805654506922, + "grad_norm": 1.4878701270452381, + "learning_rate": 8.560796170455782e-07, + "loss": 0.9177, + "step": 7795 + }, + { + "epoch": 0.7030707489741624, + "grad_norm": 1.5486366952209256, + "learning_rate": 8.556004487558828e-07, + "loss": 0.9706, + "step": 7796 + }, + { + "epoch": 0.7031609324976327, + "grad_norm": 1.3071780161703943, + "learning_rate": 8.55121378112669e-07, + "loss": 0.8551, + "step": 7797 + }, + { + "epoch": 0.7032511160211029, + "grad_norm": 1.540822837770462, + "learning_rate": 8.546424051568111e-07, + "loss": 0.9169, + "step": 7798 + }, + { + "epoch": 0.7033412995445733, + "grad_norm": 1.4461824840193407, + "learning_rate": 8.541635299291785e-07, + "loss": 0.9873, + "step": 7799 + }, + { + "epoch": 0.7034314830680435, + "grad_norm": 0.6610557477428939, + "learning_rate": 8.536847524706317e-07, + "loss": 0.8164, + "step": 7800 + }, + { + "epoch": 0.7035216665915137, + "grad_norm": 1.4555315168998721, + "learning_rate": 8.532060728220225e-07, + "loss": 0.8762, + "step": 7801 + }, + { + "epoch": 0.703611850114984, + "grad_norm": 1.7559147742233783, + "learning_rate": 8.527274910241955e-07, + "loss": 0.9575, + "step": 7802 + }, + { + "epoch": 0.7037020336384543, + "grad_norm": 1.388109911648136, + "learning_rate": 8.522490071179833e-07, + "loss": 0.9351, + "step": 7803 + }, + { + "epoch": 0.7037922171619245, + "grad_norm": 1.2658139311504981, + "learning_rate": 8.517706211442159e-07, + "loss": 0.8169, + "step": 7804 + }, + { + "epoch": 0.7038824006853948, + "grad_norm": 1.4595610310103577, + "learning_rate": 8.512923331437097e-07, + "loss": 1.0175, + "step": 7805 + }, + { + "epoch": 0.703972584208865, + "grad_norm": 1.3626384804722582, + "learning_rate": 8.508141431572755e-07, + "loss": 0.9069, + 
"step": 7806 + }, + { + "epoch": 0.7040627677323353, + "grad_norm": 1.660796421264181, + "learning_rate": 8.503360512257152e-07, + "loss": 0.9282, + "step": 7807 + }, + { + "epoch": 0.7041529512558056, + "grad_norm": 1.559234410983408, + "learning_rate": 8.498580573898219e-07, + "loss": 0.9011, + "step": 7808 + }, + { + "epoch": 0.7042431347792758, + "grad_norm": 1.7815997800889412, + "learning_rate": 8.493801616903813e-07, + "loss": 0.9461, + "step": 7809 + }, + { + "epoch": 0.704333318302746, + "grad_norm": 2.3421545065807536, + "learning_rate": 8.489023641681705e-07, + "loss": 0.9565, + "step": 7810 + }, + { + "epoch": 0.7044235018262164, + "grad_norm": 1.364271269003517, + "learning_rate": 8.484246648639555e-07, + "loss": 0.9901, + "step": 7811 + }, + { + "epoch": 0.7045136853496866, + "grad_norm": 1.2573415218014492, + "learning_rate": 8.479470638184994e-07, + "loss": 0.818, + "step": 7812 + }, + { + "epoch": 0.7046038688731568, + "grad_norm": 2.314598780688343, + "learning_rate": 8.474695610725513e-07, + "loss": 0.8663, + "step": 7813 + }, + { + "epoch": 0.7046940523966272, + "grad_norm": 1.2776200581850545, + "learning_rate": 8.469921566668552e-07, + "loss": 1.0121, + "step": 7814 + }, + { + "epoch": 0.7047842359200974, + "grad_norm": 1.4097877929838725, + "learning_rate": 8.46514850642146e-07, + "loss": 0.9939, + "step": 7815 + }, + { + "epoch": 0.7048744194435677, + "grad_norm": 1.5242500681277262, + "learning_rate": 8.460376430391499e-07, + "loss": 0.9268, + "step": 7816 + }, + { + "epoch": 0.7049646029670379, + "grad_norm": 1.125975344972374, + "learning_rate": 8.455605338985858e-07, + "loss": 0.9633, + "step": 7817 + }, + { + "epoch": 0.7050547864905082, + "grad_norm": 1.3510083071144803, + "learning_rate": 8.45083523261161e-07, + "loss": 0.7977, + "step": 7818 + }, + { + "epoch": 0.7051449700139785, + "grad_norm": 1.2035637092827913, + "learning_rate": 8.446066111675796e-07, + "loss": 0.9469, + "step": 7819 + }, + { + "epoch": 0.7052351535374487, + "grad_norm": 1.573206270685218, + "learning_rate": 8.441297976585314e-07, + "loss": 0.8345, + "step": 7820 + }, + { + "epoch": 0.7053253370609189, + "grad_norm": 1.4952996838378163, + "learning_rate": 8.436530827747037e-07, + "loss": 1.0182, + "step": 7821 + }, + { + "epoch": 0.7054155205843893, + "grad_norm": 2.010036913594487, + "learning_rate": 8.431764665567704e-07, + "loss": 1.0697, + "step": 7822 + }, + { + "epoch": 0.7055057041078595, + "grad_norm": 1.6317212414341227, + "learning_rate": 8.426999490453996e-07, + "loss": 0.8801, + "step": 7823 + }, + { + "epoch": 0.7055958876313297, + "grad_norm": 1.7707965697417933, + "learning_rate": 8.422235302812504e-07, + "loss": 0.948, + "step": 7824 + }, + { + "epoch": 0.7056860711548, + "grad_norm": 1.7315106495224886, + "learning_rate": 8.417472103049734e-07, + "loss": 1.0103, + "step": 7825 + }, + { + "epoch": 0.7057762546782703, + "grad_norm": 1.4835507965699786, + "learning_rate": 8.412709891572112e-07, + "loss": 0.9727, + "step": 7826 + }, + { + "epoch": 0.7058664382017406, + "grad_norm": 1.7347120352775887, + "learning_rate": 8.407948668785978e-07, + "loss": 0.9717, + "step": 7827 + }, + { + "epoch": 0.7059566217252108, + "grad_norm": 1.3435803683355745, + "learning_rate": 8.403188435097576e-07, + "loss": 0.9331, + "step": 7828 + }, + { + "epoch": 0.706046805248681, + "grad_norm": 1.3348636438261954, + "learning_rate": 8.398429190913081e-07, + "loss": 1.0036, + "step": 7829 + }, + { + "epoch": 0.7061369887721514, + "grad_norm": 1.456917438715397, + "learning_rate": 
8.393670936638578e-07, + "loss": 0.9386, + "step": 7830 + }, + { + "epoch": 0.7062271722956216, + "grad_norm": 1.5287625876381796, + "learning_rate": 8.388913672680067e-07, + "loss": 0.937, + "step": 7831 + }, + { + "epoch": 0.7063173558190918, + "grad_norm": 1.5070016687000456, + "learning_rate": 8.384157399443472e-07, + "loss": 0.8887, + "step": 7832 + }, + { + "epoch": 0.7064075393425621, + "grad_norm": 1.4573757388241233, + "learning_rate": 8.379402117334601e-07, + "loss": 0.9707, + "step": 7833 + }, + { + "epoch": 0.7064977228660324, + "grad_norm": 1.4218661968252126, + "learning_rate": 8.374647826759232e-07, + "loss": 0.936, + "step": 7834 + }, + { + "epoch": 0.7065879063895026, + "grad_norm": 1.5954692020103673, + "learning_rate": 8.369894528122998e-07, + "loss": 0.9397, + "step": 7835 + }, + { + "epoch": 0.7066780899129729, + "grad_norm": 1.879043669245448, + "learning_rate": 8.365142221831505e-07, + "loss": 0.8096, + "step": 7836 + }, + { + "epoch": 0.7067682734364432, + "grad_norm": 1.734712928636448, + "learning_rate": 8.360390908290222e-07, + "loss": 0.9026, + "step": 7837 + }, + { + "epoch": 0.7068584569599135, + "grad_norm": 1.5023590452116864, + "learning_rate": 8.355640587904569e-07, + "loss": 0.9177, + "step": 7838 + }, + { + "epoch": 0.7069486404833837, + "grad_norm": 1.4704981201274, + "learning_rate": 8.350891261079866e-07, + "loss": 0.9517, + "step": 7839 + }, + { + "epoch": 0.7070388240068539, + "grad_norm": 1.5078736920367284, + "learning_rate": 8.346142928221356e-07, + "loss": 0.9801, + "step": 7840 + }, + { + "epoch": 0.7071290075303243, + "grad_norm": 1.5600471317067195, + "learning_rate": 8.341395589734189e-07, + "loss": 0.7843, + "step": 7841 + }, + { + "epoch": 0.7072191910537945, + "grad_norm": 1.756758106621761, + "learning_rate": 8.336649246023433e-07, + "loss": 0.8909, + "step": 7842 + }, + { + "epoch": 0.7073093745772647, + "grad_norm": 1.3474976819501483, + "learning_rate": 8.331903897494084e-07, + "loss": 0.8451, + "step": 7843 + }, + { + "epoch": 0.707399558100735, + "grad_norm": 1.722652443948637, + "learning_rate": 8.327159544551024e-07, + "loss": 0.9449, + "step": 7844 + }, + { + "epoch": 0.7074897416242053, + "grad_norm": 1.4240862242225314, + "learning_rate": 8.322416187599073e-07, + "loss": 0.8877, + "step": 7845 + }, + { + "epoch": 0.7075799251476755, + "grad_norm": 1.6075292576905265, + "learning_rate": 8.317673827042963e-07, + "loss": 0.9622, + "step": 7846 + }, + { + "epoch": 0.7076701086711458, + "grad_norm": 1.4070972009904925, + "learning_rate": 8.312932463287339e-07, + "loss": 0.9259, + "step": 7847 + }, + { + "epoch": 0.707760292194616, + "grad_norm": 1.7092974043674052, + "learning_rate": 8.308192096736759e-07, + "loss": 0.9052, + "step": 7848 + }, + { + "epoch": 0.7078504757180863, + "grad_norm": 1.3283801114274845, + "learning_rate": 8.303452727795703e-07, + "loss": 0.9134, + "step": 7849 + }, + { + "epoch": 0.7079406592415566, + "grad_norm": 1.5371044890775667, + "learning_rate": 8.298714356868542e-07, + "loss": 1.0099, + "step": 7850 + }, + { + "epoch": 0.7080308427650268, + "grad_norm": 1.7834102848717681, + "learning_rate": 8.293976984359605e-07, + "loss": 0.9518, + "step": 7851 + }, + { + "epoch": 0.708121026288497, + "grad_norm": 1.5842603262007497, + "learning_rate": 8.289240610673092e-07, + "loss": 0.9354, + "step": 7852 + }, + { + "epoch": 0.7082112098119674, + "grad_norm": 1.572850619649421, + "learning_rate": 8.284505236213144e-07, + "loss": 0.9667, + "step": 7853 + }, + { + "epoch": 0.7083013933354376, + "grad_norm": 
1.4166803733312199, + "learning_rate": 8.279770861383806e-07, + "loss": 0.9277, + "step": 7854 + }, + { + "epoch": 0.7083915768589079, + "grad_norm": 1.316538908751825, + "learning_rate": 8.275037486589042e-07, + "loss": 0.9279, + "step": 7855 + }, + { + "epoch": 0.7084817603823781, + "grad_norm": 0.7332685535891603, + "learning_rate": 8.270305112232739e-07, + "loss": 0.7872, + "step": 7856 + }, + { + "epoch": 0.7085719439058484, + "grad_norm": 1.3790544208745508, + "learning_rate": 8.265573738718665e-07, + "loss": 0.9305, + "step": 7857 + }, + { + "epoch": 0.7086621274293187, + "grad_norm": 1.5358735173074578, + "learning_rate": 8.260843366450559e-07, + "loss": 0.8999, + "step": 7858 + }, + { + "epoch": 0.7087523109527889, + "grad_norm": 1.4644119643570004, + "learning_rate": 8.256113995832017e-07, + "loss": 0.9381, + "step": 7859 + }, + { + "epoch": 0.7088424944762591, + "grad_norm": 1.2425974523293783, + "learning_rate": 8.251385627266583e-07, + "loss": 0.9825, + "step": 7860 + }, + { + "epoch": 0.7089326779997295, + "grad_norm": 1.5926333985082906, + "learning_rate": 8.24665826115771e-07, + "loss": 0.9314, + "step": 7861 + }, + { + "epoch": 0.7090228615231997, + "grad_norm": 1.6167622619890631, + "learning_rate": 8.241931897908763e-07, + "loss": 0.9534, + "step": 7862 + }, + { + "epoch": 0.7091130450466699, + "grad_norm": 1.3832164971675616, + "learning_rate": 8.237206537923016e-07, + "loss": 0.9629, + "step": 7863 + }, + { + "epoch": 0.7092032285701403, + "grad_norm": 1.771471252729272, + "learning_rate": 8.232482181603671e-07, + "loss": 0.9257, + "step": 7864 + }, + { + "epoch": 0.7092934120936105, + "grad_norm": 1.5468766954200779, + "learning_rate": 8.227758829353828e-07, + "loss": 0.9099, + "step": 7865 + }, + { + "epoch": 0.7093835956170808, + "grad_norm": 1.513615406582795, + "learning_rate": 8.223036481576522e-07, + "loss": 0.9072, + "step": 7866 + }, + { + "epoch": 0.709473779140551, + "grad_norm": 1.7786580015988491, + "learning_rate": 8.218315138674672e-07, + "loss": 0.9455, + "step": 7867 + }, + { + "epoch": 0.7095639626640213, + "grad_norm": 0.7414644835487586, + "learning_rate": 8.21359480105114e-07, + "loss": 0.7903, + "step": 7868 + }, + { + "epoch": 0.7096541461874916, + "grad_norm": 1.6186647164402692, + "learning_rate": 8.208875469108689e-07, + "loss": 0.9698, + "step": 7869 + }, + { + "epoch": 0.7097443297109618, + "grad_norm": 1.9194373201359782, + "learning_rate": 8.204157143249997e-07, + "loss": 0.924, + "step": 7870 + }, + { + "epoch": 0.709834513234432, + "grad_norm": 1.219904914808907, + "learning_rate": 8.199439823877668e-07, + "loss": 0.9122, + "step": 7871 + }, + { + "epoch": 0.7099246967579024, + "grad_norm": 1.3861903943661504, + "learning_rate": 8.194723511394186e-07, + "loss": 0.9272, + "step": 7872 + }, + { + "epoch": 0.7100148802813726, + "grad_norm": 1.9771225328113495, + "learning_rate": 8.190008206202002e-07, + "loss": 0.8955, + "step": 7873 + }, + { + "epoch": 0.7101050638048428, + "grad_norm": 1.4597549140012531, + "learning_rate": 8.185293908703423e-07, + "loss": 0.9803, + "step": 7874 + }, + { + "epoch": 0.7101952473283131, + "grad_norm": 1.6447189840049987, + "learning_rate": 8.180580619300727e-07, + "loss": 0.9018, + "step": 7875 + }, + { + "epoch": 0.7102854308517834, + "grad_norm": 1.2341264402044396, + "learning_rate": 8.175868338396057e-07, + "loss": 0.8913, + "step": 7876 + }, + { + "epoch": 0.7103756143752537, + "grad_norm": 1.632867182495869, + "learning_rate": 8.171157066391499e-07, + "loss": 0.915, + "step": 7877 + }, + { + "epoch": 
0.7104657978987239, + "grad_norm": 1.6768261335136394, + "learning_rate": 8.166446803689045e-07, + "loss": 0.963, + "step": 7878 + }, + { + "epoch": 0.7105559814221941, + "grad_norm": 0.6760379105411175, + "learning_rate": 8.161737550690595e-07, + "loss": 0.8025, + "step": 7879 + }, + { + "epoch": 0.7106461649456645, + "grad_norm": 1.3568668246099809, + "learning_rate": 8.157029307797976e-07, + "loss": 0.9642, + "step": 7880 + }, + { + "epoch": 0.7107363484691347, + "grad_norm": 1.2822792757098265, + "learning_rate": 8.152322075412925e-07, + "loss": 0.9855, + "step": 7881 + }, + { + "epoch": 0.7108265319926049, + "grad_norm": 1.3179701392998013, + "learning_rate": 8.147615853937073e-07, + "loss": 0.8929, + "step": 7882 + }, + { + "epoch": 0.7109167155160752, + "grad_norm": 1.4400478671648218, + "learning_rate": 8.142910643771992e-07, + "loss": 0.8644, + "step": 7883 + }, + { + "epoch": 0.7110068990395455, + "grad_norm": 1.7325107074313093, + "learning_rate": 8.138206445319152e-07, + "loss": 0.8979, + "step": 7884 + }, + { + "epoch": 0.7110970825630157, + "grad_norm": 1.442792520465294, + "learning_rate": 8.133503258979944e-07, + "loss": 0.8392, + "step": 7885 + }, + { + "epoch": 0.711187266086486, + "grad_norm": 1.4984215537645789, + "learning_rate": 8.12880108515567e-07, + "loss": 0.8579, + "step": 7886 + }, + { + "epoch": 0.7112774496099563, + "grad_norm": 1.3211102304722153, + "learning_rate": 8.124099924247543e-07, + "loss": 0.9066, + "step": 7887 + }, + { + "epoch": 0.7113676331334265, + "grad_norm": 1.4780408936135596, + "learning_rate": 8.119399776656701e-07, + "loss": 0.8485, + "step": 7888 + }, + { + "epoch": 0.7114578166568968, + "grad_norm": 1.3283621183664438, + "learning_rate": 8.114700642784167e-07, + "loss": 0.9383, + "step": 7889 + }, + { + "epoch": 0.711548000180367, + "grad_norm": 1.2248022484272227, + "learning_rate": 8.110002523030921e-07, + "loss": 0.9878, + "step": 7890 + }, + { + "epoch": 0.7116381837038374, + "grad_norm": 1.2865792602455481, + "learning_rate": 8.105305417797808e-07, + "loss": 0.9633, + "step": 7891 + }, + { + "epoch": 0.7117283672273076, + "grad_norm": 1.3530506034291783, + "learning_rate": 8.100609327485635e-07, + "loss": 0.9719, + "step": 7892 + }, + { + "epoch": 0.7118185507507778, + "grad_norm": 1.9255593812355054, + "learning_rate": 8.095914252495082e-07, + "loss": 0.9918, + "step": 7893 + }, + { + "epoch": 0.7119087342742481, + "grad_norm": 1.7824448041676284, + "learning_rate": 8.091220193226762e-07, + "loss": 0.9091, + "step": 7894 + }, + { + "epoch": 0.7119989177977184, + "grad_norm": 1.2763512164563615, + "learning_rate": 8.0865271500812e-07, + "loss": 0.9208, + "step": 7895 + }, + { + "epoch": 0.7120891013211886, + "grad_norm": 1.5588659369112166, + "learning_rate": 8.081835123458831e-07, + "loss": 0.9801, + "step": 7896 + }, + { + "epoch": 0.7121792848446589, + "grad_norm": 1.5099816408367561, + "learning_rate": 8.077144113760013e-07, + "loss": 0.9272, + "step": 7897 + }, + { + "epoch": 0.7122694683681291, + "grad_norm": 1.4077520190254789, + "learning_rate": 8.072454121384995e-07, + "loss": 0.9137, + "step": 7898 + }, + { + "epoch": 0.7123596518915994, + "grad_norm": 1.2871329923324013, + "learning_rate": 8.067765146733958e-07, + "loss": 0.9743, + "step": 7899 + }, + { + "epoch": 0.7124498354150697, + "grad_norm": 1.5391023117570541, + "learning_rate": 8.063077190206993e-07, + "loss": 1.0107, + "step": 7900 + }, + { + "epoch": 0.7125400189385399, + "grad_norm": 0.6584598934672723, + "learning_rate": 8.058390252204101e-07, + "loss": 
0.7106, + "step": 7901 + }, + { + "epoch": 0.7126302024620101, + "grad_norm": 1.297469726606234, + "learning_rate": 8.0537043331252e-07, + "loss": 0.9322, + "step": 7902 + }, + { + "epoch": 0.7127203859854805, + "grad_norm": 1.5616404648777449, + "learning_rate": 8.049019433370121e-07, + "loss": 0.8717, + "step": 7903 + }, + { + "epoch": 0.7128105695089507, + "grad_norm": 1.3991963975272197, + "learning_rate": 8.044335553338588e-07, + "loss": 0.9401, + "step": 7904 + }, + { + "epoch": 0.712900753032421, + "grad_norm": 2.9788967801715627, + "learning_rate": 8.039652693430281e-07, + "loss": 0.9432, + "step": 7905 + }, + { + "epoch": 0.7129909365558912, + "grad_norm": 3.6650073169255304, + "learning_rate": 8.034970854044742e-07, + "loss": 0.9145, + "step": 7906 + }, + { + "epoch": 0.7130811200793615, + "grad_norm": 1.4196810721491826, + "learning_rate": 8.03029003558148e-07, + "loss": 1.0318, + "step": 7907 + }, + { + "epoch": 0.7131713036028318, + "grad_norm": 1.616630421392768, + "learning_rate": 8.025610238439864e-07, + "loss": 0.997, + "step": 7908 + }, + { + "epoch": 0.713261487126302, + "grad_norm": 1.94702851010194, + "learning_rate": 8.020931463019207e-07, + "loss": 1.0134, + "step": 7909 + }, + { + "epoch": 0.7133516706497723, + "grad_norm": 1.4879681115117536, + "learning_rate": 8.016253709718732e-07, + "loss": 0.8776, + "step": 7910 + }, + { + "epoch": 0.7134418541732426, + "grad_norm": 1.4867573142249435, + "learning_rate": 8.011576978937567e-07, + "loss": 0.8492, + "step": 7911 + }, + { + "epoch": 0.7135320376967128, + "grad_norm": 2.21985555694398, + "learning_rate": 8.006901271074764e-07, + "loss": 0.9385, + "step": 7912 + }, + { + "epoch": 0.713622221220183, + "grad_norm": 1.6169562427825956, + "learning_rate": 8.002226586529261e-07, + "loss": 0.9768, + "step": 7913 + }, + { + "epoch": 0.7137124047436534, + "grad_norm": 1.464726720585555, + "learning_rate": 7.997552925699956e-07, + "loss": 1.0192, + "step": 7914 + }, + { + "epoch": 0.7138025882671236, + "grad_norm": 1.5760090305389147, + "learning_rate": 7.992880288985606e-07, + "loss": 1.0283, + "step": 7915 + }, + { + "epoch": 0.7138927717905938, + "grad_norm": 1.392231088762971, + "learning_rate": 7.988208676784918e-07, + "loss": 0.9443, + "step": 7916 + }, + { + "epoch": 0.7139829553140641, + "grad_norm": 1.5629952980844524, + "learning_rate": 7.983538089496497e-07, + "loss": 0.939, + "step": 7917 + }, + { + "epoch": 0.7140731388375344, + "grad_norm": 1.4620371061539847, + "learning_rate": 7.978868527518864e-07, + "loss": 0.8945, + "step": 7918 + }, + { + "epoch": 0.7141633223610047, + "grad_norm": 1.4559705212302227, + "learning_rate": 7.974199991250455e-07, + "loss": 0.9139, + "step": 7919 + }, + { + "epoch": 0.7142535058844749, + "grad_norm": 1.780208082145136, + "learning_rate": 7.969532481089616e-07, + "loss": 1.0347, + "step": 7920 + }, + { + "epoch": 0.7143436894079451, + "grad_norm": 1.5286334512221007, + "learning_rate": 7.964865997434589e-07, + "loss": 0.9515, + "step": 7921 + }, + { + "epoch": 0.7144338729314155, + "grad_norm": 1.8896151911844392, + "learning_rate": 7.96020054068357e-07, + "loss": 1.0116, + "step": 7922 + }, + { + "epoch": 0.7145240564548857, + "grad_norm": 1.2187364039518573, + "learning_rate": 7.95553611123462e-07, + "loss": 0.897, + "step": 7923 + }, + { + "epoch": 0.7146142399783559, + "grad_norm": 2.2790519890071423, + "learning_rate": 7.950872709485741e-07, + "loss": 1.1111, + "step": 7924 + }, + { + "epoch": 0.7147044235018262, + "grad_norm": 2.9264154905431687, + "learning_rate": 
7.946210335834842e-07, + "loss": 0.99, + "step": 7925 + }, + { + "epoch": 0.7147946070252965, + "grad_norm": 1.2168944979429566, + "learning_rate": 7.94154899067974e-07, + "loss": 0.862, + "step": 7926 + }, + { + "epoch": 0.7148847905487667, + "grad_norm": 1.3000162985586947, + "learning_rate": 7.936888674418177e-07, + "loss": 0.8589, + "step": 7927 + }, + { + "epoch": 0.714974974072237, + "grad_norm": 1.729763119637479, + "learning_rate": 7.932229387447771e-07, + "loss": 0.9398, + "step": 7928 + }, + { + "epoch": 0.7150651575957072, + "grad_norm": 1.804687632110723, + "learning_rate": 7.927571130166109e-07, + "loss": 0.9239, + "step": 7929 + }, + { + "epoch": 0.7151553411191776, + "grad_norm": 0.7782076152162247, + "learning_rate": 7.922913902970632e-07, + "loss": 0.7894, + "step": 7930 + }, + { + "epoch": 0.7152455246426478, + "grad_norm": 1.4300178187934058, + "learning_rate": 7.918257706258744e-07, + "loss": 0.9356, + "step": 7931 + }, + { + "epoch": 0.715335708166118, + "grad_norm": 1.7663152240175912, + "learning_rate": 7.913602540427724e-07, + "loss": 0.9439, + "step": 7932 + }, + { + "epoch": 0.7154258916895884, + "grad_norm": 1.4806776075570103, + "learning_rate": 7.908948405874775e-07, + "loss": 0.9303, + "step": 7933 + }, + { + "epoch": 0.7155160752130586, + "grad_norm": 1.5391953309341246, + "learning_rate": 7.904295302997019e-07, + "loss": 0.8712, + "step": 7934 + }, + { + "epoch": 0.7156062587365288, + "grad_norm": 1.3080682866706095, + "learning_rate": 7.899643232191484e-07, + "loss": 0.9539, + "step": 7935 + }, + { + "epoch": 0.7156964422599991, + "grad_norm": 1.2923944125405054, + "learning_rate": 7.894992193855108e-07, + "loss": 0.9143, + "step": 7936 + }, + { + "epoch": 0.7157866257834694, + "grad_norm": 1.7315888579324046, + "learning_rate": 7.890342188384751e-07, + "loss": 0.8814, + "step": 7937 + }, + { + "epoch": 0.7158768093069396, + "grad_norm": 1.4436696381104908, + "learning_rate": 7.885693216177165e-07, + "loss": 0.8774, + "step": 7938 + }, + { + "epoch": 0.7159669928304099, + "grad_norm": 2.435413347758133, + "learning_rate": 7.88104527762903e-07, + "loss": 0.928, + "step": 7939 + }, + { + "epoch": 0.7160571763538801, + "grad_norm": 1.5822994805403192, + "learning_rate": 7.876398373136936e-07, + "loss": 0.8963, + "step": 7940 + }, + { + "epoch": 0.7161473598773505, + "grad_norm": 1.5527017458000498, + "learning_rate": 7.87175250309738e-07, + "loss": 0.9359, + "step": 7941 + }, + { + "epoch": 0.7162375434008207, + "grad_norm": 1.3962553676819656, + "learning_rate": 7.867107667906785e-07, + "loss": 1.0071, + "step": 7942 + }, + { + "epoch": 0.7163277269242909, + "grad_norm": 1.5937451381235201, + "learning_rate": 7.862463867961446e-07, + "loss": 0.9294, + "step": 7943 + }, + { + "epoch": 0.7164179104477612, + "grad_norm": 1.2769045760155258, + "learning_rate": 7.857821103657632e-07, + "loss": 0.9988, + "step": 7944 + }, + { + "epoch": 0.7165080939712315, + "grad_norm": 1.4445065287346426, + "learning_rate": 7.853179375391459e-07, + "loss": 0.9619, + "step": 7945 + }, + { + "epoch": 0.7165982774947017, + "grad_norm": 1.4055918530957034, + "learning_rate": 7.848538683559012e-07, + "loss": 0.9234, + "step": 7946 + }, + { + "epoch": 0.716688461018172, + "grad_norm": 1.5358650570741101, + "learning_rate": 7.843899028556238e-07, + "loss": 0.9768, + "step": 7947 + }, + { + "epoch": 0.7167786445416422, + "grad_norm": 1.2389145925209455, + "learning_rate": 7.839260410779029e-07, + "loss": 0.9333, + "step": 7948 + }, + { + "epoch": 0.7168688280651125, + "grad_norm": 
1.2926029944196153, + "learning_rate": 7.834622830623175e-07, + "loss": 0.9582, + "step": 7949 + }, + { + "epoch": 0.7169590115885828, + "grad_norm": 1.6482250926494926, + "learning_rate": 7.82998628848438e-07, + "loss": 0.9104, + "step": 7950 + }, + { + "epoch": 0.717049195112053, + "grad_norm": 1.309220303521906, + "learning_rate": 7.825350784758261e-07, + "loss": 0.9311, + "step": 7951 + }, + { + "epoch": 0.7171393786355232, + "grad_norm": 1.2568552390678693, + "learning_rate": 7.820716319840342e-07, + "loss": 0.8947, + "step": 7952 + }, + { + "epoch": 0.7172295621589936, + "grad_norm": 1.4609747897198293, + "learning_rate": 7.816082894126069e-07, + "loss": 1.0117, + "step": 7953 + }, + { + "epoch": 0.7173197456824638, + "grad_norm": 1.4381574702835136, + "learning_rate": 7.811450508010778e-07, + "loss": 0.903, + "step": 7954 + }, + { + "epoch": 0.717409929205934, + "grad_norm": 1.3852058862182708, + "learning_rate": 7.806819161889737e-07, + "loss": 0.9082, + "step": 7955 + }, + { + "epoch": 0.7175001127294043, + "grad_norm": 1.3794554570339246, + "learning_rate": 7.802188856158119e-07, + "loss": 0.9647, + "step": 7956 + }, + { + "epoch": 0.7175902962528746, + "grad_norm": 1.9277362920100334, + "learning_rate": 7.797559591211002e-07, + "loss": 0.9164, + "step": 7957 + }, + { + "epoch": 0.7176804797763449, + "grad_norm": 1.3649785534777323, + "learning_rate": 7.79293136744339e-07, + "loss": 0.8974, + "step": 7958 + }, + { + "epoch": 0.7177706632998151, + "grad_norm": 1.5363355770749934, + "learning_rate": 7.788304185250185e-07, + "loss": 0.9359, + "step": 7959 + }, + { + "epoch": 0.7178608468232854, + "grad_norm": 8.442208339814414, + "learning_rate": 7.78367804502619e-07, + "loss": 0.9638, + "step": 7960 + }, + { + "epoch": 0.7179510303467557, + "grad_norm": 1.6866651518567386, + "learning_rate": 7.779052947166156e-07, + "loss": 0.9469, + "step": 7961 + }, + { + "epoch": 0.7180412138702259, + "grad_norm": 1.6139233952982754, + "learning_rate": 7.774428892064697e-07, + "loss": 0.8892, + "step": 7962 + }, + { + "epoch": 0.7181313973936961, + "grad_norm": 1.4747700786811293, + "learning_rate": 7.769805880116391e-07, + "loss": 0.9236, + "step": 7963 + }, + { + "epoch": 0.7182215809171665, + "grad_norm": 2.1382300351594674, + "learning_rate": 7.765183911715678e-07, + "loss": 0.9129, + "step": 7964 + }, + { + "epoch": 0.7183117644406367, + "grad_norm": 1.3451485563863081, + "learning_rate": 7.760562987256933e-07, + "loss": 0.8325, + "step": 7965 + }, + { + "epoch": 0.718401947964107, + "grad_norm": 1.471135576152943, + "learning_rate": 7.755943107134444e-07, + "loss": 0.937, + "step": 7966 + }, + { + "epoch": 0.7184921314875772, + "grad_norm": 0.7007941075807244, + "learning_rate": 7.751324271742401e-07, + "loss": 0.8247, + "step": 7967 + }, + { + "epoch": 0.7185823150110475, + "grad_norm": 1.409278533070123, + "learning_rate": 7.746706481474916e-07, + "loss": 0.9536, + "step": 7968 + }, + { + "epoch": 0.7186724985345178, + "grad_norm": 0.6715146362516117, + "learning_rate": 7.742089736725992e-07, + "loss": 0.757, + "step": 7969 + }, + { + "epoch": 0.718762682057988, + "grad_norm": 1.5914995348729237, + "learning_rate": 7.737474037889559e-07, + "loss": 0.8528, + "step": 7970 + }, + { + "epoch": 0.7188528655814582, + "grad_norm": 1.4260563833156759, + "learning_rate": 7.732859385359458e-07, + "loss": 0.9052, + "step": 7971 + }, + { + "epoch": 0.7189430491049286, + "grad_norm": 1.5284845720934463, + "learning_rate": 7.728245779529434e-07, + "loss": 0.8585, + "step": 7972 + }, + { + "epoch": 
0.7190332326283988, + "grad_norm": 1.4147258066932191, + "learning_rate": 7.723633220793146e-07, + "loss": 0.9609, + "step": 7973 + }, + { + "epoch": 0.719123416151869, + "grad_norm": 1.514941661202478, + "learning_rate": 7.719021709544162e-07, + "loss": 0.827, + "step": 7974 + }, + { + "epoch": 0.7192135996753393, + "grad_norm": 1.407299751956885, + "learning_rate": 7.714411246175964e-07, + "loss": 1.0217, + "step": 7975 + }, + { + "epoch": 0.7193037831988096, + "grad_norm": 1.4539171746287827, + "learning_rate": 7.709801831081946e-07, + "loss": 0.8513, + "step": 7976 + }, + { + "epoch": 0.7193939667222798, + "grad_norm": 1.5329193046915566, + "learning_rate": 7.705193464655391e-07, + "loss": 0.9001, + "step": 7977 + }, + { + "epoch": 0.7194841502457501, + "grad_norm": 1.2649619638054628, + "learning_rate": 7.700586147289534e-07, + "loss": 0.9102, + "step": 7978 + }, + { + "epoch": 0.7195743337692203, + "grad_norm": 1.3309817388615985, + "learning_rate": 7.695979879377481e-07, + "loss": 0.8935, + "step": 7979 + }, + { + "epoch": 0.7196645172926907, + "grad_norm": 1.341590165786663, + "learning_rate": 7.691374661312266e-07, + "loss": 0.9531, + "step": 7980 + }, + { + "epoch": 0.7197547008161609, + "grad_norm": 1.548079512986278, + "learning_rate": 7.686770493486834e-07, + "loss": 0.9109, + "step": 7981 + }, + { + "epoch": 0.7198448843396311, + "grad_norm": 1.4538080394439143, + "learning_rate": 7.68216737629404e-07, + "loss": 0.875, + "step": 7982 + }, + { + "epoch": 0.7199350678631015, + "grad_norm": 1.3492390059566939, + "learning_rate": 7.67756531012665e-07, + "loss": 1.0006, + "step": 7983 + }, + { + "epoch": 0.7200252513865717, + "grad_norm": 1.4312576260426013, + "learning_rate": 7.67296429537732e-07, + "loss": 0.9852, + "step": 7984 + }, + { + "epoch": 0.7201154349100419, + "grad_norm": 1.4653043709381188, + "learning_rate": 7.668364332438661e-07, + "loss": 0.9922, + "step": 7985 + }, + { + "epoch": 0.7202056184335122, + "grad_norm": 1.2820515800133383, + "learning_rate": 7.663765421703145e-07, + "loss": 0.9383, + "step": 7986 + }, + { + "epoch": 0.7202958019569825, + "grad_norm": 1.4986843856434904, + "learning_rate": 7.659167563563187e-07, + "loss": 0.8677, + "step": 7987 + }, + { + "epoch": 0.7203859854804527, + "grad_norm": 1.3803502375751302, + "learning_rate": 7.654570758411096e-07, + "loss": 0.9513, + "step": 7988 + }, + { + "epoch": 0.720476169003923, + "grad_norm": 1.6283128421370034, + "learning_rate": 7.649975006639103e-07, + "loss": 0.9959, + "step": 7989 + }, + { + "epoch": 0.7205663525273932, + "grad_norm": 0.6566932180607183, + "learning_rate": 7.645380308639337e-07, + "loss": 0.7625, + "step": 7990 + }, + { + "epoch": 0.7206565360508635, + "grad_norm": 1.4000202364821408, + "learning_rate": 7.640786664803853e-07, + "loss": 0.9827, + "step": 7991 + }, + { + "epoch": 0.7207467195743338, + "grad_norm": 1.244568468244981, + "learning_rate": 7.636194075524587e-07, + "loss": 0.8568, + "step": 7992 + }, + { + "epoch": 0.720836903097804, + "grad_norm": 1.3134969376469154, + "learning_rate": 7.631602541193429e-07, + "loss": 0.894, + "step": 7993 + }, + { + "epoch": 0.7209270866212742, + "grad_norm": 1.8528536387209882, + "learning_rate": 7.627012062202132e-07, + "loss": 0.995, + "step": 7994 + }, + { + "epoch": 0.7210172701447446, + "grad_norm": 1.5334487232382918, + "learning_rate": 7.622422638942391e-07, + "loss": 0.9299, + "step": 7995 + }, + { + "epoch": 0.7211074536682148, + "grad_norm": 1.1764469226742922, + "learning_rate": 7.617834271805801e-07, + "loss": 0.9985, + 
"step": 7996 + }, + { + "epoch": 0.7211976371916851, + "grad_norm": 0.660273908253741, + "learning_rate": 7.613246961183863e-07, + "loss": 0.7706, + "step": 7997 + }, + { + "epoch": 0.7212878207151553, + "grad_norm": 1.668780234416569, + "learning_rate": 7.608660707468002e-07, + "loss": 0.8945, + "step": 7998 + }, + { + "epoch": 0.7213780042386256, + "grad_norm": 1.4714546461876945, + "learning_rate": 7.604075511049522e-07, + "loss": 0.8307, + "step": 7999 + }, + { + "epoch": 0.7214681877620959, + "grad_norm": 1.3445478776328075, + "learning_rate": 7.599491372319682e-07, + "loss": 0.8793, + "step": 8000 + }, + { + "epoch": 0.7215583712855661, + "grad_norm": 1.1321774511394096, + "learning_rate": 7.594908291669601e-07, + "loss": 0.9322, + "step": 8001 + }, + { + "epoch": 0.7216485548090363, + "grad_norm": 1.4819866038964111, + "learning_rate": 7.590326269490359e-07, + "loss": 0.9704, + "step": 8002 + }, + { + "epoch": 0.7217387383325067, + "grad_norm": 1.2946742584855164, + "learning_rate": 7.585745306172899e-07, + "loss": 0.8907, + "step": 8003 + }, + { + "epoch": 0.7218289218559769, + "grad_norm": 1.4108492345227415, + "learning_rate": 7.5811654021081e-07, + "loss": 0.9187, + "step": 8004 + }, + { + "epoch": 0.7219191053794471, + "grad_norm": 1.3087132185874621, + "learning_rate": 7.576586557686748e-07, + "loss": 0.9602, + "step": 8005 + }, + { + "epoch": 0.7220092889029175, + "grad_norm": 1.5134026663326077, + "learning_rate": 7.572008773299531e-07, + "loss": 0.9083, + "step": 8006 + }, + { + "epoch": 0.7220994724263877, + "grad_norm": 1.4147879072909046, + "learning_rate": 7.567432049337055e-07, + "loss": 0.9661, + "step": 8007 + }, + { + "epoch": 0.722189655949858, + "grad_norm": 1.570891164563389, + "learning_rate": 7.562856386189834e-07, + "loss": 0.8723, + "step": 8008 + }, + { + "epoch": 0.7222798394733282, + "grad_norm": 3.1487626506555833, + "learning_rate": 7.558281784248275e-07, + "loss": 0.9464, + "step": 8009 + }, + { + "epoch": 0.7223700229967985, + "grad_norm": 1.411288960935226, + "learning_rate": 7.553708243902721e-07, + "loss": 0.9253, + "step": 8010 + }, + { + "epoch": 0.7224602065202688, + "grad_norm": 2.160439443391555, + "learning_rate": 7.549135765543404e-07, + "loss": 0.8258, + "step": 8011 + }, + { + "epoch": 0.722550390043739, + "grad_norm": 1.5273462476002597, + "learning_rate": 7.544564349560481e-07, + "loss": 0.9591, + "step": 8012 + }, + { + "epoch": 0.7226405735672092, + "grad_norm": 1.2120217176354888, + "learning_rate": 7.539993996344009e-07, + "loss": 0.9207, + "step": 8013 + }, + { + "epoch": 0.7227307570906796, + "grad_norm": 1.4724488453811917, + "learning_rate": 7.535424706283941e-07, + "loss": 0.9095, + "step": 8014 + }, + { + "epoch": 0.7228209406141498, + "grad_norm": 1.5560517579323183, + "learning_rate": 7.530856479770181e-07, + "loss": 0.894, + "step": 8015 + }, + { + "epoch": 0.72291112413762, + "grad_norm": 1.32054352855185, + "learning_rate": 7.526289317192484e-07, + "loss": 1.0131, + "step": 8016 + }, + { + "epoch": 0.7230013076610903, + "grad_norm": 1.4679657485874387, + "learning_rate": 7.521723218940579e-07, + "loss": 0.9366, + "step": 8017 + }, + { + "epoch": 0.7230914911845606, + "grad_norm": 1.5367224083814723, + "learning_rate": 7.517158185404038e-07, + "loss": 0.9939, + "step": 8018 + }, + { + "epoch": 0.7231816747080309, + "grad_norm": 1.3845492733349574, + "learning_rate": 7.512594216972403e-07, + "loss": 0.953, + "step": 8019 + }, + { + "epoch": 0.7232718582315011, + "grad_norm": 1.2802764170922611, + "learning_rate": 
7.508031314035078e-07, + "loss": 0.9674, + "step": 8020 + }, + { + "epoch": 0.7233620417549713, + "grad_norm": 1.3950895159039989, + "learning_rate": 7.503469476981401e-07, + "loss": 0.9542, + "step": 8021 + }, + { + "epoch": 0.7234522252784417, + "grad_norm": 1.5066507241719387, + "learning_rate": 7.498908706200613e-07, + "loss": 0.8053, + "step": 8022 + }, + { + "epoch": 0.7235424088019119, + "grad_norm": 1.2670223857585088, + "learning_rate": 7.494349002081866e-07, + "loss": 0.9865, + "step": 8023 + }, + { + "epoch": 0.7236325923253821, + "grad_norm": 1.457139571103363, + "learning_rate": 7.489790365014224e-07, + "loss": 0.9551, + "step": 8024 + }, + { + "epoch": 0.7237227758488524, + "grad_norm": 1.4006222329884765, + "learning_rate": 7.485232795386642e-07, + "loss": 0.9498, + "step": 8025 + }, + { + "epoch": 0.7238129593723227, + "grad_norm": 1.415022466231825, + "learning_rate": 7.480676293588002e-07, + "loss": 0.9576, + "step": 8026 + }, + { + "epoch": 0.7239031428957929, + "grad_norm": 0.7444442970440214, + "learning_rate": 7.476120860007093e-07, + "loss": 0.8264, + "step": 8027 + }, + { + "epoch": 0.7239933264192632, + "grad_norm": 1.7280036124836284, + "learning_rate": 7.471566495032608e-07, + "loss": 0.98, + "step": 8028 + }, + { + "epoch": 0.7240835099427335, + "grad_norm": 1.3806173714132997, + "learning_rate": 7.467013199053152e-07, + "loss": 0.8699, + "step": 8029 + }, + { + "epoch": 0.7241736934662037, + "grad_norm": 1.4589227665937283, + "learning_rate": 7.46246097245724e-07, + "loss": 0.9321, + "step": 8030 + }, + { + "epoch": 0.724263876989674, + "grad_norm": 2.0004988286692598, + "learning_rate": 7.457909815633276e-07, + "loss": 0.9396, + "step": 8031 + }, + { + "epoch": 0.7243540605131442, + "grad_norm": 1.760780575249692, + "learning_rate": 7.453359728969618e-07, + "loss": 0.9315, + "step": 8032 + }, + { + "epoch": 0.7244442440366146, + "grad_norm": 1.2570525533270962, + "learning_rate": 7.448810712854475e-07, + "loss": 0.8531, + "step": 8033 + }, + { + "epoch": 0.7245344275600848, + "grad_norm": 1.3961659742052093, + "learning_rate": 7.444262767676022e-07, + "loss": 0.9671, + "step": 8034 + }, + { + "epoch": 0.724624611083555, + "grad_norm": 1.661571877485197, + "learning_rate": 7.439715893822296e-07, + "loss": 1.0103, + "step": 8035 + }, + { + "epoch": 0.7247147946070253, + "grad_norm": 1.3230633942141843, + "learning_rate": 7.435170091681264e-07, + "loss": 0.9552, + "step": 8036 + }, + { + "epoch": 0.7248049781304956, + "grad_norm": 1.5843558105269744, + "learning_rate": 7.430625361640803e-07, + "loss": 0.8947, + "step": 8037 + }, + { + "epoch": 0.7248951616539658, + "grad_norm": 1.902725463420288, + "learning_rate": 7.426081704088694e-07, + "loss": 0.9741, + "step": 8038 + }, + { + "epoch": 0.7249853451774361, + "grad_norm": 1.4904565807981072, + "learning_rate": 7.42153911941263e-07, + "loss": 1.0035, + "step": 8039 + }, + { + "epoch": 0.7250755287009063, + "grad_norm": 1.4045635283250855, + "learning_rate": 7.416997608000192e-07, + "loss": 0.9813, + "step": 8040 + }, + { + "epoch": 0.7251657122243766, + "grad_norm": 1.5091406156474056, + "learning_rate": 7.412457170238918e-07, + "loss": 0.9152, + "step": 8041 + }, + { + "epoch": 0.7252558957478469, + "grad_norm": 1.3687713168500504, + "learning_rate": 7.407917806516193e-07, + "loss": 0.8747, + "step": 8042 + }, + { + "epoch": 0.7253460792713171, + "grad_norm": 1.565278977095874, + "learning_rate": 7.403379517219354e-07, + "loss": 0.9279, + "step": 8043 + }, + { + "epoch": 0.7254362627947873, + "grad_norm": 
1.553035836375572, + "learning_rate": 7.398842302735636e-07, + "loss": 0.963, + "step": 8044 + }, + { + "epoch": 0.7255264463182577, + "grad_norm": 0.6902725062391812, + "learning_rate": 7.394306163452171e-07, + "loss": 0.7703, + "step": 8045 + }, + { + "epoch": 0.7256166298417279, + "grad_norm": 1.1144864702768333, + "learning_rate": 7.38977109975601e-07, + "loss": 0.91, + "step": 8046 + }, + { + "epoch": 0.7257068133651982, + "grad_norm": 1.622123225920522, + "learning_rate": 7.385237112034119e-07, + "loss": 0.8448, + "step": 8047 + }, + { + "epoch": 0.7257969968886684, + "grad_norm": 1.5310217239562847, + "learning_rate": 7.380704200673342e-07, + "loss": 0.9932, + "step": 8048 + }, + { + "epoch": 0.7258871804121387, + "grad_norm": 1.2520810447244572, + "learning_rate": 7.376172366060478e-07, + "loss": 0.96, + "step": 8049 + }, + { + "epoch": 0.725977363935609, + "grad_norm": 1.3686414997870224, + "learning_rate": 7.371641608582187e-07, + "loss": 0.7448, + "step": 8050 + }, + { + "epoch": 0.7260675474590792, + "grad_norm": 1.8311223945815278, + "learning_rate": 7.367111928625067e-07, + "loss": 0.8026, + "step": 8051 + }, + { + "epoch": 0.7261577309825495, + "grad_norm": 1.5032671474568067, + "learning_rate": 7.362583326575613e-07, + "loss": 0.8425, + "step": 8052 + }, + { + "epoch": 0.7262479145060198, + "grad_norm": 1.6414841355188403, + "learning_rate": 7.358055802820234e-07, + "loss": 0.9173, + "step": 8053 + }, + { + "epoch": 0.72633809802949, + "grad_norm": 1.551740988369503, + "learning_rate": 7.353529357745245e-07, + "loss": 0.9042, + "step": 8054 + }, + { + "epoch": 0.7264282815529602, + "grad_norm": 1.7541545870524353, + "learning_rate": 7.349003991736851e-07, + "loss": 0.9765, + "step": 8055 + }, + { + "epoch": 0.7265184650764306, + "grad_norm": 1.5489267171144168, + "learning_rate": 7.344479705181206e-07, + "loss": 0.7838, + "step": 8056 + }, + { + "epoch": 0.7266086485999008, + "grad_norm": 1.4887149208990273, + "learning_rate": 7.339956498464322e-07, + "loss": 0.887, + "step": 8057 + }, + { + "epoch": 0.726698832123371, + "grad_norm": 1.3530109119553242, + "learning_rate": 7.335434371972169e-07, + "loss": 0.9851, + "step": 8058 + }, + { + "epoch": 0.7267890156468413, + "grad_norm": 1.379365666180287, + "learning_rate": 7.33091332609058e-07, + "loss": 0.919, + "step": 8059 + }, + { + "epoch": 0.7268791991703116, + "grad_norm": 1.5090218867439402, + "learning_rate": 7.326393361205323e-07, + "loss": 0.9451, + "step": 8060 + }, + { + "epoch": 0.7269693826937819, + "grad_norm": 1.4833517462769858, + "learning_rate": 7.321874477702068e-07, + "loss": 1.0144, + "step": 8061 + }, + { + "epoch": 0.7270595662172521, + "grad_norm": 0.7680584906517575, + "learning_rate": 7.317356675966386e-07, + "loss": 0.8462, + "step": 8062 + }, + { + "epoch": 0.7271497497407223, + "grad_norm": 1.220574993275294, + "learning_rate": 7.312839956383765e-07, + "loss": 0.9644, + "step": 8063 + }, + { + "epoch": 0.7272399332641927, + "grad_norm": 1.3767737305568286, + "learning_rate": 7.308324319339603e-07, + "loss": 0.9101, + "step": 8064 + }, + { + "epoch": 0.7273301167876629, + "grad_norm": 1.4357747215017125, + "learning_rate": 7.303809765219182e-07, + "loss": 0.9329, + "step": 8065 + }, + { + "epoch": 0.7274203003111331, + "grad_norm": 1.4457894852701667, + "learning_rate": 7.299296294407719e-07, + "loss": 0.9777, + "step": 8066 + }, + { + "epoch": 0.7275104838346034, + "grad_norm": 1.3466508293794315, + "learning_rate": 7.294783907290327e-07, + "loss": 0.9728, + "step": 8067 + }, + { + "epoch": 
0.7276006673580737, + "grad_norm": 1.4944413025943923, + "learning_rate": 7.290272604252028e-07, + "loss": 0.8001, + "step": 8068 + }, + { + "epoch": 0.727690850881544, + "grad_norm": 1.5914718202426534, + "learning_rate": 7.285762385677758e-07, + "loss": 0.8468, + "step": 8069 + }, + { + "epoch": 0.7277810344050142, + "grad_norm": 1.316059010153719, + "learning_rate": 7.281253251952335e-07, + "loss": 0.9633, + "step": 8070 + }, + { + "epoch": 0.7278712179284844, + "grad_norm": 1.4265603641021884, + "learning_rate": 7.276745203460526e-07, + "loss": 0.9177, + "step": 8071 + }, + { + "epoch": 0.7279614014519548, + "grad_norm": 1.5952140217274817, + "learning_rate": 7.272238240586959e-07, + "loss": 0.8749, + "step": 8072 + }, + { + "epoch": 0.728051584975425, + "grad_norm": 1.22532105034022, + "learning_rate": 7.267732363716219e-07, + "loss": 0.8706, + "step": 8073 + }, + { + "epoch": 0.7281417684988952, + "grad_norm": 1.4271604790916312, + "learning_rate": 7.263227573232753e-07, + "loss": 0.8944, + "step": 8074 + }, + { + "epoch": 0.7282319520223655, + "grad_norm": 1.4412593611639017, + "learning_rate": 7.258723869520937e-07, + "loss": 1.0168, + "step": 8075 + }, + { + "epoch": 0.7283221355458358, + "grad_norm": 1.3977937548630102, + "learning_rate": 7.254221252965059e-07, + "loss": 0.9828, + "step": 8076 + }, + { + "epoch": 0.728412319069306, + "grad_norm": 1.7826049068659449, + "learning_rate": 7.249719723949301e-07, + "loss": 0.8454, + "step": 8077 + }, + { + "epoch": 0.7285025025927763, + "grad_norm": 1.3311103478910509, + "learning_rate": 7.245219282857761e-07, + "loss": 0.9505, + "step": 8078 + }, + { + "epoch": 0.7285926861162466, + "grad_norm": 2.0568220415371328, + "learning_rate": 7.240719930074442e-07, + "loss": 0.9671, + "step": 8079 + }, + { + "epoch": 0.7286828696397168, + "grad_norm": 1.646725167510727, + "learning_rate": 7.236221665983257e-07, + "loss": 0.9626, + "step": 8080 + }, + { + "epoch": 0.7287730531631871, + "grad_norm": 0.8571178966248456, + "learning_rate": 7.231724490968012e-07, + "loss": 0.8148, + "step": 8081 + }, + { + "epoch": 0.7288632366866573, + "grad_norm": 1.3371158779118772, + "learning_rate": 7.227228405412438e-07, + "loss": 0.9066, + "step": 8082 + }, + { + "epoch": 0.7289534202101277, + "grad_norm": 1.9178387817352274, + "learning_rate": 7.222733409700165e-07, + "loss": 0.8971, + "step": 8083 + }, + { + "epoch": 0.7290436037335979, + "grad_norm": 1.282374377130695, + "learning_rate": 7.21823950421473e-07, + "loss": 0.948, + "step": 8084 + }, + { + "epoch": 0.7291337872570681, + "grad_norm": 2.1298758722233324, + "learning_rate": 7.213746689339577e-07, + "loss": 0.906, + "step": 8085 + }, + { + "epoch": 0.7292239707805384, + "grad_norm": 1.5106672232633296, + "learning_rate": 7.20925496545807e-07, + "loss": 0.9397, + "step": 8086 + }, + { + "epoch": 0.7293141543040087, + "grad_norm": 1.4594767402293145, + "learning_rate": 7.20476433295344e-07, + "loss": 0.8448, + "step": 8087 + }, + { + "epoch": 0.7294043378274789, + "grad_norm": 1.4028756665654194, + "learning_rate": 7.200274792208882e-07, + "loss": 0.9659, + "step": 8088 + }, + { + "epoch": 0.7294945213509492, + "grad_norm": 1.5978301683445422, + "learning_rate": 7.195786343607444e-07, + "loss": 0.8253, + "step": 8089 + }, + { + "epoch": 0.7295847048744194, + "grad_norm": 1.4510281929363629, + "learning_rate": 7.191298987532131e-07, + "loss": 0.9261, + "step": 8090 + }, + { + "epoch": 0.7296748883978897, + "grad_norm": 1.3888355700536916, + "learning_rate": 7.186812724365805e-07, + "loss": 0.9446, + 
"step": 8091 + }, + { + "epoch": 0.72976507192136, + "grad_norm": 1.6873777133290504, + "learning_rate": 7.182327554491272e-07, + "loss": 0.9426, + "step": 8092 + }, + { + "epoch": 0.7298552554448302, + "grad_norm": 0.7147675353731541, + "learning_rate": 7.177843478291225e-07, + "loss": 0.8201, + "step": 8093 + }, + { + "epoch": 0.7299454389683004, + "grad_norm": 1.76626501045949, + "learning_rate": 7.173360496148276e-07, + "loss": 0.9608, + "step": 8094 + }, + { + "epoch": 0.7300356224917708, + "grad_norm": 1.55765797911418, + "learning_rate": 7.168878608444939e-07, + "loss": 0.9471, + "step": 8095 + }, + { + "epoch": 0.730125806015241, + "grad_norm": 1.862190743187623, + "learning_rate": 7.164397815563623e-07, + "loss": 0.9367, + "step": 8096 + }, + { + "epoch": 0.7302159895387113, + "grad_norm": 1.6687085835892916, + "learning_rate": 7.159918117886661e-07, + "loss": 0.936, + "step": 8097 + }, + { + "epoch": 0.7303061730621815, + "grad_norm": 1.4857294428361378, + "learning_rate": 7.155439515796284e-07, + "loss": 0.9528, + "step": 8098 + }, + { + "epoch": 0.7303963565856518, + "grad_norm": 1.687140603117627, + "learning_rate": 7.150962009674633e-07, + "loss": 0.9248, + "step": 8099 + }, + { + "epoch": 0.7304865401091221, + "grad_norm": 1.3365311669919566, + "learning_rate": 7.146485599903751e-07, + "loss": 0.9833, + "step": 8100 + }, + { + "epoch": 0.7305767236325923, + "grad_norm": 1.4121452527481049, + "learning_rate": 7.142010286865592e-07, + "loss": 1.0014, + "step": 8101 + }, + { + "epoch": 0.7306669071560626, + "grad_norm": 0.5705875882923802, + "learning_rate": 7.137536070942012e-07, + "loss": 0.7619, + "step": 8102 + }, + { + "epoch": 0.7307570906795329, + "grad_norm": 1.3383781936905255, + "learning_rate": 7.133062952514786e-07, + "loss": 0.9409, + "step": 8103 + }, + { + "epoch": 0.7308472742030031, + "grad_norm": 0.6640204248001511, + "learning_rate": 7.128590931965562e-07, + "loss": 0.8049, + "step": 8104 + }, + { + "epoch": 0.7309374577264733, + "grad_norm": 1.2591071248614356, + "learning_rate": 7.124120009675945e-07, + "loss": 0.999, + "step": 8105 + }, + { + "epoch": 0.7310276412499437, + "grad_norm": 1.3978521731072702, + "learning_rate": 7.119650186027399e-07, + "loss": 0.9607, + "step": 8106 + }, + { + "epoch": 0.7311178247734139, + "grad_norm": 1.5343165151478346, + "learning_rate": 7.11518146140132e-07, + "loss": 1.0552, + "step": 8107 + }, + { + "epoch": 0.7312080082968841, + "grad_norm": 1.6061018463563077, + "learning_rate": 7.110713836179007e-07, + "loss": 0.892, + "step": 8108 + }, + { + "epoch": 0.7312981918203544, + "grad_norm": 1.384103291113489, + "learning_rate": 7.106247310741659e-07, + "loss": 0.8625, + "step": 8109 + }, + { + "epoch": 0.7313883753438247, + "grad_norm": 1.4580504097563798, + "learning_rate": 7.101781885470393e-07, + "loss": 0.9989, + "step": 8110 + }, + { + "epoch": 0.731478558867295, + "grad_norm": 1.5324164638185513, + "learning_rate": 7.097317560746203e-07, + "loss": 0.9733, + "step": 8111 + }, + { + "epoch": 0.7315687423907652, + "grad_norm": 1.4079971480286497, + "learning_rate": 7.092854336950036e-07, + "loss": 0.9953, + "step": 8112 + }, + { + "epoch": 0.7316589259142354, + "grad_norm": 1.4733240854539806, + "learning_rate": 7.0883922144627e-07, + "loss": 0.9889, + "step": 8113 + }, + { + "epoch": 0.7317491094377058, + "grad_norm": 1.522087320482135, + "learning_rate": 7.083931193664934e-07, + "loss": 0.9667, + "step": 8114 + }, + { + "epoch": 0.731839292961176, + "grad_norm": 1.4254820627337046, + "learning_rate": 
7.079471274937378e-07, + "loss": 0.9837, + "step": 8115 + }, + { + "epoch": 0.7319294764846462, + "grad_norm": 1.464131581438466, + "learning_rate": 7.075012458660574e-07, + "loss": 0.9219, + "step": 8116 + }, + { + "epoch": 0.7320196600081165, + "grad_norm": 0.6813879555824849, + "learning_rate": 7.070554745214976e-07, + "loss": 0.8178, + "step": 8117 + }, + { + "epoch": 0.7321098435315868, + "grad_norm": 1.6312362144150792, + "learning_rate": 7.066098134980947e-07, + "loss": 0.8942, + "step": 8118 + }, + { + "epoch": 0.732200027055057, + "grad_norm": 1.5584691506968325, + "learning_rate": 7.061642628338727e-07, + "loss": 0.9351, + "step": 8119 + }, + { + "epoch": 0.7322902105785273, + "grad_norm": 1.4499751582155256, + "learning_rate": 7.057188225668513e-07, + "loss": 0.932, + "step": 8120 + }, + { + "epoch": 0.7323803941019975, + "grad_norm": 1.3838553510361, + "learning_rate": 7.052734927350358e-07, + "loss": 1.0831, + "step": 8121 + }, + { + "epoch": 0.7324705776254679, + "grad_norm": 1.6816124819572693, + "learning_rate": 7.048282733764252e-07, + "loss": 0.9585, + "step": 8122 + }, + { + "epoch": 0.7325607611489381, + "grad_norm": 1.3388719509938738, + "learning_rate": 7.043831645290077e-07, + "loss": 1.0134, + "step": 8123 + }, + { + "epoch": 0.7326509446724083, + "grad_norm": 1.5102213853898354, + "learning_rate": 7.039381662307624e-07, + "loss": 0.9097, + "step": 8124 + }, + { + "epoch": 0.7327411281958787, + "grad_norm": 1.3741735228642569, + "learning_rate": 7.034932785196601e-07, + "loss": 0.9149, + "step": 8125 + }, + { + "epoch": 0.7328313117193489, + "grad_norm": 1.4493879440538942, + "learning_rate": 7.030485014336585e-07, + "loss": 0.9256, + "step": 8126 + }, + { + "epoch": 0.7329214952428191, + "grad_norm": 1.6103832456896858, + "learning_rate": 7.026038350107118e-07, + "loss": 0.8956, + "step": 8127 + }, + { + "epoch": 0.7330116787662894, + "grad_norm": 1.9357132825713104, + "learning_rate": 7.021592792887579e-07, + "loss": 1.0702, + "step": 8128 + }, + { + "epoch": 0.7331018622897597, + "grad_norm": 1.5003774485944303, + "learning_rate": 7.01714834305732e-07, + "loss": 0.9946, + "step": 8129 + }, + { + "epoch": 0.7331920458132299, + "grad_norm": 1.3795151703329809, + "learning_rate": 7.012705000995544e-07, + "loss": 0.8981, + "step": 8130 + }, + { + "epoch": 0.7332822293367002, + "grad_norm": 1.2455641239830793, + "learning_rate": 7.008262767081392e-07, + "loss": 0.9361, + "step": 8131 + }, + { + "epoch": 0.7333724128601704, + "grad_norm": 1.82027052147395, + "learning_rate": 7.003821641693892e-07, + "loss": 0.9955, + "step": 8132 + }, + { + "epoch": 0.7334625963836408, + "grad_norm": 1.4916558242204896, + "learning_rate": 6.999381625211993e-07, + "loss": 0.921, + "step": 8133 + }, + { + "epoch": 0.733552779907111, + "grad_norm": 1.521440654709214, + "learning_rate": 6.994942718014536e-07, + "loss": 0.8586, + "step": 8134 + }, + { + "epoch": 0.7336429634305812, + "grad_norm": 1.9780883930308348, + "learning_rate": 6.990504920480282e-07, + "loss": 0.848, + "step": 8135 + }, + { + "epoch": 0.7337331469540515, + "grad_norm": 1.5257222575841245, + "learning_rate": 6.986068232987879e-07, + "loss": 0.937, + "step": 8136 + }, + { + "epoch": 0.7338233304775218, + "grad_norm": 1.1938407663719899, + "learning_rate": 6.981632655915888e-07, + "loss": 0.8944, + "step": 8137 + }, + { + "epoch": 0.733913514000992, + "grad_norm": 1.3664643909436203, + "learning_rate": 6.977198189642783e-07, + "loss": 1.0001, + "step": 8138 + }, + { + "epoch": 0.7340036975244623, + "grad_norm": 
1.5965356604335559, + "learning_rate": 6.972764834546935e-07, + "loss": 0.9625, + "step": 8139 + }, + { + "epoch": 0.7340938810479325, + "grad_norm": 1.4920031049148939, + "learning_rate": 6.96833259100663e-07, + "loss": 0.9549, + "step": 8140 + }, + { + "epoch": 0.7341840645714028, + "grad_norm": 1.599996107811962, + "learning_rate": 6.96390145940003e-07, + "loss": 0.9094, + "step": 8141 + }, + { + "epoch": 0.7342742480948731, + "grad_norm": 1.3890319014140953, + "learning_rate": 6.959471440105253e-07, + "loss": 0.8901, + "step": 8142 + }, + { + "epoch": 0.7343644316183433, + "grad_norm": 2.282263961570441, + "learning_rate": 6.955042533500261e-07, + "loss": 0.9737, + "step": 8143 + }, + { + "epoch": 0.7344546151418135, + "grad_norm": 1.6461790985156046, + "learning_rate": 6.950614739962986e-07, + "loss": 0.8977, + "step": 8144 + }, + { + "epoch": 0.7345447986652839, + "grad_norm": 1.385278001696061, + "learning_rate": 6.946188059871198e-07, + "loss": 0.8296, + "step": 8145 + }, + { + "epoch": 0.7346349821887541, + "grad_norm": 1.4010267103264291, + "learning_rate": 6.941762493602638e-07, + "loss": 1.0519, + "step": 8146 + }, + { + "epoch": 0.7347251657122243, + "grad_norm": 1.338674897367763, + "learning_rate": 6.937338041534899e-07, + "loss": 0.8704, + "step": 8147 + }, + { + "epoch": 0.7348153492356947, + "grad_norm": 1.3101029532504458, + "learning_rate": 6.932914704045505e-07, + "loss": 0.9312, + "step": 8148 + }, + { + "epoch": 0.7349055327591649, + "grad_norm": 1.6335670881316757, + "learning_rate": 6.928492481511878e-07, + "loss": 0.964, + "step": 8149 + }, + { + "epoch": 0.7349957162826352, + "grad_norm": 1.8965298208179673, + "learning_rate": 6.924071374311349e-07, + "loss": 0.8862, + "step": 8150 + }, + { + "epoch": 0.7350858998061054, + "grad_norm": 0.6562861250970637, + "learning_rate": 6.919651382821157e-07, + "loss": 0.7926, + "step": 8151 + }, + { + "epoch": 0.7351760833295757, + "grad_norm": 3.1369963222648702, + "learning_rate": 6.915232507418425e-07, + "loss": 0.9093, + "step": 8152 + }, + { + "epoch": 0.735266266853046, + "grad_norm": 1.5387479199194773, + "learning_rate": 6.910814748480204e-07, + "loss": 0.8279, + "step": 8153 + }, + { + "epoch": 0.7353564503765162, + "grad_norm": 1.9619550762380231, + "learning_rate": 6.906398106383445e-07, + "loss": 0.9502, + "step": 8154 + }, + { + "epoch": 0.7354466338999864, + "grad_norm": 1.3977703015963603, + "learning_rate": 6.901982581504994e-07, + "loss": 0.9313, + "step": 8155 + }, + { + "epoch": 0.7355368174234568, + "grad_norm": 1.539367180888444, + "learning_rate": 6.897568174221611e-07, + "loss": 0.8232, + "step": 8156 + }, + { + "epoch": 0.735627000946927, + "grad_norm": 1.4659213159009734, + "learning_rate": 6.893154884909966e-07, + "loss": 1.0108, + "step": 8157 + }, + { + "epoch": 0.7357171844703972, + "grad_norm": 0.6362685501751969, + "learning_rate": 6.888742713946602e-07, + "loss": 0.7896, + "step": 8158 + }, + { + "epoch": 0.7358073679938675, + "grad_norm": 1.6224849017223977, + "learning_rate": 6.884331661708018e-07, + "loss": 0.9389, + "step": 8159 + }, + { + "epoch": 0.7358975515173378, + "grad_norm": 1.2936565955164099, + "learning_rate": 6.879921728570561e-07, + "loss": 0.9327, + "step": 8160 + }, + { + "epoch": 0.735987735040808, + "grad_norm": 1.241151341554001, + "learning_rate": 6.875512914910539e-07, + "loss": 1.0159, + "step": 8161 + }, + { + "epoch": 0.7360779185642783, + "grad_norm": 1.3540400470322065, + "learning_rate": 6.871105221104119e-07, + "loss": 0.889, + "step": 8162 + }, + { + "epoch": 
0.7361681020877485, + "grad_norm": 1.6214433040393708, + "learning_rate": 6.866698647527391e-07, + "loss": 0.9156, + "step": 8163 + }, + { + "epoch": 0.7362582856112189, + "grad_norm": 1.466107365141389, + "learning_rate": 6.862293194556353e-07, + "loss": 0.9711, + "step": 8164 + }, + { + "epoch": 0.7363484691346891, + "grad_norm": 1.339845040101078, + "learning_rate": 6.857888862566896e-07, + "loss": 0.8785, + "step": 8165 + }, + { + "epoch": 0.7364386526581593, + "grad_norm": 1.298843268973038, + "learning_rate": 6.853485651934836e-07, + "loss": 0.894, + "step": 8166 + }, + { + "epoch": 0.7365288361816296, + "grad_norm": 1.3617925522152263, + "learning_rate": 6.849083563035855e-07, + "loss": 0.7966, + "step": 8167 + }, + { + "epoch": 0.7366190197050999, + "grad_norm": 0.7647396340192953, + "learning_rate": 6.844682596245592e-07, + "loss": 0.8925, + "step": 8168 + }, + { + "epoch": 0.7367092032285701, + "grad_norm": 1.5436135791625125, + "learning_rate": 6.840282751939539e-07, + "loss": 0.9918, + "step": 8169 + }, + { + "epoch": 0.7367993867520404, + "grad_norm": 1.344461563139732, + "learning_rate": 6.835884030493126e-07, + "loss": 0.9798, + "step": 8170 + }, + { + "epoch": 0.7368895702755107, + "grad_norm": 1.498466820773074, + "learning_rate": 6.831486432281672e-07, + "loss": 0.9581, + "step": 8171 + }, + { + "epoch": 0.736979753798981, + "grad_norm": 1.810587432966713, + "learning_rate": 6.827089957680407e-07, + "loss": 0.9515, + "step": 8172 + }, + { + "epoch": 0.7370699373224512, + "grad_norm": 1.3927158111090003, + "learning_rate": 6.822694607064461e-07, + "loss": 1.0474, + "step": 8173 + }, + { + "epoch": 0.7371601208459214, + "grad_norm": 1.4119380778440298, + "learning_rate": 6.818300380808877e-07, + "loss": 0.9668, + "step": 8174 + }, + { + "epoch": 0.7372503043693918, + "grad_norm": 1.5486655612984854, + "learning_rate": 6.813907279288574e-07, + "loss": 0.927, + "step": 8175 + }, + { + "epoch": 0.737340487892862, + "grad_norm": 1.3564578358543786, + "learning_rate": 6.809515302878422e-07, + "loss": 0.953, + "step": 8176 + }, + { + "epoch": 0.7374306714163322, + "grad_norm": 1.7338334346351838, + "learning_rate": 6.80512445195315e-07, + "loss": 0.9238, + "step": 8177 + }, + { + "epoch": 0.7375208549398025, + "grad_norm": 1.3949229654624606, + "learning_rate": 6.800734726887416e-07, + "loss": 0.9597, + "step": 8178 + }, + { + "epoch": 0.7376110384632728, + "grad_norm": 1.4304837370357721, + "learning_rate": 6.796346128055775e-07, + "loss": 0.9373, + "step": 8179 + }, + { + "epoch": 0.737701221986743, + "grad_norm": 1.4383908704003323, + "learning_rate": 6.791958655832684e-07, + "loss": 0.9062, + "step": 8180 + }, + { + "epoch": 0.7377914055102133, + "grad_norm": 1.5470919986683889, + "learning_rate": 6.787572310592518e-07, + "loss": 0.9041, + "step": 8181 + }, + { + "epoch": 0.7378815890336835, + "grad_norm": 1.677431800922716, + "learning_rate": 6.783187092709521e-07, + "loss": 1.016, + "step": 8182 + }, + { + "epoch": 0.7379717725571538, + "grad_norm": 2.0990457319390945, + "learning_rate": 6.778803002557891e-07, + "loss": 0.9483, + "step": 8183 + }, + { + "epoch": 0.7380619560806241, + "grad_norm": 1.2206239720431529, + "learning_rate": 6.774420040511686e-07, + "loss": 0.9765, + "step": 8184 + }, + { + "epoch": 0.7381521396040943, + "grad_norm": 1.7029971853469283, + "learning_rate": 6.770038206944886e-07, + "loss": 0.9335, + "step": 8185 + }, + { + "epoch": 0.7382423231275645, + "grad_norm": 1.2217578939140306, + "learning_rate": 6.765657502231375e-07, + "loss": 0.9504, + 
"step": 8186 + }, + { + "epoch": 0.7383325066510349, + "grad_norm": 1.2908826981757575, + "learning_rate": 6.761277926744939e-07, + "loss": 1.0063, + "step": 8187 + }, + { + "epoch": 0.7384226901745051, + "grad_norm": 1.4116025149568643, + "learning_rate": 6.756899480859268e-07, + "loss": 0.9255, + "step": 8188 + }, + { + "epoch": 0.7385128736979754, + "grad_norm": 1.390915401011505, + "learning_rate": 6.752522164947956e-07, + "loss": 0.9859, + "step": 8189 + }, + { + "epoch": 0.7386030572214456, + "grad_norm": 1.1085226518710856, + "learning_rate": 6.748145979384498e-07, + "loss": 0.9902, + "step": 8190 + }, + { + "epoch": 0.7386932407449159, + "grad_norm": 1.4950476592259303, + "learning_rate": 6.743770924542303e-07, + "loss": 0.8814, + "step": 8191 + }, + { + "epoch": 0.7387834242683862, + "grad_norm": 0.6628015101995167, + "learning_rate": 6.739397000794658e-07, + "loss": 0.8059, + "step": 8192 + }, + { + "epoch": 0.7388736077918564, + "grad_norm": 1.6894354846893356, + "learning_rate": 6.735024208514782e-07, + "loss": 0.9143, + "step": 8193 + }, + { + "epoch": 0.7389637913153266, + "grad_norm": 1.1968746075741594, + "learning_rate": 6.73065254807578e-07, + "loss": 0.8558, + "step": 8194 + }, + { + "epoch": 0.739053974838797, + "grad_norm": 1.4845878046985361, + "learning_rate": 6.726282019850669e-07, + "loss": 0.9967, + "step": 8195 + }, + { + "epoch": 0.7391441583622672, + "grad_norm": 1.1199412519510366, + "learning_rate": 6.721912624212376e-07, + "loss": 1.0015, + "step": 8196 + }, + { + "epoch": 0.7392343418857374, + "grad_norm": 1.5106707742847871, + "learning_rate": 6.717544361533696e-07, + "loss": 0.834, + "step": 8197 + }, + { + "epoch": 0.7393245254092078, + "grad_norm": 1.3174104567469949, + "learning_rate": 6.713177232187386e-07, + "loss": 0.9191, + "step": 8198 + }, + { + "epoch": 0.739414708932678, + "grad_norm": 1.8695819138493968, + "learning_rate": 6.708811236546041e-07, + "loss": 0.9312, + "step": 8199 + }, + { + "epoch": 0.7395048924561483, + "grad_norm": 1.7118971410895376, + "learning_rate": 6.704446374982224e-07, + "loss": 0.9057, + "step": 8200 + }, + { + "epoch": 0.7395950759796185, + "grad_norm": 4.494065716387334, + "learning_rate": 6.700082647868346e-07, + "loss": 0.9767, + "step": 8201 + }, + { + "epoch": 0.7396852595030888, + "grad_norm": 1.3649991641897918, + "learning_rate": 6.695720055576751e-07, + "loss": 0.9959, + "step": 8202 + }, + { + "epoch": 0.7397754430265591, + "grad_norm": 1.4793511754685253, + "learning_rate": 6.691358598479679e-07, + "loss": 0.9521, + "step": 8203 + }, + { + "epoch": 0.7398656265500293, + "grad_norm": 1.3841895881299826, + "learning_rate": 6.686998276949276e-07, + "loss": 0.934, + "step": 8204 + }, + { + "epoch": 0.7399558100734995, + "grad_norm": 0.7194118355103829, + "learning_rate": 6.682639091357587e-07, + "loss": 0.8463, + "step": 8205 + }, + { + "epoch": 0.7400459935969699, + "grad_norm": 1.4979553592585124, + "learning_rate": 6.678281042076568e-07, + "loss": 0.9245, + "step": 8206 + }, + { + "epoch": 0.7401361771204401, + "grad_norm": 1.1788268328612899, + "learning_rate": 6.673924129478059e-07, + "loss": 0.942, + "step": 8207 + }, + { + "epoch": 0.7402263606439103, + "grad_norm": 1.4521483554083283, + "learning_rate": 6.669568353933824e-07, + "loss": 0.8588, + "step": 8208 + }, + { + "epoch": 0.7403165441673806, + "grad_norm": 1.414557607395292, + "learning_rate": 6.665213715815519e-07, + "loss": 0.8754, + "step": 8209 + }, + { + "epoch": 0.7404067276908509, + "grad_norm": 1.6580156236011374, + "learning_rate": 
6.660860215494706e-07, + "loss": 0.9447, + "step": 8210 + }, + { + "epoch": 0.7404969112143212, + "grad_norm": 1.4389666663120457, + "learning_rate": 6.656507853342852e-07, + "loss": 0.8929, + "step": 8211 + }, + { + "epoch": 0.7405870947377914, + "grad_norm": 2.6116974140816827, + "learning_rate": 6.652156629731323e-07, + "loss": 0.9451, + "step": 8212 + }, + { + "epoch": 0.7406772782612616, + "grad_norm": 1.538958163524987, + "learning_rate": 6.647806545031396e-07, + "loss": 0.9998, + "step": 8213 + }, + { + "epoch": 0.740767461784732, + "grad_norm": 1.4327946652432908, + "learning_rate": 6.643457599614224e-07, + "loss": 0.8616, + "step": 8214 + }, + { + "epoch": 0.7408576453082022, + "grad_norm": 1.586533157410903, + "learning_rate": 6.63910979385091e-07, + "loss": 0.9942, + "step": 8215 + }, + { + "epoch": 0.7409478288316724, + "grad_norm": 1.5746373833919398, + "learning_rate": 6.634763128112409e-07, + "loss": 1.0234, + "step": 8216 + }, + { + "epoch": 0.7410380123551427, + "grad_norm": 1.6833201316592794, + "learning_rate": 6.630417602769622e-07, + "loss": 0.9304, + "step": 8217 + }, + { + "epoch": 0.741128195878613, + "grad_norm": 1.470955106468988, + "learning_rate": 6.62607321819332e-07, + "loss": 0.9524, + "step": 8218 + }, + { + "epoch": 0.7412183794020832, + "grad_norm": 1.2459551216909637, + "learning_rate": 6.621729974754196e-07, + "loss": 0.9425, + "step": 8219 + }, + { + "epoch": 0.7413085629255535, + "grad_norm": 1.47342683984454, + "learning_rate": 6.617387872822835e-07, + "loss": 0.8859, + "step": 8220 + }, + { + "epoch": 0.7413987464490238, + "grad_norm": 4.509947272963092, + "learning_rate": 6.613046912769731e-07, + "loss": 0.9144, + "step": 8221 + }, + { + "epoch": 0.741488929972494, + "grad_norm": 1.6606366348864015, + "learning_rate": 6.608707094965289e-07, + "loss": 1.0061, + "step": 8222 + }, + { + "epoch": 0.7415791134959643, + "grad_norm": 1.9384370814225653, + "learning_rate": 6.604368419779787e-07, + "loss": 0.8652, + "step": 8223 + }, + { + "epoch": 0.7416692970194345, + "grad_norm": 2.6912045340943216, + "learning_rate": 6.600030887583434e-07, + "loss": 0.9444, + "step": 8224 + }, + { + "epoch": 0.7417594805429049, + "grad_norm": 1.5148580911477874, + "learning_rate": 6.595694498746336e-07, + "loss": 1.0372, + "step": 8225 + }, + { + "epoch": 0.7418496640663751, + "grad_norm": 1.6337739586915796, + "learning_rate": 6.591359253638491e-07, + "loss": 0.9123, + "step": 8226 + }, + { + "epoch": 0.7419398475898453, + "grad_norm": 1.8153766298056262, + "learning_rate": 6.587025152629808e-07, + "loss": 0.8367, + "step": 8227 + }, + { + "epoch": 0.7420300311133156, + "grad_norm": 1.95441327142039, + "learning_rate": 6.582692196090107e-07, + "loss": 0.9821, + "step": 8228 + }, + { + "epoch": 0.7421202146367859, + "grad_norm": 1.4413652284982554, + "learning_rate": 6.578360384389074e-07, + "loss": 0.9481, + "step": 8229 + }, + { + "epoch": 0.7422103981602561, + "grad_norm": 1.315293790032092, + "learning_rate": 6.574029717896355e-07, + "loss": 0.9184, + "step": 8230 + }, + { + "epoch": 0.7423005816837264, + "grad_norm": 1.7471549203939911, + "learning_rate": 6.569700196981436e-07, + "loss": 0.9271, + "step": 8231 + }, + { + "epoch": 0.7423907652071966, + "grad_norm": 1.5395039346513957, + "learning_rate": 6.565371822013763e-07, + "loss": 0.8427, + "step": 8232 + }, + { + "epoch": 0.742480948730667, + "grad_norm": 1.5722897355973533, + "learning_rate": 6.561044593362636e-07, + "loss": 0.9905, + "step": 8233 + }, + { + "epoch": 0.7425711322541372, + "grad_norm": 
1.3271275982543427, + "learning_rate": 6.556718511397288e-07, + "loss": 0.9223, + "step": 8234 + }, + { + "epoch": 0.7426613157776074, + "grad_norm": 1.3069354125352781, + "learning_rate": 6.552393576486843e-07, + "loss": 1.0489, + "step": 8235 + }, + { + "epoch": 0.7427514993010776, + "grad_norm": 1.5962853554295555, + "learning_rate": 6.548069789000325e-07, + "loss": 0.9472, + "step": 8236 + }, + { + "epoch": 0.742841682824548, + "grad_norm": 2.537880396400332, + "learning_rate": 6.543747149306673e-07, + "loss": 0.922, + "step": 8237 + }, + { + "epoch": 0.7429318663480182, + "grad_norm": 1.3866216061374415, + "learning_rate": 6.5394256577747e-07, + "loss": 0.988, + "step": 8238 + }, + { + "epoch": 0.7430220498714885, + "grad_norm": 1.5014769117909323, + "learning_rate": 6.535105314773161e-07, + "loss": 0.9671, + "step": 8239 + }, + { + "epoch": 0.7431122333949587, + "grad_norm": 1.5911252717116717, + "learning_rate": 6.530786120670677e-07, + "loss": 0.871, + "step": 8240 + }, + { + "epoch": 0.743202416918429, + "grad_norm": 1.513037291375455, + "learning_rate": 6.526468075835787e-07, + "loss": 0.9851, + "step": 8241 + }, + { + "epoch": 0.7432926004418993, + "grad_norm": 1.3278937194504479, + "learning_rate": 6.522151180636937e-07, + "loss": 0.9774, + "step": 8242 + }, + { + "epoch": 0.7433827839653695, + "grad_norm": 3.1310917797203306, + "learning_rate": 6.517835435442461e-07, + "loss": 0.9028, + "step": 8243 + }, + { + "epoch": 0.7434729674888398, + "grad_norm": 2.302320913998293, + "learning_rate": 6.513520840620606e-07, + "loss": 0.9616, + "step": 8244 + }, + { + "epoch": 0.7435631510123101, + "grad_norm": 1.6499249585744806, + "learning_rate": 6.509207396539525e-07, + "loss": 0.891, + "step": 8245 + }, + { + "epoch": 0.7436533345357803, + "grad_norm": 0.6824153006749564, + "learning_rate": 6.50489510356724e-07, + "loss": 0.7761, + "step": 8246 + }, + { + "epoch": 0.7437435180592505, + "grad_norm": 1.6306648549553036, + "learning_rate": 6.500583962071732e-07, + "loss": 0.8638, + "step": 8247 + }, + { + "epoch": 0.7438337015827209, + "grad_norm": 2.585832392005696, + "learning_rate": 6.496273972420827e-07, + "loss": 0.9642, + "step": 8248 + }, + { + "epoch": 0.7439238851061911, + "grad_norm": 1.703751055240654, + "learning_rate": 6.491965134982287e-07, + "loss": 0.994, + "step": 8249 + }, + { + "epoch": 0.7440140686296614, + "grad_norm": 1.632915146809064, + "learning_rate": 6.487657450123765e-07, + "loss": 0.9091, + "step": 8250 + }, + { + "epoch": 0.7441042521531316, + "grad_norm": 1.3736802182857843, + "learning_rate": 6.483350918212814e-07, + "loss": 0.9491, + "step": 8251 + }, + { + "epoch": 0.7441944356766019, + "grad_norm": 0.7564858297444146, + "learning_rate": 6.479045539616898e-07, + "loss": 0.8144, + "step": 8252 + }, + { + "epoch": 0.7442846192000722, + "grad_norm": 1.6222736522862664, + "learning_rate": 6.474741314703358e-07, + "loss": 0.9441, + "step": 8253 + }, + { + "epoch": 0.7443748027235424, + "grad_norm": 1.836110857631546, + "learning_rate": 6.47043824383948e-07, + "loss": 0.9961, + "step": 8254 + }, + { + "epoch": 0.7444649862470126, + "grad_norm": 1.868953811095181, + "learning_rate": 6.466136327392399e-07, + "loss": 0.9784, + "step": 8255 + }, + { + "epoch": 0.744555169770483, + "grad_norm": 1.6541015077276306, + "learning_rate": 6.461835565729206e-07, + "loss": 0.9719, + "step": 8256 + }, + { + "epoch": 0.7446453532939532, + "grad_norm": 1.3764246582574031, + "learning_rate": 6.457535959216844e-07, + "loss": 0.9659, + "step": 8257 + }, + { + "epoch": 
0.7447355368174234, + "grad_norm": 1.3657636803366582, + "learning_rate": 6.453237508222186e-07, + "loss": 0.9027, + "step": 8258 + }, + { + "epoch": 0.7448257203408937, + "grad_norm": 1.39214109435484, + "learning_rate": 6.448940213112e-07, + "loss": 0.9543, + "step": 8259 + }, + { + "epoch": 0.744915903864364, + "grad_norm": 1.3100351167296316, + "learning_rate": 6.444644074252954e-07, + "loss": 1.0234, + "step": 8260 + }, + { + "epoch": 0.7450060873878342, + "grad_norm": 1.458172716650906, + "learning_rate": 6.440349092011628e-07, + "loss": 1.0108, + "step": 8261 + }, + { + "epoch": 0.7450962709113045, + "grad_norm": 1.3759456763716607, + "learning_rate": 6.436055266754475e-07, + "loss": 0.9361, + "step": 8262 + }, + { + "epoch": 0.7451864544347747, + "grad_norm": 1.7475496575936305, + "learning_rate": 6.431762598847879e-07, + "loss": 0.921, + "step": 8263 + }, + { + "epoch": 0.7452766379582451, + "grad_norm": 1.4045393393586274, + "learning_rate": 6.427471088658111e-07, + "loss": 0.9177, + "step": 8264 + }, + { + "epoch": 0.7453668214817153, + "grad_norm": 1.4290189587007487, + "learning_rate": 6.42318073655135e-07, + "loss": 0.937, + "step": 8265 + }, + { + "epoch": 0.7454570050051855, + "grad_norm": 1.5955694695928326, + "learning_rate": 6.41889154289367e-07, + "loss": 0.9023, + "step": 8266 + }, + { + "epoch": 0.7455471885286559, + "grad_norm": 1.5378858841873129, + "learning_rate": 6.414603508051055e-07, + "loss": 1.0032, + "step": 8267 + }, + { + "epoch": 0.7456373720521261, + "grad_norm": 0.7024915809058978, + "learning_rate": 6.410316632389365e-07, + "loss": 0.8508, + "step": 8268 + }, + { + "epoch": 0.7457275555755963, + "grad_norm": 1.5571436051458158, + "learning_rate": 6.406030916274406e-07, + "loss": 0.9424, + "step": 8269 + }, + { + "epoch": 0.7458177390990666, + "grad_norm": 1.4287392228221512, + "learning_rate": 6.401746360071831e-07, + "loss": 0.9012, + "step": 8270 + }, + { + "epoch": 0.7459079226225369, + "grad_norm": 1.488737501961546, + "learning_rate": 6.397462964147251e-07, + "loss": 0.9039, + "step": 8271 + }, + { + "epoch": 0.7459981061460071, + "grad_norm": 1.4728682388193213, + "learning_rate": 6.393180728866128e-07, + "loss": 0.9331, + "step": 8272 + }, + { + "epoch": 0.7460882896694774, + "grad_norm": 1.2700442172785849, + "learning_rate": 6.388899654593853e-07, + "loss": 0.9709, + "step": 8273 + }, + { + "epoch": 0.7461784731929476, + "grad_norm": 1.5432603005306953, + "learning_rate": 6.384619741695709e-07, + "loss": 0.9711, + "step": 8274 + }, + { + "epoch": 0.746268656716418, + "grad_norm": 1.3277194582041452, + "learning_rate": 6.380340990536883e-07, + "loss": 0.9475, + "step": 8275 + }, + { + "epoch": 0.7463588402398882, + "grad_norm": 1.6124534422784507, + "learning_rate": 6.37606340148247e-07, + "loss": 0.9424, + "step": 8276 + }, + { + "epoch": 0.7464490237633584, + "grad_norm": 1.65583148202983, + "learning_rate": 6.371786974897433e-07, + "loss": 0.8439, + "step": 8277 + }, + { + "epoch": 0.7465392072868287, + "grad_norm": 1.1787100778495985, + "learning_rate": 6.367511711146691e-07, + "loss": 0.9653, + "step": 8278 + }, + { + "epoch": 0.746629390810299, + "grad_norm": 1.3893700841627101, + "learning_rate": 6.363237610595014e-07, + "loss": 0.8834, + "step": 8279 + }, + { + "epoch": 0.7467195743337692, + "grad_norm": 1.7510733718618985, + "learning_rate": 6.358964673607094e-07, + "loss": 0.919, + "step": 8280 + }, + { + "epoch": 0.7468097578572395, + "grad_norm": 1.2429183633255427, + "learning_rate": 6.354692900547525e-07, + "loss": 0.9784, + 
"step": 8281 + }, + { + "epoch": 0.7468999413807097, + "grad_norm": 1.4503327481172141, + "learning_rate": 6.350422291780797e-07, + "loss": 0.9125, + "step": 8282 + }, + { + "epoch": 0.74699012490418, + "grad_norm": 1.7893644519512335, + "learning_rate": 6.346152847671302e-07, + "loss": 1.0369, + "step": 8283 + }, + { + "epoch": 0.7470803084276503, + "grad_norm": 1.5211345774888003, + "learning_rate": 6.34188456858334e-07, + "loss": 0.9372, + "step": 8284 + }, + { + "epoch": 0.7471704919511205, + "grad_norm": 1.6621196311697009, + "learning_rate": 6.337617454881081e-07, + "loss": 0.8958, + "step": 8285 + }, + { + "epoch": 0.7472606754745907, + "grad_norm": 1.3228938583850856, + "learning_rate": 6.333351506928651e-07, + "loss": 0.8571, + "step": 8286 + }, + { + "epoch": 0.7473508589980611, + "grad_norm": 1.6058244519993579, + "learning_rate": 6.329086725090018e-07, + "loss": 0.9122, + "step": 8287 + }, + { + "epoch": 0.7474410425215313, + "grad_norm": 1.424183993352255, + "learning_rate": 6.324823109729087e-07, + "loss": 0.9056, + "step": 8288 + }, + { + "epoch": 0.7475312260450016, + "grad_norm": 1.4469876041632743, + "learning_rate": 6.320560661209653e-07, + "loss": 0.8745, + "step": 8289 + }, + { + "epoch": 0.7476214095684719, + "grad_norm": 1.360075583741801, + "learning_rate": 6.316299379895411e-07, + "loss": 0.8285, + "step": 8290 + }, + { + "epoch": 0.7477115930919421, + "grad_norm": 1.5125756016271774, + "learning_rate": 6.312039266149965e-07, + "loss": 0.9718, + "step": 8291 + }, + { + "epoch": 0.7478017766154124, + "grad_norm": 1.2952805794715962, + "learning_rate": 6.307780320336789e-07, + "loss": 0.9714, + "step": 8292 + }, + { + "epoch": 0.7478919601388826, + "grad_norm": 1.5483941219347357, + "learning_rate": 6.303522542819306e-07, + "loss": 0.9955, + "step": 8293 + }, + { + "epoch": 0.7479821436623529, + "grad_norm": 1.9250989095865396, + "learning_rate": 6.299265933960796e-07, + "loss": 0.9141, + "step": 8294 + }, + { + "epoch": 0.7480723271858232, + "grad_norm": 1.7876884814573342, + "learning_rate": 6.295010494124462e-07, + "loss": 0.8917, + "step": 8295 + }, + { + "epoch": 0.7481625107092934, + "grad_norm": 1.3162091840626768, + "learning_rate": 6.290756223673399e-07, + "loss": 0.9097, + "step": 8296 + }, + { + "epoch": 0.7482526942327636, + "grad_norm": 1.1692770580417755, + "learning_rate": 6.28650312297061e-07, + "loss": 0.9676, + "step": 8297 + }, + { + "epoch": 0.748342877756234, + "grad_norm": 1.579768165770081, + "learning_rate": 6.282251192378987e-07, + "loss": 0.8536, + "step": 8298 + }, + { + "epoch": 0.7484330612797042, + "grad_norm": 1.1932262090376864, + "learning_rate": 6.278000432261334e-07, + "loss": 0.8637, + "step": 8299 + }, + { + "epoch": 0.7485232448031744, + "grad_norm": 2.382535424304366, + "learning_rate": 6.273750842980345e-07, + "loss": 0.9632, + "step": 8300 + }, + { + "epoch": 0.7486134283266447, + "grad_norm": 1.4528211050256696, + "learning_rate": 6.269502424898625e-07, + "loss": 0.9728, + "step": 8301 + }, + { + "epoch": 0.748703611850115, + "grad_norm": 1.5553460188172703, + "learning_rate": 6.265255178378663e-07, + "loss": 0.9843, + "step": 8302 + }, + { + "epoch": 0.7487937953735853, + "grad_norm": 1.7190934271597083, + "learning_rate": 6.261009103782861e-07, + "loss": 0.8375, + "step": 8303 + }, + { + "epoch": 0.7488839788970555, + "grad_norm": 1.4649394499989021, + "learning_rate": 6.256764201473519e-07, + "loss": 0.995, + "step": 8304 + }, + { + "epoch": 0.7489741624205257, + "grad_norm": 0.7374253736336797, + "learning_rate": 
6.252520471812835e-07, + "loss": 0.7922, + "step": 8305 + }, + { + "epoch": 0.7490643459439961, + "grad_norm": 1.711660711296045, + "learning_rate": 6.248277915162912e-07, + "loss": 0.9513, + "step": 8306 + }, + { + "epoch": 0.7491545294674663, + "grad_norm": 1.3264932818826798, + "learning_rate": 6.244036531885731e-07, + "loss": 0.793, + "step": 8307 + }, + { + "epoch": 0.7492447129909365, + "grad_norm": 1.5243178733766323, + "learning_rate": 6.239796322343216e-07, + "loss": 0.8725, + "step": 8308 + }, + { + "epoch": 0.7493348965144068, + "grad_norm": 1.3602273395503837, + "learning_rate": 6.235557286897137e-07, + "loss": 0.8785, + "step": 8309 + }, + { + "epoch": 0.7494250800378771, + "grad_norm": 1.4405658205003928, + "learning_rate": 6.231319425909223e-07, + "loss": 0.9382, + "step": 8310 + }, + { + "epoch": 0.7495152635613473, + "grad_norm": 1.4300752541274362, + "learning_rate": 6.227082739741045e-07, + "loss": 0.9748, + "step": 8311 + }, + { + "epoch": 0.7496054470848176, + "grad_norm": 1.7144350160001762, + "learning_rate": 6.222847228754113e-07, + "loss": 0.9581, + "step": 8312 + }, + { + "epoch": 0.7496956306082878, + "grad_norm": 1.520617805084504, + "learning_rate": 6.218612893309823e-07, + "loss": 1.039, + "step": 8313 + }, + { + "epoch": 0.7497858141317582, + "grad_norm": 1.412323614939809, + "learning_rate": 6.214379733769468e-07, + "loss": 0.8838, + "step": 8314 + }, + { + "epoch": 0.7498759976552284, + "grad_norm": 1.8111725912144945, + "learning_rate": 6.21014775049425e-07, + "loss": 0.9756, + "step": 8315 + }, + { + "epoch": 0.7499661811786986, + "grad_norm": 0.6061965260076075, + "learning_rate": 6.205916943845267e-07, + "loss": 0.776, + "step": 8316 + }, + { + "epoch": 0.750056364702169, + "grad_norm": 1.6596797483676553, + "learning_rate": 6.201687314183504e-07, + "loss": 0.9856, + "step": 8317 + }, + { + "epoch": 0.7501465482256392, + "grad_norm": 0.6973709539790782, + "learning_rate": 6.197458861869862e-07, + "loss": 0.8419, + "step": 8318 + }, + { + "epoch": 0.7502367317491094, + "grad_norm": 1.3664872474284726, + "learning_rate": 6.193231587265138e-07, + "loss": 0.9787, + "step": 8319 + }, + { + "epoch": 0.7503269152725797, + "grad_norm": 1.56160405221085, + "learning_rate": 6.189005490730024e-07, + "loss": 0.9229, + "step": 8320 + }, + { + "epoch": 0.75041709879605, + "grad_norm": 1.4538290307694395, + "learning_rate": 6.184780572625115e-07, + "loss": 0.96, + "step": 8321 + }, + { + "epoch": 0.7505072823195202, + "grad_norm": 1.8415386710703985, + "learning_rate": 6.180556833310902e-07, + "loss": 0.8424, + "step": 8322 + }, + { + "epoch": 0.7505974658429905, + "grad_norm": 1.396840680957424, + "learning_rate": 6.176334273147788e-07, + "loss": 0.9758, + "step": 8323 + }, + { + "epoch": 0.7506876493664607, + "grad_norm": 1.7089357554467948, + "learning_rate": 6.172112892496042e-07, + "loss": 0.8484, + "step": 8324 + }, + { + "epoch": 0.750777832889931, + "grad_norm": 1.401612752738618, + "learning_rate": 6.167892691715883e-07, + "loss": 0.9797, + "step": 8325 + }, + { + "epoch": 0.7508680164134013, + "grad_norm": 1.450508797924944, + "learning_rate": 6.163673671167378e-07, + "loss": 0.9245, + "step": 8326 + }, + { + "epoch": 0.7509581999368715, + "grad_norm": 1.4793365900285473, + "learning_rate": 6.15945583121054e-07, + "loss": 0.9846, + "step": 8327 + }, + { + "epoch": 0.7510483834603418, + "grad_norm": 1.945723402143199, + "learning_rate": 6.15523917220524e-07, + "loss": 0.8883, + "step": 8328 + }, + { + "epoch": 0.7511385669838121, + "grad_norm": 
1.4122570588483632, + "learning_rate": 6.151023694511273e-07, + "loss": 1.0025, + "step": 8329 + }, + { + "epoch": 0.7512287505072823, + "grad_norm": 1.4337348556716467, + "learning_rate": 6.146809398488328e-07, + "loss": 0.8774, + "step": 8330 + }, + { + "epoch": 0.7513189340307526, + "grad_norm": 1.4128633757545646, + "learning_rate": 6.142596284495989e-07, + "loss": 0.9799, + "step": 8331 + }, + { + "epoch": 0.7514091175542228, + "grad_norm": 1.4337735180310314, + "learning_rate": 6.138384352893751e-07, + "loss": 0.8979, + "step": 8332 + }, + { + "epoch": 0.7514993010776931, + "grad_norm": 1.8483216456578913, + "learning_rate": 6.134173604040987e-07, + "loss": 0.9242, + "step": 8333 + }, + { + "epoch": 0.7515894846011634, + "grad_norm": 1.4984859136530941, + "learning_rate": 6.129964038296984e-07, + "loss": 0.9458, + "step": 8334 + }, + { + "epoch": 0.7516796681246336, + "grad_norm": 1.5374851567241932, + "learning_rate": 6.12575565602093e-07, + "loss": 1.0113, + "step": 8335 + }, + { + "epoch": 0.7517698516481038, + "grad_norm": 1.1941401897475796, + "learning_rate": 6.121548457571905e-07, + "loss": 1.0071, + "step": 8336 + }, + { + "epoch": 0.7518600351715742, + "grad_norm": 1.4770057456977264, + "learning_rate": 6.11734244330889e-07, + "loss": 0.8841, + "step": 8337 + }, + { + "epoch": 0.7519502186950444, + "grad_norm": 1.497109807657435, + "learning_rate": 6.113137613590773e-07, + "loss": 0.951, + "step": 8338 + }, + { + "epoch": 0.7520404022185146, + "grad_norm": 1.5584170592692246, + "learning_rate": 6.108933968776313e-07, + "loss": 0.9933, + "step": 8339 + }, + { + "epoch": 0.752130585741985, + "grad_norm": 1.320372224619378, + "learning_rate": 6.104731509224212e-07, + "loss": 0.9356, + "step": 8340 + }, + { + "epoch": 0.7522207692654552, + "grad_norm": 1.2475010211000106, + "learning_rate": 6.100530235293027e-07, + "loss": 0.8717, + "step": 8341 + }, + { + "epoch": 0.7523109527889255, + "grad_norm": 1.6090051911283956, + "learning_rate": 6.096330147341253e-07, + "loss": 0.9615, + "step": 8342 + }, + { + "epoch": 0.7524011363123957, + "grad_norm": 1.7364593902505345, + "learning_rate": 6.09213124572725e-07, + "loss": 0.9978, + "step": 8343 + }, + { + "epoch": 0.752491319835866, + "grad_norm": 1.313189280124522, + "learning_rate": 6.087933530809297e-07, + "loss": 1.0109, + "step": 8344 + }, + { + "epoch": 0.7525815033593363, + "grad_norm": 1.4227814352731707, + "learning_rate": 6.083737002945566e-07, + "loss": 0.929, + "step": 8345 + }, + { + "epoch": 0.7526716868828065, + "grad_norm": 1.3964545826968502, + "learning_rate": 6.079541662494126e-07, + "loss": 0.8902, + "step": 8346 + }, + { + "epoch": 0.7527618704062767, + "grad_norm": 1.4315707617727196, + "learning_rate": 6.075347509812954e-07, + "loss": 0.9514, + "step": 8347 + }, + { + "epoch": 0.7528520539297471, + "grad_norm": 1.3463491099082483, + "learning_rate": 6.0711545452599e-07, + "loss": 0.9054, + "step": 8348 + }, + { + "epoch": 0.7529422374532173, + "grad_norm": 1.508063102808368, + "learning_rate": 6.066962769192756e-07, + "loss": 0.8686, + "step": 8349 + }, + { + "epoch": 0.7530324209766875, + "grad_norm": 1.9436412817672766, + "learning_rate": 6.062772181969167e-07, + "loss": 0.8775, + "step": 8350 + }, + { + "epoch": 0.7531226045001578, + "grad_norm": 1.3675241546310803, + "learning_rate": 6.058582783946706e-07, + "loss": 1.0034, + "step": 8351 + }, + { + "epoch": 0.7532127880236281, + "grad_norm": 1.6497827618093632, + "learning_rate": 6.054394575482833e-07, + "loss": 0.924, + "step": 8352 + }, + { + "epoch": 
0.7533029715470984, + "grad_norm": 1.430208538561994, + "learning_rate": 6.05020755693491e-07, + "loss": 0.855, + "step": 8353 + }, + { + "epoch": 0.7533931550705686, + "grad_norm": 1.5883037785078349, + "learning_rate": 6.046021728660198e-07, + "loss": 0.8551, + "step": 8354 + }, + { + "epoch": 0.7534833385940388, + "grad_norm": 1.452624327021726, + "learning_rate": 6.041837091015858e-07, + "loss": 0.981, + "step": 8355 + }, + { + "epoch": 0.7535735221175092, + "grad_norm": 1.3298920488646064, + "learning_rate": 6.037653644358931e-07, + "loss": 0.9893, + "step": 8356 + }, + { + "epoch": 0.7536637056409794, + "grad_norm": 1.4443874388637432, + "learning_rate": 6.033471389046393e-07, + "loss": 0.9524, + "step": 8357 + }, + { + "epoch": 0.7537538891644496, + "grad_norm": 1.4461552817568544, + "learning_rate": 6.029290325435084e-07, + "loss": 0.8889, + "step": 8358 + }, + { + "epoch": 0.7538440726879199, + "grad_norm": 1.591804963073777, + "learning_rate": 6.025110453881756e-07, + "loss": 1.0006, + "step": 8359 + }, + { + "epoch": 0.7539342562113902, + "grad_norm": 1.8523585543770944, + "learning_rate": 6.020931774743061e-07, + "loss": 0.879, + "step": 8360 + }, + { + "epoch": 0.7540244397348604, + "grad_norm": 1.4494215008578588, + "learning_rate": 6.016754288375546e-07, + "loss": 0.8631, + "step": 8361 + }, + { + "epoch": 0.7541146232583307, + "grad_norm": 1.4063750317408847, + "learning_rate": 6.012577995135665e-07, + "loss": 0.9346, + "step": 8362 + }, + { + "epoch": 0.754204806781801, + "grad_norm": 1.5109991682341917, + "learning_rate": 6.008402895379743e-07, + "loss": 0.889, + "step": 8363 + }, + { + "epoch": 0.7542949903052713, + "grad_norm": 1.6679011382575204, + "learning_rate": 6.004228989464047e-07, + "loss": 1.0283, + "step": 8364 + }, + { + "epoch": 0.7543851738287415, + "grad_norm": 2.338610188613812, + "learning_rate": 6.000056277744692e-07, + "loss": 0.9597, + "step": 8365 + }, + { + "epoch": 0.7544753573522117, + "grad_norm": 1.2819943475613533, + "learning_rate": 5.995884760577745e-07, + "loss": 0.91, + "step": 8366 + }, + { + "epoch": 0.7545655408756821, + "grad_norm": 0.6880641053477564, + "learning_rate": 5.99171443831912e-07, + "loss": 0.7954, + "step": 8367 + }, + { + "epoch": 0.7546557243991523, + "grad_norm": 1.2220524768768504, + "learning_rate": 5.98754531132466e-07, + "loss": 0.9314, + "step": 8368 + }, + { + "epoch": 0.7547459079226225, + "grad_norm": 1.6900124390274123, + "learning_rate": 5.983377379950099e-07, + "loss": 0.9477, + "step": 8369 + }, + { + "epoch": 0.7548360914460928, + "grad_norm": 1.6487394661960932, + "learning_rate": 5.979210644551067e-07, + "loss": 0.8826, + "step": 8370 + }, + { + "epoch": 0.7549262749695631, + "grad_norm": 1.8750202813641066, + "learning_rate": 5.975045105483091e-07, + "loss": 0.9357, + "step": 8371 + }, + { + "epoch": 0.7550164584930333, + "grad_norm": 0.6242966747701317, + "learning_rate": 5.970880763101607e-07, + "loss": 0.8078, + "step": 8372 + }, + { + "epoch": 0.7551066420165036, + "grad_norm": 1.4048335677295423, + "learning_rate": 5.966717617761925e-07, + "loss": 0.9575, + "step": 8373 + }, + { + "epoch": 0.7551968255399738, + "grad_norm": 1.4142803214054307, + "learning_rate": 5.962555669819276e-07, + "loss": 0.92, + "step": 8374 + }, + { + "epoch": 0.7552870090634441, + "grad_norm": 1.3537579824506403, + "learning_rate": 5.958394919628777e-07, + "loss": 0.955, + "step": 8375 + }, + { + "epoch": 0.7553771925869144, + "grad_norm": 1.2388264030660607, + "learning_rate": 5.954235367545451e-07, + "loss": 0.8786, + 
"step": 8376 + }, + { + "epoch": 0.7554673761103846, + "grad_norm": 1.2893871014277234, + "learning_rate": 5.950077013924213e-07, + "loss": 0.9283, + "step": 8377 + }, + { + "epoch": 0.7555575596338548, + "grad_norm": 1.6342965276781505, + "learning_rate": 5.945919859119865e-07, + "loss": 1.0212, + "step": 8378 + }, + { + "epoch": 0.7556477431573252, + "grad_norm": 1.7292116994240962, + "learning_rate": 5.94176390348714e-07, + "loss": 0.8387, + "step": 8379 + }, + { + "epoch": 0.7557379266807954, + "grad_norm": 1.1634873370967966, + "learning_rate": 5.937609147380622e-07, + "loss": 1.0435, + "step": 8380 + }, + { + "epoch": 0.7558281102042657, + "grad_norm": 1.4321502153311543, + "learning_rate": 5.933455591154844e-07, + "loss": 0.8847, + "step": 8381 + }, + { + "epoch": 0.7559182937277359, + "grad_norm": 2.175947912329059, + "learning_rate": 5.929303235164191e-07, + "loss": 0.9091, + "step": 8382 + }, + { + "epoch": 0.7560084772512062, + "grad_norm": 1.5513820301216588, + "learning_rate": 5.92515207976297e-07, + "loss": 0.9127, + "step": 8383 + }, + { + "epoch": 0.7560986607746765, + "grad_norm": 1.4180341553118894, + "learning_rate": 5.921002125305383e-07, + "loss": 0.9251, + "step": 8384 + }, + { + "epoch": 0.7561888442981467, + "grad_norm": 1.5660079487712135, + "learning_rate": 5.916853372145525e-07, + "loss": 0.8701, + "step": 8385 + }, + { + "epoch": 0.756279027821617, + "grad_norm": 1.5360943982849833, + "learning_rate": 5.912705820637389e-07, + "loss": 0.8738, + "step": 8386 + }, + { + "epoch": 0.7563692113450873, + "grad_norm": 1.4634580848511969, + "learning_rate": 5.908559471134871e-07, + "loss": 1.005, + "step": 8387 + }, + { + "epoch": 0.7564593948685575, + "grad_norm": 1.5877916743942306, + "learning_rate": 5.904414323991764e-07, + "loss": 0.8773, + "step": 8388 + }, + { + "epoch": 0.7565495783920277, + "grad_norm": 1.6144214507954775, + "learning_rate": 5.900270379561743e-07, + "loss": 0.9474, + "step": 8389 + }, + { + "epoch": 0.7566397619154981, + "grad_norm": 1.4546310659703159, + "learning_rate": 5.896127638198399e-07, + "loss": 0.9541, + "step": 8390 + }, + { + "epoch": 0.7567299454389683, + "grad_norm": 1.7220905092934244, + "learning_rate": 5.89198610025521e-07, + "loss": 0.9518, + "step": 8391 + }, + { + "epoch": 0.7568201289624386, + "grad_norm": 1.492928525661277, + "learning_rate": 5.887845766085559e-07, + "loss": 1.0123, + "step": 8392 + }, + { + "epoch": 0.7569103124859088, + "grad_norm": 1.56905781738944, + "learning_rate": 5.883706636042722e-07, + "loss": 0.8936, + "step": 8393 + }, + { + "epoch": 0.7570004960093791, + "grad_norm": 1.8766796853656815, + "learning_rate": 5.879568710479879e-07, + "loss": 0.8848, + "step": 8394 + }, + { + "epoch": 0.7570906795328494, + "grad_norm": 1.4065487438343698, + "learning_rate": 5.875431989750078e-07, + "loss": 1.0313, + "step": 8395 + }, + { + "epoch": 0.7571808630563196, + "grad_norm": 1.2484327504780246, + "learning_rate": 5.871296474206313e-07, + "loss": 0.8679, + "step": 8396 + }, + { + "epoch": 0.7572710465797898, + "grad_norm": 1.4322185520815298, + "learning_rate": 5.867162164201427e-07, + "loss": 0.945, + "step": 8397 + }, + { + "epoch": 0.7573612301032602, + "grad_norm": 1.494377646003145, + "learning_rate": 5.863029060088205e-07, + "loss": 0.9692, + "step": 8398 + }, + { + "epoch": 0.7574514136267304, + "grad_norm": 1.5570043428491531, + "learning_rate": 5.858897162219289e-07, + "loss": 0.8336, + "step": 8399 + }, + { + "epoch": 0.7575415971502006, + "grad_norm": 1.7612067787601373, + "learning_rate": 
5.854766470947238e-07, + "loss": 0.9839, + "step": 8400 + }, + { + "epoch": 0.7576317806736709, + "grad_norm": 1.253439890807636, + "learning_rate": 5.850636986624511e-07, + "loss": 0.925, + "step": 8401 + }, + { + "epoch": 0.7577219641971412, + "grad_norm": 1.5030810978241835, + "learning_rate": 5.846508709603453e-07, + "loss": 0.9464, + "step": 8402 + }, + { + "epoch": 0.7578121477206115, + "grad_norm": 3.2672290421722305, + "learning_rate": 5.842381640236318e-07, + "loss": 0.9184, + "step": 8403 + }, + { + "epoch": 0.7579023312440817, + "grad_norm": 1.4367977583185902, + "learning_rate": 5.838255778875242e-07, + "loss": 0.9384, + "step": 8404 + }, + { + "epoch": 0.7579925147675519, + "grad_norm": 1.5668941508501018, + "learning_rate": 5.83413112587227e-07, + "loss": 0.9632, + "step": 8405 + }, + { + "epoch": 0.7580826982910223, + "grad_norm": 1.4632334899440178, + "learning_rate": 5.830007681579338e-07, + "loss": 0.9212, + "step": 8406 + }, + { + "epoch": 0.7581728818144925, + "grad_norm": 1.563082090671224, + "learning_rate": 5.825885446348284e-07, + "loss": 0.9521, + "step": 8407 + }, + { + "epoch": 0.7582630653379627, + "grad_norm": 1.4173928344422713, + "learning_rate": 5.821764420530842e-07, + "loss": 0.8857, + "step": 8408 + }, + { + "epoch": 0.7583532488614331, + "grad_norm": 1.2623270652849745, + "learning_rate": 5.817644604478633e-07, + "loss": 0.9544, + "step": 8409 + }, + { + "epoch": 0.7584434323849033, + "grad_norm": 1.4496853223892439, + "learning_rate": 5.81352599854319e-07, + "loss": 0.8764, + "step": 8410 + }, + { + "epoch": 0.7585336159083735, + "grad_norm": 1.5379259589421035, + "learning_rate": 5.809408603075938e-07, + "loss": 0.9762, + "step": 8411 + }, + { + "epoch": 0.7586237994318438, + "grad_norm": 1.5540938274233718, + "learning_rate": 5.805292418428176e-07, + "loss": 0.926, + "step": 8412 + }, + { + "epoch": 0.7587139829553141, + "grad_norm": 2.325960112829145, + "learning_rate": 5.801177444951148e-07, + "loss": 0.9249, + "step": 8413 + }, + { + "epoch": 0.7588041664787843, + "grad_norm": 1.2560414231266848, + "learning_rate": 5.797063682995944e-07, + "loss": 0.976, + "step": 8414 + }, + { + "epoch": 0.7588943500022546, + "grad_norm": 1.5209604323417711, + "learning_rate": 5.792951132913584e-07, + "loss": 1.0089, + "step": 8415 + }, + { + "epoch": 0.7589845335257248, + "grad_norm": 1.5388181076037495, + "learning_rate": 5.788839795054968e-07, + "loss": 0.9868, + "step": 8416 + }, + { + "epoch": 0.7590747170491952, + "grad_norm": 1.159728771021177, + "learning_rate": 5.784729669770898e-07, + "loss": 0.8541, + "step": 8417 + }, + { + "epoch": 0.7591649005726654, + "grad_norm": 1.3165154567438466, + "learning_rate": 5.780620757412084e-07, + "loss": 0.94, + "step": 8418 + }, + { + "epoch": 0.7592550840961356, + "grad_norm": 1.4766437165060589, + "learning_rate": 5.776513058329098e-07, + "loss": 0.9552, + "step": 8419 + }, + { + "epoch": 0.7593452676196059, + "grad_norm": 1.2934945872561847, + "learning_rate": 5.772406572872459e-07, + "loss": 0.9617, + "step": 8420 + }, + { + "epoch": 0.7594354511430762, + "grad_norm": 1.4045360292571296, + "learning_rate": 5.768301301392535e-07, + "loss": 0.9715, + "step": 8421 + }, + { + "epoch": 0.7595256346665464, + "grad_norm": 1.3863359809538964, + "learning_rate": 5.764197244239615e-07, + "loss": 0.9516, + "step": 8422 + }, + { + "epoch": 0.7596158181900167, + "grad_norm": 1.8287569567638646, + "learning_rate": 5.760094401763884e-07, + "loss": 0.9816, + "step": 8423 + }, + { + "epoch": 0.7597060017134869, + "grad_norm": 
1.6159782126865336, + "learning_rate": 5.755992774315414e-07, + "loss": 0.9172, + "step": 8424 + }, + { + "epoch": 0.7597961852369572, + "grad_norm": 1.6278326581152431, + "learning_rate": 5.751892362244183e-07, + "loss": 0.9356, + "step": 8425 + }, + { + "epoch": 0.7598863687604275, + "grad_norm": 2.056085147452663, + "learning_rate": 5.747793165900065e-07, + "loss": 0.9916, + "step": 8426 + }, + { + "epoch": 0.7599765522838977, + "grad_norm": 1.5170294949474943, + "learning_rate": 5.743695185632806e-07, + "loss": 0.9928, + "step": 8427 + }, + { + "epoch": 0.7600667358073679, + "grad_norm": 1.7087999342436189, + "learning_rate": 5.739598421792098e-07, + "loss": 0.9847, + "step": 8428 + }, + { + "epoch": 0.7601569193308383, + "grad_norm": 1.3165477371021943, + "learning_rate": 5.735502874727474e-07, + "loss": 0.9198, + "step": 8429 + }, + { + "epoch": 0.7602471028543085, + "grad_norm": 1.4479878177130363, + "learning_rate": 5.731408544788398e-07, + "loss": 1.0644, + "step": 8430 + }, + { + "epoch": 0.7603372863777788, + "grad_norm": 1.2763099803523281, + "learning_rate": 5.727315432324225e-07, + "loss": 0.8558, + "step": 8431 + }, + { + "epoch": 0.760427469901249, + "grad_norm": 1.4934055489863716, + "learning_rate": 5.723223537684196e-07, + "loss": 0.9347, + "step": 8432 + }, + { + "epoch": 0.7605176534247193, + "grad_norm": 1.5992878461482931, + "learning_rate": 5.719132861217462e-07, + "loss": 1.0133, + "step": 8433 + }, + { + "epoch": 0.7606078369481896, + "grad_norm": 1.2266762067244146, + "learning_rate": 5.715043403273044e-07, + "loss": 0.9054, + "step": 8434 + }, + { + "epoch": 0.7606980204716598, + "grad_norm": 1.8603741822156676, + "learning_rate": 5.710955164199902e-07, + "loss": 0.8997, + "step": 8435 + }, + { + "epoch": 0.7607882039951301, + "grad_norm": 1.3503622999412417, + "learning_rate": 5.706868144346841e-07, + "loss": 0.9545, + "step": 8436 + }, + { + "epoch": 0.7608783875186004, + "grad_norm": 1.3324654059007923, + "learning_rate": 5.702782344062613e-07, + "loss": 0.9316, + "step": 8437 + }, + { + "epoch": 0.7609685710420706, + "grad_norm": 1.4641791298252063, + "learning_rate": 5.698697763695826e-07, + "loss": 0.9178, + "step": 8438 + }, + { + "epoch": 0.7610587545655408, + "grad_norm": 1.3815547158212456, + "learning_rate": 5.694614403595002e-07, + "loss": 0.9539, + "step": 8439 + }, + { + "epoch": 0.7611489380890112, + "grad_norm": 1.3742470847326693, + "learning_rate": 5.690532264108554e-07, + "loss": 0.8518, + "step": 8440 + }, + { + "epoch": 0.7612391216124814, + "grad_norm": 1.456722523398884, + "learning_rate": 5.686451345584795e-07, + "loss": 0.9776, + "step": 8441 + }, + { + "epoch": 0.7613293051359517, + "grad_norm": 1.61458684449173, + "learning_rate": 5.682371648371933e-07, + "loss": 1.0324, + "step": 8442 + }, + { + "epoch": 0.7614194886594219, + "grad_norm": 1.5229854597773937, + "learning_rate": 5.678293172818074e-07, + "loss": 0.8985, + "step": 8443 + }, + { + "epoch": 0.7615096721828922, + "grad_norm": 1.6398142673410065, + "learning_rate": 5.674215919271204e-07, + "loss": 0.9565, + "step": 8444 + }, + { + "epoch": 0.7615998557063625, + "grad_norm": 1.546785448352836, + "learning_rate": 5.670139888079224e-07, + "loss": 0.8724, + "step": 8445 + }, + { + "epoch": 0.7616900392298327, + "grad_norm": 1.3097372041162036, + "learning_rate": 5.666065079589924e-07, + "loss": 0.9987, + "step": 8446 + }, + { + "epoch": 0.7617802227533029, + "grad_norm": 1.5450045133111208, + "learning_rate": 5.661991494150986e-07, + "loss": 0.8916, + "step": 8447 + }, + { + 
"epoch": 0.7618704062767733, + "grad_norm": 1.4663781026700564, + "learning_rate": 5.657919132109999e-07, + "loss": 0.9853, + "step": 8448 + }, + { + "epoch": 0.7619605898002435, + "grad_norm": 1.5218137254853774, + "learning_rate": 5.653847993814421e-07, + "loss": 0.9665, + "step": 8449 + }, + { + "epoch": 0.7620507733237137, + "grad_norm": 1.4357005757163235, + "learning_rate": 5.649778079611647e-07, + "loss": 0.9546, + "step": 8450 + }, + { + "epoch": 0.762140956847184, + "grad_norm": 1.3459457378077784, + "learning_rate": 5.645709389848923e-07, + "loss": 0.9186, + "step": 8451 + }, + { + "epoch": 0.7622311403706543, + "grad_norm": 1.5430335695196098, + "learning_rate": 5.641641924873435e-07, + "loss": 0.8943, + "step": 8452 + }, + { + "epoch": 0.7623213238941245, + "grad_norm": 1.4285902805446873, + "learning_rate": 5.637575685032217e-07, + "loss": 0.9357, + "step": 8453 + }, + { + "epoch": 0.7624115074175948, + "grad_norm": 2.4882040206446194, + "learning_rate": 5.633510670672246e-07, + "loss": 1.0165, + "step": 8454 + }, + { + "epoch": 0.762501690941065, + "grad_norm": 0.6678981185958552, + "learning_rate": 5.629446882140354e-07, + "loss": 0.8219, + "step": 8455 + }, + { + "epoch": 0.7625918744645354, + "grad_norm": 1.3016552895469218, + "learning_rate": 5.625384319783295e-07, + "loss": 0.9424, + "step": 8456 + }, + { + "epoch": 0.7626820579880056, + "grad_norm": 1.763590830887276, + "learning_rate": 5.621322983947705e-07, + "loss": 0.8894, + "step": 8457 + }, + { + "epoch": 0.7627722415114758, + "grad_norm": 1.5273441402503098, + "learning_rate": 5.617262874980122e-07, + "loss": 0.9803, + "step": 8458 + }, + { + "epoch": 0.7628624250349462, + "grad_norm": 1.3532246418621996, + "learning_rate": 5.613203993226981e-07, + "loss": 0.9847, + "step": 8459 + }, + { + "epoch": 0.7629526085584164, + "grad_norm": 1.8963334468417514, + "learning_rate": 5.609146339034599e-07, + "loss": 0.988, + "step": 8460 + }, + { + "epoch": 0.7630427920818866, + "grad_norm": 1.570827039238878, + "learning_rate": 5.605089912749199e-07, + "loss": 0.8822, + "step": 8461 + }, + { + "epoch": 0.7631329756053569, + "grad_norm": 1.8182982076975407, + "learning_rate": 5.601034714716901e-07, + "loss": 0.8796, + "step": 8462 + }, + { + "epoch": 0.7632231591288272, + "grad_norm": 1.2323723010749332, + "learning_rate": 5.59698074528372e-07, + "loss": 0.9126, + "step": 8463 + }, + { + "epoch": 0.7633133426522974, + "grad_norm": 1.3271306972146033, + "learning_rate": 5.592928004795555e-07, + "loss": 0.9135, + "step": 8464 + }, + { + "epoch": 0.7634035261757677, + "grad_norm": 1.3538964906793114, + "learning_rate": 5.58887649359822e-07, + "loss": 0.9423, + "step": 8465 + }, + { + "epoch": 0.7634937096992379, + "grad_norm": 1.407908796431615, + "learning_rate": 5.584826212037393e-07, + "loss": 0.8022, + "step": 8466 + }, + { + "epoch": 0.7635838932227083, + "grad_norm": 1.2163124160982193, + "learning_rate": 5.580777160458689e-07, + "loss": 0.8534, + "step": 8467 + }, + { + "epoch": 0.7636740767461785, + "grad_norm": 1.2914661077437792, + "learning_rate": 5.576729339207574e-07, + "loss": 0.9629, + "step": 8468 + }, + { + "epoch": 0.7637642602696487, + "grad_norm": 1.1589635504486824, + "learning_rate": 5.572682748629449e-07, + "loss": 0.9924, + "step": 8469 + }, + { + "epoch": 0.763854443793119, + "grad_norm": 1.280364172285917, + "learning_rate": 5.568637389069582e-07, + "loss": 0.9235, + "step": 8470 + }, + { + "epoch": 0.7639446273165893, + "grad_norm": 1.3520046679512114, + "learning_rate": 5.564593260873145e-07, + 
"loss": 0.955, + "step": 8471 + }, + { + "epoch": 0.7640348108400595, + "grad_norm": 1.2668127914685086, + "learning_rate": 5.560550364385206e-07, + "loss": 0.9778, + "step": 8472 + }, + { + "epoch": 0.7641249943635298, + "grad_norm": 1.433574707570013, + "learning_rate": 5.556508699950728e-07, + "loss": 1.0142, + "step": 8473 + }, + { + "epoch": 0.764215177887, + "grad_norm": 1.7677463177489, + "learning_rate": 5.552468267914577e-07, + "loss": 0.8945, + "step": 8474 + }, + { + "epoch": 0.7643053614104703, + "grad_norm": 1.6103776937739216, + "learning_rate": 5.548429068621481e-07, + "loss": 0.8533, + "step": 8475 + }, + { + "epoch": 0.7643955449339406, + "grad_norm": 0.6654388973493354, + "learning_rate": 5.544391102416115e-07, + "loss": 0.807, + "step": 8476 + }, + { + "epoch": 0.7644857284574108, + "grad_norm": 1.6987915905260398, + "learning_rate": 5.540354369643003e-07, + "loss": 0.9976, + "step": 8477 + }, + { + "epoch": 0.764575911980881, + "grad_norm": 1.9243486194097077, + "learning_rate": 5.536318870646586e-07, + "loss": 0.801, + "step": 8478 + }, + { + "epoch": 0.7646660955043514, + "grad_norm": 1.744585858253539, + "learning_rate": 5.532284605771194e-07, + "loss": 0.842, + "step": 8479 + }, + { + "epoch": 0.7647562790278216, + "grad_norm": 1.2992087817426146, + "learning_rate": 5.528251575361052e-07, + "loss": 0.9056, + "step": 8480 + }, + { + "epoch": 0.7648464625512919, + "grad_norm": 1.274783634077985, + "learning_rate": 5.524219779760284e-07, + "loss": 0.9296, + "step": 8481 + }, + { + "epoch": 0.7649366460747622, + "grad_norm": 1.5222005664293605, + "learning_rate": 5.520189219312907e-07, + "loss": 0.9642, + "step": 8482 + }, + { + "epoch": 0.7650268295982324, + "grad_norm": 2.015112641738129, + "learning_rate": 5.516159894362817e-07, + "loss": 0.8203, + "step": 8483 + }, + { + "epoch": 0.7651170131217027, + "grad_norm": 1.3615077596661147, + "learning_rate": 5.512131805253839e-07, + "loss": 0.8782, + "step": 8484 + }, + { + "epoch": 0.7652071966451729, + "grad_norm": 1.3021520316756823, + "learning_rate": 5.508104952329653e-07, + "loss": 0.9513, + "step": 8485 + }, + { + "epoch": 0.7652973801686432, + "grad_norm": 1.8075194494696818, + "learning_rate": 5.504079335933862e-07, + "loss": 0.9636, + "step": 8486 + }, + { + "epoch": 0.7653875636921135, + "grad_norm": 1.4868142414547048, + "learning_rate": 5.500054956409952e-07, + "loss": 0.969, + "step": 8487 + }, + { + "epoch": 0.7654777472155837, + "grad_norm": 1.452003456374329, + "learning_rate": 5.496031814101303e-07, + "loss": 0.8962, + "step": 8488 + }, + { + "epoch": 0.7655679307390539, + "grad_norm": 1.3956839021244578, + "learning_rate": 5.492009909351203e-07, + "loss": 0.9482, + "step": 8489 + }, + { + "epoch": 0.7656581142625243, + "grad_norm": 1.583130631103621, + "learning_rate": 5.4879892425028e-07, + "loss": 0.8621, + "step": 8490 + }, + { + "epoch": 0.7657482977859945, + "grad_norm": 1.3323911477121533, + "learning_rate": 5.483969813899184e-07, + "loss": 1.0709, + "step": 8491 + }, + { + "epoch": 0.7658384813094647, + "grad_norm": 1.2609973179503557, + "learning_rate": 5.479951623883299e-07, + "loss": 1.0129, + "step": 8492 + }, + { + "epoch": 0.765928664832935, + "grad_norm": 1.5756651336751772, + "learning_rate": 5.475934672798004e-07, + "loss": 0.8688, + "step": 8493 + }, + { + "epoch": 0.7660188483564053, + "grad_norm": 1.5421747796520577, + "learning_rate": 5.471918960986047e-07, + "loss": 0.9049, + "step": 8494 + }, + { + "epoch": 0.7661090318798756, + "grad_norm": 1.5784069602547963, + "learning_rate": 
5.467904488790071e-07, + "loss": 1.0322, + "step": 8495 + }, + { + "epoch": 0.7661992154033458, + "grad_norm": 1.3829879649418542, + "learning_rate": 5.463891256552615e-07, + "loss": 0.944, + "step": 8496 + }, + { + "epoch": 0.766289398926816, + "grad_norm": 1.667762261145616, + "learning_rate": 5.459879264616107e-07, + "loss": 0.8482, + "step": 8497 + }, + { + "epoch": 0.7663795824502864, + "grad_norm": 1.6539296144502837, + "learning_rate": 5.455868513322874e-07, + "loss": 0.8134, + "step": 8498 + }, + { + "epoch": 0.7664697659737566, + "grad_norm": 1.4704834468756867, + "learning_rate": 5.451859003015143e-07, + "loss": 0.831, + "step": 8499 + }, + { + "epoch": 0.7665599494972268, + "grad_norm": 1.6501068051767438, + "learning_rate": 5.447850734035009e-07, + "loss": 0.9767, + "step": 8500 + }, + { + "epoch": 0.7666501330206971, + "grad_norm": 1.4719341744030658, + "learning_rate": 5.443843706724494e-07, + "loss": 0.9288, + "step": 8501 + }, + { + "epoch": 0.7667403165441674, + "grad_norm": 1.513893239231839, + "learning_rate": 5.439837921425494e-07, + "loss": 0.867, + "step": 8502 + }, + { + "epoch": 0.7668305000676376, + "grad_norm": 1.2314888252962464, + "learning_rate": 5.435833378479807e-07, + "loss": 0.925, + "step": 8503 + }, + { + "epoch": 0.7669206835911079, + "grad_norm": 1.4316720165438424, + "learning_rate": 5.431830078229128e-07, + "loss": 0.7995, + "step": 8504 + }, + { + "epoch": 0.7670108671145782, + "grad_norm": 1.304892198414385, + "learning_rate": 5.427828021015022e-07, + "loss": 0.9536, + "step": 8505 + }, + { + "epoch": 0.7671010506380485, + "grad_norm": 1.3530874304368243, + "learning_rate": 5.42382720717899e-07, + "loss": 0.9676, + "step": 8506 + }, + { + "epoch": 0.7671912341615187, + "grad_norm": 1.322782024029777, + "learning_rate": 5.419827637062384e-07, + "loss": 0.9647, + "step": 8507 + }, + { + "epoch": 0.7672814176849889, + "grad_norm": 1.6009235130829396, + "learning_rate": 5.415829311006487e-07, + "loss": 1.0847, + "step": 8508 + }, + { + "epoch": 0.7673716012084593, + "grad_norm": 1.4804081967332787, + "learning_rate": 5.411832229352447e-07, + "loss": 1.0121, + "step": 8509 + }, + { + "epoch": 0.7674617847319295, + "grad_norm": 1.4368769912739114, + "learning_rate": 5.407836392441319e-07, + "loss": 1.0259, + "step": 8510 + }, + { + "epoch": 0.7675519682553997, + "grad_norm": 1.321881417333298, + "learning_rate": 5.403841800614049e-07, + "loss": 0.9551, + "step": 8511 + }, + { + "epoch": 0.76764215177887, + "grad_norm": 1.1910419344850303, + "learning_rate": 5.39984845421148e-07, + "loss": 0.8902, + "step": 8512 + }, + { + "epoch": 0.7677323353023403, + "grad_norm": 1.548908015111331, + "learning_rate": 5.395856353574344e-07, + "loss": 0.917, + "step": 8513 + }, + { + "epoch": 0.7678225188258105, + "grad_norm": 0.642558459876711, + "learning_rate": 5.391865499043275e-07, + "loss": 0.8066, + "step": 8514 + }, + { + "epoch": 0.7679127023492808, + "grad_norm": 1.2657506432980872, + "learning_rate": 5.387875890958788e-07, + "loss": 0.939, + "step": 8515 + }, + { + "epoch": 0.768002885872751, + "grad_norm": 1.2389032865313967, + "learning_rate": 5.383887529661298e-07, + "loss": 0.9633, + "step": 8516 + }, + { + "epoch": 0.7680930693962214, + "grad_norm": 1.596802201076703, + "learning_rate": 5.379900415491116e-07, + "loss": 0.8926, + "step": 8517 + }, + { + "epoch": 0.7681832529196916, + "grad_norm": 1.3862903201789658, + "learning_rate": 5.375914548788447e-07, + "loss": 0.8888, + "step": 8518 + }, + { + "epoch": 0.7682734364431618, + "grad_norm": 
1.4705536501688412, + "learning_rate": 5.371929929893384e-07, + "loss": 0.82, + "step": 8519 + }, + { + "epoch": 0.768363619966632, + "grad_norm": 1.4379746026779392, + "learning_rate": 5.367946559145917e-07, + "loss": 0.9025, + "step": 8520 + }, + { + "epoch": 0.7684538034901024, + "grad_norm": 1.2866088171704386, + "learning_rate": 5.363964436885935e-07, + "loss": 0.9632, + "step": 8521 + }, + { + "epoch": 0.7685439870135726, + "grad_norm": 1.213655485564955, + "learning_rate": 5.359983563453199e-07, + "loss": 1.0187, + "step": 8522 + }, + { + "epoch": 0.7686341705370429, + "grad_norm": 1.720621096228658, + "learning_rate": 5.356003939187402e-07, + "loss": 1.0543, + "step": 8523 + }, + { + "epoch": 0.7687243540605131, + "grad_norm": 1.6256361229797238, + "learning_rate": 5.352025564428082e-07, + "loss": 0.9144, + "step": 8524 + }, + { + "epoch": 0.7688145375839834, + "grad_norm": 1.4856878799014974, + "learning_rate": 5.348048439514723e-07, + "loss": 0.9815, + "step": 8525 + }, + { + "epoch": 0.7689047211074537, + "grad_norm": 1.569359409462965, + "learning_rate": 5.344072564786653e-07, + "loss": 0.8278, + "step": 8526 + }, + { + "epoch": 0.7689949046309239, + "grad_norm": 1.3213712070415342, + "learning_rate": 5.340097940583123e-07, + "loss": 0.948, + "step": 8527 + }, + { + "epoch": 0.7690850881543942, + "grad_norm": 1.6750692353244665, + "learning_rate": 5.336124567243275e-07, + "loss": 0.8981, + "step": 8528 + }, + { + "epoch": 0.7691752716778645, + "grad_norm": 2.5387592780179875, + "learning_rate": 5.33215244510613e-07, + "loss": 0.9498, + "step": 8529 + }, + { + "epoch": 0.7692654552013347, + "grad_norm": 1.567887516701982, + "learning_rate": 5.328181574510624e-07, + "loss": 0.9396, + "step": 8530 + }, + { + "epoch": 0.769355638724805, + "grad_norm": 1.4234164758798291, + "learning_rate": 5.324211955795559e-07, + "loss": 0.9899, + "step": 8531 + }, + { + "epoch": 0.7694458222482753, + "grad_norm": 1.578948835112911, + "learning_rate": 5.320243589299651e-07, + "loss": 0.8669, + "step": 8532 + }, + { + "epoch": 0.7695360057717455, + "grad_norm": 1.4862751092867126, + "learning_rate": 5.316276475361505e-07, + "loss": 0.8998, + "step": 8533 + }, + { + "epoch": 0.7696261892952158, + "grad_norm": 2.0244152402161313, + "learning_rate": 5.312310614319613e-07, + "loss": 0.9732, + "step": 8534 + }, + { + "epoch": 0.769716372818686, + "grad_norm": 1.6727737435380532, + "learning_rate": 5.308346006512367e-07, + "loss": 0.9075, + "step": 8535 + }, + { + "epoch": 0.7698065563421563, + "grad_norm": 1.4567672858560425, + "learning_rate": 5.30438265227805e-07, + "loss": 0.8635, + "step": 8536 + }, + { + "epoch": 0.7698967398656266, + "grad_norm": 0.6661479963834678, + "learning_rate": 5.300420551954837e-07, + "loss": 0.7953, + "step": 8537 + }, + { + "epoch": 0.7699869233890968, + "grad_norm": 1.2682510729183063, + "learning_rate": 5.296459705880798e-07, + "loss": 0.8648, + "step": 8538 + }, + { + "epoch": 0.770077106912567, + "grad_norm": 1.29220618496247, + "learning_rate": 5.292500114393881e-07, + "loss": 0.9897, + "step": 8539 + }, + { + "epoch": 0.7701672904360374, + "grad_norm": 2.1062942047461037, + "learning_rate": 5.288541777831963e-07, + "loss": 0.9587, + "step": 8540 + }, + { + "epoch": 0.7702574739595076, + "grad_norm": 1.2821443274247057, + "learning_rate": 5.284584696532772e-07, + "loss": 0.9902, + "step": 8541 + }, + { + "epoch": 0.7703476574829778, + "grad_norm": 1.3221446310156413, + "learning_rate": 5.280628870833954e-07, + "loss": 0.9412, + "step": 8542 + }, + { + "epoch": 
0.7704378410064481, + "grad_norm": 1.7747783549368135, + "learning_rate": 5.276674301073045e-07, + "loss": 0.9712, + "step": 8543 + }, + { + "epoch": 0.7705280245299184, + "grad_norm": 1.2256889391167003, + "learning_rate": 5.272720987587467e-07, + "loss": 0.9462, + "step": 8544 + }, + { + "epoch": 0.7706182080533887, + "grad_norm": 1.7370450199422445, + "learning_rate": 5.268768930714545e-07, + "loss": 0.9009, + "step": 8545 + }, + { + "epoch": 0.7707083915768589, + "grad_norm": 1.4980135320859496, + "learning_rate": 5.264818130791473e-07, + "loss": 0.9714, + "step": 8546 + }, + { + "epoch": 0.7707985751003291, + "grad_norm": 1.7441543629821845, + "learning_rate": 5.260868588155378e-07, + "loss": 0.9412, + "step": 8547 + }, + { + "epoch": 0.7708887586237995, + "grad_norm": 1.2293757352453456, + "learning_rate": 5.256920303143242e-07, + "loss": 0.9642, + "step": 8548 + }, + { + "epoch": 0.7709789421472697, + "grad_norm": 1.5430728152950568, + "learning_rate": 5.252973276091956e-07, + "loss": 1.0452, + "step": 8549 + }, + { + "epoch": 0.7710691256707399, + "grad_norm": 1.3472776461645897, + "learning_rate": 5.249027507338307e-07, + "loss": 1.0028, + "step": 8550 + }, + { + "epoch": 0.7711593091942102, + "grad_norm": 1.2385237302943852, + "learning_rate": 5.245082997218966e-07, + "loss": 0.9103, + "step": 8551 + }, + { + "epoch": 0.7712494927176805, + "grad_norm": 1.734358916336953, + "learning_rate": 5.241139746070499e-07, + "loss": 0.9643, + "step": 8552 + }, + { + "epoch": 0.7713396762411507, + "grad_norm": 1.4118356610914187, + "learning_rate": 5.237197754229376e-07, + "loss": 0.8217, + "step": 8553 + }, + { + "epoch": 0.771429859764621, + "grad_norm": 1.342594981039171, + "learning_rate": 5.233257022031931e-07, + "loss": 0.8457, + "step": 8554 + }, + { + "epoch": 0.7715200432880913, + "grad_norm": 1.3218497181451963, + "learning_rate": 5.229317549814432e-07, + "loss": 0.9436, + "step": 8555 + }, + { + "epoch": 0.7716102268115616, + "grad_norm": 1.5878237326633304, + "learning_rate": 5.225379337912998e-07, + "loss": 1.02, + "step": 8556 + }, + { + "epoch": 0.7717004103350318, + "grad_norm": 1.3913109191416704, + "learning_rate": 5.221442386663663e-07, + "loss": 0.9506, + "step": 8557 + }, + { + "epoch": 0.771790593858502, + "grad_norm": 1.2793059137622005, + "learning_rate": 5.217506696402354e-07, + "loss": 0.9827, + "step": 8558 + }, + { + "epoch": 0.7718807773819724, + "grad_norm": 1.6797222754848522, + "learning_rate": 5.213572267464883e-07, + "loss": 0.95, + "step": 8559 + }, + { + "epoch": 0.7719709609054426, + "grad_norm": 1.4598494777426216, + "learning_rate": 5.209639100186965e-07, + "loss": 0.9227, + "step": 8560 + }, + { + "epoch": 0.7720611444289128, + "grad_norm": 0.7878293892587039, + "learning_rate": 5.205707194904179e-07, + "loss": 0.8705, + "step": 8561 + }, + { + "epoch": 0.7721513279523831, + "grad_norm": 1.6965244725016846, + "learning_rate": 5.201776551952042e-07, + "loss": 0.8681, + "step": 8562 + }, + { + "epoch": 0.7722415114758534, + "grad_norm": 1.9283574883814532, + "learning_rate": 5.197847171665914e-07, + "loss": 0.9, + "step": 8563 + }, + { + "epoch": 0.7723316949993236, + "grad_norm": 1.5881014187806168, + "learning_rate": 5.193919054381095e-07, + "loss": 0.9214, + "step": 8564 + }, + { + "epoch": 0.7724218785227939, + "grad_norm": 1.5081634903575478, + "learning_rate": 5.189992200432738e-07, + "loss": 0.9079, + "step": 8565 + }, + { + "epoch": 0.7725120620462641, + "grad_norm": 2.4906645040597994, + "learning_rate": 5.186066610155906e-07, + "loss": 0.9467, 
+ "step": 8566 + }, + { + "epoch": 0.7726022455697344, + "grad_norm": 1.4567802969886907, + "learning_rate": 5.182142283885555e-07, + "loss": 0.9892, + "step": 8567 + }, + { + "epoch": 0.7726924290932047, + "grad_norm": 1.1697150606439284, + "learning_rate": 5.178219221956528e-07, + "loss": 0.7776, + "step": 8568 + }, + { + "epoch": 0.7727826126166749, + "grad_norm": 1.633034795933514, + "learning_rate": 5.174297424703565e-07, + "loss": 0.9559, + "step": 8569 + }, + { + "epoch": 0.7728727961401451, + "grad_norm": 1.502394592726161, + "learning_rate": 5.170376892461299e-07, + "loss": 0.8756, + "step": 8570 + }, + { + "epoch": 0.7729629796636155, + "grad_norm": 0.7265092563603932, + "learning_rate": 5.16645762556424e-07, + "loss": 0.8566, + "step": 8571 + }, + { + "epoch": 0.7730531631870857, + "grad_norm": 1.3213428336942965, + "learning_rate": 5.162539624346809e-07, + "loss": 0.9881, + "step": 8572 + }, + { + "epoch": 0.773143346710556, + "grad_norm": 1.4162290215011706, + "learning_rate": 5.158622889143309e-07, + "loss": 0.9969, + "step": 8573 + }, + { + "epoch": 0.7732335302340262, + "grad_norm": 1.5635868107502688, + "learning_rate": 5.154707420287939e-07, + "loss": 0.9002, + "step": 8574 + }, + { + "epoch": 0.7733237137574965, + "grad_norm": 1.719068393693705, + "learning_rate": 5.150793218114793e-07, + "loss": 0.8679, + "step": 8575 + }, + { + "epoch": 0.7734138972809668, + "grad_norm": 1.3800754507143103, + "learning_rate": 5.146880282957837e-07, + "loss": 0.9272, + "step": 8576 + }, + { + "epoch": 0.773504080804437, + "grad_norm": 1.478467489489511, + "learning_rate": 5.142968615150964e-07, + "loss": 0.9338, + "step": 8577 + }, + { + "epoch": 0.7735942643279073, + "grad_norm": 1.6745410945311434, + "learning_rate": 5.139058215027921e-07, + "loss": 0.8721, + "step": 8578 + }, + { + "epoch": 0.7736844478513776, + "grad_norm": 1.2045237844327106, + "learning_rate": 5.135149082922383e-07, + "loss": 0.9279, + "step": 8579 + }, + { + "epoch": 0.7737746313748478, + "grad_norm": 1.55197059512057, + "learning_rate": 5.131241219167879e-07, + "loss": 0.8759, + "step": 8580 + }, + { + "epoch": 0.773864814898318, + "grad_norm": 0.7902209776243798, + "learning_rate": 5.127334624097869e-07, + "loss": 0.8391, + "step": 8581 + }, + { + "epoch": 0.7739549984217884, + "grad_norm": 1.4090766895533193, + "learning_rate": 5.123429298045672e-07, + "loss": 0.9956, + "step": 8582 + }, + { + "epoch": 0.7740451819452586, + "grad_norm": 1.4950309942853697, + "learning_rate": 5.119525241344515e-07, + "loss": 0.8314, + "step": 8583 + }, + { + "epoch": 0.7741353654687289, + "grad_norm": 2.3223319465272056, + "learning_rate": 5.115622454327515e-07, + "loss": 0.9498, + "step": 8584 + }, + { + "epoch": 0.7742255489921991, + "grad_norm": 1.5943781045115597, + "learning_rate": 5.11172093732768e-07, + "loss": 0.922, + "step": 8585 + }, + { + "epoch": 0.7743157325156694, + "grad_norm": 1.5030866495105628, + "learning_rate": 5.107820690677911e-07, + "loss": 0.9643, + "step": 8586 + }, + { + "epoch": 0.7744059160391397, + "grad_norm": 1.9275487870351704, + "learning_rate": 5.103921714710991e-07, + "loss": 0.8821, + "step": 8587 + }, + { + "epoch": 0.7744960995626099, + "grad_norm": 1.4897200870650753, + "learning_rate": 5.100024009759605e-07, + "loss": 0.9556, + "step": 8588 + }, + { + "epoch": 0.7745862830860801, + "grad_norm": 1.7140341131316505, + "learning_rate": 5.09612757615633e-07, + "loss": 0.9663, + "step": 8589 + }, + { + "epoch": 0.7746764666095505, + "grad_norm": 1.3899874565189754, + "learning_rate": 
5.092232414233628e-07, + "loss": 0.8903, + "step": 8590 + }, + { + "epoch": 0.7747666501330207, + "grad_norm": 1.4756576752912567, + "learning_rate": 5.088338524323858e-07, + "loss": 0.9063, + "step": 8591 + }, + { + "epoch": 0.7748568336564909, + "grad_norm": 1.6897570033565348, + "learning_rate": 5.084445906759271e-07, + "loss": 0.9985, + "step": 8592 + }, + { + "epoch": 0.7749470171799612, + "grad_norm": 1.5361930316391046, + "learning_rate": 5.080554561871995e-07, + "loss": 1.0049, + "step": 8593 + }, + { + "epoch": 0.7750372007034315, + "grad_norm": 1.4087853359065763, + "learning_rate": 5.076664489994078e-07, + "loss": 0.9634, + "step": 8594 + }, + { + "epoch": 0.7751273842269017, + "grad_norm": 1.52633797781752, + "learning_rate": 5.07277569145742e-07, + "loss": 0.9109, + "step": 8595 + }, + { + "epoch": 0.775217567750372, + "grad_norm": 1.400693554747402, + "learning_rate": 5.068888166593861e-07, + "loss": 0.8582, + "step": 8596 + }, + { + "epoch": 0.7753077512738422, + "grad_norm": 1.5161274342636506, + "learning_rate": 5.065001915735087e-07, + "loss": 0.925, + "step": 8597 + }, + { + "epoch": 0.7753979347973126, + "grad_norm": 1.2928118653788034, + "learning_rate": 5.061116939212702e-07, + "loss": 0.9044, + "step": 8598 + }, + { + "epoch": 0.7754881183207828, + "grad_norm": 1.52436080732069, + "learning_rate": 5.05723323735819e-07, + "loss": 0.9355, + "step": 8599 + }, + { + "epoch": 0.775578301844253, + "grad_norm": 0.6420196842981697, + "learning_rate": 5.053350810502932e-07, + "loss": 0.8085, + "step": 8600 + }, + { + "epoch": 0.7756684853677234, + "grad_norm": 1.8304574546839931, + "learning_rate": 5.049469658978202e-07, + "loss": 0.9308, + "step": 8601 + }, + { + "epoch": 0.7757586688911936, + "grad_norm": 0.833684255304106, + "learning_rate": 5.045589783115147e-07, + "loss": 0.7555, + "step": 8602 + }, + { + "epoch": 0.7758488524146638, + "grad_norm": 1.5732132419454676, + "learning_rate": 5.041711183244842e-07, + "loss": 0.9745, + "step": 8603 + }, + { + "epoch": 0.7759390359381341, + "grad_norm": 1.5989558896057474, + "learning_rate": 5.037833859698211e-07, + "loss": 0.9612, + "step": 8604 + }, + { + "epoch": 0.7760292194616044, + "grad_norm": 1.5334581296694145, + "learning_rate": 5.033957812806096e-07, + "loss": 1.004, + "step": 8605 + }, + { + "epoch": 0.7761194029850746, + "grad_norm": 1.607378860584099, + "learning_rate": 5.030083042899223e-07, + "loss": 0.9903, + "step": 8606 + }, + { + "epoch": 0.7762095865085449, + "grad_norm": 1.5810508421673821, + "learning_rate": 5.026209550308207e-07, + "loss": 0.9423, + "step": 8607 + }, + { + "epoch": 0.7762997700320151, + "grad_norm": 1.8269885842938078, + "learning_rate": 5.022337335363558e-07, + "loss": 0.9881, + "step": 8608 + }, + { + "epoch": 0.7763899535554855, + "grad_norm": 1.526393741271745, + "learning_rate": 5.018466398395677e-07, + "loss": 0.959, + "step": 8609 + }, + { + "epoch": 0.7764801370789557, + "grad_norm": 1.2935220047561349, + "learning_rate": 5.01459673973484e-07, + "loss": 0.8719, + "step": 8610 + }, + { + "epoch": 0.7765703206024259, + "grad_norm": 2.037052142764767, + "learning_rate": 5.01072835971125e-07, + "loss": 1.0029, + "step": 8611 + }, + { + "epoch": 0.7766605041258962, + "grad_norm": 1.8033470030364638, + "learning_rate": 5.006861258654959e-07, + "loss": 0.8417, + "step": 8612 + }, + { + "epoch": 0.7767506876493665, + "grad_norm": 1.4545886144411975, + "learning_rate": 5.002995436895938e-07, + "loss": 0.9697, + "step": 8613 + }, + { + "epoch": 0.7768408711728367, + "grad_norm": 
1.3038194246908708, + "learning_rate": 4.999130894764039e-07, + "loss": 0.8956, + "step": 8614 + }, + { + "epoch": 0.776931054696307, + "grad_norm": 1.431461421893082, + "learning_rate": 4.995267632589006e-07, + "loss": 0.8779, + "step": 8615 + }, + { + "epoch": 0.7770212382197772, + "grad_norm": 0.7891073308616505, + "learning_rate": 4.99140565070048e-07, + "loss": 0.8353, + "step": 8616 + }, + { + "epoch": 0.7771114217432475, + "grad_norm": 1.2992791102753904, + "learning_rate": 4.987544949427969e-07, + "loss": 0.9678, + "step": 8617 + }, + { + "epoch": 0.7772016052667178, + "grad_norm": 0.6667388166684771, + "learning_rate": 4.98368552910091e-07, + "loss": 0.8061, + "step": 8618 + }, + { + "epoch": 0.777291788790188, + "grad_norm": 2.2726836642503163, + "learning_rate": 4.979827390048596e-07, + "loss": 1.046, + "step": 8619 + }, + { + "epoch": 0.7773819723136582, + "grad_norm": 1.257688528076322, + "learning_rate": 4.975970532600231e-07, + "loss": 0.9057, + "step": 8620 + }, + { + "epoch": 0.7774721558371286, + "grad_norm": 2.0113404149702205, + "learning_rate": 4.972114957084901e-07, + "loss": 0.9569, + "step": 8621 + }, + { + "epoch": 0.7775623393605988, + "grad_norm": 1.2434844917339587, + "learning_rate": 4.968260663831585e-07, + "loss": 0.9172, + "step": 8622 + }, + { + "epoch": 0.777652522884069, + "grad_norm": 1.2393332744736543, + "learning_rate": 4.964407653169154e-07, + "loss": 1.0434, + "step": 8623 + }, + { + "epoch": 0.7777427064075394, + "grad_norm": 1.3079854433842069, + "learning_rate": 4.960555925426366e-07, + "loss": 0.9159, + "step": 8624 + }, + { + "epoch": 0.7778328899310096, + "grad_norm": 1.6913493016899699, + "learning_rate": 4.956705480931876e-07, + "loss": 0.9985, + "step": 8625 + }, + { + "epoch": 0.7779230734544799, + "grad_norm": 1.3361283550793122, + "learning_rate": 4.952856320014225e-07, + "loss": 0.9164, + "step": 8626 + }, + { + "epoch": 0.7780132569779501, + "grad_norm": 1.2009302407967721, + "learning_rate": 4.949008443001838e-07, + "loss": 0.8773, + "step": 8627 + }, + { + "epoch": 0.7781034405014204, + "grad_norm": 1.4076688178745542, + "learning_rate": 4.945161850223041e-07, + "loss": 0.9454, + "step": 8628 + }, + { + "epoch": 0.7781936240248907, + "grad_norm": 1.4783057362425878, + "learning_rate": 4.941316542006044e-07, + "loss": 0.8698, + "step": 8629 + }, + { + "epoch": 0.7782838075483609, + "grad_norm": 1.222925321081538, + "learning_rate": 4.937472518678956e-07, + "loss": 0.9637, + "step": 8630 + }, + { + "epoch": 0.7783739910718311, + "grad_norm": 1.5566093030049442, + "learning_rate": 4.93362978056977e-07, + "loss": 0.9357, + "step": 8631 + }, + { + "epoch": 0.7784641745953015, + "grad_norm": 1.2767145782988922, + "learning_rate": 4.929788328006355e-07, + "loss": 0.8817, + "step": 8632 + }, + { + "epoch": 0.7785543581187717, + "grad_norm": 1.5218417686891768, + "learning_rate": 4.925948161316506e-07, + "loss": 1.0028, + "step": 8633 + }, + { + "epoch": 0.778644541642242, + "grad_norm": 1.5207381371093893, + "learning_rate": 4.922109280827868e-07, + "loss": 0.9301, + "step": 8634 + }, + { + "epoch": 0.7787347251657122, + "grad_norm": 1.4259313216985767, + "learning_rate": 4.918271686868016e-07, + "loss": 0.9334, + "step": 8635 + }, + { + "epoch": 0.7788249086891825, + "grad_norm": 1.5194812756634004, + "learning_rate": 4.914435379764379e-07, + "loss": 0.986, + "step": 8636 + }, + { + "epoch": 0.7789150922126528, + "grad_norm": 1.295586692954067, + "learning_rate": 4.910600359844294e-07, + "loss": 0.9258, + "step": 8637 + }, + { + "epoch": 
0.779005275736123, + "grad_norm": 1.5077261628252894, + "learning_rate": 4.90676662743499e-07, + "loss": 0.8511, + "step": 8638 + }, + { + "epoch": 0.7790954592595932, + "grad_norm": 1.6912576024236334, + "learning_rate": 4.902934182863581e-07, + "loss": 0.9268, + "step": 8639 + }, + { + "epoch": 0.7791856427830636, + "grad_norm": 1.8180588290492112, + "learning_rate": 4.899103026457069e-07, + "loss": 0.8125, + "step": 8640 + }, + { + "epoch": 0.7792758263065338, + "grad_norm": 1.082559339645668, + "learning_rate": 4.895273158542361e-07, + "loss": 0.7747, + "step": 8641 + }, + { + "epoch": 0.779366009830004, + "grad_norm": 1.2907719692390214, + "learning_rate": 4.891444579446227e-07, + "loss": 0.9499, + "step": 8642 + }, + { + "epoch": 0.7794561933534743, + "grad_norm": 1.501943441888081, + "learning_rate": 4.887617289495349e-07, + "loss": 0.8924, + "step": 8643 + }, + { + "epoch": 0.7795463768769446, + "grad_norm": 0.6090789467013445, + "learning_rate": 4.883791289016292e-07, + "loss": 0.727, + "step": 8644 + }, + { + "epoch": 0.7796365604004148, + "grad_norm": 1.543856826360227, + "learning_rate": 4.879966578335514e-07, + "loss": 0.9504, + "step": 8645 + }, + { + "epoch": 0.7797267439238851, + "grad_norm": 1.776191488828435, + "learning_rate": 4.876143157779358e-07, + "loss": 0.8308, + "step": 8646 + }, + { + "epoch": 0.7798169274473554, + "grad_norm": 9.104494230079279, + "learning_rate": 4.872321027674058e-07, + "loss": 0.8289, + "step": 8647 + }, + { + "epoch": 0.7799071109708257, + "grad_norm": 1.264031148363763, + "learning_rate": 4.868500188345748e-07, + "loss": 1.0152, + "step": 8648 + }, + { + "epoch": 0.7799972944942959, + "grad_norm": 1.4257280731735817, + "learning_rate": 4.864680640120425e-07, + "loss": 1.0018, + "step": 8649 + }, + { + "epoch": 0.7800874780177661, + "grad_norm": 1.7560834731028605, + "learning_rate": 4.860862383324016e-07, + "loss": 0.941, + "step": 8650 + }, + { + "epoch": 0.7801776615412365, + "grad_norm": 1.781341884987019, + "learning_rate": 4.857045418282295e-07, + "loss": 0.9261, + "step": 8651 + }, + { + "epoch": 0.7802678450647067, + "grad_norm": 1.5787939032628997, + "learning_rate": 4.853229745320966e-07, + "loss": 0.8866, + "step": 8652 + }, + { + "epoch": 0.7803580285881769, + "grad_norm": 1.385725842541466, + "learning_rate": 4.849415364765587e-07, + "loss": 0.9235, + "step": 8653 + }, + { + "epoch": 0.7804482121116472, + "grad_norm": 1.477518085298049, + "learning_rate": 4.845602276941631e-07, + "loss": 1.0053, + "step": 8654 + }, + { + "epoch": 0.7805383956351175, + "grad_norm": 1.318964117303913, + "learning_rate": 4.841790482174449e-07, + "loss": 0.9707, + "step": 8655 + }, + { + "epoch": 0.7806285791585877, + "grad_norm": 1.47825654555442, + "learning_rate": 4.837979980789282e-07, + "loss": 0.9286, + "step": 8656 + }, + { + "epoch": 0.780718762682058, + "grad_norm": 2.983632579549797, + "learning_rate": 4.834170773111273e-07, + "loss": 0.8737, + "step": 8657 + }, + { + "epoch": 0.7808089462055282, + "grad_norm": 1.4579498422574255, + "learning_rate": 4.830362859465431e-07, + "loss": 0.9562, + "step": 8658 + }, + { + "epoch": 0.7808991297289986, + "grad_norm": 1.4492539558350075, + "learning_rate": 4.826556240176675e-07, + "loss": 1.0077, + "step": 8659 + }, + { + "epoch": 0.7809893132524688, + "grad_norm": 1.5175278506794827, + "learning_rate": 4.822750915569807e-07, + "loss": 0.9561, + "step": 8660 + }, + { + "epoch": 0.781079496775939, + "grad_norm": 1.8405401446693317, + "learning_rate": 4.818946885969514e-07, + "loss": 1.0108, + "step": 
8661 + }, + { + "epoch": 0.7811696802994093, + "grad_norm": 1.5292295243789997, + "learning_rate": 4.815144151700383e-07, + "loss": 0.9779, + "step": 8662 + }, + { + "epoch": 0.7812598638228796, + "grad_norm": 1.4754860804439953, + "learning_rate": 4.811342713086885e-07, + "loss": 0.9318, + "step": 8663 + }, + { + "epoch": 0.7813500473463498, + "grad_norm": 1.7295166565312738, + "learning_rate": 4.807542570453367e-07, + "loss": 0.9753, + "step": 8664 + }, + { + "epoch": 0.7814402308698201, + "grad_norm": 1.4968903415347692, + "learning_rate": 4.803743724124098e-07, + "loss": 0.9128, + "step": 8665 + }, + { + "epoch": 0.7815304143932903, + "grad_norm": 1.4107834116122453, + "learning_rate": 4.799946174423192e-07, + "loss": 0.9412, + "step": 8666 + }, + { + "epoch": 0.7816205979167606, + "grad_norm": 0.6185095907622603, + "learning_rate": 4.796149921674706e-07, + "loss": 0.7482, + "step": 8667 + }, + { + "epoch": 0.7817107814402309, + "grad_norm": 1.4307485509928755, + "learning_rate": 4.792354966202534e-07, + "loss": 0.9022, + "step": 8668 + }, + { + "epoch": 0.7818009649637011, + "grad_norm": 1.3975507172882433, + "learning_rate": 4.788561308330489e-07, + "loss": 0.7588, + "step": 8669 + }, + { + "epoch": 0.7818911484871713, + "grad_norm": 1.9264678659578298, + "learning_rate": 4.784768948382272e-07, + "loss": 0.8857, + "step": 8670 + }, + { + "epoch": 0.7819813320106417, + "grad_norm": 1.330079066070358, + "learning_rate": 4.780977886681461e-07, + "loss": 0.9421, + "step": 8671 + }, + { + "epoch": 0.7820715155341119, + "grad_norm": 1.4367508802197178, + "learning_rate": 4.777188123551541e-07, + "loss": 0.8839, + "step": 8672 + }, + { + "epoch": 0.7821616990575821, + "grad_norm": 3.7637643609712064, + "learning_rate": 4.773399659315856e-07, + "loss": 0.9709, + "step": 8673 + }, + { + "epoch": 0.7822518825810525, + "grad_norm": 1.657040029715624, + "learning_rate": 4.769612494297681e-07, + "loss": 0.9221, + "step": 8674 + }, + { + "epoch": 0.7823420661045227, + "grad_norm": 0.6846266436289834, + "learning_rate": 4.765826628820142e-07, + "loss": 0.7993, + "step": 8675 + }, + { + "epoch": 0.782432249627993, + "grad_norm": 1.6813369813562355, + "learning_rate": 4.7620420632062775e-07, + "loss": 0.857, + "step": 8676 + }, + { + "epoch": 0.7825224331514632, + "grad_norm": 1.563603126221013, + "learning_rate": 4.758258797779002e-07, + "loss": 0.8989, + "step": 8677 + }, + { + "epoch": 0.7826126166749335, + "grad_norm": 0.6798742410480565, + "learning_rate": 4.7544768328611317e-07, + "loss": 0.8068, + "step": 8678 + }, + { + "epoch": 0.7827028001984038, + "grad_norm": 1.4348597121033675, + "learning_rate": 4.750696168775359e-07, + "loss": 0.9189, + "step": 8679 + }, + { + "epoch": 0.782792983721874, + "grad_norm": 0.6764835611151916, + "learning_rate": 4.746916805844279e-07, + "loss": 0.7787, + "step": 8680 + }, + { + "epoch": 0.7828831672453442, + "grad_norm": 1.5089087577673368, + "learning_rate": 4.743138744390356e-07, + "loss": 0.9693, + "step": 8681 + }, + { + "epoch": 0.7829733507688146, + "grad_norm": 1.437415576611884, + "learning_rate": 4.739361984735959e-07, + "loss": 1.0047, + "step": 8682 + }, + { + "epoch": 0.7830635342922848, + "grad_norm": 1.6871387660192785, + "learning_rate": 4.7355865272033455e-07, + "loss": 0.8964, + "step": 8683 + }, + { + "epoch": 0.783153717815755, + "grad_norm": 1.477876754464153, + "learning_rate": 4.7318123721146563e-07, + "loss": 0.9266, + "step": 8684 + }, + { + "epoch": 0.7832439013392253, + "grad_norm": 1.3499443643196645, + "learning_rate": 
4.728039519791924e-07, + "loss": 0.9265, + "step": 8685 + }, + { + "epoch": 0.7833340848626956, + "grad_norm": 0.6877508789425735, + "learning_rate": 4.72426797055707e-07, + "loss": 0.8274, + "step": 8686 + }, + { + "epoch": 0.7834242683861659, + "grad_norm": 1.631675141571968, + "learning_rate": 4.720497724731904e-07, + "loss": 0.9084, + "step": 8687 + }, + { + "epoch": 0.7835144519096361, + "grad_norm": 1.534907663034607, + "learning_rate": 4.7167287826381153e-07, + "loss": 0.9816, + "step": 8688 + }, + { + "epoch": 0.7836046354331063, + "grad_norm": 1.2343028566548857, + "learning_rate": 4.712961144597307e-07, + "loss": 0.9437, + "step": 8689 + }, + { + "epoch": 0.7836948189565767, + "grad_norm": 1.7505036719163019, + "learning_rate": 4.7091948109309343e-07, + "loss": 0.9908, + "step": 8690 + }, + { + "epoch": 0.7837850024800469, + "grad_norm": 1.5366992912359856, + "learning_rate": 4.705429781960384e-07, + "loss": 0.9569, + "step": 8691 + }, + { + "epoch": 0.7838751860035171, + "grad_norm": 1.6851663347772823, + "learning_rate": 4.7016660580068923e-07, + "loss": 0.9422, + "step": 8692 + }, + { + "epoch": 0.7839653695269874, + "grad_norm": 1.552352301656113, + "learning_rate": 4.6979036393916093e-07, + "loss": 0.9697, + "step": 8693 + }, + { + "epoch": 0.7840555530504577, + "grad_norm": 1.3349071790451423, + "learning_rate": 4.6941425264355603e-07, + "loss": 1.0113, + "step": 8694 + }, + { + "epoch": 0.7841457365739279, + "grad_norm": 1.268913987559719, + "learning_rate": 4.6903827194596666e-07, + "loss": 0.952, + "step": 8695 + }, + { + "epoch": 0.7842359200973982, + "grad_norm": 1.4156563559589999, + "learning_rate": 4.686624218784743e-07, + "loss": 0.9916, + "step": 8696 + }, + { + "epoch": 0.7843261036208685, + "grad_norm": 1.8041546187611572, + "learning_rate": 4.6828670247314696e-07, + "loss": 0.9317, + "step": 8697 + }, + { + "epoch": 0.7844162871443388, + "grad_norm": 1.6032281913041369, + "learning_rate": 4.679111137620442e-07, + "loss": 0.9589, + "step": 8698 + }, + { + "epoch": 0.784506470667809, + "grad_norm": 1.4170552823973952, + "learning_rate": 4.67535655777213e-07, + "loss": 0.9047, + "step": 8699 + }, + { + "epoch": 0.7845966541912792, + "grad_norm": 1.4002059750943143, + "learning_rate": 4.6716032855068956e-07, + "loss": 0.9512, + "step": 8700 + }, + { + "epoch": 0.7846868377147496, + "grad_norm": 1.7046022220753294, + "learning_rate": 4.6678513211449867e-07, + "loss": 0.9874, + "step": 8701 + }, + { + "epoch": 0.7847770212382198, + "grad_norm": 2.3704070804764426, + "learning_rate": 4.6641006650065516e-07, + "loss": 0.9043, + "step": 8702 + }, + { + "epoch": 0.78486720476169, + "grad_norm": 1.376405950817535, + "learning_rate": 4.6603513174115973e-07, + "loss": 0.9271, + "step": 8703 + }, + { + "epoch": 0.7849573882851603, + "grad_norm": 1.7377055725775765, + "learning_rate": 4.6566032786800625e-07, + "loss": 0.9532, + "step": 8704 + }, + { + "epoch": 0.7850475718086306, + "grad_norm": 1.4656142179503289, + "learning_rate": 4.6528565491317274e-07, + "loss": 0.9186, + "step": 8705 + }, + { + "epoch": 0.7851377553321008, + "grad_norm": 1.5948227656559113, + "learning_rate": 4.649111129086305e-07, + "loss": 1.0077, + "step": 8706 + }, + { + "epoch": 0.7852279388555711, + "grad_norm": 1.4056695799926668, + "learning_rate": 4.6453670188633596e-07, + "loss": 0.9564, + "step": 8707 + }, + { + "epoch": 0.7853181223790413, + "grad_norm": 1.8333045422576346, + "learning_rate": 4.641624218782365e-07, + "loss": 0.9439, + "step": 8708 + }, + { + "epoch": 0.7854083059025116, + 
"grad_norm": 1.4603743666301627, + "learning_rate": 4.6378827291626765e-07, + "loss": 0.9058, + "step": 8709 + }, + { + "epoch": 0.7854984894259819, + "grad_norm": 1.1689951287715503, + "learning_rate": 4.634142550323541e-07, + "loss": 0.9967, + "step": 8710 + }, + { + "epoch": 0.7855886729494521, + "grad_norm": 1.7140308443274253, + "learning_rate": 4.6304036825840943e-07, + "loss": 0.994, + "step": 8711 + }, + { + "epoch": 0.7856788564729223, + "grad_norm": 1.3521472349578918, + "learning_rate": 4.626666126263341e-07, + "loss": 0.8469, + "step": 8712 + }, + { + "epoch": 0.7857690399963927, + "grad_norm": 1.5960885636464317, + "learning_rate": 4.622929881680213e-07, + "loss": 0.9669, + "step": 8713 + }, + { + "epoch": 0.7858592235198629, + "grad_norm": 1.8236757568976967, + "learning_rate": 4.6191949491534887e-07, + "loss": 0.857, + "step": 8714 + }, + { + "epoch": 0.7859494070433332, + "grad_norm": 1.5234853883699246, + "learning_rate": 4.6154613290018617e-07, + "loss": 0.946, + "step": 8715 + }, + { + "epoch": 0.7860395905668034, + "grad_norm": 2.6532743054761787, + "learning_rate": 4.6117290215439043e-07, + "loss": 0.8865, + "step": 8716 + }, + { + "epoch": 0.7861297740902737, + "grad_norm": 1.522302136021283, + "learning_rate": 4.6079980270980744e-07, + "loss": 0.8139, + "step": 8717 + }, + { + "epoch": 0.786219957613744, + "grad_norm": 1.5383671772885736, + "learning_rate": 4.6042683459827245e-07, + "loss": 0.9092, + "step": 8718 + }, + { + "epoch": 0.7863101411372142, + "grad_norm": 0.7649731488595563, + "learning_rate": 4.600539978516098e-07, + "loss": 0.8088, + "step": 8719 + }, + { + "epoch": 0.7864003246606845, + "grad_norm": 1.309668438426161, + "learning_rate": 4.5968129250163004e-07, + "loss": 0.9026, + "step": 8720 + }, + { + "epoch": 0.7864905081841548, + "grad_norm": 1.5394973527836626, + "learning_rate": 4.5930871858013653e-07, + "loss": 0.8693, + "step": 8721 + }, + { + "epoch": 0.786580691707625, + "grad_norm": 0.658023119120281, + "learning_rate": 4.589362761189182e-07, + "loss": 0.8452, + "step": 8722 + }, + { + "epoch": 0.7866708752310952, + "grad_norm": 2.281401851250864, + "learning_rate": 4.585639651497539e-07, + "loss": 1.0024, + "step": 8723 + }, + { + "epoch": 0.7867610587545656, + "grad_norm": 1.6527096318462193, + "learning_rate": 4.581917857044115e-07, + "loss": 0.8974, + "step": 8724 + }, + { + "epoch": 0.7868512422780358, + "grad_norm": 1.3812485811390214, + "learning_rate": 4.5781973781464734e-07, + "loss": 0.976, + "step": 8725 + }, + { + "epoch": 0.7869414258015061, + "grad_norm": 1.5286969290603611, + "learning_rate": 4.574478215122073e-07, + "loss": 0.9742, + "step": 8726 + }, + { + "epoch": 0.7870316093249763, + "grad_norm": 1.9168611579845496, + "learning_rate": 4.5707603682882357e-07, + "loss": 0.9238, + "step": 8727 + }, + { + "epoch": 0.7871217928484466, + "grad_norm": 1.5738106096760132, + "learning_rate": 4.56704383796221e-07, + "loss": 0.8074, + "step": 8728 + }, + { + "epoch": 0.7872119763719169, + "grad_norm": 1.4153027232904862, + "learning_rate": 4.5633286244610956e-07, + "loss": 0.9724, + "step": 8729 + }, + { + "epoch": 0.7873021598953871, + "grad_norm": 1.2203920013671992, + "learning_rate": 4.5596147281018993e-07, + "loss": 1.0023, + "step": 8730 + }, + { + "epoch": 0.7873923434188573, + "grad_norm": 1.5511434982344052, + "learning_rate": 4.5559021492015137e-07, + "loss": 1.0436, + "step": 8731 + }, + { + "epoch": 0.7874825269423277, + "grad_norm": 1.3263631915831475, + "learning_rate": 4.552190888076712e-07, + "loss": 0.851, + "step": 
8732 + }, + { + "epoch": 0.7875727104657979, + "grad_norm": 1.5815591743583515, + "learning_rate": 4.548480945044164e-07, + "loss": 0.8869, + "step": 8733 + }, + { + "epoch": 0.7876628939892681, + "grad_norm": 1.3507678391434563, + "learning_rate": 4.54477232042042e-07, + "loss": 0.9327, + "step": 8734 + }, + { + "epoch": 0.7877530775127384, + "grad_norm": 1.3099484392021792, + "learning_rate": 4.541065014521921e-07, + "loss": 0.9237, + "step": 8735 + }, + { + "epoch": 0.7878432610362087, + "grad_norm": 1.9061226724093465, + "learning_rate": 4.5373590276649996e-07, + "loss": 0.889, + "step": 8736 + }, + { + "epoch": 0.787933444559679, + "grad_norm": 1.2507679487637438, + "learning_rate": 4.533654360165862e-07, + "loss": 0.9052, + "step": 8737 + }, + { + "epoch": 0.7880236280831492, + "grad_norm": 1.6933887307549547, + "learning_rate": 4.5299510123406115e-07, + "loss": 0.9976, + "step": 8738 + }, + { + "epoch": 0.7881138116066194, + "grad_norm": 1.2886452664313492, + "learning_rate": 4.5262489845052456e-07, + "loss": 0.9033, + "step": 8739 + }, + { + "epoch": 0.7882039951300898, + "grad_norm": 1.7455259761183255, + "learning_rate": 4.5225482769756353e-07, + "loss": 0.9038, + "step": 8740 + }, + { + "epoch": 0.78829417865356, + "grad_norm": 2.4221124348078034, + "learning_rate": 4.5188488900675545e-07, + "loss": 0.8703, + "step": 8741 + }, + { + "epoch": 0.7883843621770302, + "grad_norm": 1.2841704973827528, + "learning_rate": 4.5151508240966363e-07, + "loss": 0.9601, + "step": 8742 + }, + { + "epoch": 0.7884745457005006, + "grad_norm": 1.3423177049783437, + "learning_rate": 4.511454079378445e-07, + "loss": 0.9843, + "step": 8743 + }, + { + "epoch": 0.7885647292239708, + "grad_norm": 1.5792983574757682, + "learning_rate": 4.507758656228382e-07, + "loss": 0.8323, + "step": 8744 + }, + { + "epoch": 0.788654912747441, + "grad_norm": 1.2379823919717239, + "learning_rate": 4.5040645549617864e-07, + "loss": 0.9922, + "step": 8745 + }, + { + "epoch": 0.7887450962709113, + "grad_norm": 1.367836934203254, + "learning_rate": 4.5003717758938384e-07, + "loss": 0.8534, + "step": 8746 + }, + { + "epoch": 0.7888352797943816, + "grad_norm": 1.4429921262424494, + "learning_rate": 4.4966803193396365e-07, + "loss": 0.8775, + "step": 8747 + }, + { + "epoch": 0.7889254633178518, + "grad_norm": 1.4335298861796753, + "learning_rate": 4.492990185614154e-07, + "loss": 0.949, + "step": 8748 + }, + { + "epoch": 0.7890156468413221, + "grad_norm": 1.5513433019227343, + "learning_rate": 4.489301375032255e-07, + "loss": 0.9289, + "step": 8749 + }, + { + "epoch": 0.7891058303647923, + "grad_norm": 1.5632170748370597, + "learning_rate": 4.4856138879086857e-07, + "loss": 0.943, + "step": 8750 + }, + { + "epoch": 0.7891960138882627, + "grad_norm": 1.135361795528593, + "learning_rate": 4.481927724558092e-07, + "loss": 0.8913, + "step": 8751 + }, + { + "epoch": 0.7892861974117329, + "grad_norm": 1.3662569639511832, + "learning_rate": 4.478242885294985e-07, + "loss": 0.9097, + "step": 8752 + }, + { + "epoch": 0.7893763809352031, + "grad_norm": 1.3956988065809206, + "learning_rate": 4.474559370433779e-07, + "loss": 0.8939, + "step": 8753 + }, + { + "epoch": 0.7894665644586734, + "grad_norm": 1.5642490515795213, + "learning_rate": 4.470877180288777e-07, + "loss": 0.8826, + "step": 8754 + }, + { + "epoch": 0.7895567479821437, + "grad_norm": 1.2805826728792453, + "learning_rate": 4.4671963151741574e-07, + "loss": 0.9068, + "step": 8755 + }, + { + "epoch": 0.7896469315056139, + "grad_norm": 1.1334452473323342, + "learning_rate": 
4.4635167754039973e-07, + "loss": 0.9155, + "step": 8756 + }, + { + "epoch": 0.7897371150290842, + "grad_norm": 1.178702694943866, + "learning_rate": 4.459838561292253e-07, + "loss": 0.9605, + "step": 8757 + }, + { + "epoch": 0.7898272985525544, + "grad_norm": 1.6214723443688013, + "learning_rate": 4.456161673152774e-07, + "loss": 0.8574, + "step": 8758 + }, + { + "epoch": 0.7899174820760247, + "grad_norm": 1.3405959795320603, + "learning_rate": 4.4524861112992806e-07, + "loss": 0.93, + "step": 8759 + }, + { + "epoch": 0.790007665599495, + "grad_norm": 1.7060651686051116, + "learning_rate": 4.448811876045411e-07, + "loss": 0.9326, + "step": 8760 + }, + { + "epoch": 0.7900978491229652, + "grad_norm": 1.6876551945282798, + "learning_rate": 4.445138967704647e-07, + "loss": 1.0062, + "step": 8761 + }, + { + "epoch": 0.7901880326464354, + "grad_norm": 1.5137600780753235, + "learning_rate": 4.4414673865904075e-07, + "loss": 0.9116, + "step": 8762 + }, + { + "epoch": 0.7902782161699058, + "grad_norm": 1.2876114972918147, + "learning_rate": 4.437797133015955e-07, + "loss": 0.9497, + "step": 8763 + }, + { + "epoch": 0.790368399693376, + "grad_norm": 1.6896616195183907, + "learning_rate": 4.4341282072944586e-07, + "loss": 0.8545, + "step": 8764 + }, + { + "epoch": 0.7904585832168463, + "grad_norm": 1.2635639975960113, + "learning_rate": 4.430460609738973e-07, + "loss": 0.9555, + "step": 8765 + }, + { + "epoch": 0.7905487667403166, + "grad_norm": 0.7595231716194897, + "learning_rate": 4.4267943406624386e-07, + "loss": 0.846, + "step": 8766 + }, + { + "epoch": 0.7906389502637868, + "grad_norm": 2.7398586545810204, + "learning_rate": 4.4231294003776853e-07, + "loss": 1.0432, + "step": 8767 + }, + { + "epoch": 0.7907291337872571, + "grad_norm": 1.6545436634517048, + "learning_rate": 4.419465789197416e-07, + "loss": 0.9733, + "step": 8768 + }, + { + "epoch": 0.7908193173107273, + "grad_norm": 1.3747250975722232, + "learning_rate": 4.415803507434237e-07, + "loss": 0.9311, + "step": 8769 + }, + { + "epoch": 0.7909095008341976, + "grad_norm": 1.4478864689520679, + "learning_rate": 4.4121425554006307e-07, + "loss": 0.9656, + "step": 8770 + }, + { + "epoch": 0.7909996843576679, + "grad_norm": 1.5922908461164056, + "learning_rate": 4.4084829334089744e-07, + "loss": 0.9207, + "step": 8771 + }, + { + "epoch": 0.7910898678811381, + "grad_norm": 1.34516371060373, + "learning_rate": 4.404824641771525e-07, + "loss": 0.8869, + "step": 8772 + }, + { + "epoch": 0.7911800514046083, + "grad_norm": 1.2956825657232096, + "learning_rate": 4.4011676808004327e-07, + "loss": 1.0075, + "step": 8773 + }, + { + "epoch": 0.7912702349280787, + "grad_norm": 1.3398777370757675, + "learning_rate": 4.3975120508077145e-07, + "loss": 0.9466, + "step": 8774 + }, + { + "epoch": 0.7913604184515489, + "grad_norm": 1.3782503551120808, + "learning_rate": 4.39385775210531e-07, + "loss": 0.9836, + "step": 8775 + }, + { + "epoch": 0.7914506019750192, + "grad_norm": 1.9491730036747366, + "learning_rate": 4.390204785005003e-07, + "loss": 0.9576, + "step": 8776 + }, + { + "epoch": 0.7915407854984894, + "grad_norm": 1.341797363636533, + "learning_rate": 4.386553149818504e-07, + "loss": 0.9461, + "step": 8777 + }, + { + "epoch": 0.7916309690219597, + "grad_norm": 1.5164404179822837, + "learning_rate": 4.3829028468573793e-07, + "loss": 0.8079, + "step": 8778 + }, + { + "epoch": 0.79172115254543, + "grad_norm": 1.4905483969912527, + "learning_rate": 4.3792538764330935e-07, + "loss": 0.8732, + "step": 8779 + }, + { + "epoch": 0.7918113360689002, + 
"grad_norm": 1.523802258766544, + "learning_rate": 4.3756062388569994e-07, + "loss": 0.908, + "step": 8780 + }, + { + "epoch": 0.7919015195923704, + "grad_norm": 0.6459277888925888, + "learning_rate": 4.3719599344403346e-07, + "loss": 0.8037, + "step": 8781 + }, + { + "epoch": 0.7919917031158408, + "grad_norm": 1.8812816300378699, + "learning_rate": 4.3683149634942243e-07, + "loss": 0.9187, + "step": 8782 + }, + { + "epoch": 0.792081886639311, + "grad_norm": 1.461194428887491, + "learning_rate": 4.364671326329663e-07, + "loss": 0.8235, + "step": 8783 + }, + { + "epoch": 0.7921720701627812, + "grad_norm": 1.8463351988972674, + "learning_rate": 4.3610290232575673e-07, + "loss": 0.9504, + "step": 8784 + }, + { + "epoch": 0.7922622536862515, + "grad_norm": 1.409947852800442, + "learning_rate": 4.357388054588702e-07, + "loss": 0.9404, + "step": 8785 + }, + { + "epoch": 0.7923524372097218, + "grad_norm": 1.2566200432531183, + "learning_rate": 4.3537484206337405e-07, + "loss": 0.8884, + "step": 8786 + }, + { + "epoch": 0.792442620733192, + "grad_norm": 1.4679104454882494, + "learning_rate": 4.3501101217032366e-07, + "loss": 0.9649, + "step": 8787 + }, + { + "epoch": 0.7925328042566623, + "grad_norm": 1.5397261529665431, + "learning_rate": 4.346473158107629e-07, + "loss": 0.8675, + "step": 8788 + }, + { + "epoch": 0.7926229877801325, + "grad_norm": 1.543403506293719, + "learning_rate": 4.342837530157244e-07, + "loss": 0.941, + "step": 8789 + }, + { + "epoch": 0.7927131713036029, + "grad_norm": 1.6620076705351647, + "learning_rate": 4.3392032381622987e-07, + "loss": 0.8438, + "step": 8790 + }, + { + "epoch": 0.7928033548270731, + "grad_norm": 1.2692776940303443, + "learning_rate": 4.3355702824328765e-07, + "loss": 0.9052, + "step": 8791 + }, + { + "epoch": 0.7928935383505433, + "grad_norm": 1.4852485896927168, + "learning_rate": 4.3319386632789823e-07, + "loss": 0.9784, + "step": 8792 + }, + { + "epoch": 0.7929837218740137, + "grad_norm": 1.4918105363282728, + "learning_rate": 4.328308381010466e-07, + "loss": 0.9316, + "step": 8793 + }, + { + "epoch": 0.7930739053974839, + "grad_norm": 1.3902391048306992, + "learning_rate": 4.3246794359370933e-07, + "loss": 0.9443, + "step": 8794 + }, + { + "epoch": 0.7931640889209541, + "grad_norm": 1.3884517962062957, + "learning_rate": 4.3210518283685025e-07, + "loss": 0.9196, + "step": 8795 + }, + { + "epoch": 0.7932542724444244, + "grad_norm": 1.5885928619973604, + "learning_rate": 4.317425558614225e-07, + "loss": 0.9369, + "step": 8796 + }, + { + "epoch": 0.7933444559678947, + "grad_norm": 1.3525289266599003, + "learning_rate": 4.3138006269836744e-07, + "loss": 0.898, + "step": 8797 + }, + { + "epoch": 0.793434639491365, + "grad_norm": 1.2714754212704376, + "learning_rate": 4.3101770337861376e-07, + "loss": 0.9211, + "step": 8798 + }, + { + "epoch": 0.7935248230148352, + "grad_norm": 0.6511206667970655, + "learning_rate": 4.30655477933082e-07, + "loss": 0.8254, + "step": 8799 + }, + { + "epoch": 0.7936150065383054, + "grad_norm": 1.1977909796195032, + "learning_rate": 4.30293386392677e-07, + "loss": 0.9594, + "step": 8800 + }, + { + "epoch": 0.7937051900617758, + "grad_norm": 1.188356542365094, + "learning_rate": 4.299314287882967e-07, + "loss": 0.9534, + "step": 8801 + }, + { + "epoch": 0.793795373585246, + "grad_norm": 1.602326299385412, + "learning_rate": 4.2956960515082353e-07, + "loss": 0.9692, + "step": 8802 + }, + { + "epoch": 0.7938855571087162, + "grad_norm": 1.5799077605834793, + "learning_rate": 4.29207915511131e-07, + "loss": 0.8928, + "step": 
8803 + }, + { + "epoch": 0.7939757406321865, + "grad_norm": 1.4139500778779348, + "learning_rate": 4.2884635990008024e-07, + "loss": 0.9925, + "step": 8804 + }, + { + "epoch": 0.7940659241556568, + "grad_norm": 1.8747835034313054, + "learning_rate": 4.284849383485214e-07, + "loss": 0.9028, + "step": 8805 + }, + { + "epoch": 0.794156107679127, + "grad_norm": 1.666477438992872, + "learning_rate": 4.2812365088729296e-07, + "loss": 0.9573, + "step": 8806 + }, + { + "epoch": 0.7942462912025973, + "grad_norm": 1.4546840057167183, + "learning_rate": 4.2776249754722227e-07, + "loss": 0.9616, + "step": 8807 + }, + { + "epoch": 0.7943364747260675, + "grad_norm": 1.9774922231142698, + "learning_rate": 4.27401478359124e-07, + "loss": 1.001, + "step": 8808 + }, + { + "epoch": 0.7944266582495378, + "grad_norm": 1.3086878045286374, + "learning_rate": 4.2704059335380283e-07, + "loss": 1.0282, + "step": 8809 + }, + { + "epoch": 0.7945168417730081, + "grad_norm": 1.2458546089133347, + "learning_rate": 4.266798425620515e-07, + "loss": 1.0361, + "step": 8810 + }, + { + "epoch": 0.7946070252964783, + "grad_norm": 1.3926698031215918, + "learning_rate": 4.263192260146511e-07, + "loss": 0.919, + "step": 8811 + }, + { + "epoch": 0.7946972088199485, + "grad_norm": 1.5127251008304097, + "learning_rate": 4.2595874374237216e-07, + "loss": 0.9808, + "step": 8812 + }, + { + "epoch": 0.7947873923434189, + "grad_norm": 1.1756461943561929, + "learning_rate": 4.255983957759712e-07, + "loss": 0.9364, + "step": 8813 + }, + { + "epoch": 0.7948775758668891, + "grad_norm": 1.2311264460192348, + "learning_rate": 4.2523818214619745e-07, + "loss": 0.8764, + "step": 8814 + }, + { + "epoch": 0.7949677593903594, + "grad_norm": 1.3451195767933803, + "learning_rate": 4.24878102883784e-07, + "loss": 0.9848, + "step": 8815 + }, + { + "epoch": 0.7950579429138297, + "grad_norm": 1.865280518285588, + "learning_rate": 4.24518158019457e-07, + "loss": 0.9454, + "step": 8816 + }, + { + "epoch": 0.7951481264372999, + "grad_norm": 1.9492114722473417, + "learning_rate": 4.241583475839274e-07, + "loss": 0.8822, + "step": 8817 + }, + { + "epoch": 0.7952383099607702, + "grad_norm": 1.5477701544788816, + "learning_rate": 4.237986716078965e-07, + "loss": 0.9883, + "step": 8818 + }, + { + "epoch": 0.7953284934842404, + "grad_norm": 1.4921458772776, + "learning_rate": 4.2343913012205433e-07, + "loss": 0.9233, + "step": 8819 + }, + { + "epoch": 0.7954186770077107, + "grad_norm": 1.2409101912702876, + "learning_rate": 4.230797231570784e-07, + "loss": 0.9064, + "step": 8820 + }, + { + "epoch": 0.795508860531181, + "grad_norm": 1.742734404112568, + "learning_rate": 4.227204507436357e-07, + "loss": 0.9375, + "step": 8821 + }, + { + "epoch": 0.7955990440546512, + "grad_norm": 1.3996490259965908, + "learning_rate": 4.223613129123811e-07, + "loss": 0.8575, + "step": 8822 + }, + { + "epoch": 0.7956892275781214, + "grad_norm": 1.3136236739468194, + "learning_rate": 4.220023096939589e-07, + "loss": 0.8476, + "step": 8823 + }, + { + "epoch": 0.7957794111015918, + "grad_norm": 14.771923776396358, + "learning_rate": 4.21643441119e-07, + "loss": 0.9248, + "step": 8824 + }, + { + "epoch": 0.795869594625062, + "grad_norm": 0.708992193509963, + "learning_rate": 4.212847072181256e-07, + "loss": 0.8733, + "step": 8825 + }, + { + "epoch": 0.7959597781485322, + "grad_norm": 1.4914560172808162, + "learning_rate": 4.2092610802194505e-07, + "loss": 0.9082, + "step": 8826 + }, + { + "epoch": 0.7960499616720025, + "grad_norm": 1.2999117582890285, + "learning_rate": 
4.2056764356105587e-07, + "loss": 0.8444, + "step": 8827 + }, + { + "epoch": 0.7961401451954728, + "grad_norm": 1.31398775157997, + "learning_rate": 4.202093138660443e-07, + "loss": 0.9813, + "step": 8828 + }, + { + "epoch": 0.7962303287189431, + "grad_norm": 1.2467478407647385, + "learning_rate": 4.198511189674854e-07, + "loss": 0.9642, + "step": 8829 + }, + { + "epoch": 0.7963205122424133, + "grad_norm": 1.4241307568845172, + "learning_rate": 4.1949305889594066e-07, + "loss": 0.952, + "step": 8830 + }, + { + "epoch": 0.7964106957658835, + "grad_norm": 1.3491420792539728, + "learning_rate": 4.191351336819642e-07, + "loss": 0.9307, + "step": 8831 + }, + { + "epoch": 0.7965008792893539, + "grad_norm": 1.638438484385135, + "learning_rate": 4.187773433560939e-07, + "loss": 0.8673, + "step": 8832 + }, + { + "epoch": 0.7965910628128241, + "grad_norm": 1.4222727627930012, + "learning_rate": 4.184196879488604e-07, + "loss": 0.9275, + "step": 8833 + }, + { + "epoch": 0.7966812463362943, + "grad_norm": 1.5435795987250212, + "learning_rate": 4.1806216749077936e-07, + "loss": 1.0004, + "step": 8834 + }, + { + "epoch": 0.7967714298597646, + "grad_norm": 1.2268351355132927, + "learning_rate": 4.177047820123569e-07, + "loss": 0.9779, + "step": 8835 + }, + { + "epoch": 0.7968616133832349, + "grad_norm": 1.4522625404377276, + "learning_rate": 4.1734753154408733e-07, + "loss": 0.8111, + "step": 8836 + }, + { + "epoch": 0.7969517969067051, + "grad_norm": 1.4813757058414654, + "learning_rate": 4.169904161164528e-07, + "loss": 0.9193, + "step": 8837 + }, + { + "epoch": 0.7970419804301754, + "grad_norm": 2.2974767610289906, + "learning_rate": 4.1663343575992526e-07, + "loss": 1.0484, + "step": 8838 + }, + { + "epoch": 0.7971321639536457, + "grad_norm": 1.8836166753841281, + "learning_rate": 4.1627659050496275e-07, + "loss": 0.9479, + "step": 8839 + }, + { + "epoch": 0.797222347477116, + "grad_norm": 1.3740588348318967, + "learning_rate": 4.1591988038201453e-07, + "loss": 0.8571, + "step": 8840 + }, + { + "epoch": 0.7973125310005862, + "grad_norm": 1.386891743789689, + "learning_rate": 4.155633054215164e-07, + "loss": 0.9823, + "step": 8841 + }, + { + "epoch": 0.7974027145240564, + "grad_norm": 1.5340189130181663, + "learning_rate": 4.152068656538934e-07, + "loss": 0.8465, + "step": 8842 + }, + { + "epoch": 0.7974928980475268, + "grad_norm": 1.3462356377475726, + "learning_rate": 4.148505611095594e-07, + "loss": 0.9987, + "step": 8843 + }, + { + "epoch": 0.797583081570997, + "grad_norm": 1.4462226270295915, + "learning_rate": 4.1449439181891563e-07, + "loss": 0.812, + "step": 8844 + }, + { + "epoch": 0.7976732650944672, + "grad_norm": 1.4738939179092911, + "learning_rate": 4.14138357812353e-07, + "loss": 0.9515, + "step": 8845 + }, + { + "epoch": 0.7977634486179375, + "grad_norm": 1.2405520535410277, + "learning_rate": 4.137824591202506e-07, + "loss": 0.9901, + "step": 8846 + }, + { + "epoch": 0.7978536321414078, + "grad_norm": 1.75372531017018, + "learning_rate": 4.134266957729737e-07, + "loss": 0.9236, + "step": 8847 + }, + { + "epoch": 0.797943815664878, + "grad_norm": 1.377599123661303, + "learning_rate": 4.1307106780088065e-07, + "loss": 1.0324, + "step": 8848 + }, + { + "epoch": 0.7980339991883483, + "grad_norm": 1.699403669445866, + "learning_rate": 4.1271557523431387e-07, + "loss": 0.9386, + "step": 8849 + }, + { + "epoch": 0.7981241827118185, + "grad_norm": 1.3905211849251733, + "learning_rate": 4.1236021810360634e-07, + "loss": 0.9233, + "step": 8850 + }, + { + "epoch": 0.7982143662352889, + 
"grad_norm": 1.9038216178783152, + "learning_rate": 4.120049964390793e-07, + "loss": 0.8815, + "step": 8851 + }, + { + "epoch": 0.7983045497587591, + "grad_norm": 1.660239901959568, + "learning_rate": 4.116499102710418e-07, + "loss": 0.8895, + "step": 8852 + }, + { + "epoch": 0.7983947332822293, + "grad_norm": 1.5816457019591583, + "learning_rate": 4.112949596297928e-07, + "loss": 0.9118, + "step": 8853 + }, + { + "epoch": 0.7984849168056996, + "grad_norm": 1.5048771090093087, + "learning_rate": 4.1094014454561664e-07, + "loss": 0.8799, + "step": 8854 + }, + { + "epoch": 0.7985751003291699, + "grad_norm": 1.6050458290711138, + "learning_rate": 4.1058546504879057e-07, + "loss": 0.8388, + "step": 8855 + }, + { + "epoch": 0.7986652838526401, + "grad_norm": 1.5670936959547903, + "learning_rate": 4.1023092116957583e-07, + "loss": 0.9936, + "step": 8856 + }, + { + "epoch": 0.7987554673761104, + "grad_norm": 1.2546537078829387, + "learning_rate": 4.098765129382249e-07, + "loss": 0.9763, + "step": 8857 + }, + { + "epoch": 0.7988456508995806, + "grad_norm": 1.5266963435430023, + "learning_rate": 4.0952224038497764e-07, + "loss": 1.0062, + "step": 8858 + }, + { + "epoch": 0.7989358344230509, + "grad_norm": 1.474737260338271, + "learning_rate": 4.091681035400627e-07, + "loss": 0.9241, + "step": 8859 + }, + { + "epoch": 0.7990260179465212, + "grad_norm": 1.4404453289669823, + "learning_rate": 4.088141024336971e-07, + "loss": 0.9636, + "step": 8860 + }, + { + "epoch": 0.7991162014699914, + "grad_norm": 1.2042892074712073, + "learning_rate": 4.0846023709608636e-07, + "loss": 1.0114, + "step": 8861 + }, + { + "epoch": 0.7992063849934617, + "grad_norm": 1.7248470708533614, + "learning_rate": 4.081065075574226e-07, + "loss": 1.0028, + "step": 8862 + }, + { + "epoch": 0.799296568516932, + "grad_norm": 1.3764602103532562, + "learning_rate": 4.077529138478906e-07, + "loss": 0.8647, + "step": 8863 + }, + { + "epoch": 0.7993867520404022, + "grad_norm": 0.7188267044933949, + "learning_rate": 4.073994559976588e-07, + "loss": 0.7715, + "step": 8864 + }, + { + "epoch": 0.7994769355638724, + "grad_norm": 1.362743369949048, + "learning_rate": 4.0704613403688716e-07, + "loss": 0.8993, + "step": 8865 + }, + { + "epoch": 0.7995671190873428, + "grad_norm": 1.2572429149228554, + "learning_rate": 4.0669294799572264e-07, + "loss": 0.9884, + "step": 8866 + }, + { + "epoch": 0.799657302610813, + "grad_norm": 1.4731124859271467, + "learning_rate": 4.0633989790430113e-07, + "loss": 1.0353, + "step": 8867 + }, + { + "epoch": 0.7997474861342833, + "grad_norm": 1.4498615330319722, + "learning_rate": 4.059869837927477e-07, + "loss": 0.9443, + "step": 8868 + }, + { + "epoch": 0.7998376696577535, + "grad_norm": 1.5432909665289936, + "learning_rate": 4.056342056911728e-07, + "loss": 0.9284, + "step": 8869 + }, + { + "epoch": 0.7999278531812238, + "grad_norm": 1.4654061420414521, + "learning_rate": 4.052815636296798e-07, + "loss": 0.9498, + "step": 8870 + }, + { + "epoch": 0.8000180367046941, + "grad_norm": 1.2135230241688817, + "learning_rate": 4.0492905763835593e-07, + "loss": 0.9642, + "step": 8871 + }, + { + "epoch": 0.8001082202281643, + "grad_norm": 1.1435989384351715, + "learning_rate": 4.0457668774728115e-07, + "loss": 0.968, + "step": 8872 + }, + { + "epoch": 0.8001984037516345, + "grad_norm": 1.410119645338406, + "learning_rate": 4.0422445398651985e-07, + "loss": 0.9567, + "step": 8873 + }, + { + "epoch": 0.8002885872751049, + "grad_norm": 1.4299327035554463, + "learning_rate": 4.0387235638612706e-07, + "loss": 0.8567, + 
"step": 8874 + }, + { + "epoch": 0.8003787707985751, + "grad_norm": 1.7699403942438359, + "learning_rate": 4.0352039497614586e-07, + "loss": 0.942, + "step": 8875 + }, + { + "epoch": 0.8004689543220453, + "grad_norm": 1.8890848112721348, + "learning_rate": 4.031685697866074e-07, + "loss": 0.9154, + "step": 8876 + }, + { + "epoch": 0.8005591378455156, + "grad_norm": 1.2967165654852715, + "learning_rate": 4.0281688084753165e-07, + "loss": 0.9748, + "step": 8877 + }, + { + "epoch": 0.8006493213689859, + "grad_norm": 1.3482234479941113, + "learning_rate": 4.0246532818892675e-07, + "loss": 0.9873, + "step": 8878 + }, + { + "epoch": 0.8007395048924562, + "grad_norm": 3.3562145891932147, + "learning_rate": 4.0211391184078814e-07, + "loss": 0.9736, + "step": 8879 + }, + { + "epoch": 0.8008296884159264, + "grad_norm": 1.397157776136089, + "learning_rate": 4.0176263183310135e-07, + "loss": 0.9645, + "step": 8880 + }, + { + "epoch": 0.8009198719393966, + "grad_norm": 1.263716683632486, + "learning_rate": 4.0141148819583925e-07, + "loss": 0.991, + "step": 8881 + }, + { + "epoch": 0.801010055462867, + "grad_norm": 0.635705715369659, + "learning_rate": 4.010604809589637e-07, + "loss": 0.8023, + "step": 8882 + }, + { + "epoch": 0.8011002389863372, + "grad_norm": 1.5050534319343267, + "learning_rate": 4.0070961015242475e-07, + "loss": 0.9489, + "step": 8883 + }, + { + "epoch": 0.8011904225098074, + "grad_norm": 1.2665753505232769, + "learning_rate": 4.0035887580615933e-07, + "loss": 1.0098, + "step": 8884 + }, + { + "epoch": 0.8012806060332777, + "grad_norm": 1.378174411986424, + "learning_rate": 4.0000827795009594e-07, + "loss": 0.9915, + "step": 8885 + }, + { + "epoch": 0.801370789556748, + "grad_norm": 1.6871581967658833, + "learning_rate": 3.996578166141475e-07, + "loss": 0.8992, + "step": 8886 + }, + { + "epoch": 0.8014609730802182, + "grad_norm": 1.7091997632127165, + "learning_rate": 3.9930749182821955e-07, + "loss": 0.9438, + "step": 8887 + }, + { + "epoch": 0.8015511566036885, + "grad_norm": 0.6344158676212914, + "learning_rate": 3.9895730362220116e-07, + "loss": 0.8068, + "step": 8888 + }, + { + "epoch": 0.8016413401271588, + "grad_norm": 1.7177959915597587, + "learning_rate": 3.986072520259749e-07, + "loss": 0.9747, + "step": 8889 + }, + { + "epoch": 0.801731523650629, + "grad_norm": 1.420116385002051, + "learning_rate": 3.9825733706940736e-07, + "loss": 0.9253, + "step": 8890 + }, + { + "epoch": 0.8018217071740993, + "grad_norm": 1.7761001428130159, + "learning_rate": 3.979075587823557e-07, + "loss": 0.9532, + "step": 8891 + }, + { + "epoch": 0.8019118906975695, + "grad_norm": 1.687617757008785, + "learning_rate": 3.9755791719466504e-07, + "loss": 0.8187, + "step": 8892 + }, + { + "epoch": 0.8020020742210399, + "grad_norm": 2.096833421093099, + "learning_rate": 3.9720841233616875e-07, + "loss": 0.8932, + "step": 8893 + }, + { + "epoch": 0.8020922577445101, + "grad_norm": 1.541628871488802, + "learning_rate": 3.968590442366888e-07, + "loss": 0.9127, + "step": 8894 + }, + { + "epoch": 0.8021824412679803, + "grad_norm": 1.458361389253586, + "learning_rate": 3.9650981292603423e-07, + "loss": 0.9185, + "step": 8895 + }, + { + "epoch": 0.8022726247914506, + "grad_norm": 1.449147019577581, + "learning_rate": 3.961607184340041e-07, + "loss": 0.9102, + "step": 8896 + }, + { + "epoch": 0.8023628083149209, + "grad_norm": 1.3661668291804236, + "learning_rate": 3.9581176079038505e-07, + "loss": 0.8334, + "step": 8897 + }, + { + "epoch": 0.8024529918383911, + "grad_norm": 2.1951940104046956, + 
"learning_rate": 3.954629400249516e-07, + "loss": 0.8912, + "step": 8898 + }, + { + "epoch": 0.8025431753618614, + "grad_norm": 1.543158642636587, + "learning_rate": 3.9511425616746787e-07, + "loss": 1.0045, + "step": 8899 + }, + { + "epoch": 0.8026333588853316, + "grad_norm": 1.5332113666677372, + "learning_rate": 3.947657092476853e-07, + "loss": 0.9962, + "step": 8900 + }, + { + "epoch": 0.802723542408802, + "grad_norm": 1.343851440504196, + "learning_rate": 3.944172992953425e-07, + "loss": 0.9526, + "step": 8901 + }, + { + "epoch": 0.8028137259322722, + "grad_norm": 1.4247959860940604, + "learning_rate": 3.9406902634017e-07, + "loss": 0.8906, + "step": 8902 + }, + { + "epoch": 0.8029039094557424, + "grad_norm": 1.35197830419231, + "learning_rate": 3.9372089041188227e-07, + "loss": 0.9764, + "step": 8903 + }, + { + "epoch": 0.8029940929792126, + "grad_norm": 1.5176122163259775, + "learning_rate": 3.9337289154018593e-07, + "loss": 0.8564, + "step": 8904 + }, + { + "epoch": 0.803084276502683, + "grad_norm": 1.5523570627948118, + "learning_rate": 3.930250297547728e-07, + "loss": 0.9415, + "step": 8905 + }, + { + "epoch": 0.8031744600261532, + "grad_norm": 1.7327367932883122, + "learning_rate": 3.9267730508532513e-07, + "loss": 0.8105, + "step": 8906 + }, + { + "epoch": 0.8032646435496235, + "grad_norm": 1.3739571518081932, + "learning_rate": 3.923297175615121e-07, + "loss": 0.9283, + "step": 8907 + }, + { + "epoch": 0.8033548270730937, + "grad_norm": 1.3058903340450556, + "learning_rate": 3.9198226721299243e-07, + "loss": 0.9819, + "step": 8908 + }, + { + "epoch": 0.803445010596564, + "grad_norm": 1.5770327121097034, + "learning_rate": 3.916349540694128e-07, + "loss": 0.9111, + "step": 8909 + }, + { + "epoch": 0.8035351941200343, + "grad_norm": 1.6051057651170262, + "learning_rate": 3.912877781604063e-07, + "loss": 0.9379, + "step": 8910 + }, + { + "epoch": 0.8036253776435045, + "grad_norm": 1.4830799275657607, + "learning_rate": 3.909407395155977e-07, + "loss": 0.9738, + "step": 8911 + }, + { + "epoch": 0.8037155611669748, + "grad_norm": 1.5429955588800348, + "learning_rate": 3.9059383816459725e-07, + "loss": 0.8801, + "step": 8912 + }, + { + "epoch": 0.8038057446904451, + "grad_norm": 3.092082797798659, + "learning_rate": 3.902470741370045e-07, + "loss": 0.9934, + "step": 8913 + }, + { + "epoch": 0.8038959282139153, + "grad_norm": 1.2729866306205209, + "learning_rate": 3.8990044746240746e-07, + "loss": 0.9284, + "step": 8914 + }, + { + "epoch": 0.8039861117373855, + "grad_norm": 1.7313509860004856, + "learning_rate": 3.8955395817038237e-07, + "loss": 1.0392, + "step": 8915 + }, + { + "epoch": 0.8040762952608559, + "grad_norm": 1.5485456288420154, + "learning_rate": 3.892076062904934e-07, + "loss": 0.9098, + "step": 8916 + }, + { + "epoch": 0.8041664787843261, + "grad_norm": 1.5994073276748317, + "learning_rate": 3.8886139185229384e-07, + "loss": 0.9026, + "step": 8917 + }, + { + "epoch": 0.8042566623077964, + "grad_norm": 1.4378778541534625, + "learning_rate": 3.8851531488532284e-07, + "loss": 0.8515, + "step": 8918 + }, + { + "epoch": 0.8043468458312666, + "grad_norm": 1.336239563104535, + "learning_rate": 3.88169375419112e-07, + "loss": 0.9602, + "step": 8919 + }, + { + "epoch": 0.8044370293547369, + "grad_norm": 1.9232194121464872, + "learning_rate": 3.8782357348317717e-07, + "loss": 0.9156, + "step": 8920 + }, + { + "epoch": 0.8045272128782072, + "grad_norm": 1.7563076104866135, + "learning_rate": 3.8747790910702437e-07, + "loss": 0.9873, + "step": 8921 + }, + { + "epoch": 
0.8046173964016774, + "grad_norm": 1.3769344247400381, + "learning_rate": 3.8713238232014776e-07, + "loss": 0.9415, + "step": 8922 + }, + { + "epoch": 0.8047075799251476, + "grad_norm": 1.792516713722868, + "learning_rate": 3.867869931520296e-07, + "loss": 0.9955, + "step": 8923 + }, + { + "epoch": 0.804797763448618, + "grad_norm": 1.4536954518889613, + "learning_rate": 3.864417416321406e-07, + "loss": 0.9531, + "step": 8924 + }, + { + "epoch": 0.8048879469720882, + "grad_norm": 1.456884054567404, + "learning_rate": 3.8609662778993847e-07, + "loss": 0.9188, + "step": 8925 + }, + { + "epoch": 0.8049781304955584, + "grad_norm": 0.6081963168175392, + "learning_rate": 3.85751651654872e-07, + "loss": 0.7721, + "step": 8926 + }, + { + "epoch": 0.8050683140190287, + "grad_norm": 1.6941572199512906, + "learning_rate": 3.8540681325637505e-07, + "loss": 1.0498, + "step": 8927 + }, + { + "epoch": 0.805158497542499, + "grad_norm": 1.3597254684993103, + "learning_rate": 3.8506211262387155e-07, + "loss": 0.9498, + "step": 8928 + }, + { + "epoch": 0.8052486810659693, + "grad_norm": 1.588114705058016, + "learning_rate": 3.847175497867732e-07, + "loss": 0.9476, + "step": 8929 + }, + { + "epoch": 0.8053388645894395, + "grad_norm": 1.659668830676078, + "learning_rate": 3.843731247744801e-07, + "loss": 0.8871, + "step": 8930 + }, + { + "epoch": 0.8054290481129097, + "grad_norm": 1.4472760857784484, + "learning_rate": 3.8402883761638047e-07, + "loss": 0.9836, + "step": 8931 + }, + { + "epoch": 0.8055192316363801, + "grad_norm": 0.720374303719116, + "learning_rate": 3.8368468834185076e-07, + "loss": 0.875, + "step": 8932 + }, + { + "epoch": 0.8056094151598503, + "grad_norm": 3.6380062262265285, + "learning_rate": 3.8334067698025583e-07, + "loss": 0.9803, + "step": 8933 + }, + { + "epoch": 0.8056995986833205, + "grad_norm": 1.8496604195372022, + "learning_rate": 3.8299680356094897e-07, + "loss": 0.8836, + "step": 8934 + }, + { + "epoch": 0.8057897822067909, + "grad_norm": 1.986583832042224, + "learning_rate": 3.8265306811327024e-07, + "loss": 0.8323, + "step": 8935 + }, + { + "epoch": 0.8058799657302611, + "grad_norm": 1.5415499187631174, + "learning_rate": 3.8230947066654994e-07, + "loss": 0.908, + "step": 8936 + }, + { + "epoch": 0.8059701492537313, + "grad_norm": 1.3549091602154026, + "learning_rate": 3.819660112501053e-07, + "loss": 0.9985, + "step": 8937 + }, + { + "epoch": 0.8060603327772016, + "grad_norm": 0.6345367958185326, + "learning_rate": 3.816226898932422e-07, + "loss": 0.8291, + "step": 8938 + }, + { + "epoch": 0.8061505163006719, + "grad_norm": 1.275708426215068, + "learning_rate": 3.812795066252557e-07, + "loss": 0.9323, + "step": 8939 + }, + { + "epoch": 0.8062406998241421, + "grad_norm": 2.433460703438258, + "learning_rate": 3.8093646147542577e-07, + "loss": 0.9569, + "step": 8940 + }, + { + "epoch": 0.8063308833476124, + "grad_norm": 1.3517036584777382, + "learning_rate": 3.805935544730259e-07, + "loss": 0.9084, + "step": 8941 + }, + { + "epoch": 0.8064210668710826, + "grad_norm": 1.249000053992167, + "learning_rate": 3.802507856473118e-07, + "loss": 0.8763, + "step": 8942 + }, + { + "epoch": 0.806511250394553, + "grad_norm": 2.1178224181671506, + "learning_rate": 3.7990815502753317e-07, + "loss": 0.9378, + "step": 8943 + }, + { + "epoch": 0.8066014339180232, + "grad_norm": 1.2383803078293267, + "learning_rate": 3.795656626429231e-07, + "loss": 0.9422, + "step": 8944 + }, + { + "epoch": 0.8066916174414934, + "grad_norm": 1.6732911146519511, + "learning_rate": 3.792233085227059e-07, + "loss": 
0.8632, + "step": 8945 + }, + { + "epoch": 0.8067818009649637, + "grad_norm": 1.6652093078745334, + "learning_rate": 3.788810926960928e-07, + "loss": 0.9501, + "step": 8946 + }, + { + "epoch": 0.806871984488434, + "grad_norm": 1.4855386288915806, + "learning_rate": 3.785390151922836e-07, + "loss": 0.9275, + "step": 8947 + }, + { + "epoch": 0.8069621680119042, + "grad_norm": 1.6281343456153845, + "learning_rate": 3.781970760404665e-07, + "loss": 0.9186, + "step": 8948 + }, + { + "epoch": 0.8070523515353745, + "grad_norm": 1.5938904644270737, + "learning_rate": 3.778552752698176e-07, + "loss": 1.0099, + "step": 8949 + }, + { + "epoch": 0.8071425350588447, + "grad_norm": 1.8407675980930371, + "learning_rate": 3.775136129095007e-07, + "loss": 0.8511, + "step": 8950 + }, + { + "epoch": 0.807232718582315, + "grad_norm": 1.4135743452477274, + "learning_rate": 3.771720889886685e-07, + "loss": 0.9125, + "step": 8951 + }, + { + "epoch": 0.8073229021057853, + "grad_norm": 0.7818883195530805, + "learning_rate": 3.7683070353646194e-07, + "loss": 0.8487, + "step": 8952 + }, + { + "epoch": 0.8074130856292555, + "grad_norm": 1.484903422211043, + "learning_rate": 3.7648945658200983e-07, + "loss": 0.9019, + "step": 8953 + }, + { + "epoch": 0.8075032691527257, + "grad_norm": 1.3944808138747886, + "learning_rate": 3.761483481544292e-07, + "loss": 0.8258, + "step": 8954 + }, + { + "epoch": 0.8075934526761961, + "grad_norm": 1.4817894351732435, + "learning_rate": 3.7580737828282525e-07, + "loss": 0.9114, + "step": 8955 + }, + { + "epoch": 0.8076836361996663, + "grad_norm": 1.3141128983965022, + "learning_rate": 3.754665469962921e-07, + "loss": 0.9207, + "step": 8956 + }, + { + "epoch": 0.8077738197231366, + "grad_norm": 1.3807101459073203, + "learning_rate": 3.7512585432390973e-07, + "loss": 0.898, + "step": 8957 + }, + { + "epoch": 0.8078640032466069, + "grad_norm": 1.3758537936215205, + "learning_rate": 3.7478530029474987e-07, + "loss": 0.8743, + "step": 8958 + }, + { + "epoch": 0.8079541867700771, + "grad_norm": 0.638537401513878, + "learning_rate": 3.7444488493786854e-07, + "loss": 0.8418, + "step": 8959 + }, + { + "epoch": 0.8080443702935474, + "grad_norm": 1.5734897941983257, + "learning_rate": 3.7410460828231405e-07, + "loss": 0.9963, + "step": 8960 + }, + { + "epoch": 0.8081345538170176, + "grad_norm": 1.2214790502712132, + "learning_rate": 3.737644703571188e-07, + "loss": 0.8697, + "step": 8961 + }, + { + "epoch": 0.8082247373404879, + "grad_norm": 2.025682419125238, + "learning_rate": 3.734244711913059e-07, + "loss": 1.0139, + "step": 8962 + }, + { + "epoch": 0.8083149208639582, + "grad_norm": 1.402880425154546, + "learning_rate": 3.7308461081388584e-07, + "loss": 0.9082, + "step": 8963 + }, + { + "epoch": 0.8084051043874284, + "grad_norm": 1.444066433268649, + "learning_rate": 3.727448892538576e-07, + "loss": 0.8718, + "step": 8964 + }, + { + "epoch": 0.8084952879108986, + "grad_norm": 1.3581416794463235, + "learning_rate": 3.724053065402086e-07, + "loss": 1.0201, + "step": 8965 + }, + { + "epoch": 0.808585471434369, + "grad_norm": 1.2230160217389783, + "learning_rate": 3.7206586270191285e-07, + "loss": 0.9066, + "step": 8966 + }, + { + "epoch": 0.8086756549578392, + "grad_norm": 1.3671306489296025, + "learning_rate": 3.7172655776793385e-07, + "loss": 0.9863, + "step": 8967 + }, + { + "epoch": 0.8087658384813095, + "grad_norm": 1.3850262266216733, + "learning_rate": 3.7138739176722323e-07, + "loss": 0.8293, + "step": 8968 + }, + { + "epoch": 0.8088560220047797, + "grad_norm": 1.5997445558245793, + 
"learning_rate": 3.710483647287206e-07, + "loss": 0.9108, + "step": 8969 + }, + { + "epoch": 0.80894620552825, + "grad_norm": 2.4533190802319074, + "learning_rate": 3.707094766813532e-07, + "loss": 0.8385, + "step": 8970 + }, + { + "epoch": 0.8090363890517203, + "grad_norm": 1.5097443849320527, + "learning_rate": 3.7037072765403754e-07, + "loss": 0.905, + "step": 8971 + }, + { + "epoch": 0.8091265725751905, + "grad_norm": 1.9770977516841082, + "learning_rate": 3.700321176756762e-07, + "loss": 0.8969, + "step": 8972 + }, + { + "epoch": 0.8092167560986607, + "grad_norm": 2.052460947164703, + "learning_rate": 3.69693646775163e-07, + "loss": 0.9889, + "step": 8973 + }, + { + "epoch": 0.8093069396221311, + "grad_norm": 1.6032867082384508, + "learning_rate": 3.693553149813764e-07, + "loss": 0.9166, + "step": 8974 + }, + { + "epoch": 0.8093971231456013, + "grad_norm": 0.5915529107043562, + "learning_rate": 3.690171223231866e-07, + "loss": 0.7686, + "step": 8975 + }, + { + "epoch": 0.8094873066690715, + "grad_norm": 1.2591481196116612, + "learning_rate": 3.6867906882944854e-07, + "loss": 1.0316, + "step": 8976 + }, + { + "epoch": 0.8095774901925418, + "grad_norm": 1.3684530449903585, + "learning_rate": 3.6834115452900737e-07, + "loss": 0.9396, + "step": 8977 + }, + { + "epoch": 0.8096676737160121, + "grad_norm": 1.4195105207375172, + "learning_rate": 3.680033794506958e-07, + "loss": 0.8388, + "step": 8978 + }, + { + "epoch": 0.8097578572394823, + "grad_norm": 1.6733300837957636, + "learning_rate": 3.676657436233346e-07, + "loss": 0.7749, + "step": 8979 + }, + { + "epoch": 0.8098480407629526, + "grad_norm": 1.5082548609319655, + "learning_rate": 3.6732824707573305e-07, + "loss": 0.9328, + "step": 8980 + }, + { + "epoch": 0.8099382242864229, + "grad_norm": 1.468401522588395, + "learning_rate": 3.6699088983668716e-07, + "loss": 0.9673, + "step": 8981 + }, + { + "epoch": 0.8100284078098932, + "grad_norm": 1.907209498831597, + "learning_rate": 3.6665367193498376e-07, + "loss": 0.8854, + "step": 8982 + }, + { + "epoch": 0.8101185913333634, + "grad_norm": 2.7008061688956095, + "learning_rate": 3.663165933993948e-07, + "loss": 0.9401, + "step": 8983 + }, + { + "epoch": 0.8102087748568336, + "grad_norm": 1.7218739642851495, + "learning_rate": 3.659796542586822e-07, + "loss": 0.9601, + "step": 8984 + }, + { + "epoch": 0.810298958380304, + "grad_norm": 1.7158151712270013, + "learning_rate": 3.6564285454159526e-07, + "loss": 0.895, + "step": 8985 + }, + { + "epoch": 0.8103891419037742, + "grad_norm": 1.308484200973666, + "learning_rate": 3.653061942768718e-07, + "loss": 0.9743, + "step": 8986 + }, + { + "epoch": 0.8104793254272444, + "grad_norm": 1.5536312176328855, + "learning_rate": 3.649696734932375e-07, + "loss": 0.8998, + "step": 8987 + }, + { + "epoch": 0.8105695089507147, + "grad_norm": 1.4686327948373539, + "learning_rate": 3.646332922194064e-07, + "loss": 0.8664, + "step": 8988 + }, + { + "epoch": 0.810659692474185, + "grad_norm": 1.424726623948493, + "learning_rate": 3.6429705048407943e-07, + "loss": 0.9235, + "step": 8989 + }, + { + "epoch": 0.8107498759976552, + "grad_norm": 1.725278834598526, + "learning_rate": 3.6396094831594804e-07, + "loss": 0.9295, + "step": 8990 + }, + { + "epoch": 0.8108400595211255, + "grad_norm": 1.6160303666122702, + "learning_rate": 3.6362498574368926e-07, + "loss": 0.8913, + "step": 8991 + }, + { + "epoch": 0.8109302430445957, + "grad_norm": 0.7234627012802649, + "learning_rate": 3.6328916279596935e-07, + "loss": 0.856, + "step": 8992 + }, + { + "epoch": 
0.811020426568066, + "grad_norm": 1.502142409363514, + "learning_rate": 3.6295347950144305e-07, + "loss": 0.9825, + "step": 8993 + }, + { + "epoch": 0.8111106100915363, + "grad_norm": 1.4003446137811528, + "learning_rate": 3.626179358887522e-07, + "loss": 0.7943, + "step": 8994 + }, + { + "epoch": 0.8112007936150065, + "grad_norm": 1.6220850108455067, + "learning_rate": 3.6228253198652816e-07, + "loss": 0.9966, + "step": 8995 + }, + { + "epoch": 0.8112909771384768, + "grad_norm": 1.621565196413102, + "learning_rate": 3.6194726782338767e-07, + "loss": 0.8838, + "step": 8996 + }, + { + "epoch": 0.8113811606619471, + "grad_norm": 1.5703221410958312, + "learning_rate": 3.6161214342793953e-07, + "loss": 0.9059, + "step": 8997 + }, + { + "epoch": 0.8114713441854173, + "grad_norm": 1.5084979619058883, + "learning_rate": 3.612771588287764e-07, + "loss": 0.9661, + "step": 8998 + }, + { + "epoch": 0.8115615277088876, + "grad_norm": 1.998066563663075, + "learning_rate": 3.609423140544827e-07, + "loss": 0.995, + "step": 8999 + }, + { + "epoch": 0.8116517112323578, + "grad_norm": 1.7143915575140896, + "learning_rate": 3.6060760913362787e-07, + "loss": 0.9539, + "step": 9000 + }, + { + "epoch": 0.8117418947558281, + "grad_norm": 1.3236634657171147, + "learning_rate": 3.6027304409477146e-07, + "loss": 0.9384, + "step": 9001 + }, + { + "epoch": 0.8118320782792984, + "grad_norm": 1.2968601088071086, + "learning_rate": 3.599386189664604e-07, + "loss": 0.9094, + "step": 9002 + }, + { + "epoch": 0.8119222618027686, + "grad_norm": 1.6713342950942676, + "learning_rate": 3.5960433377722945e-07, + "loss": 0.9269, + "step": 9003 + }, + { + "epoch": 0.8120124453262388, + "grad_norm": 1.416372530452187, + "learning_rate": 3.5927018855560174e-07, + "loss": 0.9823, + "step": 9004 + }, + { + "epoch": 0.8121026288497092, + "grad_norm": 1.316961878665821, + "learning_rate": 3.5893618333008904e-07, + "loss": 0.933, + "step": 9005 + }, + { + "epoch": 0.8121928123731794, + "grad_norm": 1.5936236611947934, + "learning_rate": 3.586023181291893e-07, + "loss": 1.0252, + "step": 9006 + }, + { + "epoch": 0.8122829958966497, + "grad_norm": 0.7400894627104847, + "learning_rate": 3.5826859298139044e-07, + "loss": 0.8567, + "step": 9007 + }, + { + "epoch": 0.81237317942012, + "grad_norm": 1.5553498510559247, + "learning_rate": 3.5793500791516773e-07, + "loss": 0.9235, + "step": 9008 + }, + { + "epoch": 0.8124633629435902, + "grad_norm": 1.34234763312619, + "learning_rate": 3.5760156295898415e-07, + "loss": 0.9056, + "step": 9009 + }, + { + "epoch": 0.8125535464670605, + "grad_norm": 1.3139170308736299, + "learning_rate": 3.5726825814129203e-07, + "loss": 0.9394, + "step": 9010 + }, + { + "epoch": 0.8126437299905307, + "grad_norm": 0.7678625535932748, + "learning_rate": 3.5693509349052886e-07, + "loss": 0.8565, + "step": 9011 + }, + { + "epoch": 0.812733913514001, + "grad_norm": 1.3225519723901804, + "learning_rate": 3.5660206903512433e-07, + "loss": 1.0063, + "step": 9012 + }, + { + "epoch": 0.8128240970374713, + "grad_norm": 1.5942910715800298, + "learning_rate": 3.56269184803492e-07, + "loss": 0.9864, + "step": 9013 + }, + { + "epoch": 0.8129142805609415, + "grad_norm": 1.1837027219456964, + "learning_rate": 3.5593644082403727e-07, + "loss": 0.956, + "step": 9014 + }, + { + "epoch": 0.8130044640844117, + "grad_norm": 1.5725573535642583, + "learning_rate": 3.5560383712514994e-07, + "loss": 1.0448, + "step": 9015 + }, + { + "epoch": 0.8130946476078821, + "grad_norm": 1.532834926056201, + "learning_rate": 3.5527137373521066e-07, + 
"loss": 0.9398, + "step": 9016 + }, + { + "epoch": 0.8131848311313523, + "grad_norm": 1.379082817033436, + "learning_rate": 3.5493905068258645e-07, + "loss": 0.8964, + "step": 9017 + }, + { + "epoch": 0.8132750146548225, + "grad_norm": 1.3414806009881222, + "learning_rate": 3.546068679956333e-07, + "loss": 0.9533, + "step": 9018 + }, + { + "epoch": 0.8133651981782928, + "grad_norm": 1.2989249057134435, + "learning_rate": 3.5427482570269487e-07, + "loss": 1.0001, + "step": 9019 + }, + { + "epoch": 0.8134553817017631, + "grad_norm": 3.0932115799137945, + "learning_rate": 3.539429238321026e-07, + "loss": 0.7806, + "step": 9020 + }, + { + "epoch": 0.8135455652252334, + "grad_norm": 1.5533655575178023, + "learning_rate": 3.536111624121769e-07, + "loss": 0.7902, + "step": 9021 + }, + { + "epoch": 0.8136357487487036, + "grad_norm": 1.4373576881619488, + "learning_rate": 3.532795414712244e-07, + "loss": 0.894, + "step": 9022 + }, + { + "epoch": 0.8137259322721738, + "grad_norm": 1.5010344594040743, + "learning_rate": 3.5294806103754124e-07, + "loss": 0.9545, + "step": 9023 + }, + { + "epoch": 0.8138161157956442, + "grad_norm": 1.5500708809919768, + "learning_rate": 3.526167211394115e-07, + "loss": 0.9487, + "step": 9024 + }, + { + "epoch": 0.8139062993191144, + "grad_norm": 1.3439671984058725, + "learning_rate": 3.522855218051066e-07, + "loss": 0.8689, + "step": 9025 + }, + { + "epoch": 0.8139964828425846, + "grad_norm": 1.5601939159081528, + "learning_rate": 3.5195446306288633e-07, + "loss": 0.9379, + "step": 9026 + }, + { + "epoch": 0.8140866663660549, + "grad_norm": 1.196147552217035, + "learning_rate": 3.51623544940999e-07, + "loss": 0.8848, + "step": 9027 + }, + { + "epoch": 0.8141768498895252, + "grad_norm": 1.427118212791088, + "learning_rate": 3.5129276746767886e-07, + "loss": 0.9183, + "step": 9028 + }, + { + "epoch": 0.8142670334129954, + "grad_norm": 1.4126461797371543, + "learning_rate": 3.5096213067115165e-07, + "loss": 0.908, + "step": 9029 + }, + { + "epoch": 0.8143572169364657, + "grad_norm": 1.7387432727368204, + "learning_rate": 3.506316345796272e-07, + "loss": 0.952, + "step": 9030 + }, + { + "epoch": 0.814447400459936, + "grad_norm": 0.6224462310518795, + "learning_rate": 3.5030127922130714e-07, + "loss": 0.7475, + "step": 9031 + }, + { + "epoch": 0.8145375839834063, + "grad_norm": 1.3508160244120855, + "learning_rate": 3.4997106462437784e-07, + "loss": 0.8306, + "step": 9032 + }, + { + "epoch": 0.8146277675068765, + "grad_norm": 0.6841943336898932, + "learning_rate": 3.496409908170157e-07, + "loss": 0.7814, + "step": 9033 + }, + { + "epoch": 0.8147179510303467, + "grad_norm": 1.6230026587971587, + "learning_rate": 3.493110578273839e-07, + "loss": 0.9132, + "step": 9034 + }, + { + "epoch": 0.8148081345538171, + "grad_norm": 1.5062017187714283, + "learning_rate": 3.489812656836346e-07, + "loss": 0.9977, + "step": 9035 + }, + { + "epoch": 0.8148983180772873, + "grad_norm": 1.3077963605862475, + "learning_rate": 3.486516144139078e-07, + "loss": 0.8587, + "step": 9036 + }, + { + "epoch": 0.8149885016007575, + "grad_norm": 1.6487897885187284, + "learning_rate": 3.4832210404632957e-07, + "loss": 0.899, + "step": 9037 + }, + { + "epoch": 0.8150786851242278, + "grad_norm": 1.3661766020664212, + "learning_rate": 3.479927346090179e-07, + "loss": 0.9422, + "step": 9038 + }, + { + "epoch": 0.8151688686476981, + "grad_norm": 2.001348517695563, + "learning_rate": 3.4766350613007455e-07, + "loss": 0.9204, + "step": 9039 + }, + { + "epoch": 0.8152590521711683, + "grad_norm": 
1.4477009601142674, + "learning_rate": 3.4733441863759173e-07, + "loss": 0.9218, + "step": 9040 + }, + { + "epoch": 0.8153492356946386, + "grad_norm": 1.4204917299432038, + "learning_rate": 3.4700547215964916e-07, + "loss": 0.8269, + "step": 9041 + }, + { + "epoch": 0.8154394192181088, + "grad_norm": 1.3933650250610408, + "learning_rate": 3.46676666724314e-07, + "loss": 0.9104, + "step": 9042 + }, + { + "epoch": 0.8155296027415792, + "grad_norm": 1.660686237783944, + "learning_rate": 3.463480023596421e-07, + "loss": 0.9312, + "step": 9043 + }, + { + "epoch": 0.8156197862650494, + "grad_norm": 1.6768539303406038, + "learning_rate": 3.460194790936772e-07, + "loss": 1.0083, + "step": 9044 + }, + { + "epoch": 0.8157099697885196, + "grad_norm": 1.2802092814720043, + "learning_rate": 3.456910969544495e-07, + "loss": 0.9269, + "step": 9045 + }, + { + "epoch": 0.8158001533119899, + "grad_norm": 2.2634696442237225, + "learning_rate": 3.4536285596997994e-07, + "loss": 0.9638, + "step": 9046 + }, + { + "epoch": 0.8158903368354602, + "grad_norm": 1.7546224852123584, + "learning_rate": 3.450347561682747e-07, + "loss": 0.9038, + "step": 9047 + }, + { + "epoch": 0.8159805203589304, + "grad_norm": 1.8312945155438907, + "learning_rate": 3.4470679757732945e-07, + "loss": 0.8824, + "step": 9048 + }, + { + "epoch": 0.8160707038824007, + "grad_norm": 1.4303365599442444, + "learning_rate": 3.4437898022512735e-07, + "loss": 1.0253, + "step": 9049 + }, + { + "epoch": 0.8161608874058709, + "grad_norm": 1.2960796445110043, + "learning_rate": 3.4405130413963977e-07, + "loss": 0.9393, + "step": 9050 + }, + { + "epoch": 0.8162510709293412, + "grad_norm": 1.1940778451875476, + "learning_rate": 3.437237693488262e-07, + "loss": 1.0027, + "step": 9051 + }, + { + "epoch": 0.8163412544528115, + "grad_norm": 1.3529400723360148, + "learning_rate": 3.433963758806322e-07, + "loss": 0.949, + "step": 9052 + }, + { + "epoch": 0.8164314379762817, + "grad_norm": 1.4177011284608905, + "learning_rate": 3.430691237629948e-07, + "loss": 0.7976, + "step": 9053 + }, + { + "epoch": 0.816521621499752, + "grad_norm": 1.4989959217242546, + "learning_rate": 3.427420130238354e-07, + "loss": 0.8834, + "step": 9054 + }, + { + "epoch": 0.8166118050232223, + "grad_norm": 1.2542334869343983, + "learning_rate": 3.424150436910658e-07, + "loss": 1.0178, + "step": 9055 + }, + { + "epoch": 0.8167019885466925, + "grad_norm": 1.756493781604613, + "learning_rate": 3.420882157925842e-07, + "loss": 0.9935, + "step": 9056 + }, + { + "epoch": 0.8167921720701627, + "grad_norm": 1.5857528804564653, + "learning_rate": 3.417615293562777e-07, + "loss": 0.9186, + "step": 9057 + }, + { + "epoch": 0.8168823555936331, + "grad_norm": 1.3371685669145454, + "learning_rate": 3.4143498441002105e-07, + "loss": 0.8524, + "step": 9058 + }, + { + "epoch": 0.8169725391171033, + "grad_norm": 1.3165902934478464, + "learning_rate": 3.411085809816767e-07, + "loss": 0.9431, + "step": 9059 + }, + { + "epoch": 0.8170627226405736, + "grad_norm": 1.5496163016362319, + "learning_rate": 3.407823190990953e-07, + "loss": 1.0545, + "step": 9060 + }, + { + "epoch": 0.8171529061640438, + "grad_norm": 1.5261943423958524, + "learning_rate": 3.4045619879011577e-07, + "loss": 1.0138, + "step": 9061 + }, + { + "epoch": 0.8172430896875141, + "grad_norm": 1.560234105321834, + "learning_rate": 3.4013022008256334e-07, + "loss": 0.8609, + "step": 9062 + }, + { + "epoch": 0.8173332732109844, + "grad_norm": 1.4999667799767886, + "learning_rate": 3.398043830042532e-07, + "loss": 0.8727, + "step": 9063 + }, + 
{ + "epoch": 0.8174234567344546, + "grad_norm": 1.5230746106002018, + "learning_rate": 3.394786875829871e-07, + "loss": 0.9345, + "step": 9064 + }, + { + "epoch": 0.8175136402579248, + "grad_norm": 1.5191821013458395, + "learning_rate": 3.3915313384655564e-07, + "loss": 0.8228, + "step": 9065 + }, + { + "epoch": 0.8176038237813952, + "grad_norm": 1.237836499172929, + "learning_rate": 3.388277218227369e-07, + "loss": 0.948, + "step": 9066 + }, + { + "epoch": 0.8176940073048654, + "grad_norm": 1.8822343500555536, + "learning_rate": 3.3850245153929557e-07, + "loss": 0.9587, + "step": 9067 + }, + { + "epoch": 0.8177841908283356, + "grad_norm": 1.435443069960766, + "learning_rate": 3.381773230239875e-07, + "loss": 0.8901, + "step": 9068 + }, + { + "epoch": 0.8178743743518059, + "grad_norm": 1.5177774777062267, + "learning_rate": 3.3785233630455247e-07, + "loss": 0.9674, + "step": 9069 + }, + { + "epoch": 0.8179645578752762, + "grad_norm": 1.576977454208767, + "learning_rate": 3.375274914087221e-07, + "loss": 0.9498, + "step": 9070 + }, + { + "epoch": 0.8180547413987465, + "grad_norm": 1.328982715772719, + "learning_rate": 3.3720278836421234e-07, + "loss": 0.9098, + "step": 9071 + }, + { + "epoch": 0.8181449249222167, + "grad_norm": 1.6417514385404712, + "learning_rate": 3.368782271987294e-07, + "loss": 0.9607, + "step": 9072 + }, + { + "epoch": 0.8182351084456869, + "grad_norm": 1.5005172791240864, + "learning_rate": 3.3655380793996636e-07, + "loss": 0.945, + "step": 9073 + }, + { + "epoch": 0.8183252919691573, + "grad_norm": 1.2709501817275486, + "learning_rate": 3.362295306156047e-07, + "loss": 0.8614, + "step": 9074 + }, + { + "epoch": 0.8184154754926275, + "grad_norm": 2.409703736199212, + "learning_rate": 3.3590539525331327e-07, + "loss": 0.9462, + "step": 9075 + }, + { + "epoch": 0.8185056590160977, + "grad_norm": 1.3748040059593734, + "learning_rate": 3.3558140188074967e-07, + "loss": 0.8914, + "step": 9076 + }, + { + "epoch": 0.8185958425395681, + "grad_norm": 1.632475165606142, + "learning_rate": 3.3525755052555817e-07, + "loss": 0.9626, + "step": 9077 + }, + { + "epoch": 0.8186860260630383, + "grad_norm": 0.7616901490148286, + "learning_rate": 3.3493384121537147e-07, + "loss": 0.8802, + "step": 9078 + }, + { + "epoch": 0.8187762095865085, + "grad_norm": 1.8144393279800386, + "learning_rate": 3.3461027397781075e-07, + "loss": 0.9424, + "step": 9079 + }, + { + "epoch": 0.8188663931099788, + "grad_norm": 1.4977599743348746, + "learning_rate": 3.3428684884048397e-07, + "loss": 0.9202, + "step": 9080 + }, + { + "epoch": 0.8189565766334491, + "grad_norm": 1.3168200286848453, + "learning_rate": 3.3396356583098826e-07, + "loss": 0.9369, + "step": 9081 + }, + { + "epoch": 0.8190467601569194, + "grad_norm": 1.3274720774873134, + "learning_rate": 3.3364042497690736e-07, + "loss": 0.9659, + "step": 9082 + }, + { + "epoch": 0.8191369436803896, + "grad_norm": 1.1752806957255433, + "learning_rate": 3.3331742630581405e-07, + "loss": 0.9889, + "step": 9083 + }, + { + "epoch": 0.8192271272038598, + "grad_norm": 1.2607420456677156, + "learning_rate": 3.3299456984526717e-07, + "loss": 0.8814, + "step": 9084 + }, + { + "epoch": 0.8193173107273302, + "grad_norm": 1.278570769904257, + "learning_rate": 3.3267185562281605e-07, + "loss": 0.9731, + "step": 9085 + }, + { + "epoch": 0.8194074942508004, + "grad_norm": 1.5110110023395873, + "learning_rate": 3.3234928366599514e-07, + "loss": 0.9295, + "step": 9086 + }, + { + "epoch": 0.8194976777742706, + "grad_norm": 1.5532232702470494, + "learning_rate": 
3.3202685400232946e-07, + "loss": 0.9733, + "step": 9087 + }, + { + "epoch": 0.8195878612977409, + "grad_norm": 1.8407492707404023, + "learning_rate": 3.317045666593292e-07, + "loss": 0.9624, + "step": 9088 + }, + { + "epoch": 0.8196780448212112, + "grad_norm": 1.5582258894628094, + "learning_rate": 3.3138242166449426e-07, + "loss": 0.9373, + "step": 9089 + }, + { + "epoch": 0.8197682283446814, + "grad_norm": 1.3288593281709986, + "learning_rate": 3.310604190453117e-07, + "loss": 0.9998, + "step": 9090 + }, + { + "epoch": 0.8198584118681517, + "grad_norm": 1.2246072334136975, + "learning_rate": 3.307385588292566e-07, + "loss": 0.888, + "step": 9091 + }, + { + "epoch": 0.8199485953916219, + "grad_norm": 1.4738969913654094, + "learning_rate": 3.304168410437924e-07, + "loss": 0.9243, + "step": 9092 + }, + { + "epoch": 0.8200387789150922, + "grad_norm": 0.6445137946337222, + "learning_rate": 3.300952657163687e-07, + "loss": 0.8386, + "step": 9093 + }, + { + "epoch": 0.8201289624385625, + "grad_norm": 1.6018817118289403, + "learning_rate": 3.297738328744248e-07, + "loss": 0.9139, + "step": 9094 + }, + { + "epoch": 0.8202191459620327, + "grad_norm": 1.596281920183344, + "learning_rate": 3.2945254254538714e-07, + "loss": 0.9277, + "step": 9095 + }, + { + "epoch": 0.820309329485503, + "grad_norm": 1.2624884935364562, + "learning_rate": 3.2913139475666963e-07, + "loss": 0.9051, + "step": 9096 + }, + { + "epoch": 0.8203995130089733, + "grad_norm": 1.703138928837645, + "learning_rate": 3.288103895356749e-07, + "loss": 0.8895, + "step": 9097 + }, + { + "epoch": 0.8204896965324435, + "grad_norm": 1.4036728025356797, + "learning_rate": 3.284895269097927e-07, + "loss": 0.8712, + "step": 9098 + }, + { + "epoch": 0.8205798800559138, + "grad_norm": 0.6952921189310716, + "learning_rate": 3.281688069063999e-07, + "loss": 0.8131, + "step": 9099 + }, + { + "epoch": 0.8206700635793841, + "grad_norm": 1.5020001110173373, + "learning_rate": 3.2784822955286396e-07, + "loss": 0.7587, + "step": 9100 + }, + { + "epoch": 0.8207602471028543, + "grad_norm": 1.32015227299264, + "learning_rate": 3.275277948765365e-07, + "loss": 1.0053, + "step": 9101 + }, + { + "epoch": 0.8208504306263246, + "grad_norm": 1.3617932962923494, + "learning_rate": 3.2720750290475964e-07, + "loss": 0.9844, + "step": 9102 + }, + { + "epoch": 0.8209406141497948, + "grad_norm": 1.7559894517829397, + "learning_rate": 3.268873536648622e-07, + "loss": 0.9401, + "step": 9103 + }, + { + "epoch": 0.8210307976732651, + "grad_norm": 1.496653160906532, + "learning_rate": 3.265673471841612e-07, + "loss": 0.9092, + "step": 9104 + }, + { + "epoch": 0.8211209811967354, + "grad_norm": 0.6692419132460862, + "learning_rate": 3.262474834899616e-07, + "loss": 0.7616, + "step": 9105 + }, + { + "epoch": 0.8212111647202056, + "grad_norm": 0.6534008438358866, + "learning_rate": 3.2592776260955534e-07, + "loss": 0.8436, + "step": 9106 + }, + { + "epoch": 0.8213013482436758, + "grad_norm": 1.6460378093293668, + "learning_rate": 3.256081845702239e-07, + "loss": 0.9792, + "step": 9107 + }, + { + "epoch": 0.8213915317671462, + "grad_norm": 1.3551389969200616, + "learning_rate": 3.2528874939923335e-07, + "loss": 0.9878, + "step": 9108 + }, + { + "epoch": 0.8214817152906164, + "grad_norm": 1.4832332034372777, + "learning_rate": 3.2496945712384217e-07, + "loss": 0.8968, + "step": 9109 + }, + { + "epoch": 0.8215718988140867, + "grad_norm": 1.549103643822517, + "learning_rate": 3.246503077712923e-07, + "loss": 0.9246, + "step": 9110 + }, + { + "epoch": 0.8216620823375569, + 
"grad_norm": 1.2548370234228758, + "learning_rate": 3.2433130136881625e-07, + "loss": 1.0272, + "step": 9111 + }, + { + "epoch": 0.8217522658610272, + "grad_norm": 1.944962129101625, + "learning_rate": 3.2401243794363287e-07, + "loss": 0.9282, + "step": 9112 + }, + { + "epoch": 0.8218424493844975, + "grad_norm": 1.2464109871975024, + "learning_rate": 3.236937175229495e-07, + "loss": 0.9937, + "step": 9113 + }, + { + "epoch": 0.8219326329079677, + "grad_norm": 1.8703550023194897, + "learning_rate": 3.233751401339615e-07, + "loss": 0.907, + "step": 9114 + }, + { + "epoch": 0.8220228164314379, + "grad_norm": 1.5687250872453806, + "learning_rate": 3.2305670580385157e-07, + "loss": 1.0162, + "step": 9115 + }, + { + "epoch": 0.8221129999549083, + "grad_norm": 1.2768392704069327, + "learning_rate": 3.227384145597898e-07, + "loss": 0.935, + "step": 9116 + }, + { + "epoch": 0.8222031834783785, + "grad_norm": 1.659894208984091, + "learning_rate": 3.224202664289346e-07, + "loss": 0.9233, + "step": 9117 + }, + { + "epoch": 0.8222933670018487, + "grad_norm": 1.590191102569449, + "learning_rate": 3.2210226143843257e-07, + "loss": 0.8822, + "step": 9118 + }, + { + "epoch": 0.822383550525319, + "grad_norm": 0.6986503635128994, + "learning_rate": 3.217843996154173e-07, + "loss": 0.8376, + "step": 9119 + }, + { + "epoch": 0.8224737340487893, + "grad_norm": 0.6716853472933477, + "learning_rate": 3.2146668098701055e-07, + "loss": 0.752, + "step": 9120 + }, + { + "epoch": 0.8225639175722596, + "grad_norm": 1.475810187294877, + "learning_rate": 3.2114910558032215e-07, + "loss": 1.0654, + "step": 9121 + }, + { + "epoch": 0.8226541010957298, + "grad_norm": 1.6550377782099859, + "learning_rate": 3.2083167342244945e-07, + "loss": 0.9637, + "step": 9122 + }, + { + "epoch": 0.8227442846192, + "grad_norm": 1.5950826421392423, + "learning_rate": 3.205143845404763e-07, + "loss": 1.0317, + "step": 9123 + }, + { + "epoch": 0.8228344681426704, + "grad_norm": 1.4656160887099574, + "learning_rate": 3.201972389614773e-07, + "loss": 0.9148, + "step": 9124 + }, + { + "epoch": 0.8229246516661406, + "grad_norm": 1.8789172261446876, + "learning_rate": 3.198802367125115e-07, + "loss": 0.9429, + "step": 9125 + }, + { + "epoch": 0.8230148351896108, + "grad_norm": 1.4926994204560113, + "learning_rate": 3.195633778206288e-07, + "loss": 0.9458, + "step": 9126 + }, + { + "epoch": 0.8231050187130812, + "grad_norm": 1.6604929145080596, + "learning_rate": 3.19246662312864e-07, + "loss": 1.0136, + "step": 9127 + }, + { + "epoch": 0.8231952022365514, + "grad_norm": 0.6748073152747656, + "learning_rate": 3.189300902162417e-07, + "loss": 0.8029, + "step": 9128 + }, + { + "epoch": 0.8232853857600216, + "grad_norm": 1.599890454833787, + "learning_rate": 3.1861366155777327e-07, + "loss": 0.9439, + "step": 9129 + }, + { + "epoch": 0.8233755692834919, + "grad_norm": 1.218388675045073, + "learning_rate": 3.182973763644583e-07, + "loss": 0.9485, + "step": 9130 + }, + { + "epoch": 0.8234657528069622, + "grad_norm": 1.4798327365643043, + "learning_rate": 3.1798123466328463e-07, + "loss": 0.8646, + "step": 9131 + }, + { + "epoch": 0.8235559363304324, + "grad_norm": 1.5928726025087117, + "learning_rate": 3.17665236481226e-07, + "loss": 0.9413, + "step": 9132 + }, + { + "epoch": 0.8236461198539027, + "grad_norm": 1.6599240129383177, + "learning_rate": 3.1734938184524576e-07, + "loss": 0.9037, + "step": 9133 + }, + { + "epoch": 0.8237363033773729, + "grad_norm": 1.4758413663042762, + "learning_rate": 3.1703367078229427e-07, + "loss": 0.9511, + "step": 9134 
+ }, + { + "epoch": 0.8238264869008433, + "grad_norm": 1.4997853284446567, + "learning_rate": 3.167181033193096e-07, + "loss": 0.8991, + "step": 9135 + }, + { + "epoch": 0.8239166704243135, + "grad_norm": 1.4063408292471307, + "learning_rate": 3.16402679483218e-07, + "loss": 0.9491, + "step": 9136 + }, + { + "epoch": 0.8240068539477837, + "grad_norm": 1.227424027447904, + "learning_rate": 3.1608739930093366e-07, + "loss": 0.8721, + "step": 9137 + }, + { + "epoch": 0.824097037471254, + "grad_norm": 1.3851008473529831, + "learning_rate": 3.157722627993562e-07, + "loss": 0.8493, + "step": 9138 + }, + { + "epoch": 0.8241872209947243, + "grad_norm": 1.433079016472226, + "learning_rate": 3.1545727000537727e-07, + "loss": 0.9003, + "step": 9139 + }, + { + "epoch": 0.8242774045181945, + "grad_norm": 1.5181139929016285, + "learning_rate": 3.151424209458713e-07, + "loss": 0.9411, + "step": 9140 + }, + { + "epoch": 0.8243675880416648, + "grad_norm": 1.4250349207832278, + "learning_rate": 3.148277156477053e-07, + "loss": 0.9551, + "step": 9141 + }, + { + "epoch": 0.824457771565135, + "grad_norm": 1.851805208289863, + "learning_rate": 3.145131541377299e-07, + "loss": 0.9435, + "step": 9142 + }, + { + "epoch": 0.8245479550886053, + "grad_norm": 1.5118448367506323, + "learning_rate": 3.1419873644278606e-07, + "loss": 0.8994, + "step": 9143 + }, + { + "epoch": 0.8246381386120756, + "grad_norm": 1.6269175146631811, + "learning_rate": 3.1388446258970147e-07, + "loss": 0.9507, + "step": 9144 + }, + { + "epoch": 0.8247283221355458, + "grad_norm": 1.4804228521393725, + "learning_rate": 3.1357033260529145e-07, + "loss": 0.925, + "step": 9145 + }, + { + "epoch": 0.824818505659016, + "grad_norm": 1.6536383280746472, + "learning_rate": 3.1325634651636025e-07, + "loss": 1.0127, + "step": 9146 + }, + { + "epoch": 0.8249086891824864, + "grad_norm": 1.2644847867087492, + "learning_rate": 3.1294250434969694e-07, + "loss": 0.9752, + "step": 9147 + }, + { + "epoch": 0.8249988727059566, + "grad_norm": 1.6842792292670783, + "learning_rate": 3.1262880613208274e-07, + "loss": 0.9772, + "step": 9148 + }, + { + "epoch": 0.8250890562294269, + "grad_norm": 2.284589022148638, + "learning_rate": 3.123152518902823e-07, + "loss": 0.9749, + "step": 9149 + }, + { + "epoch": 0.8251792397528972, + "grad_norm": 1.290644512998843, + "learning_rate": 3.1200184165105017e-07, + "loss": 0.8951, + "step": 9150 + }, + { + "epoch": 0.8252694232763674, + "grad_norm": 1.5032869723601345, + "learning_rate": 3.116885754411287e-07, + "loss": 0.9451, + "step": 9151 + }, + { + "epoch": 0.8253596067998377, + "grad_norm": 1.6638761604103856, + "learning_rate": 3.1137545328724703e-07, + "loss": 0.872, + "step": 9152 + }, + { + "epoch": 0.8254497903233079, + "grad_norm": 1.6053746699405846, + "learning_rate": 3.1106247521612285e-07, + "loss": 0.9364, + "step": 9153 + }, + { + "epoch": 0.8255399738467782, + "grad_norm": 1.5469811913617444, + "learning_rate": 3.107496412544612e-07, + "loss": 0.9556, + "step": 9154 + }, + { + "epoch": 0.8256301573702485, + "grad_norm": 1.3238428983429653, + "learning_rate": 3.1043695142895397e-07, + "loss": 0.9614, + "step": 9155 + }, + { + "epoch": 0.8257203408937187, + "grad_norm": 1.3493129995501432, + "learning_rate": 3.101244057662828e-07, + "loss": 0.8599, + "step": 9156 + }, + { + "epoch": 0.8258105244171889, + "grad_norm": 1.375310299279364, + "learning_rate": 3.098120042931152e-07, + "loss": 0.9254, + "step": 9157 + }, + { + "epoch": 0.8259007079406593, + "grad_norm": 1.363468758440928, + "learning_rate": 
3.0949974703610647e-07, + "loss": 0.9823, + "step": 9158 + }, + { + "epoch": 0.8259908914641295, + "grad_norm": 1.461130221297913, + "learning_rate": 3.0918763402190107e-07, + "loss": 0.926, + "step": 9159 + }, + { + "epoch": 0.8260810749875998, + "grad_norm": 1.5224176367669733, + "learning_rate": 3.088756652771296e-07, + "loss": 0.8992, + "step": 9160 + }, + { + "epoch": 0.82617125851107, + "grad_norm": 1.5841406805056597, + "learning_rate": 3.0856384082841147e-07, + "loss": 1.0327, + "step": 9161 + }, + { + "epoch": 0.8262614420345403, + "grad_norm": 1.3147784393101343, + "learning_rate": 3.0825216070235207e-07, + "loss": 0.8866, + "step": 9162 + }, + { + "epoch": 0.8263516255580106, + "grad_norm": 1.7545258716796714, + "learning_rate": 3.0794062492554764e-07, + "loss": 1.0555, + "step": 9163 + }, + { + "epoch": 0.8264418090814808, + "grad_norm": 1.8231992956591154, + "learning_rate": 3.076292335245783e-07, + "loss": 0.9537, + "step": 9164 + }, + { + "epoch": 0.826531992604951, + "grad_norm": 1.491751642133251, + "learning_rate": 3.073179865260145e-07, + "loss": 1.0079, + "step": 9165 + }, + { + "epoch": 0.8266221761284214, + "grad_norm": 1.2919566639048234, + "learning_rate": 3.070068839564135e-07, + "loss": 0.9422, + "step": 9166 + }, + { + "epoch": 0.8267123596518916, + "grad_norm": 2.3389070447739084, + "learning_rate": 3.0669592584232006e-07, + "loss": 0.9711, + "step": 9167 + }, + { + "epoch": 0.8268025431753618, + "grad_norm": 1.456482566632257, + "learning_rate": 3.063851122102672e-07, + "loss": 0.8503, + "step": 9168 + }, + { + "epoch": 0.8268927266988321, + "grad_norm": 1.596307310958989, + "learning_rate": 3.06074443086775e-07, + "loss": 0.8644, + "step": 9169 + }, + { + "epoch": 0.8269829102223024, + "grad_norm": 0.6379002333861739, + "learning_rate": 3.057639184983514e-07, + "loss": 0.7654, + "step": 9170 + }, + { + "epoch": 0.8270730937457726, + "grad_norm": 1.2939809657529104, + "learning_rate": 3.054535384714927e-07, + "loss": 0.9373, + "step": 9171 + }, + { + "epoch": 0.8271632772692429, + "grad_norm": 1.8307370167143848, + "learning_rate": 3.0514330303268135e-07, + "loss": 1.0985, + "step": 9172 + }, + { + "epoch": 0.8272534607927132, + "grad_norm": 1.4469379254822836, + "learning_rate": 3.0483321220838876e-07, + "loss": 0.89, + "step": 9173 + }, + { + "epoch": 0.8273436443161835, + "grad_norm": 1.3233200219267178, + "learning_rate": 3.045232660250734e-07, + "loss": 0.7951, + "step": 9174 + }, + { + "epoch": 0.8274338278396537, + "grad_norm": 1.4018262583081973, + "learning_rate": 3.0421346450918185e-07, + "loss": 0.9655, + "step": 9175 + }, + { + "epoch": 0.8275240113631239, + "grad_norm": 1.3703775301284211, + "learning_rate": 3.039038076871485e-07, + "loss": 0.8856, + "step": 9176 + }, + { + "epoch": 0.8276141948865943, + "grad_norm": 1.3138347832120367, + "learning_rate": 3.035942955853934e-07, + "loss": 0.9555, + "step": 9177 + }, + { + "epoch": 0.8277043784100645, + "grad_norm": 1.4579166452837202, + "learning_rate": 3.0328492823032804e-07, + "loss": 0.9129, + "step": 9178 + }, + { + "epoch": 0.8277945619335347, + "grad_norm": 1.4224133153693375, + "learning_rate": 3.029757056483471e-07, + "loss": 0.8767, + "step": 9179 + }, + { + "epoch": 0.827884745457005, + "grad_norm": 1.2968494918336897, + "learning_rate": 3.026666278658372e-07, + "loss": 0.8278, + "step": 9180 + }, + { + "epoch": 0.8279749289804753, + "grad_norm": 1.2111004627461557, + "learning_rate": 3.023576949091691e-07, + "loss": 0.94, + "step": 9181 + }, + { + "epoch": 0.8280651125039455, + 
"grad_norm": 1.4668683924808574, + "learning_rate": 3.020489068047032e-07, + "loss": 0.8587, + "step": 9182 + }, + { + "epoch": 0.8281552960274158, + "grad_norm": 1.5089652443151318, + "learning_rate": 3.017402635787869e-07, + "loss": 0.9056, + "step": 9183 + }, + { + "epoch": 0.828245479550886, + "grad_norm": 1.4667379517282277, + "learning_rate": 3.0143176525775537e-07, + "loss": 0.9075, + "step": 9184 + }, + { + "epoch": 0.8283356630743564, + "grad_norm": 1.4197594970879868, + "learning_rate": 3.0112341186793155e-07, + "loss": 0.8822, + "step": 9185 + }, + { + "epoch": 0.8284258465978266, + "grad_norm": 1.3962094763302653, + "learning_rate": 3.008152034356264e-07, + "loss": 0.8756, + "step": 9186 + }, + { + "epoch": 0.8285160301212968, + "grad_norm": 0.6234285865471563, + "learning_rate": 3.005071399871366e-07, + "loss": 0.7809, + "step": 9187 + }, + { + "epoch": 0.828606213644767, + "grad_norm": 1.4736813486042772, + "learning_rate": 3.0019922154874853e-07, + "loss": 0.9997, + "step": 9188 + }, + { + "epoch": 0.8286963971682374, + "grad_norm": 1.8836296492659783, + "learning_rate": 2.998914481467356e-07, + "loss": 0.8969, + "step": 9189 + }, + { + "epoch": 0.8287865806917076, + "grad_norm": 1.5658922374717839, + "learning_rate": 2.9958381980735837e-07, + "loss": 0.8854, + "step": 9190 + }, + { + "epoch": 0.8288767642151779, + "grad_norm": 1.5803756528052095, + "learning_rate": 2.992763365568658e-07, + "loss": 0.8668, + "step": 9191 + }, + { + "epoch": 0.8289669477386481, + "grad_norm": 1.4371860824813538, + "learning_rate": 2.98968998421494e-07, + "loss": 0.9387, + "step": 9192 + }, + { + "epoch": 0.8290571312621184, + "grad_norm": 1.429944791728463, + "learning_rate": 2.98661805427467e-07, + "loss": 0.9259, + "step": 9193 + }, + { + "epoch": 0.8291473147855887, + "grad_norm": 1.3375587664376904, + "learning_rate": 2.9835475760099483e-07, + "loss": 0.9547, + "step": 9194 + }, + { + "epoch": 0.8292374983090589, + "grad_norm": 1.2845753571473757, + "learning_rate": 2.9804785496827856e-07, + "loss": 0.9532, + "step": 9195 + }, + { + "epoch": 0.8293276818325293, + "grad_norm": 1.7121576983425126, + "learning_rate": 2.977410975555028e-07, + "loss": 0.9295, + "step": 9196 + }, + { + "epoch": 0.8294178653559995, + "grad_norm": 1.6393634123543546, + "learning_rate": 2.9743448538884376e-07, + "loss": 0.9877, + "step": 9197 + }, + { + "epoch": 0.8295080488794697, + "grad_norm": 1.2713209477884673, + "learning_rate": 2.9712801849446154e-07, + "loss": 0.9196, + "step": 9198 + }, + { + "epoch": 0.82959823240294, + "grad_norm": 1.4015431363438415, + "learning_rate": 2.9682169689850665e-07, + "loss": 0.8857, + "step": 9199 + }, + { + "epoch": 0.8296884159264103, + "grad_norm": 0.6886767806442637, + "learning_rate": 2.9651552062711573e-07, + "loss": 0.768, + "step": 9200 + }, + { + "epoch": 0.8297785994498805, + "grad_norm": 1.2646757262831612, + "learning_rate": 2.9620948970641333e-07, + "loss": 1.0078, + "step": 9201 + }, + { + "epoch": 0.8298687829733508, + "grad_norm": 1.2934577685563207, + "learning_rate": 2.959036041625125e-07, + "loss": 0.9503, + "step": 9202 + }, + { + "epoch": 0.829958966496821, + "grad_norm": 1.4322510129765533, + "learning_rate": 2.95597864021512e-07, + "loss": 0.8453, + "step": 9203 + }, + { + "epoch": 0.8300491500202913, + "grad_norm": 2.273797285312841, + "learning_rate": 2.9529226930949966e-07, + "loss": 0.9796, + "step": 9204 + }, + { + "epoch": 0.8301393335437616, + "grad_norm": 1.5507880145869466, + "learning_rate": 2.949868200525505e-07, + "loss": 0.9436, + "step": 
9205 + }, + { + "epoch": 0.8302295170672318, + "grad_norm": 1.5973605229675243, + "learning_rate": 2.9468151627672734e-07, + "loss": 0.9335, + "step": 9206 + }, + { + "epoch": 0.830319700590702, + "grad_norm": 1.3480972231999675, + "learning_rate": 2.9437635800808026e-07, + "loss": 0.8883, + "step": 9207 + }, + { + "epoch": 0.8304098841141724, + "grad_norm": 1.7138732388381417, + "learning_rate": 2.940713452726473e-07, + "loss": 0.9189, + "step": 9208 + }, + { + "epoch": 0.8305000676376426, + "grad_norm": 1.293590661155901, + "learning_rate": 2.937664780964526e-07, + "loss": 0.9257, + "step": 9209 + }, + { + "epoch": 0.8305902511611128, + "grad_norm": 1.6393274898530912, + "learning_rate": 2.9346175650551133e-07, + "loss": 0.947, + "step": 9210 + }, + { + "epoch": 0.8306804346845831, + "grad_norm": 1.4205373823113798, + "learning_rate": 2.931571805258215e-07, + "loss": 0.9576, + "step": 9211 + }, + { + "epoch": 0.8307706182080534, + "grad_norm": 1.5399933144808227, + "learning_rate": 2.9285275018337353e-07, + "loss": 0.9864, + "step": 9212 + }, + { + "epoch": 0.8308608017315237, + "grad_norm": 1.7166172319843553, + "learning_rate": 2.9254846550414146e-07, + "loss": 1.0364, + "step": 9213 + }, + { + "epoch": 0.8309509852549939, + "grad_norm": 1.597636476874699, + "learning_rate": 2.922443265140893e-07, + "loss": 0.9788, + "step": 9214 + }, + { + "epoch": 0.8310411687784641, + "grad_norm": 1.5504500566328452, + "learning_rate": 2.919403332391674e-07, + "loss": 0.9305, + "step": 9215 + }, + { + "epoch": 0.8311313523019345, + "grad_norm": 1.3450509360924918, + "learning_rate": 2.9163648570531464e-07, + "loss": 0.9546, + "step": 9216 + }, + { + "epoch": 0.8312215358254047, + "grad_norm": 1.6193543443794265, + "learning_rate": 2.9133278393845717e-07, + "loss": 0.887, + "step": 9217 + }, + { + "epoch": 0.8313117193488749, + "grad_norm": 1.3034632072392158, + "learning_rate": 2.9102922796450703e-07, + "loss": 1.0026, + "step": 9218 + }, + { + "epoch": 0.8314019028723453, + "grad_norm": 1.2787238548492201, + "learning_rate": 2.907258178093672e-07, + "loss": 0.8969, + "step": 9219 + }, + { + "epoch": 0.8314920863958155, + "grad_norm": 1.3445391444717576, + "learning_rate": 2.904225534989251e-07, + "loss": 0.852, + "step": 9220 + }, + { + "epoch": 0.8315822699192857, + "grad_norm": 1.4186289697857595, + "learning_rate": 2.901194350590572e-07, + "loss": 0.8409, + "step": 9221 + }, + { + "epoch": 0.831672453442756, + "grad_norm": 1.456210234921804, + "learning_rate": 2.898164625156274e-07, + "loss": 0.8224, + "step": 9222 + }, + { + "epoch": 0.8317626369662263, + "grad_norm": 1.2931855817996927, + "learning_rate": 2.8951363589448676e-07, + "loss": 0.8907, + "step": 9223 + }, + { + "epoch": 0.8318528204896966, + "grad_norm": 1.566010917567055, + "learning_rate": 2.8921095522147434e-07, + "loss": 0.9288, + "step": 9224 + }, + { + "epoch": 0.8319430040131668, + "grad_norm": 1.5310334811595463, + "learning_rate": 2.8890842052241683e-07, + "loss": 0.9658, + "step": 9225 + }, + { + "epoch": 0.832033187536637, + "grad_norm": 0.6016178848730387, + "learning_rate": 2.886060318231267e-07, + "loss": 0.7399, + "step": 9226 + }, + { + "epoch": 0.8321233710601074, + "grad_norm": 1.788667460309304, + "learning_rate": 2.8830378914940755e-07, + "loss": 0.9572, + "step": 9227 + }, + { + "epoch": 0.8322135545835776, + "grad_norm": 2.61755295664385, + "learning_rate": 2.8800169252704675e-07, + "loss": 0.9352, + "step": 9228 + }, + { + "epoch": 0.8323037381070478, + "grad_norm": 1.3101923501415762, + "learning_rate": 
2.8769974198182143e-07, + "loss": 0.9572, + "step": 9229 + }, + { + "epoch": 0.8323939216305181, + "grad_norm": 1.3401161561981376, + "learning_rate": 2.873979375394955e-07, + "loss": 0.9357, + "step": 9230 + }, + { + "epoch": 0.8324841051539884, + "grad_norm": 1.3420798990197205, + "learning_rate": 2.870962792258209e-07, + "loss": 0.9647, + "step": 9231 + }, + { + "epoch": 0.8325742886774586, + "grad_norm": 1.2829759184702465, + "learning_rate": 2.8679476706653716e-07, + "loss": 0.9499, + "step": 9232 + }, + { + "epoch": 0.8326644722009289, + "grad_norm": 1.5578574829341063, + "learning_rate": 2.864934010873692e-07, + "loss": 0.96, + "step": 9233 + }, + { + "epoch": 0.8327546557243991, + "grad_norm": 1.3110426577561227, + "learning_rate": 2.8619218131403357e-07, + "loss": 0.9105, + "step": 9234 + }, + { + "epoch": 0.8328448392478695, + "grad_norm": 1.7422936924614678, + "learning_rate": 2.858911077722299e-07, + "loss": 0.9681, + "step": 9235 + }, + { + "epoch": 0.8329350227713397, + "grad_norm": 1.6733114898568024, + "learning_rate": 2.855901804876493e-07, + "loss": 0.7959, + "step": 9236 + }, + { + "epoch": 0.8330252062948099, + "grad_norm": 1.2811926386948935, + "learning_rate": 2.852893994859673e-07, + "loss": 0.9754, + "step": 9237 + }, + { + "epoch": 0.8331153898182801, + "grad_norm": 1.2551329129724407, + "learning_rate": 2.849887647928484e-07, + "loss": 0.8858, + "step": 9238 + }, + { + "epoch": 0.8332055733417505, + "grad_norm": 2.5896447106655436, + "learning_rate": 2.8468827643394465e-07, + "loss": 0.8346, + "step": 9239 + }, + { + "epoch": 0.8332957568652207, + "grad_norm": 1.3612601702461704, + "learning_rate": 2.843879344348954e-07, + "loss": 0.9503, + "step": 9240 + }, + { + "epoch": 0.833385940388691, + "grad_norm": 1.4364859279815665, + "learning_rate": 2.840877388213272e-07, + "loss": 0.9069, + "step": 9241 + }, + { + "epoch": 0.8334761239121612, + "grad_norm": 1.5021616142428103, + "learning_rate": 2.8378768961885515e-07, + "loss": 0.9384, + "step": 9242 + }, + { + "epoch": 0.8335663074356315, + "grad_norm": 1.4346927099812574, + "learning_rate": 2.8348778685307983e-07, + "loss": 0.9677, + "step": 9243 + }, + { + "epoch": 0.8336564909591018, + "grad_norm": 1.423664936609154, + "learning_rate": 2.831880305495915e-07, + "loss": 1.0093, + "step": 9244 + }, + { + "epoch": 0.833746674482572, + "grad_norm": 0.6858845284065865, + "learning_rate": 2.828884207339668e-07, + "loss": 0.8235, + "step": 9245 + }, + { + "epoch": 0.8338368580060423, + "grad_norm": 1.7808287272961245, + "learning_rate": 2.8258895743177014e-07, + "loss": 1.0181, + "step": 9246 + }, + { + "epoch": 0.8339270415295126, + "grad_norm": 1.4192826670115417, + "learning_rate": 2.8228964066855356e-07, + "loss": 1.0064, + "step": 9247 + }, + { + "epoch": 0.8340172250529828, + "grad_norm": 1.389658559948639, + "learning_rate": 2.819904704698555e-07, + "loss": 0.9177, + "step": 9248 + }, + { + "epoch": 0.834107408576453, + "grad_norm": 0.6680144132655299, + "learning_rate": 2.8169144686120437e-07, + "loss": 0.7559, + "step": 9249 + }, + { + "epoch": 0.8341975920999234, + "grad_norm": 1.5127098915180686, + "learning_rate": 2.8139256986811254e-07, + "loss": 0.8476, + "step": 9250 + }, + { + "epoch": 0.8342877756233936, + "grad_norm": 1.791656279718616, + "learning_rate": 2.8109383951608424e-07, + "loss": 0.9909, + "step": 9251 + }, + { + "epoch": 0.8343779591468639, + "grad_norm": 1.4490098005743939, + "learning_rate": 2.8079525583060683e-07, + "loss": 1.0076, + "step": 9252 + }, + { + "epoch": 0.8344681426703341, + 
"grad_norm": 1.1754751218810437, + "learning_rate": 2.804968188371577e-07, + "loss": 0.9404, + "step": 9253 + }, + { + "epoch": 0.8345583261938044, + "grad_norm": 1.3152798324447172, + "learning_rate": 2.801985285612014e-07, + "loss": 0.9335, + "step": 9254 + }, + { + "epoch": 0.8346485097172747, + "grad_norm": 1.4480489857907548, + "learning_rate": 2.7990038502818934e-07, + "loss": 0.9122, + "step": 9255 + }, + { + "epoch": 0.8347386932407449, + "grad_norm": 1.368097187942817, + "learning_rate": 2.796023882635612e-07, + "loss": 0.915, + "step": 9256 + }, + { + "epoch": 0.8348288767642151, + "grad_norm": 2.0516199921732965, + "learning_rate": 2.7930453829274323e-07, + "loss": 0.9811, + "step": 9257 + }, + { + "epoch": 0.8349190602876855, + "grad_norm": 1.671385648895911, + "learning_rate": 2.7900683514115054e-07, + "loss": 0.9783, + "step": 9258 + }, + { + "epoch": 0.8350092438111557, + "grad_norm": 1.5526209759839353, + "learning_rate": 2.787092788341836e-07, + "loss": 0.874, + "step": 9259 + }, + { + "epoch": 0.8350994273346259, + "grad_norm": 1.4803234018927365, + "learning_rate": 2.7841186939723195e-07, + "loss": 0.9303, + "step": 9260 + }, + { + "epoch": 0.8351896108580962, + "grad_norm": 1.4329758645611994, + "learning_rate": 2.7811460685567255e-07, + "loss": 0.9079, + "step": 9261 + }, + { + "epoch": 0.8352797943815665, + "grad_norm": 1.7016110471975738, + "learning_rate": 2.778174912348692e-07, + "loss": 0.9296, + "step": 9262 + }, + { + "epoch": 0.8353699779050368, + "grad_norm": 1.3098356815832735, + "learning_rate": 2.7752052256017354e-07, + "loss": 0.9022, + "step": 9263 + }, + { + "epoch": 0.835460161428507, + "grad_norm": 1.5919795955327987, + "learning_rate": 2.7722370085692493e-07, + "loss": 0.9105, + "step": 9264 + }, + { + "epoch": 0.8355503449519772, + "grad_norm": 1.510385482290778, + "learning_rate": 2.769270261504486e-07, + "loss": 0.9108, + "step": 9265 + }, + { + "epoch": 0.8356405284754476, + "grad_norm": 1.521999365015687, + "learning_rate": 2.7663049846606015e-07, + "loss": 0.9009, + "step": 9266 + }, + { + "epoch": 0.8357307119989178, + "grad_norm": 1.300416591725362, + "learning_rate": 2.763341178290592e-07, + "loss": 0.9479, + "step": 9267 + }, + { + "epoch": 0.835820895522388, + "grad_norm": 1.258821355318181, + "learning_rate": 2.7603788426473663e-07, + "loss": 0.959, + "step": 9268 + }, + { + "epoch": 0.8359110790458584, + "grad_norm": 1.733906038134238, + "learning_rate": 2.7574179779836695e-07, + "loss": 0.8742, + "step": 9269 + }, + { + "epoch": 0.8360012625693286, + "grad_norm": 1.4153588185782249, + "learning_rate": 2.754458584552146e-07, + "loss": 0.8916, + "step": 9270 + }, + { + "epoch": 0.8360914460927988, + "grad_norm": 2.0708453950076557, + "learning_rate": 2.751500662605308e-07, + "loss": 0.9267, + "step": 9271 + }, + { + "epoch": 0.8361816296162691, + "grad_norm": 1.3793780678635892, + "learning_rate": 2.7485442123955383e-07, + "loss": 0.8948, + "step": 9272 + }, + { + "epoch": 0.8362718131397394, + "grad_norm": 1.710260056803878, + "learning_rate": 2.7455892341751075e-07, + "loss": 0.8179, + "step": 9273 + }, + { + "epoch": 0.8363619966632097, + "grad_norm": 1.543434478392253, + "learning_rate": 2.7426357281961365e-07, + "loss": 0.8109, + "step": 9274 + }, + { + "epoch": 0.8364521801866799, + "grad_norm": 1.6255761372322253, + "learning_rate": 2.7396836947106416e-07, + "loss": 0.936, + "step": 9275 + }, + { + "epoch": 0.8365423637101501, + "grad_norm": 1.380951744777865, + "learning_rate": 2.736733133970506e-07, + "loss": 1.0352, + "step": 
9276 + }, + { + "epoch": 0.8366325472336205, + "grad_norm": 1.183636050837539, + "learning_rate": 2.7337840462274896e-07, + "loss": 0.9744, + "step": 9277 + }, + { + "epoch": 0.8367227307570907, + "grad_norm": 1.6585639767663036, + "learning_rate": 2.730836431733221e-07, + "loss": 0.9099, + "step": 9278 + }, + { + "epoch": 0.8368129142805609, + "grad_norm": 1.316034915518865, + "learning_rate": 2.727890290739212e-07, + "loss": 0.9873, + "step": 9279 + }, + { + "epoch": 0.8369030978040312, + "grad_norm": 1.3736380420912333, + "learning_rate": 2.7249456234968395e-07, + "loss": 0.9735, + "step": 9280 + }, + { + "epoch": 0.8369932813275015, + "grad_norm": 1.5007569469448079, + "learning_rate": 2.722002430257364e-07, + "loss": 0.9987, + "step": 9281 + }, + { + "epoch": 0.8370834648509717, + "grad_norm": 1.3732347427706748, + "learning_rate": 2.7190607112719035e-07, + "loss": 0.894, + "step": 9282 + }, + { + "epoch": 0.837173648374442, + "grad_norm": 1.5963495035606428, + "learning_rate": 2.716120466791476e-07, + "loss": 0.8608, + "step": 9283 + }, + { + "epoch": 0.8372638318979122, + "grad_norm": 1.5779084916360946, + "learning_rate": 2.7131816970669483e-07, + "loss": 0.9091, + "step": 9284 + }, + { + "epoch": 0.8373540154213825, + "grad_norm": 1.8234083838536723, + "learning_rate": 2.7102444023490777e-07, + "loss": 0.948, + "step": 9285 + }, + { + "epoch": 0.8374441989448528, + "grad_norm": 2.805994520898474, + "learning_rate": 2.70730858288849e-07, + "loss": 0.9937, + "step": 9286 + }, + { + "epoch": 0.837534382468323, + "grad_norm": 1.3561796143721518, + "learning_rate": 2.704374238935685e-07, + "loss": 0.8755, + "step": 9287 + }, + { + "epoch": 0.8376245659917932, + "grad_norm": 0.6870875855548121, + "learning_rate": 2.70144137074104e-07, + "loss": 0.7985, + "step": 9288 + }, + { + "epoch": 0.8377147495152636, + "grad_norm": 1.2487092983879367, + "learning_rate": 2.6985099785547926e-07, + "loss": 0.9541, + "step": 9289 + }, + { + "epoch": 0.8378049330387338, + "grad_norm": 1.3744092018829694, + "learning_rate": 2.695580062627083e-07, + "loss": 0.9579, + "step": 9290 + }, + { + "epoch": 0.8378951165622041, + "grad_norm": 1.749212496543157, + "learning_rate": 2.692651623207891e-07, + "loss": 0.9354, + "step": 9291 + }, + { + "epoch": 0.8379853000856744, + "grad_norm": 1.361136906026924, + "learning_rate": 2.689724660547097e-07, + "loss": 0.9207, + "step": 9292 + }, + { + "epoch": 0.8380754836091446, + "grad_norm": 0.6240793361312534, + "learning_rate": 2.686799174894441e-07, + "loss": 0.7971, + "step": 9293 + }, + { + "epoch": 0.8381656671326149, + "grad_norm": 0.6836975236871567, + "learning_rate": 2.683875166499545e-07, + "loss": 0.8177, + "step": 9294 + }, + { + "epoch": 0.8382558506560851, + "grad_norm": 1.4432928872700141, + "learning_rate": 2.680952635611899e-07, + "loss": 0.9498, + "step": 9295 + }, + { + "epoch": 0.8383460341795554, + "grad_norm": 1.588176105732363, + "learning_rate": 2.678031582480875e-07, + "loss": 0.8538, + "step": 9296 + }, + { + "epoch": 0.8384362177030257, + "grad_norm": 1.3226063231651188, + "learning_rate": 2.6751120073557e-07, + "loss": 0.9733, + "step": 9297 + }, + { + "epoch": 0.8385264012264959, + "grad_norm": 1.5213449828086063, + "learning_rate": 2.672193910485505e-07, + "loss": 0.9049, + "step": 9298 + }, + { + "epoch": 0.8386165847499661, + "grad_norm": 1.3139206146315119, + "learning_rate": 2.669277292119265e-07, + "loss": 0.9316, + "step": 9299 + }, + { + "epoch": 0.8387067682734365, + "grad_norm": 1.416090885661601, + "learning_rate": 
2.666362152505848e-07, + "loss": 0.9395, + "step": 9300 + }, + { + "epoch": 0.8387969517969067, + "grad_norm": 1.5710978655426888, + "learning_rate": 2.663448491893989e-07, + "loss": 0.8707, + "step": 9301 + }, + { + "epoch": 0.838887135320377, + "grad_norm": 1.4222492941044789, + "learning_rate": 2.6605363105322974e-07, + "loss": 0.9125, + "step": 9302 + }, + { + "epoch": 0.8389773188438472, + "grad_norm": 1.489770979721878, + "learning_rate": 2.657625608669263e-07, + "loss": 0.9358, + "step": 9303 + }, + { + "epoch": 0.8390675023673175, + "grad_norm": 1.2668620527722798, + "learning_rate": 2.654716386553224e-07, + "loss": 0.8452, + "step": 9304 + }, + { + "epoch": 0.8391576858907878, + "grad_norm": 1.455064525156663, + "learning_rate": 2.651808644432436e-07, + "loss": 0.9348, + "step": 9305 + }, + { + "epoch": 0.839247869414258, + "grad_norm": 1.5770972653995186, + "learning_rate": 2.6489023825549807e-07, + "loss": 1.0644, + "step": 9306 + }, + { + "epoch": 0.8393380529377282, + "grad_norm": 1.9768139223160395, + "learning_rate": 2.6459976011688547e-07, + "loss": 0.8607, + "step": 9307 + }, + { + "epoch": 0.8394282364611986, + "grad_norm": 1.827081235611109, + "learning_rate": 2.6430943005219e-07, + "loss": 0.9301, + "step": 9308 + }, + { + "epoch": 0.8395184199846688, + "grad_norm": 1.3195967681139835, + "learning_rate": 2.6401924808618447e-07, + "loss": 0.9679, + "step": 9309 + }, + { + "epoch": 0.839608603508139, + "grad_norm": 1.3490344091145703, + "learning_rate": 2.637292142436287e-07, + "loss": 0.9281, + "step": 9310 + }, + { + "epoch": 0.8396987870316093, + "grad_norm": 1.875156523211262, + "learning_rate": 2.6343932854927e-07, + "loss": 0.9099, + "step": 9311 + }, + { + "epoch": 0.8397889705550796, + "grad_norm": 0.7554216324828331, + "learning_rate": 2.6314959102784316e-07, + "loss": 0.8119, + "step": 9312 + }, + { + "epoch": 0.8398791540785498, + "grad_norm": 1.576344382525578, + "learning_rate": 2.6286000170407074e-07, + "loss": 0.9317, + "step": 9313 + }, + { + "epoch": 0.8399693376020201, + "grad_norm": 1.429697505728307, + "learning_rate": 2.625705606026607e-07, + "loss": 0.9119, + "step": 9314 + }, + { + "epoch": 0.8400595211254904, + "grad_norm": 1.3001323834284693, + "learning_rate": 2.622812677483106e-07, + "loss": 1.0067, + "step": 9315 + }, + { + "epoch": 0.8401497046489607, + "grad_norm": 1.7014044373854953, + "learning_rate": 2.6199212316570453e-07, + "loss": 0.9816, + "step": 9316 + }, + { + "epoch": 0.8402398881724309, + "grad_norm": 1.9277230584180562, + "learning_rate": 2.617031268795138e-07, + "loss": 0.9166, + "step": 9317 + }, + { + "epoch": 0.8403300716959011, + "grad_norm": 1.191609999947719, + "learning_rate": 2.614142789143976e-07, + "loss": 0.9669, + "step": 9318 + }, + { + "epoch": 0.8404202552193715, + "grad_norm": 1.3519173746085797, + "learning_rate": 2.6112557929500047e-07, + "loss": 0.9247, + "step": 9319 + }, + { + "epoch": 0.8405104387428417, + "grad_norm": 1.8485453041350703, + "learning_rate": 2.6083702804595817e-07, + "loss": 0.9562, + "step": 9320 + }, + { + "epoch": 0.8406006222663119, + "grad_norm": 1.4775514873176367, + "learning_rate": 2.6054862519188915e-07, + "loss": 0.9207, + "step": 9321 + }, + { + "epoch": 0.8406908057897822, + "grad_norm": 1.499346590778725, + "learning_rate": 2.6026037075740357e-07, + "loss": 0.8647, + "step": 9322 + }, + { + "epoch": 0.8407809893132525, + "grad_norm": 1.6026973518336851, + "learning_rate": 2.5997226476709524e-07, + "loss": 0.905, + "step": 9323 + }, + { + "epoch": 0.8408711728367227, + 
"grad_norm": 1.467928880086146, + "learning_rate": 2.5968430724554856e-07, + "loss": 0.9341, + "step": 9324 + }, + { + "epoch": 0.840961356360193, + "grad_norm": 1.4227785865433504, + "learning_rate": 2.5939649821733225e-07, + "loss": 0.8779, + "step": 9325 + }, + { + "epoch": 0.8410515398836632, + "grad_norm": 1.6346444981774333, + "learning_rate": 2.5910883770700433e-07, + "loss": 0.9032, + "step": 9326 + }, + { + "epoch": 0.8411417234071336, + "grad_norm": 1.1959935660237027, + "learning_rate": 2.5882132573910965e-07, + "loss": 0.777, + "step": 9327 + }, + { + "epoch": 0.8412319069306038, + "grad_norm": 1.1602956257853487, + "learning_rate": 2.585339623381801e-07, + "loss": 0.9679, + "step": 9328 + }, + { + "epoch": 0.841322090454074, + "grad_norm": 1.4283779796683767, + "learning_rate": 2.582467475287358e-07, + "loss": 0.9087, + "step": 9329 + }, + { + "epoch": 0.8414122739775443, + "grad_norm": 1.5687799519266998, + "learning_rate": 2.5795968133528224e-07, + "loss": 0.9434, + "step": 9330 + }, + { + "epoch": 0.8415024575010146, + "grad_norm": 1.5478098192813672, + "learning_rate": 2.576727637823144e-07, + "loss": 0.9366, + "step": 9331 + }, + { + "epoch": 0.8415926410244848, + "grad_norm": 1.586913611778782, + "learning_rate": 2.5738599489431335e-07, + "loss": 0.9708, + "step": 9332 + }, + { + "epoch": 0.8416828245479551, + "grad_norm": 1.2315434198225177, + "learning_rate": 2.5709937469574794e-07, + "loss": 0.9965, + "step": 9333 + }, + { + "epoch": 0.8417730080714253, + "grad_norm": 1.2453210521677158, + "learning_rate": 2.568129032110742e-07, + "loss": 0.9764, + "step": 9334 + }, + { + "epoch": 0.8418631915948956, + "grad_norm": 1.3912199654983115, + "learning_rate": 2.5652658046473565e-07, + "loss": 1.003, + "step": 9335 + }, + { + "epoch": 0.8419533751183659, + "grad_norm": 1.353866333546415, + "learning_rate": 2.5624040648116184e-07, + "loss": 0.9269, + "step": 9336 + }, + { + "epoch": 0.8420435586418361, + "grad_norm": 1.3918270958963315, + "learning_rate": 2.5595438128477245e-07, + "loss": 0.8726, + "step": 9337 + }, + { + "epoch": 0.8421337421653065, + "grad_norm": 1.5876192093192367, + "learning_rate": 2.5566850489997096e-07, + "loss": 0.9428, + "step": 9338 + }, + { + "epoch": 0.8422239256887767, + "grad_norm": 1.6384010136550882, + "learning_rate": 2.5538277735115166e-07, + "loss": 0.9612, + "step": 9339 + }, + { + "epoch": 0.8423141092122469, + "grad_norm": 1.2483057938522957, + "learning_rate": 2.5509719866269306e-07, + "loss": 0.9859, + "step": 9340 + }, + { + "epoch": 0.8424042927357172, + "grad_norm": 1.4739569224787814, + "learning_rate": 2.548117688589628e-07, + "loss": 0.9937, + "step": 9341 + }, + { + "epoch": 0.8424944762591875, + "grad_norm": 1.4216203566354375, + "learning_rate": 2.545264879643152e-07, + "loss": 0.9228, + "step": 9342 + }, + { + "epoch": 0.8425846597826577, + "grad_norm": 1.4052875192028134, + "learning_rate": 2.542413560030923e-07, + "loss": 0.8887, + "step": 9343 + }, + { + "epoch": 0.842674843306128, + "grad_norm": 1.5360111253852067, + "learning_rate": 2.53956372999623e-07, + "loss": 0.8255, + "step": 9344 + }, + { + "epoch": 0.8427650268295982, + "grad_norm": 0.7459980369010234, + "learning_rate": 2.5367153897822293e-07, + "loss": 0.8431, + "step": 9345 + }, + { + "epoch": 0.8428552103530685, + "grad_norm": 1.2509870446843234, + "learning_rate": 2.5338685396319715e-07, + "loss": 0.925, + "step": 9346 + }, + { + "epoch": 0.8429453938765388, + "grad_norm": 1.6420337986179923, + "learning_rate": 2.531023179788352e-07, + "loss": 0.8698, + 
"step": 9347 + }, + { + "epoch": 0.843035577400009, + "grad_norm": 1.4592035785118778, + "learning_rate": 2.528179310494158e-07, + "loss": 0.9152, + "step": 9348 + }, + { + "epoch": 0.8431257609234792, + "grad_norm": 1.2969719264566457, + "learning_rate": 2.5253369319920436e-07, + "loss": 0.953, + "step": 9349 + }, + { + "epoch": 0.8432159444469496, + "grad_norm": 1.838801840149293, + "learning_rate": 2.522496044524538e-07, + "loss": 0.9318, + "step": 9350 + }, + { + "epoch": 0.8433061279704198, + "grad_norm": 1.5388475451938841, + "learning_rate": 2.5196566483340386e-07, + "loss": 1.0274, + "step": 9351 + }, + { + "epoch": 0.84339631149389, + "grad_norm": 1.4309695806195775, + "learning_rate": 2.516818743662825e-07, + "loss": 0.9383, + "step": 9352 + }, + { + "epoch": 0.8434864950173603, + "grad_norm": 1.634867638762785, + "learning_rate": 2.5139823307530285e-07, + "loss": 0.9369, + "step": 9353 + }, + { + "epoch": 0.8435766785408306, + "grad_norm": 1.5041491027539224, + "learning_rate": 2.5111474098466836e-07, + "loss": 0.9692, + "step": 9354 + }, + { + "epoch": 0.8436668620643009, + "grad_norm": 1.2869270901839074, + "learning_rate": 2.50831398118567e-07, + "loss": 0.9736, + "step": 9355 + }, + { + "epoch": 0.8437570455877711, + "grad_norm": 1.4642098237684447, + "learning_rate": 2.5054820450117576e-07, + "loss": 0.8827, + "step": 9356 + }, + { + "epoch": 0.8438472291112413, + "grad_norm": 1.34676670072747, + "learning_rate": 2.502651601566579e-07, + "loss": 0.8955, + "step": 9357 + }, + { + "epoch": 0.8439374126347117, + "grad_norm": 1.5014379284935147, + "learning_rate": 2.499822651091645e-07, + "loss": 0.8947, + "step": 9358 + }, + { + "epoch": 0.8440275961581819, + "grad_norm": 1.5872736972296215, + "learning_rate": 2.496995193828344e-07, + "loss": 0.9557, + "step": 9359 + }, + { + "epoch": 0.8441177796816521, + "grad_norm": 1.4340202674125124, + "learning_rate": 2.494169230017913e-07, + "loss": 0.921, + "step": 9360 + }, + { + "epoch": 0.8442079632051224, + "grad_norm": 1.3500083852436886, + "learning_rate": 2.491344759901499e-07, + "loss": 0.8925, + "step": 9361 + }, + { + "epoch": 0.8442981467285927, + "grad_norm": 1.548727140465755, + "learning_rate": 2.488521783720088e-07, + "loss": 0.9294, + "step": 9362 + }, + { + "epoch": 0.844388330252063, + "grad_norm": 1.5715111859391069, + "learning_rate": 2.4857003017145526e-07, + "loss": 0.9748, + "step": 9363 + }, + { + "epoch": 0.8444785137755332, + "grad_norm": 1.4902422143572476, + "learning_rate": 2.482880314125644e-07, + "loss": 0.9711, + "step": 9364 + }, + { + "epoch": 0.8445686972990035, + "grad_norm": 1.3898027110567548, + "learning_rate": 2.4800618211939726e-07, + "loss": 0.9247, + "step": 9365 + }, + { + "epoch": 0.8446588808224738, + "grad_norm": 1.5674642666480878, + "learning_rate": 2.477244823160034e-07, + "loss": 0.9415, + "step": 9366 + }, + { + "epoch": 0.844749064345944, + "grad_norm": 1.4575976377993558, + "learning_rate": 2.474429320264184e-07, + "loss": 0.9501, + "step": 9367 + }, + { + "epoch": 0.8448392478694142, + "grad_norm": 1.367320419389724, + "learning_rate": 2.47161531274666e-07, + "loss": 0.8953, + "step": 9368 + }, + { + "epoch": 0.8449294313928846, + "grad_norm": 1.7739749043785107, + "learning_rate": 2.4688028008475714e-07, + "loss": 0.867, + "step": 9369 + }, + { + "epoch": 0.8450196149163548, + "grad_norm": 1.725660545925294, + "learning_rate": 2.465991784806891e-07, + "loss": 0.9297, + "step": 9370 + }, + { + "epoch": 0.845109798439825, + "grad_norm": 1.3583517059690653, + "learning_rate": 
2.463182264864472e-07, + "loss": 1.0221, + "step": 9371 + }, + { + "epoch": 0.8451999819632953, + "grad_norm": 1.654951846410456, + "learning_rate": 2.460374241260039e-07, + "loss": 0.8536, + "step": 9372 + }, + { + "epoch": 0.8452901654867656, + "grad_norm": 1.486850000261899, + "learning_rate": 2.4575677142331884e-07, + "loss": 0.9943, + "step": 9373 + }, + { + "epoch": 0.8453803490102358, + "grad_norm": 1.3475174984476979, + "learning_rate": 2.454762684023395e-07, + "loss": 0.8717, + "step": 9374 + }, + { + "epoch": 0.8454705325337061, + "grad_norm": 1.765250579833889, + "learning_rate": 2.4519591508699823e-07, + "loss": 0.8791, + "step": 9375 + }, + { + "epoch": 0.8455607160571763, + "grad_norm": 1.4884202146357404, + "learning_rate": 2.4491571150121815e-07, + "loss": 0.9781, + "step": 9376 + }, + { + "epoch": 0.8456508995806467, + "grad_norm": 1.729662981005996, + "learning_rate": 2.446356576689062e-07, + "loss": 0.9292, + "step": 9377 + }, + { + "epoch": 0.8457410831041169, + "grad_norm": 1.4570257682799446, + "learning_rate": 2.4435575361395976e-07, + "loss": 0.9516, + "step": 9378 + }, + { + "epoch": 0.8458312666275871, + "grad_norm": 1.4344125846642635, + "learning_rate": 2.440759993602606e-07, + "loss": 0.9122, + "step": 9379 + }, + { + "epoch": 0.8459214501510574, + "grad_norm": 1.471589205263222, + "learning_rate": 2.437963949316793e-07, + "loss": 0.8764, + "step": 9380 + }, + { + "epoch": 0.8460116336745277, + "grad_norm": 1.4848134246655014, + "learning_rate": 2.435169403520729e-07, + "loss": 0.9329, + "step": 9381 + }, + { + "epoch": 0.8461018171979979, + "grad_norm": 1.6323527003572627, + "learning_rate": 2.4323763564528653e-07, + "loss": 0.9768, + "step": 9382 + }, + { + "epoch": 0.8461920007214682, + "grad_norm": 1.5991338411944207, + "learning_rate": 2.429584808351517e-07, + "loss": 0.9527, + "step": 9383 + }, + { + "epoch": 0.8462821842449384, + "grad_norm": 2.2646682297539598, + "learning_rate": 2.42679475945488e-07, + "loss": 1.055, + "step": 9384 + }, + { + "epoch": 0.8463723677684087, + "grad_norm": 1.258877747388108, + "learning_rate": 2.424006210001008e-07, + "loss": 0.9323, + "step": 9385 + }, + { + "epoch": 0.846462551291879, + "grad_norm": 1.4033449896844064, + "learning_rate": 2.421219160227839e-07, + "loss": 0.9565, + "step": 9386 + }, + { + "epoch": 0.8465527348153492, + "grad_norm": 1.373122580816528, + "learning_rate": 2.4184336103731785e-07, + "loss": 0.9269, + "step": 9387 + }, + { + "epoch": 0.8466429183388196, + "grad_norm": 1.3868953968420317, + "learning_rate": 2.4156495606747065e-07, + "loss": 0.9559, + "step": 9388 + }, + { + "epoch": 0.8467331018622898, + "grad_norm": 2.1701295274320653, + "learning_rate": 2.412867011369972e-07, + "loss": 1.0481, + "step": 9389 + }, + { + "epoch": 0.84682328538576, + "grad_norm": 1.4579300549897136, + "learning_rate": 2.4100859626963997e-07, + "loss": 0.9501, + "step": 9390 + }, + { + "epoch": 0.8469134689092302, + "grad_norm": 1.2834072095901798, + "learning_rate": 2.407306414891288e-07, + "loss": 0.9307, + "step": 9391 + }, + { + "epoch": 0.8470036524327006, + "grad_norm": 1.1759452832136998, + "learning_rate": 2.4045283681917893e-07, + "loss": 1.0413, + "step": 9392 + }, + { + "epoch": 0.8470938359561708, + "grad_norm": 1.3827284447937078, + "learning_rate": 2.4017518228349586e-07, + "loss": 1.0457, + "step": 9393 + }, + { + "epoch": 0.8471840194796411, + "grad_norm": 0.6366841827557803, + "learning_rate": 2.3989767790576887e-07, + "loss": 0.8196, + "step": 9394 + }, + { + "epoch": 0.8472742030031113, + 
"grad_norm": 1.4749119328215727, + "learning_rate": 2.396203237096781e-07, + "loss": 0.9005, + "step": 9395 + }, + { + "epoch": 0.8473643865265816, + "grad_norm": 1.5884405218557567, + "learning_rate": 2.393431197188873e-07, + "loss": 0.9295, + "step": 9396 + }, + { + "epoch": 0.8474545700500519, + "grad_norm": 1.7669628779684452, + "learning_rate": 2.3906606595705004e-07, + "loss": 0.9988, + "step": 9397 + }, + { + "epoch": 0.8475447535735221, + "grad_norm": 1.337546110695285, + "learning_rate": 2.387891624478056e-07, + "loss": 0.984, + "step": 9398 + }, + { + "epoch": 0.8476349370969923, + "grad_norm": 1.3261384805286662, + "learning_rate": 2.3851240921478075e-07, + "loss": 0.9483, + "step": 9399 + }, + { + "epoch": 0.8477251206204627, + "grad_norm": 1.394044507989518, + "learning_rate": 2.3823580628159057e-07, + "loss": 0.9111, + "step": 9400 + }, + { + "epoch": 0.8478153041439329, + "grad_norm": 1.74350616773866, + "learning_rate": 2.3795935367183517e-07, + "loss": 0.9225, + "step": 9401 + }, + { + "epoch": 0.8479054876674031, + "grad_norm": 1.3097318340599207, + "learning_rate": 2.376830514091035e-07, + "loss": 0.947, + "step": 9402 + }, + { + "epoch": 0.8479956711908734, + "grad_norm": 1.2387513913821384, + "learning_rate": 2.3740689951697135e-07, + "loss": 1.0091, + "step": 9403 + }, + { + "epoch": 0.8480858547143437, + "grad_norm": 1.4714180271389907, + "learning_rate": 2.371308980190012e-07, + "loss": 0.9337, + "step": 9404 + }, + { + "epoch": 0.848176038237814, + "grad_norm": 0.6650864504428946, + "learning_rate": 2.3685504693874337e-07, + "loss": 0.8223, + "step": 9405 + }, + { + "epoch": 0.8482662217612842, + "grad_norm": 2.169260550875678, + "learning_rate": 2.3657934629973497e-07, + "loss": 0.9734, + "step": 9406 + }, + { + "epoch": 0.8483564052847544, + "grad_norm": 1.7996753267359054, + "learning_rate": 2.3630379612549944e-07, + "loss": 0.9426, + "step": 9407 + }, + { + "epoch": 0.8484465888082248, + "grad_norm": 0.7598956077807176, + "learning_rate": 2.3602839643954997e-07, + "loss": 0.8679, + "step": 9408 + }, + { + "epoch": 0.848536772331695, + "grad_norm": 1.3988926322697723, + "learning_rate": 2.3575314726538308e-07, + "loss": 0.9384, + "step": 9409 + }, + { + "epoch": 0.8486269558551652, + "grad_norm": 1.4125261338103463, + "learning_rate": 2.3547804862648645e-07, + "loss": 0.9723, + "step": 9410 + }, + { + "epoch": 0.8487171393786356, + "grad_norm": 1.344195003900607, + "learning_rate": 2.3520310054633174e-07, + "loss": 0.9776, + "step": 9411 + }, + { + "epoch": 0.8488073229021058, + "grad_norm": 1.5160173517859674, + "learning_rate": 2.3492830304837973e-07, + "loss": 0.8935, + "step": 9412 + }, + { + "epoch": 0.848897506425576, + "grad_norm": 1.4127319989371385, + "learning_rate": 2.3465365615607723e-07, + "loss": 0.8773, + "step": 9413 + }, + { + "epoch": 0.8489876899490463, + "grad_norm": 1.7468241757555418, + "learning_rate": 2.3437915989285884e-07, + "loss": 0.9256, + "step": 9414 + }, + { + "epoch": 0.8490778734725166, + "grad_norm": 1.7769365116599796, + "learning_rate": 2.3410481428214647e-07, + "loss": 1.007, + "step": 9415 + }, + { + "epoch": 0.8491680569959869, + "grad_norm": 1.3722138353538502, + "learning_rate": 2.338306193473476e-07, + "loss": 0.9499, + "step": 9416 + }, + { + "epoch": 0.8492582405194571, + "grad_norm": 1.3173750756264888, + "learning_rate": 2.3355657511185957e-07, + "loss": 0.9304, + "step": 9417 + }, + { + "epoch": 0.8493484240429273, + "grad_norm": 0.7270398776413335, + "learning_rate": 2.3328268159906428e-07, + "loss": 0.7852, + 
"step": 9418 + }, + { + "epoch": 0.8494386075663977, + "grad_norm": 1.448667519091797, + "learning_rate": 2.330089388323322e-07, + "loss": 0.9803, + "step": 9419 + }, + { + "epoch": 0.8495287910898679, + "grad_norm": 1.5054488556383272, + "learning_rate": 2.327353468350204e-07, + "loss": 0.9691, + "step": 9420 + }, + { + "epoch": 0.8496189746133381, + "grad_norm": 1.324323666897751, + "learning_rate": 2.3246190563047352e-07, + "loss": 0.922, + "step": 9421 + }, + { + "epoch": 0.8497091581368084, + "grad_norm": 1.413370121006259, + "learning_rate": 2.3218861524202293e-07, + "loss": 0.9151, + "step": 9422 + }, + { + "epoch": 0.8497993416602787, + "grad_norm": 1.562515716473692, + "learning_rate": 2.3191547569298775e-07, + "loss": 0.9383, + "step": 9423 + }, + { + "epoch": 0.8498895251837489, + "grad_norm": 1.5073087176782314, + "learning_rate": 2.3164248700667245e-07, + "loss": 0.9204, + "step": 9424 + }, + { + "epoch": 0.8499797087072192, + "grad_norm": 1.253369558162561, + "learning_rate": 2.313696492063717e-07, + "loss": 0.9598, + "step": 9425 + }, + { + "epoch": 0.8500698922306894, + "grad_norm": 1.6286486232120865, + "learning_rate": 2.3109696231536401e-07, + "loss": 0.9215, + "step": 9426 + }, + { + "epoch": 0.8501600757541597, + "grad_norm": 1.6837784030843725, + "learning_rate": 2.3082442635691722e-07, + "loss": 0.899, + "step": 9427 + }, + { + "epoch": 0.85025025927763, + "grad_norm": 1.477134069191036, + "learning_rate": 2.305520413542854e-07, + "loss": 0.9482, + "step": 9428 + }, + { + "epoch": 0.8503404428011002, + "grad_norm": 1.2680647140920553, + "learning_rate": 2.3027980733071018e-07, + "loss": 0.8633, + "step": 9429 + }, + { + "epoch": 0.8504306263245704, + "grad_norm": 0.6204088381274857, + "learning_rate": 2.3000772430942027e-07, + "loss": 0.76, + "step": 9430 + }, + { + "epoch": 0.8505208098480408, + "grad_norm": 1.4760861713355795, + "learning_rate": 2.2973579231363028e-07, + "loss": 0.9598, + "step": 9431 + }, + { + "epoch": 0.850610993371511, + "grad_norm": 1.4829374881721393, + "learning_rate": 2.2946401136654446e-07, + "loss": 0.9558, + "step": 9432 + }, + { + "epoch": 0.8507011768949813, + "grad_norm": 1.3721423364381418, + "learning_rate": 2.2919238149135077e-07, + "loss": 0.9694, + "step": 9433 + }, + { + "epoch": 0.8507913604184516, + "grad_norm": 1.3081115289642422, + "learning_rate": 2.289209027112282e-07, + "loss": 0.9379, + "step": 9434 + }, + { + "epoch": 0.8508815439419218, + "grad_norm": 1.4867549889819012, + "learning_rate": 2.2864957504933934e-07, + "loss": 0.994, + "step": 9435 + }, + { + "epoch": 0.8509717274653921, + "grad_norm": 2.932318480094066, + "learning_rate": 2.2837839852883589e-07, + "loss": 0.916, + "step": 9436 + }, + { + "epoch": 0.8510619109888623, + "grad_norm": 1.8788964792739065, + "learning_rate": 2.2810737317285623e-07, + "loss": 0.9414, + "step": 9437 + }, + { + "epoch": 0.8511520945123326, + "grad_norm": 1.2623452913275128, + "learning_rate": 2.278364990045254e-07, + "loss": 0.8744, + "step": 9438 + }, + { + "epoch": 0.8512422780358029, + "grad_norm": 1.692962987280008, + "learning_rate": 2.2756577604695625e-07, + "loss": 0.9211, + "step": 9439 + }, + { + "epoch": 0.8513324615592731, + "grad_norm": 1.2690999398794558, + "learning_rate": 2.2729520432324855e-07, + "loss": 0.9491, + "step": 9440 + }, + { + "epoch": 0.8514226450827433, + "grad_norm": 1.3668703855864854, + "learning_rate": 2.2702478385648826e-07, + "loss": 0.861, + "step": 9441 + }, + { + "epoch": 0.8515128286062137, + "grad_norm": 1.714397815603982, + "learning_rate": 
2.2675451466974938e-07, + "loss": 0.9336, + "step": 9442 + }, + { + "epoch": 0.8516030121296839, + "grad_norm": 1.5097885228798038, + "learning_rate": 2.26484396786093e-07, + "loss": 0.867, + "step": 9443 + }, + { + "epoch": 0.8516931956531542, + "grad_norm": 0.6673509518166201, + "learning_rate": 2.2621443022856667e-07, + "loss": 0.8304, + "step": 9444 + }, + { + "epoch": 0.8517833791766244, + "grad_norm": 2.0096910053187607, + "learning_rate": 2.2594461502020646e-07, + "loss": 0.9689, + "step": 9445 + }, + { + "epoch": 0.8518735627000947, + "grad_norm": 1.6639548253428817, + "learning_rate": 2.2567495118403278e-07, + "loss": 0.8156, + "step": 9446 + }, + { + "epoch": 0.851963746223565, + "grad_norm": 1.3324709079994872, + "learning_rate": 2.254054387430566e-07, + "loss": 0.9514, + "step": 9447 + }, + { + "epoch": 0.8520539297470352, + "grad_norm": 1.4053908796722114, + "learning_rate": 2.2513607772027243e-07, + "loss": 0.9234, + "step": 9448 + }, + { + "epoch": 0.8521441132705054, + "grad_norm": 1.4362437522493332, + "learning_rate": 2.2486686813866562e-07, + "loss": 0.8967, + "step": 9449 + }, + { + "epoch": 0.8522342967939758, + "grad_norm": 1.4628053851689056, + "learning_rate": 2.2459781002120514e-07, + "loss": 0.9801, + "step": 9450 + }, + { + "epoch": 0.852324480317446, + "grad_norm": 1.4706131501050719, + "learning_rate": 2.243289033908491e-07, + "loss": 1.0034, + "step": 9451 + }, + { + "epoch": 0.8524146638409162, + "grad_norm": 1.4332482859789069, + "learning_rate": 2.2406014827054176e-07, + "loss": 0.9433, + "step": 9452 + }, + { + "epoch": 0.8525048473643865, + "grad_norm": 1.4900054955861173, + "learning_rate": 2.2379154468321525e-07, + "loss": 0.9993, + "step": 9453 + }, + { + "epoch": 0.8525950308878568, + "grad_norm": 1.277183172649373, + "learning_rate": 2.2352309265178793e-07, + "loss": 0.9716, + "step": 9454 + }, + { + "epoch": 0.852685214411327, + "grad_norm": 2.155581716278225, + "learning_rate": 2.2325479219916565e-07, + "loss": 1.0045, + "step": 9455 + }, + { + "epoch": 0.8527753979347973, + "grad_norm": 1.4636969795038992, + "learning_rate": 2.229866433482419e-07, + "loss": 0.9617, + "step": 9456 + }, + { + "epoch": 0.8528655814582676, + "grad_norm": 1.4250139235769963, + "learning_rate": 2.2271864612189552e-07, + "loss": 0.9768, + "step": 9457 + }, + { + "epoch": 0.8529557649817379, + "grad_norm": 1.403979311969958, + "learning_rate": 2.2245080054299415e-07, + "loss": 0.9699, + "step": 9458 + }, + { + "epoch": 0.8530459485052081, + "grad_norm": 1.3060403993354686, + "learning_rate": 2.2218310663439198e-07, + "loss": 0.8899, + "step": 9459 + }, + { + "epoch": 0.8531361320286783, + "grad_norm": 1.5182181914659676, + "learning_rate": 2.2191556441892968e-07, + "loss": 0.9565, + "step": 9460 + }, + { + "epoch": 0.8532263155521487, + "grad_norm": 1.3572407773193407, + "learning_rate": 2.216481739194358e-07, + "loss": 0.941, + "step": 9461 + }, + { + "epoch": 0.8533164990756189, + "grad_norm": 1.4561097859345833, + "learning_rate": 2.2138093515872592e-07, + "loss": 0.8795, + "step": 9462 + }, + { + "epoch": 0.8534066825990891, + "grad_norm": 1.2078124052494303, + "learning_rate": 2.2111384815960087e-07, + "loss": 1.0112, + "step": 9463 + }, + { + "epoch": 0.8534968661225594, + "grad_norm": 1.9804530397051623, + "learning_rate": 2.208469129448518e-07, + "loss": 0.8567, + "step": 9464 + }, + { + "epoch": 0.8535870496460297, + "grad_norm": 0.6948502107478142, + "learning_rate": 2.2058012953725357e-07, + "loss": 0.8042, + "step": 9465 + }, + { + "epoch": 0.8536772331695, + 
"grad_norm": 1.6220619343834655, + "learning_rate": 2.203134979595711e-07, + "loss": 0.9046, + "step": 9466 + }, + { + "epoch": 0.8537674166929702, + "grad_norm": 1.5152597675521617, + "learning_rate": 2.2004701823455374e-07, + "loss": 0.8856, + "step": 9467 + }, + { + "epoch": 0.8538576002164404, + "grad_norm": 2.8328736624240514, + "learning_rate": 2.1978069038493906e-07, + "loss": 0.9648, + "step": 9468 + }, + { + "epoch": 0.8539477837399108, + "grad_norm": 1.2127222997851483, + "learning_rate": 2.1951451443345225e-07, + "loss": 0.9421, + "step": 9469 + }, + { + "epoch": 0.854037967263381, + "grad_norm": 1.6899651076873763, + "learning_rate": 2.1924849040280425e-07, + "loss": 0.9262, + "step": 9470 + }, + { + "epoch": 0.8541281507868512, + "grad_norm": 2.037662771077205, + "learning_rate": 2.1898261831569465e-07, + "loss": 1.0173, + "step": 9471 + }, + { + "epoch": 0.8542183343103215, + "grad_norm": 1.835261313726371, + "learning_rate": 2.1871689819480798e-07, + "loss": 0.9461, + "step": 9472 + }, + { + "epoch": 0.8543085178337918, + "grad_norm": 1.397495997087072, + "learning_rate": 2.1845133006281745e-07, + "loss": 0.9177, + "step": 9473 + }, + { + "epoch": 0.854398701357262, + "grad_norm": 1.7602123239714054, + "learning_rate": 2.1818591394238294e-07, + "loss": 0.7943, + "step": 9474 + }, + { + "epoch": 0.8544888848807323, + "grad_norm": 2.079590818248627, + "learning_rate": 2.1792064985615076e-07, + "loss": 0.9231, + "step": 9475 + }, + { + "epoch": 0.8545790684042025, + "grad_norm": 1.2899229472457794, + "learning_rate": 2.1765553782675528e-07, + "loss": 0.9103, + "step": 9476 + }, + { + "epoch": 0.8546692519276728, + "grad_norm": 1.2806648220649341, + "learning_rate": 2.1739057787681703e-07, + "loss": 1.0088, + "step": 9477 + }, + { + "epoch": 0.8547594354511431, + "grad_norm": 2.019172680896089, + "learning_rate": 2.1712577002894372e-07, + "loss": 0.9584, + "step": 9478 + }, + { + "epoch": 0.8548496189746133, + "grad_norm": 2.1620381385763507, + "learning_rate": 2.1686111430573105e-07, + "loss": 0.911, + "step": 9479 + }, + { + "epoch": 0.8549398024980835, + "grad_norm": 1.4067300612882456, + "learning_rate": 2.165966107297592e-07, + "loss": 0.9884, + "step": 9480 + }, + { + "epoch": 0.8550299860215539, + "grad_norm": 1.4923994303290946, + "learning_rate": 2.16332259323599e-07, + "loss": 0.9686, + "step": 9481 + }, + { + "epoch": 0.8551201695450241, + "grad_norm": 1.5658912477987932, + "learning_rate": 2.1606806010980504e-07, + "loss": 0.8857, + "step": 9482 + }, + { + "epoch": 0.8552103530684944, + "grad_norm": 1.4462426569034308, + "learning_rate": 2.1580401311092067e-07, + "loss": 0.9348, + "step": 9483 + }, + { + "epoch": 0.8553005365919647, + "grad_norm": 1.2392896523379704, + "learning_rate": 2.1554011834947604e-07, + "loss": 0.9275, + "step": 9484 + }, + { + "epoch": 0.8553907201154349, + "grad_norm": 1.5803721829761663, + "learning_rate": 2.1527637584798764e-07, + "loss": 0.964, + "step": 9485 + }, + { + "epoch": 0.8554809036389052, + "grad_norm": 1.5582939758362553, + "learning_rate": 2.150127856289603e-07, + "loss": 1.0023, + "step": 9486 + }, + { + "epoch": 0.8555710871623754, + "grad_norm": 0.6435240534204636, + "learning_rate": 2.1474934771488363e-07, + "loss": 0.7854, + "step": 9487 + }, + { + "epoch": 0.8556612706858457, + "grad_norm": 1.4415968688797474, + "learning_rate": 2.1448606212823715e-07, + "loss": 0.9811, + "step": 9488 + }, + { + "epoch": 0.855751454209316, + "grad_norm": 1.6551753462362622, + "learning_rate": 2.1422292889148452e-07, + "loss": 0.9212, + 
"step": 9489 + }, + { + "epoch": 0.8558416377327862, + "grad_norm": 1.401013266501284, + "learning_rate": 2.139599480270784e-07, + "loss": 0.9549, + "step": 9490 + }, + { + "epoch": 0.8559318212562564, + "grad_norm": 1.7419543602107521, + "learning_rate": 2.1369711955745773e-07, + "loss": 0.9234, + "step": 9491 + }, + { + "epoch": 0.8560220047797268, + "grad_norm": 9.32415860959344, + "learning_rate": 2.1343444350504813e-07, + "loss": 0.8868, + "step": 9492 + }, + { + "epoch": 0.856112188303197, + "grad_norm": 1.388323905259474, + "learning_rate": 2.1317191989226302e-07, + "loss": 0.9413, + "step": 9493 + }, + { + "epoch": 0.8562023718266673, + "grad_norm": 1.3728709209888292, + "learning_rate": 2.129095487415027e-07, + "loss": 0.9207, + "step": 9494 + }, + { + "epoch": 0.8562925553501375, + "grad_norm": 1.9865797515499317, + "learning_rate": 2.1264733007515257e-07, + "loss": 0.9499, + "step": 9495 + }, + { + "epoch": 0.8563827388736078, + "grad_norm": 1.4920866767414767, + "learning_rate": 2.1238526391558852e-07, + "loss": 0.9206, + "step": 9496 + }, + { + "epoch": 0.8564729223970781, + "grad_norm": 1.7756826834085482, + "learning_rate": 2.1212335028517003e-07, + "loss": 1.0743, + "step": 9497 + }, + { + "epoch": 0.8565631059205483, + "grad_norm": 1.4370186041130069, + "learning_rate": 2.1186158920624563e-07, + "loss": 0.9468, + "step": 9498 + }, + { + "epoch": 0.8566532894440185, + "grad_norm": 1.5829624779038245, + "learning_rate": 2.1159998070115015e-07, + "loss": 0.9384, + "step": 9499 + }, + { + "epoch": 0.8567434729674889, + "grad_norm": 1.5174797743026036, + "learning_rate": 2.113385247922055e-07, + "loss": 0.9219, + "step": 9500 + }, + { + "epoch": 0.8568336564909591, + "grad_norm": 1.4318337095610394, + "learning_rate": 2.1107722150172068e-07, + "loss": 0.9367, + "step": 9501 + }, + { + "epoch": 0.8569238400144293, + "grad_norm": 1.318124391312366, + "learning_rate": 2.108160708519906e-07, + "loss": 0.9798, + "step": 9502 + }, + { + "epoch": 0.8570140235378996, + "grad_norm": 1.5611096872358665, + "learning_rate": 2.1055507286529984e-07, + "loss": 0.9149, + "step": 9503 + }, + { + "epoch": 0.8571042070613699, + "grad_norm": 3.8555250231376434, + "learning_rate": 2.1029422756391612e-07, + "loss": 0.9628, + "step": 9504 + }, + { + "epoch": 0.8571943905848401, + "grad_norm": 1.4125846601113192, + "learning_rate": 2.1003353497009812e-07, + "loss": 0.8553, + "step": 9505 + }, + { + "epoch": 0.8572845741083104, + "grad_norm": 1.3631713298216155, + "learning_rate": 2.0977299510608825e-07, + "loss": 0.8626, + "step": 9506 + }, + { + "epoch": 0.8573747576317807, + "grad_norm": 1.3417677785947575, + "learning_rate": 2.0951260799411784e-07, + "loss": 0.9728, + "step": 9507 + }, + { + "epoch": 0.857464941155251, + "grad_norm": 1.6597950261888308, + "learning_rate": 2.0925237365640424e-07, + "loss": 0.9268, + "step": 9508 + }, + { + "epoch": 0.8575551246787212, + "grad_norm": 0.6350990006850569, + "learning_rate": 2.0899229211515211e-07, + "loss": 0.8109, + "step": 9509 + }, + { + "epoch": 0.8576453082021914, + "grad_norm": 1.4774504721355384, + "learning_rate": 2.0873236339255306e-07, + "loss": 0.8618, + "step": 9510 + }, + { + "epoch": 0.8577354917256618, + "grad_norm": 2.3076965344219134, + "learning_rate": 2.0847258751078644e-07, + "loss": 0.88, + "step": 9511 + }, + { + "epoch": 0.857825675249132, + "grad_norm": 1.8124127202559388, + "learning_rate": 2.082129644920163e-07, + "loss": 0.916, + "step": 9512 + }, + { + "epoch": 0.8579158587726022, + "grad_norm": 1.7032492356090772, + 
"learning_rate": 2.0795349435839605e-07, + "loss": 0.9474, + "step": 9513 + }, + { + "epoch": 0.8580060422960725, + "grad_norm": 1.6316813516134592, + "learning_rate": 2.0769417713206484e-07, + "loss": 0.9113, + "step": 9514 + }, + { + "epoch": 0.8580962258195428, + "grad_norm": 1.1373589145232725, + "learning_rate": 2.074350128351492e-07, + "loss": 0.9981, + "step": 9515 + }, + { + "epoch": 0.858186409343013, + "grad_norm": 1.4291454183854384, + "learning_rate": 2.0717600148976256e-07, + "loss": 0.854, + "step": 9516 + }, + { + "epoch": 0.8582765928664833, + "grad_norm": 1.4468082420816757, + "learning_rate": 2.0691714311800436e-07, + "loss": 0.8703, + "step": 9517 + }, + { + "epoch": 0.8583667763899535, + "grad_norm": 1.4668618097675281, + "learning_rate": 2.066584377419631e-07, + "loss": 0.9995, + "step": 9518 + }, + { + "epoch": 0.8584569599134239, + "grad_norm": 1.3432338077996981, + "learning_rate": 2.0639988538371167e-07, + "loss": 0.9588, + "step": 9519 + }, + { + "epoch": 0.8585471434368941, + "grad_norm": 1.5220747892925068, + "learning_rate": 2.0614148606531258e-07, + "loss": 0.9035, + "step": 9520 + }, + { + "epoch": 0.8586373269603643, + "grad_norm": 1.8214137466746312, + "learning_rate": 2.0588323980881285e-07, + "loss": 0.9551, + "step": 9521 + }, + { + "epoch": 0.8587275104838346, + "grad_norm": 1.28373468035181, + "learning_rate": 2.0562514663624752e-07, + "loss": 0.9197, + "step": 9522 + }, + { + "epoch": 0.8588176940073049, + "grad_norm": 1.3478936027821637, + "learning_rate": 2.0536720656963902e-07, + "loss": 0.9966, + "step": 9523 + }, + { + "epoch": 0.8589078775307751, + "grad_norm": 10.057068679895197, + "learning_rate": 2.051094196309957e-07, + "loss": 0.959, + "step": 9524 + }, + { + "epoch": 0.8589980610542454, + "grad_norm": 1.3787019054086187, + "learning_rate": 2.0485178584231378e-07, + "loss": 0.8971, + "step": 9525 + }, + { + "epoch": 0.8590882445777156, + "grad_norm": 0.692565205255049, + "learning_rate": 2.0459430522557587e-07, + "loss": 0.8033, + "step": 9526 + }, + { + "epoch": 0.8591784281011859, + "grad_norm": 1.7487421965135237, + "learning_rate": 2.0433697780275195e-07, + "loss": 0.9415, + "step": 9527 + }, + { + "epoch": 0.8592686116246562, + "grad_norm": 1.2444046674532343, + "learning_rate": 2.040798035957978e-07, + "loss": 0.958, + "step": 9528 + }, + { + "epoch": 0.8593587951481264, + "grad_norm": 0.7146776767284131, + "learning_rate": 2.038227826266574e-07, + "loss": 0.8289, + "step": 9529 + }, + { + "epoch": 0.8594489786715968, + "grad_norm": 1.6791281545054653, + "learning_rate": 2.0356591491726126e-07, + "loss": 0.9407, + "step": 9530 + }, + { + "epoch": 0.859539162195067, + "grad_norm": 1.2482474916548674, + "learning_rate": 2.033092004895267e-07, + "loss": 0.9761, + "step": 9531 + }, + { + "epoch": 0.8596293457185372, + "grad_norm": 1.3398631903569842, + "learning_rate": 2.03052639365358e-07, + "loss": 0.9501, + "step": 9532 + }, + { + "epoch": 0.8597195292420075, + "grad_norm": 1.3373744442199926, + "learning_rate": 2.0279623156664694e-07, + "loss": 0.9523, + "step": 9533 + }, + { + "epoch": 0.8598097127654778, + "grad_norm": 1.3346065114930747, + "learning_rate": 2.0253997711527005e-07, + "loss": 1.0005, + "step": 9534 + }, + { + "epoch": 0.859899896288948, + "grad_norm": 1.6031769593501939, + "learning_rate": 2.0228387603309428e-07, + "loss": 0.9957, + "step": 9535 + }, + { + "epoch": 0.8599900798124183, + "grad_norm": 1.3716521253699574, + "learning_rate": 2.0202792834197035e-07, + "loss": 0.955, + "step": 9536 + }, + { + "epoch": 
0.8600802633358885, + "grad_norm": 1.5955189630140032, + "learning_rate": 2.017721340637375e-07, + "loss": 0.8723, + "step": 9537 + }, + { + "epoch": 0.8601704468593588, + "grad_norm": 1.4945323156421808, + "learning_rate": 2.0151649322022134e-07, + "loss": 0.925, + "step": 9538 + }, + { + "epoch": 0.8602606303828291, + "grad_norm": 1.9099982884783842, + "learning_rate": 2.012610058332349e-07, + "loss": 0.7485, + "step": 9539 + }, + { + "epoch": 0.8603508139062993, + "grad_norm": 0.7146583691970528, + "learning_rate": 2.010056719245774e-07, + "loss": 0.8009, + "step": 9540 + }, + { + "epoch": 0.8604409974297695, + "grad_norm": 1.5013191462592357, + "learning_rate": 2.0075049151603563e-07, + "loss": 0.9528, + "step": 9541 + }, + { + "epoch": 0.8605311809532399, + "grad_norm": 1.420308854000838, + "learning_rate": 2.0049546462938326e-07, + "loss": 0.9463, + "step": 9542 + }, + { + "epoch": 0.8606213644767101, + "grad_norm": 1.5207310820742819, + "learning_rate": 2.0024059128637961e-07, + "loss": 0.9543, + "step": 9543 + }, + { + "epoch": 0.8607115480001803, + "grad_norm": 1.297034012571315, + "learning_rate": 1.99985871508773e-07, + "loss": 0.7999, + "step": 9544 + }, + { + "epoch": 0.8608017315236506, + "grad_norm": 1.4284314751190264, + "learning_rate": 1.9973130531829674e-07, + "loss": 0.9674, + "step": 9545 + }, + { + "epoch": 0.8608919150471209, + "grad_norm": 1.5521961742916768, + "learning_rate": 1.994768927366721e-07, + "loss": 0.895, + "step": 9546 + }, + { + "epoch": 0.8609820985705912, + "grad_norm": 1.5811203583055022, + "learning_rate": 1.992226337856069e-07, + "loss": 0.9059, + "step": 9547 + }, + { + "epoch": 0.8610722820940614, + "grad_norm": 1.415709237461842, + "learning_rate": 1.9896852848679592e-07, + "loss": 0.89, + "step": 9548 + }, + { + "epoch": 0.8611624656175316, + "grad_norm": 1.3836820207005998, + "learning_rate": 1.9871457686192094e-07, + "loss": 0.9499, + "step": 9549 + }, + { + "epoch": 0.861252649141002, + "grad_norm": 1.9443735821239632, + "learning_rate": 1.984607789326509e-07, + "loss": 0.9259, + "step": 9550 + }, + { + "epoch": 0.8613428326644722, + "grad_norm": 1.447128392352889, + "learning_rate": 1.982071347206402e-07, + "loss": 0.8889, + "step": 9551 + }, + { + "epoch": 0.8614330161879424, + "grad_norm": 1.2956200928141974, + "learning_rate": 1.9795364424753202e-07, + "loss": 0.9169, + "step": 9552 + }, + { + "epoch": 0.8615231997114128, + "grad_norm": 1.9012755228369753, + "learning_rate": 1.9770030753495505e-07, + "loss": 0.9034, + "step": 9553 + }, + { + "epoch": 0.861613383234883, + "grad_norm": 1.521193823047488, + "learning_rate": 1.9744712460452573e-07, + "loss": 0.8897, + "step": 9554 + }, + { + "epoch": 0.8617035667583532, + "grad_norm": 1.3616874144073443, + "learning_rate": 1.9719409547784703e-07, + "loss": 0.9173, + "step": 9555 + }, + { + "epoch": 0.8617937502818235, + "grad_norm": 2.28626963561622, + "learning_rate": 1.9694122017650837e-07, + "loss": 0.9582, + "step": 9556 + }, + { + "epoch": 0.8618839338052938, + "grad_norm": 1.619196210949313, + "learning_rate": 1.9668849872208738e-07, + "loss": 0.9487, + "step": 9557 + }, + { + "epoch": 0.8619741173287641, + "grad_norm": 1.2470027274963849, + "learning_rate": 1.9643593113614632e-07, + "loss": 0.7843, + "step": 9558 + }, + { + "epoch": 0.8620643008522343, + "grad_norm": 1.3335517863290083, + "learning_rate": 1.961835174402371e-07, + "loss": 0.9787, + "step": 9559 + }, + { + "epoch": 0.8621544843757045, + "grad_norm": 1.7507468401052242, + "learning_rate": 1.9593125765589535e-07, + 
"loss": 0.9103, + "step": 9560 + }, + { + "epoch": 0.8622446678991749, + "grad_norm": 2.1900424895671815, + "learning_rate": 1.9567915180464721e-07, + "loss": 0.8821, + "step": 9561 + }, + { + "epoch": 0.8623348514226451, + "grad_norm": 2.244380928101833, + "learning_rate": 1.9542719990800217e-07, + "loss": 0.9243, + "step": 9562 + }, + { + "epoch": 0.8624250349461153, + "grad_norm": 1.5062958199904017, + "learning_rate": 1.9517540198745896e-07, + "loss": 0.9979, + "step": 9563 + }, + { + "epoch": 0.8625152184695856, + "grad_norm": 1.8615643090250045, + "learning_rate": 1.94923758064502e-07, + "loss": 0.9249, + "step": 9564 + }, + { + "epoch": 0.8626054019930559, + "grad_norm": 1.297734263151813, + "learning_rate": 1.9467226816060322e-07, + "loss": 0.9044, + "step": 9565 + }, + { + "epoch": 0.8626955855165261, + "grad_norm": 1.6811211433806053, + "learning_rate": 1.9442093229722122e-07, + "loss": 0.8816, + "step": 9566 + }, + { + "epoch": 0.8627857690399964, + "grad_norm": 1.7117057016311483, + "learning_rate": 1.9416975049580085e-07, + "loss": 1.0114, + "step": 9567 + }, + { + "epoch": 0.8628759525634666, + "grad_norm": 1.5890303646953088, + "learning_rate": 1.9391872277777456e-07, + "loss": 0.9206, + "step": 9568 + }, + { + "epoch": 0.862966136086937, + "grad_norm": 1.672601844868503, + "learning_rate": 1.9366784916456158e-07, + "loss": 0.9329, + "step": 9569 + }, + { + "epoch": 0.8630563196104072, + "grad_norm": 1.415967216979672, + "learning_rate": 1.9341712967756774e-07, + "loss": 0.8942, + "step": 9570 + }, + { + "epoch": 0.8631465031338774, + "grad_norm": 1.3952814216185623, + "learning_rate": 1.9316656433818566e-07, + "loss": 0.8537, + "step": 9571 + }, + { + "epoch": 0.8632366866573477, + "grad_norm": 1.4282633875068027, + "learning_rate": 1.929161531677954e-07, + "loss": 0.8337, + "step": 9572 + }, + { + "epoch": 0.863326870180818, + "grad_norm": 1.4491742479426926, + "learning_rate": 1.9266589618776251e-07, + "loss": 0.8892, + "step": 9573 + }, + { + "epoch": 0.8634170537042882, + "grad_norm": 1.5646845995169685, + "learning_rate": 1.924157934194417e-07, + "loss": 0.9854, + "step": 9574 + }, + { + "epoch": 0.8635072372277585, + "grad_norm": 1.4903430024199331, + "learning_rate": 1.9216584488417142e-07, + "loss": 0.8243, + "step": 9575 + }, + { + "epoch": 0.8635974207512288, + "grad_norm": 1.6404461717782242, + "learning_rate": 1.919160506032802e-07, + "loss": 0.9866, + "step": 9576 + }, + { + "epoch": 0.863687604274699, + "grad_norm": 1.3235855163887336, + "learning_rate": 1.916664105980812e-07, + "loss": 0.9938, + "step": 9577 + }, + { + "epoch": 0.8637777877981693, + "grad_norm": 1.4948942390720295, + "learning_rate": 1.914169248898747e-07, + "loss": 0.9724, + "step": 9578 + }, + { + "epoch": 0.8638679713216395, + "grad_norm": 1.3021727671109058, + "learning_rate": 1.9116759349994882e-07, + "loss": 0.8509, + "step": 9579 + }, + { + "epoch": 0.8639581548451098, + "grad_norm": 1.3133690545425039, + "learning_rate": 1.9091841644957763e-07, + "loss": 0.9622, + "step": 9580 + }, + { + "epoch": 0.8640483383685801, + "grad_norm": 1.2081321461126275, + "learning_rate": 1.9066939376002278e-07, + "loss": 1.0184, + "step": 9581 + }, + { + "epoch": 0.8641385218920503, + "grad_norm": 1.8331922419038236, + "learning_rate": 1.9042052545253085e-07, + "loss": 0.8543, + "step": 9582 + }, + { + "epoch": 0.8642287054155205, + "grad_norm": 1.483080490222468, + "learning_rate": 1.901718115483384e-07, + "loss": 0.9151, + "step": 9583 + }, + { + "epoch": 0.8643188889389909, + "grad_norm": 
1.2988486381574431, + "learning_rate": 1.8992325206866598e-07, + "loss": 0.92, + "step": 9584 + }, + { + "epoch": 0.8644090724624611, + "grad_norm": 1.2073420152322187, + "learning_rate": 1.8967484703472225e-07, + "loss": 0.9097, + "step": 9585 + }, + { + "epoch": 0.8644992559859314, + "grad_norm": 1.1968982126559657, + "learning_rate": 1.8942659646770288e-07, + "loss": 0.9774, + "step": 9586 + }, + { + "epoch": 0.8645894395094016, + "grad_norm": 1.7284815553227824, + "learning_rate": 1.8917850038878936e-07, + "loss": 1.0025, + "step": 9587 + }, + { + "epoch": 0.8646796230328719, + "grad_norm": 1.464854085249998, + "learning_rate": 1.8893055881915121e-07, + "loss": 0.8434, + "step": 9588 + }, + { + "epoch": 0.8647698065563422, + "grad_norm": 1.492898102770084, + "learning_rate": 1.886827717799442e-07, + "loss": 0.8901, + "step": 9589 + }, + { + "epoch": 0.8648599900798124, + "grad_norm": 1.5162303541503592, + "learning_rate": 1.884351392923096e-07, + "loss": 0.9514, + "step": 9590 + }, + { + "epoch": 0.8649501736032826, + "grad_norm": 1.8216744759134267, + "learning_rate": 1.8818766137737896e-07, + "loss": 0.9753, + "step": 9591 + }, + { + "epoch": 0.865040357126753, + "grad_norm": 1.9477100228706834, + "learning_rate": 1.8794033805626653e-07, + "loss": 0.9134, + "step": 9592 + }, + { + "epoch": 0.8651305406502232, + "grad_norm": 1.3988652347690507, + "learning_rate": 1.876931693500763e-07, + "loss": 0.9285, + "step": 9593 + }, + { + "epoch": 0.8652207241736934, + "grad_norm": 1.8190799846375731, + "learning_rate": 1.8744615527989783e-07, + "loss": 0.7875, + "step": 9594 + }, + { + "epoch": 0.8653109076971637, + "grad_norm": 1.671813108065115, + "learning_rate": 1.871992958668076e-07, + "loss": 1.0182, + "step": 9595 + }, + { + "epoch": 0.865401091220634, + "grad_norm": 1.5184932978447305, + "learning_rate": 1.8695259113186944e-07, + "loss": 0.9722, + "step": 9596 + }, + { + "epoch": 0.8654912747441043, + "grad_norm": 1.3168525731549237, + "learning_rate": 1.8670604109613252e-07, + "loss": 0.9013, + "step": 9597 + }, + { + "epoch": 0.8655814582675745, + "grad_norm": 1.252788057000806, + "learning_rate": 1.8645964578063533e-07, + "loss": 0.9796, + "step": 9598 + }, + { + "epoch": 0.8656716417910447, + "grad_norm": 1.4013722064097842, + "learning_rate": 1.862134052064006e-07, + "loss": 0.9888, + "step": 9599 + }, + { + "epoch": 0.8657618253145151, + "grad_norm": 2.073882397178456, + "learning_rate": 1.8596731939443932e-07, + "loss": 0.9733, + "step": 9600 + }, + { + "epoch": 0.8658520088379853, + "grad_norm": 1.4185872896301344, + "learning_rate": 1.857213883657487e-07, + "loss": 0.9604, + "step": 9601 + }, + { + "epoch": 0.8659421923614555, + "grad_norm": 0.6538591429447236, + "learning_rate": 1.8547561214131303e-07, + "loss": 0.7728, + "step": 9602 + }, + { + "epoch": 0.8660323758849259, + "grad_norm": 1.5551804570916756, + "learning_rate": 1.8522999074210355e-07, + "loss": 1.0184, + "step": 9603 + }, + { + "epoch": 0.8661225594083961, + "grad_norm": 1.5047002108503478, + "learning_rate": 1.849845241890775e-07, + "loss": 0.9768, + "step": 9604 + }, + { + "epoch": 0.8662127429318663, + "grad_norm": 1.5412777676521243, + "learning_rate": 1.8473921250317992e-07, + "loss": 0.9415, + "step": 9605 + }, + { + "epoch": 0.8663029264553366, + "grad_norm": 1.781777755454752, + "learning_rate": 1.8449405570534225e-07, + "loss": 0.869, + "step": 9606 + }, + { + "epoch": 0.8663931099788069, + "grad_norm": 1.975313176301473, + "learning_rate": 1.8424905381648204e-07, + "loss": 0.9747, + "step": 9607 + }, 
+ { + "epoch": 0.8664832935022772, + "grad_norm": 1.2972734138050488, + "learning_rate": 1.8400420685750452e-07, + "loss": 0.8913, + "step": 9608 + }, + { + "epoch": 0.8665734770257474, + "grad_norm": 1.4078344108409122, + "learning_rate": 1.8375951484930142e-07, + "loss": 0.8722, + "step": 9609 + }, + { + "epoch": 0.8666636605492176, + "grad_norm": 1.3575433690430307, + "learning_rate": 1.8351497781275094e-07, + "loss": 0.935, + "step": 9610 + }, + { + "epoch": 0.866753844072688, + "grad_norm": 1.4795125726988763, + "learning_rate": 1.8327059576871907e-07, + "loss": 0.9221, + "step": 9611 + }, + { + "epoch": 0.8668440275961582, + "grad_norm": 1.2453509661047668, + "learning_rate": 1.8302636873805665e-07, + "loss": 0.9826, + "step": 9612 + }, + { + "epoch": 0.8669342111196284, + "grad_norm": 1.4377614488432728, + "learning_rate": 1.8278229674160373e-07, + "loss": 1.0088, + "step": 9613 + }, + { + "epoch": 0.8670243946430987, + "grad_norm": 1.3374937930809816, + "learning_rate": 1.825383798001845e-07, + "loss": 0.8689, + "step": 9614 + }, + { + "epoch": 0.867114578166569, + "grad_norm": 1.3030160487892715, + "learning_rate": 1.8229461793461297e-07, + "loss": 0.8784, + "step": 9615 + }, + { + "epoch": 0.8672047616900392, + "grad_norm": 1.3514169603607662, + "learning_rate": 1.8205101116568698e-07, + "loss": 0.9468, + "step": 9616 + }, + { + "epoch": 0.8672949452135095, + "grad_norm": 1.927246217459459, + "learning_rate": 1.818075595141928e-07, + "loss": 0.9872, + "step": 9617 + }, + { + "epoch": 0.8673851287369797, + "grad_norm": 1.4975123440292637, + "learning_rate": 1.8156426300090288e-07, + "loss": 0.9406, + "step": 9618 + }, + { + "epoch": 0.86747531226045, + "grad_norm": 1.4763207616166685, + "learning_rate": 1.8132112164657686e-07, + "loss": 0.9297, + "step": 9619 + }, + { + "epoch": 0.8675654957839203, + "grad_norm": 1.4378257879859055, + "learning_rate": 1.8107813547196106e-07, + "loss": 0.8266, + "step": 9620 + }, + { + "epoch": 0.8676556793073905, + "grad_norm": 1.49372361291539, + "learning_rate": 1.8083530449778817e-07, + "loss": 0.9711, + "step": 9621 + }, + { + "epoch": 0.8677458628308607, + "grad_norm": 1.3331164193536715, + "learning_rate": 1.8059262874477787e-07, + "loss": 0.8512, + "step": 9622 + }, + { + "epoch": 0.8678360463543311, + "grad_norm": 1.5874642976365647, + "learning_rate": 1.8035010823363627e-07, + "loss": 0.9267, + "step": 9623 + }, + { + "epoch": 0.8679262298778013, + "grad_norm": 1.4010026304722665, + "learning_rate": 1.8010774298505705e-07, + "loss": 0.9736, + "step": 9624 + }, + { + "epoch": 0.8680164134012716, + "grad_norm": 2.1864886534540604, + "learning_rate": 1.7986553301972007e-07, + "loss": 1.0276, + "step": 9625 + }, + { + "epoch": 0.8681065969247419, + "grad_norm": 1.8000736459924922, + "learning_rate": 1.7962347835829171e-07, + "loss": 0.9299, + "step": 9626 + }, + { + "epoch": 0.8681967804482121, + "grad_norm": 1.2552576121342762, + "learning_rate": 1.793815790214257e-07, + "loss": 0.9468, + "step": 9627 + }, + { + "epoch": 0.8682869639716824, + "grad_norm": 1.517713071893832, + "learning_rate": 1.791398350297626e-07, + "loss": 0.8922, + "step": 9628 + }, + { + "epoch": 0.8683771474951526, + "grad_norm": 1.3334405329446068, + "learning_rate": 1.7889824640392813e-07, + "loss": 0.9393, + "step": 9629 + }, + { + "epoch": 0.868467331018623, + "grad_norm": 1.4792603566044282, + "learning_rate": 1.7865681316453741e-07, + "loss": 0.9189, + "step": 9630 + }, + { + "epoch": 0.8685575145420932, + "grad_norm": 1.4657071023487225, + "learning_rate": 
1.7841553533218968e-07, + "loss": 0.9251, + "step": 9631 + }, + { + "epoch": 0.8686476980655634, + "grad_norm": 1.5225162165738086, + "learning_rate": 1.7817441292747292e-07, + "loss": 1.0352, + "step": 9632 + }, + { + "epoch": 0.8687378815890336, + "grad_norm": 1.6516782173319986, + "learning_rate": 1.779334459709607e-07, + "loss": 0.9232, + "step": 9633 + }, + { + "epoch": 0.868828065112504, + "grad_norm": 1.889239915163373, + "learning_rate": 1.7769263448321347e-07, + "loss": 0.9576, + "step": 9634 + }, + { + "epoch": 0.8689182486359742, + "grad_norm": 1.3591105762994447, + "learning_rate": 1.7745197848477879e-07, + "loss": 0.9265, + "step": 9635 + }, + { + "epoch": 0.8690084321594445, + "grad_norm": 1.7237505907662136, + "learning_rate": 1.7721147799619063e-07, + "loss": 1.0318, + "step": 9636 + }, + { + "epoch": 0.8690986156829147, + "grad_norm": 0.6038751117924922, + "learning_rate": 1.769711330379704e-07, + "loss": 0.7662, + "step": 9637 + }, + { + "epoch": 0.869188799206385, + "grad_norm": 1.6650627604389456, + "learning_rate": 1.767309436306248e-07, + "loss": 0.9797, + "step": 9638 + }, + { + "epoch": 0.8692789827298553, + "grad_norm": 1.4458809224559253, + "learning_rate": 1.764909097946483e-07, + "loss": 0.9999, + "step": 9639 + }, + { + "epoch": 0.8693691662533255, + "grad_norm": 2.0666155357598672, + "learning_rate": 1.7625103155052236e-07, + "loss": 1.0062, + "step": 9640 + }, + { + "epoch": 0.8694593497767957, + "grad_norm": 1.8231345636838177, + "learning_rate": 1.760113089187143e-07, + "loss": 0.9421, + "step": 9641 + }, + { + "epoch": 0.8695495333002661, + "grad_norm": 1.4679316411954122, + "learning_rate": 1.7577174191967868e-07, + "loss": 0.9613, + "step": 9642 + }, + { + "epoch": 0.8696397168237363, + "grad_norm": 1.6236997684405998, + "learning_rate": 1.755323305738574e-07, + "loss": 0.9477, + "step": 9643 + }, + { + "epoch": 0.8697299003472065, + "grad_norm": 1.237359651640884, + "learning_rate": 1.7529307490167677e-07, + "loss": 0.9709, + "step": 9644 + }, + { + "epoch": 0.8698200838706768, + "grad_norm": 1.6949930197647989, + "learning_rate": 1.7505397492355288e-07, + "loss": 0.9157, + "step": 9645 + }, + { + "epoch": 0.8699102673941471, + "grad_norm": 1.387099866477284, + "learning_rate": 1.7481503065988589e-07, + "loss": 0.9694, + "step": 9646 + }, + { + "epoch": 0.8700004509176174, + "grad_norm": 1.1476597650213147, + "learning_rate": 1.7457624213106526e-07, + "loss": 0.9022, + "step": 9647 + }, + { + "epoch": 0.8700906344410876, + "grad_norm": 1.702291582208928, + "learning_rate": 1.7433760935746465e-07, + "loss": 0.9411, + "step": 9648 + }, + { + "epoch": 0.8701808179645579, + "grad_norm": 1.4252488906071816, + "learning_rate": 1.740991323594456e-07, + "loss": 0.8824, + "step": 9649 + }, + { + "epoch": 0.8702710014880282, + "grad_norm": 1.3976552469624162, + "learning_rate": 1.7386081115735651e-07, + "loss": 0.9112, + "step": 9650 + }, + { + "epoch": 0.8703611850114984, + "grad_norm": 1.486945245982777, + "learning_rate": 1.736226457715324e-07, + "loss": 0.8997, + "step": 9651 + }, + { + "epoch": 0.8704513685349686, + "grad_norm": 1.2910551910570227, + "learning_rate": 1.7338463622229505e-07, + "loss": 0.8905, + "step": 9652 + }, + { + "epoch": 0.870541552058439, + "grad_norm": 1.492346310777597, + "learning_rate": 1.7314678252995152e-07, + "loss": 0.8879, + "step": 9653 + }, + { + "epoch": 0.8706317355819092, + "grad_norm": 1.3468272880016017, + "learning_rate": 1.7290908471479847e-07, + "loss": 0.8735, + "step": 9654 + }, + { + "epoch": 0.8707219191053794, + 
"grad_norm": 1.4897865032365645, + "learning_rate": 1.7267154279711637e-07, + "loss": 0.933, + "step": 9655 + }, + { + "epoch": 0.8708121026288497, + "grad_norm": 1.2296931653213385, + "learning_rate": 1.724341567971741e-07, + "loss": 0.9847, + "step": 9656 + }, + { + "epoch": 0.87090228615232, + "grad_norm": 1.324826846414754, + "learning_rate": 1.7219692673522657e-07, + "loss": 1.0266, + "step": 9657 + }, + { + "epoch": 0.8709924696757902, + "grad_norm": 1.3448854572773496, + "learning_rate": 1.7195985263151558e-07, + "loss": 0.959, + "step": 9658 + }, + { + "epoch": 0.8710826531992605, + "grad_norm": 1.7296421668806703, + "learning_rate": 1.7172293450626985e-07, + "loss": 0.979, + "step": 9659 + }, + { + "epoch": 0.8711728367227307, + "grad_norm": 1.5255684840491186, + "learning_rate": 1.7148617237970475e-07, + "loss": 0.9367, + "step": 9660 + }, + { + "epoch": 0.8712630202462011, + "grad_norm": 1.2292915647165947, + "learning_rate": 1.7124956627202102e-07, + "loss": 0.8933, + "step": 9661 + }, + { + "epoch": 0.8713532037696713, + "grad_norm": 1.7222999671424393, + "learning_rate": 1.7101311620340852e-07, + "loss": 0.9567, + "step": 9662 + }, + { + "epoch": 0.8714433872931415, + "grad_norm": 1.5654252707263798, + "learning_rate": 1.7077682219404155e-07, + "loss": 0.8739, + "step": 9663 + }, + { + "epoch": 0.8715335708166118, + "grad_norm": 1.7600368413537153, + "learning_rate": 1.705406842640824e-07, + "loss": 1.0633, + "step": 9664 + }, + { + "epoch": 0.8716237543400821, + "grad_norm": 1.265292512524302, + "learning_rate": 1.7030470243367946e-07, + "loss": 0.9242, + "step": 9665 + }, + { + "epoch": 0.8717139378635523, + "grad_norm": 1.2552992074237486, + "learning_rate": 1.7006887672296834e-07, + "loss": 0.8588, + "step": 9666 + }, + { + "epoch": 0.8718041213870226, + "grad_norm": 1.5637825851643603, + "learning_rate": 1.6983320715207094e-07, + "loss": 0.8876, + "step": 9667 + }, + { + "epoch": 0.8718943049104928, + "grad_norm": 1.3074898689854904, + "learning_rate": 1.6959769374109523e-07, + "loss": 0.9542, + "step": 9668 + }, + { + "epoch": 0.8719844884339631, + "grad_norm": 1.4020107798440822, + "learning_rate": 1.6936233651013754e-07, + "loss": 0.8706, + "step": 9669 + }, + { + "epoch": 0.8720746719574334, + "grad_norm": 1.346097226838566, + "learning_rate": 1.691271354792787e-07, + "loss": 0.9625, + "step": 9670 + }, + { + "epoch": 0.8721648554809036, + "grad_norm": 1.3372451004563293, + "learning_rate": 1.6889209066858866e-07, + "loss": 0.8169, + "step": 9671 + }, + { + "epoch": 0.872255039004374, + "grad_norm": 1.5415321325763471, + "learning_rate": 1.6865720209812185e-07, + "loss": 0.9783, + "step": 9672 + }, + { + "epoch": 0.8723452225278442, + "grad_norm": 1.6904146666495765, + "learning_rate": 1.684224697879204e-07, + "loss": 0.9087, + "step": 9673 + }, + { + "epoch": 0.8724354060513144, + "grad_norm": 1.5408284855723864, + "learning_rate": 1.6818789375801302e-07, + "loss": 0.9982, + "step": 9674 + }, + { + "epoch": 0.8725255895747847, + "grad_norm": 1.37875201089389, + "learning_rate": 1.679534740284152e-07, + "loss": 0.8976, + "step": 9675 + }, + { + "epoch": 0.872615773098255, + "grad_norm": 1.625315635544639, + "learning_rate": 1.6771921061912853e-07, + "loss": 0.8859, + "step": 9676 + }, + { + "epoch": 0.8727059566217252, + "grad_norm": 1.69591585313131, + "learning_rate": 1.6748510355014234e-07, + "loss": 0.8813, + "step": 9677 + }, + { + "epoch": 0.8727961401451955, + "grad_norm": 1.3096308911085366, + "learning_rate": 1.6725115284143132e-07, + "loss": 0.9436, + 
"step": 9678 + }, + { + "epoch": 0.8728863236686657, + "grad_norm": 1.7303203579416089, + "learning_rate": 1.670173585129575e-07, + "loss": 0.9171, + "step": 9679 + }, + { + "epoch": 0.872976507192136, + "grad_norm": 1.4798575475800908, + "learning_rate": 1.667837205846696e-07, + "loss": 0.9945, + "step": 9680 + }, + { + "epoch": 0.8730666907156063, + "grad_norm": 1.4860544441487065, + "learning_rate": 1.6655023907650278e-07, + "loss": 0.9323, + "step": 9681 + }, + { + "epoch": 0.8731568742390765, + "grad_norm": 1.7038487287814221, + "learning_rate": 1.6631691400837954e-07, + "loss": 0.9753, + "step": 9682 + }, + { + "epoch": 0.8732470577625467, + "grad_norm": 2.2705521249036096, + "learning_rate": 1.6608374540020752e-07, + "loss": 0.9874, + "step": 9683 + }, + { + "epoch": 0.8733372412860171, + "grad_norm": 0.6837851338190594, + "learning_rate": 1.658507332718828e-07, + "loss": 0.7565, + "step": 9684 + }, + { + "epoch": 0.8734274248094873, + "grad_norm": 1.5032285277943944, + "learning_rate": 1.656178776432864e-07, + "loss": 0.8662, + "step": 9685 + }, + { + "epoch": 0.8735176083329576, + "grad_norm": 1.3008693630858437, + "learning_rate": 1.6538517853428814e-07, + "loss": 0.9081, + "step": 9686 + }, + { + "epoch": 0.8736077918564278, + "grad_norm": 1.1745793989877258, + "learning_rate": 1.6515263596474194e-07, + "loss": 0.9154, + "step": 9687 + }, + { + "epoch": 0.8736979753798981, + "grad_norm": 1.6251327020306972, + "learning_rate": 1.6492024995449017e-07, + "loss": 0.9177, + "step": 9688 + }, + { + "epoch": 0.8737881589033684, + "grad_norm": 1.4496362295621208, + "learning_rate": 1.6468802052336116e-07, + "loss": 0.8963, + "step": 9689 + }, + { + "epoch": 0.8738783424268386, + "grad_norm": 1.427633508962677, + "learning_rate": 1.6445594769116998e-07, + "loss": 0.9588, + "step": 9690 + }, + { + "epoch": 0.8739685259503088, + "grad_norm": 1.438782824449255, + "learning_rate": 1.6422403147771836e-07, + "loss": 1.0085, + "step": 9691 + }, + { + "epoch": 0.8740587094737792, + "grad_norm": 1.3947643010496371, + "learning_rate": 1.6399227190279485e-07, + "loss": 0.9034, + "step": 9692 + }, + { + "epoch": 0.8741488929972494, + "grad_norm": 1.2700846243621235, + "learning_rate": 1.637606689861748e-07, + "loss": 0.862, + "step": 9693 + }, + { + "epoch": 0.8742390765207196, + "grad_norm": 1.6768107064313005, + "learning_rate": 1.6352922274761883e-07, + "loss": 0.8961, + "step": 9694 + }, + { + "epoch": 0.87432926004419, + "grad_norm": 1.3512625830079454, + "learning_rate": 1.6329793320687602e-07, + "loss": 0.8007, + "step": 9695 + }, + { + "epoch": 0.8744194435676602, + "grad_norm": 1.4312858610366477, + "learning_rate": 1.630668003836808e-07, + "loss": 0.9639, + "step": 9696 + }, + { + "epoch": 0.8745096270911304, + "grad_norm": 1.5004747751995473, + "learning_rate": 1.62835824297755e-07, + "loss": 0.9319, + "step": 9697 + }, + { + "epoch": 0.8745998106146007, + "grad_norm": 1.4289735772991539, + "learning_rate": 1.626050049688066e-07, + "loss": 0.9405, + "step": 9698 + }, + { + "epoch": 0.874689994138071, + "grad_norm": 1.5021343146714126, + "learning_rate": 1.623743424165309e-07, + "loss": 0.9134, + "step": 9699 + }, + { + "epoch": 0.8747801776615413, + "grad_norm": 1.775017996146083, + "learning_rate": 1.6214383666060826e-07, + "loss": 0.8801, + "step": 9700 + }, + { + "epoch": 0.8748703611850115, + "grad_norm": 1.6998088645072844, + "learning_rate": 1.619134877207078e-07, + "loss": 0.9562, + "step": 9701 + }, + { + "epoch": 0.8749605447084817, + "grad_norm": 1.5285711405592113, + 
"learning_rate": 1.616832956164831e-07, + "loss": 0.9383, + "step": 9702 + }, + { + "epoch": 0.8750507282319521, + "grad_norm": 1.2987545594914636, + "learning_rate": 1.6145326036757667e-07, + "loss": 0.9013, + "step": 9703 + }, + { + "epoch": 0.8751409117554223, + "grad_norm": 1.6332393982533104, + "learning_rate": 1.612233819936155e-07, + "loss": 0.8993, + "step": 9704 + }, + { + "epoch": 0.8752310952788925, + "grad_norm": 1.5709590814313452, + "learning_rate": 1.6099366051421414e-07, + "loss": 0.9353, + "step": 9705 + }, + { + "epoch": 0.8753212788023628, + "grad_norm": 1.3762595736441732, + "learning_rate": 1.6076409594897378e-07, + "loss": 1.0, + "step": 9706 + }, + { + "epoch": 0.8754114623258331, + "grad_norm": 0.6999697780897063, + "learning_rate": 1.605346883174823e-07, + "loss": 0.7609, + "step": 9707 + }, + { + "epoch": 0.8755016458493033, + "grad_norm": 1.5762006905910342, + "learning_rate": 1.6030543763931427e-07, + "loss": 0.9155, + "step": 9708 + }, + { + "epoch": 0.8755918293727736, + "grad_norm": 1.5956089174063044, + "learning_rate": 1.600763439340298e-07, + "loss": 0.9504, + "step": 9709 + }, + { + "epoch": 0.8756820128962438, + "grad_norm": 1.3252508591751857, + "learning_rate": 1.5984740722117707e-07, + "loss": 0.9909, + "step": 9710 + }, + { + "epoch": 0.8757721964197142, + "grad_norm": 1.1472211871374682, + "learning_rate": 1.5961862752028998e-07, + "loss": 0.9443, + "step": 9711 + }, + { + "epoch": 0.8758623799431844, + "grad_norm": 1.7506675809280166, + "learning_rate": 1.5939000485088937e-07, + "loss": 0.8429, + "step": 9712 + }, + { + "epoch": 0.8759525634666546, + "grad_norm": 1.3756896370274152, + "learning_rate": 1.5916153923248254e-07, + "loss": 1.0233, + "step": 9713 + }, + { + "epoch": 0.8760427469901249, + "grad_norm": 1.7120313936055729, + "learning_rate": 1.5893323068456342e-07, + "loss": 0.9816, + "step": 9714 + }, + { + "epoch": 0.8761329305135952, + "grad_norm": 1.3312051398370626, + "learning_rate": 1.5870507922661248e-07, + "loss": 0.9747, + "step": 9715 + }, + { + "epoch": 0.8762231140370654, + "grad_norm": 0.6218212832920323, + "learning_rate": 1.5847708487809763e-07, + "loss": 0.7856, + "step": 9716 + }, + { + "epoch": 0.8763132975605357, + "grad_norm": 1.5695643374504793, + "learning_rate": 1.5824924765847113e-07, + "loss": 1.0242, + "step": 9717 + }, + { + "epoch": 0.8764034810840059, + "grad_norm": 1.2002315655440599, + "learning_rate": 1.5802156758717478e-07, + "loss": 0.9097, + "step": 9718 + }, + { + "epoch": 0.8764936646074762, + "grad_norm": 1.3747229730530333, + "learning_rate": 1.5779404468363433e-07, + "loss": 0.9371, + "step": 9719 + }, + { + "epoch": 0.8765838481309465, + "grad_norm": 1.8467246160584292, + "learning_rate": 1.5756667896726405e-07, + "loss": 0.9912, + "step": 9720 + }, + { + "epoch": 0.8766740316544167, + "grad_norm": 1.6412383613260917, + "learning_rate": 1.5733947045746377e-07, + "loss": 0.953, + "step": 9721 + }, + { + "epoch": 0.876764215177887, + "grad_norm": 1.708839558596396, + "learning_rate": 1.5711241917362018e-07, + "loss": 1.0016, + "step": 9722 + }, + { + "epoch": 0.8768543987013573, + "grad_norm": 1.706033794971709, + "learning_rate": 1.5688552513510688e-07, + "loss": 0.8842, + "step": 9723 + }, + { + "epoch": 0.8769445822248275, + "grad_norm": 1.312961224851388, + "learning_rate": 1.5665878836128266e-07, + "loss": 0.9632, + "step": 9724 + }, + { + "epoch": 0.8770347657482978, + "grad_norm": 1.2132141369829537, + "learning_rate": 1.5643220887149554e-07, + "loss": 0.9743, + "step": 9725 + }, + { + "epoch": 
0.8771249492717681, + "grad_norm": 0.7298122454576897, + "learning_rate": 1.562057866850772e-07, + "loss": 0.8335, + "step": 9726 + }, + { + "epoch": 0.8772151327952383, + "grad_norm": 1.4139380637367027, + "learning_rate": 1.5597952182134777e-07, + "loss": 0.8077, + "step": 9727 + }, + { + "epoch": 0.8773053163187086, + "grad_norm": 1.3804384531561957, + "learning_rate": 1.557534142996133e-07, + "loss": 0.9878, + "step": 9728 + }, + { + "epoch": 0.8773954998421788, + "grad_norm": 1.466939093835594, + "learning_rate": 1.5552746413916662e-07, + "loss": 0.9673, + "step": 9729 + }, + { + "epoch": 0.8774856833656491, + "grad_norm": 1.767196158768195, + "learning_rate": 1.5530167135928697e-07, + "loss": 0.9258, + "step": 9730 + }, + { + "epoch": 0.8775758668891194, + "grad_norm": 1.5574519437475847, + "learning_rate": 1.5507603597924068e-07, + "loss": 0.8827, + "step": 9731 + }, + { + "epoch": 0.8776660504125896, + "grad_norm": 1.6069167538191995, + "learning_rate": 1.548505580182793e-07, + "loss": 0.8877, + "step": 9732 + }, + { + "epoch": 0.8777562339360598, + "grad_norm": 1.4310918695641766, + "learning_rate": 1.5462523749564271e-07, + "loss": 0.9363, + "step": 9733 + }, + { + "epoch": 0.8778464174595302, + "grad_norm": 1.4658008754592802, + "learning_rate": 1.5440007443055602e-07, + "loss": 0.932, + "step": 9734 + }, + { + "epoch": 0.8779366009830004, + "grad_norm": 1.6977937181888394, + "learning_rate": 1.541750688422314e-07, + "loss": 0.8759, + "step": 9735 + }, + { + "epoch": 0.8780267845064706, + "grad_norm": 1.408846979128777, + "learning_rate": 1.5395022074986797e-07, + "loss": 0.9432, + "step": 9736 + }, + { + "epoch": 0.8781169680299409, + "grad_norm": 1.3637908595159103, + "learning_rate": 1.5372553017265033e-07, + "loss": 0.8867, + "step": 9737 + }, + { + "epoch": 0.8782071515534112, + "grad_norm": 2.5696980863824197, + "learning_rate": 1.5350099712975116e-07, + "loss": 0.9939, + "step": 9738 + }, + { + "epoch": 0.8782973350768815, + "grad_norm": 1.4946747666569657, + "learning_rate": 1.5327662164032785e-07, + "loss": 0.9263, + "step": 9739 + }, + { + "epoch": 0.8783875186003517, + "grad_norm": 1.5375796692838852, + "learning_rate": 1.5305240372352656e-07, + "loss": 0.7899, + "step": 9740 + }, + { + "epoch": 0.8784777021238219, + "grad_norm": 1.3815273196390498, + "learning_rate": 1.5282834339847738e-07, + "loss": 0.9466, + "step": 9741 + }, + { + "epoch": 0.8785678856472923, + "grad_norm": 1.3949879558275733, + "learning_rate": 1.526044406842999e-07, + "loss": 0.9157, + "step": 9742 + }, + { + "epoch": 0.8786580691707625, + "grad_norm": 1.3347288319219728, + "learning_rate": 1.523806956000977e-07, + "loss": 0.8648, + "step": 9743 + }, + { + "epoch": 0.8787482526942327, + "grad_norm": 1.3592538889285035, + "learning_rate": 1.5215710816496197e-07, + "loss": 0.9815, + "step": 9744 + }, + { + "epoch": 0.8788384362177031, + "grad_norm": 2.0153794249928647, + "learning_rate": 1.5193367839797077e-07, + "loss": 0.8855, + "step": 9745 + }, + { + "epoch": 0.8789286197411733, + "grad_norm": 1.384032966262493, + "learning_rate": 1.5171040631818842e-07, + "loss": 0.9367, + "step": 9746 + }, + { + "epoch": 0.8790188032646435, + "grad_norm": 1.5510505807525214, + "learning_rate": 1.5148729194466547e-07, + "loss": 0.8623, + "step": 9747 + }, + { + "epoch": 0.8791089867881138, + "grad_norm": 1.4155894933967643, + "learning_rate": 1.5126433529643956e-07, + "loss": 1.0043, + "step": 9748 + }, + { + "epoch": 0.8791991703115841, + "grad_norm": 1.473245760758957, + "learning_rate": 
1.5104153639253436e-07, + "loss": 0.9376, + "step": 9749 + }, + { + "epoch": 0.8792893538350544, + "grad_norm": 0.6566449974772276, + "learning_rate": 1.5081889525196002e-07, + "loss": 0.8479, + "step": 9750 + }, + { + "epoch": 0.8793795373585246, + "grad_norm": 1.5046912584426224, + "learning_rate": 1.5059641189371398e-07, + "loss": 0.9936, + "step": 9751 + }, + { + "epoch": 0.8794697208819948, + "grad_norm": 1.4255327400079145, + "learning_rate": 1.503740863367795e-07, + "loss": 0.9515, + "step": 9752 + }, + { + "epoch": 0.8795599044054652, + "grad_norm": 1.6071685183459463, + "learning_rate": 1.50151918600127e-07, + "loss": 0.9629, + "step": 9753 + }, + { + "epoch": 0.8796500879289354, + "grad_norm": 1.451254348156943, + "learning_rate": 1.4992990870271217e-07, + "loss": 0.9842, + "step": 9754 + }, + { + "epoch": 0.8797402714524056, + "grad_norm": 1.611202868592778, + "learning_rate": 1.497080566634794e-07, + "loss": 0.9343, + "step": 9755 + }, + { + "epoch": 0.8798304549758759, + "grad_norm": 1.2918043165797148, + "learning_rate": 1.4948636250135693e-07, + "loss": 0.9493, + "step": 9756 + }, + { + "epoch": 0.8799206384993462, + "grad_norm": 1.6077735117642085, + "learning_rate": 1.4926482623526249e-07, + "loss": 1.0, + "step": 9757 + }, + { + "epoch": 0.8800108220228164, + "grad_norm": 1.5990186632270778, + "learning_rate": 1.4904344788409694e-07, + "loss": 0.9373, + "step": 9758 + }, + { + "epoch": 0.8801010055462867, + "grad_norm": 1.5810308613669521, + "learning_rate": 1.4882222746675143e-07, + "loss": 0.8926, + "step": 9759 + }, + { + "epoch": 0.8801911890697569, + "grad_norm": 1.7397522606366984, + "learning_rate": 1.4860116500210018e-07, + "loss": 0.9751, + "step": 9760 + }, + { + "epoch": 0.8802813725932273, + "grad_norm": 3.30151243519409, + "learning_rate": 1.4838026050900632e-07, + "loss": 0.9923, + "step": 9761 + }, + { + "epoch": 0.8803715561166975, + "grad_norm": 1.3637353091755475, + "learning_rate": 1.481595140063181e-07, + "loss": 0.9867, + "step": 9762 + }, + { + "epoch": 0.8804617396401677, + "grad_norm": 1.7382277576916851, + "learning_rate": 1.4793892551287136e-07, + "loss": 1.0238, + "step": 9763 + }, + { + "epoch": 0.880551923163638, + "grad_norm": 1.6692321423724725, + "learning_rate": 1.4771849504748768e-07, + "loss": 0.8651, + "step": 9764 + }, + { + "epoch": 0.8806421066871083, + "grad_norm": 1.3497052082755758, + "learning_rate": 1.4749822262897517e-07, + "loss": 1.0195, + "step": 9765 + }, + { + "epoch": 0.8807322902105785, + "grad_norm": 1.5556209962945824, + "learning_rate": 1.4727810827612895e-07, + "loss": 0.8759, + "step": 9766 + }, + { + "epoch": 0.8808224737340488, + "grad_norm": 2.6367359302102327, + "learning_rate": 1.470581520077303e-07, + "loss": 0.9404, + "step": 9767 + }, + { + "epoch": 0.8809126572575191, + "grad_norm": 1.8596415609135284, + "learning_rate": 1.4683835384254705e-07, + "loss": 0.9307, + "step": 9768 + }, + { + "epoch": 0.8810028407809893, + "grad_norm": 1.534002516027026, + "learning_rate": 1.4661871379933376e-07, + "loss": 1.0252, + "step": 9769 + }, + { + "epoch": 0.8810930243044596, + "grad_norm": 1.5319836474409938, + "learning_rate": 1.4639923189683169e-07, + "loss": 0.991, + "step": 9770 + }, + { + "epoch": 0.8811832078279298, + "grad_norm": 1.6087049552343666, + "learning_rate": 1.461799081537669e-07, + "loss": 0.9852, + "step": 9771 + }, + { + "epoch": 0.8812733913514001, + "grad_norm": 1.8060691135738332, + "learning_rate": 1.4596074258885514e-07, + "loss": 0.9904, + "step": 9772 + }, + { + "epoch": 0.8813635748748704, + 
"grad_norm": 1.460694563143255, + "learning_rate": 1.4574173522079502e-07, + "loss": 0.9078, + "step": 9773 + }, + { + "epoch": 0.8814537583983406, + "grad_norm": 1.3218126521275713, + "learning_rate": 1.4552288606827513e-07, + "loss": 0.9116, + "step": 9774 + }, + { + "epoch": 0.8815439419218108, + "grad_norm": 1.4266518639007502, + "learning_rate": 1.4530419514996761e-07, + "loss": 1.0129, + "step": 9775 + }, + { + "epoch": 0.8816341254452812, + "grad_norm": 0.7520832296003304, + "learning_rate": 1.4508566248453291e-07, + "loss": 0.8399, + "step": 9776 + }, + { + "epoch": 0.8817243089687514, + "grad_norm": 1.6218292406275066, + "learning_rate": 1.448672880906172e-07, + "loss": 0.9388, + "step": 9777 + }, + { + "epoch": 0.8818144924922217, + "grad_norm": 1.5313157826520762, + "learning_rate": 1.4464907198685382e-07, + "loss": 0.8938, + "step": 9778 + }, + { + "epoch": 0.8819046760156919, + "grad_norm": 1.22375906546394, + "learning_rate": 1.444310141918621e-07, + "loss": 0.9358, + "step": 9779 + }, + { + "epoch": 0.8819948595391622, + "grad_norm": 3.750871684170665, + "learning_rate": 1.4421311472424735e-07, + "loss": 0.9624, + "step": 9780 + }, + { + "epoch": 0.8820850430626325, + "grad_norm": 1.6112443012050945, + "learning_rate": 1.4399537360260273e-07, + "loss": 0.9696, + "step": 9781 + }, + { + "epoch": 0.8821752265861027, + "grad_norm": 1.5557240620050332, + "learning_rate": 1.4377779084550645e-07, + "loss": 0.963, + "step": 9782 + }, + { + "epoch": 0.8822654101095729, + "grad_norm": 1.2942013584374479, + "learning_rate": 1.4356036647152413e-07, + "loss": 0.9353, + "step": 9783 + }, + { + "epoch": 0.8823555936330433, + "grad_norm": 1.4029425402571278, + "learning_rate": 1.4334310049920785e-07, + "loss": 0.9218, + "step": 9784 + }, + { + "epoch": 0.8824457771565135, + "grad_norm": 0.63268530709432, + "learning_rate": 1.431259929470956e-07, + "loss": 0.7552, + "step": 9785 + }, + { + "epoch": 0.8825359606799837, + "grad_norm": 1.432157623492125, + "learning_rate": 1.4290904383371237e-07, + "loss": 1.0094, + "step": 9786 + }, + { + "epoch": 0.882626144203454, + "grad_norm": 1.6559971310457082, + "learning_rate": 1.4269225317756961e-07, + "loss": 0.919, + "step": 9787 + }, + { + "epoch": 0.8827163277269243, + "grad_norm": 1.5578564116354503, + "learning_rate": 1.424756209971645e-07, + "loss": 0.9009, + "step": 9788 + }, + { + "epoch": 0.8828065112503946, + "grad_norm": 1.65256615983852, + "learning_rate": 1.4225914731098199e-07, + "loss": 0.9328, + "step": 9789 + }, + { + "epoch": 0.8828966947738648, + "grad_norm": 1.3971543205620085, + "learning_rate": 1.4204283213749248e-07, + "loss": 0.8785, + "step": 9790 + }, + { + "epoch": 0.8829868782973351, + "grad_norm": 1.2905115941357108, + "learning_rate": 1.4182667549515315e-07, + "loss": 0.9926, + "step": 9791 + }, + { + "epoch": 0.8830770618208054, + "grad_norm": 1.5284841041424706, + "learning_rate": 1.4161067740240752e-07, + "loss": 0.9179, + "step": 9792 + }, + { + "epoch": 0.8831672453442756, + "grad_norm": 1.2848608262955616, + "learning_rate": 1.4139483787768614e-07, + "loss": 0.935, + "step": 9793 + }, + { + "epoch": 0.8832574288677458, + "grad_norm": 1.4643189978056677, + "learning_rate": 1.4117915693940584e-07, + "loss": 0.8468, + "step": 9794 + }, + { + "epoch": 0.8833476123912162, + "grad_norm": 1.3630659485365388, + "learning_rate": 1.409636346059684e-07, + "loss": 0.9297, + "step": 9795 + }, + { + "epoch": 0.8834377959146864, + "grad_norm": 1.7229899042224508, + "learning_rate": 1.4074827089576501e-07, + "loss": 0.9601, + 
"step": 9796 + }, + { + "epoch": 0.8835279794381566, + "grad_norm": 1.6920015551692948, + "learning_rate": 1.4053306582717085e-07, + "loss": 0.9579, + "step": 9797 + }, + { + "epoch": 0.8836181629616269, + "grad_norm": 1.5317834975743674, + "learning_rate": 1.4031801941854827e-07, + "loss": 0.9684, + "step": 9798 + }, + { + "epoch": 0.8837083464850972, + "grad_norm": 1.2936360921794265, + "learning_rate": 1.401031316882466e-07, + "loss": 0.9033, + "step": 9799 + }, + { + "epoch": 0.8837985300085675, + "grad_norm": 1.4590277607912892, + "learning_rate": 1.39888402654601e-07, + "loss": 0.9548, + "step": 9800 + }, + { + "epoch": 0.8838887135320377, + "grad_norm": 0.6294121218191951, + "learning_rate": 1.3967383233593344e-07, + "loss": 0.824, + "step": 9801 + }, + { + "epoch": 0.8839788970555079, + "grad_norm": 1.5518411622370087, + "learning_rate": 1.3945942075055218e-07, + "loss": 0.8989, + "step": 9802 + }, + { + "epoch": 0.8840690805789783, + "grad_norm": 1.6226734966744518, + "learning_rate": 1.3924516791675212e-07, + "loss": 0.9674, + "step": 9803 + }, + { + "epoch": 0.8841592641024485, + "grad_norm": 1.288966782223995, + "learning_rate": 1.3903107385281487e-07, + "loss": 0.8495, + "step": 9804 + }, + { + "epoch": 0.8842494476259187, + "grad_norm": 1.366088992628348, + "learning_rate": 1.3881713857700717e-07, + "loss": 0.9475, + "step": 9805 + }, + { + "epoch": 0.884339631149389, + "grad_norm": 1.7090378060843863, + "learning_rate": 1.3860336210758372e-07, + "loss": 0.9163, + "step": 9806 + }, + { + "epoch": 0.8844298146728593, + "grad_norm": 1.3628499570307522, + "learning_rate": 1.3838974446278506e-07, + "loss": 0.8916, + "step": 9807 + }, + { + "epoch": 0.8845199981963295, + "grad_norm": 1.346185207525627, + "learning_rate": 1.3817628566083817e-07, + "loss": 0.9132, + "step": 9808 + }, + { + "epoch": 0.8846101817197998, + "grad_norm": 1.2509492607621637, + "learning_rate": 1.3796298571995712e-07, + "loss": 0.9513, + "step": 9809 + }, + { + "epoch": 0.88470036524327, + "grad_norm": 1.5602885809542808, + "learning_rate": 1.377498446583405e-07, + "loss": 0.8416, + "step": 9810 + }, + { + "epoch": 0.8847905487667403, + "grad_norm": 1.378343807851037, + "learning_rate": 1.3753686249417596e-07, + "loss": 0.9127, + "step": 9811 + }, + { + "epoch": 0.8848807322902106, + "grad_norm": 2.1870050142751385, + "learning_rate": 1.373240392456354e-07, + "loss": 0.8891, + "step": 9812 + }, + { + "epoch": 0.8849709158136808, + "grad_norm": 1.4307280542429277, + "learning_rate": 1.37111374930879e-07, + "loss": 0.8783, + "step": 9813 + }, + { + "epoch": 0.885061099337151, + "grad_norm": 1.5018366854094767, + "learning_rate": 1.3689886956805176e-07, + "loss": 0.871, + "step": 9814 + }, + { + "epoch": 0.8851512828606214, + "grad_norm": 1.2586925105553917, + "learning_rate": 1.3668652317528585e-07, + "loss": 0.9266, + "step": 9815 + }, + { + "epoch": 0.8852414663840916, + "grad_norm": 1.5736179009732794, + "learning_rate": 1.3647433577070012e-07, + "loss": 0.9497, + "step": 9816 + }, + { + "epoch": 0.8853316499075619, + "grad_norm": 0.7191187285944353, + "learning_rate": 1.3626230737239942e-07, + "loss": 0.7942, + "step": 9817 + }, + { + "epoch": 0.8854218334310322, + "grad_norm": 0.757292460913856, + "learning_rate": 1.3605043799847527e-07, + "loss": 0.8079, + "step": 9818 + }, + { + "epoch": 0.8855120169545024, + "grad_norm": 1.2908282122295955, + "learning_rate": 1.3583872766700567e-07, + "loss": 0.8691, + "step": 9819 + }, + { + "epoch": 0.8856022004779727, + "grad_norm": 1.4127764676027519, + 
"learning_rate": 1.3562717639605437e-07, + "loss": 0.924, + "step": 9820 + }, + { + "epoch": 0.8856923840014429, + "grad_norm": 1.4420789683989683, + "learning_rate": 1.3541578420367229e-07, + "loss": 0.9055, + "step": 9821 + }, + { + "epoch": 0.8857825675249132, + "grad_norm": 1.387116195245131, + "learning_rate": 1.3520455110789697e-07, + "loss": 0.9549, + "step": 9822 + }, + { + "epoch": 0.8858727510483835, + "grad_norm": 1.3829319358152068, + "learning_rate": 1.3499347712675158e-07, + "loss": 0.9136, + "step": 9823 + }, + { + "epoch": 0.8859629345718537, + "grad_norm": 1.4511887150194256, + "learning_rate": 1.3478256227824635e-07, + "loss": 0.9054, + "step": 9824 + }, + { + "epoch": 0.8860531180953239, + "grad_norm": 1.4204018475554085, + "learning_rate": 1.3457180658037759e-07, + "loss": 0.849, + "step": 9825 + }, + { + "epoch": 0.8861433016187943, + "grad_norm": 1.2977503384740539, + "learning_rate": 1.3436121005112843e-07, + "loss": 0.9651, + "step": 9826 + }, + { + "epoch": 0.8862334851422645, + "grad_norm": 1.4415572586146213, + "learning_rate": 1.3415077270846719e-07, + "loss": 0.9086, + "step": 9827 + }, + { + "epoch": 0.8863236686657348, + "grad_norm": 2.04066877566435, + "learning_rate": 1.3394049457035105e-07, + "loss": 1.045, + "step": 9828 + }, + { + "epoch": 0.886413852189205, + "grad_norm": 1.3970868711638185, + "learning_rate": 1.3373037565472034e-07, + "loss": 0.9489, + "step": 9829 + }, + { + "epoch": 0.8865040357126753, + "grad_norm": 1.2752286332028102, + "learning_rate": 1.3352041597950537e-07, + "loss": 0.9157, + "step": 9830 + }, + { + "epoch": 0.8865942192361456, + "grad_norm": 1.6257370964334825, + "learning_rate": 1.333106155626196e-07, + "loss": 0.9256, + "step": 9831 + }, + { + "epoch": 0.8866844027596158, + "grad_norm": 1.4923257014801077, + "learning_rate": 1.331009744219651e-07, + "loss": 0.8797, + "step": 9832 + }, + { + "epoch": 0.886774586283086, + "grad_norm": 0.6966435411560551, + "learning_rate": 1.3289149257542943e-07, + "loss": 0.8343, + "step": 9833 + }, + { + "epoch": 0.8868647698065564, + "grad_norm": 1.421876676789018, + "learning_rate": 1.3268217004088666e-07, + "loss": 0.8372, + "step": 9834 + }, + { + "epoch": 0.8869549533300266, + "grad_norm": 1.5034329548974994, + "learning_rate": 1.3247300683619788e-07, + "loss": 0.8284, + "step": 9835 + }, + { + "epoch": 0.8870451368534968, + "grad_norm": 1.6660664987790499, + "learning_rate": 1.3226400297920903e-07, + "loss": 0.9151, + "step": 9836 + }, + { + "epoch": 0.8871353203769671, + "grad_norm": 1.407663567357073, + "learning_rate": 1.3205515848775428e-07, + "loss": 0.9129, + "step": 9837 + }, + { + "epoch": 0.8872255039004374, + "grad_norm": 1.5066091054003423, + "learning_rate": 1.3184647337965316e-07, + "loss": 0.8729, + "step": 9838 + }, + { + "epoch": 0.8873156874239077, + "grad_norm": 1.2381019823533421, + "learning_rate": 1.3163794767271163e-07, + "loss": 0.8867, + "step": 9839 + }, + { + "epoch": 0.8874058709473779, + "grad_norm": 1.2905814729236524, + "learning_rate": 1.314295813847226e-07, + "loss": 0.9052, + "step": 9840 + }, + { + "epoch": 0.8874960544708482, + "grad_norm": 1.5952638654179525, + "learning_rate": 1.3122137453346515e-07, + "loss": 1.0378, + "step": 9841 + }, + { + "epoch": 0.8875862379943185, + "grad_norm": 1.5240170672404671, + "learning_rate": 1.3101332713670376e-07, + "loss": 0.9885, + "step": 9842 + }, + { + "epoch": 0.8876764215177887, + "grad_norm": 0.6619575123780534, + "learning_rate": 1.3080543921219133e-07, + "loss": 0.8213, + "step": 9843 + }, + { + "epoch": 
0.8877666050412589, + "grad_norm": 1.5866830510941738, + "learning_rate": 1.3059771077766478e-07, + "loss": 0.9349, + "step": 9844 + }, + { + "epoch": 0.8878567885647293, + "grad_norm": 1.597807562268523, + "learning_rate": 1.3039014185085018e-07, + "loss": 0.9744, + "step": 9845 + }, + { + "epoch": 0.8879469720881995, + "grad_norm": 1.7573020363414855, + "learning_rate": 1.301827324494571e-07, + "loss": 0.9449, + "step": 9846 + }, + { + "epoch": 0.8880371556116697, + "grad_norm": 1.9947047467795707, + "learning_rate": 1.2997548259118342e-07, + "loss": 0.9667, + "step": 9847 + }, + { + "epoch": 0.88812733913514, + "grad_norm": 1.491582298379186, + "learning_rate": 1.2976839229371272e-07, + "loss": 0.967, + "step": 9848 + }, + { + "epoch": 0.8882175226586103, + "grad_norm": 0.6353481495504946, + "learning_rate": 1.2956146157471515e-07, + "loss": 0.8138, + "step": 9849 + }, + { + "epoch": 0.8883077061820805, + "grad_norm": 1.3067073605406083, + "learning_rate": 1.2935469045184745e-07, + "loss": 0.9576, + "step": 9850 + }, + { + "epoch": 0.8883978897055508, + "grad_norm": 1.6961166654397901, + "learning_rate": 1.291480789427517e-07, + "loss": 0.9137, + "step": 9851 + }, + { + "epoch": 0.888488073229021, + "grad_norm": 1.6071539061030284, + "learning_rate": 1.2894162706505807e-07, + "loss": 0.9737, + "step": 9852 + }, + { + "epoch": 0.8885782567524914, + "grad_norm": 1.3730015971069605, + "learning_rate": 1.2873533483638155e-07, + "loss": 0.9337, + "step": 9853 + }, + { + "epoch": 0.8886684402759616, + "grad_norm": 1.4184196322791303, + "learning_rate": 1.285292022743243e-07, + "loss": 0.8817, + "step": 9854 + }, + { + "epoch": 0.8887586237994318, + "grad_norm": 1.677693376495389, + "learning_rate": 1.2832322939647467e-07, + "loss": 0.8808, + "step": 9855 + }, + { + "epoch": 0.8888488073229021, + "grad_norm": 1.5718515170639265, + "learning_rate": 1.281174162204075e-07, + "loss": 0.9199, + "step": 9856 + }, + { + "epoch": 0.8889389908463724, + "grad_norm": 1.293606741893306, + "learning_rate": 1.2791176276368366e-07, + "loss": 0.8172, + "step": 9857 + }, + { + "epoch": 0.8890291743698426, + "grad_norm": 1.2677123670218664, + "learning_rate": 1.2770626904385128e-07, + "loss": 0.9459, + "step": 9858 + }, + { + "epoch": 0.8891193578933129, + "grad_norm": 1.2952994001989273, + "learning_rate": 1.2750093507844306e-07, + "loss": 0.7961, + "step": 9859 + }, + { + "epoch": 0.8892095414167831, + "grad_norm": 1.3937681872630974, + "learning_rate": 1.272957608849805e-07, + "loss": 0.9763, + "step": 9860 + }, + { + "epoch": 0.8892997249402534, + "grad_norm": 1.5136776241070256, + "learning_rate": 1.270907464809694e-07, + "loss": 0.9153, + "step": 9861 + }, + { + "epoch": 0.8893899084637237, + "grad_norm": 1.37125171935983, + "learning_rate": 1.2688589188390285e-07, + "loss": 0.9386, + "step": 9862 + }, + { + "epoch": 0.8894800919871939, + "grad_norm": 1.6122138161238901, + "learning_rate": 1.2668119711126023e-07, + "loss": 0.9442, + "step": 9863 + }, + { + "epoch": 0.8895702755106643, + "grad_norm": 1.2991993309124976, + "learning_rate": 1.2647666218050735e-07, + "loss": 0.9362, + "step": 9864 + }, + { + "epoch": 0.8896604590341345, + "grad_norm": 1.5949599964781564, + "learning_rate": 1.2627228710909643e-07, + "loss": 0.8997, + "step": 9865 + }, + { + "epoch": 0.8897506425576047, + "grad_norm": 1.3072511988401068, + "learning_rate": 1.260680719144649e-07, + "loss": 0.909, + "step": 9866 + }, + { + "epoch": 0.889840826081075, + "grad_norm": 2.0138230660157412, + "learning_rate": 1.2586401661403877e-07, + 
"loss": 0.8613, + "step": 9867 + }, + { + "epoch": 0.8899310096045453, + "grad_norm": 1.5991846063246078, + "learning_rate": 1.2566012122522817e-07, + "loss": 0.9076, + "step": 9868 + }, + { + "epoch": 0.8900211931280155, + "grad_norm": 1.707980662283319, + "learning_rate": 1.254563857654316e-07, + "loss": 0.9159, + "step": 9869 + }, + { + "epoch": 0.8901113766514858, + "grad_norm": 1.5292628102996875, + "learning_rate": 1.2525281025203205e-07, + "loss": 0.8251, + "step": 9870 + }, + { + "epoch": 0.890201560174956, + "grad_norm": 1.6226841490270563, + "learning_rate": 1.2504939470240006e-07, + "loss": 0.9155, + "step": 9871 + }, + { + "epoch": 0.8902917436984263, + "grad_norm": 1.370744927095846, + "learning_rate": 1.2484613913389196e-07, + "loss": 1.0024, + "step": 9872 + }, + { + "epoch": 0.8903819272218966, + "grad_norm": 1.4621594872168868, + "learning_rate": 1.2464304356385057e-07, + "loss": 0.8518, + "step": 9873 + }, + { + "epoch": 0.8904721107453668, + "grad_norm": 1.7449528254387257, + "learning_rate": 1.2444010800960558e-07, + "loss": 1.0009, + "step": 9874 + }, + { + "epoch": 0.890562294268837, + "grad_norm": 1.2994462227579269, + "learning_rate": 1.2423733248847267e-07, + "loss": 0.9156, + "step": 9875 + }, + { + "epoch": 0.8906524777923074, + "grad_norm": 1.4702433035739702, + "learning_rate": 1.2403471701775293e-07, + "loss": 0.8798, + "step": 9876 + }, + { + "epoch": 0.8907426613157776, + "grad_norm": 1.4084841041819014, + "learning_rate": 1.2383226161473515e-07, + "loss": 0.9725, + "step": 9877 + }, + { + "epoch": 0.8908328448392479, + "grad_norm": 1.6933145308139013, + "learning_rate": 1.2362996629669376e-07, + "loss": 1.0144, + "step": 9878 + }, + { + "epoch": 0.8909230283627181, + "grad_norm": 1.3356593773879926, + "learning_rate": 1.2342783108089007e-07, + "loss": 0.9451, + "step": 9879 + }, + { + "epoch": 0.8910132118861884, + "grad_norm": 1.5936190981494205, + "learning_rate": 1.2322585598457135e-07, + "loss": 0.9903, + "step": 9880 + }, + { + "epoch": 0.8911033954096587, + "grad_norm": 1.136857486399115, + "learning_rate": 1.2302404102497034e-07, + "loss": 0.9171, + "step": 9881 + }, + { + "epoch": 0.8911935789331289, + "grad_norm": 1.6585483798174807, + "learning_rate": 1.228223862193083e-07, + "loss": 0.8702, + "step": 9882 + }, + { + "epoch": 0.8912837624565991, + "grad_norm": 2.2499801846797265, + "learning_rate": 1.2262089158479038e-07, + "loss": 0.9473, + "step": 9883 + }, + { + "epoch": 0.8913739459800695, + "grad_norm": 1.5823275065009739, + "learning_rate": 1.2241955713861042e-07, + "loss": 0.9752, + "step": 9884 + }, + { + "epoch": 0.8914641295035397, + "grad_norm": 1.4623324012906997, + "learning_rate": 1.222183828979464e-07, + "loss": 0.878, + "step": 9885 + }, + { + "epoch": 0.8915543130270099, + "grad_norm": 2.092265841418287, + "learning_rate": 1.2201736887996372e-07, + "loss": 0.9304, + "step": 9886 + }, + { + "epoch": 0.8916444965504803, + "grad_norm": 1.4408449149930327, + "learning_rate": 1.2181651510181444e-07, + "loss": 1.0092, + "step": 9887 + }, + { + "epoch": 0.8917346800739505, + "grad_norm": 1.3541747410851397, + "learning_rate": 1.2161582158063622e-07, + "loss": 0.9624, + "step": 9888 + }, + { + "epoch": 0.8918248635974207, + "grad_norm": 1.4280261616968368, + "learning_rate": 1.214152883335533e-07, + "loss": 0.8502, + "step": 9889 + }, + { + "epoch": 0.891915047120891, + "grad_norm": 1.5428221041369052, + "learning_rate": 1.2121491537767648e-07, + "loss": 0.8844, + "step": 9890 + }, + { + "epoch": 0.8920052306443613, + "grad_norm": 
1.3023249084171509, + "learning_rate": 1.2101470273010294e-07, + "loss": 0.9723, + "step": 9891 + }, + { + "epoch": 0.8920954141678316, + "grad_norm": 1.722996269456718, + "learning_rate": 1.2081465040791528e-07, + "loss": 0.9151, + "step": 9892 + }, + { + "epoch": 0.8921855976913018, + "grad_norm": 1.546915535443874, + "learning_rate": 1.2061475842818335e-07, + "loss": 0.9006, + "step": 9893 + }, + { + "epoch": 0.892275781214772, + "grad_norm": 1.2926446329796215, + "learning_rate": 1.2041502680796313e-07, + "loss": 0.9075, + "step": 9894 + }, + { + "epoch": 0.8923659647382424, + "grad_norm": 1.3056542931863855, + "learning_rate": 1.2021545556429648e-07, + "loss": 0.9415, + "step": 9895 + }, + { + "epoch": 0.8924561482617126, + "grad_norm": 1.769982960290301, + "learning_rate": 1.2001604471421245e-07, + "loss": 0.9323, + "step": 9896 + }, + { + "epoch": 0.8925463317851828, + "grad_norm": 1.2943557721620433, + "learning_rate": 1.1981679427472567e-07, + "loss": 0.9875, + "step": 9897 + }, + { + "epoch": 0.8926365153086531, + "grad_norm": 1.4336790636335668, + "learning_rate": 1.196177042628368e-07, + "loss": 0.9847, + "step": 9898 + }, + { + "epoch": 0.8927266988321234, + "grad_norm": 1.6798232622505473, + "learning_rate": 1.194187746955344e-07, + "loss": 0.9426, + "step": 9899 + }, + { + "epoch": 0.8928168823555936, + "grad_norm": 1.3173784237527666, + "learning_rate": 1.1922000558979094e-07, + "loss": 0.9128, + "step": 9900 + }, + { + "epoch": 0.8929070658790639, + "grad_norm": 1.1896854668558041, + "learning_rate": 1.1902139696256752e-07, + "loss": 0.9683, + "step": 9901 + }, + { + "epoch": 0.8929972494025341, + "grad_norm": 1.562777609005629, + "learning_rate": 1.188229488308099e-07, + "loss": 0.872, + "step": 9902 + }, + { + "epoch": 0.8930874329260045, + "grad_norm": 0.558850156183657, + "learning_rate": 1.1862466121145098e-07, + "loss": 0.7702, + "step": 9903 + }, + { + "epoch": 0.8931776164494747, + "grad_norm": 1.8851787845610057, + "learning_rate": 1.184265341214099e-07, + "loss": 0.8803, + "step": 9904 + }, + { + "epoch": 0.8932677999729449, + "grad_norm": 1.429345095129395, + "learning_rate": 1.182285675775918e-07, + "loss": 0.9608, + "step": 9905 + }, + { + "epoch": 0.8933579834964152, + "grad_norm": 1.940257171810281, + "learning_rate": 1.1803076159688851e-07, + "loss": 0.917, + "step": 9906 + }, + { + "epoch": 0.8934481670198855, + "grad_norm": 1.376953125, + "learning_rate": 1.1783311619617741e-07, + "loss": 0.966, + "step": 9907 + }, + { + "epoch": 0.8935383505433557, + "grad_norm": 1.4323169312994284, + "learning_rate": 1.1763563139232302e-07, + "loss": 0.9032, + "step": 9908 + }, + { + "epoch": 0.893628534066826, + "grad_norm": 1.6676534671879963, + "learning_rate": 1.1743830720217562e-07, + "loss": 0.8761, + "step": 9909 + }, + { + "epoch": 0.8937187175902963, + "grad_norm": 1.6365392035646678, + "learning_rate": 1.1724114364257243e-07, + "loss": 0.9141, + "step": 9910 + }, + { + "epoch": 0.8938089011137665, + "grad_norm": 1.3835812603102993, + "learning_rate": 1.1704414073033619e-07, + "loss": 0.9394, + "step": 9911 + }, + { + "epoch": 0.8938990846372368, + "grad_norm": 1.5311146111872038, + "learning_rate": 1.1684729848227636e-07, + "loss": 0.9763, + "step": 9912 + }, + { + "epoch": 0.893989268160707, + "grad_norm": 1.3594738277597136, + "learning_rate": 1.1665061691518884e-07, + "loss": 0.9865, + "step": 9913 + }, + { + "epoch": 0.8940794516841774, + "grad_norm": 1.7584856566971396, + "learning_rate": 1.1645409604585532e-07, + "loss": 0.9337, + "step": 9914 + }, + { + 
"epoch": 0.8941696352076476, + "grad_norm": 1.3171549855265787, + "learning_rate": 1.162577358910437e-07, + "loss": 0.9485, + "step": 9915 + }, + { + "epoch": 0.8942598187311178, + "grad_norm": 1.4611848835921093, + "learning_rate": 1.160615364675095e-07, + "loss": 0.9155, + "step": 9916 + }, + { + "epoch": 0.894350002254588, + "grad_norm": 0.6278887508450274, + "learning_rate": 1.1586549779199262e-07, + "loss": 0.7891, + "step": 9917 + }, + { + "epoch": 0.8944401857780584, + "grad_norm": 1.3669121165179008, + "learning_rate": 1.1566961988122037e-07, + "loss": 0.9968, + "step": 9918 + }, + { + "epoch": 0.8945303693015286, + "grad_norm": 2.0178571066874467, + "learning_rate": 1.1547390275190627e-07, + "loss": 0.8997, + "step": 9919 + }, + { + "epoch": 0.8946205528249989, + "grad_norm": 1.6340143626409454, + "learning_rate": 1.1527834642075007e-07, + "loss": 0.8827, + "step": 9920 + }, + { + "epoch": 0.8947107363484691, + "grad_norm": 1.5580983533849206, + "learning_rate": 1.1508295090443797e-07, + "loss": 0.9161, + "step": 9921 + }, + { + "epoch": 0.8948009198719394, + "grad_norm": 1.9920288621159714, + "learning_rate": 1.148877162196411e-07, + "loss": 0.8803, + "step": 9922 + }, + { + "epoch": 0.8948911033954097, + "grad_norm": 1.850565903688559, + "learning_rate": 1.1469264238301924e-07, + "loss": 0.868, + "step": 9923 + }, + { + "epoch": 0.8949812869188799, + "grad_norm": 1.4546576999844827, + "learning_rate": 1.1449772941121638e-07, + "loss": 0.9562, + "step": 9924 + }, + { + "epoch": 0.8950714704423501, + "grad_norm": 1.617531781245498, + "learning_rate": 1.1430297732086369e-07, + "loss": 0.9044, + "step": 9925 + }, + { + "epoch": 0.8951616539658205, + "grad_norm": 1.473589047742065, + "learning_rate": 1.1410838612857876e-07, + "loss": 0.9318, + "step": 9926 + }, + { + "epoch": 0.8952518374892907, + "grad_norm": 1.7855352420966222, + "learning_rate": 1.1391395585096497e-07, + "loss": 1.0267, + "step": 9927 + }, + { + "epoch": 0.895342021012761, + "grad_norm": 1.5046124274202433, + "learning_rate": 1.1371968650461216e-07, + "loss": 0.8754, + "step": 9928 + }, + { + "epoch": 0.8954322045362312, + "grad_norm": 0.7227131589184277, + "learning_rate": 1.1352557810609687e-07, + "loss": 0.7983, + "step": 9929 + }, + { + "epoch": 0.8955223880597015, + "grad_norm": 1.4436058897285675, + "learning_rate": 1.1333163067198048e-07, + "loss": 0.9203, + "step": 9930 + }, + { + "epoch": 0.8956125715831718, + "grad_norm": 1.2746426081438627, + "learning_rate": 1.1313784421881311e-07, + "loss": 1.0003, + "step": 9931 + }, + { + "epoch": 0.895702755106642, + "grad_norm": 1.9631875795666336, + "learning_rate": 1.1294421876312865e-07, + "loss": 0.966, + "step": 9932 + }, + { + "epoch": 0.8957929386301122, + "grad_norm": 1.6338924505215193, + "learning_rate": 1.1275075432144831e-07, + "loss": 0.906, + "step": 9933 + }, + { + "epoch": 0.8958831221535826, + "grad_norm": 1.6198898808999536, + "learning_rate": 1.1255745091028002e-07, + "loss": 0.95, + "step": 9934 + }, + { + "epoch": 0.8959733056770528, + "grad_norm": 1.4288408451117955, + "learning_rate": 1.1236430854611723e-07, + "loss": 0.9502, + "step": 9935 + }, + { + "epoch": 0.896063489200523, + "grad_norm": 1.5011595376843083, + "learning_rate": 1.1217132724544032e-07, + "loss": 0.9077, + "step": 9936 + }, + { + "epoch": 0.8961536727239934, + "grad_norm": 1.6333493896914837, + "learning_rate": 1.1197850702471434e-07, + "loss": 0.9845, + "step": 9937 + }, + { + "epoch": 0.8962438562474636, + "grad_norm": 1.5065900363395666, + "learning_rate": 
1.1178584790039348e-07, + "loss": 1.0064, + "step": 9938 + }, + { + "epoch": 0.8963340397709338, + "grad_norm": 1.380744681120682, + "learning_rate": 1.1159334988891478e-07, + "loss": 0.8837, + "step": 9939 + }, + { + "epoch": 0.8964242232944041, + "grad_norm": 2.079984707592722, + "learning_rate": 1.1140101300670446e-07, + "loss": 0.9128, + "step": 9940 + }, + { + "epoch": 0.8965144068178744, + "grad_norm": 1.3548738784792436, + "learning_rate": 1.1120883727017338e-07, + "loss": 0.8009, + "step": 9941 + }, + { + "epoch": 0.8966045903413447, + "grad_norm": 1.558305603773402, + "learning_rate": 1.1101682269571889e-07, + "loss": 0.9901, + "step": 9942 + }, + { + "epoch": 0.8966947738648149, + "grad_norm": 1.5445619494214486, + "learning_rate": 1.1082496929972473e-07, + "loss": 0.9166, + "step": 9943 + }, + { + "epoch": 0.8967849573882851, + "grad_norm": 1.4314693332842232, + "learning_rate": 1.1063327709856096e-07, + "loss": 0.9101, + "step": 9944 + }, + { + "epoch": 0.8968751409117555, + "grad_norm": 1.6138001870433563, + "learning_rate": 1.1044174610858403e-07, + "loss": 0.8179, + "step": 9945 + }, + { + "epoch": 0.8969653244352257, + "grad_norm": 1.6250596402301085, + "learning_rate": 1.1025037634613643e-07, + "loss": 0.8883, + "step": 9946 + }, + { + "epoch": 0.8970555079586959, + "grad_norm": 1.2735163190355487, + "learning_rate": 1.1005916782754643e-07, + "loss": 0.9614, + "step": 9947 + }, + { + "epoch": 0.8971456914821662, + "grad_norm": 1.6397091443636402, + "learning_rate": 1.0986812056912919e-07, + "loss": 0.9175, + "step": 9948 + }, + { + "epoch": 0.8972358750056365, + "grad_norm": 1.6425792861916229, + "learning_rate": 1.0967723458718613e-07, + "loss": 0.8133, + "step": 9949 + }, + { + "epoch": 0.8973260585291067, + "grad_norm": 1.8022089199634095, + "learning_rate": 1.0948650989800445e-07, + "loss": 0.9019, + "step": 9950 + }, + { + "epoch": 0.897416242052577, + "grad_norm": 1.3257370967852111, + "learning_rate": 1.0929594651785823e-07, + "loss": 0.9142, + "step": 9951 + }, + { + "epoch": 0.8975064255760472, + "grad_norm": 1.4350316790496642, + "learning_rate": 1.0910554446300646e-07, + "loss": 0.9851, + "step": 9952 + }, + { + "epoch": 0.8975966090995176, + "grad_norm": 1.3767778866771372, + "learning_rate": 1.089153037496966e-07, + "loss": 0.8327, + "step": 9953 + }, + { + "epoch": 0.8976867926229878, + "grad_norm": 2.0994107600463026, + "learning_rate": 1.0872522439415966e-07, + "loss": 0.954, + "step": 9954 + }, + { + "epoch": 0.897776976146458, + "grad_norm": 1.6571313204622125, + "learning_rate": 1.0853530641261554e-07, + "loss": 0.8819, + "step": 9955 + }, + { + "epoch": 0.8978671596699282, + "grad_norm": 1.3644160054047842, + "learning_rate": 1.083455498212682e-07, + "loss": 0.9455, + "step": 9956 + }, + { + "epoch": 0.8979573431933986, + "grad_norm": 1.569203226950325, + "learning_rate": 1.0815595463630911e-07, + "loss": 0.9727, + "step": 9957 + }, + { + "epoch": 0.8980475267168688, + "grad_norm": 1.4343455159932275, + "learning_rate": 1.0796652087391556e-07, + "loss": 0.8079, + "step": 9958 + }, + { + "epoch": 0.8981377102403391, + "grad_norm": 1.343774662235075, + "learning_rate": 1.0777724855025083e-07, + "loss": 1.0264, + "step": 9959 + }, + { + "epoch": 0.8982278937638094, + "grad_norm": 1.3857510050949424, + "learning_rate": 1.075881376814649e-07, + "loss": 0.932, + "step": 9960 + }, + { + "epoch": 0.8983180772872796, + "grad_norm": 1.374306677368659, + "learning_rate": 1.0739918828369377e-07, + "loss": 0.9884, + "step": 9961 + }, + { + "epoch": 
0.8984082608107499, + "grad_norm": 1.6440853864581024, + "learning_rate": 1.0721040037305983e-07, + "loss": 0.9333, + "step": 9962 + }, + { + "epoch": 0.8984984443342201, + "grad_norm": 1.4174729838815905, + "learning_rate": 1.0702177396567114e-07, + "loss": 0.8861, + "step": 9963 + }, + { + "epoch": 0.8985886278576904, + "grad_norm": 0.6321487949306447, + "learning_rate": 1.0683330907762233e-07, + "loss": 0.7802, + "step": 9964 + }, + { + "epoch": 0.8986788113811607, + "grad_norm": 1.3670821231013905, + "learning_rate": 1.0664500572499435e-07, + "loss": 0.8713, + "step": 9965 + }, + { + "epoch": 0.8987689949046309, + "grad_norm": 1.558169505573564, + "learning_rate": 1.0645686392385455e-07, + "loss": 0.8806, + "step": 9966 + }, + { + "epoch": 0.8988591784281011, + "grad_norm": 1.273717276140661, + "learning_rate": 1.0626888369025588e-07, + "loss": 0.8701, + "step": 9967 + }, + { + "epoch": 0.8989493619515715, + "grad_norm": 1.3095851957085614, + "learning_rate": 1.0608106504023817e-07, + "loss": 0.967, + "step": 9968 + }, + { + "epoch": 0.8990395454750417, + "grad_norm": 2.4163691787022077, + "learning_rate": 1.0589340798982637e-07, + "loss": 0.9168, + "step": 9969 + }, + { + "epoch": 0.899129728998512, + "grad_norm": 1.345149265359351, + "learning_rate": 1.057059125550337e-07, + "loss": 0.8776, + "step": 9970 + }, + { + "epoch": 0.8992199125219822, + "grad_norm": 1.188877962607883, + "learning_rate": 1.0551857875185732e-07, + "loss": 0.9428, + "step": 9971 + }, + { + "epoch": 0.8993100960454525, + "grad_norm": 1.25511040312135, + "learning_rate": 1.0533140659628181e-07, + "loss": 0.8133, + "step": 9972 + }, + { + "epoch": 0.8994002795689228, + "grad_norm": 1.3288194075190871, + "learning_rate": 1.0514439610427772e-07, + "loss": 0.9867, + "step": 9973 + }, + { + "epoch": 0.899490463092393, + "grad_norm": 1.420699839133238, + "learning_rate": 1.0495754729180206e-07, + "loss": 0.9602, + "step": 9974 + }, + { + "epoch": 0.8995806466158632, + "grad_norm": 1.2926734517955374, + "learning_rate": 1.0477086017479741e-07, + "loss": 1.0316, + "step": 9975 + }, + { + "epoch": 0.8996708301393336, + "grad_norm": 1.4906810725959871, + "learning_rate": 1.0458433476919327e-07, + "loss": 0.8934, + "step": 9976 + }, + { + "epoch": 0.8997610136628038, + "grad_norm": 1.2176924787957135, + "learning_rate": 1.0439797109090509e-07, + "loss": 0.9616, + "step": 9977 + }, + { + "epoch": 0.899851197186274, + "grad_norm": 1.4823138734177665, + "learning_rate": 1.0421176915583396e-07, + "loss": 0.897, + "step": 9978 + }, + { + "epoch": 0.8999413807097443, + "grad_norm": 1.5288092175521724, + "learning_rate": 1.0402572897986828e-07, + "loss": 0.9585, + "step": 9979 + }, + { + "epoch": 0.9000315642332146, + "grad_norm": 0.6050112811132341, + "learning_rate": 1.0383985057888134e-07, + "loss": 0.7936, + "step": 9980 + }, + { + "epoch": 0.9001217477566849, + "grad_norm": 1.6996513429903446, + "learning_rate": 1.036541339687338e-07, + "loss": 0.8448, + "step": 9981 + }, + { + "epoch": 0.9002119312801551, + "grad_norm": 0.7028633902559522, + "learning_rate": 1.0346857916527186e-07, + "loss": 0.7746, + "step": 9982 + }, + { + "epoch": 0.9003021148036254, + "grad_norm": 1.3586939717158002, + "learning_rate": 1.0328318618432819e-07, + "loss": 0.9362, + "step": 9983 + }, + { + "epoch": 0.9003922983270957, + "grad_norm": 1.4561219023983414, + "learning_rate": 1.0309795504172148e-07, + "loss": 0.9958, + "step": 9984 + }, + { + "epoch": 0.9004824818505659, + "grad_norm": 1.5289284370695482, + "learning_rate": 
1.0291288575325685e-07, + "loss": 0.9086, + "step": 9985 + }, + { + "epoch": 0.9005726653740361, + "grad_norm": 1.6236469798190996, + "learning_rate": 1.0272797833472502e-07, + "loss": 0.8954, + "step": 9986 + }, + { + "epoch": 0.9006628488975065, + "grad_norm": 0.696933934580711, + "learning_rate": 1.0254323280190335e-07, + "loss": 0.7792, + "step": 9987 + }, + { + "epoch": 0.9007530324209767, + "grad_norm": 1.6805395050263179, + "learning_rate": 1.023586491705557e-07, + "loss": 0.9099, + "step": 9988 + }, + { + "epoch": 0.9008432159444469, + "grad_norm": 1.4666306646581146, + "learning_rate": 1.0217422745643145e-07, + "loss": 0.8717, + "step": 9989 + }, + { + "epoch": 0.9009333994679172, + "grad_norm": 1.4231481197256703, + "learning_rate": 1.0198996767526691e-07, + "loss": 0.8554, + "step": 9990 + }, + { + "epoch": 0.9010235829913875, + "grad_norm": 1.4929436171016655, + "learning_rate": 1.018058698427835e-07, + "loss": 0.9172, + "step": 9991 + }, + { + "epoch": 0.9011137665148578, + "grad_norm": 1.2935004394436131, + "learning_rate": 1.0162193397469021e-07, + "loss": 0.9735, + "step": 9992 + }, + { + "epoch": 0.901203950038328, + "grad_norm": 1.4706411158646941, + "learning_rate": 1.0143816008668049e-07, + "loss": 0.8663, + "step": 9993 + }, + { + "epoch": 0.9012941335617982, + "grad_norm": 0.6433108479971426, + "learning_rate": 1.0125454819443624e-07, + "loss": 0.8098, + "step": 9994 + }, + { + "epoch": 0.9013843170852686, + "grad_norm": 1.2324224553658498, + "learning_rate": 1.0107109831362315e-07, + "loss": 0.8373, + "step": 9995 + }, + { + "epoch": 0.9014745006087388, + "grad_norm": 1.37052466599745, + "learning_rate": 1.0088781045989447e-07, + "loss": 0.9425, + "step": 9996 + }, + { + "epoch": 0.901564684132209, + "grad_norm": 1.5755820606619513, + "learning_rate": 1.0070468464888926e-07, + "loss": 0.9787, + "step": 9997 + }, + { + "epoch": 0.9016548676556793, + "grad_norm": 1.5861461483362231, + "learning_rate": 1.0052172089623324e-07, + "loss": 0.9485, + "step": 9998 + }, + { + "epoch": 0.9017450511791496, + "grad_norm": 1.517268754411694, + "learning_rate": 1.0033891921753746e-07, + "loss": 0.9706, + "step": 9999 + }, + { + "epoch": 0.9018352347026198, + "grad_norm": 1.5524673328249214, + "learning_rate": 1.0015627962839968e-07, + "loss": 0.849, + "step": 10000 + }, + { + "epoch": 0.9019254182260901, + "grad_norm": 1.3453353578617173, + "learning_rate": 9.99738021444041e-08, + "loss": 0.9086, + "step": 10001 + }, + { + "epoch": 0.9020156017495603, + "grad_norm": 1.211106762285231, + "learning_rate": 9.979148678112003e-08, + "loss": 0.8642, + "step": 10002 + }, + { + "epoch": 0.9021057852730306, + "grad_norm": 1.765519468141279, + "learning_rate": 9.960933355410417e-08, + "loss": 1.0312, + "step": 10003 + }, + { + "epoch": 0.9021959687965009, + "grad_norm": 1.4281204056613737, + "learning_rate": 9.942734247889828e-08, + "loss": 0.8498, + "step": 10004 + }, + { + "epoch": 0.9022861523199711, + "grad_norm": 1.323118940293568, + "learning_rate": 9.92455135710315e-08, + "loss": 0.9419, + "step": 10005 + }, + { + "epoch": 0.9023763358434415, + "grad_norm": 1.5016412497996903, + "learning_rate": 9.906384684601787e-08, + "loss": 0.8756, + "step": 10006 + }, + { + "epoch": 0.9024665193669117, + "grad_norm": 1.3093794556316596, + "learning_rate": 9.8882342319359e-08, + "loss": 0.9768, + "step": 10007 + }, + { + "epoch": 0.9025567028903819, + "grad_norm": 1.2659565055812294, + "learning_rate": 9.870100000654048e-08, + "loss": 0.8808, + "step": 10008 + }, + { + "epoch": 0.9026468864138522, 
+ "grad_norm": 1.2340849523561812, + "learning_rate": 9.851981992303704e-08, + "loss": 0.9049, + "step": 10009 + }, + { + "epoch": 0.9027370699373225, + "grad_norm": 1.267827272543009, + "learning_rate": 9.833880208430678e-08, + "loss": 0.9333, + "step": 10010 + }, + { + "epoch": 0.9028272534607927, + "grad_norm": 1.5716391505923624, + "learning_rate": 9.815794650579601e-08, + "loss": 0.9203, + "step": 10011 + }, + { + "epoch": 0.902917436984263, + "grad_norm": 1.3997888695371492, + "learning_rate": 9.797725320293548e-08, + "loss": 0.9117, + "step": 10012 + }, + { + "epoch": 0.9030076205077332, + "grad_norm": 1.6342885769519828, + "learning_rate": 9.779672219114354e-08, + "loss": 1.0023, + "step": 10013 + }, + { + "epoch": 0.9030978040312035, + "grad_norm": 1.4700952416012583, + "learning_rate": 9.761635348582386e-08, + "loss": 0.9151, + "step": 10014 + }, + { + "epoch": 0.9031879875546738, + "grad_norm": 1.6923609763540073, + "learning_rate": 9.743614710236658e-08, + "loss": 0.9638, + "step": 10015 + }, + { + "epoch": 0.903278171078144, + "grad_norm": 1.5083986240116662, + "learning_rate": 9.725610305614806e-08, + "loss": 0.8913, + "step": 10016 + }, + { + "epoch": 0.9033683546016142, + "grad_norm": 1.3135303358677843, + "learning_rate": 9.707622136253002e-08, + "loss": 0.9293, + "step": 10017 + }, + { + "epoch": 0.9034585381250846, + "grad_norm": 1.3362737756653327, + "learning_rate": 9.689650203686173e-08, + "loss": 0.9565, + "step": 10018 + }, + { + "epoch": 0.9035487216485548, + "grad_norm": 1.5538431299584012, + "learning_rate": 9.671694509447715e-08, + "loss": 0.9113, + "step": 10019 + }, + { + "epoch": 0.903638905172025, + "grad_norm": 1.4808641373767313, + "learning_rate": 9.653755055069757e-08, + "loss": 0.9376, + "step": 10020 + }, + { + "epoch": 0.9037290886954953, + "grad_norm": 1.1814880272560953, + "learning_rate": 9.635831842082987e-08, + "loss": 0.9036, + "step": 10021 + }, + { + "epoch": 0.9038192722189656, + "grad_norm": 1.5311247326740645, + "learning_rate": 9.617924872016691e-08, + "loss": 0.945, + "step": 10022 + }, + { + "epoch": 0.9039094557424359, + "grad_norm": 1.4579848372635114, + "learning_rate": 9.600034146398806e-08, + "loss": 0.972, + "step": 10023 + }, + { + "epoch": 0.9039996392659061, + "grad_norm": 0.8047171652057107, + "learning_rate": 9.582159666755863e-08, + "loss": 0.7982, + "step": 10024 + }, + { + "epoch": 0.9040898227893763, + "grad_norm": 1.2894191826458665, + "learning_rate": 9.564301434612976e-08, + "loss": 0.9022, + "step": 10025 + }, + { + "epoch": 0.9041800063128467, + "grad_norm": 1.4619558936118193, + "learning_rate": 9.546459451494015e-08, + "loss": 0.9717, + "step": 10026 + }, + { + "epoch": 0.9042701898363169, + "grad_norm": 0.681876483324712, + "learning_rate": 9.528633718921231e-08, + "loss": 0.8301, + "step": 10027 + }, + { + "epoch": 0.9043603733597871, + "grad_norm": 1.2114601238015552, + "learning_rate": 9.510824238415672e-08, + "loss": 0.9821, + "step": 10028 + }, + { + "epoch": 0.9044505568832575, + "grad_norm": 1.317640410872827, + "learning_rate": 9.493031011496944e-08, + "loss": 0.9636, + "step": 10029 + }, + { + "epoch": 0.9045407404067277, + "grad_norm": 1.4762909654687761, + "learning_rate": 9.475254039683234e-08, + "loss": 0.7915, + "step": 10030 + }, + { + "epoch": 0.904630923930198, + "grad_norm": 1.5107164633495915, + "learning_rate": 9.45749332449144e-08, + "loss": 0.9001, + "step": 10031 + }, + { + "epoch": 0.9047211074536682, + "grad_norm": 0.640885416459992, + "learning_rate": 9.439748867436903e-08, + "loss": 0.83, 
+ "step": 10032 + }, + { + "epoch": 0.9048112909771385, + "grad_norm": 1.401081675510102, + "learning_rate": 9.42202067003377e-08, + "loss": 0.8937, + "step": 10033 + }, + { + "epoch": 0.9049014745006088, + "grad_norm": 1.617116954204627, + "learning_rate": 9.404308733794652e-08, + "loss": 1.0671, + "step": 10034 + }, + { + "epoch": 0.904991658024079, + "grad_norm": 1.2849851917645585, + "learning_rate": 9.38661306023083e-08, + "loss": 0.95, + "step": 10035 + }, + { + "epoch": 0.9050818415475492, + "grad_norm": 1.4933707454087877, + "learning_rate": 9.368933650852229e-08, + "loss": 0.9255, + "step": 10036 + }, + { + "epoch": 0.9051720250710196, + "grad_norm": 0.745243806692326, + "learning_rate": 9.351270507167352e-08, + "loss": 0.8401, + "step": 10037 + }, + { + "epoch": 0.9052622085944898, + "grad_norm": 1.5067258091236755, + "learning_rate": 9.333623630683285e-08, + "loss": 0.8738, + "step": 10038 + }, + { + "epoch": 0.90535239211796, + "grad_norm": 2.305223512549284, + "learning_rate": 9.315993022905799e-08, + "loss": 0.7902, + "step": 10039 + }, + { + "epoch": 0.9054425756414303, + "grad_norm": 1.402353398281899, + "learning_rate": 9.298378685339158e-08, + "loss": 0.9123, + "step": 10040 + }, + { + "epoch": 0.9055327591649006, + "grad_norm": 1.6908396417165774, + "learning_rate": 9.280780619486406e-08, + "loss": 0.9985, + "step": 10041 + }, + { + "epoch": 0.9056229426883708, + "grad_norm": 1.3533098689207728, + "learning_rate": 9.26319882684905e-08, + "loss": 1.0143, + "step": 10042 + }, + { + "epoch": 0.9057131262118411, + "grad_norm": 1.3003544837648178, + "learning_rate": 9.245633308927293e-08, + "loss": 0.9067, + "step": 10043 + }, + { + "epoch": 0.9058033097353113, + "grad_norm": 1.6348112731175297, + "learning_rate": 9.228084067219888e-08, + "loss": 0.9221, + "step": 10044 + }, + { + "epoch": 0.9058934932587817, + "grad_norm": 1.4661695660239435, + "learning_rate": 9.210551103224284e-08, + "loss": 0.8799, + "step": 10045 + }, + { + "epoch": 0.9059836767822519, + "grad_norm": 1.283994062228749, + "learning_rate": 9.193034418436463e-08, + "loss": 0.9814, + "step": 10046 + }, + { + "epoch": 0.9060738603057221, + "grad_norm": 0.6536276968052102, + "learning_rate": 9.175534014351005e-08, + "loss": 0.8269, + "step": 10047 + }, + { + "epoch": 0.9061640438291924, + "grad_norm": 2.158196165323903, + "learning_rate": 9.158049892461228e-08, + "loss": 0.8794, + "step": 10048 + }, + { + "epoch": 0.9062542273526627, + "grad_norm": 1.228732237042304, + "learning_rate": 9.140582054258871e-08, + "loss": 0.8723, + "step": 10049 + }, + { + "epoch": 0.9063444108761329, + "grad_norm": 1.3846694202658967, + "learning_rate": 9.123130501234499e-08, + "loss": 0.9421, + "step": 10050 + }, + { + "epoch": 0.9064345943996032, + "grad_norm": 1.594293838163852, + "learning_rate": 9.105695234877098e-08, + "loss": 0.9857, + "step": 10051 + }, + { + "epoch": 0.9065247779230734, + "grad_norm": 1.2476544307917623, + "learning_rate": 9.088276256674344e-08, + "loss": 0.9528, + "step": 10052 + }, + { + "epoch": 0.9066149614465437, + "grad_norm": 1.320243607930141, + "learning_rate": 9.070873568112536e-08, + "loss": 0.9109, + "step": 10053 + }, + { + "epoch": 0.906705144970014, + "grad_norm": 1.5332731000154398, + "learning_rate": 9.053487170676577e-08, + "loss": 0.9866, + "step": 10054 + }, + { + "epoch": 0.9067953284934842, + "grad_norm": 1.4298703436326734, + "learning_rate": 9.036117065849968e-08, + "loss": 0.9924, + "step": 10055 + }, + { + "epoch": 0.9068855120169546, + "grad_norm": 1.6753610648822506, + 
"learning_rate": 9.018763255114837e-08, + "loss": 0.9095, + "step": 10056 + }, + { + "epoch": 0.9069756955404248, + "grad_norm": 1.502412048786953, + "learning_rate": 9.00142573995184e-08, + "loss": 0.8991, + "step": 10057 + }, + { + "epoch": 0.907065879063895, + "grad_norm": 1.3372000366821422, + "learning_rate": 8.984104521840375e-08, + "loss": 1.0136, + "step": 10058 + }, + { + "epoch": 0.9071560625873653, + "grad_norm": 1.4760347260729478, + "learning_rate": 8.966799602258346e-08, + "loss": 0.958, + "step": 10059 + }, + { + "epoch": 0.9072462461108356, + "grad_norm": 1.3541901023988687, + "learning_rate": 8.949510982682329e-08, + "loss": 0.9725, + "step": 10060 + }, + { + "epoch": 0.9073364296343058, + "grad_norm": 1.6339557058804552, + "learning_rate": 8.932238664587499e-08, + "loss": 1.001, + "step": 10061 + }, + { + "epoch": 0.9074266131577761, + "grad_norm": 1.4692177534967175, + "learning_rate": 8.914982649447567e-08, + "loss": 1.0208, + "step": 10062 + }, + { + "epoch": 0.9075167966812463, + "grad_norm": 1.3896496384723376, + "learning_rate": 8.897742938734975e-08, + "loss": 0.999, + "step": 10063 + }, + { + "epoch": 0.9076069802047166, + "grad_norm": 1.5485949731659718, + "learning_rate": 8.880519533920661e-08, + "loss": 0.9582, + "step": 10064 + }, + { + "epoch": 0.9076971637281869, + "grad_norm": 1.213715449429745, + "learning_rate": 8.863312436474268e-08, + "loss": 0.9289, + "step": 10065 + }, + { + "epoch": 0.9077873472516571, + "grad_norm": 1.7950793374680032, + "learning_rate": 8.846121647863936e-08, + "loss": 0.9359, + "step": 10066 + }, + { + "epoch": 0.9078775307751273, + "grad_norm": 1.4226507229726435, + "learning_rate": 8.828947169556555e-08, + "loss": 0.9329, + "step": 10067 + }, + { + "epoch": 0.9079677142985977, + "grad_norm": 1.2153275192011461, + "learning_rate": 8.81178900301749e-08, + "loss": 0.8962, + "step": 10068 + }, + { + "epoch": 0.9080578978220679, + "grad_norm": 1.4357048103491599, + "learning_rate": 8.794647149710787e-08, + "loss": 0.8591, + "step": 10069 + }, + { + "epoch": 0.9081480813455381, + "grad_norm": 1.6040714615482885, + "learning_rate": 8.777521611099081e-08, + "loss": 0.9231, + "step": 10070 + }, + { + "epoch": 0.9082382648690084, + "grad_norm": 2.0238171077767824, + "learning_rate": 8.760412388643624e-08, + "loss": 0.8739, + "step": 10071 + }, + { + "epoch": 0.9083284483924787, + "grad_norm": 1.2776648908082433, + "learning_rate": 8.74331948380429e-08, + "loss": 0.9383, + "step": 10072 + }, + { + "epoch": 0.908418631915949, + "grad_norm": 1.450011496662734, + "learning_rate": 8.726242898039516e-08, + "loss": 0.9322, + "step": 10073 + }, + { + "epoch": 0.9085088154394192, + "grad_norm": 1.4507611644630163, + "learning_rate": 8.709182632806334e-08, + "loss": 0.9343, + "step": 10074 + }, + { + "epoch": 0.9085989989628894, + "grad_norm": 1.4884281436386624, + "learning_rate": 8.692138689560469e-08, + "loss": 0.9407, + "step": 10075 + }, + { + "epoch": 0.9086891824863598, + "grad_norm": 1.3646447650777567, + "learning_rate": 8.675111069756203e-08, + "loss": 0.9226, + "step": 10076 + }, + { + "epoch": 0.90877936600983, + "grad_norm": 1.3149501093403968, + "learning_rate": 8.658099774846395e-08, + "loss": 0.9003, + "step": 10077 + }, + { + "epoch": 0.9088695495333002, + "grad_norm": 1.2214007284023274, + "learning_rate": 8.641104806282595e-08, + "loss": 0.8759, + "step": 10078 + }, + { + "epoch": 0.9089597330567706, + "grad_norm": 1.5601899427539914, + "learning_rate": 8.624126165514845e-08, + "loss": 0.9437, + "step": 10079 + }, + { + "epoch": 
0.9090499165802408, + "grad_norm": 1.5864251801033795, + "learning_rate": 8.607163853991917e-08, + "loss": 0.8932, + "step": 10080 + }, + { + "epoch": 0.909140100103711, + "grad_norm": 1.5393066212641255, + "learning_rate": 8.590217873161054e-08, + "loss": 0.7789, + "step": 10081 + }, + { + "epoch": 0.9092302836271813, + "grad_norm": 1.3684945098856027, + "learning_rate": 8.573288224468255e-08, + "loss": 0.95, + "step": 10082 + }, + { + "epoch": 0.9093204671506516, + "grad_norm": 3.146578332124486, + "learning_rate": 8.556374909358011e-08, + "loss": 0.8544, + "step": 10083 + }, + { + "epoch": 0.9094106506741219, + "grad_norm": 1.6320959097551608, + "learning_rate": 8.539477929273476e-08, + "loss": 0.9089, + "step": 10084 + }, + { + "epoch": 0.9095008341975921, + "grad_norm": 1.2758948832057924, + "learning_rate": 8.522597285656386e-08, + "loss": 0.9662, + "step": 10085 + }, + { + "epoch": 0.9095910177210623, + "grad_norm": 0.8059469570212986, + "learning_rate": 8.505732979947078e-08, + "loss": 0.8115, + "step": 10086 + }, + { + "epoch": 0.9096812012445327, + "grad_norm": 1.1279088138118591, + "learning_rate": 8.488885013584557e-08, + "loss": 0.9492, + "step": 10087 + }, + { + "epoch": 0.9097713847680029, + "grad_norm": 1.421586311293533, + "learning_rate": 8.472053388006295e-08, + "loss": 0.8758, + "step": 10088 + }, + { + "epoch": 0.9098615682914731, + "grad_norm": 4.855934225098846, + "learning_rate": 8.455238104648565e-08, + "loss": 1.0197, + "step": 10089 + }, + { + "epoch": 0.9099517518149434, + "grad_norm": 1.4752200043393537, + "learning_rate": 8.438439164946043e-08, + "loss": 0.804, + "step": 10090 + }, + { + "epoch": 0.9100419353384137, + "grad_norm": 1.4619044403595332, + "learning_rate": 8.42165657033218e-08, + "loss": 0.9493, + "step": 10091 + }, + { + "epoch": 0.9101321188618839, + "grad_norm": 1.4478757655818417, + "learning_rate": 8.4048903222389e-08, + "loss": 0.971, + "step": 10092 + }, + { + "epoch": 0.9102223023853542, + "grad_norm": 1.3133964655962644, + "learning_rate": 8.388140422096856e-08, + "loss": 0.9802, + "step": 10093 + }, + { + "epoch": 0.9103124859088244, + "grad_norm": 1.5260299144027616, + "learning_rate": 8.371406871335173e-08, + "loss": 0.9387, + "step": 10094 + }, + { + "epoch": 0.9104026694322948, + "grad_norm": 1.5318458332100597, + "learning_rate": 8.354689671381732e-08, + "loss": 0.9269, + "step": 10095 + }, + { + "epoch": 0.910492852955765, + "grad_norm": 1.2956857858946158, + "learning_rate": 8.337988823662834e-08, + "loss": 0.9233, + "step": 10096 + }, + { + "epoch": 0.9105830364792352, + "grad_norm": 1.400701512260217, + "learning_rate": 8.321304329603607e-08, + "loss": 0.8866, + "step": 10097 + }, + { + "epoch": 0.9106732200027055, + "grad_norm": 0.8001055588221961, + "learning_rate": 8.304636190627557e-08, + "loss": 0.8167, + "step": 10098 + }, + { + "epoch": 0.9107634035261758, + "grad_norm": 1.76697279539037, + "learning_rate": 8.287984408156945e-08, + "loss": 0.9477, + "step": 10099 + }, + { + "epoch": 0.910853587049646, + "grad_norm": 1.5462079391988177, + "learning_rate": 8.271348983612591e-08, + "loss": 0.839, + "step": 10100 + }, + { + "epoch": 0.9109437705731163, + "grad_norm": 1.3215386830851767, + "learning_rate": 8.254729918413938e-08, + "loss": 0.8918, + "step": 10101 + }, + { + "epoch": 0.9110339540965866, + "grad_norm": 1.3205975106407226, + "learning_rate": 8.238127213979006e-08, + "loss": 0.9026, + "step": 10102 + }, + { + "epoch": 0.9111241376200568, + "grad_norm": 1.607531037453695, + "learning_rate": 8.221540871724398e-08, + 
"loss": 1.0011, + "step": 10103 + }, + { + "epoch": 0.9112143211435271, + "grad_norm": 1.5772674043269255, + "learning_rate": 8.2049708930654e-08, + "loss": 0.822, + "step": 10104 + }, + { + "epoch": 0.9113045046669973, + "grad_norm": 1.7268364460272794, + "learning_rate": 8.188417279415793e-08, + "loss": 0.903, + "step": 10105 + }, + { + "epoch": 0.9113946881904677, + "grad_norm": 1.5682844284221522, + "learning_rate": 8.171880032188117e-08, + "loss": 1.0318, + "step": 10106 + }, + { + "epoch": 0.9114848717139379, + "grad_norm": 1.5359970003635204, + "learning_rate": 8.155359152793351e-08, + "loss": 0.9124, + "step": 10107 + }, + { + "epoch": 0.9115750552374081, + "grad_norm": 1.5030278799043535, + "learning_rate": 8.138854642641147e-08, + "loss": 0.9207, + "step": 10108 + }, + { + "epoch": 0.9116652387608783, + "grad_norm": 1.393528211592069, + "learning_rate": 8.122366503139777e-08, + "loss": 0.86, + "step": 10109 + }, + { + "epoch": 0.9117554222843487, + "grad_norm": 3.301795650108541, + "learning_rate": 8.105894735696117e-08, + "loss": 0.8773, + "step": 10110 + }, + { + "epoch": 0.9118456058078189, + "grad_norm": 1.5134030601778041, + "learning_rate": 8.089439341715576e-08, + "loss": 0.909, + "step": 10111 + }, + { + "epoch": 0.9119357893312892, + "grad_norm": 2.0426477942346066, + "learning_rate": 8.073000322602319e-08, + "loss": 0.9595, + "step": 10112 + }, + { + "epoch": 0.9120259728547594, + "grad_norm": 1.3668126927649127, + "learning_rate": 8.056577679758891e-08, + "loss": 0.9492, + "step": 10113 + }, + { + "epoch": 0.9121161563782297, + "grad_norm": 1.5303824945185465, + "learning_rate": 8.040171414586638e-08, + "loss": 0.8841, + "step": 10114 + }, + { + "epoch": 0.9122063399017, + "grad_norm": 1.4374980926501018, + "learning_rate": 8.023781528485419e-08, + "loss": 0.8966, + "step": 10115 + }, + { + "epoch": 0.9122965234251702, + "grad_norm": 1.5538751214516584, + "learning_rate": 8.00740802285369e-08, + "loss": 0.9479, + "step": 10116 + }, + { + "epoch": 0.9123867069486404, + "grad_norm": 1.3991782501510666, + "learning_rate": 7.99105089908858e-08, + "loss": 0.8013, + "step": 10117 + }, + { + "epoch": 0.9124768904721108, + "grad_norm": 1.1696143152047118, + "learning_rate": 7.974710158585685e-08, + "loss": 0.906, + "step": 10118 + }, + { + "epoch": 0.912567073995581, + "grad_norm": 1.4279860080386126, + "learning_rate": 7.958385802739375e-08, + "loss": 0.9635, + "step": 10119 + }, + { + "epoch": 0.9126572575190512, + "grad_norm": 1.6955519827881036, + "learning_rate": 7.942077832942452e-08, + "loss": 0.9883, + "step": 10120 + }, + { + "epoch": 0.9127474410425215, + "grad_norm": 0.7137323714048428, + "learning_rate": 7.925786250586508e-08, + "loss": 0.8194, + "step": 10121 + }, + { + "epoch": 0.9128376245659918, + "grad_norm": 2.5249378952093666, + "learning_rate": 7.909511057061524e-08, + "loss": 0.8081, + "step": 10122 + }, + { + "epoch": 0.9129278080894621, + "grad_norm": 1.38125634860828, + "learning_rate": 7.893252253756234e-08, + "loss": 0.9168, + "step": 10123 + }, + { + "epoch": 0.9130179916129323, + "grad_norm": 1.665501505154491, + "learning_rate": 7.877009842057925e-08, + "loss": 0.8596, + "step": 10124 + }, + { + "epoch": 0.9131081751364026, + "grad_norm": 1.4531010451444533, + "learning_rate": 7.860783823352512e-08, + "loss": 0.9598, + "step": 10125 + }, + { + "epoch": 0.9131983586598729, + "grad_norm": 1.8145092974981962, + "learning_rate": 7.844574199024445e-08, + "loss": 0.9367, + "step": 10126 + }, + { + "epoch": 0.9132885421833431, + "grad_norm": 
1.4600027429215259, + "learning_rate": 7.82838097045686e-08, + "loss": 0.9727, + "step": 10127 + }, + { + "epoch": 0.9133787257068133, + "grad_norm": 1.7240582424689987, + "learning_rate": 7.812204139031454e-08, + "loss": 0.926, + "step": 10128 + }, + { + "epoch": 0.9134689092302837, + "grad_norm": 1.796278215902136, + "learning_rate": 7.796043706128474e-08, + "loss": 0.9522, + "step": 10129 + }, + { + "epoch": 0.9135590927537539, + "grad_norm": 1.4650025141827878, + "learning_rate": 7.779899673126844e-08, + "loss": 0.9621, + "step": 10130 + }, + { + "epoch": 0.9136492762772241, + "grad_norm": 1.3317157002438396, + "learning_rate": 7.76377204140406e-08, + "loss": 0.9786, + "step": 10131 + }, + { + "epoch": 0.9137394598006944, + "grad_norm": 1.3661615936770244, + "learning_rate": 7.74766081233622e-08, + "loss": 0.9247, + "step": 10132 + }, + { + "epoch": 0.9138296433241647, + "grad_norm": 1.3169001436662013, + "learning_rate": 7.73156598729805e-08, + "loss": 0.9941, + "step": 10133 + }, + { + "epoch": 0.913919826847635, + "grad_norm": 1.5879147235688642, + "learning_rate": 7.715487567662849e-08, + "loss": 0.9787, + "step": 10134 + }, + { + "epoch": 0.9140100103711052, + "grad_norm": 1.7458181142333327, + "learning_rate": 7.69942555480243e-08, + "loss": 0.9019, + "step": 10135 + }, + { + "epoch": 0.9141001938945754, + "grad_norm": 1.6337392266990172, + "learning_rate": 7.68337995008741e-08, + "loss": 1.0326, + "step": 10136 + }, + { + "epoch": 0.9141903774180458, + "grad_norm": 1.2777727905581537, + "learning_rate": 7.667350754886803e-08, + "loss": 0.7869, + "step": 10137 + }, + { + "epoch": 0.914280560941516, + "grad_norm": 2.148628476064561, + "learning_rate": 7.651337970568361e-08, + "loss": 0.9216, + "step": 10138 + }, + { + "epoch": 0.9143707444649862, + "grad_norm": 1.327136625640156, + "learning_rate": 7.635341598498368e-08, + "loss": 0.8982, + "step": 10139 + }, + { + "epoch": 0.9144609279884565, + "grad_norm": 1.3785903612060817, + "learning_rate": 7.61936164004171e-08, + "loss": 0.9249, + "step": 10140 + }, + { + "epoch": 0.9145511115119268, + "grad_norm": 1.3460920018357998, + "learning_rate": 7.603398096561875e-08, + "loss": 0.9719, + "step": 10141 + }, + { + "epoch": 0.914641295035397, + "grad_norm": 1.7704237819620523, + "learning_rate": 7.587450969420994e-08, + "loss": 0.9434, + "step": 10142 + }, + { + "epoch": 0.9147314785588673, + "grad_norm": 0.7113399414555801, + "learning_rate": 7.571520259979757e-08, + "loss": 0.8133, + "step": 10143 + }, + { + "epoch": 0.9148216620823375, + "grad_norm": 1.474243846329966, + "learning_rate": 7.555605969597455e-08, + "loss": 0.7715, + "step": 10144 + }, + { + "epoch": 0.9149118456058078, + "grad_norm": 1.3825889880907953, + "learning_rate": 7.539708099631959e-08, + "loss": 1.0196, + "step": 10145 + }, + { + "epoch": 0.9150020291292781, + "grad_norm": 1.349090697325419, + "learning_rate": 7.52382665143978e-08, + "loss": 0.9384, + "step": 10146 + }, + { + "epoch": 0.9150922126527483, + "grad_norm": 1.6841485686968005, + "learning_rate": 7.507961626376014e-08, + "loss": 0.9895, + "step": 10147 + }, + { + "epoch": 0.9151823961762187, + "grad_norm": 1.5161873472246612, + "learning_rate": 7.492113025794378e-08, + "loss": 0.9704, + "step": 10148 + }, + { + "epoch": 0.9152725796996889, + "grad_norm": 1.3659066003757403, + "learning_rate": 7.476280851047101e-08, + "loss": 0.8838, + "step": 10149 + }, + { + "epoch": 0.9153627632231591, + "grad_norm": 1.1723673485586463, + "learning_rate": 7.460465103485125e-08, + "loss": 0.9581, + "step": 10150 + 
}, + { + "epoch": 0.9154529467466294, + "grad_norm": 1.7539331650739678, + "learning_rate": 7.444665784457948e-08, + "loss": 0.9213, + "step": 10151 + }, + { + "epoch": 0.9155431302700997, + "grad_norm": 1.5056437178382933, + "learning_rate": 7.42888289531356e-08, + "loss": 0.8931, + "step": 10152 + }, + { + "epoch": 0.9156333137935699, + "grad_norm": 1.4026270078245744, + "learning_rate": 7.41311643739877e-08, + "loss": 0.856, + "step": 10153 + }, + { + "epoch": 0.9157234973170402, + "grad_norm": 1.3188176015729267, + "learning_rate": 7.39736641205877e-08, + "loss": 0.9714, + "step": 10154 + }, + { + "epoch": 0.9158136808405104, + "grad_norm": 1.3811811317777953, + "learning_rate": 7.381632820637462e-08, + "loss": 0.9594, + "step": 10155 + }, + { + "epoch": 0.9159038643639807, + "grad_norm": 1.3698582747864079, + "learning_rate": 7.365915664477352e-08, + "loss": 0.9224, + "step": 10156 + }, + { + "epoch": 0.915994047887451, + "grad_norm": 2.9638305058479677, + "learning_rate": 7.350214944919474e-08, + "loss": 0.9428, + "step": 10157 + }, + { + "epoch": 0.9160842314109212, + "grad_norm": 1.4205628932898797, + "learning_rate": 7.334530663303539e-08, + "loss": 0.9561, + "step": 10158 + }, + { + "epoch": 0.9161744149343914, + "grad_norm": 1.2125872295503088, + "learning_rate": 7.318862820967742e-08, + "loss": 0.9343, + "step": 10159 + }, + { + "epoch": 0.9162645984578618, + "grad_norm": 1.960557695667894, + "learning_rate": 7.303211419249056e-08, + "loss": 0.961, + "step": 10160 + }, + { + "epoch": 0.916354781981332, + "grad_norm": 1.3741362199404208, + "learning_rate": 7.287576459482858e-08, + "loss": 0.9477, + "step": 10161 + }, + { + "epoch": 0.9164449655048023, + "grad_norm": 1.4466865400335125, + "learning_rate": 7.271957943003259e-08, + "loss": 0.9749, + "step": 10162 + }, + { + "epoch": 0.9165351490282725, + "grad_norm": 1.6925998907320658, + "learning_rate": 7.256355871142883e-08, + "loss": 0.9356, + "step": 10163 + }, + { + "epoch": 0.9166253325517428, + "grad_norm": 1.524400612028265, + "learning_rate": 7.240770245233019e-08, + "loss": 0.9852, + "step": 10164 + }, + { + "epoch": 0.9167155160752131, + "grad_norm": 1.4172010629293024, + "learning_rate": 7.225201066603492e-08, + "loss": 0.851, + "step": 10165 + }, + { + "epoch": 0.9168056995986833, + "grad_norm": 1.3206075304846454, + "learning_rate": 7.209648336582774e-08, + "loss": 1.0437, + "step": 10166 + }, + { + "epoch": 0.9168958831221535, + "grad_norm": 1.4267676716698199, + "learning_rate": 7.19411205649787e-08, + "loss": 0.9183, + "step": 10167 + }, + { + "epoch": 0.9169860666456239, + "grad_norm": 1.3567141217797853, + "learning_rate": 7.178592227674474e-08, + "loss": 0.9548, + "step": 10168 + }, + { + "epoch": 0.9170762501690941, + "grad_norm": 1.6173392699154436, + "learning_rate": 7.163088851436771e-08, + "loss": 0.853, + "step": 10169 + }, + { + "epoch": 0.9171664336925643, + "grad_norm": 1.3098890583730216, + "learning_rate": 7.147601929107639e-08, + "loss": 0.8923, + "step": 10170 + }, + { + "epoch": 0.9172566172160346, + "grad_norm": 1.3222378568677715, + "learning_rate": 7.132131462008461e-08, + "loss": 0.9743, + "step": 10171 + }, + { + "epoch": 0.9173468007395049, + "grad_norm": 1.4839039908555904, + "learning_rate": 7.116677451459297e-08, + "loss": 0.9338, + "step": 10172 + }, + { + "epoch": 0.9174369842629752, + "grad_norm": 1.20774064929827, + "learning_rate": 7.101239898778799e-08, + "loss": 0.957, + "step": 10173 + }, + { + "epoch": 0.9175271677864454, + "grad_norm": 1.2910954945568758, + "learning_rate": 
7.085818805284094e-08, + "loss": 0.9662, + "step": 10174 + }, + { + "epoch": 0.9176173513099157, + "grad_norm": 1.5648947103261415, + "learning_rate": 7.070414172291083e-08, + "loss": 1.013, + "step": 10175 + }, + { + "epoch": 0.917707534833386, + "grad_norm": 1.2214989593289065, + "learning_rate": 7.055026001114095e-08, + "loss": 0.8687, + "step": 10176 + }, + { + "epoch": 0.9177977183568562, + "grad_norm": 0.6758993188988642, + "learning_rate": 7.039654293066211e-08, + "loss": 0.7925, + "step": 10177 + }, + { + "epoch": 0.9178879018803264, + "grad_norm": 1.6386737664340245, + "learning_rate": 7.024299049459003e-08, + "loss": 0.8586, + "step": 10178 + }, + { + "epoch": 0.9179780854037968, + "grad_norm": 1.7088039804344395, + "learning_rate": 7.008960271602627e-08, + "loss": 0.8526, + "step": 10179 + }, + { + "epoch": 0.918068268927267, + "grad_norm": 1.6405538634363883, + "learning_rate": 6.993637960805921e-08, + "loss": 0.9025, + "step": 10180 + }, + { + "epoch": 0.9181584524507372, + "grad_norm": 1.634359329761196, + "learning_rate": 6.97833211837624e-08, + "loss": 0.9585, + "step": 10181 + }, + { + "epoch": 0.9182486359742075, + "grad_norm": 2.1012113483420967, + "learning_rate": 6.963042745619562e-08, + "loss": 0.8785, + "step": 10182 + }, + { + "epoch": 0.9183388194976778, + "grad_norm": 1.3133201761280546, + "learning_rate": 6.947769843840511e-08, + "loss": 0.8988, + "step": 10183 + }, + { + "epoch": 0.918429003021148, + "grad_norm": 1.4955053541928747, + "learning_rate": 6.9325134143422e-08, + "loss": 0.9647, + "step": 10184 + }, + { + "epoch": 0.9185191865446183, + "grad_norm": 1.651810435990136, + "learning_rate": 6.917273458426387e-08, + "loss": 0.9149, + "step": 10185 + }, + { + "epoch": 0.9186093700680885, + "grad_norm": 0.7331369393523672, + "learning_rate": 6.902049977393476e-08, + "loss": 0.7802, + "step": 10186 + }, + { + "epoch": 0.9186995535915589, + "grad_norm": 1.3594535717235345, + "learning_rate": 6.886842972542362e-08, + "loss": 0.9334, + "step": 10187 + }, + { + "epoch": 0.9187897371150291, + "grad_norm": 1.460281143031408, + "learning_rate": 6.871652445170672e-08, + "loss": 0.8214, + "step": 10188 + }, + { + "epoch": 0.9188799206384993, + "grad_norm": 1.5572290397307604, + "learning_rate": 6.856478396574416e-08, + "loss": 1.0135, + "step": 10189 + }, + { + "epoch": 0.9189701041619696, + "grad_norm": 1.5535911643367284, + "learning_rate": 6.841320828048491e-08, + "loss": 0.9878, + "step": 10190 + }, + { + "epoch": 0.9190602876854399, + "grad_norm": 1.404597180001308, + "learning_rate": 6.826179740886062e-08, + "loss": 0.9277, + "step": 10191 + }, + { + "epoch": 0.9191504712089101, + "grad_norm": 1.3659582224722506, + "learning_rate": 6.811055136379184e-08, + "loss": 0.9925, + "step": 10192 + }, + { + "epoch": 0.9192406547323804, + "grad_norm": 1.7030881825238506, + "learning_rate": 6.79594701581827e-08, + "loss": 0.9031, + "step": 10193 + }, + { + "epoch": 0.9193308382558506, + "grad_norm": 1.739581018625269, + "learning_rate": 6.780855380492511e-08, + "loss": 0.9403, + "step": 10194 + }, + { + "epoch": 0.919421021779321, + "grad_norm": 1.2846993326891656, + "learning_rate": 6.765780231689544e-08, + "loss": 0.9472, + "step": 10195 + }, + { + "epoch": 0.9195112053027912, + "grad_norm": 1.5066062569252698, + "learning_rate": 6.750721570695695e-08, + "loss": 0.9926, + "step": 10196 + }, + { + "epoch": 0.9196013888262614, + "grad_norm": 2.3681548002396187, + "learning_rate": 6.735679398795868e-08, + "loss": 0.9013, + "step": 10197 + }, + { + "epoch": 
0.9196915723497318, + "grad_norm": 2.0104458531713645, + "learning_rate": 6.720653717273506e-08, + "loss": 0.8736, + "step": 10198 + }, + { + "epoch": 0.919781755873202, + "grad_norm": 1.3539159493778543, + "learning_rate": 6.705644527410714e-08, + "loss": 0.9329, + "step": 10199 + }, + { + "epoch": 0.9198719393966722, + "grad_norm": 1.6729993515515025, + "learning_rate": 6.690651830488136e-08, + "loss": 0.9031, + "step": 10200 + }, + { + "epoch": 0.9199621229201425, + "grad_norm": 1.6658314360467368, + "learning_rate": 6.675675627785037e-08, + "loss": 0.9204, + "step": 10201 + }, + { + "epoch": 0.9200523064436128, + "grad_norm": 1.7843816566677158, + "learning_rate": 6.660715920579263e-08, + "loss": 0.9518, + "step": 10202 + }, + { + "epoch": 0.920142489967083, + "grad_norm": 0.641158509819926, + "learning_rate": 6.645772710147279e-08, + "loss": 0.7988, + "step": 10203 + }, + { + "epoch": 0.9202326734905533, + "grad_norm": 1.3904116177526769, + "learning_rate": 6.630845997764112e-08, + "loss": 0.9005, + "step": 10204 + }, + { + "epoch": 0.9203228570140235, + "grad_norm": 1.3720700952930212, + "learning_rate": 6.615935784703409e-08, + "loss": 0.8759, + "step": 10205 + }, + { + "epoch": 0.9204130405374938, + "grad_norm": 1.5947747021783383, + "learning_rate": 6.601042072237328e-08, + "loss": 0.8553, + "step": 10206 + }, + { + "epoch": 0.9205032240609641, + "grad_norm": 1.293537395165304, + "learning_rate": 6.586164861636767e-08, + "loss": 0.9663, + "step": 10207 + }, + { + "epoch": 0.9205934075844343, + "grad_norm": 1.7130153353973923, + "learning_rate": 6.571304154171065e-08, + "loss": 0.849, + "step": 10208 + }, + { + "epoch": 0.9206835911079045, + "grad_norm": 1.452793452678959, + "learning_rate": 6.556459951108273e-08, + "loss": 0.9543, + "step": 10209 + }, + { + "epoch": 0.9207737746313749, + "grad_norm": 1.6045887709632545, + "learning_rate": 6.541632253714957e-08, + "loss": 0.9164, + "step": 10210 + }, + { + "epoch": 0.9208639581548451, + "grad_norm": 1.3915021465996358, + "learning_rate": 6.526821063256261e-08, + "loss": 0.8618, + "step": 10211 + }, + { + "epoch": 0.9209541416783154, + "grad_norm": 1.549509675396097, + "learning_rate": 6.512026380996016e-08, + "loss": 0.8373, + "step": 10212 + }, + { + "epoch": 0.9210443252017856, + "grad_norm": 2.0688361896181817, + "learning_rate": 6.49724820819657e-08, + "loss": 0.9047, + "step": 10213 + }, + { + "epoch": 0.9211345087252559, + "grad_norm": 1.3704396396581464, + "learning_rate": 6.48248654611887e-08, + "loss": 0.9641, + "step": 10214 + }, + { + "epoch": 0.9212246922487262, + "grad_norm": 1.505716477873611, + "learning_rate": 6.467741396022419e-08, + "loss": 0.9819, + "step": 10215 + }, + { + "epoch": 0.9213148757721964, + "grad_norm": 1.5368127015507866, + "learning_rate": 6.453012759165455e-08, + "loss": 0.938, + "step": 10216 + }, + { + "epoch": 0.9214050592956666, + "grad_norm": 1.3579045377558163, + "learning_rate": 6.438300636804639e-08, + "loss": 0.8971, + "step": 10217 + }, + { + "epoch": 0.921495242819137, + "grad_norm": 1.376474500218323, + "learning_rate": 6.423605030195278e-08, + "loss": 0.9695, + "step": 10218 + }, + { + "epoch": 0.9215854263426072, + "grad_norm": 1.715984477753227, + "learning_rate": 6.408925940591304e-08, + "loss": 1.0062, + "step": 10219 + }, + { + "epoch": 0.9216756098660774, + "grad_norm": 1.6784418499566534, + "learning_rate": 6.394263369245222e-08, + "loss": 0.8708, + "step": 10220 + }, + { + "epoch": 0.9217657933895478, + "grad_norm": 1.5553522270391487, + "learning_rate": 
6.379617317408126e-08, + "loss": 0.9397, + "step": 10221 + }, + { + "epoch": 0.921855976913018, + "grad_norm": 1.9316857991273562, + "learning_rate": 6.364987786329723e-08, + "loss": 0.9254, + "step": 10222 + }, + { + "epoch": 0.9219461604364882, + "grad_norm": 1.407909981827541, + "learning_rate": 6.350374777258193e-08, + "loss": 0.9213, + "step": 10223 + }, + { + "epoch": 0.9220363439599585, + "grad_norm": 1.5999377685603873, + "learning_rate": 6.335778291440519e-08, + "loss": 0.9156, + "step": 10224 + }, + { + "epoch": 0.9221265274834288, + "grad_norm": 1.3557201669017886, + "learning_rate": 6.321198330122057e-08, + "loss": 0.8419, + "step": 10225 + }, + { + "epoch": 0.9222167110068991, + "grad_norm": 1.7636196935629147, + "learning_rate": 6.306634894546902e-08, + "loss": 0.8769, + "step": 10226 + }, + { + "epoch": 0.9223068945303693, + "grad_norm": 1.2156267004319876, + "learning_rate": 6.292087985957661e-08, + "loss": 0.883, + "step": 10227 + }, + { + "epoch": 0.9223970780538395, + "grad_norm": 1.2972468107444919, + "learning_rate": 6.277557605595585e-08, + "loss": 0.9728, + "step": 10228 + }, + { + "epoch": 0.9224872615773099, + "grad_norm": 1.251706246301801, + "learning_rate": 6.263043754700481e-08, + "loss": 0.8601, + "step": 10229 + }, + { + "epoch": 0.9225774451007801, + "grad_norm": 1.6257032193289234, + "learning_rate": 6.248546434510671e-08, + "loss": 0.9311, + "step": 10230 + }, + { + "epoch": 0.9226676286242503, + "grad_norm": 1.4106040095777599, + "learning_rate": 6.234065646263298e-08, + "loss": 0.8631, + "step": 10231 + }, + { + "epoch": 0.9227578121477206, + "grad_norm": 3.3284129703209873, + "learning_rate": 6.219601391193796e-08, + "loss": 0.8418, + "step": 10232 + }, + { + "epoch": 0.9228479956711909, + "grad_norm": 2.539088040736926, + "learning_rate": 6.205153670536423e-08, + "loss": 0.864, + "step": 10233 + }, + { + "epoch": 0.9229381791946611, + "grad_norm": 1.2894589825866651, + "learning_rate": 6.190722485523902e-08, + "loss": 0.9596, + "step": 10234 + }, + { + "epoch": 0.9230283627181314, + "grad_norm": 1.4386179350188026, + "learning_rate": 6.176307837387607e-08, + "loss": 0.906, + "step": 10235 + }, + { + "epoch": 0.9231185462416016, + "grad_norm": 1.4629451400316793, + "learning_rate": 6.16190972735744e-08, + "loss": 0.888, + "step": 10236 + }, + { + "epoch": 0.923208729765072, + "grad_norm": 1.4300265717872014, + "learning_rate": 6.147528156661974e-08, + "loss": 0.9552, + "step": 10237 + }, + { + "epoch": 0.9232989132885422, + "grad_norm": 1.3974004981661792, + "learning_rate": 6.133163126528273e-08, + "loss": 0.851, + "step": 10238 + }, + { + "epoch": 0.9233890968120124, + "grad_norm": 1.3806934391878443, + "learning_rate": 6.11881463818209e-08, + "loss": 0.9404, + "step": 10239 + }, + { + "epoch": 0.9234792803354827, + "grad_norm": 1.3467266470153887, + "learning_rate": 6.104482692847668e-08, + "loss": 0.9199, + "step": 10240 + }, + { + "epoch": 0.923569463858953, + "grad_norm": 1.3809091863861043, + "learning_rate": 6.090167291747917e-08, + "loss": 0.9037, + "step": 10241 + }, + { + "epoch": 0.9236596473824232, + "grad_norm": 1.3608867088326433, + "learning_rate": 6.075868436104303e-08, + "loss": 0.8276, + "step": 10242 + }, + { + "epoch": 0.9237498309058935, + "grad_norm": 5.388958925052077, + "learning_rate": 6.061586127136875e-08, + "loss": 0.9184, + "step": 10243 + }, + { + "epoch": 0.9238400144293638, + "grad_norm": 1.3920705333000596, + "learning_rate": 6.047320366064324e-08, + "loss": 0.9025, + "step": 10244 + }, + { + "epoch": 0.923930197952834, 
+ "grad_norm": 1.3689545465507065, + "learning_rate": 6.033071154103786e-08, + "loss": 0.9395, + "step": 10245 + }, + { + "epoch": 0.9240203814763043, + "grad_norm": 1.461301054572509, + "learning_rate": 6.018838492471178e-08, + "loss": 0.9528, + "step": 10246 + }, + { + "epoch": 0.9241105649997745, + "grad_norm": 0.6331264282315894, + "learning_rate": 6.00462238238082e-08, + "loss": 0.8091, + "step": 10247 + }, + { + "epoch": 0.9242007485232449, + "grad_norm": 1.3062170964527646, + "learning_rate": 5.990422825045827e-08, + "loss": 0.8972, + "step": 10248 + }, + { + "epoch": 0.9242909320467151, + "grad_norm": 1.3433721144089625, + "learning_rate": 5.976239821677675e-08, + "loss": 0.9077, + "step": 10249 + }, + { + "epoch": 0.9243811155701853, + "grad_norm": 1.342455617572392, + "learning_rate": 5.962073373486598e-08, + "loss": 0.8758, + "step": 10250 + }, + { + "epoch": 0.9244712990936556, + "grad_norm": 1.661914854400299, + "learning_rate": 5.947923481681316e-08, + "loss": 0.8949, + "step": 10251 + }, + { + "epoch": 0.9245614826171259, + "grad_norm": 1.4156683976213942, + "learning_rate": 5.933790147469198e-08, + "loss": 0.912, + "step": 10252 + }, + { + "epoch": 0.9246516661405961, + "grad_norm": 1.6122359244403122, + "learning_rate": 5.9196733720561665e-08, + "loss": 0.8849, + "step": 10253 + }, + { + "epoch": 0.9247418496640664, + "grad_norm": 2.404659786151419, + "learning_rate": 5.905573156646793e-08, + "loss": 1.0094, + "step": 10254 + }, + { + "epoch": 0.9248320331875366, + "grad_norm": 1.7002204836187775, + "learning_rate": 5.8914895024441134e-08, + "loss": 1.0143, + "step": 10255 + }, + { + "epoch": 0.9249222167110069, + "grad_norm": 1.6428879163359562, + "learning_rate": 5.877422410649857e-08, + "loss": 0.9739, + "step": 10256 + }, + { + "epoch": 0.9250124002344772, + "grad_norm": 1.6287486780004632, + "learning_rate": 5.863371882464285e-08, + "loss": 0.9401, + "step": 10257 + }, + { + "epoch": 0.9251025837579474, + "grad_norm": 1.3195514629977305, + "learning_rate": 5.849337919086283e-08, + "loss": 0.9139, + "step": 10258 + }, + { + "epoch": 0.9251927672814176, + "grad_norm": 1.7165807733373928, + "learning_rate": 5.835320521713316e-08, + "loss": 0.9104, + "step": 10259 + }, + { + "epoch": 0.925282950804888, + "grad_norm": 1.341346365247888, + "learning_rate": 5.8213196915414264e-08, + "loss": 0.9234, + "step": 10260 + }, + { + "epoch": 0.9253731343283582, + "grad_norm": 2.0177735698383463, + "learning_rate": 5.807335429765237e-08, + "loss": 0.9912, + "step": 10261 + }, + { + "epoch": 0.9254633178518284, + "grad_norm": 1.7299510448342732, + "learning_rate": 5.7933677375779034e-08, + "loss": 0.9736, + "step": 10262 + }, + { + "epoch": 0.9255535013752987, + "grad_norm": 5.363344196303047, + "learning_rate": 5.77941661617134e-08, + "loss": 0.9809, + "step": 10263 + }, + { + "epoch": 0.925643684898769, + "grad_norm": 1.7240941281344957, + "learning_rate": 5.765482066735816e-08, + "loss": 0.8643, + "step": 10264 + }, + { + "epoch": 0.9257338684222393, + "grad_norm": 1.494639195055039, + "learning_rate": 5.7515640904604256e-08, + "loss": 0.934, + "step": 10265 + }, + { + "epoch": 0.9258240519457095, + "grad_norm": 1.354403030112494, + "learning_rate": 5.7376626885326187e-08, + "loss": 0.9104, + "step": 10266 + }, + { + "epoch": 0.9259142354691798, + "grad_norm": 1.4100395429085004, + "learning_rate": 5.723777862138601e-08, + "loss": 0.9957, + "step": 10267 + }, + { + "epoch": 0.9260044189926501, + "grad_norm": 1.4394628972170833, + "learning_rate": 5.7099096124630705e-08, + "loss": 
1.0115, + "step": 10268 + }, + { + "epoch": 0.9260946025161203, + "grad_norm": 1.1766050880698202, + "learning_rate": 5.696057940689347e-08, + "loss": 0.944, + "step": 10269 + }, + { + "epoch": 0.9261847860395905, + "grad_norm": 1.600327029185534, + "learning_rate": 5.6822228479993736e-08, + "loss": 0.9477, + "step": 10270 + }, + { + "epoch": 0.9262749695630609, + "grad_norm": 1.610075566668627, + "learning_rate": 5.668404335573584e-08, + "loss": 0.9722, + "step": 10271 + }, + { + "epoch": 0.9263651530865311, + "grad_norm": 1.406788447398198, + "learning_rate": 5.654602404591058e-08, + "loss": 0.8761, + "step": 10272 + }, + { + "epoch": 0.9264553366100013, + "grad_norm": 1.3643615290020012, + "learning_rate": 5.640817056229474e-08, + "loss": 0.983, + "step": 10273 + }, + { + "epoch": 0.9265455201334716, + "grad_norm": 1.3930339735917465, + "learning_rate": 5.6270482916650706e-08, + "loss": 0.975, + "step": 10274 + }, + { + "epoch": 0.9266357036569419, + "grad_norm": 1.5264969841945732, + "learning_rate": 5.613296112072663e-08, + "loss": 1.0108, + "step": 10275 + }, + { + "epoch": 0.9267258871804122, + "grad_norm": 1.9592485183217623, + "learning_rate": 5.59956051862569e-08, + "loss": 0.9393, + "step": 10276 + }, + { + "epoch": 0.9268160707038824, + "grad_norm": 1.4048006430134572, + "learning_rate": 5.585841512496081e-08, + "loss": 0.998, + "step": 10277 + }, + { + "epoch": 0.9269062542273526, + "grad_norm": 1.7147666548862672, + "learning_rate": 5.5721390948545e-08, + "loss": 1.0359, + "step": 10278 + }, + { + "epoch": 0.926996437750823, + "grad_norm": 1.4129231958323734, + "learning_rate": 5.558453266870056e-08, + "loss": 0.9808, + "step": 10279 + }, + { + "epoch": 0.9270866212742932, + "grad_norm": 1.4679026492890859, + "learning_rate": 5.544784029710525e-08, + "loss": 0.8747, + "step": 10280 + }, + { + "epoch": 0.9271768047977634, + "grad_norm": 1.5740557034777254, + "learning_rate": 5.531131384542242e-08, + "loss": 1.0047, + "step": 10281 + }, + { + "epoch": 0.9272669883212337, + "grad_norm": 1.632710065777072, + "learning_rate": 5.51749533253012e-08, + "loss": 0.8887, + "step": 10282 + }, + { + "epoch": 0.927357171844704, + "grad_norm": 1.3650301670052614, + "learning_rate": 5.503875874837649e-08, + "loss": 0.8741, + "step": 10283 + }, + { + "epoch": 0.9274473553681742, + "grad_norm": 1.5253316987557979, + "learning_rate": 5.4902730126269225e-08, + "loss": 0.8949, + "step": 10284 + }, + { + "epoch": 0.9275375388916445, + "grad_norm": 1.4285023263839562, + "learning_rate": 5.476686747058656e-08, + "loss": 0.9604, + "step": 10285 + }, + { + "epoch": 0.9276277224151147, + "grad_norm": 2.1090962543688874, + "learning_rate": 5.4631170792920124e-08, + "loss": 0.9046, + "step": 10286 + }, + { + "epoch": 0.927717905938585, + "grad_norm": 1.7627071475321179, + "learning_rate": 5.449564010484953e-08, + "loss": 0.912, + "step": 10287 + }, + { + "epoch": 0.9278080894620553, + "grad_norm": 1.8540544440208417, + "learning_rate": 5.436027541793775e-08, + "loss": 0.962, + "step": 10288 + }, + { + "epoch": 0.9278982729855255, + "grad_norm": 1.3349176719368518, + "learning_rate": 5.4225076743735554e-08, + "loss": 0.911, + "step": 10289 + }, + { + "epoch": 0.9279884565089958, + "grad_norm": 1.4461584965963559, + "learning_rate": 5.409004409377882e-08, + "loss": 0.8533, + "step": 10290 + }, + { + "epoch": 0.9280786400324661, + "grad_norm": 1.192765607944025, + "learning_rate": 5.3955177479589e-08, + "loss": 0.8605, + "step": 10291 + }, + { + "epoch": 0.9281688235559363, + "grad_norm": 
1.3329857681184323, + "learning_rate": 5.3820476912674e-08, + "loss": 0.9375, + "step": 10292 + }, + { + "epoch": 0.9282590070794066, + "grad_norm": 1.3610147255010507, + "learning_rate": 5.3685942404527063e-08, + "loss": 0.9569, + "step": 10293 + }, + { + "epoch": 0.9283491906028769, + "grad_norm": 1.476203834528918, + "learning_rate": 5.355157396662702e-08, + "loss": 0.9187, + "step": 10294 + }, + { + "epoch": 0.9284393741263471, + "grad_norm": 1.2648620660691525, + "learning_rate": 5.34173716104398e-08, + "loss": 0.9179, + "step": 10295 + }, + { + "epoch": 0.9285295576498174, + "grad_norm": 2.452080473875217, + "learning_rate": 5.328333534741536e-08, + "loss": 0.9395, + "step": 10296 + }, + { + "epoch": 0.9286197411732876, + "grad_norm": 1.5939259899432676, + "learning_rate": 5.314946518899099e-08, + "loss": 0.8912, + "step": 10297 + }, + { + "epoch": 0.928709924696758, + "grad_norm": 1.4780155681724505, + "learning_rate": 5.301576114658912e-08, + "loss": 1.0057, + "step": 10298 + }, + { + "epoch": 0.9288001082202282, + "grad_norm": 1.5714705263457345, + "learning_rate": 5.288222323161795e-08, + "loss": 0.9639, + "step": 10299 + }, + { + "epoch": 0.9288902917436984, + "grad_norm": 1.5515048167098302, + "learning_rate": 5.274885145547214e-08, + "loss": 0.9779, + "step": 10300 + }, + { + "epoch": 0.9289804752671686, + "grad_norm": 1.3752721603846598, + "learning_rate": 5.261564582953082e-08, + "loss": 0.9267, + "step": 10301 + }, + { + "epoch": 0.929070658790639, + "grad_norm": 1.8578560659627852, + "learning_rate": 5.248260636516066e-08, + "loss": 0.9137, + "step": 10302 + }, + { + "epoch": 0.9291608423141092, + "grad_norm": 1.6825555490106685, + "learning_rate": 5.2349733073712824e-08, + "loss": 0.8767, + "step": 10303 + }, + { + "epoch": 0.9292510258375795, + "grad_norm": 1.2914566002530705, + "learning_rate": 5.221702596652533e-08, + "loss": 1.0216, + "step": 10304 + }, + { + "epoch": 0.9293412093610497, + "grad_norm": 1.830359438319877, + "learning_rate": 5.208448505492091e-08, + "loss": 0.8829, + "step": 10305 + }, + { + "epoch": 0.92943139288452, + "grad_norm": 1.4050532440875714, + "learning_rate": 5.1952110350208965e-08, + "loss": 0.9187, + "step": 10306 + }, + { + "epoch": 0.9295215764079903, + "grad_norm": 1.47529588091636, + "learning_rate": 5.181990186368446e-08, + "loss": 0.8924, + "step": 10307 + }, + { + "epoch": 0.9296117599314605, + "grad_norm": 1.3581687135152403, + "learning_rate": 5.1687859606627915e-08, + "loss": 1.0304, + "step": 10308 + }, + { + "epoch": 0.9297019434549307, + "grad_norm": 2.4195255820208272, + "learning_rate": 5.1555983590306327e-08, + "loss": 0.8481, + "step": 10309 + }, + { + "epoch": 0.9297921269784011, + "grad_norm": 0.7420340931609598, + "learning_rate": 5.1424273825971806e-08, + "loss": 0.8171, + "step": 10310 + }, + { + "epoch": 0.9298823105018713, + "grad_norm": 1.9027389335225304, + "learning_rate": 5.1292730324862475e-08, + "loss": 0.8977, + "step": 10311 + }, + { + "epoch": 0.9299724940253415, + "grad_norm": 1.274817953015882, + "learning_rate": 5.116135309820224e-08, + "loss": 0.9471, + "step": 10312 + }, + { + "epoch": 0.9300626775488118, + "grad_norm": 1.7008223760396135, + "learning_rate": 5.103014215720147e-08, + "loss": 0.9686, + "step": 10313 + }, + { + "epoch": 0.9301528610722821, + "grad_norm": 1.4500549042928916, + "learning_rate": 5.0899097513055214e-08, + "loss": 0.9936, + "step": 10314 + }, + { + "epoch": 0.9302430445957524, + "grad_norm": 4.0527701429144525, + "learning_rate": 5.076821917694563e-08, + "loss": 0.9253, + 
"step": 10315 + }, + { + "epoch": 0.9303332281192226, + "grad_norm": 1.4428439942992368, + "learning_rate": 5.063750716003889e-08, + "loss": 0.9849, + "step": 10316 + }, + { + "epoch": 0.9304234116426929, + "grad_norm": 1.5266772130465869, + "learning_rate": 5.050696147348921e-08, + "loss": 0.917, + "step": 10317 + }, + { + "epoch": 0.9305135951661632, + "grad_norm": 1.5696534249284613, + "learning_rate": 5.037658212843454e-08, + "loss": 0.9084, + "step": 10318 + }, + { + "epoch": 0.9306037786896334, + "grad_norm": 1.3261693580839518, + "learning_rate": 5.0246369136000444e-08, + "loss": 0.8225, + "step": 10319 + }, + { + "epoch": 0.9306939622131036, + "grad_norm": 1.4448043342297199, + "learning_rate": 5.011632250729691e-08, + "loss": 1.0068, + "step": 10320 + }, + { + "epoch": 0.930784145736574, + "grad_norm": 1.466636029198016, + "learning_rate": 4.998644225342019e-08, + "loss": 1.0212, + "step": 10321 + }, + { + "epoch": 0.9308743292600442, + "grad_norm": 1.436077077098722, + "learning_rate": 4.9856728385452296e-08, + "loss": 0.9427, + "step": 10322 + }, + { + "epoch": 0.9309645127835144, + "grad_norm": 1.3823064535462108, + "learning_rate": 4.9727180914461485e-08, + "loss": 1.0207, + "step": 10323 + }, + { + "epoch": 0.9310546963069847, + "grad_norm": 1.5464180936241292, + "learning_rate": 4.959779985150137e-08, + "loss": 0.9352, + "step": 10324 + }, + { + "epoch": 0.931144879830455, + "grad_norm": 1.8746951809112293, + "learning_rate": 4.9468585207611105e-08, + "loss": 0.8825, + "step": 10325 + }, + { + "epoch": 0.9312350633539253, + "grad_norm": 1.7073880658525675, + "learning_rate": 4.9339536993816764e-08, + "loss": 0.9052, + "step": 10326 + }, + { + "epoch": 0.9313252468773955, + "grad_norm": 1.3691407556030386, + "learning_rate": 4.921065522112844e-08, + "loss": 0.877, + "step": 10327 + }, + { + "epoch": 0.9314154304008657, + "grad_norm": 1.523237557617539, + "learning_rate": 4.908193990054377e-08, + "loss": 0.9773, + "step": 10328 + }, + { + "epoch": 0.9315056139243361, + "grad_norm": 1.322832580457251, + "learning_rate": 4.89533910430453e-08, + "loss": 0.9556, + "step": 10329 + }, + { + "epoch": 0.9315957974478063, + "grad_norm": 1.4794897702641145, + "learning_rate": 4.8825008659601376e-08, + "loss": 0.924, + "step": 10330 + }, + { + "epoch": 0.9316859809712765, + "grad_norm": 1.753362286670696, + "learning_rate": 4.869679276116634e-08, + "loss": 0.9465, + "step": 10331 + }, + { + "epoch": 0.9317761644947468, + "grad_norm": 1.319720615239763, + "learning_rate": 4.856874335868055e-08, + "loss": 0.9543, + "step": 10332 + }, + { + "epoch": 0.9318663480182171, + "grad_norm": 1.4834301802633607, + "learning_rate": 4.844086046306928e-08, + "loss": 1.0417, + "step": 10333 + }, + { + "epoch": 0.9319565315416873, + "grad_norm": 1.3666151756185412, + "learning_rate": 4.8313144085244896e-08, + "loss": 0.934, + "step": 10334 + }, + { + "epoch": 0.9320467150651576, + "grad_norm": 0.6768455503543627, + "learning_rate": 4.818559423610424e-08, + "loss": 0.8026, + "step": 10335 + }, + { + "epoch": 0.9321368985886278, + "grad_norm": 1.3294630154776816, + "learning_rate": 4.8058210926531284e-08, + "loss": 0.8832, + "step": 10336 + }, + { + "epoch": 0.9322270821120981, + "grad_norm": 1.377153877071187, + "learning_rate": 4.7930994167394435e-08, + "loss": 0.9689, + "step": 10337 + }, + { + "epoch": 0.9323172656355684, + "grad_norm": 1.9463764636163032, + "learning_rate": 4.7803943969548786e-08, + "loss": 0.9761, + "step": 10338 + }, + { + "epoch": 0.9324074491590386, + "grad_norm": 
1.448132789271467, + "learning_rate": 4.7677060343834784e-08, + "loss": 0.8869, + "step": 10339 + }, + { + "epoch": 0.932497632682509, + "grad_norm": 1.6965521573819313, + "learning_rate": 4.75503433010791e-08, + "loss": 0.9268, + "step": 10340 + }, + { + "epoch": 0.9325878162059792, + "grad_norm": 1.6448506024822085, + "learning_rate": 4.742379285209419e-08, + "loss": 0.9135, + "step": 10341 + }, + { + "epoch": 0.9326779997294494, + "grad_norm": 1.4227957626219618, + "learning_rate": 4.72974090076772e-08, + "loss": 1.0341, + "step": 10342 + }, + { + "epoch": 0.9327681832529197, + "grad_norm": 1.1933913406111607, + "learning_rate": 4.717119177861262e-08, + "loss": 0.8895, + "step": 10343 + }, + { + "epoch": 0.93285836677639, + "grad_norm": 1.4712146614128254, + "learning_rate": 4.70451411756696e-08, + "loss": 0.9431, + "step": 10344 + }, + { + "epoch": 0.9329485502998602, + "grad_norm": 1.3292495006153546, + "learning_rate": 4.691925720960355e-08, + "loss": 0.9717, + "step": 10345 + }, + { + "epoch": 0.9330387338233305, + "grad_norm": 1.5750218465591745, + "learning_rate": 4.6793539891155645e-08, + "loss": 0.9576, + "step": 10346 + }, + { + "epoch": 0.9331289173468007, + "grad_norm": 1.4391940542389814, + "learning_rate": 4.6667989231052864e-08, + "loss": 0.9103, + "step": 10347 + }, + { + "epoch": 0.933219100870271, + "grad_norm": 1.6060101044219401, + "learning_rate": 4.654260524000797e-08, + "loss": 0.9417, + "step": 10348 + }, + { + "epoch": 0.9333092843937413, + "grad_norm": 1.2097424424548893, + "learning_rate": 4.6417387928719076e-08, + "loss": 0.9279, + "step": 10349 + }, + { + "epoch": 0.9333994679172115, + "grad_norm": 1.7705195279793722, + "learning_rate": 4.629233730787052e-08, + "loss": 0.8093, + "step": 10350 + }, + { + "epoch": 0.9334896514406817, + "grad_norm": 1.6049709615724657, + "learning_rate": 4.616745338813266e-08, + "loss": 0.8972, + "step": 10351 + }, + { + "epoch": 0.9335798349641521, + "grad_norm": 1.2542246476687353, + "learning_rate": 4.6042736180160744e-08, + "loss": 0.9527, + "step": 10352 + }, + { + "epoch": 0.9336700184876223, + "grad_norm": 1.5849107697938765, + "learning_rate": 4.591818569459671e-08, + "loss": 1.0518, + "step": 10353 + }, + { + "epoch": 0.9337602020110926, + "grad_norm": 1.598494032590035, + "learning_rate": 4.5793801942067614e-08, + "loss": 0.9579, + "step": 10354 + }, + { + "epoch": 0.9338503855345628, + "grad_norm": 1.6366624481058754, + "learning_rate": 4.566958493318673e-08, + "loss": 0.7975, + "step": 10355 + }, + { + "epoch": 0.9339405690580331, + "grad_norm": 0.6294611739455648, + "learning_rate": 4.554553467855316e-08, + "loss": 0.8267, + "step": 10356 + }, + { + "epoch": 0.9340307525815034, + "grad_norm": 1.6207119043884286, + "learning_rate": 4.5421651188751074e-08, + "loss": 0.9463, + "step": 10357 + }, + { + "epoch": 0.9341209361049736, + "grad_norm": 1.407912140938984, + "learning_rate": 4.529793447435137e-08, + "loss": 0.9496, + "step": 10358 + }, + { + "epoch": 0.9342111196284438, + "grad_norm": 1.330297690038705, + "learning_rate": 4.5174384545909824e-08, + "loss": 0.9364, + "step": 10359 + }, + { + "epoch": 0.9343013031519142, + "grad_norm": 1.6650688459579637, + "learning_rate": 4.505100141396867e-08, + "loss": 0.9755, + "step": 10360 + }, + { + "epoch": 0.9343914866753844, + "grad_norm": 1.6475602116095502, + "learning_rate": 4.492778508905548e-08, + "loss": 0.9609, + "step": 10361 + }, + { + "epoch": 0.9344816701988546, + "grad_norm": 1.4716962928569242, + "learning_rate": 4.480473558168385e-08, + "loss": 0.8837, + 
"step": 10362 + }, + { + "epoch": 0.934571853722325, + "grad_norm": 1.6189701740658382, + "learning_rate": 4.4681852902352936e-08, + "loss": 0.8439, + "step": 10363 + }, + { + "epoch": 0.9346620372457952, + "grad_norm": 1.5496394568702303, + "learning_rate": 4.455913706154812e-08, + "loss": 0.8463, + "step": 10364 + }, + { + "epoch": 0.9347522207692655, + "grad_norm": 1.5173907661401391, + "learning_rate": 4.443658806973949e-08, + "loss": 0.9617, + "step": 10365 + }, + { + "epoch": 0.9348424042927357, + "grad_norm": 0.6576458753917178, + "learning_rate": 4.431420593738444e-08, + "loss": 0.8065, + "step": 10366 + }, + { + "epoch": 0.934932587816206, + "grad_norm": 1.545180114691413, + "learning_rate": 4.419199067492485e-08, + "loss": 0.8405, + "step": 10367 + }, + { + "epoch": 0.9350227713396763, + "grad_norm": 1.599370731349554, + "learning_rate": 4.4069942292788596e-08, + "loss": 0.9415, + "step": 10368 + }, + { + "epoch": 0.9351129548631465, + "grad_norm": 1.5904177067851275, + "learning_rate": 4.39480608013898e-08, + "loss": 0.9267, + "step": 10369 + }, + { + "epoch": 0.9352031383866167, + "grad_norm": 1.2639859736304133, + "learning_rate": 4.3826346211128126e-08, + "loss": 1.0171, + "step": 10370 + }, + { + "epoch": 0.9352933219100871, + "grad_norm": 1.3316435935003306, + "learning_rate": 4.370479853238884e-08, + "loss": 0.9356, + "step": 10371 + }, + { + "epoch": 0.9353835054335573, + "grad_norm": 1.4648593749166676, + "learning_rate": 4.3583417775542756e-08, + "loss": 0.9039, + "step": 10372 + }, + { + "epoch": 0.9354736889570275, + "grad_norm": 1.2451556749566333, + "learning_rate": 4.3462203950947575e-08, + "loss": 0.8879, + "step": 10373 + }, + { + "epoch": 0.9355638724804978, + "grad_norm": 1.4883463687617786, + "learning_rate": 4.3341157068944814e-08, + "loss": 0.916, + "step": 10374 + }, + { + "epoch": 0.9356540560039681, + "grad_norm": 1.510055691784664, + "learning_rate": 4.322027713986376e-08, + "loss": 0.9449, + "step": 10375 + }, + { + "epoch": 0.9357442395274383, + "grad_norm": 1.6759934368824427, + "learning_rate": 4.309956417401816e-08, + "loss": 1.0061, + "step": 10376 + }, + { + "epoch": 0.9358344230509086, + "grad_norm": 1.3278441637105918, + "learning_rate": 4.297901818170801e-08, + "loss": 0.9002, + "step": 10377 + }, + { + "epoch": 0.9359246065743788, + "grad_norm": 1.2508827430391671, + "learning_rate": 4.285863917321886e-08, + "loss": 0.8635, + "step": 10378 + }, + { + "epoch": 0.9360147900978492, + "grad_norm": 1.4464557971444034, + "learning_rate": 4.2738427158822253e-08, + "loss": 0.8679, + "step": 10379 + }, + { + "epoch": 0.9361049736213194, + "grad_norm": 1.5900569838383238, + "learning_rate": 4.261838214877511e-08, + "loss": 0.9448, + "step": 10380 + }, + { + "epoch": 0.9361951571447896, + "grad_norm": 1.2577877160704882, + "learning_rate": 4.249850415332079e-08, + "loss": 0.9402, + "step": 10381 + }, + { + "epoch": 0.9362853406682599, + "grad_norm": 1.4732054639940233, + "learning_rate": 4.237879318268756e-08, + "loss": 0.9958, + "step": 10382 + }, + { + "epoch": 0.9363755241917302, + "grad_norm": 1.3073136630493774, + "learning_rate": 4.225924924708968e-08, + "loss": 0.8616, + "step": 10383 + }, + { + "epoch": 0.9364657077152004, + "grad_norm": 1.5640242961596413, + "learning_rate": 4.2139872356727665e-08, + "loss": 0.9462, + "step": 10384 + }, + { + "epoch": 0.9365558912386707, + "grad_norm": 1.564980407324075, + "learning_rate": 4.202066252178738e-08, + "loss": 0.9061, + "step": 10385 + }, + { + "epoch": 0.936646074762141, + "grad_norm": 
1.3205876261256055, + "learning_rate": 4.1901619752440445e-08, + "loss": 1.0285, + "step": 10386 + }, + { + "epoch": 0.9367362582856112, + "grad_norm": 1.4858114419730462, + "learning_rate": 4.178274405884363e-08, + "loss": 0.9345, + "step": 10387 + }, + { + "epoch": 0.9368264418090815, + "grad_norm": 1.3896701406257184, + "learning_rate": 4.166403545114105e-08, + "loss": 0.943, + "step": 10388 + }, + { + "epoch": 0.9369166253325517, + "grad_norm": 1.729446419143135, + "learning_rate": 4.154549393946083e-08, + "loss": 0.9865, + "step": 10389 + }, + { + "epoch": 0.937006808856022, + "grad_norm": 1.3778044104992946, + "learning_rate": 4.14271195339182e-08, + "loss": 0.96, + "step": 10390 + }, + { + "epoch": 0.9370969923794923, + "grad_norm": 1.311846388695206, + "learning_rate": 4.1308912244613084e-08, + "loss": 0.8999, + "step": 10391 + }, + { + "epoch": 0.9371871759029625, + "grad_norm": 1.3176361134556858, + "learning_rate": 4.1190872081631636e-08, + "loss": 0.8884, + "step": 10392 + }, + { + "epoch": 0.9372773594264328, + "grad_norm": 1.6218396780177216, + "learning_rate": 4.107299905504558e-08, + "loss": 0.9937, + "step": 10393 + }, + { + "epoch": 0.9373675429499031, + "grad_norm": 1.3708741536580014, + "learning_rate": 4.095529317491286e-08, + "loss": 0.9302, + "step": 10394 + }, + { + "epoch": 0.9374577264733733, + "grad_norm": 1.7267264033751952, + "learning_rate": 4.0837754451276575e-08, + "loss": 0.9463, + "step": 10395 + }, + { + "epoch": 0.9375479099968436, + "grad_norm": 1.305429807090109, + "learning_rate": 4.072038289416557e-08, + "loss": 0.9241, + "step": 10396 + }, + { + "epoch": 0.9376380935203138, + "grad_norm": 1.3288698239635184, + "learning_rate": 4.0603178513595185e-08, + "loss": 0.9396, + "step": 10397 + }, + { + "epoch": 0.9377282770437841, + "grad_norm": 2.279818778281335, + "learning_rate": 4.0486141319565624e-08, + "loss": 0.9407, + "step": 10398 + }, + { + "epoch": 0.9378184605672544, + "grad_norm": 1.4787750621229347, + "learning_rate": 4.0369271322062916e-08, + "loss": 0.8805, + "step": 10399 + }, + { + "epoch": 0.9379086440907246, + "grad_norm": 1.4374435247854547, + "learning_rate": 4.0252568531059295e-08, + "loss": 0.9549, + "step": 10400 + }, + { + "epoch": 0.9379988276141948, + "grad_norm": 1.7122424997128403, + "learning_rate": 4.013603295651235e-08, + "loss": 0.9341, + "step": 10401 + }, + { + "epoch": 0.9380890111376652, + "grad_norm": 1.3964612412037092, + "learning_rate": 4.001966460836592e-08, + "loss": 0.9632, + "step": 10402 + }, + { + "epoch": 0.9381791946611354, + "grad_norm": 1.6749463257801382, + "learning_rate": 3.990346349654894e-08, + "loss": 1.0082, + "step": 10403 + }, + { + "epoch": 0.9382693781846057, + "grad_norm": 1.1728162926953833, + "learning_rate": 3.9787429630975924e-08, + "loss": 0.9101, + "step": 10404 + }, + { + "epoch": 0.9383595617080759, + "grad_norm": 1.33531593899024, + "learning_rate": 3.967156302154828e-08, + "loss": 0.9253, + "step": 10405 + }, + { + "epoch": 0.9384497452315462, + "grad_norm": 2.2699987878670167, + "learning_rate": 3.955586367815189e-08, + "loss": 0.8912, + "step": 10406 + }, + { + "epoch": 0.9385399287550165, + "grad_norm": 1.6214257325495072, + "learning_rate": 3.944033161065907e-08, + "loss": 1.0034, + "step": 10407 + }, + { + "epoch": 0.9386301122784867, + "grad_norm": 1.4420317659781166, + "learning_rate": 3.93249668289275e-08, + "loss": 0.8296, + "step": 10408 + }, + { + "epoch": 0.9387202958019569, + "grad_norm": 1.7430696178677902, + "learning_rate": 3.920976934280063e-08, + "loss": 0.8649, + 
"step": 10409 + }, + { + "epoch": 0.9388104793254273, + "grad_norm": 1.5883176635230747, + "learning_rate": 3.909473916210815e-08, + "loss": 1.0147, + "step": 10410 + }, + { + "epoch": 0.9389006628488975, + "grad_norm": 1.7770445184020265, + "learning_rate": 3.897987629666488e-08, + "loss": 0.9012, + "step": 10411 + }, + { + "epoch": 0.9389908463723677, + "grad_norm": 1.1720452248599993, + "learning_rate": 3.886518075627143e-08, + "loss": 1.0049, + "step": 10412 + }, + { + "epoch": 0.9390810298958381, + "grad_norm": 1.7480611960394512, + "learning_rate": 3.875065255071419e-08, + "loss": 0.9264, + "step": 10413 + }, + { + "epoch": 0.9391712134193083, + "grad_norm": 1.5715161924580856, + "learning_rate": 3.863629168976579e-08, + "loss": 0.9076, + "step": 10414 + }, + { + "epoch": 0.9392613969427785, + "grad_norm": 1.365612627056296, + "learning_rate": 3.852209818318375e-08, + "loss": 0.9845, + "step": 10415 + }, + { + "epoch": 0.9393515804662488, + "grad_norm": 1.5675516483248695, + "learning_rate": 3.840807204071161e-08, + "loss": 0.8382, + "step": 10416 + }, + { + "epoch": 0.9394417639897191, + "grad_norm": 1.5207501305939573, + "learning_rate": 3.829421327207894e-08, + "loss": 0.9179, + "step": 10417 + }, + { + "epoch": 0.9395319475131894, + "grad_norm": 1.4732090243951557, + "learning_rate": 3.8180521887000825e-08, + "loss": 0.8903, + "step": 10418 + }, + { + "epoch": 0.9396221310366596, + "grad_norm": 0.6519526148497526, + "learning_rate": 3.806699789517775e-08, + "loss": 0.8418, + "step": 10419 + }, + { + "epoch": 0.9397123145601298, + "grad_norm": 1.4327824347117415, + "learning_rate": 3.7953641306296635e-08, + "loss": 0.9197, + "step": 10420 + }, + { + "epoch": 0.9398024980836002, + "grad_norm": 1.6243120351097395, + "learning_rate": 3.784045213002951e-08, + "loss": 0.7858, + "step": 10421 + }, + { + "epoch": 0.9398926816070704, + "grad_norm": 1.461200629216896, + "learning_rate": 3.7727430376033986e-08, + "loss": 0.9548, + "step": 10422 + }, + { + "epoch": 0.9399828651305406, + "grad_norm": 1.7881436049591142, + "learning_rate": 3.7614576053954126e-08, + "loss": 0.9385, + "step": 10423 + }, + { + "epoch": 0.9400730486540109, + "grad_norm": 1.4521146974318802, + "learning_rate": 3.75018891734189e-08, + "loss": 0.9345, + "step": 10424 + }, + { + "epoch": 0.9401632321774812, + "grad_norm": 1.5996071392784283, + "learning_rate": 3.738936974404372e-08, + "loss": 0.9761, + "step": 10425 + }, + { + "epoch": 0.9402534157009514, + "grad_norm": 1.1478830672999092, + "learning_rate": 3.7277017775429354e-08, + "loss": 0.9975, + "step": 10426 + }, + { + "epoch": 0.9403435992244217, + "grad_norm": 1.2596360250832623, + "learning_rate": 3.7164833277162136e-08, + "loss": 0.9601, + "step": 10427 + }, + { + "epoch": 0.9404337827478919, + "grad_norm": 1.6550804183184193, + "learning_rate": 3.705281625881418e-08, + "loss": 0.9145, + "step": 10428 + }, + { + "epoch": 0.9405239662713623, + "grad_norm": 1.376463631273854, + "learning_rate": 3.694096672994362e-08, + "loss": 0.8467, + "step": 10429 + }, + { + "epoch": 0.9406141497948325, + "grad_norm": 1.4943376797651098, + "learning_rate": 3.682928470009394e-08, + "loss": 0.9892, + "step": 10430 + }, + { + "epoch": 0.9407043333183027, + "grad_norm": 1.4703798382021254, + "learning_rate": 3.6717770178794406e-08, + "loss": 0.8967, + "step": 10431 + }, + { + "epoch": 0.940794516841773, + "grad_norm": 1.3147760819210759, + "learning_rate": 3.6606423175560287e-08, + "loss": 1.0301, + "step": 10432 + }, + { + "epoch": 0.9408847003652433, + "grad_norm": 
0.6521845411993009, + "learning_rate": 3.649524369989221e-08, + "loss": 0.7677, + "step": 10433 + }, + { + "epoch": 0.9409748838887135, + "grad_norm": 1.4068289942360204, + "learning_rate": 3.638423176127636e-08, + "loss": 0.9671, + "step": 10434 + }, + { + "epoch": 0.9410650674121838, + "grad_norm": 1.5383953836779434, + "learning_rate": 3.6273387369185396e-08, + "loss": 0.9721, + "step": 10435 + }, + { + "epoch": 0.9411552509356541, + "grad_norm": 1.4208740232277537, + "learning_rate": 3.616271053307685e-08, + "loss": 0.8845, + "step": 10436 + }, + { + "epoch": 0.9412454344591243, + "grad_norm": 1.5010907657736972, + "learning_rate": 3.6052201262394275e-08, + "loss": 0.9562, + "step": 10437 + }, + { + "epoch": 0.9413356179825946, + "grad_norm": 1.845052226911677, + "learning_rate": 3.5941859566566816e-08, + "loss": 0.9846, + "step": 10438 + }, + { + "epoch": 0.9414258015060648, + "grad_norm": 0.7086992347203953, + "learning_rate": 3.583168545500981e-08, + "loss": 0.8144, + "step": 10439 + }, + { + "epoch": 0.9415159850295352, + "grad_norm": 1.3847772896387018, + "learning_rate": 3.5721678937123746e-08, + "loss": 0.8832, + "step": 10440 + }, + { + "epoch": 0.9416061685530054, + "grad_norm": 1.348995704084135, + "learning_rate": 3.561184002229467e-08, + "loss": 1.0154, + "step": 10441 + }, + { + "epoch": 0.9416963520764756, + "grad_norm": 1.3238704076944308, + "learning_rate": 3.550216871989531e-08, + "loss": 0.8805, + "step": 10442 + }, + { + "epoch": 0.9417865355999459, + "grad_norm": 0.6314281108482483, + "learning_rate": 3.539266503928262e-08, + "loss": 0.8608, + "step": 10443 + }, + { + "epoch": 0.9418767191234162, + "grad_norm": 1.3230377602411065, + "learning_rate": 3.528332898980091e-08, + "loss": 0.9475, + "step": 10444 + }, + { + "epoch": 0.9419669026468864, + "grad_norm": 1.3318764157513703, + "learning_rate": 3.517416058077849e-08, + "loss": 0.9026, + "step": 10445 + }, + { + "epoch": 0.9420570861703567, + "grad_norm": 1.42955216564572, + "learning_rate": 3.506515982153102e-08, + "loss": 0.8439, + "step": 10446 + }, + { + "epoch": 0.9421472696938269, + "grad_norm": 1.3595605920839529, + "learning_rate": 3.495632672135862e-08, + "loss": 0.942, + "step": 10447 + }, + { + "epoch": 0.9422374532172972, + "grad_norm": 1.2060966453512552, + "learning_rate": 3.4847661289547417e-08, + "loss": 0.964, + "step": 10448 + }, + { + "epoch": 0.9423276367407675, + "grad_norm": 1.5098171052631557, + "learning_rate": 3.473916353536932e-08, + "loss": 0.889, + "step": 10449 + }, + { + "epoch": 0.9424178202642377, + "grad_norm": 1.4167407895312676, + "learning_rate": 3.463083346808249e-08, + "loss": 0.9099, + "step": 10450 + }, + { + "epoch": 0.9425080037877079, + "grad_norm": 2.348134326972135, + "learning_rate": 3.452267109692975e-08, + "loss": 0.8319, + "step": 10451 + }, + { + "epoch": 0.9425981873111783, + "grad_norm": 1.3701632132382093, + "learning_rate": 3.441467643114016e-08, + "loss": 0.9298, + "step": 10452 + }, + { + "epoch": 0.9426883708346485, + "grad_norm": 0.5957703599352218, + "learning_rate": 3.430684947992857e-08, + "loss": 0.7766, + "step": 10453 + }, + { + "epoch": 0.9427785543581187, + "grad_norm": 1.6999475779585107, + "learning_rate": 3.419919025249518e-08, + "loss": 0.9508, + "step": 10454 + }, + { + "epoch": 0.942868737881589, + "grad_norm": 1.431752532290655, + "learning_rate": 3.40916987580262e-08, + "loss": 0.9547, + "step": 10455 + }, + { + "epoch": 0.9429589214050593, + "grad_norm": 1.4367394301153629, + "learning_rate": 3.398437500569362e-08, + "loss": 0.9149, + 
"step": 10456 + }, + { + "epoch": 0.9430491049285296, + "grad_norm": 1.4105210189118684, + "learning_rate": 3.3877219004654347e-08, + "loss": 0.8218, + "step": 10457 + }, + { + "epoch": 0.9431392884519998, + "grad_norm": 1.5574498005916322, + "learning_rate": 3.3770230764051946e-08, + "loss": 0.973, + "step": 10458 + }, + { + "epoch": 0.9432294719754701, + "grad_norm": 1.541544582928634, + "learning_rate": 3.366341029301534e-08, + "loss": 0.9488, + "step": 10459 + }, + { + "epoch": 0.9433196554989404, + "grad_norm": 1.6592937787252175, + "learning_rate": 3.355675760065857e-08, + "loss": 0.9398, + "step": 10460 + }, + { + "epoch": 0.9434098390224106, + "grad_norm": 0.7316006545163497, + "learning_rate": 3.345027269608236e-08, + "loss": 0.7448, + "step": 10461 + }, + { + "epoch": 0.9435000225458808, + "grad_norm": 0.9612085843930424, + "learning_rate": 3.334395558837211e-08, + "loss": 0.8107, + "step": 10462 + }, + { + "epoch": 0.9435902060693512, + "grad_norm": 1.2332960314307686, + "learning_rate": 3.3237806286599667e-08, + "loss": 0.8457, + "step": 10463 + }, + { + "epoch": 0.9436803895928214, + "grad_norm": 1.368373509017426, + "learning_rate": 3.313182479982224e-08, + "loss": 0.9272, + "step": 10464 + }, + { + "epoch": 0.9437705731162916, + "grad_norm": 1.4318620164499867, + "learning_rate": 3.302601113708259e-08, + "loss": 1.0, + "step": 10465 + }, + { + "epoch": 0.9438607566397619, + "grad_norm": 1.1823366747564164, + "learning_rate": 3.292036530740972e-08, + "loss": 0.984, + "step": 10466 + }, + { + "epoch": 0.9439509401632322, + "grad_norm": 1.2959828352880554, + "learning_rate": 3.2814887319817294e-08, + "loss": 0.932, + "step": 10467 + }, + { + "epoch": 0.9440411236867025, + "grad_norm": 1.466563199829741, + "learning_rate": 3.270957718330591e-08, + "loss": 0.9811, + "step": 10468 + }, + { + "epoch": 0.9441313072101727, + "grad_norm": 1.207679993934879, + "learning_rate": 3.260443490686082e-08, + "loss": 0.9779, + "step": 10469 + }, + { + "epoch": 0.9442214907336429, + "grad_norm": 1.5910296691679282, + "learning_rate": 3.249946049945351e-08, + "loss": 0.9467, + "step": 10470 + }, + { + "epoch": 0.9443116742571133, + "grad_norm": 1.622159455919885, + "learning_rate": 3.239465397004082e-08, + "loss": 0.9462, + "step": 10471 + }, + { + "epoch": 0.9444018577805835, + "grad_norm": 1.4706257955578987, + "learning_rate": 3.229001532756559e-08, + "loss": 0.8959, + "step": 10472 + }, + { + "epoch": 0.9444920413040537, + "grad_norm": 1.8796551455077966, + "learning_rate": 3.218554458095602e-08, + "loss": 0.9308, + "step": 10473 + }, + { + "epoch": 0.944582224827524, + "grad_norm": 1.4743762912073501, + "learning_rate": 3.20812417391263e-08, + "loss": 0.9457, + "step": 10474 + }, + { + "epoch": 0.9446724083509943, + "grad_norm": 1.1471918837656565, + "learning_rate": 3.1977106810975764e-08, + "loss": 0.9292, + "step": 10475 + }, + { + "epoch": 0.9447625918744645, + "grad_norm": 0.8301608414669281, + "learning_rate": 3.187313980539042e-08, + "loss": 0.8698, + "step": 10476 + }, + { + "epoch": 0.9448527753979348, + "grad_norm": 1.2325685054164064, + "learning_rate": 3.176934073124071e-08, + "loss": 1.023, + "step": 10477 + }, + { + "epoch": 0.944942958921405, + "grad_norm": 1.5198163154944144, + "learning_rate": 3.166570959738357e-08, + "loss": 0.9321, + "step": 10478 + }, + { + "epoch": 0.9450331424448754, + "grad_norm": 1.4681966023139628, + "learning_rate": 3.1562246412661476e-08, + "loss": 0.8763, + "step": 10479 + }, + { + "epoch": 0.9451233259683456, + "grad_norm": 1.3793137248178702, + 
"learning_rate": 3.145895118590225e-08, + "loss": 0.9713, + "step": 10480 + }, + { + "epoch": 0.9452135094918158, + "grad_norm": 1.5134034540228978, + "learning_rate": 3.135582392591996e-08, + "loss": 0.8506, + "step": 10481 + }, + { + "epoch": 0.9453036930152862, + "grad_norm": 0.5967843421272248, + "learning_rate": 3.125286464151333e-08, + "loss": 0.7599, + "step": 10482 + }, + { + "epoch": 0.9453938765387564, + "grad_norm": 1.65423598284387, + "learning_rate": 3.115007334146824e-08, + "loss": 0.909, + "step": 10483 + }, + { + "epoch": 0.9454840600622266, + "grad_norm": 0.6558588769762013, + "learning_rate": 3.104745003455478e-08, + "loss": 0.8045, + "step": 10484 + }, + { + "epoch": 0.9455742435856969, + "grad_norm": 1.3830052473143841, + "learning_rate": 3.094499472952972e-08, + "loss": 0.9538, + "step": 10485 + }, + { + "epoch": 0.9456644271091672, + "grad_norm": 1.9823779773080035, + "learning_rate": 3.084270743513495e-08, + "loss": 0.9034, + "step": 10486 + }, + { + "epoch": 0.9457546106326374, + "grad_norm": 1.5623211567570177, + "learning_rate": 3.074058816009817e-08, + "loss": 0.8292, + "step": 10487 + }, + { + "epoch": 0.9458447941561077, + "grad_norm": 1.501894946019456, + "learning_rate": 3.063863691313284e-08, + "loss": 0.8903, + "step": 10488 + }, + { + "epoch": 0.9459349776795779, + "grad_norm": 1.4656167394083988, + "learning_rate": 3.0536853702937794e-08, + "loss": 0.8799, + "step": 10489 + }, + { + "epoch": 0.9460251612030482, + "grad_norm": 1.7157198464557717, + "learning_rate": 3.043523853819807e-08, + "loss": 0.94, + "step": 10490 + }, + { + "epoch": 0.9461153447265185, + "grad_norm": 1.8711696919935807, + "learning_rate": 3.0333791427583855e-08, + "loss": 0.951, + "step": 10491 + }, + { + "epoch": 0.9462055282499887, + "grad_norm": 1.337677475037118, + "learning_rate": 3.023251237975111e-08, + "loss": 1.0575, + "step": 10492 + }, + { + "epoch": 0.946295711773459, + "grad_norm": 0.6711053210005902, + "learning_rate": 3.0131401403341584e-08, + "loss": 0.8219, + "step": 10493 + }, + { + "epoch": 0.9463858952969293, + "grad_norm": 1.2455165566623039, + "learning_rate": 3.00304585069826e-08, + "loss": 0.9185, + "step": 10494 + }, + { + "epoch": 0.9464760788203995, + "grad_norm": 1.926827291714054, + "learning_rate": 2.992968369928728e-08, + "loss": 0.9292, + "step": 10495 + }, + { + "epoch": 0.9465662623438698, + "grad_norm": 1.420988875719375, + "learning_rate": 2.982907698885429e-08, + "loss": 0.9506, + "step": 10496 + }, + { + "epoch": 0.94665644586734, + "grad_norm": 1.945348716306796, + "learning_rate": 2.9728638384267645e-08, + "loss": 0.9591, + "step": 10497 + }, + { + "epoch": 0.9467466293908103, + "grad_norm": 1.5543727843756199, + "learning_rate": 2.962836789409784e-08, + "loss": 0.9424, + "step": 10498 + }, + { + "epoch": 0.9468368129142806, + "grad_norm": 1.381078851030149, + "learning_rate": 2.95282655268998e-08, + "loss": 1.0311, + "step": 10499 + }, + { + "epoch": 0.9469269964377508, + "grad_norm": 2.2087173367963464, + "learning_rate": 2.942833129121558e-08, + "loss": 0.9347, + "step": 10500 + }, + { + "epoch": 0.947017179961221, + "grad_norm": 1.366240604015824, + "learning_rate": 2.9328565195571475e-08, + "loss": 0.8635, + "step": 10501 + }, + { + "epoch": 0.9471073634846914, + "grad_norm": 1.487002647023003, + "learning_rate": 2.9228967248480675e-08, + "loss": 1.0056, + "step": 10502 + }, + { + "epoch": 0.9471975470081616, + "grad_norm": 1.6503758175879744, + "learning_rate": 2.912953745844082e-08, + "loss": 0.9798, + "step": 10503 + }, + { + "epoch": 
0.9472877305316318, + "grad_norm": 1.3728130027445309, + "learning_rate": 2.9030275833936247e-08, + "loss": 0.9271, + "step": 10504 + }, + { + "epoch": 0.9473779140551022, + "grad_norm": 1.5930283352426555, + "learning_rate": 2.893118238343617e-08, + "loss": 0.8441, + "step": 10505 + }, + { + "epoch": 0.9474680975785724, + "grad_norm": 1.5902503990840906, + "learning_rate": 2.8832257115396052e-08, + "loss": 0.8515, + "step": 10506 + }, + { + "epoch": 0.9475582811020427, + "grad_norm": 1.4729797655516967, + "learning_rate": 2.873350003825692e-08, + "loss": 0.9206, + "step": 10507 + }, + { + "epoch": 0.9476484646255129, + "grad_norm": 0.5981721832734556, + "learning_rate": 2.8634911160444696e-08, + "loss": 0.6991, + "step": 10508 + }, + { + "epoch": 0.9477386481489832, + "grad_norm": 1.344225865759501, + "learning_rate": 2.853649049037199e-08, + "loss": 0.999, + "step": 10509 + }, + { + "epoch": 0.9478288316724535, + "grad_norm": 1.539871235703293, + "learning_rate": 2.8438238036436525e-08, + "loss": 0.9917, + "step": 10510 + }, + { + "epoch": 0.9479190151959237, + "grad_norm": 2.04628518029453, + "learning_rate": 2.834015380702137e-08, + "loss": 0.9363, + "step": 10511 + }, + { + "epoch": 0.9480091987193939, + "grad_norm": 0.6150428110434254, + "learning_rate": 2.824223781049606e-08, + "loss": 0.7385, + "step": 10512 + }, + { + "epoch": 0.9480993822428643, + "grad_norm": 2.032429162136477, + "learning_rate": 2.8144490055215465e-08, + "loss": 1.0, + "step": 10513 + }, + { + "epoch": 0.9481895657663345, + "grad_norm": 1.4200736572074462, + "learning_rate": 2.8046910549519355e-08, + "loss": 0.9733, + "step": 10514 + }, + { + "epoch": 0.9482797492898047, + "grad_norm": 1.6150434351179244, + "learning_rate": 2.794949930173418e-08, + "loss": 0.9441, + "step": 10515 + }, + { + "epoch": 0.948369932813275, + "grad_norm": 1.451164563906339, + "learning_rate": 2.7852256320171296e-08, + "loss": 0.9707, + "step": 10516 + }, + { + "epoch": 0.9484601163367453, + "grad_norm": 1.6049891588552008, + "learning_rate": 2.775518161312851e-08, + "loss": 0.7917, + "step": 10517 + }, + { + "epoch": 0.9485502998602156, + "grad_norm": 1.2390451571969365, + "learning_rate": 2.76582751888883e-08, + "loss": 0.9668, + "step": 10518 + }, + { + "epoch": 0.9486404833836858, + "grad_norm": 1.634039969596264, + "learning_rate": 2.756153705571962e-08, + "loss": 0.9529, + "step": 10519 + }, + { + "epoch": 0.948730666907156, + "grad_norm": 1.23590230011518, + "learning_rate": 2.74649672218763e-08, + "loss": 0.9225, + "step": 10520 + }, + { + "epoch": 0.9488208504306264, + "grad_norm": 1.40184543437283, + "learning_rate": 2.7368565695598424e-08, + "loss": 0.9288, + "step": 10521 + }, + { + "epoch": 0.9489110339540966, + "grad_norm": 1.3508379543177556, + "learning_rate": 2.727233248511185e-08, + "loss": 0.8666, + "step": 10522 + }, + { + "epoch": 0.9490012174775668, + "grad_norm": 1.6618425489042676, + "learning_rate": 2.71762675986269e-08, + "loss": 0.8899, + "step": 10523 + }, + { + "epoch": 0.9490914010010371, + "grad_norm": 1.2869458015014452, + "learning_rate": 2.7080371044341242e-08, + "loss": 0.9763, + "step": 10524 + }, + { + "epoch": 0.9491815845245074, + "grad_norm": 1.307740437152708, + "learning_rate": 2.6984642830436556e-08, + "loss": 0.9608, + "step": 10525 + }, + { + "epoch": 0.9492717680479776, + "grad_norm": 1.457990560665893, + "learning_rate": 2.688908296508141e-08, + "loss": 0.9194, + "step": 10526 + }, + { + "epoch": 0.9493619515714479, + "grad_norm": 1.6459757425367882, + "learning_rate": 
2.679369145642929e-08, + "loss": 0.9884, + "step": 10527 + }, + { + "epoch": 0.9494521350949181, + "grad_norm": 1.4816833988756788, + "learning_rate": 2.669846831261946e-08, + "loss": 0.9591, + "step": 10528 + }, + { + "epoch": 0.9495423186183884, + "grad_norm": 1.9106532037442137, + "learning_rate": 2.6603413541776976e-08, + "loss": 0.9279, + "step": 10529 + }, + { + "epoch": 0.9496325021418587, + "grad_norm": 1.286497952035222, + "learning_rate": 2.6508527152012683e-08, + "loss": 0.8938, + "step": 10530 + }, + { + "epoch": 0.9497226856653289, + "grad_norm": 1.3001214264133907, + "learning_rate": 2.641380915142233e-08, + "loss": 0.8405, + "step": 10531 + }, + { + "epoch": 0.9498128691887993, + "grad_norm": 1.5060093034454365, + "learning_rate": 2.6319259548088334e-08, + "loss": 0.9547, + "step": 10532 + }, + { + "epoch": 0.9499030527122695, + "grad_norm": 1.7403259595753606, + "learning_rate": 2.6224878350077585e-08, + "loss": 0.9371, + "step": 10533 + }, + { + "epoch": 0.9499932362357397, + "grad_norm": 1.422804224906399, + "learning_rate": 2.6130665565443633e-08, + "loss": 0.8518, + "step": 10534 + }, + { + "epoch": 0.95008341975921, + "grad_norm": 1.221027398335501, + "learning_rate": 2.603662120222494e-08, + "loss": 0.9401, + "step": 10535 + }, + { + "epoch": 0.9501736032826803, + "grad_norm": 1.5757891297793503, + "learning_rate": 2.59427452684462e-08, + "loss": 0.7829, + "step": 10536 + }, + { + "epoch": 0.9502637868061505, + "grad_norm": 1.3060576958990326, + "learning_rate": 2.5849037772117443e-08, + "loss": 0.9146, + "step": 10537 + }, + { + "epoch": 0.9503539703296208, + "grad_norm": 1.4509326434706304, + "learning_rate": 2.575549872123384e-08, + "loss": 0.9644, + "step": 10538 + }, + { + "epoch": 0.950444153853091, + "grad_norm": 1.3303923606782833, + "learning_rate": 2.5662128123776994e-08, + "loss": 0.9375, + "step": 10539 + }, + { + "epoch": 0.9505343373765613, + "grad_norm": 1.4231680555368496, + "learning_rate": 2.5568925987713875e-08, + "loss": 0.9276, + "step": 10540 + }, + { + "epoch": 0.9506245209000316, + "grad_norm": 1.6574472382722323, + "learning_rate": 2.5475892320996785e-08, + "loss": 0.9135, + "step": 10541 + }, + { + "epoch": 0.9507147044235018, + "grad_norm": 0.7542978959395198, + "learning_rate": 2.5383027131564038e-08, + "loss": 0.8135, + "step": 10542 + }, + { + "epoch": 0.950804887946972, + "grad_norm": 1.664294231201215, + "learning_rate": 2.52903304273393e-08, + "loss": 0.9178, + "step": 10543 + }, + { + "epoch": 0.9508950714704424, + "grad_norm": 1.2451607012157555, + "learning_rate": 2.519780221623202e-08, + "loss": 0.9214, + "step": 10544 + }, + { + "epoch": 0.9509852549939126, + "grad_norm": 1.663281897907122, + "learning_rate": 2.510544250613722e-08, + "loss": 0.8923, + "step": 10545 + }, + { + "epoch": 0.9510754385173829, + "grad_norm": 1.4684947989309682, + "learning_rate": 2.501325130493548e-08, + "loss": 1.029, + "step": 10546 + }, + { + "epoch": 0.9511656220408531, + "grad_norm": 0.7311373827689976, + "learning_rate": 2.4921228620493395e-08, + "loss": 0.811, + "step": 10547 + }, + { + "epoch": 0.9512558055643234, + "grad_norm": 0.7336942073789368, + "learning_rate": 2.4829374460662244e-08, + "loss": 0.839, + "step": 10548 + }, + { + "epoch": 0.9513459890877937, + "grad_norm": 1.2778607643287527, + "learning_rate": 2.473768883327976e-08, + "loss": 0.9495, + "step": 10549 + }, + { + "epoch": 0.9514361726112639, + "grad_norm": 1.4539351307226074, + "learning_rate": 2.464617174616923e-08, + "loss": 0.904, + "step": 10550 + }, + { + "epoch": 
0.9515263561347341, + "grad_norm": 1.5173097665316415, + "learning_rate": 2.455482320713953e-08, + "loss": 0.9338, + "step": 10551 + }, + { + "epoch": 0.9516165396582045, + "grad_norm": 1.3965581701027616, + "learning_rate": 2.4463643223984643e-08, + "loss": 0.993, + "step": 10552 + }, + { + "epoch": 0.9517067231816747, + "grad_norm": 1.7681417167361462, + "learning_rate": 2.4372631804484567e-08, + "loss": 1.0115, + "step": 10553 + }, + { + "epoch": 0.9517969067051449, + "grad_norm": 1.516378864117687, + "learning_rate": 2.4281788956405313e-08, + "loss": 1.0152, + "step": 10554 + }, + { + "epoch": 0.9518870902286153, + "grad_norm": 1.5934791334765175, + "learning_rate": 2.4191114687497572e-08, + "loss": 0.8736, + "step": 10555 + }, + { + "epoch": 0.9519772737520855, + "grad_norm": 1.7880605365609454, + "learning_rate": 2.4100609005498706e-08, + "loss": 0.9034, + "step": 10556 + }, + { + "epoch": 0.9520674572755558, + "grad_norm": 1.5164085800884097, + "learning_rate": 2.4010271918130764e-08, + "loss": 0.87, + "step": 10557 + }, + { + "epoch": 0.952157640799026, + "grad_norm": 1.3973289656686105, + "learning_rate": 2.39201034331018e-08, + "loss": 0.9006, + "step": 10558 + }, + { + "epoch": 0.9522478243224963, + "grad_norm": 1.5528003990483348, + "learning_rate": 2.3830103558105663e-08, + "loss": 0.9267, + "step": 10559 + }, + { + "epoch": 0.9523380078459666, + "grad_norm": 1.8935400188599902, + "learning_rate": 2.374027230082154e-08, + "loss": 0.9448, + "step": 10560 + }, + { + "epoch": 0.9524281913694368, + "grad_norm": 0.7286487692900363, + "learning_rate": 2.365060966891441e-08, + "loss": 0.828, + "step": 10561 + }, + { + "epoch": 0.952518374892907, + "grad_norm": 1.4629388656098103, + "learning_rate": 2.3561115670034827e-08, + "loss": 0.9307, + "step": 10562 + }, + { + "epoch": 0.9526085584163774, + "grad_norm": 1.4802458502220532, + "learning_rate": 2.3471790311818675e-08, + "loss": 0.9578, + "step": 10563 + }, + { + "epoch": 0.9526987419398476, + "grad_norm": 1.3709349798453692, + "learning_rate": 2.338263360188808e-08, + "loss": 0.929, + "step": 10564 + }, + { + "epoch": 0.9527889254633178, + "grad_norm": 2.2240431067385145, + "learning_rate": 2.329364554784985e-08, + "loss": 0.8819, + "step": 10565 + }, + { + "epoch": 0.9528791089867881, + "grad_norm": 1.416938176036009, + "learning_rate": 2.3204826157297465e-08, + "loss": 1.0038, + "step": 10566 + }, + { + "epoch": 0.9529692925102584, + "grad_norm": 1.4400024194167262, + "learning_rate": 2.3116175437809082e-08, + "loss": 0.8719, + "step": 10567 + }, + { + "epoch": 0.9530594760337286, + "grad_norm": 0.7209539209685826, + "learning_rate": 2.30276933969491e-08, + "loss": 0.8341, + "step": 10568 + }, + { + "epoch": 0.9531496595571989, + "grad_norm": 1.3110016489986056, + "learning_rate": 2.2939380042267255e-08, + "loss": 0.9913, + "step": 10569 + }, + { + "epoch": 0.9532398430806691, + "grad_norm": 1.4460427578715553, + "learning_rate": 2.2851235381298627e-08, + "loss": 0.8442, + "step": 10570 + }, + { + "epoch": 0.9533300266041395, + "grad_norm": 1.4516967809877013, + "learning_rate": 2.2763259421564986e-08, + "loss": 0.8627, + "step": 10571 + }, + { + "epoch": 0.9534202101276097, + "grad_norm": 1.4147083641082727, + "learning_rate": 2.2675452170571873e-08, + "loss": 0.9105, + "step": 10572 + }, + { + "epoch": 0.9535103936510799, + "grad_norm": 1.3836405370847797, + "learning_rate": 2.2587813635812414e-08, + "loss": 0.932, + "step": 10573 + }, + { + "epoch": 0.9536005771745502, + "grad_norm": 1.4260926625078456, + "learning_rate": 
2.2500343824763958e-08, + "loss": 1.0131, + "step": 10574 + }, + { + "epoch": 0.9536907606980205, + "grad_norm": 1.6332924607411383, + "learning_rate": 2.2413042744890088e-08, + "loss": 0.8791, + "step": 10575 + }, + { + "epoch": 0.9537809442214907, + "grad_norm": 1.3468424233305007, + "learning_rate": 2.2325910403639514e-08, + "loss": 0.9291, + "step": 10576 + }, + { + "epoch": 0.953871127744961, + "grad_norm": 1.3884896160117954, + "learning_rate": 2.223894680844718e-08, + "loss": 0.8881, + "step": 10577 + }, + { + "epoch": 0.9539613112684313, + "grad_norm": 1.26806640625, + "learning_rate": 2.2152151966733146e-08, + "loss": 0.7733, + "step": 10578 + }, + { + "epoch": 0.9540514947919015, + "grad_norm": 1.1938950854906354, + "learning_rate": 2.2065525885903267e-08, + "loss": 0.9645, + "step": 10579 + }, + { + "epoch": 0.9541416783153718, + "grad_norm": 1.376179795860951, + "learning_rate": 2.1979068573348747e-08, + "loss": 0.9841, + "step": 10580 + }, + { + "epoch": 0.954231861838842, + "grad_norm": 1.3568955533137173, + "learning_rate": 2.1892780036447013e-08, + "loss": 0.897, + "step": 10581 + }, + { + "epoch": 0.9543220453623124, + "grad_norm": 1.84588719010269, + "learning_rate": 2.1806660282560175e-08, + "loss": 1.0789, + "step": 10582 + }, + { + "epoch": 0.9544122288857826, + "grad_norm": 1.2915545794347738, + "learning_rate": 2.1720709319037024e-08, + "loss": 0.9575, + "step": 10583 + }, + { + "epoch": 0.9545024124092528, + "grad_norm": 1.452355622093757, + "learning_rate": 2.1634927153211023e-08, + "loss": 0.9027, + "step": 10584 + }, + { + "epoch": 0.954592595932723, + "grad_norm": 1.6333749340883823, + "learning_rate": 2.1549313792401437e-08, + "loss": 1.0071, + "step": 10585 + }, + { + "epoch": 0.9546827794561934, + "grad_norm": 1.4110796327701054, + "learning_rate": 2.1463869243913746e-08, + "loss": 1.0256, + "step": 10586 + }, + { + "epoch": 0.9547729629796636, + "grad_norm": 1.6767662727973442, + "learning_rate": 2.1378593515037902e-08, + "loss": 0.9206, + "step": 10587 + }, + { + "epoch": 0.9548631465031339, + "grad_norm": 1.3989664281562135, + "learning_rate": 2.129348661305075e-08, + "loss": 0.9999, + "step": 10588 + }, + { + "epoch": 0.9549533300266041, + "grad_norm": 1.4970518386009537, + "learning_rate": 2.1208548545213813e-08, + "loss": 0.9635, + "step": 10589 + }, + { + "epoch": 0.9550435135500744, + "grad_norm": 1.4843782123731688, + "learning_rate": 2.1123779318774404e-08, + "loss": 0.8951, + "step": 10590 + }, + { + "epoch": 0.9551336970735447, + "grad_norm": 1.2963820750122808, + "learning_rate": 2.1039178940965408e-08, + "loss": 0.8788, + "step": 10591 + }, + { + "epoch": 0.9552238805970149, + "grad_norm": 1.2534228667701095, + "learning_rate": 2.0954747419005712e-08, + "loss": 0.9972, + "step": 10592 + }, + { + "epoch": 0.9553140641204851, + "grad_norm": 1.4429168642275076, + "learning_rate": 2.087048476009934e-08, + "loss": 0.9184, + "step": 10593 + }, + { + "epoch": 0.9554042476439555, + "grad_norm": 1.643292755815009, + "learning_rate": 2.0786390971435862e-08, + "loss": 0.9061, + "step": 10594 + }, + { + "epoch": 0.9554944311674257, + "grad_norm": 1.600139599908951, + "learning_rate": 2.070246606019088e-08, + "loss": 0.8953, + "step": 10595 + }, + { + "epoch": 0.955584614690896, + "grad_norm": 2.480519307740171, + "learning_rate": 2.0618710033525112e-08, + "loss": 0.8538, + "step": 10596 + }, + { + "epoch": 0.9556747982143662, + "grad_norm": 1.459098188462596, + "learning_rate": 2.053512289858528e-08, + "loss": 0.899, + "step": 10597 + }, + { + "epoch": 
0.9557649817378365, + "grad_norm": 1.7430617529500119, + "learning_rate": 2.0451704662503456e-08, + "loss": 0.9747, + "step": 10598 + }, + { + "epoch": 0.9558551652613068, + "grad_norm": 1.6568023192567678, + "learning_rate": 2.0368455332397282e-08, + "loss": 1.0126, + "step": 10599 + }, + { + "epoch": 0.955945348784777, + "grad_norm": 1.338220575118539, + "learning_rate": 2.0285374915369967e-08, + "loss": 0.9508, + "step": 10600 + }, + { + "epoch": 0.9560355323082473, + "grad_norm": 1.542525061059094, + "learning_rate": 2.020246341851073e-08, + "loss": 0.9751, + "step": 10601 + }, + { + "epoch": 0.9561257158317176, + "grad_norm": 1.4814630958731994, + "learning_rate": 2.0119720848893463e-08, + "loss": 0.981, + "step": 10602 + }, + { + "epoch": 0.9562158993551878, + "grad_norm": 1.3945766836934264, + "learning_rate": 2.0037147213578964e-08, + "loss": 0.8898, + "step": 10603 + }, + { + "epoch": 0.956306082878658, + "grad_norm": 1.3152743490662957, + "learning_rate": 1.9954742519612265e-08, + "loss": 0.9647, + "step": 10604 + }, + { + "epoch": 0.9563962664021284, + "grad_norm": 1.4672613513303996, + "learning_rate": 1.9872506774024633e-08, + "loss": 0.9061, + "step": 10605 + }, + { + "epoch": 0.9564864499255986, + "grad_norm": 1.4730104379691988, + "learning_rate": 1.979043998383334e-08, + "loss": 0.9459, + "step": 10606 + }, + { + "epoch": 0.9565766334490688, + "grad_norm": 1.1859591174260062, + "learning_rate": 1.970854215604034e-08, + "loss": 0.8861, + "step": 10607 + }, + { + "epoch": 0.9566668169725391, + "grad_norm": 1.48058316636637, + "learning_rate": 1.9626813297633826e-08, + "loss": 0.9145, + "step": 10608 + }, + { + "epoch": 0.9567570004960094, + "grad_norm": 1.6300260870548575, + "learning_rate": 1.954525341558688e-08, + "loss": 0.8333, + "step": 10609 + }, + { + "epoch": 0.9568471840194797, + "grad_norm": 1.6080660635818884, + "learning_rate": 1.9463862516859498e-08, + "loss": 0.8967, + "step": 10610 + }, + { + "epoch": 0.9569373675429499, + "grad_norm": 1.500817632358287, + "learning_rate": 1.938264060839545e-08, + "loss": 1.0174, + "step": 10611 + }, + { + "epoch": 0.9570275510664201, + "grad_norm": 1.5407713876626594, + "learning_rate": 1.9301587697126086e-08, + "loss": 0.9112, + "step": 10612 + }, + { + "epoch": 0.9571177345898905, + "grad_norm": 1.615114588141584, + "learning_rate": 1.9220703789966318e-08, + "loss": 0.8925, + "step": 10613 + }, + { + "epoch": 0.9572079181133607, + "grad_norm": 1.504711777599162, + "learning_rate": 1.913998889381818e-08, + "loss": 0.8972, + "step": 10614 + }, + { + "epoch": 0.9572981016368309, + "grad_norm": 1.4350323436162447, + "learning_rate": 1.9059443015568387e-08, + "loss": 0.9422, + "step": 10615 + }, + { + "epoch": 0.9573882851603012, + "grad_norm": 1.3619200023202653, + "learning_rate": 1.8979066162089884e-08, + "loss": 0.9427, + "step": 10616 + }, + { + "epoch": 0.9574784686837715, + "grad_norm": 1.5382554315092631, + "learning_rate": 1.889885834024052e-08, + "loss": 0.9364, + "step": 10617 + }, + { + "epoch": 0.9575686522072417, + "grad_norm": 1.3825916178527164, + "learning_rate": 1.8818819556864374e-08, + "loss": 0.8947, + "step": 10618 + }, + { + "epoch": 0.957658835730712, + "grad_norm": 1.4666234306262678, + "learning_rate": 1.873894981879065e-08, + "loss": 0.923, + "step": 10619 + }, + { + "epoch": 0.9577490192541822, + "grad_norm": 0.6632252969494623, + "learning_rate": 1.8659249132834342e-08, + "loss": 0.7973, + "step": 10620 + }, + { + "epoch": 0.9578392027776526, + "grad_norm": 1.4015036699244954, + "learning_rate": 
1.857971750579579e-08, + "loss": 0.9173, + "step": 10621 + }, + { + "epoch": 0.9579293863011228, + "grad_norm": 1.325887208479775, + "learning_rate": 1.8500354944461116e-08, + "loss": 0.8212, + "step": 10622 + }, + { + "epoch": 0.958019569824593, + "grad_norm": 1.1984689462209053, + "learning_rate": 1.8421161455602242e-08, + "loss": 0.9455, + "step": 10623 + }, + { + "epoch": 0.9581097533480634, + "grad_norm": 1.3285273223065879, + "learning_rate": 1.834213704597598e-08, + "loss": 0.9654, + "step": 10624 + }, + { + "epoch": 0.9581999368715336, + "grad_norm": 1.3289656727292103, + "learning_rate": 1.8263281722325385e-08, + "loss": 0.9452, + "step": 10625 + }, + { + "epoch": 0.9582901203950038, + "grad_norm": 1.438239197787096, + "learning_rate": 1.818459549137885e-08, + "loss": 0.9435, + "step": 10626 + }, + { + "epoch": 0.9583803039184741, + "grad_norm": 1.465549146564138, + "learning_rate": 1.8106078359850117e-08, + "loss": 0.9226, + "step": 10627 + }, + { + "epoch": 0.9584704874419444, + "grad_norm": 1.4462319413675704, + "learning_rate": 1.802773033443894e-08, + "loss": 0.9489, + "step": 10628 + }, + { + "epoch": 0.9585606709654146, + "grad_norm": 1.5181911806182324, + "learning_rate": 1.7949551421830413e-08, + "loss": 0.9058, + "step": 10629 + }, + { + "epoch": 0.9586508544888849, + "grad_norm": 1.4182643109451367, + "learning_rate": 1.7871541628694752e-08, + "loss": 0.9079, + "step": 10630 + }, + { + "epoch": 0.9587410380123551, + "grad_norm": 1.291087369324827, + "learning_rate": 1.779370096168864e-08, + "loss": 0.845, + "step": 10631 + }, + { + "epoch": 0.9588312215358255, + "grad_norm": 1.3675018385154738, + "learning_rate": 1.771602942745387e-08, + "loss": 0.8964, + "step": 10632 + }, + { + "epoch": 0.9589214050592957, + "grad_norm": 1.8342238849737318, + "learning_rate": 1.763852703261759e-08, + "loss": 0.9193, + "step": 10633 + }, + { + "epoch": 0.9590115885827659, + "grad_norm": 1.3661571871127796, + "learning_rate": 1.756119378379295e-08, + "loss": 0.9969, + "step": 10634 + }, + { + "epoch": 0.9591017721062362, + "grad_norm": 1.4801442940711431, + "learning_rate": 1.7484029687578005e-08, + "loss": 1.0277, + "step": 10635 + }, + { + "epoch": 0.9591919556297065, + "grad_norm": 1.3614300507709127, + "learning_rate": 1.740703475055727e-08, + "loss": 0.9123, + "step": 10636 + }, + { + "epoch": 0.9592821391531767, + "grad_norm": 2.092091373118431, + "learning_rate": 1.7330208979300153e-08, + "loss": 0.9734, + "step": 10637 + }, + { + "epoch": 0.959372322676647, + "grad_norm": 1.232764968632658, + "learning_rate": 1.725355238036208e-08, + "loss": 0.9782, + "step": 10638 + }, + { + "epoch": 0.9594625062001172, + "grad_norm": 1.573285604833277, + "learning_rate": 1.7177064960283594e-08, + "loss": 0.9842, + "step": 10639 + }, + { + "epoch": 0.9595526897235875, + "grad_norm": 1.5065102759921427, + "learning_rate": 1.7100746725591253e-08, + "loss": 0.9289, + "step": 10640 + }, + { + "epoch": 0.9596428732470578, + "grad_norm": 2.1496756020599475, + "learning_rate": 1.7024597682796517e-08, + "loss": 0.9254, + "step": 10641 + }, + { + "epoch": 0.959733056770528, + "grad_norm": 1.7422468919423877, + "learning_rate": 1.6948617838397293e-08, + "loss": 1.0027, + "step": 10642 + }, + { + "epoch": 0.9598232402939982, + "grad_norm": 1.712548599110456, + "learning_rate": 1.6872807198876404e-08, + "loss": 0.9649, + "step": 10643 + }, + { + "epoch": 0.9599134238174686, + "grad_norm": 1.4840009519208954, + "learning_rate": 1.679716577070245e-08, + "loss": 1.0287, + "step": 10644 + }, + { + "epoch": 
0.9600036073409388, + "grad_norm": 1.4861189385482303, + "learning_rate": 1.6721693560329596e-08, + "loss": 1.028, + "step": 10645 + }, + { + "epoch": 0.960093790864409, + "grad_norm": 1.4457499074182245, + "learning_rate": 1.6646390574197366e-08, + "loss": 0.8427, + "step": 10646 + }, + { + "epoch": 0.9601839743878793, + "grad_norm": 0.7891822572985487, + "learning_rate": 1.6571256818731504e-08, + "loss": 0.8513, + "step": 10647 + }, + { + "epoch": 0.9602741579113496, + "grad_norm": 1.2519445552919453, + "learning_rate": 1.6496292300342218e-08, + "loss": 0.8672, + "step": 10648 + }, + { + "epoch": 0.9603643414348199, + "grad_norm": 1.5542781425089287, + "learning_rate": 1.642149702542639e-08, + "loss": 0.8768, + "step": 10649 + }, + { + "epoch": 0.9604545249582901, + "grad_norm": 1.3715568694987554, + "learning_rate": 1.634687100036558e-08, + "loss": 0.9065, + "step": 10650 + }, + { + "epoch": 0.9605447084817604, + "grad_norm": 1.5469578422172772, + "learning_rate": 1.627241423152781e-08, + "loss": 0.8691, + "step": 10651 + }, + { + "epoch": 0.9606348920052307, + "grad_norm": 1.5649149733288321, + "learning_rate": 1.619812672526555e-08, + "loss": 0.9895, + "step": 10652 + }, + { + "epoch": 0.9607250755287009, + "grad_norm": 1.5070333098164626, + "learning_rate": 1.6124008487917727e-08, + "loss": 0.9202, + "step": 10653 + }, + { + "epoch": 0.9608152590521711, + "grad_norm": 0.7224997273612911, + "learning_rate": 1.6050059525808623e-08, + "loss": 0.8224, + "step": 10654 + }, + { + "epoch": 0.9609054425756415, + "grad_norm": 0.6653472813535884, + "learning_rate": 1.597627984524763e-08, + "loss": 0.8076, + "step": 10655 + }, + { + "epoch": 0.9609956260991117, + "grad_norm": 1.2815042569093107, + "learning_rate": 1.590266945253038e-08, + "loss": 0.972, + "step": 10656 + }, + { + "epoch": 0.9610858096225819, + "grad_norm": 1.2734233176722956, + "learning_rate": 1.582922835393763e-08, + "loss": 0.9451, + "step": 10657 + }, + { + "epoch": 0.9611759931460522, + "grad_norm": 1.3920561038089425, + "learning_rate": 1.5755956555735473e-08, + "loss": 0.9151, + "step": 10658 + }, + { + "epoch": 0.9612661766695225, + "grad_norm": 0.8073818689630838, + "learning_rate": 1.5682854064176244e-08, + "loss": 0.9137, + "step": 10659 + }, + { + "epoch": 0.9613563601929928, + "grad_norm": 1.4933538222752263, + "learning_rate": 1.5609920885497395e-08, + "loss": 0.8677, + "step": 10660 + }, + { + "epoch": 0.961446543716463, + "grad_norm": 1.3816926376392893, + "learning_rate": 1.5537157025921732e-08, + "loss": 0.9497, + "step": 10661 + }, + { + "epoch": 0.9615367272399332, + "grad_norm": 1.7504245379345948, + "learning_rate": 1.5464562491658285e-08, + "loss": 0.915, + "step": 10662 + }, + { + "epoch": 0.9616269107634036, + "grad_norm": 1.3276922530708843, + "learning_rate": 1.5392137288900764e-08, + "loss": 0.9723, + "step": 10663 + }, + { + "epoch": 0.9617170942868738, + "grad_norm": 0.6324518376887284, + "learning_rate": 1.531988142382934e-08, + "loss": 0.8106, + "step": 10664 + }, + { + "epoch": 0.961807277810344, + "grad_norm": 1.4369157350132988, + "learning_rate": 1.5247794902608634e-08, + "loss": 0.8736, + "step": 10665 + }, + { + "epoch": 0.9618974613338143, + "grad_norm": 1.5674254034322357, + "learning_rate": 1.5175877731390398e-08, + "loss": 0.8913, + "step": 10666 + }, + { + "epoch": 0.9619876448572846, + "grad_norm": 1.5402580303917124, + "learning_rate": 1.510412991631016e-08, + "loss": 0.9645, + "step": 10667 + }, + { + "epoch": 0.9620778283807548, + "grad_norm": 1.267582074880691, + "learning_rate": 
1.503255146349014e-08, + "loss": 0.9635, + "step": 10668 + }, + { + "epoch": 0.9621680119042251, + "grad_norm": 1.5160392902889117, + "learning_rate": 1.4961142379037893e-08, + "loss": 0.904, + "step": 10669 + }, + { + "epoch": 0.9622581954276953, + "grad_norm": 1.3820124752651965, + "learning_rate": 1.4889902669046327e-08, + "loss": 0.9743, + "step": 10670 + }, + { + "epoch": 0.9623483789511657, + "grad_norm": 1.6917478996028181, + "learning_rate": 1.4818832339594135e-08, + "loss": 0.93, + "step": 10671 + }, + { + "epoch": 0.9624385624746359, + "grad_norm": 1.3085643252580337, + "learning_rate": 1.474793139674535e-08, + "loss": 0.8856, + "step": 10672 + }, + { + "epoch": 0.9625287459981061, + "grad_norm": 1.3535225387770977, + "learning_rate": 1.4677199846549581e-08, + "loss": 0.9276, + "step": 10673 + }, + { + "epoch": 0.9626189295215765, + "grad_norm": 1.4470928052919363, + "learning_rate": 1.4606637695042224e-08, + "loss": 0.9357, + "step": 10674 + }, + { + "epoch": 0.9627091130450467, + "grad_norm": 1.7225920996107102, + "learning_rate": 1.4536244948243793e-08, + "loss": 1.0323, + "step": 10675 + }, + { + "epoch": 0.9627992965685169, + "grad_norm": 1.4300762544321197, + "learning_rate": 1.4466021612160595e-08, + "loss": 0.9004, + "step": 10676 + }, + { + "epoch": 0.9628894800919872, + "grad_norm": 1.4705825087491904, + "learning_rate": 1.4395967692784505e-08, + "loss": 0.9004, + "step": 10677 + }, + { + "epoch": 0.9629796636154575, + "grad_norm": 1.375843049740384, + "learning_rate": 1.4326083196092963e-08, + "loss": 0.866, + "step": 10678 + }, + { + "epoch": 0.9630698471389277, + "grad_norm": 1.337679970300032, + "learning_rate": 1.42563681280492e-08, + "loss": 0.9492, + "step": 10679 + }, + { + "epoch": 0.963160030662398, + "grad_norm": 1.380797388824876, + "learning_rate": 1.4186822494600902e-08, + "loss": 0.9132, + "step": 10680 + }, + { + "epoch": 0.9632502141858682, + "grad_norm": 1.3143668295206823, + "learning_rate": 1.4117446301682877e-08, + "loss": 0.9288, + "step": 10681 + }, + { + "epoch": 0.9633403977093385, + "grad_norm": 1.246438005780973, + "learning_rate": 1.4048239555214392e-08, + "loss": 0.8869, + "step": 10682 + }, + { + "epoch": 0.9634305812328088, + "grad_norm": 1.330982813640354, + "learning_rate": 1.3979202261100497e-08, + "loss": 0.9678, + "step": 10683 + }, + { + "epoch": 0.963520764756279, + "grad_norm": 1.5611034255481533, + "learning_rate": 1.3910334425231817e-08, + "loss": 0.9913, + "step": 10684 + }, + { + "epoch": 0.9636109482797492, + "grad_norm": 1.5072433108904073, + "learning_rate": 1.384163605348454e-08, + "loss": 0.9364, + "step": 10685 + }, + { + "epoch": 0.9637011318032196, + "grad_norm": 1.5994416126781037, + "learning_rate": 1.3773107151720642e-08, + "loss": 0.9955, + "step": 10686 + }, + { + "epoch": 0.9637913153266898, + "grad_norm": 1.5395558142708328, + "learning_rate": 1.3704747725787003e-08, + "loss": 0.9953, + "step": 10687 + }, + { + "epoch": 0.9638814988501601, + "grad_norm": 0.8013198901803328, + "learning_rate": 1.3636557781516512e-08, + "loss": 0.83, + "step": 10688 + }, + { + "epoch": 0.9639716823736303, + "grad_norm": 1.5924209588285936, + "learning_rate": 1.3568537324727847e-08, + "loss": 0.935, + "step": 10689 + }, + { + "epoch": 0.9640618658971006, + "grad_norm": 1.413402930441464, + "learning_rate": 1.3500686361224589e-08, + "loss": 0.9335, + "step": 10690 + }, + { + "epoch": 0.9641520494205709, + "grad_norm": 1.5741643020641742, + "learning_rate": 1.3433004896796108e-08, + "loss": 0.9391, + "step": 10691 + }, + { + "epoch": 
0.9642422329440411, + "grad_norm": 1.7006654390674338, + "learning_rate": 1.336549293721756e-08, + "loss": 0.8276, + "step": 10692 + }, + { + "epoch": 0.9643324164675113, + "grad_norm": 0.7018533864009651, + "learning_rate": 1.3298150488249227e-08, + "loss": 0.8094, + "step": 10693 + }, + { + "epoch": 0.9644225999909817, + "grad_norm": 1.6464170394649638, + "learning_rate": 1.3230977555637401e-08, + "loss": 1.0112, + "step": 10694 + }, + { + "epoch": 0.9645127835144519, + "grad_norm": 1.3940612257314784, + "learning_rate": 1.3163974145113499e-08, + "loss": 0.8937, + "step": 10695 + }, + { + "epoch": 0.9646029670379221, + "grad_norm": 1.5431964175143664, + "learning_rate": 1.3097140262394723e-08, + "loss": 0.8366, + "step": 10696 + }, + { + "epoch": 0.9646931505613925, + "grad_norm": 1.6432919578424634, + "learning_rate": 1.303047591318318e-08, + "loss": 0.9713, + "step": 10697 + }, + { + "epoch": 0.9647833340848627, + "grad_norm": 1.6497290013030863, + "learning_rate": 1.2963981103167875e-08, + "loss": 0.8688, + "step": 10698 + }, + { + "epoch": 0.964873517608333, + "grad_norm": 1.4120720611749698, + "learning_rate": 1.2897655838021825e-08, + "loss": 0.9165, + "step": 10699 + }, + { + "epoch": 0.9649637011318032, + "grad_norm": 1.8477002515654453, + "learning_rate": 1.2831500123404726e-08, + "loss": 0.9428, + "step": 10700 + }, + { + "epoch": 0.9650538846552735, + "grad_norm": 1.690784436956056, + "learning_rate": 1.2765513964961172e-08, + "loss": 1.0838, + "step": 10701 + }, + { + "epoch": 0.9651440681787438, + "grad_norm": 1.4027578863405954, + "learning_rate": 1.2699697368321549e-08, + "loss": 0.9269, + "step": 10702 + }, + { + "epoch": 0.965234251702214, + "grad_norm": 1.319237130034914, + "learning_rate": 1.2634050339101366e-08, + "loss": 0.9167, + "step": 10703 + }, + { + "epoch": 0.9653244352256842, + "grad_norm": 1.2956423589092834, + "learning_rate": 1.2568572882902361e-08, + "loss": 0.9104, + "step": 10704 + }, + { + "epoch": 0.9654146187491546, + "grad_norm": 1.316179295567224, + "learning_rate": 1.2503265005311402e-08, + "loss": 0.9063, + "step": 10705 + }, + { + "epoch": 0.9655048022726248, + "grad_norm": 2.7020482912244796, + "learning_rate": 1.2438126711900698e-08, + "loss": 0.9014, + "step": 10706 + }, + { + "epoch": 0.965594985796095, + "grad_norm": 1.4216763703498445, + "learning_rate": 1.2373158008228247e-08, + "loss": 0.783, + "step": 10707 + }, + { + "epoch": 0.9656851693195653, + "grad_norm": 1.2390943679463609, + "learning_rate": 1.2308358899837833e-08, + "loss": 0.9033, + "step": 10708 + }, + { + "epoch": 0.9657753528430356, + "grad_norm": 1.4899373289663684, + "learning_rate": 1.224372939225815e-08, + "loss": 0.9149, + "step": 10709 + }, + { + "epoch": 0.9658655363665059, + "grad_norm": 1.3238737393937707, + "learning_rate": 1.2179269491003674e-08, + "loss": 1.0224, + "step": 10710 + }, + { + "epoch": 0.9659557198899761, + "grad_norm": 1.3121351007300084, + "learning_rate": 1.2114979201574894e-08, + "loss": 0.8114, + "step": 10711 + }, + { + "epoch": 0.9660459034134463, + "grad_norm": 1.816934553862469, + "learning_rate": 1.2050858529456975e-08, + "loss": 0.954, + "step": 10712 + }, + { + "epoch": 0.9661360869369167, + "grad_norm": 1.3997280623087514, + "learning_rate": 1.1986907480121545e-08, + "loss": 0.9347, + "step": 10713 + }, + { + "epoch": 0.9662262704603869, + "grad_norm": 1.4384138271017268, + "learning_rate": 1.192312605902468e-08, + "loss": 0.9516, + "step": 10714 + }, + { + "epoch": 0.9663164539838571, + "grad_norm": 0.8152693356674946, + 
"learning_rate": 1.1859514271608917e-08, + "loss": 0.8483, + "step": 10715 + }, + { + "epoch": 0.9664066375073274, + "grad_norm": 1.9118617894045788, + "learning_rate": 1.1796072123301914e-08, + "loss": 0.9644, + "step": 10716 + }, + { + "epoch": 0.9664968210307977, + "grad_norm": 0.6702157757738857, + "learning_rate": 1.1732799619516897e-08, + "loss": 0.8265, + "step": 10717 + }, + { + "epoch": 0.9665870045542679, + "grad_norm": 2.0453824724157523, + "learning_rate": 1.1669696765652659e-08, + "loss": 0.9196, + "step": 10718 + }, + { + "epoch": 0.9666771880777382, + "grad_norm": 1.3587340235889493, + "learning_rate": 1.1606763567093336e-08, + "loss": 0.9642, + "step": 10719 + }, + { + "epoch": 0.9667673716012085, + "grad_norm": 1.1074575051794078, + "learning_rate": 1.1544000029208857e-08, + "loss": 0.8056, + "step": 10720 + }, + { + "epoch": 0.9668575551246787, + "grad_norm": 1.4510507853984367, + "learning_rate": 1.148140615735449e-08, + "loss": 0.8004, + "step": 10721 + }, + { + "epoch": 0.966947738648149, + "grad_norm": 1.3431242883253454, + "learning_rate": 1.1418981956871076e-08, + "loss": 0.9107, + "step": 10722 + }, + { + "epoch": 0.9670379221716192, + "grad_norm": 1.8638843384087254, + "learning_rate": 1.1356727433085245e-08, + "loss": 0.925, + "step": 10723 + }, + { + "epoch": 0.9671281056950896, + "grad_norm": 1.2231341515745964, + "learning_rate": 1.1294642591308524e-08, + "loss": 0.9626, + "step": 10724 + }, + { + "epoch": 0.9672182892185598, + "grad_norm": 1.2541825890190434, + "learning_rate": 1.1232727436838452e-08, + "loss": 0.9232, + "step": 10725 + }, + { + "epoch": 0.96730847274203, + "grad_norm": 1.460129050077509, + "learning_rate": 1.1170981974958138e-08, + "loss": 0.9813, + "step": 10726 + }, + { + "epoch": 0.9673986562655003, + "grad_norm": 1.7599844844090864, + "learning_rate": 1.1109406210936035e-08, + "loss": 1.001, + "step": 10727 + }, + { + "epoch": 0.9674888397889706, + "grad_norm": 1.5528321049481932, + "learning_rate": 1.1048000150025939e-08, + "loss": 1.0555, + "step": 10728 + }, + { + "epoch": 0.9675790233124408, + "grad_norm": 1.6388044155930075, + "learning_rate": 1.0986763797467213e-08, + "loss": 0.8311, + "step": 10729 + }, + { + "epoch": 0.9676692068359111, + "grad_norm": 0.6435736970308086, + "learning_rate": 1.0925697158485459e-08, + "loss": 0.7546, + "step": 10730 + }, + { + "epoch": 0.9677593903593813, + "grad_norm": 1.3733999739761762, + "learning_rate": 1.0864800238290727e-08, + "loss": 0.9411, + "step": 10731 + }, + { + "epoch": 0.9678495738828516, + "grad_norm": 2.032186321719862, + "learning_rate": 1.0804073042079309e-08, + "loss": 0.8983, + "step": 10732 + }, + { + "epoch": 0.9679397574063219, + "grad_norm": 0.6947674061220366, + "learning_rate": 1.0743515575032392e-08, + "loss": 0.8097, + "step": 10733 + }, + { + "epoch": 0.9680299409297921, + "grad_norm": 1.4619580136752865, + "learning_rate": 1.0683127842317619e-08, + "loss": 0.9456, + "step": 10734 + }, + { + "epoch": 0.9681201244532623, + "grad_norm": 1.5760249148544008, + "learning_rate": 1.0622909849087314e-08, + "loss": 0.9578, + "step": 10735 + }, + { + "epoch": 0.9682103079767327, + "grad_norm": 1.3488790962581956, + "learning_rate": 1.0562861600479588e-08, + "loss": 0.9954, + "step": 10736 + }, + { + "epoch": 0.9683004915002029, + "grad_norm": 1.8024879692194642, + "learning_rate": 1.0502983101618345e-08, + "loss": 1.0276, + "step": 10737 + }, + { + "epoch": 0.9683906750236732, + "grad_norm": 1.5337494672480547, + "learning_rate": 1.0443274357612386e-08, + "loss": 0.8099, + 
"step": 10738 + }, + { + "epoch": 0.9684808585471434, + "grad_norm": 1.4448391525873616, + "learning_rate": 1.0383735373556524e-08, + "loss": 0.9531, + "step": 10739 + }, + { + "epoch": 0.9685710420706137, + "grad_norm": 1.3185839652448679, + "learning_rate": 1.0324366154531139e-08, + "loss": 0.9662, + "step": 10740 + }, + { + "epoch": 0.968661225594084, + "grad_norm": 1.5540886880711233, + "learning_rate": 1.0265166705601735e-08, + "loss": 0.8994, + "step": 10741 + }, + { + "epoch": 0.9687514091175542, + "grad_norm": 0.7853462786379349, + "learning_rate": 1.0206137031819606e-08, + "loss": 0.8752, + "step": 10742 + }, + { + "epoch": 0.9688415926410244, + "grad_norm": 1.3426332490740593, + "learning_rate": 1.0147277138221388e-08, + "loss": 0.9454, + "step": 10743 + }, + { + "epoch": 0.9689317761644948, + "grad_norm": 2.1348190091535306, + "learning_rate": 1.0088587029829287e-08, + "loss": 0.9754, + "step": 10744 + }, + { + "epoch": 0.969021959687965, + "grad_norm": 1.3712223786012403, + "learning_rate": 1.003006671165152e-08, + "loss": 1.0343, + "step": 10745 + }, + { + "epoch": 0.9691121432114352, + "grad_norm": 1.3514338525834813, + "learning_rate": 9.971716188680978e-09, + "loss": 0.9248, + "step": 10746 + }, + { + "epoch": 0.9692023267349056, + "grad_norm": 1.832811982267783, + "learning_rate": 9.91353546589635e-09, + "loss": 0.9064, + "step": 10747 + }, + { + "epoch": 0.9692925102583758, + "grad_norm": 1.3649494708842969, + "learning_rate": 9.855524548262106e-09, + "loss": 0.8658, + "step": 10748 + }, + { + "epoch": 0.969382693781846, + "grad_norm": 1.6723247885624826, + "learning_rate": 9.797683440728288e-09, + "loss": 0.8822, + "step": 10749 + }, + { + "epoch": 0.9694728773053163, + "grad_norm": 1.4143943607962288, + "learning_rate": 9.740012148229836e-09, + "loss": 0.8556, + "step": 10750 + }, + { + "epoch": 0.9695630608287866, + "grad_norm": 1.2888636580186368, + "learning_rate": 9.682510675687705e-09, + "loss": 0.8894, + "step": 10751 + }, + { + "epoch": 0.9696532443522569, + "grad_norm": 1.4129066590824968, + "learning_rate": 9.625179028008191e-09, + "loss": 0.8534, + "step": 10752 + }, + { + "epoch": 0.9697434278757271, + "grad_norm": 1.50011276774906, + "learning_rate": 9.568017210083379e-09, + "loss": 0.7856, + "step": 10753 + }, + { + "epoch": 0.9698336113991973, + "grad_norm": 1.4983089769793232, + "learning_rate": 9.511025226790259e-09, + "loss": 0.916, + "step": 10754 + }, + { + "epoch": 0.9699237949226677, + "grad_norm": 1.3388562358516805, + "learning_rate": 9.454203082992052e-09, + "loss": 0.9138, + "step": 10755 + }, + { + "epoch": 0.9700139784461379, + "grad_norm": 1.6005541348512637, + "learning_rate": 9.3975507835371e-09, + "loss": 0.8379, + "step": 10756 + }, + { + "epoch": 0.9701041619696081, + "grad_norm": 0.6660147300557587, + "learning_rate": 9.341068333259094e-09, + "loss": 0.8116, + "step": 10757 + }, + { + "epoch": 0.9701943454930784, + "grad_norm": 1.4117586536604982, + "learning_rate": 9.28475573697729e-09, + "loss": 0.9127, + "step": 10758 + }, + { + "epoch": 0.9702845290165487, + "grad_norm": 1.3554635171486933, + "learning_rate": 9.228612999497177e-09, + "loss": 0.9383, + "step": 10759 + }, + { + "epoch": 0.970374712540019, + "grad_norm": 2.1706345123601025, + "learning_rate": 9.172640125608478e-09, + "loss": 0.8819, + "step": 10760 + }, + { + "epoch": 0.9704648960634892, + "grad_norm": 1.2248829805184434, + "learning_rate": 9.116837120087817e-09, + "loss": 0.9441, + "step": 10761 + }, + { + "epoch": 0.9705550795869594, + "grad_norm": 1.547462120763463, 
+ "learning_rate": 9.061203987695832e-09, + "loss": 1.0062, + "step": 10762 + }, + { + "epoch": 0.9706452631104298, + "grad_norm": 1.5363617257818907, + "learning_rate": 9.005740733180055e-09, + "loss": 0.8834, + "step": 10763 + }, + { + "epoch": 0.9707354466339, + "grad_norm": 1.637153731564185, + "learning_rate": 8.950447361272483e-09, + "loss": 1.0193, + "step": 10764 + }, + { + "epoch": 0.9708256301573702, + "grad_norm": 1.9045793059790583, + "learning_rate": 8.895323876691784e-09, + "loss": 0.9408, + "step": 10765 + }, + { + "epoch": 0.9709158136808405, + "grad_norm": 1.5792693719537791, + "learning_rate": 8.840370284140419e-09, + "loss": 0.9854, + "step": 10766 + }, + { + "epoch": 0.9710059972043108, + "grad_norm": 1.454604841233737, + "learning_rate": 8.78558658830797e-09, + "loss": 0.8494, + "step": 10767 + }, + { + "epoch": 0.971096180727781, + "grad_norm": 0.6766085549552573, + "learning_rate": 8.730972793868696e-09, + "loss": 0.8233, + "step": 10768 + }, + { + "epoch": 0.9711863642512513, + "grad_norm": 1.7084940702912068, + "learning_rate": 8.67652890548265e-09, + "loss": 0.8943, + "step": 10769 + }, + { + "epoch": 0.9712765477747216, + "grad_norm": 1.2914274773581027, + "learning_rate": 8.622254927795004e-09, + "loss": 0.9088, + "step": 10770 + }, + { + "epoch": 0.9713667312981918, + "grad_norm": 1.4711175869040147, + "learning_rate": 8.568150865436941e-09, + "loss": 0.8956, + "step": 10771 + }, + { + "epoch": 0.9714569148216621, + "grad_norm": 1.2739412680372137, + "learning_rate": 8.514216723024991e-09, + "loss": 0.9421, + "step": 10772 + }, + { + "epoch": 0.9715470983451323, + "grad_norm": 3.571405222680199, + "learning_rate": 8.460452505161031e-09, + "loss": 0.8588, + "step": 10773 + }, + { + "epoch": 0.9716372818686027, + "grad_norm": 1.3279081728595468, + "learning_rate": 8.4068582164325e-09, + "loss": 0.9605, + "step": 10774 + }, + { + "epoch": 0.9717274653920729, + "grad_norm": 1.4140062268267066, + "learning_rate": 8.353433861412406e-09, + "loss": 0.9125, + "step": 10775 + }, + { + "epoch": 0.9718176489155431, + "grad_norm": 1.4731197690162796, + "learning_rate": 8.300179444658883e-09, + "loss": 0.8902, + "step": 10776 + }, + { + "epoch": 0.9719078324390134, + "grad_norm": 1.5868891224001733, + "learning_rate": 8.247094970716296e-09, + "loss": 1.0294, + "step": 10777 + }, + { + "epoch": 0.9719980159624837, + "grad_norm": 1.380403176236098, + "learning_rate": 8.19418044411413e-09, + "loss": 0.968, + "step": 10778 + }, + { + "epoch": 0.9720881994859539, + "grad_norm": 1.2309690881159085, + "learning_rate": 8.141435869367219e-09, + "loss": 0.9323, + "step": 10779 + }, + { + "epoch": 0.9721783830094242, + "grad_norm": 1.3253241682269796, + "learning_rate": 8.088861250975742e-09, + "loss": 0.9713, + "step": 10780 + }, + { + "epoch": 0.9722685665328944, + "grad_norm": 1.5476724900111394, + "learning_rate": 8.036456593426111e-09, + "loss": 0.9065, + "step": 10781 + }, + { + "epoch": 0.9723587500563647, + "grad_norm": 1.472190236797797, + "learning_rate": 7.984221901189415e-09, + "loss": 0.9681, + "step": 10782 + }, + { + "epoch": 0.972448933579835, + "grad_norm": 1.3877560027110702, + "learning_rate": 7.932157178722976e-09, + "loss": 0.8586, + "step": 10783 + }, + { + "epoch": 0.9725391171033052, + "grad_norm": 1.5224162273191626, + "learning_rate": 7.880262430468799e-09, + "loss": 0.9029, + "step": 10784 + }, + { + "epoch": 0.9726293006267754, + "grad_norm": 1.7688347327759, + "learning_rate": 7.828537660855339e-09, + "loss": 0.8681, + "step": 10785 + }, + { + "epoch": 
0.9727194841502458, + "grad_norm": 2.0518486805348246, + "learning_rate": 7.776982874295512e-09, + "loss": 0.9383, + "step": 10786 + }, + { + "epoch": 0.972809667673716, + "grad_norm": 1.7932001101750568, + "learning_rate": 7.725598075188688e-09, + "loss": 0.8417, + "step": 10787 + }, + { + "epoch": 0.9728998511971862, + "grad_norm": 4.839040324696425, + "learning_rate": 7.674383267918916e-09, + "loss": 0.8857, + "step": 10788 + }, + { + "epoch": 0.9729900347206565, + "grad_norm": 1.4127558366786241, + "learning_rate": 7.623338456856476e-09, + "loss": 0.922, + "step": 10789 + }, + { + "epoch": 0.9730802182441268, + "grad_norm": 1.5163741472433843, + "learning_rate": 7.572463646356554e-09, + "loss": 0.8261, + "step": 10790 + }, + { + "epoch": 0.9731704017675971, + "grad_norm": 1.4966046529183452, + "learning_rate": 7.521758840760339e-09, + "loss": 0.9055, + "step": 10791 + }, + { + "epoch": 0.9732605852910673, + "grad_norm": 1.8301970647798982, + "learning_rate": 7.471224044393931e-09, + "loss": 0.8876, + "step": 10792 + }, + { + "epoch": 0.9733507688145376, + "grad_norm": 1.3215042243569584, + "learning_rate": 7.420859261569434e-09, + "loss": 0.9972, + "step": 10793 + }, + { + "epoch": 0.9734409523380079, + "grad_norm": 1.5413174448470357, + "learning_rate": 7.370664496584078e-09, + "loss": 0.9912, + "step": 10794 + }, + { + "epoch": 0.9735311358614781, + "grad_norm": 1.372306222421001, + "learning_rate": 7.3206397537211026e-09, + "loss": 0.8511, + "step": 10795 + }, + { + "epoch": 0.9736213193849483, + "grad_norm": 2.139535445655849, + "learning_rate": 7.270785037248428e-09, + "loss": 0.8998, + "step": 10796 + }, + { + "epoch": 0.9737115029084187, + "grad_norm": 1.4657648471247529, + "learning_rate": 7.221100351420428e-09, + "loss": 0.8936, + "step": 10797 + }, + { + "epoch": 0.9738016864318889, + "grad_norm": 1.3460493154603155, + "learning_rate": 7.171585700475935e-09, + "loss": 0.8607, + "step": 10798 + }, + { + "epoch": 0.9738918699553591, + "grad_norm": 1.5233878103124368, + "learning_rate": 7.122241088640235e-09, + "loss": 0.9907, + "step": 10799 + }, + { + "epoch": 0.9739820534788294, + "grad_norm": 1.4389890339309608, + "learning_rate": 7.073066520123516e-09, + "loss": 1.0533, + "step": 10800 + }, + { + "epoch": 0.9740722370022997, + "grad_norm": 1.5696406659068054, + "learning_rate": 7.0240619991217555e-09, + "loss": 0.823, + "step": 10801 + }, + { + "epoch": 0.97416242052577, + "grad_norm": 2.0061641116192424, + "learning_rate": 6.975227529816052e-09, + "loss": 0.9934, + "step": 10802 + }, + { + "epoch": 0.9742526040492402, + "grad_norm": 1.516853463892759, + "learning_rate": 6.926563116373296e-09, + "loss": 0.9735, + "step": 10803 + }, + { + "epoch": 0.9743427875727104, + "grad_norm": 1.4994656087729055, + "learning_rate": 6.878068762945943e-09, + "loss": 0.8742, + "step": 10804 + }, + { + "epoch": 0.9744329710961808, + "grad_norm": 1.2971260563159681, + "learning_rate": 6.829744473671794e-09, + "loss": 0.9623, + "step": 10805 + }, + { + "epoch": 0.974523154619651, + "grad_norm": 0.6176457454329506, + "learning_rate": 6.781590252674219e-09, + "loss": 0.7066, + "step": 10806 + }, + { + "epoch": 0.9746133381431212, + "grad_norm": 1.129964471106563, + "learning_rate": 6.733606104061484e-09, + "loss": 0.9378, + "step": 10807 + }, + { + "epoch": 0.9747035216665915, + "grad_norm": 1.3567515522568572, + "learning_rate": 6.6857920319283165e-09, + "loss": 0.884, + "step": 10808 + }, + { + "epoch": 0.9747937051900618, + "grad_norm": 1.3831781022556384, + "learning_rate": 
6.638148040354563e-09, + "loss": 0.9092, + "step": 10809 + }, + { + "epoch": 0.974883888713532, + "grad_norm": 1.4442367261712754, + "learning_rate": 6.590674133405194e-09, + "loss": 0.8897, + "step": 10810 + }, + { + "epoch": 0.9749740722370023, + "grad_norm": 1.3501458212899238, + "learning_rate": 6.5433703151311914e-09, + "loss": 0.94, + "step": 10811 + }, + { + "epoch": 0.9750642557604725, + "grad_norm": 1.5769025392429534, + "learning_rate": 6.49623658956866e-09, + "loss": 0.9326, + "step": 10812 + }, + { + "epoch": 0.9751544392839429, + "grad_norm": 1.8580153166074942, + "learning_rate": 6.44927296073905e-09, + "loss": 0.8156, + "step": 10813 + }, + { + "epoch": 0.9752446228074131, + "grad_norm": 1.3857357355657147, + "learning_rate": 6.402479432649821e-09, + "loss": 1.0152, + "step": 10814 + }, + { + "epoch": 0.9753348063308833, + "grad_norm": 1.6026450615509344, + "learning_rate": 6.355856009293781e-09, + "loss": 0.928, + "step": 10815 + }, + { + "epoch": 0.9754249898543537, + "grad_norm": 1.8428092108055596, + "learning_rate": 6.3094026946488575e-09, + "loss": 0.91, + "step": 10816 + }, + { + "epoch": 0.9755151733778239, + "grad_norm": 1.322979327342365, + "learning_rate": 6.2631194926787704e-09, + "loss": 0.9323, + "step": 10817 + }, + { + "epoch": 0.9756053569012941, + "grad_norm": 2.075620705480363, + "learning_rate": 6.217006407332581e-09, + "loss": 0.8345, + "step": 10818 + }, + { + "epoch": 0.9756955404247644, + "grad_norm": 1.3464138330142243, + "learning_rate": 6.1710634425453654e-09, + "loss": 0.8783, + "step": 10819 + }, + { + "epoch": 0.9757857239482347, + "grad_norm": 1.4975335983812315, + "learning_rate": 6.1252906022366544e-09, + "loss": 0.9472, + "step": 10820 + }, + { + "epoch": 0.9758759074717049, + "grad_norm": 1.4944314910409224, + "learning_rate": 6.079687890312213e-09, + "loss": 0.9077, + "step": 10821 + }, + { + "epoch": 0.9759660909951752, + "grad_norm": 1.562483520420908, + "learning_rate": 6.034255310663372e-09, + "loss": 0.9127, + "step": 10822 + }, + { + "epoch": 0.9760562745186454, + "grad_norm": 1.6041386424337423, + "learning_rate": 5.988992867166143e-09, + "loss": 1.0228, + "step": 10823 + }, + { + "epoch": 0.9761464580421157, + "grad_norm": 1.6628353268311673, + "learning_rate": 5.943900563682991e-09, + "loss": 0.9695, + "step": 10824 + }, + { + "epoch": 0.976236641565586, + "grad_norm": 0.7746043795195151, + "learning_rate": 5.898978404061506e-09, + "loss": 0.8775, + "step": 10825 + }, + { + "epoch": 0.9763268250890562, + "grad_norm": 1.9029874554620039, + "learning_rate": 5.85422639213462e-09, + "loss": 0.9296, + "step": 10826 + }, + { + "epoch": 0.9764170086125264, + "grad_norm": 1.3374363037343613, + "learning_rate": 5.809644531720614e-09, + "loss": 0.9417, + "step": 10827 + }, + { + "epoch": 0.9765071921359968, + "grad_norm": 1.333707230515883, + "learning_rate": 5.765232826623556e-09, + "loss": 0.9619, + "step": 10828 + }, + { + "epoch": 0.976597375659467, + "grad_norm": 1.4132814305074988, + "learning_rate": 5.720991280633081e-09, + "loss": 0.8988, + "step": 10829 + }, + { + "epoch": 0.9766875591829373, + "grad_norm": 1.2643940437757075, + "learning_rate": 5.676919897523724e-09, + "loss": 0.9156, + "step": 10830 + }, + { + "epoch": 0.9767777427064075, + "grad_norm": 1.246403814023858, + "learning_rate": 5.633018681056256e-09, + "loss": 0.9392, + "step": 10831 + }, + { + "epoch": 0.9768679262298778, + "grad_norm": 1.554669634078996, + "learning_rate": 5.589287634976569e-09, + "loss": 0.9319, + "step": 10832 + }, + { + "epoch": 
0.9769581097533481, + "grad_norm": 1.2791848170000284, + "learning_rate": 5.5457267630159014e-09, + "loss": 0.9015, + "step": 10833 + }, + { + "epoch": 0.9770482932768183, + "grad_norm": 1.311867334380705, + "learning_rate": 5.5023360688910555e-09, + "loss": 0.9687, + "step": 10834 + }, + { + "epoch": 0.9771384768002885, + "grad_norm": 1.3820255863813298, + "learning_rate": 5.459115556304183e-09, + "loss": 0.9762, + "step": 10835 + }, + { + "epoch": 0.9772286603237589, + "grad_norm": 1.7018838262031417, + "learning_rate": 5.416065228943889e-09, + "loss": 0.8753, + "step": 10836 + }, + { + "epoch": 0.9773188438472291, + "grad_norm": 1.5230132465901078, + "learning_rate": 5.373185090482568e-09, + "loss": 0.887, + "step": 10837 + }, + { + "epoch": 0.9774090273706993, + "grad_norm": 1.6347925327506634, + "learning_rate": 5.330475144579516e-09, + "loss": 0.9226, + "step": 10838 + }, + { + "epoch": 0.9774992108941697, + "grad_norm": 1.3782931685832418, + "learning_rate": 5.2879353948787065e-09, + "loss": 1.0137, + "step": 10839 + }, + { + "epoch": 0.9775893944176399, + "grad_norm": 1.5966858843295268, + "learning_rate": 5.245565845010125e-09, + "loss": 0.9869, + "step": 10840 + }, + { + "epoch": 0.9776795779411102, + "grad_norm": 1.287462339498572, + "learning_rate": 5.2033664985886575e-09, + "loss": 0.8611, + "step": 10841 + }, + { + "epoch": 0.9777697614645804, + "grad_norm": 1.575144379673888, + "learning_rate": 5.161337359215201e-09, + "loss": 1.0755, + "step": 10842 + }, + { + "epoch": 0.9778599449880507, + "grad_norm": 1.5091464610075076, + "learning_rate": 5.119478430475999e-09, + "loss": 0.9488, + "step": 10843 + }, + { + "epoch": 0.977950128511521, + "grad_norm": 2.0081854447909353, + "learning_rate": 5.077789715942416e-09, + "loss": 0.8688, + "step": 10844 + }, + { + "epoch": 0.9780403120349912, + "grad_norm": 1.5559128576113594, + "learning_rate": 5.036271219171606e-09, + "loss": 0.9486, + "step": 10845 + }, + { + "epoch": 0.9781304955584614, + "grad_norm": 1.7839207620557531, + "learning_rate": 4.994922943706514e-09, + "loss": 0.9675, + "step": 10846 + }, + { + "epoch": 0.9782206790819318, + "grad_norm": 1.340561388191624, + "learning_rate": 4.953744893074763e-09, + "loss": 0.9124, + "step": 10847 + }, + { + "epoch": 0.978310862605402, + "grad_norm": 3.8953213822821664, + "learning_rate": 4.912737070789985e-09, + "loss": 0.8608, + "step": 10848 + }, + { + "epoch": 0.9784010461288722, + "grad_norm": 1.5623922692353251, + "learning_rate": 4.871899480351604e-09, + "loss": 1.0034, + "step": 10849 + }, + { + "epoch": 0.9784912296523425, + "grad_norm": 1.61142096974465, + "learning_rate": 4.831232125243501e-09, + "loss": 0.9427, + "step": 10850 + }, + { + "epoch": 0.9785814131758128, + "grad_norm": 1.3028623869767688, + "learning_rate": 4.7907350089360086e-09, + "loss": 1.0261, + "step": 10851 + }, + { + "epoch": 0.978671596699283, + "grad_norm": 1.4449239672552607, + "learning_rate": 4.750408134884365e-09, + "loss": 0.8241, + "step": 10852 + }, + { + "epoch": 0.9787617802227533, + "grad_norm": 1.4385981304441342, + "learning_rate": 4.710251506529816e-09, + "loss": 0.8399, + "step": 10853 + }, + { + "epoch": 0.9788519637462235, + "grad_norm": 1.3695199307693686, + "learning_rate": 4.6702651272982894e-09, + "loss": 0.9515, + "step": 10854 + }, + { + "epoch": 0.9789421472696939, + "grad_norm": 1.564661285991435, + "learning_rate": 4.630449000602166e-09, + "loss": 0.9027, + "step": 10855 + }, + { + "epoch": 0.9790323307931641, + "grad_norm": 1.6760595130642013, + "learning_rate": 
4.590803129838283e-09, + "loss": 0.9956, + "step": 10856 + }, + { + "epoch": 0.9791225143166343, + "grad_norm": 1.4444249907838256, + "learning_rate": 4.551327518389714e-09, + "loss": 0.931, + "step": 10857 + }, + { + "epoch": 0.9792126978401046, + "grad_norm": 1.8603989162075287, + "learning_rate": 4.512022169624652e-09, + "loss": 0.9249, + "step": 10858 + }, + { + "epoch": 0.9793028813635749, + "grad_norm": 1.2991339072615193, + "learning_rate": 4.472887086896637e-09, + "loss": 0.9076, + "step": 10859 + }, + { + "epoch": 0.9793930648870451, + "grad_norm": 1.54089509712631, + "learning_rate": 4.433922273545443e-09, + "loss": 0.8804, + "step": 10860 + }, + { + "epoch": 0.9794832484105154, + "grad_norm": 0.5893842663243835, + "learning_rate": 4.395127732895299e-09, + "loss": 0.791, + "step": 10861 + }, + { + "epoch": 0.9795734319339856, + "grad_norm": 1.8850587607010494, + "learning_rate": 4.356503468256445e-09, + "loss": 1.0119, + "step": 10862 + }, + { + "epoch": 0.979663615457456, + "grad_norm": 1.8472867116291254, + "learning_rate": 4.318049482924913e-09, + "loss": 0.9442, + "step": 10863 + }, + { + "epoch": 0.9797537989809262, + "grad_norm": 1.6144764608472943, + "learning_rate": 4.279765780181188e-09, + "loss": 0.9045, + "step": 10864 + }, + { + "epoch": 0.9798439825043964, + "grad_norm": 1.7960797747797972, + "learning_rate": 4.241652363291992e-09, + "loss": 0.9473, + "step": 10865 + }, + { + "epoch": 0.9799341660278668, + "grad_norm": 4.837751846263447, + "learning_rate": 4.203709235509834e-09, + "loss": 0.9056, + "step": 10866 + }, + { + "epoch": 0.980024349551337, + "grad_norm": 1.485678411734475, + "learning_rate": 4.165936400071679e-09, + "loss": 0.947, + "step": 10867 + }, + { + "epoch": 0.9801145330748072, + "grad_norm": 1.331890467925811, + "learning_rate": 4.12833386020095e-09, + "loss": 0.8784, + "step": 10868 + }, + { + "epoch": 0.9802047165982775, + "grad_norm": 1.8252215682050144, + "learning_rate": 4.090901619105746e-09, + "loss": 0.9752, + "step": 10869 + }, + { + "epoch": 0.9802949001217478, + "grad_norm": 1.392593961521322, + "learning_rate": 4.053639679980181e-09, + "loss": 0.8314, + "step": 10870 + }, + { + "epoch": 0.980385083645218, + "grad_norm": 1.5565066022625675, + "learning_rate": 4.01654804600371e-09, + "loss": 0.9389, + "step": 10871 + }, + { + "epoch": 0.9804752671686883, + "grad_norm": 1.2948786065192899, + "learning_rate": 3.9796267203409114e-09, + "loss": 0.9414, + "step": 10872 + }, + { + "epoch": 0.9805654506921585, + "grad_norm": 1.2761117202317427, + "learning_rate": 3.942875706142379e-09, + "loss": 0.9163, + "step": 10873 + }, + { + "epoch": 0.9806556342156288, + "grad_norm": 1.6169042661751318, + "learning_rate": 3.906295006543825e-09, + "loss": 0.9895, + "step": 10874 + }, + { + "epoch": 0.9807458177390991, + "grad_norm": 1.4874047128627068, + "learning_rate": 3.8698846246665305e-09, + "loss": 0.9147, + "step": 10875 + }, + { + "epoch": 0.9808360012625693, + "grad_norm": 1.4129440774211464, + "learning_rate": 3.833644563617344e-09, + "loss": 0.9346, + "step": 10876 + }, + { + "epoch": 0.9809261847860395, + "grad_norm": 1.607942554785763, + "learning_rate": 3.797574826488237e-09, + "loss": 0.9416, + "step": 10877 + }, + { + "epoch": 0.9810163683095099, + "grad_norm": 1.4762190969464293, + "learning_rate": 3.761675416356969e-09, + "loss": 0.8605, + "step": 10878 + }, + { + "epoch": 0.9811065518329801, + "grad_norm": 1.2974152416409515, + "learning_rate": 3.725946336286867e-09, + "loss": 0.8396, + "step": 10879 + }, + { + "epoch": 
0.9811967353564504, + "grad_norm": 1.2348573442454402, + "learning_rate": 3.6903875893261604e-09, + "loss": 0.9105, + "step": 10880 + }, + { + "epoch": 0.9812869188799206, + "grad_norm": 1.6464654054827317, + "learning_rate": 3.6549991785093105e-09, + "loss": 0.8679, + "step": 10881 + }, + { + "epoch": 0.9813771024033909, + "grad_norm": 1.4287680081945544, + "learning_rate": 3.6197811068554575e-09, + "loss": 0.9194, + "step": 10882 + }, + { + "epoch": 0.9814672859268612, + "grad_norm": 1.2187835982166486, + "learning_rate": 3.584733377369975e-09, + "loss": 1.0099, + "step": 10883 + }, + { + "epoch": 0.9815574694503314, + "grad_norm": 1.6398828190133632, + "learning_rate": 3.549855993043138e-09, + "loss": 0.9058, + "step": 10884 + }, + { + "epoch": 0.9816476529738016, + "grad_norm": 1.384983535214277, + "learning_rate": 3.5151489568507887e-09, + "loss": 0.9202, + "step": 10885 + }, + { + "epoch": 0.981737836497272, + "grad_norm": 1.583214663358329, + "learning_rate": 3.4806122717545572e-09, + "loss": 0.9511, + "step": 10886 + }, + { + "epoch": 0.9818280200207422, + "grad_norm": 1.865345385378681, + "learning_rate": 3.446245940701198e-09, + "loss": 0.8947, + "step": 10887 + }, + { + "epoch": 0.9819182035442124, + "grad_norm": 1.4977689681720086, + "learning_rate": 3.41204996662281e-09, + "loss": 0.8702, + "step": 10888 + }, + { + "epoch": 0.9820083870676828, + "grad_norm": 1.1984583030993958, + "learning_rate": 3.3780243524375028e-09, + "loss": 0.9313, + "step": 10889 + }, + { + "epoch": 0.982098570591153, + "grad_norm": 1.403674118898808, + "learning_rate": 3.3441691010485107e-09, + "loss": 0.9578, + "step": 10890 + }, + { + "epoch": 0.9821887541146233, + "grad_norm": 1.5827087458715905, + "learning_rate": 3.3104842153444113e-09, + "loss": 1.012, + "step": 10891 + }, + { + "epoch": 0.9822789376380935, + "grad_norm": 1.5743779180059767, + "learning_rate": 3.27696969819935e-09, + "loss": 0.928, + "step": 10892 + }, + { + "epoch": 0.9823691211615638, + "grad_norm": 1.301925797151338, + "learning_rate": 3.2436255524732615e-09, + "loss": 0.8894, + "step": 10893 + }, + { + "epoch": 0.9824593046850341, + "grad_norm": 1.260748380827521, + "learning_rate": 3.210451781010759e-09, + "loss": 0.9123, + "step": 10894 + }, + { + "epoch": 0.9825494882085043, + "grad_norm": 1.3896832652764106, + "learning_rate": 3.1774483866426895e-09, + "loss": 0.7995, + "step": 10895 + }, + { + "epoch": 0.9826396717319745, + "grad_norm": 1.28119468569563, + "learning_rate": 3.144615372185244e-09, + "loss": 0.8958, + "step": 10896 + }, + { + "epoch": 0.9827298552554449, + "grad_norm": 1.2000714479475099, + "learning_rate": 3.1119527404399604e-09, + "loss": 1.0142, + "step": 10897 + }, + { + "epoch": 0.9828200387789151, + "grad_norm": 1.7867559364895953, + "learning_rate": 3.0794604941932754e-09, + "loss": 0.9363, + "step": 10898 + }, + { + "epoch": 0.9829102223023853, + "grad_norm": 1.3160847347656806, + "learning_rate": 3.0471386362180827e-09, + "loss": 0.829, + "step": 10899 + }, + { + "epoch": 0.9830004058258556, + "grad_norm": 2.114982554767347, + "learning_rate": 3.0149871692719542e-09, + "loss": 0.984, + "step": 10900 + }, + { + "epoch": 0.9830905893493259, + "grad_norm": 1.3139606703993554, + "learning_rate": 2.9830060960984728e-09, + "loss": 0.9198, + "step": 10901 + }, + { + "epoch": 0.9831807728727961, + "grad_norm": 0.6825743349347111, + "learning_rate": 2.9511954194263442e-09, + "loss": 0.8048, + "step": 10902 + }, + { + "epoch": 0.9832709563962664, + "grad_norm": 1.4041175780879973, + "learning_rate": 
2.9195551419698426e-09, + "loss": 0.9362, + "step": 10903 + }, + { + "epoch": 0.9833611399197366, + "grad_norm": 1.6172306220898194, + "learning_rate": 2.888085266428808e-09, + "loss": 0.8936, + "step": 10904 + }, + { + "epoch": 0.983451323443207, + "grad_norm": 1.7147423924968241, + "learning_rate": 2.8567857954882037e-09, + "loss": 0.887, + "step": 10905 + }, + { + "epoch": 0.9835415069666772, + "grad_norm": 1.5343773494166395, + "learning_rate": 2.82565673181856e-09, + "loss": 0.9855, + "step": 10906 + }, + { + "epoch": 0.9836316904901474, + "grad_norm": 1.3441798831047933, + "learning_rate": 2.7946980780764184e-09, + "loss": 0.9565, + "step": 10907 + }, + { + "epoch": 0.9837218740136177, + "grad_norm": 1.6236306069215818, + "learning_rate": 2.763909836903e-09, + "loss": 0.9902, + "step": 10908 + }, + { + "epoch": 0.983812057537088, + "grad_norm": 1.2578275217616435, + "learning_rate": 2.7332920109255364e-09, + "loss": 0.8528, + "step": 10909 + }, + { + "epoch": 0.9839022410605582, + "grad_norm": 1.664910758436358, + "learning_rate": 2.702844602756382e-09, + "loss": 0.9135, + "step": 10910 + }, + { + "epoch": 0.9839924245840285, + "grad_norm": 2.1128325206280287, + "learning_rate": 2.6725676149936814e-09, + "loss": 0.9045, + "step": 10911 + }, + { + "epoch": 0.9840826081074988, + "grad_norm": 1.481136765155719, + "learning_rate": 2.642461050220479e-09, + "loss": 1.0034, + "step": 10912 + }, + { + "epoch": 0.984172791630969, + "grad_norm": 1.3221955273753534, + "learning_rate": 2.612524911005609e-09, + "loss": 0.9072, + "step": 10913 + }, + { + "epoch": 0.9842629751544393, + "grad_norm": 1.4100965238904488, + "learning_rate": 2.582759199903917e-09, + "loss": 0.8127, + "step": 10914 + }, + { + "epoch": 0.9843531586779095, + "grad_norm": 1.4318535244420616, + "learning_rate": 2.553163919454704e-09, + "loss": 0.8903, + "step": 10915 + }, + { + "epoch": 0.9844433422013799, + "grad_norm": 1.3906028402898876, + "learning_rate": 2.523739072183506e-09, + "loss": 0.916, + "step": 10916 + }, + { + "epoch": 0.9845335257248501, + "grad_norm": 1.2824797427564887, + "learning_rate": 2.4944846606007597e-09, + "loss": 0.8972, + "step": 10917 + }, + { + "epoch": 0.9846237092483203, + "grad_norm": 0.6522510028625241, + "learning_rate": 2.46540068720269e-09, + "loss": 0.843, + "step": 10918 + }, + { + "epoch": 0.9847138927717906, + "grad_norm": 1.2842874576036063, + "learning_rate": 2.4364871544708674e-09, + "loss": 0.9678, + "step": 10919 + }, + { + "epoch": 0.9848040762952609, + "grad_norm": 1.5320154729066753, + "learning_rate": 2.4077440648726523e-09, + "loss": 0.879, + "step": 10920 + }, + { + "epoch": 0.9848942598187311, + "grad_norm": 1.7697659435801538, + "learning_rate": 2.379171420860082e-09, + "loss": 0.9969, + "step": 10921 + }, + { + "epoch": 0.9849844433422014, + "grad_norm": 1.4948798052846672, + "learning_rate": 2.3507692248714296e-09, + "loss": 0.8562, + "step": 10922 + }, + { + "epoch": 0.9850746268656716, + "grad_norm": 1.7125389233920174, + "learning_rate": 2.322537479330089e-09, + "loss": 0.9426, + "step": 10923 + }, + { + "epoch": 0.9851648103891419, + "grad_norm": 1.354039914972639, + "learning_rate": 2.2944761866450223e-09, + "loss": 1.0043, + "step": 10924 + }, + { + "epoch": 0.9852549939126122, + "grad_norm": 2.116148521302081, + "learning_rate": 2.266585349210315e-09, + "loss": 0.9178, + "step": 10925 + }, + { + "epoch": 0.9853451774360824, + "grad_norm": 1.7707543598591549, + "learning_rate": 2.2388649694060623e-09, + "loss": 0.885, + "step": 10926 + }, + { + "epoch": 
0.9854353609595526, + "grad_norm": 1.3196868768573566, + "learning_rate": 2.211315049597262e-09, + "loss": 0.9947, + "step": 10927 + }, + { + "epoch": 0.985525544483023, + "grad_norm": 1.356086260496578, + "learning_rate": 2.1839355921349224e-09, + "loss": 0.9709, + "step": 10928 + }, + { + "epoch": 0.9856157280064932, + "grad_norm": 1.7275166356435387, + "learning_rate": 2.156726599354952e-09, + "loss": 0.9236, + "step": 10929 + }, + { + "epoch": 0.9857059115299635, + "grad_norm": 1.5934899061809158, + "learning_rate": 2.129688073578828e-09, + "loss": 0.9522, + "step": 10930 + }, + { + "epoch": 0.9857960950534337, + "grad_norm": 1.5515947106159367, + "learning_rate": 2.1028200171142597e-09, + "loss": 1.0609, + "step": 10931 + }, + { + "epoch": 0.985886278576904, + "grad_norm": 1.4986726928320535, + "learning_rate": 2.076122432253191e-09, + "loss": 0.9215, + "step": 10932 + }, + { + "epoch": 0.9859764621003743, + "grad_norm": 1.631787795356025, + "learning_rate": 2.0495953212738005e-09, + "loss": 0.9717, + "step": 10933 + }, + { + "epoch": 0.9860666456238445, + "grad_norm": 1.1989071220554863, + "learning_rate": 2.0232386864396102e-09, + "loss": 1.014, + "step": 10934 + }, + { + "epoch": 0.9861568291473148, + "grad_norm": 1.3397527724983185, + "learning_rate": 1.9970525299992656e-09, + "loss": 0.9522, + "step": 10935 + }, + { + "epoch": 0.9862470126707851, + "grad_norm": 1.934223665840872, + "learning_rate": 1.9710368541874245e-09, + "loss": 0.8972, + "step": 10936 + }, + { + "epoch": 0.9863371961942553, + "grad_norm": 0.5779869584935406, + "learning_rate": 1.945191661223644e-09, + "loss": 0.7962, + "step": 10937 + }, + { + "epoch": 0.9864273797177255, + "grad_norm": 1.3930963137973218, + "learning_rate": 1.9195169533132714e-09, + "loss": 0.9759, + "step": 10938 + }, + { + "epoch": 0.9865175632411959, + "grad_norm": 0.6007703544511697, + "learning_rate": 1.894012732646999e-09, + "loss": 0.8044, + "step": 10939 + }, + { + "epoch": 0.9866077467646661, + "grad_norm": 1.6336944242567317, + "learning_rate": 1.8686790014010854e-09, + "loss": 0.8922, + "step": 10940 + }, + { + "epoch": 0.9866979302881363, + "grad_norm": 0.6041184658247315, + "learning_rate": 1.8435157617369134e-09, + "loss": 0.7645, + "step": 10941 + }, + { + "epoch": 0.9867881138116066, + "grad_norm": 1.6070504298199155, + "learning_rate": 1.818523015801876e-09, + "loss": 1.0773, + "step": 10942 + }, + { + "epoch": 0.9868782973350769, + "grad_norm": 1.5566178802398478, + "learning_rate": 1.7937007657282677e-09, + "loss": 0.8926, + "step": 10943 + }, + { + "epoch": 0.9869684808585472, + "grad_norm": 1.3756990389763504, + "learning_rate": 1.7690490136341718e-09, + "loss": 0.8725, + "step": 10944 + }, + { + "epoch": 0.9870586643820174, + "grad_norm": 1.171071196498606, + "learning_rate": 1.744567761622795e-09, + "loss": 0.93, + "step": 10945 + }, + { + "epoch": 0.9871488479054876, + "grad_norm": 1.3533730700176003, + "learning_rate": 1.7202570117831327e-09, + "loss": 0.9302, + "step": 10946 + }, + { + "epoch": 0.987239031428958, + "grad_norm": 1.680044446765957, + "learning_rate": 1.696116766189526e-09, + "loss": 0.9081, + "step": 10947 + }, + { + "epoch": 0.9873292149524282, + "grad_norm": 1.3475604037050086, + "learning_rate": 1.6721470269021042e-09, + "loss": 0.9746, + "step": 10948 + }, + { + "epoch": 0.9874193984758984, + "grad_norm": 1.6937368779536308, + "learning_rate": 1.6483477959654546e-09, + "loss": 0.9001, + "step": 10949 + }, + { + "epoch": 0.9875095819993687, + "grad_norm": 1.260158554425057, + "learning_rate": 
1.6247190754106187e-09, + "loss": 0.9155, + "step": 10950 + }, + { + "epoch": 0.987599765522839, + "grad_norm": 1.4199198600828207, + "learning_rate": 1.6012608672537619e-09, + "loss": 0.8977, + "step": 10951 + }, + { + "epoch": 0.9876899490463092, + "grad_norm": 1.6018485953498591, + "learning_rate": 1.5779731734963942e-09, + "loss": 0.9364, + "step": 10952 + }, + { + "epoch": 0.9877801325697795, + "grad_norm": 1.5275146700939717, + "learning_rate": 1.5548559961253705e-09, + "loss": 0.9851, + "step": 10953 + }, + { + "epoch": 0.9878703160932497, + "grad_norm": 1.5605085270339143, + "learning_rate": 1.5319093371135573e-09, + "loss": 0.8594, + "step": 10954 + }, + { + "epoch": 0.9879604996167201, + "grad_norm": 1.303348196478256, + "learning_rate": 1.5091331984184997e-09, + "loss": 1.0245, + "step": 10955 + }, + { + "epoch": 0.9880506831401903, + "grad_norm": 1.2123518037190504, + "learning_rate": 1.486527581983754e-09, + "loss": 0.9291, + "step": 10956 + }, + { + "epoch": 0.9881408666636605, + "grad_norm": 1.4648576659495245, + "learning_rate": 1.4640924897382223e-09, + "loss": 0.9853, + "step": 10957 + }, + { + "epoch": 0.9882310501871309, + "grad_norm": 1.2888846534809224, + "learning_rate": 1.4418279235961506e-09, + "loss": 0.9686, + "step": 10958 + }, + { + "epoch": 0.9883212337106011, + "grad_norm": 1.451904686875946, + "learning_rate": 1.4197338854573526e-09, + "loss": 0.8349, + "step": 10959 + }, + { + "epoch": 0.9884114172340713, + "grad_norm": 1.5127771895963924, + "learning_rate": 1.3978103772067651e-09, + "loss": 0.895, + "step": 10960 + }, + { + "epoch": 0.9885016007575416, + "grad_norm": 1.5724886718244928, + "learning_rate": 1.3760574007153358e-09, + "loss": 0.8552, + "step": 10961 + }, + { + "epoch": 0.9885917842810119, + "grad_norm": 1.6818113162863624, + "learning_rate": 1.3544749578389137e-09, + "loss": 1.0179, + "step": 10962 + }, + { + "epoch": 0.9886819678044821, + "grad_norm": 1.437690639293918, + "learning_rate": 1.3330630504189143e-09, + "loss": 0.9708, + "step": 10963 + }, + { + "epoch": 0.9887721513279524, + "grad_norm": 1.343307599719662, + "learning_rate": 1.3118216802827652e-09, + "loss": 0.9022, + "step": 10964 + }, + { + "epoch": 0.9888623348514226, + "grad_norm": 1.6156084395543082, + "learning_rate": 1.2907508492425722e-09, + "loss": 0.9101, + "step": 10965 + }, + { + "epoch": 0.988952518374893, + "grad_norm": 1.6994670817999336, + "learning_rate": 1.2698505590962305e-09, + "loss": 0.8969, + "step": 10966 + }, + { + "epoch": 0.9890427018983632, + "grad_norm": 1.398156036523587, + "learning_rate": 1.2491208116272022e-09, + "loss": 0.9289, + "step": 10967 + }, + { + "epoch": 0.9891328854218334, + "grad_norm": 1.4862808841611752, + "learning_rate": 1.2285616086040728e-09, + "loss": 0.9654, + "step": 10968 + }, + { + "epoch": 0.9892230689453037, + "grad_norm": 1.2485580233380853, + "learning_rate": 1.2081729517812167e-09, + "loss": 0.9944, + "step": 10969 + }, + { + "epoch": 0.989313252468774, + "grad_norm": 1.3762682787576124, + "learning_rate": 1.1879548428983533e-09, + "loss": 0.9272, + "step": 10970 + }, + { + "epoch": 0.9894034359922442, + "grad_norm": 1.638063811287724, + "learning_rate": 1.167907283680547e-09, + "loss": 0.8791, + "step": 10971 + }, + { + "epoch": 0.9894936195157145, + "grad_norm": 1.383289448873123, + "learning_rate": 1.1480302758382077e-09, + "loss": 0.8725, + "step": 10972 + }, + { + "epoch": 0.9895838030391847, + "grad_norm": 1.5408240756466278, + "learning_rate": 1.1283238210675338e-09, + "loss": 0.9193, + "step": 10973 + }, + { + 
"epoch": 0.989673986562655, + "grad_norm": 1.6933179100090154, + "learning_rate": 1.1087879210498474e-09, + "loss": 0.8655, + "step": 10974 + }, + { + "epoch": 0.9897641700861253, + "grad_norm": 1.309462802464976, + "learning_rate": 1.0894225774522592e-09, + "loss": 0.9799, + "step": 10975 + }, + { + "epoch": 0.9898543536095955, + "grad_norm": 0.6336536938854243, + "learning_rate": 1.070227791927003e-09, + "loss": 0.8124, + "step": 10976 + }, + { + "epoch": 0.9899445371330657, + "grad_norm": 1.538465045044644, + "learning_rate": 1.0512035661118802e-09, + "loss": 1.0272, + "step": 10977 + }, + { + "epoch": 0.9900347206565361, + "grad_norm": 1.9906064210800234, + "learning_rate": 1.0323499016300364e-09, + "loss": 0.9526, + "step": 10978 + }, + { + "epoch": 0.9901249041800063, + "grad_norm": 6.952263870920623, + "learning_rate": 1.013666800090407e-09, + "loss": 0.886, + "step": 10979 + }, + { + "epoch": 0.9902150877034765, + "grad_norm": 1.4487196070434587, + "learning_rate": 9.951542630870502e-10, + "loss": 0.9238, + "step": 10980 + }, + { + "epoch": 0.9903052712269468, + "grad_norm": 1.5031732531632103, + "learning_rate": 9.768122921995915e-10, + "loss": 0.9623, + "step": 10981 + }, + { + "epoch": 0.9903954547504171, + "grad_norm": 1.5692077850259536, + "learning_rate": 9.58640888992779e-10, + "loss": 0.866, + "step": 10982 + }, + { + "epoch": 0.9904856382738874, + "grad_norm": 1.501427686278028, + "learning_rate": 9.40640055017594e-10, + "loss": 0.8495, + "step": 10983 + }, + { + "epoch": 0.9905758217973576, + "grad_norm": 1.570394864935114, + "learning_rate": 9.228097918094757e-10, + "loss": 0.9331, + "step": 10984 + }, + { + "epoch": 0.9906660053208279, + "grad_norm": 1.5330957460158927, + "learning_rate": 9.051501008900952e-10, + "loss": 0.9029, + "step": 10985 + }, + { + "epoch": 0.9907561888442982, + "grad_norm": 1.3537921925712266, + "learning_rate": 8.876609837662475e-10, + "loss": 0.9427, + "step": 10986 + }, + { + "epoch": 0.9908463723677684, + "grad_norm": 1.3396284636651785, + "learning_rate": 8.70342441930294e-10, + "loss": 1.0192, + "step": 10987 + }, + { + "epoch": 0.9909365558912386, + "grad_norm": 1.5495633740877883, + "learning_rate": 8.531944768594979e-10, + "loss": 0.9091, + "step": 10988 + }, + { + "epoch": 0.991026739414709, + "grad_norm": 1.2698707478749929, + "learning_rate": 8.362170900175769e-10, + "loss": 1.0105, + "step": 10989 + }, + { + "epoch": 0.9911169229381792, + "grad_norm": 1.399033190480083, + "learning_rate": 8.194102828527061e-10, + "loss": 0.8882, + "step": 10990 + }, + { + "epoch": 0.9912071064616494, + "grad_norm": 1.7942141527174975, + "learning_rate": 8.027740567992936e-10, + "loss": 0.9711, + "step": 10991 + }, + { + "epoch": 0.9912972899851197, + "grad_norm": 1.3789994853975194, + "learning_rate": 7.863084132766484e-10, + "loss": 0.9263, + "step": 10992 + }, + { + "epoch": 0.99138747350859, + "grad_norm": 1.5681954912235425, + "learning_rate": 7.700133536896469e-10, + "loss": 0.9545, + "step": 10993 + }, + { + "epoch": 0.9914776570320603, + "grad_norm": 1.3241894023872776, + "learning_rate": 7.538888794287324e-10, + "loss": 0.8952, + "step": 10994 + }, + { + "epoch": 0.9915678405555305, + "grad_norm": 1.450229754825633, + "learning_rate": 7.379349918696931e-10, + "loss": 0.9582, + "step": 10995 + }, + { + "epoch": 0.9916580240790007, + "grad_norm": 1.5998737643034662, + "learning_rate": 7.221516923738846e-10, + "loss": 0.906, + "step": 10996 + }, + { + "epoch": 0.9917482076024711, + "grad_norm": 2.080711418779583, + "learning_rate": 
7.065389822880075e-10, + "loss": 1.0344, + "step": 10997 + }, + { + "epoch": 0.9918383911259413, + "grad_norm": 1.4210887873197517, + "learning_rate": 6.910968629443292e-10, + "loss": 1.0001, + "step": 10998 + }, + { + "epoch": 0.9919285746494115, + "grad_norm": 1.6241234836282097, + "learning_rate": 6.758253356602406e-10, + "loss": 0.93, + "step": 10999 + }, + { + "epoch": 0.9920187581728818, + "grad_norm": 1.4264101914388871, + "learning_rate": 6.607244017389213e-10, + "loss": 1.0242, + "step": 11000 + }, + { + "epoch": 0.9921089416963521, + "grad_norm": 1.7983645029109645, + "learning_rate": 6.457940624686742e-10, + "loss": 0.9554, + "step": 11001 + }, + { + "epoch": 0.9921991252198223, + "grad_norm": 1.6365515138680924, + "learning_rate": 6.310343191238132e-10, + "loss": 1.0125, + "step": 11002 + }, + { + "epoch": 0.9922893087432926, + "grad_norm": 1.926273801385476, + "learning_rate": 6.164451729635534e-10, + "loss": 0.8314, + "step": 11003 + }, + { + "epoch": 0.9923794922667628, + "grad_norm": 1.6490108704124802, + "learning_rate": 6.020266252324546e-10, + "loss": 0.8029, + "step": 11004 + }, + { + "epoch": 0.9924696757902332, + "grad_norm": 1.9517804209685128, + "learning_rate": 5.877786771610882e-10, + "loss": 0.9018, + "step": 11005 + }, + { + "epoch": 0.9925598593137034, + "grad_norm": 0.795345783848714, + "learning_rate": 5.737013299651483e-10, + "loss": 0.9041, + "step": 11006 + }, + { + "epoch": 0.9926500428371736, + "grad_norm": 1.6688151657582093, + "learning_rate": 5.597945848458963e-10, + "loss": 0.9305, + "step": 11007 + }, + { + "epoch": 0.992740226360644, + "grad_norm": 1.5741374181746672, + "learning_rate": 5.460584429894944e-10, + "loss": 0.9102, + "step": 11008 + }, + { + "epoch": 0.9928304098841142, + "grad_norm": 1.693957653196429, + "learning_rate": 5.32492905568338e-10, + "loss": 0.934, + "step": 11009 + }, + { + "epoch": 0.9929205934075844, + "grad_norm": 1.3817074341889213, + "learning_rate": 5.190979737399459e-10, + "loss": 0.9662, + "step": 11010 + }, + { + "epoch": 0.9930107769310547, + "grad_norm": 1.5250928444037477, + "learning_rate": 5.058736486469594e-10, + "loss": 0.8208, + "step": 11011 + }, + { + "epoch": 0.993100960454525, + "grad_norm": 1.4016121573778793, + "learning_rate": 4.928199314180314e-10, + "loss": 0.9666, + "step": 11012 + }, + { + "epoch": 0.9931911439779952, + "grad_norm": 1.9836363731874107, + "learning_rate": 4.799368231669376e-10, + "loss": 0.8453, + "step": 11013 + }, + { + "epoch": 0.9932813275014655, + "grad_norm": 1.596275945824498, + "learning_rate": 4.672243249927988e-10, + "loss": 0.9157, + "step": 11014 + }, + { + "epoch": 0.9933715110249357, + "grad_norm": 1.4540865598254342, + "learning_rate": 4.546824379803027e-10, + "loss": 0.9582, + "step": 11015 + }, + { + "epoch": 0.993461694548406, + "grad_norm": 1.520803355439732, + "learning_rate": 4.4231116319970454e-10, + "loss": 0.9535, + "step": 11016 + }, + { + "epoch": 0.9935518780718763, + "grad_norm": 1.7484743416103232, + "learning_rate": 4.3011050170660423e-10, + "loss": 0.9275, + "step": 11017 + }, + { + "epoch": 0.9936420615953465, + "grad_norm": 1.5443850427329595, + "learning_rate": 4.18080454542169e-10, + "loss": 0.929, + "step": 11018 + }, + { + "epoch": 0.9937322451188167, + "grad_norm": 0.6592299830710905, + "learning_rate": 4.0622102273246694e-10, + "loss": 0.7925, + "step": 11019 + }, + { + "epoch": 0.9938224286422871, + "grad_norm": 1.2365169531604434, + "learning_rate": 3.945322072897994e-10, + "loss": 0.9897, + "step": 11020 + }, + { + "epoch": 
0.9939126121657573, + "grad_norm": 1.714326958784584, + "learning_rate": 3.830140092111467e-10, + "loss": 0.9853, + "step": 11021 + }, + { + "epoch": 0.9940027956892276, + "grad_norm": 2.2654907186768902, + "learning_rate": 3.7166642947972225e-10, + "loss": 0.934, + "step": 11022 + }, + { + "epoch": 0.9940929792126978, + "grad_norm": 1.544832673131972, + "learning_rate": 3.604894690634186e-10, + "loss": 0.9082, + "step": 11023 + }, + { + "epoch": 0.9941831627361681, + "grad_norm": 1.396009073534009, + "learning_rate": 3.494831289161393e-10, + "loss": 1.0154, + "step": 11024 + }, + { + "epoch": 0.9942733462596384, + "grad_norm": 1.2858793780887445, + "learning_rate": 3.3864740997668897e-10, + "loss": 0.9688, + "step": 11025 + }, + { + "epoch": 0.9943635297831086, + "grad_norm": 1.4243970867083269, + "learning_rate": 3.279823131701054e-10, + "loss": 0.9579, + "step": 11026 + }, + { + "epoch": 0.9944537133065788, + "grad_norm": 1.2230237707041, + "learning_rate": 3.1748783940610536e-10, + "loss": 0.8872, + "step": 11027 + }, + { + "epoch": 0.9945438968300492, + "grad_norm": 1.441845503870387, + "learning_rate": 3.071639895801947e-10, + "loss": 0.888, + "step": 11028 + }, + { + "epoch": 0.9946340803535194, + "grad_norm": 1.621223462901582, + "learning_rate": 2.9701076457322447e-10, + "loss": 0.9978, + "step": 11029 + }, + { + "epoch": 0.9947242638769896, + "grad_norm": 1.8619592483273206, + "learning_rate": 2.870281652513906e-10, + "loss": 0.9678, + "step": 11030 + }, + { + "epoch": 0.99481444740046, + "grad_norm": 1.5136504534774533, + "learning_rate": 2.772161924669003e-10, + "loss": 0.9165, + "step": 11031 + }, + { + "epoch": 0.9949046309239302, + "grad_norm": 1.3948229743723883, + "learning_rate": 2.6757484705641765e-10, + "loss": 0.9164, + "step": 11032 + }, + { + "epoch": 0.9949948144474005, + "grad_norm": 1.5965829242468372, + "learning_rate": 2.58104129843062e-10, + "loss": 0.9314, + "step": 11033 + }, + { + "epoch": 0.9950849979708707, + "grad_norm": 1.6584315058468864, + "learning_rate": 2.4880404163463154e-10, + "loss": 0.8828, + "step": 11034 + }, + { + "epoch": 0.995175181494341, + "grad_norm": 1.7223154020567741, + "learning_rate": 2.3967458322471377e-10, + "loss": 0.9141, + "step": 11035 + }, + { + "epoch": 0.9952653650178113, + "grad_norm": 1.3767642926537007, + "learning_rate": 2.307157553922412e-10, + "loss": 0.9395, + "step": 11036 + }, + { + "epoch": 0.9953555485412815, + "grad_norm": 1.3548500341921432, + "learning_rate": 2.2192755890193538e-10, + "loss": 0.9696, + "step": 11037 + }, + { + "epoch": 0.9954457320647517, + "grad_norm": 0.6908873857934303, + "learning_rate": 2.133099945034189e-10, + "loss": 0.7673, + "step": 11038 + }, + { + "epoch": 0.9955359155882221, + "grad_norm": 1.6051262631767464, + "learning_rate": 2.048630629318815e-10, + "loss": 0.9553, + "step": 11039 + }, + { + "epoch": 0.9956260991116923, + "grad_norm": 1.29681217087371, + "learning_rate": 1.965867649080799e-10, + "loss": 0.925, + "step": 11040 + }, + { + "epoch": 0.9957162826351625, + "grad_norm": 0.6350347329950843, + "learning_rate": 1.8848110113856008e-10, + "loss": 0.7798, + "step": 11041 + }, + { + "epoch": 0.9958064661586328, + "grad_norm": 1.395876665534024, + "learning_rate": 1.8054607231454687e-10, + "loss": 0.9697, + "step": 11042 + }, + { + "epoch": 0.9958966496821031, + "grad_norm": 1.2846678295592142, + "learning_rate": 1.7278167911327635e-10, + "loss": 0.8846, + "step": 11043 + }, + { + "epoch": 0.9959868332055734, + "grad_norm": 1.40047264124968, + "learning_rate": 
1.6518792219710753e-10, + "loss": 0.9641, + "step": 11044 + }, + { + "epoch": 0.9960770167290436, + "grad_norm": 1.42315021383661, + "learning_rate": 1.5776480221418865e-10, + "loss": 0.9369, + "step": 11045 + }, + { + "epoch": 0.9961672002525138, + "grad_norm": 1.7360207936847867, + "learning_rate": 1.505123197977909e-10, + "loss": 0.9335, + "step": 11046 + }, + { + "epoch": 0.9962573837759842, + "grad_norm": 1.5787681553175021, + "learning_rate": 1.4343047556675258e-10, + "loss": 0.8736, + "step": 11047 + }, + { + "epoch": 0.9963475672994544, + "grad_norm": 1.4810639244414336, + "learning_rate": 1.3651927012503506e-10, + "loss": 0.936, + "step": 11048 + }, + { + "epoch": 0.9964377508229246, + "grad_norm": 1.5035566443786628, + "learning_rate": 1.297787040630549e-10, + "loss": 0.9408, + "step": 11049 + }, + { + "epoch": 0.9965279343463949, + "grad_norm": 1.8636409004562202, + "learning_rate": 1.2320877795524153e-10, + "loss": 0.9517, + "step": 11050 + }, + { + "epoch": 0.9966181178698652, + "grad_norm": 1.6727296302660057, + "learning_rate": 1.1680949236247962e-10, + "loss": 0.9229, + "step": 11051 + }, + { + "epoch": 0.9967083013933354, + "grad_norm": 1.3416134016153538, + "learning_rate": 1.1058084783099886e-10, + "loss": 0.9381, + "step": 11052 + }, + { + "epoch": 0.9967984849168057, + "grad_norm": 1.5178877466786975, + "learning_rate": 1.0452284489170793e-10, + "loss": 0.9404, + "step": 11053 + }, + { + "epoch": 0.996888668440276, + "grad_norm": 1.3347199005431378, + "learning_rate": 9.86354840621928e-11, + "loss": 0.9166, + "step": 11054 + }, + { + "epoch": 0.9969788519637462, + "grad_norm": 1.5205720999375196, + "learning_rate": 9.291876584427427e-11, + "loss": 0.8106, + "step": 11055 + }, + { + "epoch": 0.9970690354872165, + "grad_norm": 1.8958947377862905, + "learning_rate": 8.737269072578435e-11, + "loss": 0.9118, + "step": 11056 + }, + { + "epoch": 0.9971592190106867, + "grad_norm": 1.3260510126919753, + "learning_rate": 8.199725918012212e-11, + "loss": 0.9214, + "step": 11057 + }, + { + "epoch": 0.9972494025341571, + "grad_norm": 1.513388251527899, + "learning_rate": 7.679247166603175e-11, + "loss": 0.9364, + "step": 11058 + }, + { + "epoch": 0.9973395860576273, + "grad_norm": 1.34354008654072, + "learning_rate": 7.17583286273804e-11, + "loss": 0.9348, + "step": 11059 + }, + { + "epoch": 0.9974297695810975, + "grad_norm": 1.3424276010608416, + "learning_rate": 6.689483049360233e-11, + "loss": 0.9561, + "step": 11060 + }, + { + "epoch": 0.9975199531045678, + "grad_norm": 1.6806444857408636, + "learning_rate": 6.220197768014302e-11, + "loss": 0.904, + "step": 11061 + }, + { + "epoch": 0.9976101366280381, + "grad_norm": 1.4286286649137119, + "learning_rate": 5.7679770587126806e-11, + "loss": 0.9435, + "step": 11062 + }, + { + "epoch": 0.9977003201515083, + "grad_norm": 0.6370274316234925, + "learning_rate": 5.33282096002452e-11, + "loss": 0.8712, + "step": 11063 + }, + { + "epoch": 0.9977905036749786, + "grad_norm": 1.5663329604357394, + "learning_rate": 4.914729509120086e-11, + "loss": 0.9055, + "step": 11064 + }, + { + "epoch": 0.9978806871984488, + "grad_norm": 1.2389930580566615, + "learning_rate": 4.513702741637537e-11, + "loss": 0.856, + "step": 11065 + }, + { + "epoch": 0.9979708707219191, + "grad_norm": 1.551211511225137, + "learning_rate": 4.129740691816153e-11, + "loss": 0.9908, + "step": 11066 + }, + { + "epoch": 0.9980610542453894, + "grad_norm": 1.3215533413031828, + "learning_rate": 3.762843392429715e-11, + "loss": 0.9566, + "step": 11067 + }, + { + "epoch": 
0.9981512377688596, + "grad_norm": 1.7362409983896636, + "learning_rate": 3.413010874742106e-11, + "loss": 0.919, + "step": 11068 + }, + { + "epoch": 0.9982414212923298, + "grad_norm": 1.5595544899058429, + "learning_rate": 3.080243168618324e-11, + "loss": 0.9267, + "step": 11069 + }, + { + "epoch": 0.9983316048158002, + "grad_norm": 1.515486563426341, + "learning_rate": 2.7645403024800783e-11, + "loss": 0.8792, + "step": 11070 + }, + { + "epoch": 0.9984217883392704, + "grad_norm": 1.1970353021666398, + "learning_rate": 2.4659023032391756e-11, + "loss": 1.0054, + "step": 11071 + }, + { + "epoch": 0.9985119718627407, + "grad_norm": 1.1942420105326514, + "learning_rate": 2.1843291963863364e-11, + "loss": 0.9813, + "step": 11072 + }, + { + "epoch": 0.9986021553862109, + "grad_norm": 2.4050374134032477, + "learning_rate": 1.9198210059245822e-11, + "loss": 0.9479, + "step": 11073 + }, + { + "epoch": 0.9986923389096812, + "grad_norm": 1.6574752162501618, + "learning_rate": 1.672377754458054e-11, + "loss": 0.9342, + "step": 11074 + }, + { + "epoch": 0.9987825224331515, + "grad_norm": 1.3915259625161092, + "learning_rate": 1.4419994630809895e-11, + "loss": 0.9206, + "step": 11075 + }, + { + "epoch": 0.9988727059566217, + "grad_norm": 1.410096988859317, + "learning_rate": 1.2286861514443358e-11, + "loss": 0.8833, + "step": 11076 + }, + { + "epoch": 0.998962889480092, + "grad_norm": 1.2392585339555375, + "learning_rate": 1.0324378377779553e-11, + "loss": 0.9747, + "step": 11077 + }, + { + "epoch": 0.9990530730035623, + "grad_norm": 1.36618127033997, + "learning_rate": 8.532545388018064e-12, + "loss": 0.9251, + "step": 11078 + }, + { + "epoch": 0.9991432565270325, + "grad_norm": 1.3225839251478329, + "learning_rate": 6.911362697925582e-12, + "loss": 0.7729, + "step": 11079 + }, + { + "epoch": 0.9992334400505027, + "grad_norm": 1.4803428898358064, + "learning_rate": 5.46083044605794e-12, + "loss": 0.9064, + "step": 11080 + }, + { + "epoch": 0.9993236235739731, + "grad_norm": 1.4422428835466623, + "learning_rate": 4.1809487563160276e-12, + "loss": 0.9783, + "step": 11081 + }, + { + "epoch": 0.9994138070974433, + "grad_norm": 1.2982344798029575, + "learning_rate": 3.0717177375017e-12, + "loss": 0.9354, + "step": 11082 + }, + { + "epoch": 0.9995039906209136, + "grad_norm": 1.5371787844428464, + "learning_rate": 2.1331374846500495e-12, + "loss": 0.9337, + "step": 11083 + }, + { + "epoch": 0.9995941741443838, + "grad_norm": 1.5940980718158901, + "learning_rate": 1.3652080774750885e-12, + "loss": 1.0315, + "step": 11084 + }, + { + "epoch": 0.9996843576678541, + "grad_norm": 1.1117544311888055, + "learning_rate": 7.679295817020204e-13, + "loss": 0.852, + "step": 11085 + }, + { + "epoch": 0.9997745411913244, + "grad_norm": 1.4196586522323007, + "learning_rate": 3.413020484011042e-13, + "loss": 0.9151, + "step": 11086 + }, + { + "epoch": 0.9998647247147946, + "grad_norm": 1.3630249307133806, + "learning_rate": 8.53255139876552e-14, + "loss": 0.9719, + "step": 11087 + }, + { + "epoch": 0.9999549082382648, + "grad_norm": 1.4799297227154524, + "learning_rate": 0.0, + "loss": 0.8842, + "step": 11088 + }, + { + "epoch": 0.9999549082382648, + "step": 11088, + "total_flos": 7.509663169988526e+17, + "train_loss": 0.9493302904381209, + "train_runtime": 163917.1841, + "train_samples_per_second": 4.059, + "train_steps_per_second": 0.068 + } + ], + "logging_steps": 1.0, + "max_steps": 11088, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 100, + "stateful_callbacks": { + "TrainerControl": { + 
"args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 7.509663169988526e+17, + "train_batch_size": 5, + "trial_name": null, + "trial_params": null +}