{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 280,
"global_step": 916,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.001091703056768559,
"grad_norm": 6.9168360306400665,
"learning_rate": 2.2857142857142855e-07,
"loss": 0.8069,
"step": 1
},
{
"epoch": 0.002183406113537118,
"grad_norm": 8.619261383294274,
"learning_rate": 4.571428571428571e-07,
"loss": 0.918,
"step": 2
},
{
"epoch": 0.0032751091703056767,
"grad_norm": 7.692605504695248,
"learning_rate": 6.857142857142857e-07,
"loss": 0.9234,
"step": 3
},
{
"epoch": 0.004366812227074236,
"grad_norm": 7.122858120605265,
"learning_rate": 9.142857142857142e-07,
"loss": 0.9265,
"step": 4
},
{
"epoch": 0.0054585152838427945,
"grad_norm": 7.537326535977078,
"learning_rate": 1.1428571428571428e-06,
"loss": 0.7695,
"step": 5
},
{
"epoch": 0.006550218340611353,
"grad_norm": 6.260583368018694,
"learning_rate": 1.3714285714285715e-06,
"loss": 0.9215,
"step": 6
},
{
"epoch": 0.007641921397379912,
"grad_norm": 5.203784202592674,
"learning_rate": 1.6e-06,
"loss": 0.7629,
"step": 7
},
{
"epoch": 0.008733624454148471,
"grad_norm": 4.233618238431581,
"learning_rate": 1.8285714285714284e-06,
"loss": 0.8116,
"step": 8
},
{
"epoch": 0.009825327510917031,
"grad_norm": 3.327308477876956,
"learning_rate": 2.057142857142857e-06,
"loss": 0.7596,
"step": 9
},
{
"epoch": 0.010917030567685589,
"grad_norm": 2.9654255350852723,
"learning_rate": 2.2857142857142856e-06,
"loss": 0.7323,
"step": 10
},
{
"epoch": 0.012008733624454149,
"grad_norm": 3.4039963472657875,
"learning_rate": 2.5142857142857142e-06,
"loss": 0.773,
"step": 11
},
{
"epoch": 0.013100436681222707,
"grad_norm": 2.764952769431326,
"learning_rate": 2.742857142857143e-06,
"loss": 0.7028,
"step": 12
},
{
"epoch": 0.014192139737991267,
"grad_norm": 2.645047626519114,
"learning_rate": 2.9714285714285716e-06,
"loss": 0.6944,
"step": 13
},
{
"epoch": 0.015283842794759825,
"grad_norm": 2.5003386957924816,
"learning_rate": 3.2e-06,
"loss": 0.6705,
"step": 14
},
{
"epoch": 0.016375545851528384,
"grad_norm": 2.2872854432818657,
"learning_rate": 3.428571428571428e-06,
"loss": 0.6705,
"step": 15
},
{
"epoch": 0.017467248908296942,
"grad_norm": 1.928277117833659,
"learning_rate": 3.657142857142857e-06,
"loss": 0.7259,
"step": 16
},
{
"epoch": 0.018558951965065504,
"grad_norm": 1.8574032707760006,
"learning_rate": 3.885714285714286e-06,
"loss": 0.6962,
"step": 17
},
{
"epoch": 0.019650655021834062,
"grad_norm": 1.9310574643116005,
"learning_rate": 4.114285714285714e-06,
"loss": 0.7249,
"step": 18
},
{
"epoch": 0.02074235807860262,
"grad_norm": 1.8624427266334949,
"learning_rate": 4.342857142857142e-06,
"loss": 0.715,
"step": 19
},
{
"epoch": 0.021834061135371178,
"grad_norm": 1.8660926001733764,
"learning_rate": 4.571428571428571e-06,
"loss": 0.7817,
"step": 20
},
{
"epoch": 0.02292576419213974,
"grad_norm": 1.9682589360779612,
"learning_rate": 4.8e-06,
"loss": 0.5957,
"step": 21
},
{
"epoch": 0.024017467248908297,
"grad_norm": 1.7481064474330001,
"learning_rate": 5.0285714285714285e-06,
"loss": 0.6397,
"step": 22
},
{
"epoch": 0.025109170305676855,
"grad_norm": 1.4998856719874816,
"learning_rate": 5.257142857142857e-06,
"loss": 0.5489,
"step": 23
},
{
"epoch": 0.026200873362445413,
"grad_norm": 1.669636786128378,
"learning_rate": 5.485714285714286e-06,
"loss": 0.6943,
"step": 24
},
{
"epoch": 0.027292576419213975,
"grad_norm": 1.5957918499269743,
"learning_rate": 5.7142857142857145e-06,
"loss": 0.6927,
"step": 25
},
{
"epoch": 0.028384279475982533,
"grad_norm": 1.6133288301808093,
"learning_rate": 5.942857142857143e-06,
"loss": 0.7117,
"step": 26
},
{
"epoch": 0.02947598253275109,
"grad_norm": 1.4864207057916419,
"learning_rate": 6.171428571428571e-06,
"loss": 0.5169,
"step": 27
},
{
"epoch": 0.03056768558951965,
"grad_norm": 1.5150567145731058,
"learning_rate": 6.4e-06,
"loss": 0.5613,
"step": 28
},
{
"epoch": 0.03165938864628821,
"grad_norm": 1.564571555194077,
"learning_rate": 6.628571428571428e-06,
"loss": 0.6743,
"step": 29
},
{
"epoch": 0.03275109170305677,
"grad_norm": 1.93767931683908,
"learning_rate": 6.857142857142856e-06,
"loss": 0.642,
"step": 30
},
{
"epoch": 0.03384279475982533,
"grad_norm": 1.5789902137704794,
"learning_rate": 7.085714285714285e-06,
"loss": 0.6701,
"step": 31
},
{
"epoch": 0.034934497816593885,
"grad_norm": 1.6974375239936534,
"learning_rate": 7.314285714285714e-06,
"loss": 0.5485,
"step": 32
},
{
"epoch": 0.036026200873362446,
"grad_norm": 1.3204997268284404,
"learning_rate": 7.542857142857142e-06,
"loss": 0.5434,
"step": 33
},
{
"epoch": 0.03711790393013101,
"grad_norm": 1.4597825234641069,
"learning_rate": 7.771428571428572e-06,
"loss": 0.6605,
"step": 34
},
{
"epoch": 0.03820960698689956,
"grad_norm": 1.6181689606941454,
"learning_rate": 8e-06,
"loss": 0.5726,
"step": 35
},
{
"epoch": 0.039301310043668124,
"grad_norm": 1.637262387439095,
"learning_rate": 7.999980926133991e-06,
"loss": 0.6037,
"step": 36
},
{
"epoch": 0.04039301310043668,
"grad_norm": 1.664670294379062,
"learning_rate": 7.999923704778513e-06,
"loss": 0.7399,
"step": 37
},
{
"epoch": 0.04148471615720524,
"grad_norm": 1.7493240614718077,
"learning_rate": 7.99982833666118e-06,
"loss": 0.6127,
"step": 38
},
{
"epoch": 0.0425764192139738,
"grad_norm": 2.092392677058526,
"learning_rate": 7.999694822994692e-06,
"loss": 0.668,
"step": 39
},
{
"epoch": 0.043668122270742356,
"grad_norm": 1.7013397213106045,
"learning_rate": 7.999523165476792e-06,
"loss": 0.6856,
"step": 40
},
{
"epoch": 0.04475982532751092,
"grad_norm": 1.474118425250071,
"learning_rate": 7.999313366290262e-06,
"loss": 0.5802,
"step": 41
},
{
"epoch": 0.04585152838427948,
"grad_norm": 1.660443265060205,
"learning_rate": 7.99906542810289e-06,
"loss": 0.6324,
"step": 42
},
{
"epoch": 0.04694323144104803,
"grad_norm": 1.6459243125827774,
"learning_rate": 7.998779354067437e-06,
"loss": 0.6614,
"step": 43
},
{
"epoch": 0.048034934497816595,
"grad_norm": 1.5196086504507762,
"learning_rate": 7.998455147821595e-06,
"loss": 0.6091,
"step": 44
},
{
"epoch": 0.04912663755458515,
"grad_norm": 1.429090294880644,
"learning_rate": 7.99809281348794e-06,
"loss": 0.6485,
"step": 45
},
{
"epoch": 0.05021834061135371,
"grad_norm": 1.3827763726996563,
"learning_rate": 7.997692355673884e-06,
"loss": 0.5785,
"step": 46
},
{
"epoch": 0.05131004366812227,
"grad_norm": 1.7588449103100179,
"learning_rate": 7.997253779471614e-06,
"loss": 0.6473,
"step": 47
},
{
"epoch": 0.05240174672489083,
"grad_norm": 1.5172171755749084,
"learning_rate": 7.996777090458023e-06,
"loss": 0.581,
"step": 48
},
{
"epoch": 0.05349344978165939,
"grad_norm": 1.6076648219406835,
"learning_rate": 7.996262294694649e-06,
"loss": 0.5681,
"step": 49
},
{
"epoch": 0.05458515283842795,
"grad_norm": 1.4244628514708157,
"learning_rate": 7.995709398727587e-06,
"loss": 0.4754,
"step": 50
},
{
"epoch": 0.055676855895196505,
"grad_norm": 1.2927489245734367,
"learning_rate": 7.995118409587412e-06,
"loss": 0.6537,
"step": 51
},
{
"epoch": 0.056768558951965066,
"grad_norm": 1.6748840284841289,
"learning_rate": 7.99448933478909e-06,
"loss": 0.5275,
"step": 52
},
{
"epoch": 0.05786026200873363,
"grad_norm": 1.5128577258259677,
"learning_rate": 7.993822182331882e-06,
"loss": 0.6424,
"step": 53
},
{
"epoch": 0.05895196506550218,
"grad_norm": 1.3310295240779713,
"learning_rate": 7.993116960699236e-06,
"loss": 0.6035,
"step": 54
},
{
"epoch": 0.060043668122270744,
"grad_norm": 1.6799361475060814,
"learning_rate": 7.99237367885869e-06,
"loss": 0.6795,
"step": 55
},
{
"epoch": 0.0611353711790393,
"grad_norm": 1.546417812619001,
"learning_rate": 7.991592346261747e-06,
"loss": 0.5519,
"step": 56
},
{
"epoch": 0.06222707423580786,
"grad_norm": 1.5066644174359114,
"learning_rate": 7.990772972843764e-06,
"loss": 0.6043,
"step": 57
},
{
"epoch": 0.06331877729257641,
"grad_norm": 1.489274779760962,
"learning_rate": 7.989915569023819e-06,
"loss": 0.6095,
"step": 58
},
{
"epoch": 0.06441048034934498,
"grad_norm": 1.7091568832403692,
"learning_rate": 7.989020145704584e-06,
"loss": 0.7029,
"step": 59
},
{
"epoch": 0.06550218340611354,
"grad_norm": 1.3568973285059558,
"learning_rate": 7.98808671427218e-06,
"loss": 0.6448,
"step": 60
},
{
"epoch": 0.0665938864628821,
"grad_norm": 1.4801827231358118,
"learning_rate": 7.987115286596038e-06,
"loss": 0.6638,
"step": 61
},
{
"epoch": 0.06768558951965066,
"grad_norm": 1.600407108115019,
"learning_rate": 7.986105875028748e-06,
"loss": 0.6321,
"step": 62
},
{
"epoch": 0.06877729257641921,
"grad_norm": 1.5576366146182474,
"learning_rate": 7.985058492405896e-06,
"loss": 0.7121,
"step": 63
},
{
"epoch": 0.06986899563318777,
"grad_norm": 1.6147036677777984,
"learning_rate": 7.983973152045903e-06,
"loss": 0.6388,
"step": 64
},
{
"epoch": 0.07096069868995633,
"grad_norm": 1.7851503357701477,
"learning_rate": 7.982849867749865e-06,
"loss": 0.6858,
"step": 65
},
{
"epoch": 0.07205240174672489,
"grad_norm": 1.2859661673668508,
"learning_rate": 7.981688653801361e-06,
"loss": 0.5384,
"step": 66
},
{
"epoch": 0.07314410480349345,
"grad_norm": 1.60696565837722,
"learning_rate": 7.980489524966288e-06,
"loss": 0.5506,
"step": 67
},
{
"epoch": 0.07423580786026202,
"grad_norm": 1.5405282936232578,
"learning_rate": 7.979252496492655e-06,
"loss": 0.4455,
"step": 68
},
{
"epoch": 0.07532751091703056,
"grad_norm": 1.7009157245583173,
"learning_rate": 7.977977584110411e-06,
"loss": 0.5581,
"step": 69
},
{
"epoch": 0.07641921397379912,
"grad_norm": 1.4022427304469514,
"learning_rate": 7.976664804031223e-06,
"loss": 0.6153,
"step": 70
},
{
"epoch": 0.07751091703056769,
"grad_norm": 1.5035374403149062,
"learning_rate": 7.975314172948291e-06,
"loss": 0.684,
"step": 71
},
{
"epoch": 0.07860262008733625,
"grad_norm": 1.4093854673180422,
"learning_rate": 7.973925708036115e-06,
"loss": 0.7426,
"step": 72
},
{
"epoch": 0.07969432314410481,
"grad_norm": 1.6851458145111584,
"learning_rate": 7.972499426950292e-06,
"loss": 0.5668,
"step": 73
},
{
"epoch": 0.08078602620087336,
"grad_norm": 1.529050078017108,
"learning_rate": 7.971035347827286e-06,
"loss": 0.5895,
"step": 74
},
{
"epoch": 0.08187772925764192,
"grad_norm": 1.6942196136539975,
"learning_rate": 7.969533489284195e-06,
"loss": 0.7048,
"step": 75
},
{
"epoch": 0.08296943231441048,
"grad_norm": 1.4777780150565798,
"learning_rate": 7.967993870418517e-06,
"loss": 0.7128,
"step": 76
},
{
"epoch": 0.08406113537117904,
"grad_norm": 1.680209877486954,
"learning_rate": 7.96641651080791e-06,
"loss": 0.4864,
"step": 77
},
{
"epoch": 0.0851528384279476,
"grad_norm": 1.42998101662559,
"learning_rate": 7.964801430509938e-06,
"loss": 0.5904,
"step": 78
},
{
"epoch": 0.08624454148471616,
"grad_norm": 1.3659967634997836,
"learning_rate": 7.963148650061817e-06,
"loss": 0.5854,
"step": 79
},
{
"epoch": 0.08733624454148471,
"grad_norm": 1.5702401689586527,
"learning_rate": 7.961458190480154e-06,
"loss": 0.6448,
"step": 80
},
{
"epoch": 0.08842794759825327,
"grad_norm": 1.6021520859056189,
"learning_rate": 7.959730073260684e-06,
"loss": 0.62,
"step": 81
},
{
"epoch": 0.08951965065502183,
"grad_norm": 1.3559678768559176,
"learning_rate": 7.957964320377992e-06,
"loss": 0.5855,
"step": 82
},
{
"epoch": 0.0906113537117904,
"grad_norm": 1.5008897257417335,
"learning_rate": 7.956160954285234e-06,
"loss": 0.5917,
"step": 83
},
{
"epoch": 0.09170305676855896,
"grad_norm": 1.3325553922283428,
"learning_rate": 7.954319997913848e-06,
"loss": 0.5747,
"step": 84
},
{
"epoch": 0.0927947598253275,
"grad_norm": 1.6334929695605136,
"learning_rate": 7.952441474673275e-06,
"loss": 0.6271,
"step": 85
},
{
"epoch": 0.09388646288209607,
"grad_norm": 1.5780752995999416,
"learning_rate": 7.950525408450647e-06,
"loss": 0.6098,
"step": 86
},
{
"epoch": 0.09497816593886463,
"grad_norm": 1.5582598019357035,
"learning_rate": 7.948571823610492e-06,
"loss": 0.5995,
"step": 87
},
{
"epoch": 0.09606986899563319,
"grad_norm": 1.349598296171142,
"learning_rate": 7.946580744994418e-06,
"loss": 0.6231,
"step": 88
},
{
"epoch": 0.09716157205240175,
"grad_norm": 1.4067679148676697,
"learning_rate": 7.944552197920806e-06,
"loss": 0.7023,
"step": 89
},
{
"epoch": 0.0982532751091703,
"grad_norm": 1.489512752201376,
"learning_rate": 7.942486208184478e-06,
"loss": 0.6443,
"step": 90
},
{
"epoch": 0.09934497816593886,
"grad_norm": 1.422726492645584,
"learning_rate": 7.940382802056374e-06,
"loss": 0.6031,
"step": 91
},
{
"epoch": 0.10043668122270742,
"grad_norm": 1.55466766394215,
"learning_rate": 7.938242006283219e-06,
"loss": 0.5998,
"step": 92
},
{
"epoch": 0.10152838427947598,
"grad_norm": 1.6313884014991245,
"learning_rate": 7.936063848087182e-06,
"loss": 0.5822,
"step": 93
},
{
"epoch": 0.10262008733624454,
"grad_norm": 1.3837342347132842,
"learning_rate": 7.933848355165526e-06,
"loss": 0.6168,
"step": 94
},
{
"epoch": 0.1037117903930131,
"grad_norm": 1.3175406249386525,
"learning_rate": 7.931595555690261e-06,
"loss": 0.5629,
"step": 95
},
{
"epoch": 0.10480349344978165,
"grad_norm": 1.4980147704457836,
"learning_rate": 7.929305478307787e-06,
"loss": 0.6575,
"step": 96
},
{
"epoch": 0.10589519650655022,
"grad_norm": 1.4934490360354107,
"learning_rate": 7.92697815213852e-06,
"loss": 0.6395,
"step": 97
},
{
"epoch": 0.10698689956331878,
"grad_norm": 1.6143096306904618,
"learning_rate": 7.924613606776535e-06,
"loss": 0.6362,
"step": 98
},
{
"epoch": 0.10807860262008734,
"grad_norm": 1.5507045899750025,
"learning_rate": 7.922211872289176e-06,
"loss": 0.6258,
"step": 99
},
{
"epoch": 0.1091703056768559,
"grad_norm": 1.7417116462175397,
"learning_rate": 7.919772979216687e-06,
"loss": 0.578,
"step": 100
},
{
"epoch": 0.11026200873362445,
"grad_norm": 1.5564506490699812,
"learning_rate": 7.917296958571813e-06,
"loss": 0.5899,
"step": 101
},
{
"epoch": 0.11135371179039301,
"grad_norm": 1.51376629823034,
"learning_rate": 7.914783841839412e-06,
"loss": 0.546,
"step": 102
},
{
"epoch": 0.11244541484716157,
"grad_norm": 1.5871820189984336,
"learning_rate": 7.91223366097605e-06,
"loss": 0.7067,
"step": 103
},
{
"epoch": 0.11353711790393013,
"grad_norm": 1.684703978601094,
"learning_rate": 7.909646448409604e-06,
"loss": 0.5795,
"step": 104
},
{
"epoch": 0.1146288209606987,
"grad_norm": 1.504074132778666,
"learning_rate": 7.90702223703883e-06,
"loss": 0.6801,
"step": 105
},
{
"epoch": 0.11572052401746726,
"grad_norm": 1.3449630071756726,
"learning_rate": 7.904361060232974e-06,
"loss": 0.5508,
"step": 106
},
{
"epoch": 0.1168122270742358,
"grad_norm": 1.397180371022227,
"learning_rate": 7.901662951831316e-06,
"loss": 0.6593,
"step": 107
},
{
"epoch": 0.11790393013100436,
"grad_norm": 1.3698872191395284,
"learning_rate": 7.898927946142763e-06,
"loss": 0.6885,
"step": 108
},
{
"epoch": 0.11899563318777293,
"grad_norm": 1.5829183415829775,
"learning_rate": 7.896156077945407e-06,
"loss": 0.5311,
"step": 109
},
{
"epoch": 0.12008733624454149,
"grad_norm": 1.451977799352687,
"learning_rate": 7.893347382486072e-06,
"loss": 0.6202,
"step": 110
},
{
"epoch": 0.12117903930131005,
"grad_norm": 1.3994680718242534,
"learning_rate": 7.890501895479879e-06,
"loss": 0.6208,
"step": 111
},
{
"epoch": 0.1222707423580786,
"grad_norm": 1.6636406803513806,
"learning_rate": 7.887619653109788e-06,
"loss": 0.6677,
"step": 112
},
{
"epoch": 0.12336244541484716,
"grad_norm": 1.4532843659222607,
"learning_rate": 7.884700692026134e-06,
"loss": 0.5871,
"step": 113
},
{
"epoch": 0.12445414847161572,
"grad_norm": 1.4662200934134768,
"learning_rate": 7.881745049346169e-06,
"loss": 0.5987,
"step": 114
},
{
"epoch": 0.12554585152838427,
"grad_norm": 1.475099944420446,
"learning_rate": 7.878752762653575e-06,
"loss": 0.647,
"step": 115
},
{
"epoch": 0.12663755458515283,
"grad_norm": 1.5380967311487281,
"learning_rate": 7.875723869998009e-06,
"loss": 0.7604,
"step": 116
},
{
"epoch": 0.1277292576419214,
"grad_norm": 1.571718225913752,
"learning_rate": 7.872658409894593e-06,
"loss": 0.6236,
"step": 117
},
{
"epoch": 0.12882096069868995,
"grad_norm": 1.4096634500954293,
"learning_rate": 7.86955642132345e-06,
"loss": 0.6519,
"step": 118
},
{
"epoch": 0.1299126637554585,
"grad_norm": 1.4944998170948933,
"learning_rate": 7.866417943729186e-06,
"loss": 0.619,
"step": 119
},
{
"epoch": 0.13100436681222707,
"grad_norm": 1.5310843619532999,
"learning_rate": 7.863243017020403e-06,
"loss": 0.625,
"step": 120
},
{
"epoch": 0.13209606986899564,
"grad_norm": 1.441967778632453,
"learning_rate": 7.860031681569183e-06,
"loss": 0.6129,
"step": 121
},
{
"epoch": 0.1331877729257642,
"grad_norm": 1.4984295970887447,
"learning_rate": 7.856783978210586e-06,
"loss": 0.6419,
"step": 122
},
{
"epoch": 0.13427947598253276,
"grad_norm": 1.3778311870438549,
"learning_rate": 7.853499948242111e-06,
"loss": 0.6067,
"step": 123
},
{
"epoch": 0.13537117903930132,
"grad_norm": 1.5135481075273571,
"learning_rate": 7.850179633423195e-06,
"loss": 0.6112,
"step": 124
},
{
"epoch": 0.13646288209606988,
"grad_norm": 1.3279833242854147,
"learning_rate": 7.846823075974663e-06,
"loss": 0.5415,
"step": 125
},
{
"epoch": 0.13755458515283842,
"grad_norm": 1.4776236100804023,
"learning_rate": 7.843430318578198e-06,
"loss": 0.5892,
"step": 126
},
{
"epoch": 0.13864628820960698,
"grad_norm": 1.3091913650405782,
"learning_rate": 7.840001404375803e-06,
"loss": 0.6429,
"step": 127
},
{
"epoch": 0.13973799126637554,
"grad_norm": 1.2931286140842404,
"learning_rate": 7.836536376969242e-06,
"loss": 0.4929,
"step": 128
},
{
"epoch": 0.1408296943231441,
"grad_norm": 1.3706911268954516,
"learning_rate": 7.833035280419495e-06,
"loss": 0.7081,
"step": 129
},
{
"epoch": 0.14192139737991266,
"grad_norm": 1.4028173055022224,
"learning_rate": 7.829498159246193e-06,
"loss": 0.5754,
"step": 130
},
{
"epoch": 0.14301310043668122,
"grad_norm": 1.5579836702697056,
"learning_rate": 7.825925058427054e-06,
"loss": 0.5518,
"step": 131
},
{
"epoch": 0.14410480349344978,
"grad_norm": 1.3789925803317493,
"learning_rate": 7.822316023397308e-06,
"loss": 0.67,
"step": 132
},
{
"epoch": 0.14519650655021835,
"grad_norm": 1.6438796790386405,
"learning_rate": 7.81867110004912e-06,
"loss": 0.6262,
"step": 133
},
{
"epoch": 0.1462882096069869,
"grad_norm": 1.5932532515551787,
"learning_rate": 7.814990334731015e-06,
"loss": 0.5472,
"step": 134
},
{
"epoch": 0.14737991266375547,
"grad_norm": 1.4218597455087487,
"learning_rate": 7.811273774247271e-06,
"loss": 0.6212,
"step": 135
},
{
"epoch": 0.14847161572052403,
"grad_norm": 1.4979752973234424,
"learning_rate": 7.80752146585734e-06,
"loss": 0.5449,
"step": 136
},
{
"epoch": 0.14956331877729256,
"grad_norm": 1.5503434977081532,
"learning_rate": 7.803733457275241e-06,
"loss": 0.5627,
"step": 137
},
{
"epoch": 0.15065502183406113,
"grad_norm": 1.3385399338417763,
"learning_rate": 7.799909796668952e-06,
"loss": 0.6285,
"step": 138
},
{
"epoch": 0.1517467248908297,
"grad_norm": 1.6086857843123978,
"learning_rate": 7.796050532659801e-06,
"loss": 0.6201,
"step": 139
},
{
"epoch": 0.15283842794759825,
"grad_norm": 1.5042189043053371,
"learning_rate": 7.792155714321844e-06,
"loss": 0.5448,
"step": 140
},
{
"epoch": 0.1539301310043668,
"grad_norm": 1.496614591995746,
"learning_rate": 7.788225391181244e-06,
"loss": 0.5677,
"step": 141
},
{
"epoch": 0.15502183406113537,
"grad_norm": 1.3673478795664078,
"learning_rate": 7.784259613215637e-06,
"loss": 0.4902,
"step": 142
},
{
"epoch": 0.15611353711790393,
"grad_norm": 1.5463971091149502,
"learning_rate": 7.780258430853501e-06,
"loss": 0.6009,
"step": 143
},
{
"epoch": 0.1572052401746725,
"grad_norm": 1.528859261250924,
"learning_rate": 7.776221894973517e-06,
"loss": 0.6209,
"step": 144
},
{
"epoch": 0.15829694323144106,
"grad_norm": 1.427566766037643,
"learning_rate": 7.77215005690391e-06,
"loss": 0.6433,
"step": 145
},
{
"epoch": 0.15938864628820962,
"grad_norm": 1.5336258102759508,
"learning_rate": 7.768042968421814e-06,
"loss": 0.6244,
"step": 146
},
{
"epoch": 0.16048034934497818,
"grad_norm": 1.4516187807428587,
"learning_rate": 7.763900681752594e-06,
"loss": 0.4526,
"step": 147
},
{
"epoch": 0.1615720524017467,
"grad_norm": 1.2426578447642056,
"learning_rate": 7.759723249569202e-06,
"loss": 0.5119,
"step": 148
},
{
"epoch": 0.16266375545851527,
"grad_norm": 1.3690342156175288,
"learning_rate": 7.755510724991491e-06,
"loss": 0.596,
"step": 149
},
{
"epoch": 0.16375545851528384,
"grad_norm": 1.4398744824849907,
"learning_rate": 7.751263161585547e-06,
"loss": 0.5291,
"step": 150
},
{
"epoch": 0.1648471615720524,
"grad_norm": 1.4736827868329099,
"learning_rate": 7.746980613363004e-06,
"loss": 0.5622,
"step": 151
},
{
"epoch": 0.16593886462882096,
"grad_norm": 1.1576108683772315,
"learning_rate": 7.742663134780369e-06,
"loss": 0.4897,
"step": 152
},
{
"epoch": 0.16703056768558952,
"grad_norm": 1.5090960302871992,
"learning_rate": 7.738310780738307e-06,
"loss": 0.712,
"step": 153
},
{
"epoch": 0.16812227074235808,
"grad_norm": 1.3964072666308516,
"learning_rate": 7.733923606580966e-06,
"loss": 0.6472,
"step": 154
},
{
"epoch": 0.16921397379912664,
"grad_norm": 1.6059506328673856,
"learning_rate": 7.729501668095264e-06,
"loss": 0.6786,
"step": 155
},
{
"epoch": 0.1703056768558952,
"grad_norm": 1.3441849797912846,
"learning_rate": 7.72504502151017e-06,
"loss": 0.5136,
"step": 156
},
{
"epoch": 0.17139737991266377,
"grad_norm": 1.636706525867172,
"learning_rate": 7.72055372349601e-06,
"loss": 0.6795,
"step": 157
},
{
"epoch": 0.17248908296943233,
"grad_norm": 1.4433457862208092,
"learning_rate": 7.716027831163723e-06,
"loss": 0.5496,
"step": 158
},
{
"epoch": 0.17358078602620086,
"grad_norm": 1.4594871883636047,
"learning_rate": 7.711467402064155e-06,
"loss": 0.5887,
"step": 159
},
{
"epoch": 0.17467248908296942,
"grad_norm": 1.3298172020543817,
"learning_rate": 7.706872494187313e-06,
"loss": 0.4975,
"step": 160
},
{
"epoch": 0.17576419213973798,
"grad_norm": 1.4792366232102332,
"learning_rate": 7.702243165961635e-06,
"loss": 0.5748,
"step": 161
},
{
"epoch": 0.17685589519650655,
"grad_norm": 1.1963230765110093,
"learning_rate": 7.697579476253248e-06,
"loss": 0.5333,
"step": 162
},
{
"epoch": 0.1779475982532751,
"grad_norm": 1.3100993334968998,
"learning_rate": 7.69288148436521e-06,
"loss": 0.529,
"step": 163
},
{
"epoch": 0.17903930131004367,
"grad_norm": 1.4299716683505044,
"learning_rate": 7.68814925003677e-06,
"loss": 0.4922,
"step": 164
},
{
"epoch": 0.18013100436681223,
"grad_norm": 1.2819297147739053,
"learning_rate": 7.683382833442594e-06,
"loss": 0.4941,
"step": 165
},
{
"epoch": 0.1812227074235808,
"grad_norm": 1.383396362069611,
"learning_rate": 7.678582295192009e-06,
"loss": 0.519,
"step": 166
},
{
"epoch": 0.18231441048034935,
"grad_norm": 1.5772925336837018,
"learning_rate": 7.673747696328235e-06,
"loss": 0.7051,
"step": 167
},
{
"epoch": 0.18340611353711792,
"grad_norm": 1.5305700874690167,
"learning_rate": 7.668879098327596e-06,
"loss": 0.5226,
"step": 168
},
{
"epoch": 0.18449781659388648,
"grad_norm": 1.7043647886602058,
"learning_rate": 7.663976563098748e-06,
"loss": 0.5749,
"step": 169
},
{
"epoch": 0.185589519650655,
"grad_norm": 1.4744519506752671,
"learning_rate": 7.659040152981894e-06,
"loss": 0.6675,
"step": 170
},
{
"epoch": 0.18668122270742357,
"grad_norm": 1.4080879804081063,
"learning_rate": 7.654069930747982e-06,
"loss": 0.5026,
"step": 171
},
{
"epoch": 0.18777292576419213,
"grad_norm": 1.3184115543301183,
"learning_rate": 7.649065959597916e-06,
"loss": 0.5183,
"step": 172
},
{
"epoch": 0.1888646288209607,
"grad_norm": 1.6589387141904088,
"learning_rate": 7.644028303161743e-06,
"loss": 0.5887,
"step": 173
},
{
"epoch": 0.18995633187772926,
"grad_norm": 1.5321842642074932,
"learning_rate": 7.638957025497857e-06,
"loss": 0.6049,
"step": 174
},
{
"epoch": 0.19104803493449782,
"grad_norm": 1.5678479812006205,
"learning_rate": 7.633852191092166e-06,
"loss": 0.5664,
"step": 175
},
{
"epoch": 0.19213973799126638,
"grad_norm": 1.416930808688244,
"learning_rate": 7.6287138648572946e-06,
"loss": 0.5525,
"step": 176
},
{
"epoch": 0.19323144104803494,
"grad_norm": 1.335762536841671,
"learning_rate": 7.623542112131734e-06,
"loss": 0.5139,
"step": 177
},
{
"epoch": 0.1943231441048035,
"grad_norm": 1.473220842403004,
"learning_rate": 7.618336998679034e-06,
"loss": 0.6093,
"step": 178
},
{
"epoch": 0.19541484716157206,
"grad_norm": 1.4576307884390227,
"learning_rate": 7.6130985906869515e-06,
"loss": 0.5685,
"step": 179
},
{
"epoch": 0.1965065502183406,
"grad_norm": 1.4673412599144768,
"learning_rate": 7.6078269547666135e-06,
"loss": 0.5365,
"step": 180
},
{
"epoch": 0.19759825327510916,
"grad_norm": 1.5183161060171537,
"learning_rate": 7.602522157951672e-06,
"loss": 0.521,
"step": 181
},
{
"epoch": 0.19868995633187772,
"grad_norm": 1.4768425270547154,
"learning_rate": 7.597184267697449e-06,
"loss": 0.5472,
"step": 182
},
{
"epoch": 0.19978165938864628,
"grad_norm": 1.5653932400158284,
"learning_rate": 7.591813351880081e-06,
"loss": 0.5521,
"step": 183
},
{
"epoch": 0.20087336244541484,
"grad_norm": 1.739179296646375,
"learning_rate": 7.586409478795654e-06,
"loss": 0.5815,
"step": 184
},
{
"epoch": 0.2019650655021834,
"grad_norm": 1.8305945169666482,
"learning_rate": 7.580972717159335e-06,
"loss": 0.5967,
"step": 185
},
{
"epoch": 0.20305676855895197,
"grad_norm": 1.5537911239908309,
"learning_rate": 7.5755031361044985e-06,
"loss": 0.5975,
"step": 186
},
{
"epoch": 0.20414847161572053,
"grad_norm": 1.4213609050860452,
"learning_rate": 7.57000080518185e-06,
"loss": 0.4776,
"step": 187
},
{
"epoch": 0.2052401746724891,
"grad_norm": 1.4789155471626043,
"learning_rate": 7.564465794358536e-06,
"loss": 0.6826,
"step": 188
},
{
"epoch": 0.20633187772925765,
"grad_norm": 1.566883946086422,
"learning_rate": 7.558898174017262e-06,
"loss": 0.5313,
"step": 189
},
{
"epoch": 0.2074235807860262,
"grad_norm": 1.7724230033266728,
"learning_rate": 7.55329801495539e-06,
"loss": 0.5543,
"step": 190
},
{
"epoch": 0.20851528384279475,
"grad_norm": 1.494585694856317,
"learning_rate": 7.547665388384041e-06,
"loss": 0.5714,
"step": 191
},
{
"epoch": 0.2096069868995633,
"grad_norm": 1.6571585826088926,
"learning_rate": 7.542000365927194e-06,
"loss": 0.5344,
"step": 192
},
{
"epoch": 0.21069868995633187,
"grad_norm": 1.37196223933786,
"learning_rate": 7.536303019620765e-06,
"loss": 0.6217,
"step": 193
},
{
"epoch": 0.21179039301310043,
"grad_norm": 1.4468057089753752,
"learning_rate": 7.530573421911705e-06,
"loss": 0.5434,
"step": 194
},
{
"epoch": 0.212882096069869,
"grad_norm": 1.5177202673287582,
"learning_rate": 7.524811645657063e-06,
"loss": 0.6779,
"step": 195
},
{
"epoch": 0.21397379912663755,
"grad_norm": 1.5725281443663255,
"learning_rate": 7.519017764123074e-06,
"loss": 0.6112,
"step": 196
},
{
"epoch": 0.21506550218340612,
"grad_norm": 1.575239994763357,
"learning_rate": 7.513191850984215e-06,
"loss": 0.5418,
"step": 197
},
{
"epoch": 0.21615720524017468,
"grad_norm": 1.3004073921667239,
"learning_rate": 7.507333980322279e-06,
"loss": 0.5046,
"step": 198
},
{
"epoch": 0.21724890829694324,
"grad_norm": 1.332980149587294,
"learning_rate": 7.501444226625426e-06,
"loss": 0.5145,
"step": 199
},
{
"epoch": 0.2183406113537118,
"grad_norm": 1.4395725224087588,
"learning_rate": 7.495522664787237e-06,
"loss": 0.5503,
"step": 200
},
{
"epoch": 0.21943231441048036,
"grad_norm": 1.5415342261120932,
"learning_rate": 7.4895693701057655e-06,
"loss": 0.5744,
"step": 201
},
{
"epoch": 0.2205240174672489,
"grad_norm": 1.4676635626586152,
"learning_rate": 7.4835844182825726e-06,
"loss": 0.5928,
"step": 202
},
{
"epoch": 0.22161572052401746,
"grad_norm": 1.5559087687931528,
"learning_rate": 7.477567885421771e-06,
"loss": 0.5411,
"step": 203
},
{
"epoch": 0.22270742358078602,
"grad_norm": 1.296850939133172,
"learning_rate": 7.471519848029057e-06,
"loss": 0.5444,
"step": 204
},
{
"epoch": 0.22379912663755458,
"grad_norm": 1.4777489551219587,
"learning_rate": 7.465440383010733e-06,
"loss": 0.565,
"step": 205
},
{
"epoch": 0.22489082969432314,
"grad_norm": 1.42155992331424,
"learning_rate": 7.459329567672732e-06,
"loss": 0.654,
"step": 206
},
{
"epoch": 0.2259825327510917,
"grad_norm": 1.4070072461083232,
"learning_rate": 7.453187479719637e-06,
"loss": 0.5257,
"step": 207
},
{
"epoch": 0.22707423580786026,
"grad_norm": 1.4076749777799764,
"learning_rate": 7.4470141972536895e-06,
"loss": 0.6039,
"step": 208
},
{
"epoch": 0.22816593886462883,
"grad_norm": 1.3129034828037942,
"learning_rate": 7.440809798773798e-06,
"loss": 0.5754,
"step": 209
},
{
"epoch": 0.2292576419213974,
"grad_norm": 1.4692678959747856,
"learning_rate": 7.43457436317454e-06,
"loss": 0.6623,
"step": 210
},
{
"epoch": 0.23034934497816595,
"grad_norm": 1.2858994143144755,
"learning_rate": 7.428307969745156e-06,
"loss": 0.5861,
"step": 211
},
{
"epoch": 0.2314410480349345,
"grad_norm": 1.604597407185956,
"learning_rate": 7.422010698168546e-06,
"loss": 0.5987,
"step": 212
},
{
"epoch": 0.23253275109170304,
"grad_norm": 1.3655281919289834,
"learning_rate": 7.415682628520253e-06,
"loss": 0.5937,
"step": 213
},
{
"epoch": 0.2336244541484716,
"grad_norm": 1.506921841428789,
"learning_rate": 7.409323841267445e-06,
"loss": 0.6669,
"step": 214
},
{
"epoch": 0.23471615720524017,
"grad_norm": 1.5101680215020887,
"learning_rate": 7.402934417267893e-06,
"loss": 0.6308,
"step": 215
},
{
"epoch": 0.23580786026200873,
"grad_norm": 1.489150088241989,
"learning_rate": 7.396514437768942e-06,
"loss": 0.6392,
"step": 216
},
{
"epoch": 0.2368995633187773,
"grad_norm": 1.4119779294110884,
"learning_rate": 7.3900639844064754e-06,
"loss": 0.5056,
"step": 217
},
{
"epoch": 0.23799126637554585,
"grad_norm": 1.445389704500089,
"learning_rate": 7.383583139203887e-06,
"loss": 0.5456,
"step": 218
},
{
"epoch": 0.2390829694323144,
"grad_norm": 1.4709438420906595,
"learning_rate": 7.3770719845710206e-06,
"loss": 0.6319,
"step": 219
},
{
"epoch": 0.24017467248908297,
"grad_norm": 1.3633406909433219,
"learning_rate": 7.370530603303139e-06,
"loss": 0.5647,
"step": 220
},
{
"epoch": 0.24126637554585154,
"grad_norm": 1.4987673997428108,
"learning_rate": 7.363959078579863e-06,
"loss": 0.5942,
"step": 221
},
{
"epoch": 0.2423580786026201,
"grad_norm": 1.4832422188486167,
"learning_rate": 7.3573574939641115e-06,
"loss": 0.5636,
"step": 222
},
{
"epoch": 0.24344978165938866,
"grad_norm": 1.4162439765343409,
"learning_rate": 7.350725933401046e-06,
"loss": 0.5328,
"step": 223
},
{
"epoch": 0.2445414847161572,
"grad_norm": 1.4080662897353993,
"learning_rate": 7.344064481217e-06,
"loss": 0.5343,
"step": 224
},
{
"epoch": 0.24563318777292575,
"grad_norm": 1.5492494250984743,
"learning_rate": 7.3373732221184015e-06,
"loss": 0.5613,
"step": 225
},
{
"epoch": 0.24672489082969432,
"grad_norm": 2.649801771711655,
"learning_rate": 7.330652241190705e-06,
"loss": 0.6204,
"step": 226
},
{
"epoch": 0.24781659388646288,
"grad_norm": 1.472867143202289,
"learning_rate": 7.3239016238973034e-06,
"loss": 0.4709,
"step": 227
},
{
"epoch": 0.24890829694323144,
"grad_norm": 1.3984113233132964,
"learning_rate": 7.3171214560784426e-06,
"loss": 0.4408,
"step": 228
},
{
"epoch": 0.25,
"grad_norm": 1.3248399752948739,
"learning_rate": 7.310311823950132e-06,
"loss": 0.6315,
"step": 229
},
{
"epoch": 0.25109170305676853,
"grad_norm": 1.393688466478258,
"learning_rate": 7.3034728141030436e-06,
"loss": 0.6316,
"step": 230
},
{
"epoch": 0.2521834061135371,
"grad_norm": 1.5554375243032204,
"learning_rate": 7.2966045135014176e-06,
"loss": 0.552,
"step": 231
},
{
"epoch": 0.25327510917030566,
"grad_norm": 1.5113408431573132,
"learning_rate": 7.289707009481951e-06,
"loss": 0.5209,
"step": 232
},
{
"epoch": 0.25436681222707425,
"grad_norm": 1.8008902213945412,
"learning_rate": 7.282780389752688e-06,
"loss": 0.5615,
"step": 233
},
{
"epoch": 0.2554585152838428,
"grad_norm": 1.5242476878510804,
"learning_rate": 7.275824742391906e-06,
"loss": 0.5996,
"step": 234
},
{
"epoch": 0.25655021834061137,
"grad_norm": 1.6201534221012874,
"learning_rate": 7.2688401558469955e-06,
"loss": 0.6023,
"step": 235
},
{
"epoch": 0.2576419213973799,
"grad_norm": 1.65373334840132,
"learning_rate": 7.261826718933335e-06,
"loss": 0.6784,
"step": 236
},
{
"epoch": 0.2587336244541485,
"grad_norm": 1.357403882003097,
"learning_rate": 7.254784520833163e-06,
"loss": 0.537,
"step": 237
},
{
"epoch": 0.259825327510917,
"grad_norm": 1.5116722340731783,
"learning_rate": 7.247713651094441e-06,
"loss": 0.5698,
"step": 238
},
{
"epoch": 0.2609170305676856,
"grad_norm": 1.4288603307083088,
"learning_rate": 7.240614199629716e-06,
"loss": 0.6177,
"step": 239
},
{
"epoch": 0.26200873362445415,
"grad_norm": 1.4338899312326936,
"learning_rate": 7.23348625671498e-06,
"loss": 0.6443,
"step": 240
},
{
"epoch": 0.2631004366812227,
"grad_norm": 1.4962646720928754,
"learning_rate": 7.226329912988517e-06,
"loss": 0.6777,
"step": 241
},
{
"epoch": 0.26419213973799127,
"grad_norm": 1.2699992707782755,
"learning_rate": 7.219145259449756e-06,
"loss": 0.4732,
"step": 242
},
{
"epoch": 0.2652838427947598,
"grad_norm": 1.4131898857557792,
"learning_rate": 7.211932387458108e-06,
"loss": 0.6738,
"step": 243
},
{
"epoch": 0.2663755458515284,
"grad_norm": 1.5277531162623728,
"learning_rate": 7.2046913887318105e-06,
"loss": 0.612,
"step": 244
},
{
"epoch": 0.26746724890829693,
"grad_norm": 1.7351026071707434,
"learning_rate": 7.197422355346756e-06,
"loss": 0.6016,
"step": 245
},
{
"epoch": 0.2685589519650655,
"grad_norm": 1.613296891262151,
"learning_rate": 7.190125379735322e-06,
"loss": 0.476,
"step": 246
},
{
"epoch": 0.26965065502183405,
"grad_norm": 1.2399403442028993,
"learning_rate": 7.182800554685201e-06,
"loss": 0.568,
"step": 247
},
{
"epoch": 0.27074235807860264,
"grad_norm": 1.602746922015574,
"learning_rate": 7.175447973338213e-06,
"loss": 0.6195,
"step": 248
},
{
"epoch": 0.2718340611353712,
"grad_norm": 1.3651825200088947,
"learning_rate": 7.168067729189126e-06,
"loss": 0.5219,
"step": 249
},
{
"epoch": 0.27292576419213976,
"grad_norm": 1.4769248274303346,
"learning_rate": 7.160659916084464e-06,
"loss": 0.5377,
"step": 250
},
{
"epoch": 0.2740174672489083,
"grad_norm": 1.625431355976466,
"learning_rate": 7.153224628221318e-06,
"loss": 0.5888,
"step": 251
},
{
"epoch": 0.27510917030567683,
"grad_norm": 1.363907269155982,
"learning_rate": 7.145761960146144e-06,
"loss": 0.6062,
"step": 252
},
{
"epoch": 0.2762008733624454,
"grad_norm": 1.85268561061027,
"learning_rate": 7.138272006753563e-06,
"loss": 0.4969,
"step": 253
},
{
"epoch": 0.27729257641921395,
"grad_norm": 1.5428882847739833,
"learning_rate": 7.130754863285153e-06,
"loss": 0.4758,
"step": 254
},
{
"epoch": 0.27838427947598254,
"grad_norm": 1.6972894826270524,
"learning_rate": 7.123210625328238e-06,
"loss": 0.6367,
"step": 255
},
{
"epoch": 0.2794759825327511,
"grad_norm": 1.56171538368843,
"learning_rate": 7.115639388814675e-06,
"loss": 0.5483,
"step": 256
},
{
"epoch": 0.28056768558951967,
"grad_norm": 1.616159463723734,
"learning_rate": 7.1080412500196316e-06,
"loss": 0.6188,
"step": 257
},
{
"epoch": 0.2816593886462882,
"grad_norm": 1.664727956285899,
"learning_rate": 7.10041630556036e-06,
"loss": 0.5104,
"step": 258
},
{
"epoch": 0.2827510917030568,
"grad_norm": 1.472585818104954,
"learning_rate": 7.092764652394974e-06,
"loss": 0.5146,
"step": 259
},
{
"epoch": 0.2838427947598253,
"grad_norm": 1.2854288778227903,
"learning_rate": 7.085086387821213e-06,
"loss": 0.6012,
"step": 260
},
{
"epoch": 0.2849344978165939,
"grad_norm": 1.2852267764551335,
"learning_rate": 7.0773816094752015e-06,
"loss": 0.6049,
"step": 261
},
{
"epoch": 0.28602620087336245,
"grad_norm": 1.3567820396359396,
"learning_rate": 7.069650415330213e-06,
"loss": 0.531,
"step": 262
},
{
"epoch": 0.287117903930131,
"grad_norm": 1.460404193275241,
"learning_rate": 7.061892903695423e-06,
"loss": 0.5811,
"step": 263
},
{
"epoch": 0.28820960698689957,
"grad_norm": 1.4744335745677344,
"learning_rate": 7.0541091732146535e-06,
"loss": 0.6736,
"step": 264
},
{
"epoch": 0.2893013100436681,
"grad_norm": 1.4944561183840575,
"learning_rate": 7.046299322865131e-06,
"loss": 0.62,
"step": 265
},
{
"epoch": 0.2903930131004367,
"grad_norm": 1.5993459040844344,
"learning_rate": 7.038463451956209e-06,
"loss": 0.6203,
"step": 266
},
{
"epoch": 0.2914847161572052,
"grad_norm": 1.398558452093382,
"learning_rate": 7.030601660128127e-06,
"loss": 0.6007,
"step": 267
},
{
"epoch": 0.2925764192139738,
"grad_norm": 1.490504616058761,
"learning_rate": 7.0227140473507245e-06,
"loss": 0.5718,
"step": 268
},
{
"epoch": 0.29366812227074235,
"grad_norm": 1.3890197651526783,
"learning_rate": 7.014800713922181e-06,
"loss": 0.6712,
"step": 269
},
{
"epoch": 0.29475982532751094,
"grad_norm": 1.475360288498539,
"learning_rate": 7.006861760467741e-06,
"loss": 0.6257,
"step": 270
},
{
"epoch": 0.29585152838427947,
"grad_norm": 1.4498851473925982,
"learning_rate": 6.998897287938424e-06,
"loss": 0.5378,
"step": 271
},
{
"epoch": 0.29694323144104806,
"grad_norm": 1.3144961611880193,
"learning_rate": 6.990907397609751e-06,
"loss": 0.5108,
"step": 272
},
{
"epoch": 0.2980349344978166,
"grad_norm": 1.3859005230286172,
"learning_rate": 6.982892191080456e-06,
"loss": 0.4885,
"step": 273
},
{
"epoch": 0.29912663755458513,
"grad_norm": 1.221223897043554,
"learning_rate": 6.974851770271186e-06,
"loss": 0.5224,
"step": 274
},
{
"epoch": 0.3002183406113537,
"grad_norm": 1.38427922807046,
"learning_rate": 6.966786237423218e-06,
"loss": 0.5956,
"step": 275
},
{
"epoch": 0.30131004366812225,
"grad_norm": 1.2894973461951942,
"learning_rate": 6.958695695097145e-06,
"loss": 0.4793,
"step": 276
},
{
"epoch": 0.30240174672489084,
"grad_norm": 1.4482446520872734,
"learning_rate": 6.950580246171581e-06,
"loss": 0.6768,
"step": 277
},
{
"epoch": 0.3034934497816594,
"grad_norm": 1.5030922903307724,
"learning_rate": 6.942439993841848e-06,
"loss": 0.4834,
"step": 278
},
{
"epoch": 0.30458515283842796,
"grad_norm": 1.4439095189849094,
"learning_rate": 6.93427504161867e-06,
"loss": 0.4781,
"step": 279
},
{
"epoch": 0.3056768558951965,
"grad_norm": 1.4054270284734758,
"learning_rate": 6.9260854933268466e-06,
"loss": 0.5164,
"step": 280
},
{
"epoch": 0.3056768558951965,
"eval_accuracy": 0.8122814246384281,
"eval_accuracy_first_token": 0.7722520229111738,
"eval_accuracy_first_token_<": 0.9699042407660738,
"eval_accuracy_first_token_<_total": 2924,
"eval_accuracy_first_token_<|python_tag|>": 0.8661971830985915,
"eval_accuracy_first_token_<|python_tag|>_total": 994,
"eval_accuracy_first_token_Here": 0.6010362694300518,
"eval_accuracy_first_token_Here_total": 386,
"eval_accuracy_first_token_The": 0.9305943358595932,
"eval_accuracy_first_token_The_total": 2507,
"eval_accuracy_first_token_To": 0.7403846153846154,
"eval_accuracy_first_token_To_total": 936,
"eval_first_token_param_values": 0.9361062892241955,
"eval_first_token_param_values_total": 14978,
"eval_loss": 0.5944764614105225,
"eval_perplexity": 1.1145611460638327,
"eval_runtime": 404.5683,
"eval_samples_per_second": 1.916,
"eval_steps_per_second": 0.24,
"eval_total_number_first_token": 10999,
"step": 280
},
{
"epoch": 0.3067685589519651,
"grad_norm": 1.2731720997992346,
"learning_rate": 6.917871453103946e-06,
"loss": 0.5342,
"step": 281
},
{
"epoch": 0.3078602620087336,
"grad_norm": 1.4627726722347458,
"learning_rate": 6.909633025398968e-06,
"loss": 0.4906,
"step": 282
},
{
"epoch": 0.3089519650655022,
"grad_norm": 1.391286615450489,
"learning_rate": 6.901370314971025e-06,
"loss": 0.5165,
"step": 283
},
{
"epoch": 0.31004366812227074,
"grad_norm": 1.461047803005325,
"learning_rate": 6.893083426888001e-06,
"loss": 0.6427,
"step": 284
},
{
"epoch": 0.3111353711790393,
"grad_norm": 1.4333799644406657,
"learning_rate": 6.884772466525229e-06,
"loss": 0.5478,
"step": 285
},
{
"epoch": 0.31222707423580787,
"grad_norm": 1.4838513414542995,
"learning_rate": 6.8764375395641365e-06,
"loss": 0.6074,
"step": 286
},
{
"epoch": 0.3133187772925764,
"grad_norm": 1.2646171911891406,
"learning_rate": 6.86807875199091e-06,
"loss": 0.5755,
"step": 287
},
{
"epoch": 0.314410480349345,
"grad_norm": 1.2542325136137744,
"learning_rate": 6.859696210095146e-06,
"loss": 0.5502,
"step": 288
},
{
"epoch": 0.3155021834061135,
"grad_norm": 1.2321799849318462,
"learning_rate": 6.851290020468498e-06,
"loss": 0.6221,
"step": 289
},
{
"epoch": 0.3165938864628821,
"grad_norm": 1.2953758966810032,
"learning_rate": 6.842860290003322e-06,
"loss": 0.5903,
"step": 290
},
{
"epoch": 0.31768558951965065,
"grad_norm": 1.7395783863975123,
"learning_rate": 6.8344071258913206e-06,
"loss": 0.5963,
"step": 291
},
{
"epoch": 0.31877729257641924,
"grad_norm": 1.4642573503581784,
"learning_rate": 6.825930635622169e-06,
"loss": 0.5011,
"step": 292
},
{
"epoch": 0.31986899563318777,
"grad_norm": 1.7487735502303772,
"learning_rate": 6.817430926982163e-06,
"loss": 0.5699,
"step": 293
},
{
"epoch": 0.32096069868995636,
"grad_norm": 1.4436349581997563,
"learning_rate": 6.80890810805284e-06,
"loss": 0.559,
"step": 294
},
{
"epoch": 0.3220524017467249,
"grad_norm": 1.4902082225296027,
"learning_rate": 6.8003622872096e-06,
"loss": 0.6149,
"step": 295
},
{
"epoch": 0.3231441048034934,
"grad_norm": 1.5021927515338005,
"learning_rate": 6.791793573120342e-06,
"loss": 0.5098,
"step": 296
},
{
"epoch": 0.324235807860262,
"grad_norm": 1.2568489718668712,
"learning_rate": 6.7832020747440645e-06,
"loss": 0.4616,
"step": 297
},
{
"epoch": 0.32532751091703055,
"grad_norm": 1.85090367162997,
"learning_rate": 6.7745879013294945e-06,
"loss": 0.6342,
"step": 298
},
{
"epoch": 0.32641921397379914,
"grad_norm": 1.3622630820375037,
"learning_rate": 6.765951162413695e-06,
"loss": 0.574,
"step": 299
},
{
"epoch": 0.32751091703056767,
"grad_norm": 1.3544768555004285,
"learning_rate": 6.757291967820663e-06,
"loss": 0.4971,
"step": 300
},
{
"epoch": 0.32860262008733626,
"grad_norm": 1.2812885948189523,
"learning_rate": 6.748610427659944e-06,
"loss": 0.4419,
"step": 301
},
{
"epoch": 0.3296943231441048,
"grad_norm": 1.3044190835387586,
"learning_rate": 6.739906652325228e-06,
"loss": 0.5298,
"step": 302
},
{
"epoch": 0.3307860262008734,
"grad_norm": 1.6049637774921335,
"learning_rate": 6.731180752492944e-06,
"loss": 0.5731,
"step": 303
},
{
"epoch": 0.3318777292576419,
"grad_norm": 1.5188076633711975,
"learning_rate": 6.722432839120857e-06,
"loss": 0.5466,
"step": 304
},
{
"epoch": 0.3329694323144105,
"grad_norm": 1.2200311097592393,
"learning_rate": 6.713663023446648e-06,
"loss": 0.5754,
"step": 305
},
{
"epoch": 0.33406113537117904,
"grad_norm": 1.1943617882666278,
"learning_rate": 6.704871416986513e-06,
"loss": 0.6153,
"step": 306
},
{
"epoch": 0.3351528384279476,
"grad_norm": 1.2566424888713876,
"learning_rate": 6.696058131533732e-06,
"loss": 0.5391,
"step": 307
},
{
"epoch": 0.33624454148471616,
"grad_norm": 1.5354142684922412,
"learning_rate": 6.687223279157258e-06,
"loss": 0.512,
"step": 308
},
{
"epoch": 0.3373362445414847,
"grad_norm": 1.7632124793181871,
"learning_rate": 6.678366972200284e-06,
"loss": 0.5118,
"step": 309
},
{
"epoch": 0.3384279475982533,
"grad_norm": 1.4051894291399412,
"learning_rate": 6.669489323278818e-06,
"loss": 0.5959,
"step": 310
},
{
"epoch": 0.3395196506550218,
"grad_norm": 1.5575194677153044,
"learning_rate": 6.660590445280248e-06,
"loss": 0.5217,
"step": 311
},
{
"epoch": 0.3406113537117904,
"grad_norm": 1.445909145564962,
"learning_rate": 6.651670451361916e-06,
"loss": 0.5329,
"step": 312
},
{
"epoch": 0.34170305676855894,
"grad_norm": 1.6148708469036988,
"learning_rate": 6.642729454949665e-06,
"loss": 0.5441,
"step": 313
},
{
"epoch": 0.34279475982532753,
"grad_norm": 1.3877288714622236,
"learning_rate": 6.633767569736406e-06,
"loss": 0.4973,
"step": 314
},
{
"epoch": 0.34388646288209607,
"grad_norm": 1.5705634975133718,
"learning_rate": 6.624784909680674e-06,
"loss": 0.5379,
"step": 315
},
{
"epoch": 0.34497816593886466,
"grad_norm": 1.4683909348634254,
"learning_rate": 6.61578158900517e-06,
"loss": 0.5646,
"step": 316
},
{
"epoch": 0.3460698689956332,
"grad_norm": 1.444455571760505,
"learning_rate": 6.606757722195315e-06,
"loss": 0.6329,
"step": 317
},
{
"epoch": 0.3471615720524017,
"grad_norm": 1.4016572432346093,
"learning_rate": 6.597713423997795e-06,
"loss": 0.5736,
"step": 318
},
{
"epoch": 0.3482532751091703,
"grad_norm": 1.8469454503294669,
"learning_rate": 6.588648809419096e-06,
"loss": 0.5856,
"step": 319
},
{
"epoch": 0.34934497816593885,
"grad_norm": 1.3217897187946839,
"learning_rate": 6.579563993724048e-06,
"loss": 0.5124,
"step": 320
},
{
"epoch": 0.35043668122270744,
"grad_norm": 1.5194866076068712,
"learning_rate": 6.570459092434356e-06,
"loss": 0.4867,
"step": 321
},
{
"epoch": 0.35152838427947597,
"grad_norm": 1.408417064891368,
"learning_rate": 6.561334221327133e-06,
"loss": 0.5776,
"step": 322
},
{
"epoch": 0.35262008733624456,
"grad_norm": 1.4452329373302548,
"learning_rate": 6.552189496433421e-06,
"loss": 0.5251,
"step": 323
},
{
"epoch": 0.3537117903930131,
"grad_norm": 1.4435343192846628,
"learning_rate": 6.543025034036729e-06,
"loss": 0.5047,
"step": 324
},
{
"epoch": 0.3548034934497817,
"grad_norm": 1.3953834298183048,
"learning_rate": 6.533840950671539e-06,
"loss": 0.5376,
"step": 325
},
{
"epoch": 0.3558951965065502,
"grad_norm": 1.5746860190061827,
"learning_rate": 6.524637363121837e-06,
"loss": 0.5363,
"step": 326
},
{
"epoch": 0.3569868995633188,
"grad_norm": 1.430598020020894,
"learning_rate": 6.515414388419619e-06,
"loss": 0.6226,
"step": 327
},
{
"epoch": 0.35807860262008734,
"grad_norm": 1.2707811617436353,
"learning_rate": 6.5061721438434076e-06,
"loss": 0.4761,
"step": 328
},
{
"epoch": 0.35917030567685587,
"grad_norm": 1.5783432705751568,
"learning_rate": 6.496910746916759e-06,
"loss": 0.5214,
"step": 329
},
{
"epoch": 0.36026200873362446,
"grad_norm": 1.83574416889433,
"learning_rate": 6.487630315406768e-06,
"loss": 0.5599,
"step": 330
},
{
"epoch": 0.361353711790393,
"grad_norm": 1.322420845333774,
"learning_rate": 6.478330967322577e-06,
"loss": 0.5336,
"step": 331
},
{
"epoch": 0.3624454148471616,
"grad_norm": 1.3612789015918931,
"learning_rate": 6.469012820913861e-06,
"loss": 0.5508,
"step": 332
},
{
"epoch": 0.3635371179039301,
"grad_norm": 1.3891730681529915,
"learning_rate": 6.45967599466934e-06,
"loss": 0.4888,
"step": 333
},
{
"epoch": 0.3646288209606987,
"grad_norm": 1.4417746815764274,
"learning_rate": 6.45032060731526e-06,
"loss": 0.5416,
"step": 334
},
{
"epoch": 0.36572052401746724,
"grad_norm": 1.4895891533428605,
| "learning_rate": 6.440946777813895e-06, | |
| "loss": 0.5417, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.36681222707423583, | |
| "grad_norm": 1.4141847222045185, | |
| "learning_rate": 6.431554625362018e-06, | |
| "loss": 0.4896, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.36790393013100436, | |
| "grad_norm": 1.5117794325198755, | |
| "learning_rate": 6.422144269389406e-06, | |
| "loss": 0.5532, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.36899563318777295, | |
| "grad_norm": 1.58498155036596, | |
| "learning_rate": 6.412715829557301e-06, | |
| "loss": 0.4635, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.3700873362445415, | |
| "grad_norm": 1.3847721442020027, | |
| "learning_rate": 6.403269425756905e-06, | |
| "loss": 0.4702, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.37117903930131, | |
| "grad_norm": 1.3681322527990918, | |
| "learning_rate": 6.3938051781078405e-06, | |
| "loss": 0.5593, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.3722707423580786, | |
| "grad_norm": 1.503661732921771, | |
| "learning_rate": 6.3843232069566396e-06, | |
| "loss": 0.5693, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.37336244541484714, | |
| "grad_norm": 1.5045172592179, | |
| "learning_rate": 6.374823632875197e-06, | |
| "loss": 0.5619, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.37445414847161573, | |
| "grad_norm": 1.4568114586028496, | |
| "learning_rate": 6.365306576659252e-06, | |
| "loss": 0.5992, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.37554585152838427, | |
| "grad_norm": 1.2603187279915327, | |
| "learning_rate": 6.3557721593268375e-06, | |
| "loss": 0.5808, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.37663755458515286, | |
| "grad_norm": 1.5105002968929564, | |
| "learning_rate": 6.3462205021167545e-06, | |
| "loss": 0.4838, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.3777292576419214, | |
| "grad_norm": 1.7236853852664615, | |
| "learning_rate": 6.336651726487023e-06, | |
| "loss": 0.519, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.37882096069869, | |
| "grad_norm": 1.5823366228954794, | |
| "learning_rate": 6.327065954113339e-06, | |
| "loss": 0.5357, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.3799126637554585, | |
| "grad_norm": 1.6501056758652983, | |
| "learning_rate": 6.317463306887525e-06, | |
| "loss": 0.7162, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.38100436681222705, | |
| "grad_norm": 1.612984303794767, | |
| "learning_rate": 6.307843906915991e-06, | |
| "loss": 0.5861, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.38209606986899564, | |
| "grad_norm": 2.0839316924516287, | |
| "learning_rate": 6.298207876518162e-06, | |
| "loss": 0.5356, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.38318777292576417, | |
| "grad_norm": 1.6442415918972877, | |
| "learning_rate": 6.288555338224944e-06, | |
| "loss": 0.4907, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.38427947598253276, | |
| "grad_norm": 1.4680570384981029, | |
| "learning_rate": 6.278886414777148e-06, | |
| "loss": 0.5877, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.3853711790393013, | |
| "grad_norm": 1.5004977869240974, | |
| "learning_rate": 6.269201229123943e-06, | |
| "loss": 0.5518, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.3864628820960699, | |
| "grad_norm": 1.5820396873030045, | |
| "learning_rate": 6.259499904421285e-06, | |
| "loss": 0.566, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.3875545851528384, | |
| "grad_norm": 1.2944420053683188, | |
| "learning_rate": 6.249782564030352e-06, | |
| "loss": 0.41, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.388646288209607, | |
| "grad_norm": 1.2182802013913994, | |
| "learning_rate": 6.240049331515976e-06, | |
| "loss": 0.4647, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.38973799126637554, | |
| "grad_norm": 1.5151046008059181, | |
| "learning_rate": 6.230300330645072e-06, | |
| "loss": 0.591, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.39082969432314413, | |
| "grad_norm": 1.4656723716881035, | |
| "learning_rate": 6.220535685385064e-06, | |
| "loss": 0.5486, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.39192139737991266, | |
| "grad_norm": 1.256305336512033, | |
| "learning_rate": 6.21075551990231e-06, | |
| "loss": 0.558, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.3930131004366812, | |
| "grad_norm": 1.3298929325184357, | |
| "learning_rate": 6.20095995856052e-06, | |
| "loss": 0.5805, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.3941048034934498, | |
| "grad_norm": 1.4212920029170337, | |
| "learning_rate": 6.1911491259191766e-06, | |
| "loss": 0.4801, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.3951965065502183, | |
| "grad_norm": 1.366679203076844, | |
| "learning_rate": 6.181323146731951e-06, | |
| "loss": 0.5107, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.3962882096069869, | |
| "grad_norm": 1.38703232756071, | |
| "learning_rate": 6.1714821459451186e-06, | |
| "loss": 0.5678, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.39737991266375544, | |
| "grad_norm": 1.4858572589764882, | |
| "learning_rate": 6.1616262486959646e-06, | |
| "loss": 0.4529, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.39847161572052403, | |
| "grad_norm": 1.5348563857001403, | |
| "learning_rate": 6.151755580311199e-06, | |
| "loss": 0.5933, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.39956331877729256, | |
| "grad_norm": 1.4176620057000997, | |
| "learning_rate": 6.141870266305358e-06, | |
| "loss": 0.4918, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.40065502183406115, | |
| "grad_norm": 1.4951222520742187, | |
| "learning_rate": 6.131970432379213e-06, | |
| "loss": 0.5709, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.4017467248908297, | |
| "grad_norm": 1.6210277269111266, | |
| "learning_rate": 6.122056204418166e-06, | |
| "loss": 0.5339, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.4028384279475983, | |
| "grad_norm": 1.8538643894848557, | |
| "learning_rate": 6.112127708490657e-06, | |
| "loss": 0.5994, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.4039301310043668, | |
| "grad_norm": 1.3107005664617082, | |
| "learning_rate": 6.102185070846551e-06, | |
| "loss": 0.5299, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.40502183406113534, | |
| "grad_norm": 1.52599657247444, | |
| "learning_rate": 6.092228417915541e-06, | |
| "loss": 0.6073, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.40611353711790393, | |
| "grad_norm": 1.4601043256949482, | |
| "learning_rate": 6.082257876305534e-06, | |
| "loss": 0.5638, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.40720524017467247, | |
| "grad_norm": 1.4005447907301034, | |
| "learning_rate": 6.07227357280105e-06, | |
| "loss": 0.5575, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.40829694323144106, | |
| "grad_norm": 1.5877468909166246, | |
| "learning_rate": 6.062275634361599e-06, | |
| "loss": 0.6142, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.4093886462882096, | |
| "grad_norm": 1.8682639604384457, | |
| "learning_rate": 6.0522641881200734e-06, | |
| "loss": 0.6107, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.4104803493449782, | |
| "grad_norm": 1.3289776248271887, | |
| "learning_rate": 6.042239361381128e-06, | |
| "loss": 0.5571, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.4115720524017467, | |
| "grad_norm": 1.2005221136761943, | |
| "learning_rate": 6.032201281619565e-06, | |
| "loss": 0.6254, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.4126637554585153, | |
| "grad_norm": 1.3166126771313549, | |
| "learning_rate": 6.02215007647871e-06, | |
| "loss": 0.4617, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.41375545851528384, | |
| "grad_norm": 1.5210638954921578, | |
| "learning_rate": 6.012085873768791e-06, | |
| "loss": 0.5461, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.4148471615720524, | |
| "grad_norm": 1.5029108053726574, | |
| "learning_rate": 6.002008801465307e-06, | |
| "loss": 0.5954, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.41593886462882096, | |
| "grad_norm": 1.4322948512523916, | |
| "learning_rate": 5.991918987707414e-06, | |
| "loss": 0.5222, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.4170305676855895, | |
| "grad_norm": 1.5951914575628332, | |
| "learning_rate": 5.981816560796278e-06, | |
| "loss": 0.5121, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.4181222707423581, | |
| "grad_norm": 1.4733981716395201, | |
| "learning_rate": 5.971701649193461e-06, | |
| "loss": 0.5417, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.4192139737991266, | |
| "grad_norm": 1.4093851168934195, | |
| "learning_rate": 5.961574381519272e-06, | |
| "loss": 0.532, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.4203056768558952, | |
| "grad_norm": 1.8731153406678291, | |
| "learning_rate": 5.951434886551144e-06, | |
| "loss": 0.596, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.42139737991266374, | |
| "grad_norm": 1.3983487740826432, | |
| "learning_rate": 5.941283293221989e-06, | |
| "loss": 0.5818, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.42248908296943233, | |
| "grad_norm": 1.438105551234948, | |
| "learning_rate": 5.931119730618561e-06, | |
| "loss": 0.6056, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.42358078602620086, | |
| "grad_norm": 1.5050579045957733, | |
| "learning_rate": 5.920944327979814e-06, | |
| "loss": 0.5443, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.42467248908296945, | |
| "grad_norm": 1.7282154077005178, | |
| "learning_rate": 5.910757214695259e-06, | |
| "loss": 0.5875, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.425764192139738, | |
| "grad_norm": 1.280595735861228, | |
| "learning_rate": 5.900558520303318e-06, | |
| "loss": 0.5573, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.4268558951965066, | |
| "grad_norm": 1.2728154446007562, | |
| "learning_rate": 5.89034837448968e-06, | |
| "loss": 0.523, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.4279475982532751, | |
| "grad_norm": 1.5426782593172892, | |
| "learning_rate": 5.880126907085643e-06, | |
| "loss": 0.5358, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.42903930131004364, | |
| "grad_norm": 1.5200061103097575, | |
| "learning_rate": 5.8698942480664775e-06, | |
| "loss": 0.5418, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.43013100436681223, | |
| "grad_norm": 1.7356764870493646, | |
| "learning_rate": 5.859650527549761e-06, | |
| "loss": 0.5654, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.43122270742358076, | |
| "grad_norm": 1.3654496081014231, | |
| "learning_rate": 5.849395875793726e-06, | |
| "loss": 0.5915, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.43231441048034935, | |
| "grad_norm": 2.808147594668986, | |
| "learning_rate": 5.839130423195609e-06, | |
| "loss": 0.5665, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4334061135371179, | |
| "grad_norm": 1.4833752079280529, | |
| "learning_rate": 5.82885430028999e-06, | |
| "loss": 0.5413, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.4344978165938865, | |
| "grad_norm": 1.6449672988864015, | |
| "learning_rate": 5.8185676377471295e-06, | |
| "loss": 0.5401, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.435589519650655, | |
| "grad_norm": 1.1977061243594427, | |
| "learning_rate": 5.808270566371307e-06, | |
| "loss": 0.4718, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.4366812227074236, | |
| "grad_norm": 1.3405831214725057, | |
| "learning_rate": 5.7979632170991665e-06, | |
| "loss": 0.568, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.43777292576419213, | |
| "grad_norm": 2.094672745602528, | |
| "learning_rate": 5.787645720998037e-06, | |
| "loss": 0.6035, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.4388646288209607, | |
| "grad_norm": 1.2660085035873359, | |
| "learning_rate": 5.7773182092642785e-06, | |
| "loss": 0.5269, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.43995633187772926, | |
| "grad_norm": 1.4836506744346154, | |
| "learning_rate": 5.766980813221607e-06, | |
| "loss": 0.5382, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4410480349344978, | |
| "grad_norm": 1.4272157208447975, | |
| "learning_rate": 5.75663366431943e-06, | |
| "loss": 0.5292, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.4421397379912664, | |
| "grad_norm": 1.712850218127739, | |
| "learning_rate": 5.746276894131165e-06, | |
| "loss": 0.5852, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.4432314410480349, | |
| "grad_norm": 1.2286185333492807, | |
| "learning_rate": 5.735910634352579e-06, | |
| "loss": 0.5017, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.4443231441048035, | |
| "grad_norm": 1.238407584969687, | |
| "learning_rate": 5.725535016800103e-06, | |
| "loss": 0.4684, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.44541484716157204, | |
| "grad_norm": 1.5146880310733182, | |
| "learning_rate": 5.7151501734091655e-06, | |
| "loss": 0.6328, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.4465065502183406, | |
| "grad_norm": 1.2536815602440197, | |
| "learning_rate": 5.704756236232505e-06, | |
| "loss": 0.4686, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.44759825327510916, | |
| "grad_norm": 1.3163930911885626, | |
| "learning_rate": 5.6943533374385e-06, | |
| "loss": 0.5248, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.44868995633187775, | |
| "grad_norm": 1.4295888211351506, | |
| "learning_rate": 5.683941609309483e-06, | |
| "loss": 0.5344, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.4497816593886463, | |
| "grad_norm": 1.3389542681794442, | |
| "learning_rate": 5.6735211842400565e-06, | |
| "loss": 0.5823, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.45087336244541487, | |
| "grad_norm": 1.411103751375743, | |
| "learning_rate": 5.6630921947354155e-06, | |
| "loss": 0.5351, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.4519650655021834, | |
| "grad_norm": 1.3449450319960814, | |
| "learning_rate": 5.652654773409659e-06, | |
| "loss": 0.5219, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.45305676855895194, | |
| "grad_norm": 1.2260548801396869, | |
| "learning_rate": 5.642209052984106e-06, | |
| "loss": 0.5583, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.45414847161572053, | |
| "grad_norm": 1.2016754154637224, | |
| "learning_rate": 5.631755166285602e-06, | |
| "loss": 0.4704, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.45524017467248906, | |
| "grad_norm": 1.6555020620471408, | |
| "learning_rate": 5.6212932462448376e-06, | |
| "loss": 0.484, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.45633187772925765, | |
| "grad_norm": 1.5311624701116364, | |
| "learning_rate": 5.610823425894653e-06, | |
| "loss": 0.5493, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.4574235807860262, | |
| "grad_norm": 1.7090470948405863, | |
| "learning_rate": 5.600345838368348e-06, | |
| "loss": 0.5095, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.4585152838427948, | |
| "grad_norm": 1.512713841712156, | |
| "learning_rate": 5.589860616897991e-06, | |
| "loss": 0.5147, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.4596069868995633, | |
| "grad_norm": 1.7872665411814004, | |
| "learning_rate": 5.5793678948127214e-06, | |
| "loss": 0.5598, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.4606986899563319, | |
| "grad_norm": 1.6670125232959876, | |
| "learning_rate": 5.568867805537055e-06, | |
| "loss": 0.6108, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.46179039301310043, | |
| "grad_norm": 1.3800090550625805, | |
| "learning_rate": 5.558360482589189e-06, | |
| "loss": 0.4621, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.462882096069869, | |
| "grad_norm": 1.4361795723883242, | |
| "learning_rate": 5.547846059579304e-06, | |
| "loss": 0.6029, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.46397379912663755, | |
| "grad_norm": 1.2533304542951986, | |
| "learning_rate": 5.537324670207865e-06, | |
| "loss": 0.5281, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.4650655021834061, | |
| "grad_norm": 1.5542519820585374, | |
| "learning_rate": 5.526796448263917e-06, | |
| "loss": 0.4954, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.4661572052401747, | |
| "grad_norm": 1.6743052467665012, | |
| "learning_rate": 5.516261527623391e-06, | |
| "loss": 0.5058, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.4672489082969432, | |
| "grad_norm": 1.4149693200756928, | |
| "learning_rate": 5.5057200422473964e-06, | |
| "loss": 0.5571, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.4683406113537118, | |
| "grad_norm": 1.377375482313666, | |
| "learning_rate": 5.49517212618052e-06, | |
| "loss": 0.5241, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.46943231441048033, | |
| "grad_norm": 1.7214155025614608, | |
| "learning_rate": 5.48461791354912e-06, | |
| "loss": 0.5233, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.4705240174672489, | |
| "grad_norm": 1.4624248908282391, | |
| "learning_rate": 5.474057538559622e-06, | |
| "loss": 0.7009, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.47161572052401746, | |
| "grad_norm": 1.3755085340809183, | |
| "learning_rate": 5.463491135496809e-06, | |
| "loss": 0.559, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.47270742358078605, | |
| "grad_norm": 1.8480554892919654, | |
| "learning_rate": 5.452918838722122e-06, | |
| "loss": 0.5358, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.4737991266375546, | |
| "grad_norm": 1.370591853187486, | |
| "learning_rate": 5.44234078267194e-06, | |
| "loss": 0.4598, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.47489082969432317, | |
| "grad_norm": 1.4652044355564495, | |
| "learning_rate": 5.431757101855878e-06, | |
| "loss": 0.5229, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.4759825327510917, | |
| "grad_norm": 1.6442082838896726, | |
| "learning_rate": 5.421167930855078e-06, | |
| "loss": 0.4233, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.47707423580786024, | |
| "grad_norm": 1.4443466098404825, | |
| "learning_rate": 5.410573404320491e-06, | |
| "loss": 0.5414, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.4781659388646288, | |
| "grad_norm": 1.3720527302642247, | |
| "learning_rate": 5.399973656971172e-06, | |
| "loss": 0.5629, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.47925764192139736, | |
| "grad_norm": 1.420619258102723, | |
| "learning_rate": 5.3893688235925585e-06, | |
| "loss": 0.5166, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.48034934497816595, | |
| "grad_norm": 1.5277571440758837, | |
| "learning_rate": 5.378759039034766e-06, | |
| "loss": 0.554, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.4814410480349345, | |
| "grad_norm": 1.4638317307089392, | |
| "learning_rate": 5.368144438210867e-06, | |
| "loss": 0.4772, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.48253275109170307, | |
| "grad_norm": 1.5797672430691585, | |
| "learning_rate": 5.357525156095178e-06, | |
| "loss": 0.5166, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.4836244541484716, | |
| "grad_norm": 1.8314646046493335, | |
| "learning_rate": 5.346901327721542e-06, | |
| "loss": 0.5416, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.4847161572052402, | |
| "grad_norm": 1.6655392592879021, | |
| "learning_rate": 5.3362730881816105e-06, | |
| "loss": 0.5114, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.48580786026200873, | |
| "grad_norm": 1.717857275474268, | |
| "learning_rate": 5.325640572623128e-06, | |
| "loss": 0.5098, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.4868995633187773, | |
| "grad_norm": 1.3491745179920687, | |
| "learning_rate": 5.315003916248214e-06, | |
| "loss": 0.4975, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.48799126637554585, | |
| "grad_norm": 1.2024838454094096, | |
| "learning_rate": 5.304363254311639e-06, | |
| "loss": 0.4725, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.4890829694323144, | |
| "grad_norm": 1.6943727192213853, | |
| "learning_rate": 5.2937187221191095e-06, | |
| "loss": 0.6957, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.490174672489083, | |
| "grad_norm": 1.4214839188213413, | |
| "learning_rate": 5.283070455025549e-06, | |
| "loss": 0.5138, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.4912663755458515, | |
| "grad_norm": 1.7874547320870642, | |
| "learning_rate": 5.272418588433366e-06, | |
| "loss": 0.4976, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.4923580786026201, | |
| "grad_norm": 1.6025810609170663, | |
| "learning_rate": 5.26176325779075e-06, | |
| "loss": 0.4854, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.49344978165938863, | |
| "grad_norm": 1.3537064325091892, | |
| "learning_rate": 5.25110459858993e-06, | |
| "loss": 0.5205, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.4945414847161572, | |
| "grad_norm": 1.4501493604145486, | |
| "learning_rate": 5.240442746365465e-06, | |
| "loss": 0.7607, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.49563318777292575, | |
| "grad_norm": 1.4215275546800406, | |
| "learning_rate": 5.229777836692516e-06, | |
| "loss": 0.4881, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.49672489082969434, | |
| "grad_norm": 1.6725560271729976, | |
| "learning_rate": 5.219110005185121e-06, | |
| "loss": 0.7687, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.4978165938864629, | |
| "grad_norm": 1.5074766611660562, | |
| "learning_rate": 5.208439387494473e-06, | |
| "loss": 0.6112, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.49890829694323147, | |
| "grad_norm": 1.3597839340846551, | |
| "learning_rate": 5.197766119307193e-06, | |
| "loss": 0.4853, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.6691192782977187, | |
| "learning_rate": 5.187090336343605e-06, | |
| "loss": 0.5366, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.5010917030567685, | |
| "grad_norm": 1.2963947269544678, | |
| "learning_rate": 5.176412174356015e-06, | |
| "loss": 0.4896, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.5021834061135371, | |
| "grad_norm": 1.4474979354084274, | |
| "learning_rate": 5.165731769126973e-06, | |
| "loss": 0.4894, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.5032751091703057, | |
| "grad_norm": 1.4441187219316847, | |
| "learning_rate": 5.15504925646756e-06, | |
| "loss": 0.4403, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.5043668122270742, | |
| "grad_norm": 1.4514534998495667, | |
| "learning_rate": 5.14436477221565e-06, | |
| "loss": 0.637, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.5054585152838428, | |
| "grad_norm": 1.5553610264211595, | |
| "learning_rate": 5.133678452234194e-06, | |
| "loss": 0.4584, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.5065502183406113, | |
| "grad_norm": 1.6280569904422444, | |
| "learning_rate": 5.122990432409481e-06, | |
| "loss": 0.5366, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.50764192139738, | |
| "grad_norm": 1.6171411741302026, | |
| "learning_rate": 5.112300848649417e-06, | |
| "loss": 0.5351, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.5087336244541485, | |
| "grad_norm": 1.5642871863260377, | |
| "learning_rate": 5.101609836881791e-06, | |
| "loss": 0.5112, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.509825327510917, | |
| "grad_norm": 1.3618839192737355, | |
| "learning_rate": 5.0909175330525565e-06, | |
| "loss": 0.5649, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.5109170305676856, | |
| "grad_norm": 1.4461003640687564, | |
| "learning_rate": 5.080224073124093e-06, | |
| "loss": 0.4662, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.5120087336244541, | |
| "grad_norm": 1.448092639750284, | |
| "learning_rate": 5.069529593073484e-06, | |
| "loss": 0.5392, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.5131004366812227, | |
| "grad_norm": 1.4766768695379655, | |
| "learning_rate": 5.058834228890778e-06, | |
| "loss": 0.5216, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.5141921397379913, | |
| "grad_norm": 1.388192539641165, | |
| "learning_rate": 5.048138116577276e-06, | |
| "loss": 0.5121, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.5152838427947598, | |
| "grad_norm": 1.442377666445445, | |
| "learning_rate": 5.03744139214378e-06, | |
| "loss": 0.4964, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.5163755458515283, | |
| "grad_norm": 1.4380858560991479, | |
| "learning_rate": 5.026744191608888e-06, | |
| "loss": 0.6296, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.517467248908297, | |
| "grad_norm": 1.4066071827611244, | |
| "learning_rate": 5.0160466509972444e-06, | |
| "loss": 0.5366, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.5185589519650655, | |
| "grad_norm": 1.4771695892234902, | |
| "learning_rate": 5.005348906337819e-06, | |
| "loss": 0.4818, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.519650655021834, | |
| "grad_norm": 1.3482597250692996, | |
| "learning_rate": 4.994651093662181e-06, | |
| "loss": 0.621, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.5207423580786026, | |
| "grad_norm": 1.4656112769819456, | |
| "learning_rate": 4.9839533490027555e-06, | |
| "loss": 0.5968, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.5218340611353712, | |
| "grad_norm": 1.4194678586304768, | |
| "learning_rate": 4.973255808391112e-06, | |
| "loss": 0.4855, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.5229257641921398, | |
| "grad_norm": 1.3385237861754027, | |
| "learning_rate": 4.96255860785622e-06, | |
| "loss": 0.446, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.5240174672489083, | |
| "grad_norm": 1.4199997853851958, | |
| "learning_rate": 4.951861883422725e-06, | |
| "loss": 0.5232, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.5251091703056768, | |
| "grad_norm": 1.4504354793413279, | |
| "learning_rate": 4.941165771109221e-06, | |
| "loss": 0.5941, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.5262008733624454, | |
| "grad_norm": 1.2469715040512819, | |
| "learning_rate": 4.930470406926517e-06, | |
| "loss": 0.5222, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.527292576419214, | |
| "grad_norm": 1.2822739519744986, | |
| "learning_rate": 4.919775926875906e-06, | |
| "loss": 0.4774, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.5283842794759825, | |
| "grad_norm": 1.5801715611186389, | |
| "learning_rate": 4.909082466947443e-06, | |
| "loss": 0.4725, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.5294759825327511, | |
| "grad_norm": 1.474196427294419, | |
| "learning_rate": 4.898390163118207e-06, | |
| "loss": 0.5055, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.5305676855895196, | |
| "grad_norm": 1.2140076028979254, | |
| "learning_rate": 4.887699151350585e-06, | |
| "loss": 0.5698, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.5316593886462883, | |
| "grad_norm": 1.55426631015283, | |
| "learning_rate": 4.877009567590519e-06, | |
| "loss": 0.5059, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.5327510917030568, | |
| "grad_norm": 1.0298037690986848, | |
| "learning_rate": 4.866321547765805e-06, | |
| "loss": 0.5384, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.5338427947598253, | |
| "grad_norm": 1.4188304779909608, | |
| "learning_rate": 4.855635227784349e-06, | |
| "loss": 0.509, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.5349344978165939, | |
| "grad_norm": 1.3254069119369634, | |
| "learning_rate": 4.8449507435324414e-06, | |
| "loss": 0.4632, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.5360262008733624, | |
| "grad_norm": 1.5099230232121612, | |
| "learning_rate": 4.834268230873028e-06, | |
| "loss": 0.4655, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.537117903930131, | |
| "grad_norm": 1.6476817572317615, | |
| "learning_rate": 4.823587825643985e-06, | |
| "loss": 0.4971, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.5382096069868996, | |
| "grad_norm": 1.3898484269183884, | |
| "learning_rate": 4.812909663656393e-06, | |
| "loss": 0.5795, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.5393013100436681, | |
| "grad_norm": 1.3882570065353528, | |
| "learning_rate": 4.802233880692807e-06, | |
| "loss": 0.4848, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.5403930131004366, | |
| "grad_norm": 1.28772744355436, | |
| "learning_rate": 4.791560612505528e-06, | |
| "loss": 0.5398, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.5414847161572053, | |
| "grad_norm": 1.368216878184212, | |
| "learning_rate": 4.780889994814879e-06, | |
| "loss": 0.5512, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.5425764192139738, | |
| "grad_norm": 1.4144230061626577, | |
| "learning_rate": 4.770222163307484e-06, | |
| "loss": 0.4963, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.5436681222707423, | |
| "grad_norm": 1.3794044239685685, | |
| "learning_rate": 4.759557253634534e-06, | |
| "loss": 0.4659, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.5447598253275109, | |
| "grad_norm": 1.524418837426529, | |
| "learning_rate": 4.748895401410071e-06, | |
| "loss": 0.5042, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.5458515283842795, | |
| "grad_norm": 1.6044354987036333, | |
| "learning_rate": 4.738236742209249e-06, | |
| "loss": 0.4827, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5469432314410481, | |
| "grad_norm": 1.5500030376180924, | |
| "learning_rate": 4.727581411566632e-06, | |
| "loss": 0.5257, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.5480349344978166, | |
| "grad_norm": 1.4421277932778935, | |
| "learning_rate": 4.716929544974451e-06, | |
| "loss": 0.5487, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.5491266375545851, | |
| "grad_norm": 1.5990643037111425, | |
| "learning_rate": 4.7062812778808904e-06, | |
| "loss": 0.5438, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.5502183406113537, | |
| "grad_norm": 1.4502389607629171, | |
| "learning_rate": 4.695636745688361e-06, | |
| "loss": 0.4929, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.5513100436681223, | |
| "grad_norm": 1.3204441944279035, | |
| "learning_rate": 4.684996083751785e-06, | |
| "loss": 0.5448, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.5524017467248908, | |
| "grad_norm": 1.4355806504382989, | |
| "learning_rate": 4.674359427376872e-06, | |
| "loss": 0.5551, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.5534934497816594, | |
| "grad_norm": 1.3692565246056831, | |
| "learning_rate": 4.66372691181839e-06, | |
| "loss": 0.5209, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.5545851528384279, | |
| "grad_norm": 1.3879798213129202, | |
| "learning_rate": 4.653098672278458e-06, | |
| "loss": 0.5219, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.5556768558951966, | |
| "grad_norm": 1.481778083844796, | |
| "learning_rate": 4.6424748439048204e-06, | |
| "loss": 0.5543, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.5567685589519651, | |
| "grad_norm": 1.7087241965837792, | |
| "learning_rate": 4.631855561789132e-06, | |
| "loss": 0.5794, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.5578602620087336, | |
| "grad_norm": 1.477176476343145, | |
| "learning_rate": 4.621240960965233e-06, | |
| "loss": 0.5764, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.5589519650655022, | |
| "grad_norm": 1.3365328113829895, | |
| "learning_rate": 4.610631176407442e-06, | |
| "loss": 0.4716, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.5600436681222707, | |
| "grad_norm": 1.3617920239093109, | |
| "learning_rate": 4.600026343028828e-06, | |
| "loss": 0.4821, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.5611353711790393, | |
| "grad_norm": 1.4742907544937358, | |
| "learning_rate": 4.589426595679509e-06, | |
| "loss": 0.5276, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.5622270742358079, | |
| "grad_norm": 1.2133762965906438, | |
| "learning_rate": 4.578832069144922e-06, | |
| "loss": 0.554, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.5633187772925764, | |
| "grad_norm": 1.3499071499642659, | |
| "learning_rate": 4.568242898144121e-06, | |
| "loss": 0.5079, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.5644104803493449, | |
| "grad_norm": 1.716937961960104, | |
| "learning_rate": 4.55765921732806e-06, | |
| "loss": 0.5128, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.5655021834061136, | |
| "grad_norm": 1.4301074339999107, | |
| "learning_rate": 4.547081161277878e-06, | |
| "loss": 0.4896, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.5665938864628821, | |
| "grad_norm": 1.470837341341121, | |
| "learning_rate": 4.53650886450319e-06, | |
| "loss": 0.5082, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.5676855895196506, | |
| "grad_norm": 1.2685205731280647, | |
| "learning_rate": 4.5259424614403774e-06, | |
| "loss": 0.5761, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5687772925764192, | |
| "grad_norm": 1.5260496629984845, | |
| "learning_rate": 4.515382086450879e-06, | |
| "loss": 0.5009, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.5698689956331878, | |
| "grad_norm": 1.3553156778244764, | |
| "learning_rate": 4.50482787381948e-06, | |
| "loss": 0.4602, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.5709606986899564, | |
| "grad_norm": 1.5048924602650442, | |
| "learning_rate": 4.4942799577526035e-06, | |
| "loss": 0.6232, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.5720524017467249, | |
| "grad_norm": 1.4163718659252338, | |
| "learning_rate": 4.483738472376609e-06, | |
| "loss": 0.595, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.5731441048034934, | |
| "grad_norm": 1.256504078133773, | |
| "learning_rate": 4.473203551736082e-06, | |
| "loss": 0.4841, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.574235807860262, | |
| "grad_norm": 1.8309113360149538, | |
| "learning_rate": 4.462675329792135e-06, | |
| "loss": 0.5268, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.5753275109170306, | |
| "grad_norm": 1.4616749715688826, | |
| "learning_rate": 4.4521539404206955e-06, | |
| "loss": 0.5201, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.5764192139737991, | |
| "grad_norm": 1.4866810995067377, | |
| "learning_rate": 4.44163951741081e-06, | |
| "loss": 0.4819, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.5775109170305677, | |
| "grad_norm": 1.2959474071911306, | |
| "learning_rate": 4.431132194462945e-06, | |
| "loss": 0.4599, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.5786026200873362, | |
| "grad_norm": 1.503165783836535, | |
| "learning_rate": 4.4206321051872785e-06, | |
| "loss": 0.5553, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.5796943231441049, | |
| "grad_norm": 1.4738805985403882, | |
| "learning_rate": 4.410139383102009e-06, | |
| "loss": 0.4724, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.5807860262008734, | |
| "grad_norm": 1.331771857702602, | |
| "learning_rate": 4.3996541616316516e-06, | |
| "loss": 0.4447, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.5818777292576419, | |
| "grad_norm": 1.4673541177247749, | |
| "learning_rate": 4.3891765741053465e-06, | |
| "loss": 0.6408, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.5829694323144105, | |
| "grad_norm": 1.4020555533377803, | |
| "learning_rate": 4.378706753755162e-06, | |
| "loss": 0.5344, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.584061135371179, | |
| "grad_norm": 1.6218461769836316, | |
| "learning_rate": 4.368244833714397e-06, | |
| "loss": 0.5655, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.5851528384279476, | |
| "grad_norm": 1.4187950178971758, | |
| "learning_rate": 4.357790947015893e-06, | |
| "loss": 0.5704, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.5862445414847162, | |
| "grad_norm": 1.7413424456473428, | |
| "learning_rate": 4.34734522659034e-06, | |
| "loss": 0.4735, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.5873362445414847, | |
| "grad_norm": 1.4967102117444473, | |
| "learning_rate": 4.3369078052645845e-06, | |
| "loss": 0.4856, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.5884279475982532, | |
| "grad_norm": 1.3591454057028716, | |
| "learning_rate": 4.3264788157599435e-06, | |
| "loss": 0.6019, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.5895196506550219, | |
| "grad_norm": 1.4646999752701761, | |
| "learning_rate": 4.316058390690517e-06, | |
| "loss": 0.6017, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.5906113537117904, | |
| "grad_norm": 1.1539983694011575, | |
| "learning_rate": 4.305646662561498e-06, | |
| "loss": 0.5945, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.5917030567685589, | |
| "grad_norm": 1.499171453680021, | |
| "learning_rate": 4.2952437637674944e-06, | |
| "loss": 0.6161, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.5927947598253275, | |
| "grad_norm": 1.2754209219978767, | |
| "learning_rate": 4.284849826590835e-06, | |
| "loss": 0.4973, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.5938864628820961, | |
| "grad_norm": 1.6027113994625042, | |
| "learning_rate": 4.2744649831998964e-06, | |
| "loss": 0.5622, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.5949781659388647, | |
| "grad_norm": 1.5148520907544383, | |
| "learning_rate": 4.264089365647421e-06, | |
| "loss": 0.5654, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.5960698689956332, | |
| "grad_norm": 1.409327342683608, | |
| "learning_rate": 4.253723105868835e-06, | |
| "loss": 0.5958, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.5971615720524017, | |
| "grad_norm": 1.432267443294078, | |
| "learning_rate": 4.24336633568057e-06, | |
| "loss": 0.4894, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.5982532751091703, | |
| "grad_norm": 1.4322901416716356, | |
| "learning_rate": 4.233019186778392e-06, | |
| "loss": 0.4671, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.5993449781659389, | |
| "grad_norm": 1.4857292999991856, | |
| "learning_rate": 4.222681790735721e-06, | |
| "loss": 0.4431, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.6004366812227074, | |
| "grad_norm": 1.5103278913268807, | |
| "learning_rate": 4.212354279001963e-06, | |
| "loss": 0.5286, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.601528384279476, | |
| "grad_norm": 1.3661166974305412, | |
| "learning_rate": 4.2020367829008335e-06, | |
| "loss": 0.495, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.6026200873362445, | |
| "grad_norm": 1.4657473729106474, | |
| "learning_rate": 4.191729433628692e-06, | |
| "loss": 0.4859, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.6037117903930131, | |
| "grad_norm": 1.37621037582073, | |
| "learning_rate": 4.1814323622528704e-06, | |
| "loss": 0.5261, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.6048034934497817, | |
| "grad_norm": 1.8525067725209052, | |
| "learning_rate": 4.17114569971001e-06, | |
| "loss": 0.6356, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.6058951965065502, | |
| "grad_norm": 1.3676971383382535, | |
| "learning_rate": 4.160869576804391e-06, | |
| "loss": 0.4843, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.6069868995633187, | |
| "grad_norm": 1.454872378059662, | |
| "learning_rate": 4.150604124206275e-06, | |
| "loss": 0.5104, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.6080786026200873, | |
| "grad_norm": 1.6090732359643547, | |
| "learning_rate": 4.140349472450239e-06, | |
| "loss": 0.4617, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.6091703056768559, | |
| "grad_norm": 1.3513538232854985, | |
| "learning_rate": 4.1301057519335225e-06, | |
| "loss": 0.4536, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.6102620087336245, | |
| "grad_norm": 1.4626389154409178, | |
| "learning_rate": 4.119873092914357e-06, | |
| "loss": 0.4666, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.611353711790393, | |
| "grad_norm": 1.5581131305282525, | |
| "learning_rate": 4.10965162551032e-06, | |
| "loss": 0.4777, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.611353711790393, | |
| "eval_accuracy": 0.8165034143027314, | |
| "eval_accuracy_first_token": 0.7887080643694881, | |
| "eval_accuracy_first_token_<": 0.9859781121751026, | |
| "eval_accuracy_first_token_<_total": 2924, | |
| "eval_accuracy_first_token_<|python_tag|>": 0.9255533199195171, | |
| "eval_accuracy_first_token_<|python_tag|>_total": 994, | |
| "eval_accuracy_first_token_Here": 0.572538860103627, | |
| "eval_accuracy_first_token_Here_total": 386, | |
| "eval_accuracy_first_token_The": 0.8994814519345832, | |
| "eval_accuracy_first_token_The_total": 2507, | |
| "eval_accuracy_first_token_To": 0.7873931623931624, | |
| "eval_accuracy_first_token_To_total": 936, | |
| "eval_first_token_param_values": 0.9387768727466952, | |
| "eval_first_token_param_values_total": 14978, | |
| "eval_loss": 0.5781970620155334, | |
| "eval_perplexity": 1.1114759783723378, | |
| "eval_runtime": 391.1056, | |
| "eval_samples_per_second": 1.982, | |
| "eval_steps_per_second": 0.248, | |
| "eval_total_number_first_token": 10999, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.6124454148471615, | |
| "grad_norm": 1.582252798674935, | |
| "learning_rate": 4.099441479696681e-06, | |
| "loss": 0.4602, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.6135371179039302, | |
| "grad_norm": 1.608295893756117, | |
| "learning_rate": 4.089242785304742e-06, | |
| "loss": 0.5076, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.6146288209606987, | |
| "grad_norm": 1.3485004651056143, | |
| "learning_rate": 4.079055672020186e-06, | |
| "loss": 0.4661, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.6157205240174672, | |
| "grad_norm": 1.291513507710692, | |
| "learning_rate": 4.06888026938144e-06, | |
| "loss": 0.5642, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.6168122270742358, | |
| "grad_norm": 1.3170533583485662, | |
| "learning_rate": 4.058716706778011e-06, | |
| "loss": 0.5438, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.6179039301310044, | |
| "grad_norm": 1.4441714827082741, | |
| "learning_rate": 4.048565113448857e-06, | |
| "loss": 0.5078, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.618995633187773, | |
| "grad_norm": 1.6783163773061296, | |
| "learning_rate": 4.038425618480729e-06, | |
| "loss": 0.5576, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.6200873362445415, | |
| "grad_norm": 1.4188507978385323, | |
| "learning_rate": 4.02829835080654e-06, | |
| "loss": 0.5537, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.62117903930131, | |
| "grad_norm": 1.3779540707380475, | |
| "learning_rate": 4.01818343920372e-06, | |
| "loss": 0.4405, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.6222707423580786, | |
| "grad_norm": 1.4499236635379809, | |
| "learning_rate": 4.008081012292587e-06, | |
| "loss": 0.4844, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.6233624454148472, | |
| "grad_norm": 1.523491954701273, | |
| "learning_rate": 3.997991198534692e-06, | |
| "loss": 0.5044, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.6244541484716157, | |
| "grad_norm": 1.4598644387268775, | |
| "learning_rate": 3.98791412623121e-06, | |
| "loss": 0.5019, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.6255458515283843, | |
| "grad_norm": 1.4990035332267648, | |
| "learning_rate": 3.977849923521289e-06, | |
| "loss": 0.5348, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.6266375545851528, | |
| "grad_norm": 1.5319645001618205, | |
| "learning_rate": 3.967798718380435e-06, | |
| "loss": 0.4354, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.6277292576419214, | |
| "grad_norm": 1.3922243251918538, | |
| "learning_rate": 3.957760638618872e-06, | |
| "loss": 0.4926, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.62882096069869, | |
| "grad_norm": 1.533896226358977, | |
| "learning_rate": 3.9477358118799265e-06, | |
| "loss": 0.4322, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.6299126637554585, | |
| "grad_norm": 1.422842914473825, | |
| "learning_rate": 3.9377243656384e-06, | |
| "loss": 0.4712, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.631004366812227, | |
| "grad_norm": 1.2246188538633846, | |
| "learning_rate": 3.927726427198949e-06, | |
| "loss": 0.4364, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.6320960698689956, | |
| "grad_norm": 1.4664888990426166, | |
| "learning_rate": 3.917742123694466e-06, | |
| "loss": 0.4323, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.6331877729257642, | |
| "grad_norm": 1.310290618069973, | |
| "learning_rate": 3.907771582084459e-06, | |
| "loss": 0.5143, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.6342794759825328, | |
| "grad_norm": 1.372424891579117, | |
| "learning_rate": 3.897814929153449e-06, | |
| "loss": 0.4977, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.6353711790393013, | |
| "grad_norm": 1.5281095237236761, | |
| "learning_rate": 3.887872291509343e-06, | |
| "loss": 0.5587, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.6364628820960698, | |
| "grad_norm": 1.3721835230254955, | |
| "learning_rate": 3.877943795581833e-06, | |
| "loss": 0.4939, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.6375545851528385, | |
| "grad_norm": 1.388427881647579, | |
| "learning_rate": 3.868029567620787e-06, | |
| "loss": 0.4247, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.638646288209607, | |
| "grad_norm": 1.497847291110233, | |
| "learning_rate": 3.858129733694641e-06, | |
| "loss": 0.5298, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.6397379912663755, | |
| "grad_norm": 1.4255242399699755, | |
| "learning_rate": 3.848244419688801e-06, | |
| "loss": 0.5293, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.6408296943231441, | |
| "grad_norm": 1.4552294636702017, | |
| "learning_rate": 3.838373751304035e-06, | |
| "loss": 0.4229, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.6419213973799127, | |
| "grad_norm": 1.626066546172521, | |
| "learning_rate": 3.828517854054881e-06, | |
| "loss": 0.5642, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.6430131004366813, | |
| "grad_norm": 1.456810943997373, | |
| "learning_rate": 3.818676853268048e-06, | |
| "loss": 0.5599, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.6441048034934498, | |
| "grad_norm": 1.4719729624676086, | |
| "learning_rate": 3.8088508740808243e-06, | |
| "loss": 0.5027, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.6451965065502183, | |
| "grad_norm": 1.5160593159695923, | |
| "learning_rate": 3.79904004143948e-06, | |
| "loss": 0.5762, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.6462882096069869, | |
| "grad_norm": 1.5202694953006204, | |
| "learning_rate": 3.7892444800976896e-06, | |
| "loss": 0.4658, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.6473799126637555, | |
| "grad_norm": 1.665985658035627, | |
| "learning_rate": 3.7794643146149357e-06, | |
| "loss": 0.5121, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.648471615720524, | |
| "grad_norm": 1.4305093245291967, | |
| "learning_rate": 3.769699669354928e-06, | |
| "loss": 0.5636, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.6495633187772926, | |
| "grad_norm": 1.3175981932047456, | |
| "learning_rate": 3.759950668484024e-06, | |
| "loss": 0.5314, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.6506550218340611, | |
| "grad_norm": 1.4531299264572408, | |
| "learning_rate": 3.750217435969648e-06, | |
| "loss": 0.4879, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.6517467248908297, | |
| "grad_norm": 1.4355775633746033, | |
| "learning_rate": 3.7405000955787146e-06, | |
| "loss": 0.4265, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.6528384279475983, | |
| "grad_norm": 1.3377384511360848, | |
| "learning_rate": 3.730798770876057e-06, | |
| "loss": 0.5584, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.6539301310043668, | |
| "grad_norm": 1.3618246171688768, | |
| "learning_rate": 3.721113585222853e-06, | |
| "loss": 0.5308, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.6550218340611353, | |
| "grad_norm": 1.5237374024272177, | |
| "learning_rate": 3.711444661775057e-06, | |
| "loss": 0.5637, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6561135371179039, | |
| "grad_norm": 1.4476718647379752, | |
| "learning_rate": 3.7017921234818383e-06, | |
| "loss": 0.5441, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.6572052401746725, | |
| "grad_norm": 1.5976684200512656, | |
| "learning_rate": 3.6921560930840103e-06, | |
| "loss": 0.5968, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.6582969432314411, | |
| "grad_norm": 1.4849123279512841, | |
| "learning_rate": 3.6825366931124744e-06, | |
| "loss": 0.4681, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.6593886462882096, | |
| "grad_norm": 1.4283815786057126, | |
| "learning_rate": 3.672934045886661e-06, | |
| "loss": 0.5145, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.6604803493449781, | |
| "grad_norm": 1.4467852106957109, | |
| "learning_rate": 3.6633482735129765e-06, | |
| "loss": 0.5078, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.6615720524017468, | |
| "grad_norm": 1.3301330189214893, | |
| "learning_rate": 3.6537794978832454e-06, | |
| "loss": 0.4711, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.6626637554585153, | |
| "grad_norm": 1.2939570426381055, | |
| "learning_rate": 3.6442278406731616e-06, | |
| "loss": 0.5188, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.6637554585152838, | |
| "grad_norm": 1.4335427746020373, | |
| "learning_rate": 3.6346934233407475e-06, | |
| "loss": 0.4148, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.6648471615720524, | |
| "grad_norm": 1.312602515327031, | |
| "learning_rate": 3.6251763671248022e-06, | |
| "loss": 0.4515, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.665938864628821, | |
| "grad_norm": 1.4935409138139255, | |
| "learning_rate": 3.615676793043361e-06, | |
| "loss": 0.4728, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.6670305676855895, | |
| "grad_norm": 1.564394383484283, | |
| "learning_rate": 3.6061948218921587e-06, | |
| "loss": 0.5202, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.6681222707423581, | |
| "grad_norm": 1.420319000780053, | |
| "learning_rate": 3.5967305742430954e-06, | |
| "loss": 0.4943, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.6692139737991266, | |
| "grad_norm": 1.3312001561036553, | |
| "learning_rate": 3.5872841704426983e-06, | |
| "loss": 0.4934, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.6703056768558951, | |
| "grad_norm": 1.3965985189004924, | |
| "learning_rate": 3.5778557306105945e-06, | |
| "loss": 0.5391, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.6713973799126638, | |
| "grad_norm": 1.3680149448615597, | |
| "learning_rate": 3.5684453746379806e-06, | |
| "loss": 0.538, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.6724890829694323, | |
| "grad_norm": 1.542142361004548, | |
| "learning_rate": 3.5590532221861053e-06, | |
| "loss": 0.4204, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.6735807860262009, | |
| "grad_norm": 1.34324454974894, | |
| "learning_rate": 3.5496793926847383e-06, | |
| "loss": 0.5017, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.6746724890829694, | |
| "grad_norm": 1.3232757727120024, | |
| "learning_rate": 3.540324005330661e-06, | |
| "loss": 0.4585, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.675764192139738, | |
| "grad_norm": 1.416803667589595, | |
| "learning_rate": 3.5309871790861385e-06, | |
| "loss": 0.4689, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.6768558951965066, | |
| "grad_norm": 1.5089099592021908, | |
| "learning_rate": 3.521669032677423e-06, | |
| "loss": 0.4154, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.6779475982532751, | |
| "grad_norm": 1.85540232423345, | |
| "learning_rate": 3.5123696845932294e-06, | |
| "loss": 0.445, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.6790393013100436, | |
| "grad_norm": 1.4563985724787059, | |
| "learning_rate": 3.5030892530832416e-06, | |
| "loss": 0.5235, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.6801310043668122, | |
| "grad_norm": 1.3838318346145242, | |
| "learning_rate": 3.4938278561565924e-06, | |
| "loss": 0.6341, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.6812227074235808, | |
| "grad_norm": 1.31089716942976, | |
| "learning_rate": 3.4845856115803815e-06, | |
| "loss": 0.4908, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.6823144104803494, | |
| "grad_norm": 1.2353223624644207, | |
| "learning_rate": 3.4753626368781625e-06, | |
| "loss": 0.4287, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.6834061135371179, | |
| "grad_norm": 1.6062380899237767, | |
| "learning_rate": 3.4661590493284615e-06, | |
| "loss": 0.4917, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.6844978165938864, | |
| "grad_norm": 1.5759254996812047, | |
| "learning_rate": 3.456974965963271e-06, | |
| "loss": 0.4592, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.6855895196506551, | |
| "grad_norm": 1.5669304480812016, | |
| "learning_rate": 3.447810503566579e-06, | |
| "loss": 0.484, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.6866812227074236, | |
| "grad_norm": 1.5033628816852151, | |
| "learning_rate": 3.438665778672867e-06, | |
| "loss": 0.5692, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.6877729257641921, | |
| "grad_norm": 1.4892610870512741, | |
| "learning_rate": 3.429540907565644e-06, | |
| "loss": 0.5574, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.6888646288209607, | |
| "grad_norm": 1.4346767102540499, | |
| "learning_rate": 3.4204360062759518e-06, | |
| "loss": 0.4922, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.6899563318777293, | |
| "grad_norm": 1.6385962403810959, | |
| "learning_rate": 3.4113511905809043e-06, | |
| "loss": 0.5122, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.6910480349344978, | |
| "grad_norm": 1.5484369704845515, | |
| "learning_rate": 3.4022865760022044e-06, | |
| "loss": 0.5603, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.6921397379912664, | |
| "grad_norm": 1.3328218691814475, | |
| "learning_rate": 3.3932422778046854e-06, | |
| "loss": 0.4696, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.6932314410480349, | |
| "grad_norm": 1.3201936226659459, | |
| "learning_rate": 3.3842184109948303e-06, | |
| "loss": 0.6217, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.6943231441048034, | |
| "grad_norm": 1.463942512713303, | |
| "learning_rate": 3.3752150903193265e-06, | |
| "loss": 0.622, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.6954148471615721, | |
| "grad_norm": 1.6139395561000496, | |
| "learning_rate": 3.366232430263593e-06, | |
| "loss": 0.4245, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.6965065502183406, | |
| "grad_norm": 1.3285149685117137, | |
| "learning_rate": 3.357270545050337e-06, | |
| "loss": 0.4324, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.6975982532751092, | |
| "grad_norm": 1.5088166603297204, | |
| "learning_rate": 3.3483295486380846e-06, | |
| "loss": 0.4563, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.6986899563318777, | |
| "grad_norm": 1.3095999354264778, | |
| "learning_rate": 3.3394095547197526e-06, | |
| "loss": 0.4683, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.6997816593886463, | |
| "grad_norm": 1.58544588878384, | |
| "learning_rate": 3.330510676721183e-06, | |
| "loss": 0.5485, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.7008733624454149, | |
| "grad_norm": 1.495660640862744, | |
| "learning_rate": 3.3216330277997157e-06, | |
| "loss": 0.5082, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.7019650655021834, | |
| "grad_norm": 1.3658052652721495, | |
| "learning_rate": 3.312776720842741e-06, | |
| "loss": 0.4509, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.7030567685589519, | |
| "grad_norm": 1.4640768663316701, | |
| "learning_rate": 3.3039418684662676e-06, | |
| "loss": 0.5417, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.7041484716157205, | |
| "grad_norm": 1.4247855209179192, | |
| "learning_rate": 3.295128583013486e-06, | |
| "loss": 0.4318, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.7052401746724891, | |
| "grad_norm": 1.3800459453105371, | |
| "learning_rate": 3.286336976553352e-06, | |
| "loss": 0.5208, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.7063318777292577, | |
| "grad_norm": 1.2554511851900485, | |
| "learning_rate": 3.277567160879144e-06, | |
| "loss": 0.4666, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.7074235807860262, | |
| "grad_norm": 1.629496057033825, | |
| "learning_rate": 3.268819247507056e-06, | |
| "loss": 0.5119, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.7085152838427947, | |
| "grad_norm": 1.679530653212758, | |
| "learning_rate": 3.260093347674772e-06, | |
| "loss": 0.5256, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.7096069868995634, | |
| "grad_norm": 1.5573971849859753, | |
| "learning_rate": 3.251389572340056e-06, | |
| "loss": 0.5222, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.7106986899563319, | |
| "grad_norm": 1.3889141401111582, | |
| "learning_rate": 3.2427080321793374e-06, | |
| "loss": 0.4432, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.7117903930131004, | |
| "grad_norm": 1.3127355149090743, | |
| "learning_rate": 3.2340488375863047e-06, | |
| "loss": 0.5384, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.712882096069869, | |
| "grad_norm": 1.3983921070638459, | |
| "learning_rate": 3.225412098670504e-06, | |
| "loss": 0.4823, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.7139737991266376, | |
| "grad_norm": 1.4752891184006638, | |
| "learning_rate": 3.216797925255936e-06, | |
| "loss": 0.5408, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.7150655021834061, | |
| "grad_norm": 1.4616253015066785, | |
| "learning_rate": 3.2082064268796592e-06, | |
| "loss": 0.5643, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.7161572052401747, | |
| "grad_norm": 1.6766027674126829, | |
| "learning_rate": 3.1996377127903985e-06, | |
| "loss": 0.5628, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.7172489082969432, | |
| "grad_norm": 1.5205329442665274, | |
| "learning_rate": 3.1910918919471596e-06, | |
| "loss": 0.514, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.7183406113537117, | |
| "grad_norm": 1.6037099096063592, | |
| "learning_rate": 3.1825690730178357e-06, | |
| "loss": 0.399, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.7194323144104804, | |
| "grad_norm": 1.7203937525493616, | |
| "learning_rate": 3.17406936437783e-06, | |
| "loss": 0.4892, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.7205240174672489, | |
| "grad_norm": 1.6170397886834587, | |
| "learning_rate": 3.165592874108679e-06, | |
| "loss": 0.4833, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.7216157205240175, | |
| "grad_norm": 1.4171579690456109, | |
| "learning_rate": 3.1571397099966767e-06, | |
| "loss": 0.4428, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.722707423580786, | |
| "grad_norm": 1.7081299315746081, | |
| "learning_rate": 3.148709979531502e-06, | |
| "loss": 0.4435, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.7237991266375546, | |
| "grad_norm": 1.4489036867577625, | |
| "learning_rate": 3.1403037899048547e-06, | |
| "loss": 0.4866, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.7248908296943232, | |
| "grad_norm": 1.6131445114377558, | |
| "learning_rate": 3.1319212480090895e-06, | |
| "loss": 0.4946, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.7259825327510917, | |
| "grad_norm": 1.8063942650562066, | |
| "learning_rate": 3.123562460435863e-06, | |
| "loss": 0.5204, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.7270742358078602, | |
| "grad_norm": 1.7518173609793894, | |
| "learning_rate": 3.115227533474771e-06, | |
| "loss": 0.5224, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.7281659388646288, | |
| "grad_norm": 1.6254743756924752, | |
| "learning_rate": 3.1069165731119987e-06, | |
| "loss": 0.5313, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.7292576419213974, | |
| "grad_norm": 1.3009078308401743, | |
| "learning_rate": 3.0986296850289756e-06, | |
| "loss": 0.5709, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.730349344978166, | |
| "grad_norm": 1.448104409567261, | |
| "learning_rate": 3.0903669746010312e-06, | |
| "loss": 0.4633, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.7314410480349345, | |
| "grad_norm": 1.4606508569088716, | |
| "learning_rate": 3.0821285468960542e-06, | |
| "loss": 0.5297, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.732532751091703, | |
| "grad_norm": 1.4136587749469944, | |
| "learning_rate": 3.0739145066731534e-06, | |
| "loss": 0.5312, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.7336244541484717, | |
| "grad_norm": 1.5905997409531767, | |
| "learning_rate": 3.0657249583813307e-06, | |
| "loss": 0.5227, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.7347161572052402, | |
| "grad_norm": 1.5538511136048576, | |
| "learning_rate": 3.0575600061581518e-06, | |
| "loss": 0.5521, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.7358078602620087, | |
| "grad_norm": 1.486422100568419, | |
| "learning_rate": 3.0494197538284193e-06, | |
| "loss": 0.5114, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.7368995633187773, | |
| "grad_norm": 1.3670813674791609, | |
| "learning_rate": 3.041304304902855e-06, | |
| "loss": 0.4841, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.7379912663755459, | |
| "grad_norm": 1.334243639446627, | |
| "learning_rate": 3.033213762576781e-06, | |
| "loss": 0.4348, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.7390829694323144, | |
| "grad_norm": 1.3652145593061606, | |
| "learning_rate": 3.0251482297288125e-06, | |
| "loss": 0.5032, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.740174672489083, | |
| "grad_norm": 1.5217375260885644, | |
| "learning_rate": 3.0171078089195447e-06, | |
| "loss": 0.5636, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.7412663755458515, | |
| "grad_norm": 1.382594820324151, | |
| "learning_rate": 3.0090926023902494e-06, | |
| "loss": 0.4378, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.74235807860262, | |
| "grad_norm": 1.45824384976189, | |
| "learning_rate": 3.001102712061577e-06, | |
| "loss": 0.4924, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.7434497816593887, | |
| "grad_norm": 1.3772141004468592, | |
| "learning_rate": 2.9931382395322595e-06, | |
| "loss": 0.5824, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.7445414847161572, | |
| "grad_norm": 1.5056226458754594, | |
| "learning_rate": 2.985199286077818e-06, | |
| "loss": 0.5283, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.7456331877729258, | |
| "grad_norm": 1.2862985634824098, | |
| "learning_rate": 2.977285952649276e-06, | |
| "loss": 0.4862, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.7467248908296943, | |
| "grad_norm": 1.358956864438878, | |
| "learning_rate": 2.9693983398718738e-06, | |
| "loss": 0.4513, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.7478165938864629, | |
| "grad_norm": 1.681388781002674, | |
| "learning_rate": 2.9615365480437903e-06, | |
| "loss": 0.4229, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.7489082969432315, | |
| "grad_norm": 1.7706615580410843, | |
| "learning_rate": 2.95370067713487e-06, | |
| "loss": 0.4856, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.5889451935714294, | |
| "learning_rate": 2.9458908267853452e-06, | |
| "loss": 0.4419, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.7510917030567685, | |
| "grad_norm": 1.544459411681686, | |
| "learning_rate": 2.9381070963045776e-06, | |
| "loss": 0.5503, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.7521834061135371, | |
| "grad_norm": 1.4057907314213063, | |
| "learning_rate": 2.930349584669787e-06, | |
| "loss": 0.5223, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.7532751091703057, | |
| "grad_norm": 1.5378927118318613, | |
| "learning_rate": 2.922618390524799e-06, | |
| "loss": 0.5557, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.7543668122270742, | |
| "grad_norm": 1.4035201644525876, | |
| "learning_rate": 2.914913612178787e-06, | |
| "loss": 0.4913, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.7554585152838428, | |
| "grad_norm": 1.561354983159288, | |
| "learning_rate": 2.9072353476050254e-06, | |
| "loss": 0.4545, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.7565502183406113, | |
| "grad_norm": 1.2907793257634048, | |
| "learning_rate": 2.8995836944396394e-06, | |
| "loss": 0.545, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.75764192139738, | |
| "grad_norm": 1.429751557254624, | |
| "learning_rate": 2.891958749980369e-06, | |
| "loss": 0.5632, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.7587336244541485, | |
| "grad_norm": 1.3060316717397793, | |
| "learning_rate": 2.8843606111853247e-06, | |
| "loss": 0.4993, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.759825327510917, | |
| "grad_norm": 1.3881628331724296, | |
| "learning_rate": 2.8767893746717614e-06, | |
| "loss": 0.4583, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.7609170305676856, | |
| "grad_norm": 1.2188750020693087, | |
| "learning_rate": 2.869245136714846e-06, | |
| "loss": 0.4878, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.7620087336244541, | |
| "grad_norm": 1.3688514324159553, | |
| "learning_rate": 2.861727993246437e-06, | |
| "loss": 0.5454, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.7631004366812227, | |
| "grad_norm": 1.4693627823600672, | |
| "learning_rate": 2.854238039853855e-06, | |
| "loss": 0.4887, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.7641921397379913, | |
| "grad_norm": 1.4063749888320647, | |
| "learning_rate": 2.8467753717786818e-06, | |
| "loss": 0.514, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7652838427947598, | |
| "grad_norm": 1.7262735326314635, | |
| "learning_rate": 2.8393400839155354e-06, | |
| "loss": 0.5922, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.7663755458515283, | |
| "grad_norm": 1.2160175934089779, | |
| "learning_rate": 2.831932270810875e-06, | |
| "loss": 0.4369, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.767467248908297, | |
| "grad_norm": 1.5736504616224942, | |
| "learning_rate": 2.824552026661787e-06, | |
| "loss": 0.3896, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.7685589519650655, | |
| "grad_norm": 1.334413826428926, | |
| "learning_rate": 2.8171994453147993e-06, | |
| "loss": 0.4353, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.769650655021834, | |
| "grad_norm": 1.2822977853771844, | |
| "learning_rate": 2.8098746202646773e-06, | |
| "loss": 0.4925, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.7707423580786026, | |
| "grad_norm": 1.5498532031156085, | |
| "learning_rate": 2.8025776446532452e-06, | |
| "loss": 0.5633, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.7718340611353712, | |
| "grad_norm": 1.2449777128846868, | |
| "learning_rate": 2.7953086112681894e-06, | |
| "loss": 0.4179, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.7729257641921398, | |
| "grad_norm": 1.5646874311946461, | |
| "learning_rate": 2.7880676125418926e-06, | |
| "loss": 0.4719, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.7740174672489083, | |
| "grad_norm": 1.3665728473586682, | |
| "learning_rate": 2.7808547405502437e-06, | |
| "loss": 0.3991, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.7751091703056768, | |
| "grad_norm": 1.501171484718762, | |
| "learning_rate": 2.773670087011484e-06, | |
| "loss": 0.5744, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.7762008733624454, | |
| "grad_norm": 1.6011435161099792, | |
| "learning_rate": 2.7665137432850203e-06, | |
| "loss": 0.5695, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.777292576419214, | |
| "grad_norm": 1.5658472473873142, | |
| "learning_rate": 2.759385800370284e-06, | |
| "loss": 0.5355, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.7783842794759825, | |
| "grad_norm": 1.4901124794820078, | |
| "learning_rate": 2.7522863489055586e-06, | |
| "loss": 0.462, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.7794759825327511, | |
| "grad_norm": 1.5298330174312822, | |
| "learning_rate": 2.7452154791668375e-06, | |
| "loss": 0.5594, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.7805676855895196, | |
| "grad_norm": 1.4147385808285522, | |
| "learning_rate": 2.7381732810666647e-06, | |
| "loss": 0.4539, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.7816593886462883, | |
| "grad_norm": 1.4608859654086022, | |
| "learning_rate": 2.731159844153005e-06, | |
| "loss": 0.4534, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.7827510917030568, | |
| "grad_norm": 1.5117169302712787, | |
| "learning_rate": 2.724175257608094e-06, | |
| "loss": 0.5258, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.7838427947598253, | |
| "grad_norm": 1.2658257724311885, | |
| "learning_rate": 2.7172196102473122e-06, | |
| "loss": 0.4096, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.7849344978165939, | |
| "grad_norm": 1.551758238229153, | |
| "learning_rate": 2.710292990518049e-06, | |
| "loss": 0.6434, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.7860262008733624, | |
| "grad_norm": 1.4076094951453688, | |
| "learning_rate": 2.703395486498582e-06, | |
| "loss": 0.5038, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.787117903930131, | |
| "grad_norm": 1.5353091781216452, | |
| "learning_rate": 2.6965271858969556e-06, | |
| "loss": 0.5531, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.7882096069868996, | |
| "grad_norm": 1.443747275507301, | |
| "learning_rate": 2.6896881760498684e-06, | |
| "loss": 0.6324, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.7893013100436681, | |
| "grad_norm": 2.2066790531801828, | |
| "learning_rate": 2.6828785439215574e-06, | |
| "loss": 0.4867, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.7903930131004366, | |
| "grad_norm": 1.3039506134634729, | |
| "learning_rate": 2.676098376102697e-06, | |
| "loss": 0.3967, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.7914847161572053, | |
| "grad_norm": 1.4973744024054314, | |
| "learning_rate": 2.6693477588092945e-06, | |
| "loss": 0.4741, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.7925764192139738, | |
| "grad_norm": 1.6625078614001911, | |
| "learning_rate": 2.6626267778815984e-06, | |
| "loss": 0.4454, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.7936681222707423, | |
| "grad_norm": 1.5392825520879216, | |
| "learning_rate": 2.655935518783e-06, | |
| "loss": 0.4482, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.7947598253275109, | |
| "grad_norm": 1.2279287786501736, | |
| "learning_rate": 2.649274066598952e-06, | |
| "loss": 0.4494, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.7958515283842795, | |
| "grad_norm": 1.644472266754525, | |
| "learning_rate": 2.6426425060358877e-06, | |
| "loss": 0.4583, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.7969432314410481, | |
| "grad_norm": 1.3349263839032603, | |
| "learning_rate": 2.6360409214201368e-06, | |
| "loss": 0.5458, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.7980349344978166, | |
| "grad_norm": 1.353325917442229, | |
| "learning_rate": 2.6294693966968603e-06, | |
| "loss": 0.4996, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.7991266375545851, | |
| "grad_norm": 1.75020327032623, | |
| "learning_rate": 2.622928015428979e-06, | |
| "loss": 0.5484, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.8002183406113537, | |
| "grad_norm": 1.7507615696882926, | |
| "learning_rate": 2.616416860796113e-06, | |
| "loss": 0.5119, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.8013100436681223, | |
| "grad_norm": 1.5319391439632237, | |
| "learning_rate": 2.6099360155935237e-06, | |
| "loss": 0.5243, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.8024017467248908, | |
| "grad_norm": 1.271596095829789, | |
| "learning_rate": 2.603485562231059e-06, | |
| "loss": 0.4004, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.8034934497816594, | |
| "grad_norm": 1.5168565334204749, | |
| "learning_rate": 2.5970655827321066e-06, | |
| "loss": 0.5833, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.8045851528384279, | |
| "grad_norm": 1.425305835480303, | |
| "learning_rate": 2.590676158732555e-06, | |
| "loss": 0.6107, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.8056768558951966, | |
| "grad_norm": 1.433983687848821, | |
| "learning_rate": 2.584317371479747e-06, | |
| "loss": 0.5336, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.8067685589519651, | |
| "grad_norm": 1.5171771559883467, | |
| "learning_rate": 2.5779893018314545e-06, | |
| "loss": 0.4953, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.8078602620087336, | |
| "grad_norm": 1.2466215612737617, | |
| "learning_rate": 2.571692030254844e-06, | |
| "loss": 0.4111, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.8089519650655022, | |
| "grad_norm": 1.2958293442264301, | |
| "learning_rate": 2.5654256368254606e-06, | |
| "loss": 0.4796, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.8100436681222707, | |
| "grad_norm": 1.366221486976765, | |
| "learning_rate": 2.5591902012262018e-06, | |
| "loss": 0.479, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.8111353711790393, | |
| "grad_norm": 1.4757039124958742, | |
| "learning_rate": 2.552985802746311e-06, | |
| "loss": 0.4448, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.8122270742358079, | |
| "grad_norm": 1.3937974822769155, | |
| "learning_rate": 2.5468125202803624e-06, | |
| "loss": 0.4178, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.8133187772925764, | |
| "grad_norm": 1.595101474275964, | |
| "learning_rate": 2.5406704323272677e-06, | |
| "loss": 0.5009, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.8144104803493449, | |
| "grad_norm": 1.4109055917732352, | |
| "learning_rate": 2.5345596169892675e-06, | |
| "loss": 0.3471, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.8155021834061136, | |
| "grad_norm": 1.434951280260996, | |
| "learning_rate": 2.5284801519709426e-06, | |
| "loss": 0.5631, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.8165938864628821, | |
| "grad_norm": 1.3792152619204336, | |
| "learning_rate": 2.522432114578228e-06, | |
| "loss": 0.5299, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.8176855895196506, | |
| "grad_norm": 1.2965847499667549, | |
| "learning_rate": 2.5164155817174274e-06, | |
| "loss": 0.5049, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.8187772925764192, | |
| "grad_norm": 1.7699360693412276, | |
| "learning_rate": 2.5104306298942345e-06, | |
| "loss": 0.5608, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.8198689956331878, | |
| "grad_norm": 1.4105028322760174, | |
| "learning_rate": 2.5044773352127624e-06, | |
| "loss": 0.5555, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.8209606986899564, | |
| "grad_norm": 1.7025442354294646, | |
| "learning_rate": 2.4985557733745733e-06, | |
| "loss": 0.4905, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.8220524017467249, | |
| "grad_norm": 1.5676630984513402, | |
| "learning_rate": 2.4926660196777203e-06, | |
| "loss": 0.4812, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.8231441048034934, | |
| "grad_norm": 1.5833360948505186, | |
| "learning_rate": 2.486808149015785e-06, | |
| "loss": 0.5356, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.824235807860262, | |
| "grad_norm": 1.4178730350285935, | |
| "learning_rate": 2.480982235876927e-06, | |
| "loss": 0.5062, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.8253275109170306, | |
| "grad_norm": 1.5145732571443271, | |
| "learning_rate": 2.4751883543429365e-06, | |
| "loss": 0.502, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.8264192139737991, | |
| "grad_norm": 1.507197459921713, | |
| "learning_rate": 2.4694265780882952e-06, | |
| "loss": 0.4461, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.8275109170305677, | |
| "grad_norm": 1.6314783612840427, | |
| "learning_rate": 2.4636969803792347e-06, | |
| "loss": 0.5186, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.8286026200873362, | |
| "grad_norm": 1.2676790104347715, | |
| "learning_rate": 2.457999634072807e-06, | |
| "loss": 0.4751, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.8296943231441049, | |
| "grad_norm": 1.48067735218539, | |
| "learning_rate": 2.4523346116159586e-06, | |
| "loss": 0.4172, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.8307860262008734, | |
| "grad_norm": 1.5204394932078549, | |
| "learning_rate": 2.446701985044611e-06, | |
| "loss": 0.5344, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.8318777292576419, | |
| "grad_norm": 1.5467915314176128, | |
| "learning_rate": 2.441101825982738e-06, | |
| "loss": 0.4792, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.8329694323144105, | |
| "grad_norm": 1.5788360356770055, | |
| "learning_rate": 2.4355342056414632e-06, | |
| "loss": 0.555, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.834061135371179, | |
| "grad_norm": 1.3178495446349063, | |
| "learning_rate": 2.4299991948181503e-06, | |
| "loss": 0.4323, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.8351528384279476, | |
| "grad_norm": 1.6402225350694717, | |
| "learning_rate": 2.4244968638955014e-06, | |
| "loss": 0.5475, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.8362445414847162, | |
| "grad_norm": 1.3512463029540156, | |
| "learning_rate": 2.419027282840665e-06, | |
| "loss": 0.5547, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.8373362445414847, | |
| "grad_norm": 1.4426233346314499, | |
| "learning_rate": 2.4135905212043453e-06, | |
| "loss": 0.5333, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.8384279475982532, | |
| "grad_norm": 1.5182561345065841, | |
| "learning_rate": 2.408186648119918e-06, | |
| "loss": 0.5274, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.8395196506550219, | |
| "grad_norm": 1.5815269832741783, | |
| "learning_rate": 2.40281573230255e-06, | |
| "loss": 0.5156, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.8406113537117904, | |
| "grad_norm": 1.3659003932004359, | |
| "learning_rate": 2.397477842048328e-06, | |
| "loss": 0.522, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.8417030567685589, | |
| "grad_norm": 1.4916431814675448, | |
| "learning_rate": 2.3921730452333865e-06, | |
| "loss": 0.4973, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.8427947598253275, | |
| "grad_norm": 1.5921458181749883, | |
| "learning_rate": 2.3869014093130484e-06, | |
| "loss": 0.5033, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.8438864628820961, | |
| "grad_norm": 1.3488255455196068, | |
| "learning_rate": 2.3816630013209653e-06, | |
| "loss": 0.4981, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.8449781659388647, | |
| "grad_norm": 1.4884625204401833, | |
| "learning_rate": 2.3764578878682658e-06, | |
| "loss": 0.4492, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.8460698689956332, | |
| "grad_norm": 1.403679565738453, | |
| "learning_rate": 2.371286135142706e-06, | |
| "loss": 0.4386, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.8471615720524017, | |
| "grad_norm": 1.5140972915615711, | |
| "learning_rate": 2.366147808907833e-06, | |
| "loss": 0.3872, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.8482532751091703, | |
| "grad_norm": 1.2931794718268395, | |
| "learning_rate": 2.3610429745021433e-06, | |
| "loss": 0.5277, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.8493449781659389, | |
| "grad_norm": 1.65094958261332, | |
| "learning_rate": 2.3559716968382566e-06, | |
| "loss": 0.5036, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.8504366812227074, | |
| "grad_norm": 1.3484114437655963, | |
| "learning_rate": 2.3509340404020845e-06, | |
| "loss": 0.4085, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.851528384279476, | |
| "grad_norm": 1.5024637038030444, | |
| "learning_rate": 2.3459300692520177e-06, | |
| "loss": 0.5749, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.8526200873362445, | |
| "grad_norm": 1.7268258750716359, | |
| "learning_rate": 2.340959847018106e-06, | |
| "loss": 0.5552, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.8537117903930131, | |
| "grad_norm": 1.4471671123643692, | |
| "learning_rate": 2.336023436901253e-06, | |
| "loss": 0.5025, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.8548034934497817, | |
| "grad_norm": 1.3544346171446477, | |
| "learning_rate": 2.3311209016724047e-06, | |
| "loss": 0.4235, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.8558951965065502, | |
| "grad_norm": 1.668582349166636, | |
| "learning_rate": 2.3262523036717657e-06, | |
| "loss": 0.5257, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.8569868995633187, | |
| "grad_norm": 1.849430371551395, | |
| "learning_rate": 2.32141770480799e-06, | |
| "loss": 0.4966, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.8580786026200873, | |
| "grad_norm": 1.4257795015278287, | |
| "learning_rate": 2.3166171665574068e-06, | |
| "loss": 0.3834, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.8591703056768559, | |
| "grad_norm": 1.5867478801563297, | |
| "learning_rate": 2.3118507499632304e-06, | |
| "loss": 0.512, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.8602620087336245, | |
| "grad_norm": 1.6022282216088801, | |
| "learning_rate": 2.3071185156347896e-06, | |
| "loss": 0.5756, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.861353711790393, | |
| "grad_norm": 1.653250014006023, | |
| "learning_rate": 2.302420523746752e-06, | |
| "loss": 0.4727, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.8624454148471615, | |
| "grad_norm": 1.5698471805784935, | |
| "learning_rate": 2.297756834038365e-06, | |
| "loss": 0.5493, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.8635371179039302, | |
| "grad_norm": 1.3890720239074847, | |
| "learning_rate": 2.293127505812687e-06, | |
| "loss": 0.442, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.8646288209606987, | |
| "grad_norm": 1.3473874382458382, | |
| "learning_rate": 2.288532597935846e-06, | |
| "loss": 0.4535, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.8657205240174672, | |
| "grad_norm": 1.5754647979260004, | |
| "learning_rate": 2.2839721688362764e-06, | |
| "loss": 0.4703, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.8668122270742358, | |
| "grad_norm": 1.3874265635155072, | |
| "learning_rate": 2.27944627650399e-06, | |
| "loss": 0.4903, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.8679039301310044, | |
| "grad_norm": 1.5255706234718944, | |
| "learning_rate": 2.2749549784898288e-06, | |
| "loss": 0.4296, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.868995633187773, | |
| "grad_norm": 1.3874604308608358, | |
| "learning_rate": 2.2704983319047363e-06, | |
| "loss": 0.436, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.8700873362445415, | |
| "grad_norm": 1.4466236624321545, | |
| "learning_rate": 2.2660763934190323e-06, | |
| "loss": 0.5945, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.87117903930131, | |
| "grad_norm": 1.6017282651545426, | |
| "learning_rate": 2.261689219261693e-06, | |
| "loss": 0.5227, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.8722707423580786, | |
| "grad_norm": 1.63954651756713, | |
| "learning_rate": 2.2573368652196324e-06, | |
| "loss": 0.5144, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.8733624454148472, | |
| "grad_norm": 1.9052064611930404, | |
| "learning_rate": 2.253019386636995e-06, | |
| "loss": 0.4255, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.8744541484716157, | |
| "grad_norm": 1.44956679873312, | |
| "learning_rate": 2.248736838414454e-06, | |
| "loss": 0.4413, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.8755458515283843, | |
| "grad_norm": 1.5373891062534013, | |
| "learning_rate": 2.244489275008509e-06, | |
| "loss": 0.477, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.8766375545851528, | |
| "grad_norm": 1.337436818012808, | |
| "learning_rate": 2.2402767504307973e-06, | |
| "loss": 0.4437, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.8777292576419214, | |
| "grad_norm": 1.4288499680577358, | |
| "learning_rate": 2.2360993182474043e-06, | |
| "loss": 0.4113, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.87882096069869, | |
| "grad_norm": 1.4761288915146535, | |
| "learning_rate": 2.231957031578186e-06, | |
| "loss": 0.53, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.8799126637554585, | |
| "grad_norm": 1.4559595441230249, | |
| "learning_rate": 2.227849943096089e-06, | |
| "loss": 0.5839, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.881004366812227, | |
| "grad_norm": 1.356122629089398, | |
| "learning_rate": 2.223778105026483e-06, | |
| "loss": 0.5175, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.8820960698689956, | |
| "grad_norm": 1.4285104513981248, | |
| "learning_rate": 2.2197415691464978e-06, | |
| "loss": 0.5656, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.8831877729257642, | |
| "grad_norm": 1.6446151125998247, | |
| "learning_rate": 2.215740386784363e-06, | |
| "loss": 0.4949, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.8842794759825328, | |
| "grad_norm": 1.5697329931755901, | |
| "learning_rate": 2.211774608818756e-06, | |
| "loss": 0.4485, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.8853711790393013, | |
| "grad_norm": 1.4212775687957804, | |
| "learning_rate": 2.207844285678155e-06, | |
| "loss": 0.4552, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.8864628820960698, | |
| "grad_norm": 1.5563125426433158, | |
| "learning_rate": 2.203949467340198e-06, | |
| "loss": 0.4584, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.8875545851528385, | |
| "grad_norm": 1.4100313864446468, | |
| "learning_rate": 2.200090203331047e-06, | |
| "loss": 0.4462, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.888646288209607, | |
| "grad_norm": 1.3400049168659227, | |
| "learning_rate": 2.196266542724759e-06, | |
| "loss": 0.5074, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.8897379912663755, | |
| "grad_norm": 1.2777197081112, | |
| "learning_rate": 2.1924785341426607e-06, | |
| "loss": 0.4549, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.8908296943231441, | |
| "grad_norm": 1.6729756071220223, | |
| "learning_rate": 2.18872622575273e-06, | |
| "loss": 0.5515, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.8919213973799127, | |
| "grad_norm": 1.5391238422917355, | |
| "learning_rate": 2.185009665268985e-06, | |
| "loss": 0.4776, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.8930131004366813, | |
| "grad_norm": 1.3890773151501015, | |
| "learning_rate": 2.181328899950878e-06, | |
| "loss": 0.4669, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.8941048034934498, | |
| "grad_norm": 1.441746914318561, | |
| "learning_rate": 2.1776839766026917e-06, | |
| "loss": 0.5172, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.8951965065502183, | |
| "grad_norm": 1.4698990931490696, | |
| "learning_rate": 2.174074941572945e-06, | |
| "loss": 0.4015, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.8962882096069869, | |
| "grad_norm": 1.5602622399868518, | |
| "learning_rate": 2.1705018407538052e-06, | |
| "loss": 0.4702, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.8973799126637555, | |
| "grad_norm": 1.6630230037043825, | |
| "learning_rate": 2.1669647195805045e-06, | |
| "loss": 0.4247, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.898471615720524, | |
| "grad_norm": 1.23838619288341, | |
| "learning_rate": 2.1634636230307583e-06, | |
| "loss": 0.5549, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.8995633187772926, | |
| "grad_norm": 1.303752704544832, | |
| "learning_rate": 2.1599985956241974e-06, | |
| "loss": 0.5294, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.9006550218340611, | |
| "grad_norm": 1.5965392918352368, | |
| "learning_rate": 2.156569681421802e-06, | |
| "loss": 0.5207, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.9017467248908297, | |
| "grad_norm": 1.5658093803338047, | |
| "learning_rate": 2.153176924025338e-06, | |
| "loss": 0.5036, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.9028384279475983, | |
| "grad_norm": 1.4362641435970003, | |
| "learning_rate": 2.1498203665768054e-06, | |
| "loss": 0.5309, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.9039301310043668, | |
| "grad_norm": 1.3768885226349667, | |
| "learning_rate": 2.1465000517578882e-06, | |
| "loss": 0.4467, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.9050218340611353, | |
| "grad_norm": 1.560858950459365, | |
| "learning_rate": 2.1432160217894144e-06, | |
| "loss": 0.5254, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.9061135371179039, | |
| "grad_norm": 1.4857990016793732, | |
| "learning_rate": 2.1399683184308157e-06, | |
| "loss": 0.4494, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.9072052401746725, | |
| "grad_norm": 1.3924147851916997, | |
| "learning_rate": 2.1367569829795975e-06, | |
| "loss": 0.4939, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.9082969432314411, | |
| "grad_norm": 1.467257632878726, | |
| "learning_rate": 2.1335820562708133e-06, | |
| "loss": 0.3817, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.9093886462882096, | |
| "grad_norm": 1.4338221579858819, | |
| "learning_rate": 2.1304435786765496e-06, | |
| "loss": 0.4756, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.9104803493449781, | |
| "grad_norm": 1.4476290815470778, | |
| "learning_rate": 2.127341590105406e-06, | |
| "loss": 0.4872, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.9115720524017468, | |
| "grad_norm": 1.4128458095745486, | |
| "learning_rate": 2.124276130001992e-06, | |
| "loss": 0.6456, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.9126637554585153, | |
| "grad_norm": 1.6789922152845889, | |
| "learning_rate": 2.1212472373464245e-06, | |
| "loss": 0.4783, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.9137554585152838, | |
| "grad_norm": 1.3775065717539268, | |
| "learning_rate": 2.1182549506538323e-06, | |
| "loss": 0.46, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.9148471615720524, | |
| "grad_norm": 1.5218752861219855, | |
| "learning_rate": 2.115299307973865e-06, | |
| "loss": 0.4672, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.915938864628821, | |
| "grad_norm": 1.6179423760169276, | |
| "learning_rate": 2.1123803468902117e-06, | |
| "loss": 0.459, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.9170305676855895, | |
| "grad_norm": 1.6296532181092986, | |
| "learning_rate": 2.1094981045201208e-06, | |
| "loss": 0.4474, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.9170305676855895, | |
| "eval_accuracy": 0.8187617923415552, | |
| "eval_accuracy_first_token": 0.7932539321756523, | |
| "eval_accuracy_first_token_<": 0.9678522571819426, | |
| "eval_accuracy_first_token_<_total": 2924, | |
| "eval_accuracy_first_token_<|python_tag|>": 0.8853118712273642, | |
| "eval_accuracy_first_token_<|python_tag|>_total": 994, | |
| "eval_accuracy_first_token_Here": 0.7616580310880829, | |
| "eval_accuracy_first_token_Here_total": 386, | |
| "eval_accuracy_first_token_The": 0.9054646988432389, | |
| "eval_accuracy_first_token_The_total": 2507, | |
| "eval_accuracy_first_token_To": 0.8397435897435898, | |
| "eval_accuracy_first_token_To_total": 936, | |
| "eval_first_token_param_values": 0.9391774602750701, | |
| "eval_first_token_param_values_total": 14978, | |
| "eval_loss": 0.5705814957618713, | |
| "eval_perplexity": 1.1099794468961024, | |
| "eval_runtime": 393.4493, | |
| "eval_samples_per_second": 1.97, | |
| "eval_steps_per_second": 0.247, | |
| "eval_total_number_first_token": 10999, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.9181222707423581, | |
| "grad_norm": 1.205254153426699, | |
| "learning_rate": 2.1066526175139284e-06, | |
| "loss": 0.5144, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.9192139737991266, | |
| "grad_norm": 1.6709674318452725, | |
| "learning_rate": 2.103843922054593e-06, | |
| "loss": 0.5163, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.9203056768558951, | |
| "grad_norm": 1.437027858243901, | |
| "learning_rate": 2.1010720538572353e-06, | |
| "loss": 0.4309, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.9213973799126638, | |
| "grad_norm": 1.4494707759764556, | |
| "learning_rate": 2.098337048168684e-06, | |
| "loss": 0.5106, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.9224890829694323, | |
| "grad_norm": 1.1335356667481837, | |
| "learning_rate": 2.095638939767026e-06, | |
| "loss": 0.5262, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.9235807860262009, | |
| "grad_norm": 1.4089437898384287, | |
| "learning_rate": 2.092977762961169e-06, | |
| "loss": 0.456, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.9246724890829694, | |
| "grad_norm": 1.5580485353987292, | |
| "learning_rate": 2.0903535515903973e-06, | |
| "loss": 0.4332, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.925764192139738, | |
| "grad_norm": 1.4205570846295756, | |
| "learning_rate": 2.087766339023949e-06, | |
| "loss": 0.3294, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.9268558951965066, | |
| "grad_norm": 1.5282550968605033, | |
| "learning_rate": 2.0852161581605876e-06, | |
| "loss": 0.4687, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.9279475982532751, | |
| "grad_norm": 1.7856045037336745, | |
| "learning_rate": 2.0827030414281865e-06, | |
| "loss": 0.4517, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.9290393013100436, | |
| "grad_norm": 1.5465010658627751, | |
| "learning_rate": 2.0802270207833125e-06, | |
| "loss": 0.4412, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.9301310043668122, | |
| "grad_norm": 1.3738941993193405, | |
| "learning_rate": 2.0777881277108233e-06, | |
| "loss": 0.542, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.9312227074235808, | |
| "grad_norm": 2.321698082996745, | |
| "learning_rate": 2.075386393223465e-06, | |
| "loss": 0.4124, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.9323144104803494, | |
| "grad_norm": 1.324438187196309, | |
| "learning_rate": 2.073021847861479e-06, | |
| "loss": 0.4605, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.9334061135371179, | |
| "grad_norm": 1.463444924977783, | |
| "learning_rate": 2.0706945216922125e-06, | |
| "loss": 0.5823, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.9344978165938864, | |
| "grad_norm": 1.4922667981099709, | |
| "learning_rate": 2.068404444309738e-06, | |
| "loss": 0.4646, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.9355895196506551, | |
| "grad_norm": 1.3847315177416661, | |
| "learning_rate": 2.066151644834474e-06, | |
| "loss": 0.4343, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.9366812227074236, | |
| "grad_norm": 1.5940379300724075, | |
| "learning_rate": 2.0639361519128192e-06, | |
| "loss": 0.5482, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.9377729257641921, | |
| "grad_norm": 1.4677814031748417, | |
| "learning_rate": 2.0617579937167808e-06, | |
| "loss": 0.517, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.9388646288209607, | |
| "grad_norm": 1.7341602702329544, | |
| "learning_rate": 2.0596171979436262e-06, | |
| "loss": 0.4918, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.9399563318777293, | |
| "grad_norm": 1.634439110526545, | |
| "learning_rate": 2.0575137918155216e-06, | |
| "loss": 0.4462, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.9410480349344978, | |
| "grad_norm": 1.5065600663982117, | |
| "learning_rate": 2.055447802079193e-06, | |
| "loss": 0.6019, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.9421397379912664, | |
| "grad_norm": 2.0495851259000837, | |
| "learning_rate": 2.053419255005581e-06, | |
| "loss": 0.5173, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.9432314410480349, | |
| "grad_norm": 1.5961930641084219, | |
| "learning_rate": 2.0514281763895078e-06, | |
| "loss": 0.4916, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.9443231441048034, | |
| "grad_norm": 1.4756850948436897, | |
| "learning_rate": 2.0494745915493524e-06, | |
| "loss": 0.4054, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.9454148471615721, | |
| "grad_norm": 1.4560227055974817, | |
| "learning_rate": 2.0475585253267246e-06, | |
| "loss": 0.4926, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.9465065502183406, | |
| "grad_norm": 1.4957883384826696, | |
| "learning_rate": 2.0456800020861516e-06, | |
| "loss": 0.4415, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.9475982532751092, | |
| "grad_norm": 1.5238486125969655, | |
| "learning_rate": 2.043839045714766e-06, | |
| "loss": 0.4261, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.9486899563318777, | |
| "grad_norm": 1.434004624625696, | |
| "learning_rate": 2.0420356796220064e-06, | |
| "loss": 0.422, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.9497816593886463, | |
| "grad_norm": 1.406938061379921, | |
| "learning_rate": 2.040269926739314e-06, | |
| "loss": 0.4445, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.9508733624454149, | |
| "grad_norm": 1.4076211144815238, | |
| "learning_rate": 2.038541809519845e-06, | |
| "loss": 0.4958, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.9519650655021834, | |
| "grad_norm": 1.4839385418962134, | |
| "learning_rate": 2.0368513499381834e-06, | |
| "loss": 0.4866, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.9530567685589519, | |
| "grad_norm": 1.3849492456103984, | |
| "learning_rate": 2.0351985694900617e-06, | |
| "loss": 0.509, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.9541484716157205, | |
| "grad_norm": 1.4308287691303287, | |
| "learning_rate": 2.0335834891920897e-06, | |
| "loss": 0.4827, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.9552401746724891, | |
| "grad_norm": 1.2106458283071144, | |
| "learning_rate": 2.0320061295814825e-06, | |
| "loss": 0.4897, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.9563318777292577, | |
| "grad_norm": 1.5606989919569907, | |
| "learning_rate": 2.0304665107158052e-06, | |
| "loss": 0.4196, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.9574235807860262, | |
| "grad_norm": 1.3347004227714003, | |
| "learning_rate": 2.028964652172714e-06, | |
| "loss": 0.3604, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.9585152838427947, | |
| "grad_norm": 1.4116522769368487, | |
| "learning_rate": 2.027500573049707e-06, | |
| "loss": 0.5023, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.9596069868995634, | |
| "grad_norm": 1.5195001851239565, | |
| "learning_rate": 2.0260742919638845e-06, | |
| "loss": 0.4883, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.9606986899563319, | |
| "grad_norm": 1.5253017255371202, | |
| "learning_rate": 2.0246858270517086e-06, | |
| "loss": 0.4582, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.9617903930131004, | |
| "grad_norm": 1.5989482719027508, | |
| "learning_rate": 2.0233351959687758e-06, | |
| "loss": 0.4625, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.962882096069869, | |
| "grad_norm": 1.4605962460768873, | |
| "learning_rate": 2.0220224158895895e-06, | |
| "loss": 0.4964, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.9639737991266376, | |
| "grad_norm": 1.4426450316388095, | |
| "learning_rate": 2.020747503507345e-06, | |
| "loss": 0.4593, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.9650655021834061, | |
| "grad_norm": 1.469436868223501, | |
| "learning_rate": 2.0195104750337126e-06, | |
| "loss": 0.6242, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.9661572052401747, | |
| "grad_norm": 1.4929428338509867, | |
| "learning_rate": 2.0183113461986374e-06, | |
| "loss": 0.5312, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.9672489082969432, | |
| "grad_norm": 1.49713177404691, | |
| "learning_rate": 2.017150132250134e-06, | |
| "loss": 0.5664, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.9683406113537117, | |
| "grad_norm": 1.592472152020729, | |
| "learning_rate": 2.0160268479540953e-06, | |
| "loss": 0.4745, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.9694323144104804, | |
| "grad_norm": 1.4810365652583912, | |
| "learning_rate": 2.0149415075941037e-06, | |
| "loss": 0.4927, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.9705240174672489, | |
| "grad_norm": 1.4236888171085202, | |
| "learning_rate": 2.013894124971251e-06, | |
| "loss": 0.6274, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.9716157205240175, | |
| "grad_norm": 1.4546508808937473, | |
| "learning_rate": 2.0128847134039603e-06, | |
| "loss": 0.4766, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.972707423580786, | |
| "grad_norm": 1.5088320910965887, | |
| "learning_rate": 2.01191328572782e-06, | |
| "loss": 0.5803, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.9737991266375546, | |
| "grad_norm": 1.4031003174167702, | |
| "learning_rate": 2.0109798542954166e-06, | |
| "loss": 0.5278, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.9748908296943232, | |
| "grad_norm": 1.370703125544038, | |
| "learning_rate": 2.010084430976181e-06, | |
| "loss": 0.527, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.9759825327510917, | |
| "grad_norm": 1.349030326926829, | |
| "learning_rate": 2.009227027156236e-06, | |
| "loss": 0.539, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.9770742358078602, | |
| "grad_norm": 1.43566220138461, | |
| "learning_rate": 2.008407653738253e-06, | |
| "loss": 0.5392, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.9781659388646288, | |
| "grad_norm": 1.5178256542279296, | |
| "learning_rate": 2.0076263211413094e-06, | |
| "loss": 0.4182, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.9792576419213974, | |
| "grad_norm": 1.5979341650799257, | |
| "learning_rate": 2.0068830393007625e-06, | |
| "loss": 0.5014, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.980349344978166, | |
| "grad_norm": 1.4678783759443443, | |
| "learning_rate": 2.0061778176681174e-06, | |
| "loss": 0.4365, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.9814410480349345, | |
| "grad_norm": 1.4727108227205978, | |
| "learning_rate": 2.005510665210909e-06, | |
| "loss": 0.5425, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.982532751091703, | |
| "grad_norm": 1.4590600569771008, | |
| "learning_rate": 2.004881590412587e-06, | |
| "loss": 0.4367, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.9836244541484717, | |
| "grad_norm": 1.4380866049192125, | |
| "learning_rate": 2.004290601272413e-06, | |
| "loss": 0.4171, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.9847161572052402, | |
| "grad_norm": 1.370884653997314, | |
| "learning_rate": 2.003737705305351e-06, | |
| "loss": 0.4152, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.9858078602620087, | |
| "grad_norm": 1.5721117133850033, | |
| "learning_rate": 2.003222909541976e-06, | |
| "loss": 0.4556, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.9868995633187773, | |
| "grad_norm": 1.6954405110580915, | |
| "learning_rate": 2.0027462205283858e-06, | |
| "loss": 0.4836, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.9879912663755459, | |
| "grad_norm": 1.5357666181603002, | |
| "learning_rate": 2.002307644326115e-06, | |
| "loss": 0.4659, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.9890829694323144, | |
| "grad_norm": 1.3036572912689448, | |
| "learning_rate": 2.0019071865120597e-06, | |
| "loss": 0.4509, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.990174672489083, | |
| "grad_norm": 1.4565706250847452, | |
| "learning_rate": 2.0015448521784045e-06, | |
| "loss": 0.5369, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.9912663755458515, | |
| "grad_norm": 1.6290260214981989, | |
| "learning_rate": 2.0012206459325623e-06, | |
| "loss": 0.53, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.99235807860262, | |
| "grad_norm": 1.475184053581015, | |
| "learning_rate": 2.0009345718971095e-06, | |
| "loss": 0.6147, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.9934497816593887, | |
| "grad_norm": 1.3972057243681786, | |
| "learning_rate": 2.0006866337097385e-06, | |
| "loss": 0.3797, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.9945414847161572, | |
| "grad_norm": 1.539646657802833, | |
| "learning_rate": 2.0004768345232086e-06, | |
| "loss": 0.4444, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.9956331877729258, | |
| "grad_norm": 1.448519957622226, | |
| "learning_rate": 2.000305177005308e-06, | |
| "loss": 0.496, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.9967248908296943, | |
| "grad_norm": 1.5861995286170092, | |
| "learning_rate": 2.000171663338818e-06, | |
| "loss": 0.5035, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.9978165938864629, | |
| "grad_norm": 1.5202816160223769, | |
| "learning_rate": 2.000076295221487e-06, | |
| "loss": 0.558, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.9989082969432315, | |
| "grad_norm": 1.4109310416901248, | |
| "learning_rate": 2.0000190738660073e-06, | |
| "loss": 0.5627, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.541149773382253, | |
| "learning_rate": 2e-06, | |
| "loss": 0.4722, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 916, | |
| "total_flos": 287687646904320.0, | |
| "train_loss": 0.5413380475088498, | |
| "train_runtime": 14548.2375, | |
| "train_samples_per_second": 0.755, | |
| "train_steps_per_second": 0.063 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 916, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 287687646904320.0, | |
| "train_batch_size": 3, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |