{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.15362753005338556,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00015362753005338555,
"grad_norm": 1.4335267543792725,
"learning_rate": 0.00019999950652018584,
"loss": 4.4381,
"step": 1
},
{
"epoch": 0.0003072550601067711,
"grad_norm": 1.9977083206176758,
"learning_rate": 0.0001999980260856137,
"loss": 3.3634,
"step": 2
},
{
"epoch": 0.0004608825901601567,
"grad_norm": 1.9106627702713013,
"learning_rate": 0.000199995558710895,
"loss": 2.1026,
"step": 3
},
{
"epoch": 0.0006145101202135422,
"grad_norm": 1.4140522480010986,
"learning_rate": 0.00019999210442038162,
"loss": 1.4398,
"step": 4
},
{
"epoch": 0.0007681376502669279,
"grad_norm": null,
"learning_rate": 0.00019999210442038162,
"loss": 2.3337,
"step": 5
},
{
"epoch": 0.0009217651803203134,
"grad_norm": 1.170066475868225,
"learning_rate": 0.00019998766324816607,
"loss": 1.5668,
"step": 6
},
{
"epoch": 0.001075392710373699,
"grad_norm": 1.1518230438232422,
"learning_rate": 0.0001999822352380809,
"loss": 1.4619,
"step": 7
},
{
"epoch": 0.0012290202404270844,
"grad_norm": 0.9642981886863708,
"learning_rate": 0.00019997582044369843,
"loss": 1.0451,
"step": 8
},
{
"epoch": 0.00138264777048047,
"grad_norm": 1.3188344240188599,
"learning_rate": 0.00019996841892833,
"loss": 0.7012,
"step": 9
},
{
"epoch": 0.0015362753005338558,
"grad_norm": 0.8676035404205322,
"learning_rate": 0.00019996003076502565,
"loss": 0.3335,
"step": 10
},
{
"epoch": 0.0016899028305872412,
"grad_norm": 1.2693771123886108,
"learning_rate": 0.00019995065603657316,
"loss": 0.5447,
"step": 11
},
{
"epoch": 0.0018435303606406269,
"grad_norm": 0.5754665732383728,
"learning_rate": 0.0001999402948354973,
"loss": 0.212,
"step": 12
},
{
"epoch": 0.0019971578906940123,
"grad_norm": 0.6668656468391418,
"learning_rate": 0.00019992894726405893,
"loss": 0.2628,
"step": 13
},
{
"epoch": 0.002150785420747398,
"grad_norm": 0.9263312220573425,
"learning_rate": 0.000199916613434254,
"loss": 0.3356,
"step": 14
},
{
"epoch": 0.0023044129508007836,
"grad_norm": 1.4411181211471558,
"learning_rate": 0.0001999032934678125,
"loss": 0.4613,
"step": 15
},
{
"epoch": 0.002458040480854169,
"grad_norm": 2.610933542251587,
"learning_rate": 0.00019988898749619702,
"loss": 0.4783,
"step": 16
},
{
"epoch": 0.0026116680109075545,
"grad_norm": 1.475306510925293,
"learning_rate": 0.00019987369566060176,
"loss": 0.2315,
"step": 17
},
{
"epoch": 0.00276529554096094,
"grad_norm": 1.2833693027496338,
"learning_rate": 0.00019985741811195097,
"loss": 0.2743,
"step": 18
},
{
"epoch": 0.002918923071014326,
"grad_norm": 1.7153571844100952,
"learning_rate": 0.00019984015501089752,
"loss": 0.54,
"step": 19
},
{
"epoch": 0.0030725506010677115,
"grad_norm": 1.2130733728408813,
"learning_rate": 0.0001998219065278212,
"loss": 0.2181,
"step": 20
},
{
"epoch": 0.0032261781311210967,
"grad_norm": 2.236994981765747,
"learning_rate": 0.00019980267284282717,
"loss": 0.2897,
"step": 21
},
{
"epoch": 0.0033798056611744824,
"grad_norm": 3.898953676223755,
"learning_rate": 0.00019978245414574417,
"loss": 0.272,
"step": 22
},
{
"epoch": 0.003533433191227868,
"grad_norm": 3.425849676132202,
"learning_rate": 0.00019976125063612252,
"loss": 0.2642,
"step": 23
},
{
"epoch": 0.0036870607212812537,
"grad_norm": 0.9692144393920898,
"learning_rate": 0.00019973906252323238,
"loss": 0.1686,
"step": 24
},
{
"epoch": 0.003840688251334639,
"grad_norm": 0.6725673079490662,
"learning_rate": 0.0001997158900260614,
"loss": 0.1418,
"step": 25
},
{
"epoch": 0.003994315781388025,
"grad_norm": 1.8308030366897583,
"learning_rate": 0.0001996917333733128,
"loss": 0.2172,
"step": 26
},
{
"epoch": 0.00414794331144141,
"grad_norm": 1.0318214893341064,
"learning_rate": 0.00019966659280340297,
"loss": 0.1873,
"step": 27
},
{
"epoch": 0.004301570841494796,
"grad_norm": 0.9152179956436157,
"learning_rate": 0.00019964046856445924,
"loss": 0.2706,
"step": 28
},
{
"epoch": 0.004455198371548182,
"grad_norm": 2.2424752712249756,
"learning_rate": 0.00019961336091431727,
"loss": 0.2168,
"step": 29
},
{
"epoch": 0.004608825901601567,
"grad_norm": 0.5365583300590515,
"learning_rate": 0.00019958527012051857,
"loss": 0.1102,
"step": 30
},
{
"epoch": 0.004762453431654953,
"grad_norm": 1.251253604888916,
"learning_rate": 0.00019955619646030802,
"loss": 0.2697,
"step": 31
},
{
"epoch": 0.004916080961708338,
"grad_norm": 0.813782274723053,
"learning_rate": 0.00019952614022063084,
"loss": 0.226,
"step": 32
},
{
"epoch": 0.005069708491761723,
"grad_norm": 0.5317302346229553,
"learning_rate": 0.00019949510169813003,
"loss": 0.1474,
"step": 33
},
{
"epoch": 0.005223336021815109,
"grad_norm": 0.5142704248428345,
"learning_rate": 0.00019946308119914323,
"loss": 0.1352,
"step": 34
},
{
"epoch": 0.005376963551868495,
"grad_norm": 0.448001503944397,
"learning_rate": 0.0001994300790396999,
"loss": 0.1196,
"step": 35
},
{
"epoch": 0.00553059108192188,
"grad_norm": 0.5577914118766785,
"learning_rate": 0.000199396095545518,
"loss": 0.1919,
"step": 36
},
{
"epoch": 0.005684218611975266,
"grad_norm": 0.5910277366638184,
"learning_rate": 0.00019936113105200085,
"loss": 0.1736,
"step": 37
},
{
"epoch": 0.005837846142028652,
"grad_norm": 0.6294355392456055,
"learning_rate": 0.00019932518590423394,
"loss": 0.1977,
"step": 38
},
{
"epoch": 0.005991473672082037,
"grad_norm": 0.61626797914505,
"learning_rate": 0.00019928826045698136,
"loss": 0.1124,
"step": 39
},
{
"epoch": 0.006145101202135423,
"grad_norm": 0.4948277771472931,
"learning_rate": 0.0001992503550746824,
"loss": 0.1463,
"step": 40
},
{
"epoch": 0.006298728732188808,
"grad_norm": 0.29908713698387146,
"learning_rate": 0.0001992114701314478,
"loss": 0.0867,
"step": 41
},
{
"epoch": 0.0064523562622421935,
"grad_norm": 0.7047778367996216,
"learning_rate": 0.0001991716060110563,
"loss": 0.1536,
"step": 42
},
{
"epoch": 0.006605983792295579,
"grad_norm": 0.6480337977409363,
"learning_rate": 0.00019913076310695068,
"loss": 0.0656,
"step": 43
},
{
"epoch": 0.006759611322348965,
"grad_norm": 0.8253292441368103,
"learning_rate": 0.00019908894182223388,
"loss": 0.2251,
"step": 44
},
{
"epoch": 0.0069132388524023505,
"grad_norm": 0.4333663582801819,
"learning_rate": 0.00019904614256966512,
"loss": 0.1107,
"step": 45
},
{
"epoch": 0.007066866382455736,
"grad_norm": 0.5141597390174866,
"learning_rate": 0.00019900236577165576,
"loss": 0.1337,
"step": 46
},
{
"epoch": 0.007220493912509122,
"grad_norm": 7.190249919891357,
"learning_rate": 0.0001989576118602651,
"loss": 0.17,
"step": 47
},
{
"epoch": 0.0073741214425625074,
"grad_norm": 0.44444581866264343,
"learning_rate": 0.00019891188127719618,
"loss": 0.1257,
"step": 48
},
{
"epoch": 0.007527748972615893,
"grad_norm": 0.6360841393470764,
"learning_rate": 0.0001988651744737914,
"loss": 0.1476,
"step": 49
},
{
"epoch": 0.007681376502669278,
"grad_norm": 0.600468635559082,
"learning_rate": 0.00019881749191102808,
"loss": 0.078,
"step": 50
},
{
"epoch": 0.007835004032722664,
"grad_norm": 2.166731357574463,
"learning_rate": 0.00019876883405951377,
"loss": 0.103,
"step": 51
},
{
"epoch": 0.00798863156277605,
"grad_norm": 0.5944610834121704,
"learning_rate": 0.00019871920139948192,
"loss": 0.13,
"step": 52
},
{
"epoch": 0.008142259092829436,
"grad_norm": 0.45094043016433716,
"learning_rate": 0.0001986685944207868,
"loss": 0.1794,
"step": 53
},
{
"epoch": 0.00829588662288282,
"grad_norm": 0.49688029289245605,
"learning_rate": 0.0001986170136228989,
"loss": 0.1671,
"step": 54
},
{
"epoch": 0.008449514152936205,
"grad_norm": 0.45774996280670166,
"learning_rate": 0.00019856445951489982,
"loss": 0.1015,
"step": 55
},
{
"epoch": 0.008603141682989592,
"grad_norm": 0.26904016733169556,
"learning_rate": 0.0001985109326154774,
"loss": 0.0585,
"step": 56
},
{
"epoch": 0.008756769213042977,
"grad_norm": 0.6482771039009094,
"learning_rate": 0.00019845643345292054,
"loss": 0.2137,
"step": 57
},
{
"epoch": 0.008910396743096363,
"grad_norm": 0.4970140755176544,
"learning_rate": 0.00019840096256511398,
"loss": 0.2009,
"step": 58
},
{
"epoch": 0.009064024273149748,
"grad_norm": 0.43053507804870605,
"learning_rate": 0.00019834452049953297,
"loss": 0.1544,
"step": 59
},
{
"epoch": 0.009217651803203135,
"grad_norm": 0.3757063150405884,
"learning_rate": 0.00019828710781323792,
"loss": 0.1076,
"step": 60
},
{
"epoch": 0.00937127933325652,
"grad_norm": 0.30272766947746277,
"learning_rate": 0.0001982287250728689,
"loss": 0.0718,
"step": 61
},
{
"epoch": 0.009524906863309906,
"grad_norm": 0.27745816111564636,
"learning_rate": 0.0001981693728546399,
"loss": 0.0766,
"step": 62
},
{
"epoch": 0.00967853439336329,
"grad_norm": 0.4021260738372803,
"learning_rate": 0.0001981090517443334,
"loss": 0.0695,
"step": 63
},
{
"epoch": 0.009832161923416675,
"grad_norm": 0.4044291079044342,
"learning_rate": 0.00019804776233729444,
"loss": 0.1653,
"step": 64
},
{
"epoch": 0.009985789453470062,
"grad_norm": 0.2142949104309082,
"learning_rate": 0.0001979855052384247,
"loss": 0.0665,
"step": 65
},
{
"epoch": 0.010139416983523447,
"grad_norm": 0.29861870408058167,
"learning_rate": 0.00019792228106217658,
"loss": 0.0492,
"step": 66
},
{
"epoch": 0.010293044513576833,
"grad_norm": 0.30163586139678955,
"learning_rate": 0.00019785809043254722,
"loss": 0.1081,
"step": 67
},
{
"epoch": 0.010446672043630218,
"grad_norm": 0.6799619197845459,
"learning_rate": 0.0001977929339830722,
"loss": 0.1692,
"step": 68
},
{
"epoch": 0.010600299573683605,
"grad_norm": 0.24740102887153625,
"learning_rate": 0.00019772681235681936,
"loss": 0.0701,
"step": 69
},
{
"epoch": 0.01075392710373699,
"grad_norm": 0.47756919264793396,
"learning_rate": 0.00019765972620638248,
"loss": 0.131,
"step": 70
},
{
"epoch": 0.010907554633790376,
"grad_norm": 0.5168080925941467,
"learning_rate": 0.00019759167619387476,
"loss": 0.2123,
"step": 71
},
{
"epoch": 0.01106118216384376,
"grad_norm": 0.37097570300102234,
"learning_rate": 0.00019752266299092236,
"loss": 0.0892,
"step": 72
},
{
"epoch": 0.011214809693897146,
"grad_norm": 0.23660291731357574,
"learning_rate": 0.00019745268727865774,
"loss": 0.0312,
"step": 73
},
{
"epoch": 0.011368437223950532,
"grad_norm": 0.34768179059028625,
"learning_rate": 0.0001973817497477129,
"loss": 0.1131,
"step": 74
},
{
"epoch": 0.011522064754003917,
"grad_norm": 0.28349238634109497,
"learning_rate": 0.00019730985109821266,
"loss": 0.0446,
"step": 75
},
{
"epoch": 0.011675692284057303,
"grad_norm": 0.519523024559021,
"learning_rate": 0.00019723699203976766,
"loss": 0.0896,
"step": 76
},
{
"epoch": 0.011829319814110688,
"grad_norm": 0.3170166313648224,
"learning_rate": 0.0001971631732914674,
"loss": 0.0914,
"step": 77
},
{
"epoch": 0.011982947344164075,
"grad_norm": 0.4500735104084015,
"learning_rate": 0.0001970883955818731,
"loss": 0.1276,
"step": 78
},
{
"epoch": 0.01213657487421746,
"grad_norm": 0.28541991114616394,
"learning_rate": 0.0001970126596490106,
"loss": 0.0802,
"step": 79
},
{
"epoch": 0.012290202404270846,
"grad_norm": 0.5267351269721985,
"learning_rate": 0.00019693596624036292,
"loss": 0.0907,
"step": 80
},
{
"epoch": 0.01244382993432423,
"grad_norm": 0.29661211371421814,
"learning_rate": 0.0001968583161128631,
"loss": 0.0672,
"step": 81
},
{
"epoch": 0.012597457464377616,
"grad_norm": 0.7149848341941833,
"learning_rate": 0.00019677971003288655,
"loss": 0.1109,
"step": 82
},
{
"epoch": 0.012751084994431002,
"grad_norm": 0.40326982736587524,
"learning_rate": 0.00019670014877624353,
"loss": 0.1031,
"step": 83
},
{
"epoch": 0.012904712524484387,
"grad_norm": 0.7958882451057434,
"learning_rate": 0.00019661963312817148,
"loss": 0.1508,
"step": 84
},
{
"epoch": 0.013058340054537773,
"grad_norm": 0.25845202803611755,
"learning_rate": 0.0001965381638833274,
"loss": 0.067,
"step": 85
},
{
"epoch": 0.013211967584591158,
"grad_norm": 0.6514424085617065,
"learning_rate": 0.00019645574184577982,
"loss": 0.1296,
"step": 86
},
{
"epoch": 0.013365595114644545,
"grad_norm": 0.22582142055034637,
"learning_rate": 0.000196372367829001,
"loss": 0.0428,
"step": 87
},
{
"epoch": 0.01351922264469793,
"grad_norm": 0.34265944361686707,
"learning_rate": 0.00019628804265585877,
"loss": 0.0815,
"step": 88
},
{
"epoch": 0.013672850174751316,
"grad_norm": 0.34472209215164185,
"learning_rate": 0.0001962027671586086,
"loss": 0.1057,
"step": 89
},
{
"epoch": 0.013826477704804701,
"grad_norm": 0.3242131173610687,
"learning_rate": 0.0001961165421788852,
"loss": 0.071,
"step": 90
},
{
"epoch": 0.013980105234858086,
"grad_norm": 0.38684889674186707,
"learning_rate": 0.0001960293685676943,
"loss": 0.1058,
"step": 91
},
{
"epoch": 0.014133732764911472,
"grad_norm": 0.523155927658081,
"learning_rate": 0.0001959412471854043,
"loss": 0.1355,
"step": 92
},
{
"epoch": 0.014287360294964857,
"grad_norm": 0.6945334076881409,
"learning_rate": 0.0001958521789017376,
"loss": 0.1895,
"step": 93
},
{
"epoch": 0.014440987825018244,
"grad_norm": 0.2630568742752075,
"learning_rate": 0.00019576216459576222,
"loss": 0.038,
"step": 94
},
{
"epoch": 0.014594615355071628,
"grad_norm": 1.111454963684082,
"learning_rate": 0.00019567120515588308,
"loss": 0.1504,
"step": 95
},
{
"epoch": 0.014748242885125015,
"grad_norm": 0.5980623364448547,
"learning_rate": 0.00019557930147983302,
"loss": 0.0957,
"step": 96
},
{
"epoch": 0.0149018704151784,
"grad_norm": 0.37585559487342834,
"learning_rate": 0.00019548645447466431,
"loss": 0.0439,
"step": 97
},
{
"epoch": 0.015055497945231786,
"grad_norm": 0.3527520000934601,
"learning_rate": 0.00019539266505673938,
"loss": 0.0594,
"step": 98
},
{
"epoch": 0.015209125475285171,
"grad_norm": 0.12810342013835907,
"learning_rate": 0.00019529793415172192,
"loss": 0.0204,
"step": 99
},
{
"epoch": 0.015362753005338556,
"grad_norm": 0.6953453421592712,
"learning_rate": 0.00019520226269456768,
"loss": 0.1609,
"step": 100
},
{
"epoch": 0.015516380535391942,
"grad_norm": 0.9400885701179504,
"learning_rate": 0.00019510565162951537,
"loss": 0.1655,
"step": 101
},
{
"epoch": 0.01567000806544533,
"grad_norm": 0.7159962058067322,
"learning_rate": 0.00019500810191007718,
"loss": 0.1363,
"step": 102
},
{
"epoch": 0.015823635595498712,
"grad_norm": 0.1889261156320572,
"learning_rate": 0.00019490961449902946,
"loss": 0.0258,
"step": 103
},
{
"epoch": 0.0159772631255521,
"grad_norm": 0.4154360592365265,
"learning_rate": 0.0001948101903684032,
"loss": 0.0956,
"step": 104
},
{
"epoch": 0.016130890655605485,
"grad_norm": 0.531460165977478,
"learning_rate": 0.00019470983049947444,
"loss": 0.1141,
"step": 105
},
{
"epoch": 0.01628451818565887,
"grad_norm": 0.375203013420105,
"learning_rate": 0.00019460853588275454,
"loss": 0.0621,
"step": 106
},
{
"epoch": 0.016438145715712255,
"grad_norm": 0.6316067576408386,
"learning_rate": 0.00019450630751798048,
"loss": 0.1332,
"step": 107
},
{
"epoch": 0.01659177324576564,
"grad_norm": 0.3298954963684082,
"learning_rate": 0.000194403146414105,
"loss": 0.0613,
"step": 108
},
{
"epoch": 0.016745400775819028,
"grad_norm": 0.39221131801605225,
"learning_rate": 0.00019429905358928646,
"loss": 0.0881,
"step": 109
},
{
"epoch": 0.01689902830587241,
"grad_norm": 0.3961494565010071,
"learning_rate": 0.00019419403007087907,
"loss": 0.121,
"step": 110
},
{
"epoch": 0.017052655835925797,
"grad_norm": 0.48982882499694824,
"learning_rate": 0.00019408807689542257,
"loss": 0.1838,
"step": 111
},
{
"epoch": 0.017206283365979184,
"grad_norm": 1.97594153881073,
"learning_rate": 0.00019398119510863197,
"loss": 0.1074,
"step": 112
},
{
"epoch": 0.01735991089603257,
"grad_norm": 0.46495580673217773,
"learning_rate": 0.00019387338576538744,
"loss": 0.1015,
"step": 113
},
{
"epoch": 0.017513538426085953,
"grad_norm": 0.4214487075805664,
"learning_rate": 0.00019376464992972356,
"loss": 0.1202,
"step": 114
},
{
"epoch": 0.01766716595613934,
"grad_norm": 0.397897332906723,
"learning_rate": 0.00019365498867481923,
"loss": 0.0906,
"step": 115
},
{
"epoch": 0.017820793486192726,
"grad_norm": 0.22124041616916656,
"learning_rate": 0.00019354440308298675,
"loss": 0.0569,
"step": 116
},
{
"epoch": 0.017974421016246113,
"grad_norm": 0.24207240343093872,
"learning_rate": 0.00019343289424566122,
"loss": 0.0692,
"step": 117
},
{
"epoch": 0.018128048546299496,
"grad_norm": 0.41026973724365234,
"learning_rate": 0.00019332046326338986,
"loss": 0.0686,
"step": 118
},
{
"epoch": 0.018281676076352883,
"grad_norm": 0.16712917387485504,
"learning_rate": 0.0001932071112458211,
"loss": 0.0428,
"step": 119
},
{
"epoch": 0.01843530360640627,
"grad_norm": 0.5548776984214783,
"learning_rate": 0.00019309283931169356,
"loss": 0.148,
"step": 120
},
{
"epoch": 0.018588931136459652,
"grad_norm": 0.41364169120788574,
"learning_rate": 0.00019297764858882514,
"loss": 0.0574,
"step": 121
},
{
"epoch": 0.01874255866651304,
"grad_norm": 0.2608810365200043,
"learning_rate": 0.00019286154021410173,
"loss": 0.0724,
"step": 122
},
{
"epoch": 0.018896186196566425,
"grad_norm": 0.39230769872665405,
"learning_rate": 0.00019274451533346615,
"loss": 0.1025,
"step": 123
},
{
"epoch": 0.01904981372661981,
"grad_norm": 0.32246139645576477,
"learning_rate": 0.00019262657510190666,
"loss": 0.0837,
"step": 124
},
{
"epoch": 0.019203441256673195,
"grad_norm": 0.2555975615978241,
"learning_rate": 0.0001925077206834458,
"loss": 0.0657,
"step": 125
},
{
"epoch": 0.01935706878672658,
"grad_norm": 0.6545979380607605,
"learning_rate": 0.0001923879532511287,
"loss": 0.1372,
"step": 126
},
{
"epoch": 0.019510696316779968,
"grad_norm": 0.12058461457490921,
"learning_rate": 0.0001922672739870115,
"loss": 0.0237,
"step": 127
},
{
"epoch": 0.01966432384683335,
"grad_norm": 0.22008401155471802,
"learning_rate": 0.00019214568408214985,
"loss": 0.0658,
"step": 128
},
{
"epoch": 0.019817951376886737,
"grad_norm": 0.23489922285079956,
"learning_rate": 0.00019202318473658705,
"loss": 0.0849,
"step": 129
},
{
"epoch": 0.019971578906940124,
"grad_norm": 0.2398349642753601,
"learning_rate": 0.00019189977715934213,
"loss": 0.0614,
"step": 130
},
{
"epoch": 0.02012520643699351,
"grad_norm": 0.25383076071739197,
"learning_rate": 0.00019177546256839812,
"loss": 0.0368,
"step": 131
},
{
"epoch": 0.020278833967046894,
"grad_norm": 0.25539273023605347,
"learning_rate": 0.0001916502421906898,
"loss": 0.0945,
"step": 132
},
{
"epoch": 0.02043246149710028,
"grad_norm": 0.4173387885093689,
"learning_rate": 0.00019152411726209176,
"loss": 0.0928,
"step": 133
},
{
"epoch": 0.020586089027153667,
"grad_norm": 0.1956920623779297,
"learning_rate": 0.00019139708902740613,
"loss": 0.0376,
"step": 134
},
{
"epoch": 0.020739716557207053,
"grad_norm": 0.26302215456962585,
"learning_rate": 0.0001912691587403503,
"loss": 0.0623,
"step": 135
},
{
"epoch": 0.020893344087260436,
"grad_norm": 0.058415770530700684,
"learning_rate": 0.00019114032766354453,
"loss": 0.009,
"step": 136
},
{
"epoch": 0.021046971617313823,
"grad_norm": 0.18043197691440582,
"learning_rate": 0.00019101059706849957,
"loss": 0.052,
"step": 137
},
{
"epoch": 0.02120059914736721,
"grad_norm": 0.6324962973594666,
"learning_rate": 0.00019087996823560402,
"loss": 0.1532,
"step": 138
},
{
"epoch": 0.021354226677420592,
"grad_norm": 0.24394692480564117,
"learning_rate": 0.0001907484424541117,
"loss": 0.0751,
"step": 139
},
{
"epoch": 0.02150785420747398,
"grad_norm": 0.19500701129436493,
"learning_rate": 0.00019061602102212898,
"loss": 0.0505,
"step": 140
},
{
"epoch": 0.021661481737527365,
"grad_norm": 0.32753410935401917,
"learning_rate": 0.00019048270524660196,
"loss": 0.0578,
"step": 141
},
{
"epoch": 0.021815109267580752,
"grad_norm": 0.16466738283634186,
"learning_rate": 0.0001903484964433035,
"loss": 0.0232,
"step": 142
},
{
"epoch": 0.021968736797634135,
"grad_norm": 0.2467702478170395,
"learning_rate": 0.00019021339593682028,
"loss": 0.0385,
"step": 143
},
{
"epoch": 0.02212236432768752,
"grad_norm": 0.16543933749198914,
"learning_rate": 0.00019007740506053983,
"loss": 0.035,
"step": 144
},
{
"epoch": 0.022275991857740908,
"grad_norm": 0.4182642102241516,
"learning_rate": 0.0001899405251566371,
"loss": 0.0509,
"step": 145
},
{
"epoch": 0.02242961938779429,
"grad_norm": 0.5877979397773743,
"learning_rate": 0.00018980275757606157,
"loss": 0.0667,
"step": 146
},
{
"epoch": 0.022583246917847678,
"grad_norm": 0.49787595868110657,
"learning_rate": 0.00018966410367852362,
"loss": 0.0751,
"step": 147
},
{
"epoch": 0.022736874447901064,
"grad_norm": 0.49547550082206726,
"learning_rate": 0.00018952456483248119,
"loss": 0.1118,
"step": 148
},
{
"epoch": 0.02289050197795445,
"grad_norm": 0.3361258804798126,
"learning_rate": 0.0001893841424151264,
"loss": 0.0486,
"step": 149
},
{
"epoch": 0.023044129508007834,
"grad_norm": 0.5897171497344971,
"learning_rate": 0.0001892428378123718,
"loss": 0.1015,
"step": 150
},
{
"epoch": 0.02319775703806122,
"grad_norm": 0.39473700523376465,
"learning_rate": 0.0001891006524188368,
"loss": 0.0652,
"step": 151
},
{
"epoch": 0.023351384568114607,
"grad_norm": 0.5028777122497559,
"learning_rate": 0.00018895758763783383,
"loss": 0.0317,
"step": 152
},
{
"epoch": 0.023505012098167993,
"grad_norm": 0.2087724208831787,
"learning_rate": 0.00018881364488135448,
"loss": 0.0441,
"step": 153
},
{
"epoch": 0.023658639628221376,
"grad_norm": 0.3778011202812195,
"learning_rate": 0.00018866882557005567,
"loss": 0.0483,
"step": 154
},
{
"epoch": 0.023812267158274763,
"grad_norm": 2.1279866695404053,
"learning_rate": 0.00018852313113324552,
"loss": 0.0447,
"step": 155
},
{
"epoch": 0.02396589468832815,
"grad_norm": 1.2959928512573242,
"learning_rate": 0.00018837656300886937,
"loss": 0.1827,
"step": 156
},
{
"epoch": 0.024119522218381532,
"grad_norm": 0.4625653028488159,
"learning_rate": 0.00018822912264349534,
"loss": 0.0396,
"step": 157
},
{
"epoch": 0.02427314974843492,
"grad_norm": 0.4235970079898834,
"learning_rate": 0.00018808081149230036,
"loss": 0.1079,
"step": 158
},
{
"epoch": 0.024426777278488306,
"grad_norm": 0.784400999546051,
"learning_rate": 0.00018793163101905563,
"loss": 0.0804,
"step": 159
},
{
"epoch": 0.024580404808541692,
"grad_norm": 0.6724461317062378,
"learning_rate": 0.00018778158269611218,
"loss": 0.0688,
"step": 160
},
{
"epoch": 0.024734032338595075,
"grad_norm": 0.5144526362419128,
"learning_rate": 0.00018763066800438636,
"loss": 0.0255,
"step": 161
},
{
"epoch": 0.02488765986864846,
"grad_norm": 0.48256728053092957,
"learning_rate": 0.0001874788884333453,
"loss": 0.1009,
"step": 162
},
{
"epoch": 0.025041287398701848,
"grad_norm": 0.5391762852668762,
"learning_rate": 0.00018732624548099204,
"loss": 0.0976,
"step": 163
},
{
"epoch": 0.02519491492875523,
"grad_norm": 0.9985032677650452,
"learning_rate": 0.0001871727406538509,
"loss": 0.07,
"step": 164
},
{
"epoch": 0.025348542458808618,
"grad_norm": 0.7602483630180359,
"learning_rate": 0.0001870183754669526,
"loss": 0.1039,
"step": 165
},
{
"epoch": 0.025502169988862004,
"grad_norm": 0.32144758105278015,
"learning_rate": 0.00018686315144381913,
"loss": 0.076,
"step": 166
},
{
"epoch": 0.02565579751891539,
"grad_norm": 0.2595352232456207,
"learning_rate": 0.000186707070116449,
"loss": 0.0406,
"step": 167
},
{
"epoch": 0.025809425048968774,
"grad_norm": 0.10767323523759842,
"learning_rate": 0.0001865501330253019,
"loss": 0.0122,
"step": 168
},
{
"epoch": 0.02596305257902216,
"grad_norm": 0.38751021027565,
"learning_rate": 0.00018639234171928353,
"loss": 0.0897,
"step": 169
},
{
"epoch": 0.026116680109075547,
"grad_norm": 0.36105862259864807,
"learning_rate": 0.0001862336977557304,
"loss": 0.0371,
"step": 170
},
{
"epoch": 0.026270307639128933,
"grad_norm": 0.5462656021118164,
"learning_rate": 0.0001860742027003944,
"loss": 0.1149,
"step": 171
},
{
"epoch": 0.026423935169182317,
"grad_norm": 0.4504707157611847,
"learning_rate": 0.00018591385812742725,
"loss": 0.1002,
"step": 172
},
{
"epoch": 0.026577562699235703,
"grad_norm": 0.33760473132133484,
"learning_rate": 0.00018575266561936523,
"loss": 0.0405,
"step": 173
},
{
"epoch": 0.02673119022928909,
"grad_norm": 0.5172603130340576,
"learning_rate": 0.00018559062676711332,
"loss": 0.1152,
"step": 174
},
{
"epoch": 0.026884817759342473,
"grad_norm": 0.3163921535015106,
"learning_rate": 0.0001854277431699295,
"loss": 0.0841,
"step": 175
},
{
"epoch": 0.02703844528939586,
"grad_norm": 0.2841356694698334,
"learning_rate": 0.00018526401643540922,
"loss": 0.0727,
"step": 176
},
{
"epoch": 0.027192072819449246,
"grad_norm": 0.3015337586402893,
"learning_rate": 0.00018509944817946922,
"loss": 0.0892,
"step": 177
},
{
"epoch": 0.027345700349502632,
"grad_norm": 0.5001025199890137,
"learning_rate": 0.00018493404002633166,
"loss": 0.1096,
"step": 178
},
{
"epoch": 0.027499327879556015,
"grad_norm": 0.19355419278144836,
"learning_rate": 0.00018476779360850832,
"loss": 0.0428,
"step": 179
},
{
"epoch": 0.027652955409609402,
"grad_norm": 0.12335726618766785,
"learning_rate": 0.00018460071056678422,
"loss": 0.0357,
"step": 180
},
{
"epoch": 0.02780658293966279,
"grad_norm": 0.39522913098335266,
"learning_rate": 0.00018443279255020152,
"loss": 0.1,
"step": 181
},
{
"epoch": 0.02796021046971617,
"grad_norm": 0.7891809940338135,
"learning_rate": 0.00018426404121604323,
"loss": 0.0834,
"step": 182
},
{
"epoch": 0.028113837999769558,
"grad_norm": 0.6537089943885803,
"learning_rate": 0.00018409445822981693,
"loss": 0.098,
"step": 183
},
{
"epoch": 0.028267465529822944,
"grad_norm": 0.8604905009269714,
"learning_rate": 0.00018392404526523817,
"loss": 0.1043,
"step": 184
},
{
"epoch": 0.02842109305987633,
"grad_norm": 0.7672120332717896,
"learning_rate": 0.0001837528040042142,
"loss": 0.1319,
"step": 185
},
{
"epoch": 0.028574720589929714,
"grad_norm": 0.19023366272449493,
"learning_rate": 0.00018358073613682706,
"loss": 0.0418,
"step": 186
},
{
"epoch": 0.0287283481199831,
"grad_norm": 0.2923077344894409,
"learning_rate": 0.00018340784336131713,
"loss": 0.0999,
"step": 187
},
{
"epoch": 0.028881975650036487,
"grad_norm": 0.5535213947296143,
"learning_rate": 0.00018323412738406635,
"loss": 0.0833,
"step": 188
},
{
"epoch": 0.029035603180089874,
"grad_norm": 0.195821613073349,
"learning_rate": 0.00018305958991958127,
"loss": 0.0378,
"step": 189
},
{
"epoch": 0.029189230710143257,
"grad_norm": 0.34129655361175537,
"learning_rate": 0.0001828842326904762,
"loss": 0.0925,
"step": 190
},
{
"epoch": 0.029342858240196643,
"grad_norm": 0.45693081617355347,
"learning_rate": 0.00018270805742745617,
"loss": 0.0939,
"step": 191
},
{
"epoch": 0.02949648577025003,
"grad_norm": 2.0506601333618164,
"learning_rate": 0.00018253106586929997,
"loss": 0.1912,
"step": 192
},
{
"epoch": 0.029650113300303413,
"grad_norm": 0.15767133235931396,
"learning_rate": 0.00018235325976284275,
"loss": 0.0285,
"step": 193
},
{
"epoch": 0.0298037408303568,
"grad_norm": 0.2514001727104187,
"learning_rate": 0.00018217464086295904,
"loss": 0.0699,
"step": 194
},
{
"epoch": 0.029957368360410186,
"grad_norm": 0.2210742086172104,
"learning_rate": 0.00018199521093254523,
"loss": 0.0585,
"step": 195
},
{
"epoch": 0.030110995890463572,
"grad_norm": 0.29208460450172424,
"learning_rate": 0.00018181497174250236,
"loss": 0.0407,
"step": 196
},
{
"epoch": 0.030264623420516956,
"grad_norm": 0.325731486082077,
"learning_rate": 0.00018163392507171842,
"loss": 0.1293,
"step": 197
},
{
"epoch": 0.030418250950570342,
"grad_norm": 0.21301786601543427,
"learning_rate": 0.00018145207270705096,
"loss": 0.0669,
"step": 198
},
{
"epoch": 0.03057187848062373,
"grad_norm": 0.27570822834968567,
"learning_rate": 0.0001812694164433094,
"loss": 0.1013,
"step": 199
},
{
"epoch": 0.03072550601067711,
"grad_norm": 0.9570952653884888,
"learning_rate": 0.00018108595808323736,
"loss": 0.0765,
"step": 200
},
{
"epoch": 0.030879133540730498,
"grad_norm": 0.4946801960468292,
"learning_rate": 0.00018090169943749476,
"loss": 0.0848,
"step": 201
},
{
"epoch": 0.031032761070783885,
"grad_norm": 0.4584733843803406,
"learning_rate": 0.00018071664232464002,
"loss": 0.1413,
"step": 202
},
{
"epoch": 0.03118638860083727,
"grad_norm": 0.3248249888420105,
"learning_rate": 0.0001805307885711122,
"loss": 0.0644,
"step": 203
},
{
"epoch": 0.03134001613089066,
"grad_norm": 0.5062127113342285,
"learning_rate": 0.00018034414001121278,
"loss": 0.0975,
"step": 204
},
{
"epoch": 0.031493643660944044,
"grad_norm": 0.17189987003803253,
"learning_rate": 0.00018015669848708767,
"loss": 0.0545,
"step": 205
},
{
"epoch": 0.031647271190997424,
"grad_norm": 0.350965291261673,
"learning_rate": 0.00017996846584870908,
"loss": 0.0893,
"step": 206
},
{
"epoch": 0.03180089872105081,
"grad_norm": 0.20302747189998627,
"learning_rate": 0.0001797794439538571,
"loss": 0.0748,
"step": 207
},
{
"epoch": 0.0319545262511042,
"grad_norm": 0.24611130356788635,
"learning_rate": 0.0001795896346681016,
"loss": 0.0478,
"step": 208
},
{
"epoch": 0.03210815378115758,
"grad_norm": 0.23928868770599365,
"learning_rate": 0.00017939903986478355,
"loss": 0.0544,
"step": 209
},
{
"epoch": 0.03226178131121097,
"grad_norm": 0.3032814562320709,
"learning_rate": 0.00017920766142499672,
"loss": 0.0696,
"step": 210
},
{
"epoch": 0.032415408841264357,
"grad_norm": 0.3256490230560303,
"learning_rate": 0.00017901550123756906,
"loss": 0.0858,
"step": 211
},
{
"epoch": 0.03256903637131774,
"grad_norm": 0.7531484365463257,
"learning_rate": 0.00017882256119904403,
"loss": 0.055,
"step": 212
},
{
"epoch": 0.03272266390137112,
"grad_norm": 0.38031473755836487,
"learning_rate": 0.00017862884321366188,
"loss": 0.0845,
"step": 213
},
{
"epoch": 0.03287629143142451,
"grad_norm": 0.3779667615890503,
"learning_rate": 0.000178434349193341,
"loss": 0.0737,
"step": 214
},
{
"epoch": 0.033029918961477896,
"grad_norm": 0.27775999903678894,
"learning_rate": 0.0001782390810576588,
"loss": 0.0744,
"step": 215
},
{
"epoch": 0.03318354649153128,
"grad_norm": 0.24565085768699646,
"learning_rate": 0.000178043040733833,
"loss": 0.0432,
"step": 216
},
{
"epoch": 0.03333717402158467,
"grad_norm": 0.33276185393333435,
"learning_rate": 0.00017784623015670238,
"loss": 0.0911,
"step": 217
},
{
"epoch": 0.033490801551638055,
"grad_norm": 0.35384228825569153,
"learning_rate": 0.00017764865126870786,
"loss": 0.1031,
"step": 218
},
{
"epoch": 0.03364442908169144,
"grad_norm": 0.30936190485954285,
"learning_rate": 0.00017745030601987337,
"loss": 0.051,
"step": 219
},
{
"epoch": 0.03379805661174482,
"grad_norm": 0.4029456079006195,
"learning_rate": 0.00017725119636778644,
"loss": 0.0566,
"step": 220
},
{
"epoch": 0.03395168414179821,
"grad_norm": 0.253628134727478,
"learning_rate": 0.00017705132427757895,
"loss": 0.0361,
"step": 221
},
{
"epoch": 0.034105311671851594,
"grad_norm": 0.243647500872612,
"learning_rate": 0.00017685069172190766,
"loss": 0.0619,
"step": 222
},
{
"epoch": 0.03425893920190498,
"grad_norm": 0.5266451239585876,
"learning_rate": 0.00017664930068093498,
"loss": 0.1212,
"step": 223
},
{
"epoch": 0.03441256673195837,
"grad_norm": 0.3951440155506134,
"learning_rate": 0.00017644715314230918,
"loss": 0.0769,
"step": 224
},
{
"epoch": 0.034566194262011754,
"grad_norm": 0.27978089451789856,
"learning_rate": 0.0001762442511011448,
"loss": 0.0662,
"step": 225
},
{
"epoch": 0.03471982179206514,
"grad_norm": 0.43587276339530945,
"learning_rate": 0.0001760405965600031,
"loss": 0.1133,
"step": 226
},
{
"epoch": 0.03487344932211853,
"grad_norm": 0.23213185369968414,
"learning_rate": 0.0001758361915288722,
"loss": 0.0545,
"step": 227
},
{
"epoch": 0.03502707685217191,
"grad_norm": 0.3889080584049225,
"learning_rate": 0.0001756310380251472,
"loss": 0.0935,
"step": 228
},
{
"epoch": 0.03518070438222529,
"grad_norm": 0.25491082668304443,
"learning_rate": 0.00017542513807361037,
"loss": 0.0642,
"step": 229
},
{
"epoch": 0.03533433191227868,
"grad_norm": 0.3429853320121765,
"learning_rate": 0.00017521849370641114,
"loss": 0.0574,
"step": 230
},
{
"epoch": 0.035487959442332066,
"grad_norm": 0.14039883017539978,
"learning_rate": 0.00017501110696304596,
"loss": 0.0491,
"step": 231
},
{
"epoch": 0.03564158697238545,
"grad_norm": 0.3402402997016907,
"learning_rate": 0.00017480297989033825,
"loss": 0.0805,
"step": 232
},
{
"epoch": 0.03579521450243884,
"grad_norm": 0.3947245478630066,
"learning_rate": 0.00017459411454241822,
"loss": 0.0774,
"step": 233
},
{
"epoch": 0.035948842032492226,
"grad_norm": 0.32539045810699463,
"learning_rate": 0.00017438451298070252,
"loss": 0.0852,
"step": 234
},
{
"epoch": 0.036102469562545605,
"grad_norm": 0.3519819974899292,
"learning_rate": 0.00017417417727387394,
"loss": 0.0551,
"step": 235
},
{
"epoch": 0.03625609709259899,
"grad_norm": 0.5373510122299194,
"learning_rate": 0.000173963109497861,
"loss": 0.0895,
"step": 236
},
{
"epoch": 0.03640972462265238,
"grad_norm": 0.1669626384973526,
"learning_rate": 0.0001737513117358174,
"loss": 0.0408,
"step": 237
},
{
"epoch": 0.036563352152705765,
"grad_norm": 0.43224310874938965,
"learning_rate": 0.0001735387860781016,
"loss": 0.2075,
"step": 238
},
{
"epoch": 0.03671697968275915,
"grad_norm": 0.5993608236312866,
"learning_rate": 0.00017332553462225602,
"loss": 0.0862,
"step": 239
},
{
"epoch": 0.03687060721281254,
"grad_norm": 0.5543537735939026,
"learning_rate": 0.00017311155947298643,
"loss": 0.0552,
"step": 240
},
{
"epoch": 0.037024234742865925,
"grad_norm": 0.32902243733406067,
"learning_rate": 0.00017289686274214118,
"loss": 0.0619,
"step": 241
},
{
"epoch": 0.037177862272919304,
"grad_norm": 0.23019050061702728,
"learning_rate": 0.0001726814465486903,
"loss": 0.0534,
"step": 242
},
{
"epoch": 0.03733148980297269,
"grad_norm": 0.5031976699829102,
"learning_rate": 0.0001724653130187047,
"loss": 0.1041,
"step": 243
},
{
"epoch": 0.03748511733302608,
"grad_norm": 0.11503899842500687,
"learning_rate": 0.00017224846428533499,
"loss": 0.0233,
"step": 244
},
{
"epoch": 0.037638744863079464,
"grad_norm": 0.21917864680290222,
"learning_rate": 0.0001720309024887907,
"loss": 0.0593,
"step": 245
},
{
"epoch": 0.03779237239313285,
"grad_norm": 0.21906092762947083,
"learning_rate": 0.00017181262977631888,
"loss": 0.0601,
"step": 246
},
{
"epoch": 0.03794599992318624,
"grad_norm": 0.2885070741176605,
"learning_rate": 0.00017159364830218312,
"loss": 0.0504,
"step": 247
},
{
"epoch": 0.03809962745323962,
"grad_norm": 0.11525951325893402,
"learning_rate": 0.00017137396022764214,
"loss": 0.0184,
"step": 248
},
{
"epoch": 0.038253254983293,
"grad_norm": 0.3490850329399109,
"learning_rate": 0.00017115356772092857,
"loss": 0.1358,
"step": 249
},
{
"epoch": 0.03840688251334639,
"grad_norm": 0.26495930552482605,
"learning_rate": 0.0001709324729572274,
"loss": 0.0491,
"step": 250
},
{
"epoch": 0.038560510043399776,
"grad_norm": 0.4300294816493988,
"learning_rate": 0.00017071067811865476,
"loss": 0.0641,
"step": 251
},
{
"epoch": 0.03871413757345316,
"grad_norm": 0.2714085876941681,
"learning_rate": 0.00017048818539423615,
"loss": 0.095,
"step": 252
},
{
"epoch": 0.03886776510350655,
"grad_norm": 0.4457074999809265,
"learning_rate": 0.00017026499697988493,
"loss": 0.071,
"step": 253
},
{
"epoch": 0.039021392633559936,
"grad_norm": 0.521300733089447,
"learning_rate": 0.00017004111507838064,
"loss": 0.0829,
"step": 254
},
{
"epoch": 0.03917502016361332,
"grad_norm": 0.42931923270225525,
"learning_rate": 0.00016981654189934727,
"loss": 0.0873,
"step": 255
},
{
"epoch": 0.0393286476936667,
"grad_norm": 0.31839171051979065,
"learning_rate": 0.00016959127965923142,
"loss": 0.0791,
"step": 256
},
{
"epoch": 0.03948227522372009,
"grad_norm": 0.3187679350376129,
"learning_rate": 0.0001693653305812805,
"loss": 0.0444,
"step": 257
},
{
"epoch": 0.039635902753773475,
"grad_norm": 0.43490517139434814,
"learning_rate": 0.00016913869689552064,
"loss": 0.1723,
"step": 258
},
{
"epoch": 0.03978953028382686,
"grad_norm": 0.3070506751537323,
"learning_rate": 0.00016891138083873487,
"loss": 0.0454,
"step": 259
},
{
"epoch": 0.03994315781388025,
"grad_norm": 0.1700257807970047,
"learning_rate": 0.00016868338465444085,
"loss": 0.0382,
"step": 260
},
{
"epoch": 0.040096785343933634,
"grad_norm": 0.39158254861831665,
"learning_rate": 0.00016845471059286887,
"loss": 0.0846,
"step": 261
},
{
"epoch": 0.04025041287398702,
"grad_norm": 0.4131738841533661,
"learning_rate": 0.00016822536091093965,
"loss": 0.1023,
"step": 262
},
{
"epoch": 0.04040404040404041,
"grad_norm": 0.23972563445568085,
"learning_rate": 0.00016799533787224192,
"loss": 0.0464,
"step": 263
},
{
"epoch": 0.04055766793409379,
"grad_norm": 0.3113212585449219,
"learning_rate": 0.00016776464374701025,
"loss": 0.0636,
"step": 264
},
{
"epoch": 0.040711295464147174,
"grad_norm": 0.12121966481208801,
"learning_rate": 0.00016753328081210245,
"loss": 0.0293,
"step": 265
},
{
"epoch": 0.04086492299420056,
"grad_norm": 0.2930208146572113,
"learning_rate": 0.00016730125135097735,
"loss": 0.0955,
"step": 266
},
{
"epoch": 0.04101855052425395,
"grad_norm": 0.2109433263540268,
"learning_rate": 0.000167068557653672,
"loss": 0.0616,
"step": 267
},
{
"epoch": 0.04117217805430733,
"grad_norm": 0.2804703116416931,
"learning_rate": 0.0001668352020167793,
"loss": 0.0415,
"step": 268
},
{
"epoch": 0.04132580558436072,
"grad_norm": 0.4212825298309326,
"learning_rate": 0.00016660118674342517,
"loss": 0.0925,
"step": 269
},
{
"epoch": 0.041479433114414106,
"grad_norm": 0.2759626507759094,
"learning_rate": 0.00016636651414324587,
"loss": 0.0533,
"step": 270
},
{
"epoch": 0.041633060644467486,
"grad_norm": 0.2067808210849762,
"learning_rate": 0.00016613118653236518,
"loss": 0.0492,
"step": 271
},
{
"epoch": 0.04178668817452087,
"grad_norm": 0.2483331710100174,
"learning_rate": 0.0001658952062333717,
"loss": 0.049,
"step": 272
},
{
"epoch": 0.04194031570457426,
"grad_norm": 0.24180814623832703,
"learning_rate": 0.00016565857557529566,
"loss": 0.0449,
"step": 273
},
{
"epoch": 0.042093943234627645,
"grad_norm": 0.3256412744522095,
"learning_rate": 0.00016542129689358612,
"loss": 0.0497,
"step": 274
},
{
"epoch": 0.04224757076468103,
"grad_norm": 0.245501309633255,
"learning_rate": 0.0001651833725300879,
"loss": 0.0603,
"step": 275
},
{
"epoch": 0.04240119829473442,
"grad_norm": 0.25460636615753174,
"learning_rate": 0.00016494480483301836,
"loss": 0.047,
"step": 276
},
{
"epoch": 0.042554825824787805,
"grad_norm": 0.2094864845275879,
"learning_rate": 0.00016470559615694446,
"loss": 0.0556,
"step": 277
},
{
"epoch": 0.042708453354841185,
"grad_norm": 0.7049180865287781,
"learning_rate": 0.00016446574886275913,
"loss": 0.1005,
"step": 278
},
{
"epoch": 0.04286208088489457,
"grad_norm": 0.20705457031726837,
"learning_rate": 0.00016422526531765846,
"loss": 0.0849,
"step": 279
},
{
"epoch": 0.04301570841494796,
"grad_norm": 0.4032752811908722,
"learning_rate": 0.00016398414789511786,
"loss": 0.0995,
"step": 280
},
{
"epoch": 0.043169335945001344,
"grad_norm": 0.4395906627178192,
"learning_rate": 0.000163742398974869,
"loss": 0.0729,
"step": 281
},
{
"epoch": 0.04332296347505473,
"grad_norm": 0.19817042350769043,
"learning_rate": 0.00016350002094287609,
"loss": 0.0589,
"step": 282
},
{
"epoch": 0.04347659100510812,
"grad_norm": 0.2096862643957138,
"learning_rate": 0.00016325701619131246,
"loss": 0.0465,
"step": 283
},
{
"epoch": 0.043630218535161504,
"grad_norm": 0.3213491439819336,
"learning_rate": 0.00016301338711853693,
"loss": 0.0961,
"step": 284
},
{
"epoch": 0.04378384606521488,
"grad_norm": 0.39762693643569946,
"learning_rate": 0.00016276913612907007,
"loss": 0.0569,
"step": 285
},
{
"epoch": 0.04393747359526827,
"grad_norm": 0.15976397693157196,
"learning_rate": 0.00016252426563357055,
"loss": 0.033,
"step": 286
},
{
"epoch": 0.044091101125321656,
"grad_norm": 0.19210144877433777,
"learning_rate": 0.00016227877804881127,
"loss": 0.0466,
"step": 287
},
{
"epoch": 0.04424472865537504,
"grad_norm": 0.2829888164997101,
"learning_rate": 0.00016203267579765563,
"loss": 0.1227,
"step": 288
},
{
"epoch": 0.04439835618542843,
"grad_norm": 0.34546658396720886,
"learning_rate": 0.00016178596130903344,
"loss": 0.0929,
"step": 289
},
{
"epoch": 0.044551983715481816,
"grad_norm": 0.20140397548675537,
"learning_rate": 0.00016153863701791717,
"loss": 0.0288,
"step": 290
},
{
"epoch": 0.0447056112455352,
"grad_norm": 0.134894460439682,
"learning_rate": 0.00016129070536529766,
"loss": 0.0258,
"step": 291
},
{
"epoch": 0.04485923877558858,
"grad_norm": 0.31787165999412537,
"learning_rate": 0.00016104216879816026,
"loss": 0.0358,
"step": 292
},
{
"epoch": 0.04501286630564197,
"grad_norm": 0.15907998383045197,
"learning_rate": 0.00016079302976946055,
"loss": 0.0444,
"step": 293
},
{
"epoch": 0.045166493835695355,
"grad_norm": 0.24359673261642456,
"learning_rate": 0.00016054329073810015,
"loss": 0.0807,
"step": 294
},
{
"epoch": 0.04532012136574874,
"grad_norm": 0.2678831219673157,
"learning_rate": 0.00016029295416890248,
"loss": 0.0848,
"step": 295
},
{
"epoch": 0.04547374889580213,
"grad_norm": 0.4118996262550354,
"learning_rate": 0.00016004202253258842,
"loss": 0.0902,
"step": 296
},
{
"epoch": 0.045627376425855515,
"grad_norm": 0.17833048105239868,
"learning_rate": 0.0001597904983057519,
"loss": 0.0125,
"step": 297
},
{
"epoch": 0.0457810039559089,
"grad_norm": 0.5123883485794067,
"learning_rate": 0.00015953838397083552,
"loss": 0.1492,
"step": 298
},
{
"epoch": 0.04593463148596229,
"grad_norm": 0.4247935712337494,
"learning_rate": 0.00015928568201610595,
"loss": 0.0747,
"step": 299
},
{
"epoch": 0.04608825901601567,
"grad_norm": 0.32961946725845337,
"learning_rate": 0.00015903239493562948,
"loss": 0.0715,
"step": 300
},
{
"epoch": 0.046241886546069054,
"grad_norm": 0.17399932444095612,
"learning_rate": 0.00015877852522924732,
"loss": 0.0219,
"step": 301
},
{
"epoch": 0.04639551407612244,
"grad_norm": 0.21129140257835388,
"learning_rate": 0.00015852407540255104,
"loss": 0.0292,
"step": 302
},
{
"epoch": 0.04654914160617583,
"grad_norm": 0.22028450667858124,
"learning_rate": 0.00015826904796685762,
"loss": 0.0496,
"step": 303
},
{
"epoch": 0.046702769136229214,
"grad_norm": 0.45381373167037964,
"learning_rate": 0.00015801344543918495,
"loss": 0.1126,
"step": 304
},
{
"epoch": 0.0468563966662826,
"grad_norm": 0.23090185225009918,
"learning_rate": 0.00015775727034222675,
"loss": 0.0347,
"step": 305
},
{
"epoch": 0.04701002419633599,
"grad_norm": 0.4438919126987457,
"learning_rate": 0.00015750052520432787,
"loss": 0.1657,
"step": 306
},
{
"epoch": 0.047163651726389366,
"grad_norm": 0.21569062769412994,
"learning_rate": 0.0001572432125594591,
"loss": 0.0534,
"step": 307
},
{
"epoch": 0.04731727925644275,
"grad_norm": 0.26655662059783936,
"learning_rate": 0.00015698533494719238,
"loss": 0.0508,
"step": 308
},
{
"epoch": 0.04747090678649614,
"grad_norm": 0.2410474568605423,
"learning_rate": 0.00015672689491267567,
"loss": 0.0525,
"step": 309
},
{
"epoch": 0.047624534316549526,
"grad_norm": 0.5542080402374268,
"learning_rate": 0.00015646789500660773,
"loss": 0.0899,
"step": 310
},
{
"epoch": 0.04777816184660291,
"grad_norm": 0.2762816846370697,
"learning_rate": 0.00015620833778521307,
"loss": 0.0543,
"step": 311
},
{
"epoch": 0.0479317893766563,
"grad_norm": 0.11424004286527634,
"learning_rate": 0.0001559482258102167,
"loss": 0.0176,
"step": 312
},
{
"epoch": 0.048085416906709685,
"grad_norm": 0.2782093584537506,
"learning_rate": 0.00015568756164881882,
"loss": 0.0639,
"step": 313
},
{
"epoch": 0.048239044436763065,
"grad_norm": 0.15162143111228943,
"learning_rate": 0.00015542634787366942,
"loss": 0.0377,
"step": 314
},
{
"epoch": 0.04839267196681645,
"grad_norm": 0.27814531326293945,
"learning_rate": 0.00015516458706284303,
"loss": 0.0431,
"step": 315
},
{
"epoch": 0.04854629949686984,
"grad_norm": 0.5571882724761963,
"learning_rate": 0.0001549022817998132,
"loss": 0.0994,
"step": 316
},
{
"epoch": 0.048699927026923225,
"grad_norm": 0.2220301628112793,
"learning_rate": 0.00015463943467342693,
"loss": 0.045,
"step": 317
},
{
"epoch": 0.04885355455697661,
"grad_norm": 0.3094988167285919,
"learning_rate": 0.00015437604827787927,
"loss": 0.0612,
"step": 318
},
{
"epoch": 0.04900718208703,
"grad_norm": 0.24796605110168457,
"learning_rate": 0.00015411212521268758,
"loss": 0.0543,
"step": 319
},
{
"epoch": 0.049160809617083384,
"grad_norm": 0.4730873107910156,
"learning_rate": 0.00015384766808266602,
"loss": 0.0782,
"step": 320
},
{
"epoch": 0.049314437147136764,
"grad_norm": 0.2413204312324524,
"learning_rate": 0.00015358267949789966,
"loss": 0.0564,
"step": 321
},
{
"epoch": 0.04946806467719015,
"grad_norm": 0.2795536518096924,
"learning_rate": 0.00015331716207371888,
"loss": 0.0356,
"step": 322
},
{
"epoch": 0.04962169220724354,
"grad_norm": 0.4484255015850067,
"learning_rate": 0.0001530511184306734,
"loss": 0.0918,
"step": 323
},
{
"epoch": 0.04977531973729692,
"grad_norm": 0.2117013931274414,
"learning_rate": 0.00015278455119450664,
"loss": 0.0529,
"step": 324
},
{
"epoch": 0.04992894726735031,
"grad_norm": 0.3193001449108124,
"learning_rate": 0.0001525174629961296,
"loss": 0.0455,
"step": 325
},
{
"epoch": 0.050082574797403696,
"grad_norm": 0.2935343384742737,
"learning_rate": 0.0001522498564715949,
"loss": 0.0296,
"step": 326
},
{
"epoch": 0.05023620232745708,
"grad_norm": 0.287106454372406,
"learning_rate": 0.00015198173426207094,
"loss": 0.0411,
"step": 327
},
{
"epoch": 0.05038982985751046,
"grad_norm": 0.08104746788740158,
"learning_rate": 0.00015171309901381572,
"loss": 0.0074,
"step": 328
},
{
"epoch": 0.05054345738756385,
"grad_norm": 0.3838059902191162,
"learning_rate": 0.00015144395337815064,
"loss": 0.0804,
"step": 329
},
{
"epoch": 0.050697084917617236,
"grad_norm": 0.30714330077171326,
"learning_rate": 0.00015117430001143452,
"loss": 0.0763,
"step": 330
},
{
"epoch": 0.05085071244767062,
"grad_norm": 0.34072619676589966,
"learning_rate": 0.00015090414157503714,
"loss": 0.0546,
"step": 331
},
{
"epoch": 0.05100433997772401,
"grad_norm": 0.2532688081264496,
"learning_rate": 0.00015063348073531324,
"loss": 0.0561,
"step": 332
},
{
"epoch": 0.051157967507777395,
"grad_norm": 0.18965189158916473,
"learning_rate": 0.0001503623201635761,
"loss": 0.0324,
"step": 333
},
{
"epoch": 0.05131159503783078,
"grad_norm": 0.46736273169517517,
"learning_rate": 0.000150090662536071,
"loss": 0.115,
"step": 334
},
{
"epoch": 0.05146522256788417,
"grad_norm": 0.2359018176794052,
"learning_rate": 0.0001498185105339491,
"loss": 0.0356,
"step": 335
},
{
"epoch": 0.05161885009793755,
"grad_norm": 0.2292761504650116,
"learning_rate": 0.00014954586684324078,
"loss": 0.0261,
"step": 336
},
{
"epoch": 0.051772477627990934,
"grad_norm": 0.2411034256219864,
"learning_rate": 0.00014927273415482915,
"loss": 0.0393,
"step": 337
},
{
"epoch": 0.05192610515804432,
"grad_norm": 0.20834824442863464,
"learning_rate": 0.00014899911516442365,
"loss": 0.0526,
"step": 338
},
{
"epoch": 0.05207973268809771,
"grad_norm": 0.2317834049463272,
"learning_rate": 0.00014872501257253323,
"loss": 0.0642,
"step": 339
},
{
"epoch": 0.052233360218151094,
"grad_norm": 0.08348555862903595,
"learning_rate": 0.0001484504290844398,
"loss": 0.0057,
"step": 340
},
{
"epoch": 0.05238698774820448,
"grad_norm": 0.17602545022964478,
"learning_rate": 0.00014817536741017152,
"loss": 0.048,
"step": 341
},
{
"epoch": 0.05254061527825787,
"grad_norm": 0.24774643778800964,
"learning_rate": 0.00014789983026447612,
"loss": 0.0481,
"step": 342
},
{
"epoch": 0.05269424280831125,
"grad_norm": 0.18600314855575562,
"learning_rate": 0.0001476238203667939,
"loss": 0.0318,
"step": 343
},
{
"epoch": 0.05284787033836463,
"grad_norm": 0.2651784121990204,
"learning_rate": 0.0001473473404412312,
"loss": 0.046,
"step": 344
},
{
"epoch": 0.05300149786841802,
"grad_norm": 0.18483799695968628,
"learning_rate": 0.0001470703932165333,
"loss": 0.0362,
"step": 345
},
{
"epoch": 0.053155125398471406,
"grad_norm": 0.18730659782886505,
"learning_rate": 0.00014679298142605734,
"loss": 0.0154,
"step": 346
},
{
"epoch": 0.05330875292852479,
"grad_norm": 0.34418991208076477,
"learning_rate": 0.00014651510780774583,
"loss": 0.0701,
"step": 347
},
{
"epoch": 0.05346238045857818,
"grad_norm": 0.1924070417881012,
"learning_rate": 0.00014623677510409918,
"loss": 0.0618,
"step": 348
},
{
"epoch": 0.053616007988631566,
"grad_norm": 0.2839564085006714,
"learning_rate": 0.00014595798606214882,
"loss": 0.0619,
"step": 349
},
{
"epoch": 0.053769635518684945,
"grad_norm": 0.6963027119636536,
"learning_rate": 0.00014567874343342997,
"loss": 0.0528,
"step": 350
},
{
"epoch": 0.05392326304873833,
"grad_norm": 0.20343132317066193,
"learning_rate": 0.00014539904997395468,
"loss": 0.0254,
"step": 351
},
{
"epoch": 0.05407689057879172,
"grad_norm": 0.544143557548523,
"learning_rate": 0.00014511890844418453,
"loss": 0.0396,
"step": 352
},
{
"epoch": 0.054230518108845105,
"grad_norm": 0.35805410146713257,
"learning_rate": 0.00014483832160900326,
"loss": 0.093,
"step": 353
},
{
"epoch": 0.05438414563889849,
"grad_norm": 0.21492372453212738,
"learning_rate": 0.00014455729223768966,
"loss": 0.0296,
"step": 354
},
{
"epoch": 0.05453777316895188,
"grad_norm": 0.32545286417007446,
"learning_rate": 0.0001442758231038902,
"loss": 0.0669,
"step": 355
},
{
"epoch": 0.054691400699005265,
"grad_norm": 0.165277898311615,
"learning_rate": 0.00014399391698559152,
"loss": 0.0382,
"step": 356
},
{
"epoch": 0.054845028229058644,
"grad_norm": 0.164682075381279,
"learning_rate": 0.0001437115766650933,
"loss": 0.0184,
"step": 357
},
{
"epoch": 0.05499865575911203,
"grad_norm": 0.5814030766487122,
"learning_rate": 0.00014342880492898048,
"loss": 0.0548,
"step": 358
},
{
"epoch": 0.05515228328916542,
"grad_norm": 0.35537633299827576,
"learning_rate": 0.0001431456045680959,
"loss": 0.0461,
"step": 359
},
{
"epoch": 0.055305910819218804,
"grad_norm": 0.47674354910850525,
"learning_rate": 0.00014286197837751286,
"loss": 0.0607,
"step": 360
},
{
"epoch": 0.05545953834927219,
"grad_norm": 0.2724376618862152,
"learning_rate": 0.00014257792915650728,
"loss": 0.0665,
"step": 361
},
{
"epoch": 0.05561316587932558,
"grad_norm": 0.49017325043678284,
"learning_rate": 0.00014229345970853032,
"loss": 0.0595,
"step": 362
},
{
"epoch": 0.05576679340937896,
"grad_norm": 0.4462433159351349,
"learning_rate": 0.00014200857284118066,
"loss": 0.0813,
"step": 363
},
{
"epoch": 0.05592042093943234,
"grad_norm": 0.3304632008075714,
"learning_rate": 0.00014172327136617656,
"loss": 0.0744,
"step": 364
},
{
"epoch": 0.05607404846948573,
"grad_norm": 0.28009843826293945,
"learning_rate": 0.00014143755809932845,
"loss": 0.0448,
"step": 365
},
{
"epoch": 0.056227675999539116,
"grad_norm": 0.24038590490818024,
"learning_rate": 0.00014115143586051088,
"loss": 0.0439,
"step": 366
},
{
"epoch": 0.0563813035295925,
"grad_norm": 0.29066967964172363,
"learning_rate": 0.00014086490747363493,
"loss": 0.0519,
"step": 367
},
{
"epoch": 0.05653493105964589,
"grad_norm": 0.3668765425682068,
"learning_rate": 0.00014057797576662,
"loss": 0.067,
"step": 368
},
{
"epoch": 0.056688558589699276,
"grad_norm": 0.13805338740348816,
"learning_rate": 0.00014029064357136628,
"loss": 0.0228,
"step": 369
},
{
"epoch": 0.05684218611975266,
"grad_norm": 0.12917551398277283,
"learning_rate": 0.00014000291372372647,
"loss": 0.0068,
"step": 370
},
{
"epoch": 0.05699581364980605,
"grad_norm": 0.2294468879699707,
"learning_rate": 0.00013971478906347806,
"loss": 0.0476,
"step": 371
},
{
"epoch": 0.05714944117985943,
"grad_norm": 0.313985675573349,
"learning_rate": 0.00013942627243429512,
"loss": 0.088,
"step": 372
},
{
"epoch": 0.057303068709912815,
"grad_norm": 0.311063677072525,
"learning_rate": 0.00013913736668372026,
"loss": 0.0554,
"step": 373
},
{
"epoch": 0.0574566962399662,
"grad_norm": 0.20751863718032837,
"learning_rate": 0.00013884807466313663,
"loss": 0.0616,
"step": 374
},
{
"epoch": 0.05761032377001959,
"grad_norm": 0.34579184651374817,
"learning_rate": 0.00013855839922773968,
"loss": 0.0575,
"step": 375
},
{
"epoch": 0.057763951300072974,
"grad_norm": 0.35236915946006775,
"learning_rate": 0.000138268343236509,
"loss": 0.0398,
"step": 376
},
{
"epoch": 0.05791757883012636,
"grad_norm": 0.20263420045375824,
"learning_rate": 0.00013797790955218014,
"loss": 0.0204,
"step": 377
},
{
"epoch": 0.05807120636017975,
"grad_norm": 0.219743549823761,
"learning_rate": 0.00013768710104121627,
"loss": 0.0301,
"step": 378
},
{
"epoch": 0.05822483389023313,
"grad_norm": 0.3241548240184784,
"learning_rate": 0.00013739592057378003,
"loss": 0.0431,
"step": 379
},
{
"epoch": 0.058378461420286513,
"grad_norm": 0.3282346725463867,
"learning_rate": 0.0001371043710237051,
"loss": 0.0697,
"step": 380
},
{
"epoch": 0.0585320889503399,
"grad_norm": 0.5584028959274292,
"learning_rate": 0.00013681245526846783,
"loss": 0.0989,
"step": 381
},
{
"epoch": 0.05868571648039329,
"grad_norm": 0.2438870519399643,
"learning_rate": 0.0001365201761891588,
"loss": 0.0418,
"step": 382
},
{
"epoch": 0.05883934401044667,
"grad_norm": 0.269885390996933,
"learning_rate": 0.00013622753667045457,
"loss": 0.0321,
"step": 383
},
{
"epoch": 0.05899297154050006,
"grad_norm": 0.32022541761398315,
"learning_rate": 0.00013593453960058908,
"loss": 0.0536,
"step": 384
},
{
"epoch": 0.059146599070553446,
"grad_norm": 0.18096782267093658,
"learning_rate": 0.00013564118787132506,
"loss": 0.0193,
"step": 385
},
{
"epoch": 0.059300226600606826,
"grad_norm": 0.288135826587677,
"learning_rate": 0.00013534748437792573,
"loss": 0.0202,
"step": 386
},
{
"epoch": 0.05945385413066021,
"grad_norm": 0.2746815085411072,
"learning_rate": 0.0001350534320191259,
"loss": 0.0431,
"step": 387
},
{
"epoch": 0.0596074816607136,
"grad_norm": 0.16531936824321747,
"learning_rate": 0.0001347590336971037,
"loss": 0.0383,
"step": 388
},
{
"epoch": 0.059761109190766985,
"grad_norm": 0.3049300014972687,
"learning_rate": 0.0001344642923174517,
"loss": 0.0421,
"step": 389
},
{
"epoch": 0.05991473672082037,
"grad_norm": 0.36772802472114563,
"learning_rate": 0.00013416921078914835,
"loss": 0.1227,
"step": 390
},
{
"epoch": 0.06006836425087376,
"grad_norm": 0.12184538692235947,
"learning_rate": 0.00013387379202452917,
"loss": 0.0178,
"step": 391
},
{
"epoch": 0.060221991780927145,
"grad_norm": 0.3842985928058624,
"learning_rate": 0.00013357803893925807,
"loss": 0.0709,
"step": 392
},
{
"epoch": 0.060375619310980524,
"grad_norm": 0.10112696886062622,
"learning_rate": 0.00013328195445229868,
"loss": 0.0269,
"step": 393
},
{
"epoch": 0.06052924684103391,
"grad_norm": 0.15096262097358704,
"learning_rate": 0.00013298554148588528,
"loss": 0.036,
"step": 394
},
{
"epoch": 0.0606828743710873,
"grad_norm": 0.44363564252853394,
"learning_rate": 0.00013268880296549425,
"loss": 0.0386,
"step": 395
},
{
"epoch": 0.060836501901140684,
"grad_norm": 0.23141491413116455,
"learning_rate": 0.00013239174181981495,
"loss": 0.0475,
"step": 396
},
{
"epoch": 0.06099012943119407,
"grad_norm": 0.3385567367076874,
"learning_rate": 0.00013209436098072095,
"loss": 0.0641,
"step": 397
},
{
"epoch": 0.06114375696124746,
"grad_norm": 0.2965680658817291,
"learning_rate": 0.00013179666338324108,
"loss": 0.052,
"step": 398
},
{
"epoch": 0.061297384491300844,
"grad_norm": 0.2945363223552704,
"learning_rate": 0.0001314986519655305,
"loss": 0.0614,
"step": 399
},
{
"epoch": 0.06145101202135422,
"grad_norm": 0.36109647154808044,
"learning_rate": 0.0001312003296688415,
"loss": 0.093,
"step": 400
},
{
"epoch": 0.06160463955140761,
"grad_norm": 0.27289536595344543,
"learning_rate": 0.00013090169943749476,
"loss": 0.0377,
"step": 401
},
{
"epoch": 0.061758267081460996,
"grad_norm": 0.29473528265953064,
"learning_rate": 0.0001306027642188501,
"loss": 0.0569,
"step": 402
},
{
"epoch": 0.06191189461151438,
"grad_norm": 0.3701915144920349,
"learning_rate": 0.00013030352696327742,
"loss": 0.0631,
"step": 403
},
{
"epoch": 0.06206552214156777,
"grad_norm": 0.36468610167503357,
"learning_rate": 0.00013000399062412763,
"loss": 0.044,
"step": 404
},
{
"epoch": 0.062219149671621156,
"grad_norm": 0.43115100264549255,
"learning_rate": 0.0001297041581577035,
"loss": 0.0344,
"step": 405
},
{
"epoch": 0.06237277720167454,
"grad_norm": 0.08293578773736954,
"learning_rate": 0.0001294040325232304,
"loss": 0.0058,
"step": 406
},
{
"epoch": 0.06252640473172792,
"grad_norm": 0.6668057441711426,
"learning_rate": 0.00012910361668282719,
"loss": 0.0649,
"step": 407
},
{
"epoch": 0.06268003226178132,
"grad_norm": 0.544089674949646,
"learning_rate": 0.00012880291360147693,
"loss": 0.173,
"step": 408
},
{
"epoch": 0.0628336597918347,
"grad_norm": 0.33513277769088745,
"learning_rate": 0.0001285019262469976,
"loss": 0.0819,
"step": 409
},
{
"epoch": 0.06298728732188809,
"grad_norm": 0.4561985731124878,
"learning_rate": 0.00012820065759001293,
"loss": 0.0547,
"step": 410
},
{
"epoch": 0.06314091485194147,
"grad_norm": 0.3749920427799225,
"learning_rate": 0.00012789911060392294,
"loss": 0.0651,
"step": 411
},
{
"epoch": 0.06329454238199485,
"grad_norm": 0.23372085392475128,
"learning_rate": 0.0001275972882648746,
"loss": 0.0662,
"step": 412
},
{
"epoch": 0.06344816991204824,
"grad_norm": 0.4702025353908539,
"learning_rate": 0.00012729519355173254,
"loss": 0.1167,
"step": 413
},
{
"epoch": 0.06360179744210162,
"grad_norm": 0.5980305671691895,
"learning_rate": 0.00012699282944604967,
"loss": 0.0735,
"step": 414
},
{
"epoch": 0.06375542497215501,
"grad_norm": 0.5085580945014954,
"learning_rate": 0.00012669019893203759,
"loss": 0.059,
"step": 415
},
{
"epoch": 0.0639090525022084,
"grad_norm": 0.24032224714756012,
"learning_rate": 0.0001263873049965373,
"loss": 0.0485,
"step": 416
},
{
"epoch": 0.06406268003226179,
"grad_norm": 0.32948529720306396,
"learning_rate": 0.00012608415062898972,
"loss": 0.0412,
"step": 417
},
{
"epoch": 0.06421630756231517,
"grad_norm": 0.2689811885356903,
"learning_rate": 0.000125780738821406,
"loss": 0.0803,
"step": 418
},
{
"epoch": 0.06436993509236855,
"grad_norm": 0.18247628211975098,
"learning_rate": 0.00012547707256833823,
"loss": 0.0357,
"step": 419
},
{
"epoch": 0.06452356262242194,
"grad_norm": 0.3221795856952667,
"learning_rate": 0.00012517315486684972,
"loss": 0.0552,
"step": 420
},
{
"epoch": 0.06467719015247532,
"grad_norm": 0.4333018362522125,
"learning_rate": 0.0001248689887164855,
"loss": 0.0573,
"step": 421
},
{
"epoch": 0.06483081768252871,
"grad_norm": 0.2154187113046646,
"learning_rate": 0.00012456457711924266,
"loss": 0.0182,
"step": 422
},
{
"epoch": 0.06498444521258209,
"grad_norm": 0.11889325082302094,
"learning_rate": 0.00012425992307954075,
"loss": 0.0305,
"step": 423
},
{
"epoch": 0.06513807274263549,
"grad_norm": 0.1065244972705841,
"learning_rate": 0.0001239550296041922,
"loss": 0.0193,
"step": 424
},
{
"epoch": 0.06529170027268887,
"grad_norm": 0.4158475697040558,
"learning_rate": 0.00012364989970237248,
"loss": 0.0581,
"step": 425
},
{
"epoch": 0.06544532780274225,
"grad_norm": 0.25911587476730347,
"learning_rate": 0.00012334453638559057,
"loss": 0.0671,
"step": 426
},
{
"epoch": 0.06559895533279564,
"grad_norm": 0.2207699418067932,
"learning_rate": 0.00012303894266765908,
"loss": 0.0136,
"step": 427
},
{
"epoch": 0.06575258286284902,
"grad_norm": 0.29638662934303284,
"learning_rate": 0.00012273312156466464,
"loss": 0.074,
"step": 428
},
{
"epoch": 0.06590621039290241,
"grad_norm": 0.5546718239784241,
"learning_rate": 0.00012242707609493814,
"loss": 0.05,
"step": 429
},
{
"epoch": 0.06605983792295579,
"grad_norm": 0.18047912418842316,
"learning_rate": 0.00012212080927902474,
"loss": 0.04,
"step": 430
},
{
"epoch": 0.06621346545300918,
"grad_norm": 0.4788497984409332,
"learning_rate": 0.00012181432413965428,
"loss": 0.0891,
"step": 431
},
{
"epoch": 0.06636709298306256,
"grad_norm": 0.5535939335823059,
"learning_rate": 0.00012150762370171136,
"loss": 0.0851,
"step": 432
},
{
"epoch": 0.06652072051311594,
"grad_norm": 0.14160263538360596,
"learning_rate": 0.00012120071099220549,
"loss": 0.0233,
"step": 433
},
{
"epoch": 0.06667434804316934,
"grad_norm": 0.3702748715877533,
"learning_rate": 0.00012089358904024117,
"loss": 0.0669,
"step": 434
},
{
"epoch": 0.06682797557322272,
"grad_norm": 0.6442322731018066,
"learning_rate": 0.00012058626087698814,
"loss": 0.0724,
"step": 435
},
{
"epoch": 0.06698160310327611,
"grad_norm": 0.5247227549552917,
"learning_rate": 0.00012027872953565125,
"loss": 0.1368,
"step": 436
},
{
"epoch": 0.06713523063332949,
"grad_norm": 0.2273179143667221,
"learning_rate": 0.00011997099805144069,
"loss": 0.0576,
"step": 437
},
{
"epoch": 0.06728885816338288,
"grad_norm": 0.21384556591510773,
"learning_rate": 0.000119663069461542,
"loss": 0.0421,
"step": 438
},
{
"epoch": 0.06744248569343626,
"grad_norm": 0.2525802552700043,
"learning_rate": 0.00011935494680508606,
"loss": 0.0636,
"step": 439
},
{
"epoch": 0.06759611322348964,
"grad_norm": 0.5619597434997559,
"learning_rate": 0.00011904663312311901,
"loss": 0.0817,
"step": 440
},
{
"epoch": 0.06774974075354304,
"grad_norm": 0.29529374837875366,
"learning_rate": 0.00011873813145857249,
"loss": 0.0567,
"step": 441
},
{
"epoch": 0.06790336828359642,
"grad_norm": 0.3265819251537323,
"learning_rate": 0.00011842944485623335,
"loss": 0.0956,
"step": 442
},
{
"epoch": 0.06805699581364981,
"grad_norm": 0.2801079750061035,
"learning_rate": 0.00011812057636271374,
"loss": 0.0602,
"step": 443
},
{
"epoch": 0.06821062334370319,
"grad_norm": 0.226077601313591,
"learning_rate": 0.000117811529026421,
"loss": 0.0634,
"step": 444
},
{
"epoch": 0.06836425087375658,
"grad_norm": 0.29351064562797546,
"learning_rate": 0.00011750230589752762,
"loss": 0.0351,
"step": 445
},
{
"epoch": 0.06851787840380996,
"grad_norm": 0.1966533213853836,
"learning_rate": 0.00011719291002794096,
"loss": 0.0372,
"step": 446
},
{
"epoch": 0.06867150593386334,
"grad_norm": 0.23948480188846588,
"learning_rate": 0.00011688334447127338,
"loss": 0.043,
"step": 447
},
{
"epoch": 0.06882513346391674,
"grad_norm": 0.40046682953834534,
"learning_rate": 0.00011657361228281199,
"loss": 0.0637,
"step": 448
},
{
"epoch": 0.06897876099397011,
"grad_norm": 0.44718092679977417,
"learning_rate": 0.00011626371651948838,
"loss": 0.0763,
"step": 449
},
{
"epoch": 0.06913238852402351,
"grad_norm": 0.2273903787136078,
"learning_rate": 0.00011595366023984864,
"loss": 0.0998,
"step": 450
},
{
"epoch": 0.06928601605407689,
"grad_norm": 0.2031620293855667,
"learning_rate": 0.0001156434465040231,
"loss": 0.0523,
"step": 451
},
{
"epoch": 0.06943964358413028,
"grad_norm": 0.23378509283065796,
"learning_rate": 0.00011533307837369607,
"loss": 0.054,
"step": 452
},
{
"epoch": 0.06959327111418366,
"grad_norm": 0.12463603168725967,
"learning_rate": 0.00011502255891207572,
"loss": 0.0259,
"step": 453
},
{
"epoch": 0.06974689864423705,
"grad_norm": 0.6662011742591858,
"learning_rate": 0.00011471189118386375,
"loss": 0.0464,
"step": 454
},
{
"epoch": 0.06990052617429043,
"grad_norm": 0.2289581447839737,
"learning_rate": 0.00011440107825522521,
"loss": 0.0521,
"step": 455
},
{
"epoch": 0.07005415370434381,
"grad_norm": 0.4058312177658081,
"learning_rate": 0.00011409012319375827,
"loss": 0.085,
"step": 456
},
{
"epoch": 0.0702077812343972,
"grad_norm": 0.2459128201007843,
"learning_rate": 0.0001137790290684638,
"loss": 0.0369,
"step": 457
},
{
"epoch": 0.07036140876445059,
"grad_norm": 0.3200896680355072,
"learning_rate": 0.00011346779894971527,
"loss": 0.0919,
"step": 458
},
{
"epoch": 0.07051503629450398,
"grad_norm": 0.2388496845960617,
"learning_rate": 0.00011315643590922827,
"loss": 0.0326,
"step": 459
},
{
"epoch": 0.07066866382455736,
"grad_norm": 0.20864158868789673,
"learning_rate": 0.0001128449430200303,
"loss": 0.0307,
"step": 460
},
{
"epoch": 0.07082229135461075,
"grad_norm": 0.20150598883628845,
"learning_rate": 0.00011253332335643043,
"loss": 0.0492,
"step": 461
},
{
"epoch": 0.07097591888466413,
"grad_norm": 0.08483496308326721,
"learning_rate": 0.00011222157999398895,
"loss": 0.0115,
"step": 462
},
{
"epoch": 0.07112954641471751,
"grad_norm": 0.5694580674171448,
"learning_rate": 0.00011190971600948699,
"loss": 0.0998,
"step": 463
},
{
"epoch": 0.0712831739447709,
"grad_norm": 0.39275580644607544,
"learning_rate": 0.00011159773448089614,
"loss": 0.0532,
"step": 464
},
{
"epoch": 0.07143680147482429,
"grad_norm": 0.29319897294044495,
"learning_rate": 0.00011128563848734816,
"loss": 0.087,
"step": 465
},
{
"epoch": 0.07159042900487768,
"grad_norm": 0.3537808954715729,
"learning_rate": 0.00011097343110910452,
"loss": 0.0855,
"step": 466
},
{
"epoch": 0.07174405653493106,
"grad_norm": 0.6485673189163208,
"learning_rate": 0.000110661115427526,
"loss": 0.0812,
"step": 467
},
{
"epoch": 0.07189768406498445,
"grad_norm": 0.3742178976535797,
"learning_rate": 0.00011034869452504226,
"loss": 0.0582,
"step": 468
},
{
"epoch": 0.07205131159503783,
"grad_norm": 0.27808234095573425,
"learning_rate": 0.00011003617148512149,
"loss": 0.0871,
"step": 469
},
{
"epoch": 0.07220493912509121,
"grad_norm": 0.14937719702720642,
"learning_rate": 0.00010972354939223996,
"loss": 0.0177,
"step": 470
},
{
"epoch": 0.0723585666551446,
"grad_norm": 0.12721075117588043,
"learning_rate": 0.00010941083133185146,
"loss": 0.0191,
"step": 471
},
{
"epoch": 0.07251219418519798,
"grad_norm": 0.26675689220428467,
"learning_rate": 0.00010909802039035701,
"loss": 0.07,
"step": 472
},
{
"epoch": 0.07266582171525138,
"grad_norm": 0.36761045455932617,
"learning_rate": 0.00010878511965507434,
"loss": 0.0993,
"step": 473
},
{
"epoch": 0.07281944924530476,
"grad_norm": 0.4738931655883789,
"learning_rate": 0.00010847213221420736,
"loss": 0.0531,
"step": 474
},
{
"epoch": 0.07297307677535815,
"grad_norm": 0.2081698477268219,
"learning_rate": 0.00010815906115681578,
"loss": 0.0481,
"step": 475
},
{
"epoch": 0.07312670430541153,
"grad_norm": 0.2514849603176117,
"learning_rate": 0.0001078459095727845,
"loss": 0.0547,
"step": 476
},
{
"epoch": 0.07328033183546491,
"grad_norm": 0.1828579157590866,
"learning_rate": 0.00010753268055279329,
"loss": 0.0407,
"step": 477
},
{
"epoch": 0.0734339593655183,
"grad_norm": 0.2780367136001587,
"learning_rate": 0.0001072193771882861,
"loss": 0.0833,
"step": 478
},
{
"epoch": 0.07358758689557168,
"grad_norm": 0.17228709161281586,
"learning_rate": 0.00010690600257144061,
"loss": 0.0351,
"step": 479
},
{
"epoch": 0.07374121442562508,
"grad_norm": 0.13529503345489502,
"learning_rate": 0.0001065925597951378,
"loss": 0.0248,
"step": 480
},
{
"epoch": 0.07389484195567846,
"grad_norm": 0.2639714777469635,
"learning_rate": 0.00010627905195293135,
"loss": 0.0656,
"step": 481
},
{
"epoch": 0.07404846948573185,
"grad_norm": 0.19288095831871033,
"learning_rate": 0.00010596548213901708,
"loss": 0.0286,
"step": 482
},
{
"epoch": 0.07420209701578523,
"grad_norm": 0.18714022636413574,
"learning_rate": 0.00010565185344820247,
"loss": 0.0674,
"step": 483
},
{
"epoch": 0.07435572454583861,
"grad_norm": 0.025677742436528206,
"learning_rate": 0.00010533816897587606,
"loss": 0.0021,
"step": 484
},
{
"epoch": 0.074509352075892,
"grad_norm": 0.2747335433959961,
"learning_rate": 0.00010502443181797697,
"loss": 0.0539,
"step": 485
},
{
"epoch": 0.07466297960594538,
"grad_norm": 0.19186265766620636,
"learning_rate": 0.00010471064507096426,
"loss": 0.0282,
"step": 486
},
{
"epoch": 0.07481660713599877,
"grad_norm": 0.20964659750461578,
"learning_rate": 0.0001043968118317865,
"loss": 0.0452,
"step": 487
},
{
"epoch": 0.07497023466605215,
"grad_norm": 0.20152372121810913,
"learning_rate": 0.00010408293519785101,
"loss": 0.0471,
"step": 488
},
{
"epoch": 0.07512386219610555,
"grad_norm": 0.6125838160514832,
"learning_rate": 0.00010376901826699348,
"loss": 0.1388,
"step": 489
},
{
"epoch": 0.07527748972615893,
"grad_norm": 0.29963189363479614,
"learning_rate": 0.00010345506413744726,
"loss": 0.057,
"step": 490
},
{
"epoch": 0.07543111725621231,
"grad_norm": 0.4599164128303528,
"learning_rate": 0.00010314107590781284,
"loss": 0.0702,
"step": 491
},
{
"epoch": 0.0755847447862657,
"grad_norm": 0.15473169088363647,
"learning_rate": 0.00010282705667702734,
"loss": 0.0165,
"step": 492
},
{
"epoch": 0.07573837231631908,
"grad_norm": 0.3452393412590027,
"learning_rate": 0.00010251300954433376,
"loss": 0.0514,
"step": 493
},
{
"epoch": 0.07589199984637247,
"grad_norm": 0.36609262228012085,
"learning_rate": 0.00010219893760925052,
"loss": 0.0808,
"step": 494
},
{
"epoch": 0.07604562737642585,
"grad_norm": 0.3795943260192871,
"learning_rate": 0.00010188484397154084,
"loss": 0.0521,
"step": 495
},
{
"epoch": 0.07619925490647925,
"grad_norm": 0.38397809863090515,
"learning_rate": 0.00010157073173118208,
"loss": 0.0762,
"step": 496
},
{
"epoch": 0.07635288243653263,
"grad_norm": 0.32320910692214966,
"learning_rate": 0.00010125660398833528,
"loss": 0.038,
"step": 497
},
{
"epoch": 0.076506509966586,
"grad_norm": 0.16499805450439453,
"learning_rate": 0.00010094246384331442,
"loss": 0.029,
"step": 498
},
{
"epoch": 0.0766601374966394,
"grad_norm": 0.6236213445663452,
"learning_rate": 0.00010062831439655591,
"loss": 0.0632,
"step": 499
},
{
"epoch": 0.07681376502669278,
"grad_norm": 0.10359746217727661,
"learning_rate": 0.00010031415874858797,
"loss": 0.0152,
"step": 500
},
{
"epoch": 0.07696739255674617,
"grad_norm": 0.3780750036239624,
"learning_rate": 0.0001,
"loss": 0.0911,
"step": 501
},
{
"epoch": 0.07712102008679955,
"grad_norm": 0.39151090383529663,
"learning_rate": 9.968584125141204e-05,
"loss": 0.0533,
"step": 502
},
{
"epoch": 0.07727464761685295,
"grad_norm": 0.6179499626159668,
"learning_rate": 9.937168560344412e-05,
"loss": 0.0954,
"step": 503
},
{
"epoch": 0.07742827514690633,
"grad_norm": 0.21095699071884155,
"learning_rate": 9.90575361566856e-05,
"loss": 0.0364,
"step": 504
},
{
"epoch": 0.0775819026769597,
"grad_norm": 0.3666071593761444,
"learning_rate": 9.874339601166473e-05,
"loss": 0.0601,
"step": 505
},
{
"epoch": 0.0777355302070131,
"grad_norm": 0.22786995768547058,
"learning_rate": 9.842926826881796e-05,
"loss": 0.0467,
"step": 506
},
{
"epoch": 0.07788915773706648,
"grad_norm": 0.26915279030799866,
"learning_rate": 9.81151560284592e-05,
"loss": 0.0641,
"step": 507
},
{
"epoch": 0.07804278526711987,
"grad_norm": 0.34105777740478516,
"learning_rate": 9.78010623907495e-05,
"loss": 0.0932,
"step": 508
},
{
"epoch": 0.07819641279717325,
"grad_norm": 0.36050137877464294,
"learning_rate": 9.748699045566626e-05,
"loss": 0.0629,
"step": 509
},
{
"epoch": 0.07835004032722664,
"grad_norm": 0.1312742531299591,
"learning_rate": 9.717294332297268e-05,
"loss": 0.0147,
"step": 510
},
{
"epoch": 0.07850366785728002,
"grad_norm": 0.18004198372364044,
"learning_rate": 9.685892409218717e-05,
"loss": 0.0374,
"step": 511
},
{
"epoch": 0.0786572953873334,
"grad_norm": 0.27153411507606506,
"learning_rate": 9.654493586255278e-05,
"loss": 0.0576,
"step": 512
},
{
"epoch": 0.0788109229173868,
"grad_norm": 0.2748028635978699,
"learning_rate": 9.623098173300654e-05,
"loss": 0.0557,
"step": 513
},
{
"epoch": 0.07896455044744018,
"grad_norm": 0.31094884872436523,
"learning_rate": 9.591706480214901e-05,
"loss": 0.0315,
"step": 514
},
{
"epoch": 0.07911817797749357,
"grad_norm": 0.22636176645755768,
"learning_rate": 9.560318816821353e-05,
"loss": 0.0267,
"step": 515
},
{
"epoch": 0.07927180550754695,
"grad_norm": 0.6712326407432556,
"learning_rate": 9.528935492903575e-05,
"loss": 0.1466,
"step": 516
},
{
"epoch": 0.07942543303760034,
"grad_norm": 0.27219417691230774,
"learning_rate": 9.497556818202306e-05,
"loss": 0.0439,
"step": 517
},
{
"epoch": 0.07957906056765372,
"grad_norm": 0.3414011597633362,
"learning_rate": 9.466183102412395e-05,
"loss": 0.1083,
"step": 518
},
{
"epoch": 0.0797326880977071,
"grad_norm": 0.1752224564552307,
"learning_rate": 9.434814655179755e-05,
"loss": 0.0195,
"step": 519
},
{
"epoch": 0.0798863156277605,
"grad_norm": 0.17755602300167084,
"learning_rate": 9.403451786098294e-05,
"loss": 0.0178,
"step": 520
},
{
"epoch": 0.08003994315781388,
"grad_norm": 0.36306050419807434,
"learning_rate": 9.372094804706867e-05,
"loss": 0.0564,
"step": 521
},
{
"epoch": 0.08019357068786727,
"grad_norm": 0.13871534168720245,
"learning_rate": 9.340744020486222e-05,
"loss": 0.0091,
"step": 522
},
{
"epoch": 0.08034719821792065,
"grad_norm": 0.29350242018699646,
"learning_rate": 9.309399742855942e-05,
"loss": 0.0133,
"step": 523
},
{
"epoch": 0.08050082574797404,
"grad_norm": 0.0819118469953537,
"learning_rate": 9.278062281171393e-05,
"loss": 0.0119,
"step": 524
},
{
"epoch": 0.08065445327802742,
"grad_norm": 0.3594493269920349,
"learning_rate": 9.246731944720675e-05,
"loss": 0.0546,
"step": 525
},
{
"epoch": 0.08080808080808081,
"grad_norm": 0.10841453075408936,
"learning_rate": 9.215409042721552e-05,
"loss": 0.013,
"step": 526
},
{
"epoch": 0.0809617083381342,
"grad_norm": 0.16208495199680328,
"learning_rate": 9.184093884318425e-05,
"loss": 0.0296,
"step": 527
},
{
"epoch": 0.08111533586818757,
"grad_norm": 0.38196682929992676,
"learning_rate": 9.152786778579267e-05,
"loss": 0.0374,
"step": 528
},
{
"epoch": 0.08126896339824097,
"grad_norm": 0.37776368856430054,
"learning_rate": 9.121488034492569e-05,
"loss": 0.0827,
"step": 529
},
{
"epoch": 0.08142259092829435,
"grad_norm": 0.4797121584415436,
"learning_rate": 9.090197960964301e-05,
"loss": 0.0713,
"step": 530
},
{
"epoch": 0.08157621845834774,
"grad_norm": 0.14758506417274475,
"learning_rate": 9.058916866814858e-05,
"loss": 0.0279,
"step": 531
},
{
"epoch": 0.08172984598840112,
"grad_norm": 0.7103621959686279,
"learning_rate": 9.027645060776006e-05,
"loss": 0.0708,
"step": 532
},
{
"epoch": 0.08188347351845451,
"grad_norm": 0.3715115785598755,
"learning_rate": 8.99638285148785e-05,
"loss": 0.0657,
"step": 533
},
{
"epoch": 0.0820371010485079,
"grad_norm": 0.1888379454612732,
"learning_rate": 8.965130547495776e-05,
"loss": 0.055,
"step": 534
},
{
"epoch": 0.08219072857856127,
"grad_norm": 0.31988903880119324,
"learning_rate": 8.933888457247402e-05,
"loss": 0.0652,
"step": 535
},
{
"epoch": 0.08234435610861467,
"grad_norm": 0.4653870165348053,
"learning_rate": 8.902656889089548e-05,
"loss": 0.0997,
"step": 536
},
{
"epoch": 0.08249798363866805,
"grad_norm": 0.22763140499591827,
"learning_rate": 8.871436151265184e-05,
"loss": 0.0502,
"step": 537
},
{
"epoch": 0.08265161116872144,
"grad_norm": 0.1724206954240799,
"learning_rate": 8.840226551910387e-05,
"loss": 0.0582,
"step": 538
},
{
"epoch": 0.08280523869877482,
"grad_norm": 0.38404709100723267,
"learning_rate": 8.809028399051302e-05,
"loss": 0.0403,
"step": 539
},
{
"epoch": 0.08295886622882821,
"grad_norm": 0.3798343241214752,
"learning_rate": 8.777842000601105e-05,
"loss": 0.052,
"step": 540
},
{
"epoch": 0.08311249375888159,
"grad_norm": 0.2501683831214905,
"learning_rate": 8.746667664356956e-05,
"loss": 0.0462,
"step": 541
},
{
"epoch": 0.08326612128893497,
"grad_norm": 0.13855481147766113,
"learning_rate": 8.715505697996971e-05,
"loss": 0.0145,
"step": 542
},
{
"epoch": 0.08341974881898837,
"grad_norm": 0.24310243129730225,
"learning_rate": 8.684356409077176e-05,
"loss": 0.0588,
"step": 543
},
{
"epoch": 0.08357337634904174,
"grad_norm": 0.31051376461982727,
"learning_rate": 8.653220105028474e-05,
"loss": 0.0447,
"step": 544
},
{
"epoch": 0.08372700387909514,
"grad_norm": 0.229783833026886,
"learning_rate": 8.62209709315362e-05,
"loss": 0.0425,
"step": 545
},
{
"epoch": 0.08388063140914852,
"grad_norm": 0.1056528314948082,
"learning_rate": 8.590987680624174e-05,
"loss": 0.0074,
"step": 546
},
{
"epoch": 0.08403425893920191,
"grad_norm": 0.1544754058122635,
"learning_rate": 8.559892174477479e-05,
"loss": 0.0255,
"step": 547
},
{
"epoch": 0.08418788646925529,
"grad_norm": 0.4850304424762726,
"learning_rate": 8.528810881613626e-05,
"loss": 0.0647,
"step": 548
},
{
"epoch": 0.08434151399930867,
"grad_norm": 0.2984982430934906,
"learning_rate": 8.497744108792429e-05,
"loss": 0.0552,
"step": 549
},
{
"epoch": 0.08449514152936206,
"grad_norm": 0.1731100082397461,
"learning_rate": 8.466692162630392e-05,
"loss": 0.0196,
"step": 550
},
{
"epoch": 0.08464876905941544,
"grad_norm": 0.14767980575561523,
"learning_rate": 8.435655349597689e-05,
"loss": 0.0213,
"step": 551
},
{
"epoch": 0.08480239658946884,
"grad_norm": 0.36554577946662903,
"learning_rate": 8.404633976015134e-05,
"loss": 0.0751,
"step": 552
},
{
"epoch": 0.08495602411952222,
"grad_norm": 0.342966228723526,
"learning_rate": 8.373628348051165e-05,
"loss": 0.0343,
"step": 553
},
{
"epoch": 0.08510965164957561,
"grad_norm": 0.3607073724269867,
"learning_rate": 8.342638771718802e-05,
"loss": 0.0341,
"step": 554
},
{
"epoch": 0.08526327917962899,
"grad_norm": 0.6577918529510498,
"learning_rate": 8.311665552872662e-05,
"loss": 0.0955,
"step": 555
},
{
"epoch": 0.08541690670968237,
"grad_norm": 0.2845357656478882,
"learning_rate": 8.280708997205904e-05,
"loss": 0.0964,
"step": 556
},
{
"epoch": 0.08557053423973576,
"grad_norm": 0.32145005464553833,
"learning_rate": 8.249769410247239e-05,
"loss": 0.0622,
"step": 557
},
{
"epoch": 0.08572416176978914,
"grad_norm": 0.22209350764751434,
"learning_rate": 8.218847097357898e-05,
"loss": 0.0251,
"step": 558
},
{
"epoch": 0.08587778929984254,
"grad_norm": 0.5599908828735352,
"learning_rate": 8.187942363728625e-05,
"loss": 0.0555,
"step": 559
},
{
"epoch": 0.08603141682989592,
"grad_norm": 1.01771080493927,
"learning_rate": 8.157055514376666e-05,
"loss": 0.1571,
"step": 560
},
{
"epoch": 0.08618504435994931,
"grad_norm": 0.18217921257019043,
"learning_rate": 8.126186854142752e-05,
"loss": 0.0423,
"step": 561
},
{
"epoch": 0.08633867189000269,
"grad_norm": 0.3009016513824463,
"learning_rate": 8.095336687688102e-05,
"loss": 0.0728,
"step": 562
},
{
"epoch": 0.08649229942005607,
"grad_norm": 0.37825891375541687,
"learning_rate": 8.064505319491398e-05,
"loss": 0.0568,
"step": 563
},
{
"epoch": 0.08664592695010946,
"grad_norm": 0.29226231575012207,
"learning_rate": 8.033693053845801e-05,
"loss": 0.0547,
"step": 564
},
{
"epoch": 0.08679955448016284,
"grad_norm": 0.5288081169128418,
"learning_rate": 8.002900194855932e-05,
"loss": 0.0437,
"step": 565
},
{
"epoch": 0.08695318201021623,
"grad_norm": 0.34164926409721375,
"learning_rate": 7.972127046434878e-05,
"loss": 0.0495,
"step": 566
},
{
"epoch": 0.08710680954026961,
"grad_norm": 0.14765234291553497,
"learning_rate": 7.941373912301189e-05,
"loss": 0.0234,
"step": 567
},
{
"epoch": 0.08726043707032301,
"grad_norm": 0.28211361169815063,
"learning_rate": 7.910641095975886e-05,
"loss": 0.0558,
"step": 568
},
{
"epoch": 0.08741406460037639,
"grad_norm": 0.29715636372566223,
"learning_rate": 7.879928900779456e-05,
"loss": 0.0751,
"step": 569
},
{
"epoch": 0.08756769213042977,
"grad_norm": 0.21927212178707123,
"learning_rate": 7.849237629828869e-05,
"loss": 0.0355,
"step": 570
},
{
"epoch": 0.08772131966048316,
"grad_norm": 0.29238730669021606,
"learning_rate": 7.818567586034577e-05,
"loss": 0.0387,
"step": 571
},
{
"epoch": 0.08787494719053654,
"grad_norm": 0.22075101733207703,
"learning_rate": 7.787919072097531e-05,
"loss": 0.0336,
"step": 572
},
{
"epoch": 0.08802857472058993,
"grad_norm": 0.36297523975372314,
"learning_rate": 7.75729239050619e-05,
"loss": 0.0583,
"step": 573
},
{
"epoch": 0.08818220225064331,
"grad_norm": 0.4085044264793396,
"learning_rate": 7.726687843533538e-05,
"loss": 0.0962,
"step": 574
},
{
"epoch": 0.0883358297806967,
"grad_norm": 0.26279351115226746,
"learning_rate": 7.696105733234098e-05,
"loss": 0.0454,
"step": 575
},
{
"epoch": 0.08848945731075009,
"grad_norm": 0.3345972001552582,
"learning_rate": 7.66554636144095e-05,
"loss": 0.103,
"step": 576
},
{
"epoch": 0.08864308484080347,
"grad_norm": 0.16322675347328186,
"learning_rate": 7.635010029762756e-05,
"loss": 0.0226,
"step": 577
},
{
"epoch": 0.08879671237085686,
"grad_norm": 0.3042701780796051,
"learning_rate": 7.604497039580785e-05,
"loss": 0.0861,
"step": 578
},
{
"epoch": 0.08895033990091024,
"grad_norm": 0.6168767213821411,
"learning_rate": 7.574007692045928e-05,
"loss": 0.0366,
"step": 579
},
{
"epoch": 0.08910396743096363,
"grad_norm": 0.12984946370124817,
"learning_rate": 7.543542288075739e-05,
"loss": 0.0156,
"step": 580
},
{
"epoch": 0.08925759496101701,
"grad_norm": 0.29632243514060974,
"learning_rate": 7.513101128351454e-05,
"loss": 0.0617,
"step": 581
},
{
"epoch": 0.0894112224910704,
"grad_norm": 0.25821030139923096,
"learning_rate": 7.48268451331503e-05,
"loss": 0.0462,
"step": 582
},
{
"epoch": 0.08956485002112378,
"grad_norm": 0.47066113352775574,
"learning_rate": 7.45229274316618e-05,
"loss": 0.0534,
"step": 583
},
{
"epoch": 0.08971847755117716,
"grad_norm": 0.3067626953125,
"learning_rate": 7.421926117859403e-05,
"loss": 0.0519,
"step": 584
},
{
"epoch": 0.08987210508123056,
"grad_norm": 0.3371565341949463,
"learning_rate": 7.391584937101033e-05,
"loss": 0.0868,
"step": 585
},
{
"epoch": 0.09002573261128394,
"grad_norm": 0.2494596540927887,
"learning_rate": 7.361269500346274e-05,
"loss": 0.0378,
"step": 586
},
{
"epoch": 0.09017936014133733,
"grad_norm": 0.15213797986507416,
"learning_rate": 7.330980106796246e-05,
"loss": 0.0232,
"step": 587
},
{
"epoch": 0.09033298767139071,
"grad_norm": 0.28482556343078613,
"learning_rate": 7.300717055395039e-05,
"loss": 0.0666,
"step": 588
},
{
"epoch": 0.0904866152014441,
"grad_norm": 0.2553330957889557,
"learning_rate": 7.270480644826749e-05,
"loss": 0.035,
"step": 589
},
{
"epoch": 0.09064024273149748,
"grad_norm": 0.20339122414588928,
"learning_rate": 7.240271173512546e-05,
"loss": 0.0495,
"step": 590
},
{
"epoch": 0.09079387026155086,
"grad_norm": 0.27367886900901794,
"learning_rate": 7.210088939607708e-05,
"loss": 0.0357,
"step": 591
},
{
"epoch": 0.09094749779160426,
"grad_norm": 0.26410624384880066,
"learning_rate": 7.179934240998706e-05,
"loss": 0.0613,
"step": 592
},
{
"epoch": 0.09110112532165764,
"grad_norm": 0.20462387800216675,
"learning_rate": 7.149807375300239e-05,
"loss": 0.0464,
"step": 593
},
{
"epoch": 0.09125475285171103,
"grad_norm": 0.4443984627723694,
"learning_rate": 7.119708639852312e-05,
"loss": 0.0456,
"step": 594
},
{
"epoch": 0.09140838038176441,
"grad_norm": 0.5332699418067932,
"learning_rate": 7.089638331717284e-05,
"loss": 0.0453,
"step": 595
},
{
"epoch": 0.0915620079118178,
"grad_norm": 0.3862667679786682,
"learning_rate": 7.059596747676962e-05,
"loss": 0.0649,
"step": 596
},
{
"epoch": 0.09171563544187118,
"grad_norm": 0.2426959127187729,
"learning_rate": 7.029584184229653e-05,
"loss": 0.0813,
"step": 597
},
{
"epoch": 0.09186926297192458,
"grad_norm": 0.17714688181877136,
"learning_rate": 6.999600937587239e-05,
"loss": 0.0312,
"step": 598
},
{
"epoch": 0.09202289050197796,
"grad_norm": 0.3105527460575104,
"learning_rate": 6.969647303672262e-05,
"loss": 0.0691,
"step": 599
},
{
"epoch": 0.09217651803203133,
"grad_norm": 0.13272526860237122,
"learning_rate": 6.939723578114993e-05,
"loss": 0.0154,
"step": 600
},
{
"epoch": 0.09233014556208473,
"grad_norm": 0.2830723226070404,
"learning_rate": 6.909830056250527e-05,
"loss": 0.0271,
"step": 601
},
{
"epoch": 0.09248377309213811,
"grad_norm": 0.6450408101081848,
"learning_rate": 6.879967033115853e-05,
"loss": 0.0733,
"step": 602
},
{
"epoch": 0.0926374006221915,
"grad_norm": 0.06351689249277115,
"learning_rate": 6.850134803446954e-05,
"loss": 0.0067,
"step": 603
},
{
"epoch": 0.09279102815224488,
"grad_norm": 0.34038659930229187,
"learning_rate": 6.820333661675893e-05,
"loss": 0.0261,
"step": 604
},
{
"epoch": 0.09294465568229827,
"grad_norm": 0.2655782699584961,
"learning_rate": 6.790563901927907e-05,
"loss": 0.0289,
"step": 605
},
{
"epoch": 0.09309828321235165,
"grad_norm": 0.37365785241127014,
"learning_rate": 6.760825818018508e-05,
"loss": 0.0172,
"step": 606
},
{
"epoch": 0.09325191074240503,
"grad_norm": 0.5386453866958618,
"learning_rate": 6.731119703450577e-05,
"loss": 0.0884,
"step": 607
},
{
"epoch": 0.09340553827245843,
"grad_norm": 0.19912388920783997,
"learning_rate": 6.701445851411472e-05,
"loss": 0.0121,
"step": 608
},
{
"epoch": 0.0935591658025118,
"grad_norm": 0.8350620865821838,
"learning_rate": 6.671804554770135e-05,
"loss": 0.0658,
"step": 609
},
{
"epoch": 0.0937127933325652,
"grad_norm": 0.20652072131633759,
"learning_rate": 6.642196106074194e-05,
"loss": 0.0382,
"step": 610
},
{
"epoch": 0.09386642086261858,
"grad_norm": 0.20942322909832,
"learning_rate": 6.612620797547087e-05,
"loss": 0.0269,
"step": 611
},
{
"epoch": 0.09402004839267197,
"grad_norm": 0.27834099531173706,
"learning_rate": 6.583078921085167e-05,
"loss": 0.0448,
"step": 612
},
{
"epoch": 0.09417367592272535,
"grad_norm": 0.2429392784833908,
"learning_rate": 6.55357076825483e-05,
"loss": 0.0176,
"step": 613
},
{
"epoch": 0.09432730345277873,
"grad_norm": 0.23163901269435883,
"learning_rate": 6.52409663028963e-05,
"loss": 0.0339,
"step": 614
},
{
"epoch": 0.09448093098283213,
"grad_norm": 0.32905009388923645,
"learning_rate": 6.494656798087412e-05,
"loss": 0.021,
"step": 615
},
{
"epoch": 0.0946345585128855,
"grad_norm": 0.8219784498214722,
"learning_rate": 6.465251562207431e-05,
"loss": 0.0627,
"step": 616
},
{
"epoch": 0.0947881860429389,
"grad_norm": 0.17754356563091278,
"learning_rate": 6.435881212867493e-05,
"loss": 0.0405,
"step": 617
},
{
"epoch": 0.09494181357299228,
"grad_norm": 0.6421652436256409,
"learning_rate": 6.406546039941094e-05,
"loss": 0.1034,
"step": 618
},
{
"epoch": 0.09509544110304567,
"grad_norm": 0.3092856705188751,
"learning_rate": 6.377246332954544e-05,
"loss": 0.0332,
"step": 619
},
{
"epoch": 0.09524906863309905,
"grad_norm": 0.16882169246673584,
"learning_rate": 6.347982381084123e-05,
"loss": 0.0066,
"step": 620
},
{
"epoch": 0.09540269616315243,
"grad_norm": 0.39279523491859436,
"learning_rate": 6.318754473153221e-05,
"loss": 0.0557,
"step": 621
},
{
"epoch": 0.09555632369320582,
"grad_norm": 0.8362782597541809,
"learning_rate": 6.289562897629492e-05,
"loss": 0.0547,
"step": 622
},
{
"epoch": 0.0957099512232592,
"grad_norm": 0.43888476490974426,
"learning_rate": 6.260407942621998e-05,
"loss": 0.0923,
"step": 623
},
{
"epoch": 0.0958635787533126,
"grad_norm": 0.5503817796707153,
"learning_rate": 6.231289895878375e-05,
"loss": 0.0323,
"step": 624
},
{
"epoch": 0.09601720628336598,
"grad_norm": 0.13516651093959808,
"learning_rate": 6.20220904478199e-05,
"loss": 0.0156,
"step": 625
},
{
"epoch": 0.09617083381341937,
"grad_norm": 0.09748262912034988,
"learning_rate": 6.173165676349103e-05,
"loss": 0.0073,
"step": 626
},
{
"epoch": 0.09632446134347275,
"grad_norm": 0.310822069644928,
"learning_rate": 6.144160077226036e-05,
"loss": 0.0641,
"step": 627
},
{
"epoch": 0.09647808887352613,
"grad_norm": 0.5397434830665588,
"learning_rate": 6.11519253368634e-05,
"loss": 0.0696,
"step": 628
},
{
"epoch": 0.09663171640357952,
"grad_norm": 0.6019011735916138,
"learning_rate": 6.086263331627976e-05,
"loss": 0.0704,
"step": 629
},
{
"epoch": 0.0967853439336329,
"grad_norm": 0.2008989155292511,
"learning_rate": 6.05737275657049e-05,
"loss": 0.01,
"step": 630
},
{
"epoch": 0.0969389714636863,
"grad_norm": 0.43931102752685547,
"learning_rate": 6.0285210936521955e-05,
"loss": 0.0423,
"step": 631
},
{
"epoch": 0.09709259899373968,
"grad_norm": 0.06365258246660233,
"learning_rate": 5.999708627627354e-05,
"loss": 0.004,
"step": 632
},
{
"epoch": 0.09724622652379307,
"grad_norm": 0.32905471324920654,
"learning_rate": 5.9709356428633746e-05,
"loss": 0.0556,
"step": 633
},
{
"epoch": 0.09739985405384645,
"grad_norm": 0.44533899426460266,
"learning_rate": 5.9422024233380013e-05,
"loss": 0.0948,
"step": 634
},
{
"epoch": 0.09755348158389983,
"grad_norm": 0.21245959401130676,
"learning_rate": 5.913509252636511e-05,
"loss": 0.0163,
"step": 635
},
{
"epoch": 0.09770710911395322,
"grad_norm": 0.27286478877067566,
"learning_rate": 5.884856413948913e-05,
"loss": 0.0484,
"step": 636
},
{
"epoch": 0.0978607366440066,
"grad_norm": 0.2064603865146637,
"learning_rate": 5.856244190067159e-05,
"loss": 0.0387,
"step": 637
},
{
"epoch": 0.09801436417406,
"grad_norm": 0.44962942600250244,
"learning_rate": 5.82767286338235e-05,
"loss": 0.0544,
"step": 638
},
{
"epoch": 0.09816799170411337,
"grad_norm": 0.25784626603126526,
"learning_rate": 5.799142715881938e-05,
"loss": 0.0379,
"step": 639
},
{
"epoch": 0.09832161923416677,
"grad_norm": 0.10226144641637802,
"learning_rate": 5.770654029146969e-05,
"loss": 0.0097,
"step": 640
},
{
"epoch": 0.09847524676422015,
"grad_norm": 0.17848530411720276,
"learning_rate": 5.7422070843492734e-05,
"loss": 0.0115,
"step": 641
},
{
"epoch": 0.09862887429427353,
"grad_norm": 0.2896134555339813,
"learning_rate": 5.713802162248718e-05,
"loss": 0.0294,
"step": 642
},
{
"epoch": 0.09878250182432692,
"grad_norm": 0.40836718678474426,
"learning_rate": 5.6854395431904094e-05,
"loss": 0.041,
"step": 643
},
{
"epoch": 0.0989361293543803,
"grad_norm": 0.20250079035758972,
"learning_rate": 5.657119507101954e-05,
"loss": 0.0147,
"step": 644
},
{
"epoch": 0.0990897568844337,
"grad_norm": 0.20654729008674622,
"learning_rate": 5.6288423334906735e-05,
"loss": 0.0284,
"step": 645
},
{
"epoch": 0.09924338441448707,
"grad_norm": 0.330913782119751,
"learning_rate": 5.6006083014408484e-05,
"loss": 0.0343,
"step": 646
},
{
"epoch": 0.09939701194454047,
"grad_norm": 0.3797610104084015,
"learning_rate": 5.572417689610987e-05,
"loss": 0.0448,
"step": 647
},
{
"epoch": 0.09955063947459385,
"grad_norm": 0.15866832435131073,
"learning_rate": 5.544270776231038e-05,
"loss": 0.014,
"step": 648
},
{
"epoch": 0.09970426700464723,
"grad_norm": 0.2744625210762024,
"learning_rate": 5.5161678390996796e-05,
"loss": 0.0277,
"step": 649
},
{
"epoch": 0.09985789453470062,
"grad_norm": 0.5866727232933044,
"learning_rate": 5.488109155581549e-05,
"loss": 0.0358,
"step": 650
},
{
"epoch": 0.100011522064754,
"grad_norm": 0.15096555650234222,
"learning_rate": 5.4600950026045326e-05,
"loss": 0.0229,
"step": 651
},
{
"epoch": 0.10016514959480739,
"grad_norm": 0.4028722643852234,
"learning_rate": 5.4321256566570036e-05,
"loss": 0.0882,
"step": 652
},
{
"epoch": 0.10031877712486077,
"grad_norm": 0.295682817697525,
"learning_rate": 5.404201393785122e-05,
"loss": 0.08,
"step": 653
},
{
"epoch": 0.10047240465491417,
"grad_norm": 0.4358142614364624,
"learning_rate": 5.3763224895900846e-05,
"loss": 0.0386,
"step": 654
},
{
"epoch": 0.10062603218496755,
"grad_norm": 0.12598295509815216,
"learning_rate": 5.348489219225416e-05,
"loss": 0.0148,
"step": 655
},
{
"epoch": 0.10077965971502093,
"grad_norm": 0.5864266157150269,
"learning_rate": 5.320701857394268e-05,
"loss": 0.0485,
"step": 656
},
{
"epoch": 0.10093328724507432,
"grad_norm": 0.7225477695465088,
"learning_rate": 5.292960678346675e-05,
"loss": 0.0378,
"step": 657
},
{
"epoch": 0.1010869147751277,
"grad_norm": 0.4586561322212219,
"learning_rate": 5.265265955876879e-05,
"loss": 0.0218,
"step": 658
},
{
"epoch": 0.10124054230518109,
"grad_norm": 0.6800941228866577,
"learning_rate": 5.237617963320608e-05,
"loss": 0.0638,
"step": 659
},
{
"epoch": 0.10139416983523447,
"grad_norm": 0.15682150423526764,
"learning_rate": 5.210016973552391e-05,
"loss": 0.022,
"step": 660
},
{
"epoch": 0.10154779736528786,
"grad_norm": 0.2382485568523407,
"learning_rate": 5.182463258982846e-05,
"loss": 0.0145,
"step": 661
},
{
"epoch": 0.10170142489534124,
"grad_norm": 0.29511716961860657,
"learning_rate": 5.1549570915560206e-05,
"loss": 0.0351,
"step": 662
},
{
"epoch": 0.10185505242539462,
"grad_norm": 0.5219296813011169,
"learning_rate": 5.127498742746675e-05,
"loss": 0.0274,
"step": 663
},
{
"epoch": 0.10200867995544802,
"grad_norm": 0.23303106427192688,
"learning_rate": 5.100088483557634e-05,
"loss": 0.0387,
"step": 664
},
{
"epoch": 0.1021623074855014,
"grad_norm": 0.3924555778503418,
"learning_rate": 5.072726584517086e-05,
"loss": 0.0661,
"step": 665
},
{
"epoch": 0.10231593501555479,
"grad_norm": 0.3202173411846161,
"learning_rate": 5.045413315675924e-05,
"loss": 0.0549,
"step": 666
},
{
"epoch": 0.10246956254560817,
"grad_norm": 0.20926810801029205,
"learning_rate": 5.018148946605092e-05,
"loss": 0.0399,
"step": 667
},
{
"epoch": 0.10262319007566156,
"grad_norm": 0.41968145966529846,
"learning_rate": 4.990933746392899e-05,
"loss": 0.0278,
"step": 668
},
{
"epoch": 0.10277681760571494,
"grad_norm": 0.21604523062705994,
"learning_rate": 4.9637679836423924e-05,
"loss": 0.0193,
"step": 669
},
{
"epoch": 0.10293044513576834,
"grad_norm": 0.159501850605011,
"learning_rate": 4.9366519264686725e-05,
"loss": 0.0162,
"step": 670
},
{
"epoch": 0.10308407266582172,
"grad_norm": 0.26928532123565674,
"learning_rate": 4.909585842496287e-05,
"loss": 0.0279,
"step": 671
},
{
"epoch": 0.1032377001958751,
"grad_norm": 0.22564271092414856,
"learning_rate": 4.8825699988565485e-05,
"loss": 0.0273,
"step": 672
},
{
"epoch": 0.10339132772592849,
"grad_norm": 0.15354351699352264,
"learning_rate": 4.8556046621849346e-05,
"loss": 0.0088,
"step": 673
},
{
"epoch": 0.10354495525598187,
"grad_norm": 0.20060743391513824,
"learning_rate": 4.828690098618429e-05,
"loss": 0.0201,
"step": 674
},
{
"epoch": 0.10369858278603526,
"grad_norm": 0.35412031412124634,
"learning_rate": 4.8018265737929044e-05,
"loss": 0.0234,
"step": 675
},
{
"epoch": 0.10385221031608864,
"grad_norm": 0.5052732825279236,
"learning_rate": 4.7750143528405126e-05,
"loss": 0.0822,
"step": 676
},
{
"epoch": 0.10400583784614204,
"grad_norm": 0.36089015007019043,
"learning_rate": 4.748253700387042e-05,
"loss": 0.0615,
"step": 677
},
{
"epoch": 0.10415946537619541,
"grad_norm": 0.3423704504966736,
"learning_rate": 4.721544880549337e-05,
"loss": 0.0185,
"step": 678
},
{
"epoch": 0.1043130929062488,
"grad_norm": 0.3519861102104187,
"learning_rate": 4.694888156932658e-05,
"loss": 0.0464,
"step": 679
},
{
"epoch": 0.10446672043630219,
"grad_norm": 0.2810473144054413,
"learning_rate": 4.668283792628114e-05,
"loss": 0.0277,
"step": 680
},
{
"epoch": 0.10462034796635557,
"grad_norm": 0.2300633043050766,
"learning_rate": 4.6417320502100316e-05,
"loss": 0.0314,
"step": 681
},
{
"epoch": 0.10477397549640896,
"grad_norm": 0.5359747409820557,
"learning_rate": 4.615233191733398e-05,
"loss": 0.1124,
"step": 682
},
{
"epoch": 0.10492760302646234,
"grad_norm": 0.33638763427734375,
"learning_rate": 4.588787478731242e-05,
"loss": 0.092,
"step": 683
},
{
"epoch": 0.10508123055651573,
"grad_norm": 0.5147281885147095,
"learning_rate": 4.5623951722120736e-05,
"loss": 0.0707,
"step": 684
},
{
"epoch": 0.10523485808656911,
"grad_norm": 0.238368421792984,
"learning_rate": 4.5360565326573104e-05,
"loss": 0.0312,
"step": 685
},
{
"epoch": 0.1053884856166225,
"grad_norm": 0.27238187193870544,
"learning_rate": 4.5097718200186814e-05,
"loss": 0.0569,
"step": 686
},
{
"epoch": 0.10554211314667589,
"grad_norm": 0.5643981099128723,
"learning_rate": 4.483541293715698e-05,
"loss": 0.0485,
"step": 687
},
{
"epoch": 0.10569574067672927,
"grad_norm": 0.5961344838142395,
"learning_rate": 4.457365212633058e-05,
"loss": 0.0311,
"step": 688
},
{
"epoch": 0.10584936820678266,
"grad_norm": 0.28255996108055115,
"learning_rate": 4.431243835118124e-05,
"loss": 0.0263,
"step": 689
},
{
"epoch": 0.10600299573683604,
"grad_norm": 0.28440266847610474,
"learning_rate": 4.4051774189783315e-05,
"loss": 0.0468,
"step": 690
},
{
"epoch": 0.10615662326688943,
"grad_norm": 0.2887694835662842,
"learning_rate": 4.379166221478697e-05,
"loss": 0.0269,
"step": 691
},
{
"epoch": 0.10631025079694281,
"grad_norm": 0.34084567427635193,
"learning_rate": 4.3532104993392306e-05,
"loss": 0.0439,
"step": 692
},
{
"epoch": 0.10646387832699619,
"grad_norm": 0.43698835372924805,
"learning_rate": 4.327310508732437e-05,
"loss": 0.0974,
"step": 693
},
{
"epoch": 0.10661750585704959,
"grad_norm": 0.6095538139343262,
"learning_rate": 4.301466505280762e-05,
"loss": 0.0325,
"step": 694
},
{
"epoch": 0.10677113338710296,
"grad_norm": 0.0971674844622612,
"learning_rate": 4.2756787440540936e-05,
"loss": 0.0067,
"step": 695
},
{
"epoch": 0.10692476091715636,
"grad_norm": 0.6854341626167297,
"learning_rate": 4.249947479567218e-05,
"loss": 0.1977,
"step": 696
},
{
"epoch": 0.10707838844720974,
"grad_norm": 0.28634732961654663,
"learning_rate": 4.224272965777326e-05,
"loss": 0.0164,
"step": 697
},
{
"epoch": 0.10723201597726313,
"grad_norm": 0.4830421805381775,
"learning_rate": 4.1986554560815096e-05,
"loss": 0.0976,
"step": 698
},
{
"epoch": 0.10738564350731651,
"grad_norm": 0.4344654977321625,
"learning_rate": 4.173095203314241e-05,
"loss": 0.0446,
"step": 699
},
{
"epoch": 0.10753927103736989,
"grad_norm": 0.2128065973520279,
"learning_rate": 4.1475924597449024e-05,
"loss": 0.0338,
"step": 700
},
{
"epoch": 0.10769289856742328,
"grad_norm": 0.42196571826934814,
"learning_rate": 4.12214747707527e-05,
"loss": 0.0693,
"step": 701
},
{
"epoch": 0.10784652609747666,
"grad_norm": 0.08933784067630768,
"learning_rate": 4.096760506437057e-05,
"loss": 0.0048,
"step": 702
},
{
"epoch": 0.10800015362753006,
"grad_norm": 0.2536148428916931,
"learning_rate": 4.071431798389408e-05,
"loss": 0.0427,
"step": 703
},
{
"epoch": 0.10815378115758344,
"grad_norm": 0.32576629519462585,
"learning_rate": 4.0461616029164526e-05,
"loss": 0.0579,
"step": 704
},
{
"epoch": 0.10830740868763683,
"grad_norm": 0.16896280646324158,
"learning_rate": 4.020950169424815e-05,
"loss": 0.0298,
"step": 705
},
{
"epoch": 0.10846103621769021,
"grad_norm": 0.36833593249320984,
"learning_rate": 3.9957977467411615e-05,
"loss": 0.0545,
"step": 706
},
{
"epoch": 0.10861466374774359,
"grad_norm": 0.7563644647598267,
"learning_rate": 3.9707045831097555e-05,
"loss": 0.1293,
"step": 707
},
{
"epoch": 0.10876829127779698,
"grad_norm": 0.2748095393180847,
"learning_rate": 3.945670926189987e-05,
"loss": 0.0268,
"step": 708
},
{
"epoch": 0.10892191880785036,
"grad_norm": 0.21131619811058044,
"learning_rate": 3.920697023053949e-05,
"loss": 0.0391,
"step": 709
},
{
"epoch": 0.10907554633790376,
"grad_norm": 0.17908282577991486,
"learning_rate": 3.895783120183976e-05,
"loss": 0.0131,
"step": 710
},
{
"epoch": 0.10922917386795714,
"grad_norm": 0.343631386756897,
"learning_rate": 3.8709294634702376e-05,
"loss": 0.0684,
"step": 711
},
{
"epoch": 0.10938280139801053,
"grad_norm": 0.47369644045829773,
"learning_rate": 3.846136298208285e-05,
"loss": 0.0786,
"step": 712
},
{
"epoch": 0.10953642892806391,
"grad_norm": 0.23939087986946106,
"learning_rate": 3.821403869096658e-05,
"loss": 0.075,
"step": 713
},
{
"epoch": 0.10969005645811729,
"grad_norm": 0.6267966032028198,
"learning_rate": 3.796732420234443e-05,
"loss": 0.1362,
"step": 714
},
{
"epoch": 0.10984368398817068,
"grad_norm": 0.4084293246269226,
"learning_rate": 3.7721221951188765e-05,
"loss": 0.0816,
"step": 715
},
{
"epoch": 0.10999731151822406,
"grad_norm": 0.3442552089691162,
"learning_rate": 3.747573436642951e-05,
"loss": 0.0448,
"step": 716
},
{
"epoch": 0.11015093904827745,
"grad_norm": 0.2749521732330322,
"learning_rate": 3.7230863870929964e-05,
"loss": 0.0299,
"step": 717
},
{
"epoch": 0.11030456657833083,
"grad_norm": 0.29545220732688904,
"learning_rate": 3.698661288146311e-05,
"loss": 0.0345,
"step": 718
},
{
"epoch": 0.11045819410838423,
"grad_norm": 0.19477668404579163,
"learning_rate": 3.674298380868756e-05,
"loss": 0.0171,
"step": 719
},
{
"epoch": 0.11061182163843761,
"grad_norm": 0.270500510931015,
"learning_rate": 3.649997905712396e-05,
"loss": 0.0248,
"step": 720
},
{
"epoch": 0.11076544916849099,
"grad_norm": 0.6715177893638611,
"learning_rate": 3.6257601025131026e-05,
"loss": 0.0543,
"step": 721
},
{
"epoch": 0.11091907669854438,
"grad_norm": 0.3106316030025482,
"learning_rate": 3.601585210488218e-05,
"loss": 0.0315,
"step": 722
},
{
"epoch": 0.11107270422859776,
"grad_norm": 0.17719969153404236,
"learning_rate": 3.577473468234156e-05,
"loss": 0.0412,
"step": 723
},
{
"epoch": 0.11122633175865115,
"grad_norm": 0.2095143049955368,
"learning_rate": 3.553425113724088e-05,
"loss": 0.0555,
"step": 724
},
{
"epoch": 0.11137995928870453,
"grad_norm": 0.6922562122344971,
"learning_rate": 3.52944038430556e-05,
"loss": 0.0413,
"step": 725
},
{
"epoch": 0.11153358681875793,
"grad_norm": 0.15403993427753448,
"learning_rate": 3.5055195166981645e-05,
"loss": 0.0122,
"step": 726
},
{
"epoch": 0.1116872143488113,
"grad_norm": 0.34367793798446655,
"learning_rate": 3.481662746991214e-05,
"loss": 0.0478,
"step": 727
},
{
"epoch": 0.11184084187886469,
"grad_norm": 0.281046599149704,
"learning_rate": 3.4578703106413904e-05,
"loss": 0.0503,
"step": 728
},
{
"epoch": 0.11199446940891808,
"grad_norm": 0.2603316307067871,
"learning_rate": 3.4341424424704375e-05,
"loss": 0.0275,
"step": 729
},
{
"epoch": 0.11214809693897146,
"grad_norm": 0.17615608870983124,
"learning_rate": 3.4104793766628304e-05,
"loss": 0.0371,
"step": 730
},
{
"epoch": 0.11230172446902485,
"grad_norm": 0.19366087019443512,
"learning_rate": 3.386881346763483e-05,
"loss": 0.0248,
"step": 731
},
{
"epoch": 0.11245535199907823,
"grad_norm": 0.27182114124298096,
"learning_rate": 3.363348585675414e-05,
"loss": 0.0243,
"step": 732
},
{
"epoch": 0.11260897952913163,
"grad_norm": 0.43272659182548523,
"learning_rate": 3.339881325657484e-05,
"loss": 0.0547,
"step": 733
},
{
"epoch": 0.112762607059185,
"grad_norm": 0.33674025535583496,
"learning_rate": 3.316479798322072e-05,
"loss": 0.035,
"step": 734
},
{
"epoch": 0.1129162345892384,
"grad_norm": 0.34288397431373596,
"learning_rate": 3.2931442346328004e-05,
"loss": 0.0424,
"step": 735
},
{
"epoch": 0.11306986211929178,
"grad_norm": 0.26150768995285034,
"learning_rate": 3.269874864902269e-05,
"loss": 0.056,
"step": 736
},
{
"epoch": 0.11322348964934516,
"grad_norm": 0.21396790444850922,
"learning_rate": 3.246671918789755e-05,
"loss": 0.0569,
"step": 737
},
{
"epoch": 0.11337711717939855,
"grad_norm": 0.7504220604896545,
"learning_rate": 3.223535625298979e-05,
"loss": 0.0718,
"step": 738
},
{
"epoch": 0.11353074470945193,
"grad_norm": 0.40869784355163574,
"learning_rate": 3.200466212775808e-05,
"loss": 0.0364,
"step": 739
},
{
"epoch": 0.11368437223950532,
"grad_norm": 0.5008652806282043,
"learning_rate": 3.1774639089060363e-05,
"loss": 0.0795,
"step": 740
},
{
"epoch": 0.1138379997695587,
"grad_norm": 0.26602503657341003,
"learning_rate": 3.154528940713113e-05,
"loss": 0.0394,
"step": 741
},
{
"epoch": 0.1139916272996121,
"grad_norm": 0.3456244170665741,
"learning_rate": 3.1316615345559185e-05,
"loss": 0.0398,
"step": 742
},
{
"epoch": 0.11414525482966548,
"grad_norm": 0.19652892649173737,
"learning_rate": 3.108861916126518e-05,
"loss": 0.023,
"step": 743
},
{
"epoch": 0.11429888235971886,
"grad_norm": 0.3422330319881439,
"learning_rate": 3.086130310447937e-05,
"loss": 0.043,
"step": 744
},
{
"epoch": 0.11445250988977225,
"grad_norm": 0.4514871835708618,
"learning_rate": 3.063466941871952e-05,
"loss": 0.0242,
"step": 745
},
{
"epoch": 0.11460613741982563,
"grad_norm": 0.29108673334121704,
"learning_rate": 3.0408720340768572e-05,
"loss": 0.0392,
"step": 746
},
{
"epoch": 0.11475976494987902,
"grad_norm": 0.35643014311790466,
"learning_rate": 3.018345810065275e-05,
"loss": 0.0347,
"step": 747
},
{
"epoch": 0.1149133924799324,
"grad_norm": 0.2743763327598572,
"learning_rate": 2.9958884921619367e-05,
"loss": 0.0313,
"step": 748
},
{
"epoch": 0.1150670200099858,
"grad_norm": 0.9116131067276001,
"learning_rate": 2.9735003020115092e-05,
"loss": 0.0457,
"step": 749
},
{
"epoch": 0.11522064754003918,
"grad_norm": 0.26586630940437317,
"learning_rate": 2.9511814605763855e-05,
"loss": 0.0243,
"step": 750
},
{
"epoch": 0.11537427507009256,
"grad_norm": 0.18353892862796783,
"learning_rate": 2.9289321881345254e-05,
"loss": 0.03,
"step": 751
},
{
"epoch": 0.11552790260014595,
"grad_norm": 0.38012245297431946,
"learning_rate": 2.9067527042772636e-05,
"loss": 0.0408,
"step": 752
},
{
"epoch": 0.11568153013019933,
"grad_norm": 0.5171905755996704,
"learning_rate": 2.8846432279071467e-05,
"loss": 0.0272,
"step": 753
},
{
"epoch": 0.11583515766025272,
"grad_norm": 0.32101190090179443,
"learning_rate": 2.8626039772357882e-05,
"loss": 0.0478,
"step": 754
},
{
"epoch": 0.1159887851903061,
"grad_norm": 0.6422986388206482,
"learning_rate": 2.840635169781688e-05,
"loss": 0.0147,
"step": 755
},
{
"epoch": 0.1161424127203595,
"grad_norm": 0.4292686879634857,
"learning_rate": 2.8187370223681132e-05,
"loss": 0.0367,
"step": 756
},
{
"epoch": 0.11629604025041287,
"grad_norm": 0.823320746421814,
"learning_rate": 2.7969097511209308e-05,
"loss": 0.0612,
"step": 757
},
{
"epoch": 0.11644966778046625,
"grad_norm": 0.3667905330657959,
"learning_rate": 2.775153571466502e-05,
"loss": 0.042,
"step": 758
},
{
"epoch": 0.11660329531051965,
"grad_norm": 0.48690491914749146,
"learning_rate": 2.753468698129533e-05,
"loss": 0.0249,
"step": 759
},
{
"epoch": 0.11675692284057303,
"grad_norm": 0.300660103559494,
"learning_rate": 2.7318553451309726e-05,
"loss": 0.031,
"step": 760
},
{
"epoch": 0.11691055037062642,
"grad_norm": 0.17729440331459045,
"learning_rate": 2.7103137257858868e-05,
"loss": 0.0189,
"step": 761
},
{
"epoch": 0.1170641779006798,
"grad_norm": 0.6813021898269653,
"learning_rate": 2.688844052701359e-05,
"loss": 0.0502,
"step": 762
},
{
"epoch": 0.1172178054307332,
"grad_norm": 0.3975362479686737,
"learning_rate": 2.6674465377744017e-05,
"loss": 0.031,
"step": 763
},
{
"epoch": 0.11737143296078657,
"grad_norm": 0.3633306622505188,
"learning_rate": 2.646121392189841e-05,
"loss": 0.0438,
"step": 764
},
{
"epoch": 0.11752506049083995,
"grad_norm": 0.22573494911193848,
"learning_rate": 2.624868826418262e-05,
"loss": 0.0113,
"step": 765
},
{
"epoch": 0.11767868802089335,
"grad_norm": 0.21217849850654602,
"learning_rate": 2.603689050213902e-05,
"loss": 0.0297,
"step": 766
},
{
"epoch": 0.11783231555094673,
"grad_norm": 0.33074015378952026,
"learning_rate": 2.582582272612609e-05,
"loss": 0.08,
"step": 767
},
{
"epoch": 0.11798594308100012,
"grad_norm": 0.27630868554115295,
"learning_rate": 2.561548701929749e-05,
"loss": 0.0552,
"step": 768
},
{
"epoch": 0.1181395706110535,
"grad_norm": 0.4829549491405487,
"learning_rate": 2.540588545758179e-05,
"loss": 0.1101,
"step": 769
},
{
"epoch": 0.11829319814110689,
"grad_norm": 0.334322065114975,
"learning_rate": 2.5197020109661772e-05,
"loss": 0.053,
"step": 770
},
{
"epoch": 0.11844682567116027,
"grad_norm": 0.37972596287727356,
"learning_rate": 2.4988893036954043e-05,
"loss": 0.0156,
"step": 771
},
{
"epoch": 0.11860045320121365,
"grad_norm": 0.18245559930801392,
"learning_rate": 2.4781506293588873e-05,
"loss": 0.0292,
"step": 772
},
{
"epoch": 0.11875408073126704,
"grad_norm": 0.3700565993785858,
"learning_rate": 2.4574861926389615e-05,
"loss": 0.0473,
"step": 773
},
{
"epoch": 0.11890770826132042,
"grad_norm": 0.4302944540977478,
"learning_rate": 2.436896197485282e-05,
"loss": 0.0295,
"step": 774
},
{
"epoch": 0.11906133579137382,
"grad_norm": 0.16051656007766724,
"learning_rate": 2.4163808471127812e-05,
"loss": 0.0228,
"step": 775
},
{
"epoch": 0.1192149633214272,
"grad_norm": 0.21104241907596588,
"learning_rate": 2.3959403439996907e-05,
"loss": 0.0323,
"step": 776
},
{
"epoch": 0.11936859085148059,
"grad_norm": 0.4265983998775482,
"learning_rate": 2.37557488988552e-05,
"loss": 0.0324,
"step": 777
},
{
"epoch": 0.11952221838153397,
"grad_norm": 0.38489940762519836,
"learning_rate": 2.3552846857690846e-05,
"loss": 0.0402,
"step": 778
},
{
"epoch": 0.11967584591158735,
"grad_norm": 0.41113337874412537,
"learning_rate": 2.3350699319065026e-05,
"loss": 0.041,
"step": 779
},
{
"epoch": 0.11982947344164074,
"grad_norm": 0.4150101840496063,
"learning_rate": 2.3149308278092342e-05,
"loss": 0.037,
"step": 780
},
{
"epoch": 0.11998310097169412,
"grad_norm": 0.11619318276643753,
"learning_rate": 2.2948675722421086e-05,
"loss": 0.0129,
"step": 781
},
{
"epoch": 0.12013672850174752,
"grad_norm": 0.33115360140800476,
"learning_rate": 2.2748803632213557e-05,
"loss": 0.0264,
"step": 782
},
{
"epoch": 0.1202903560318009,
"grad_norm": 0.5806334614753723,
"learning_rate": 2.254969398012663e-05,
"loss": 0.0395,
"step": 783
},
{
"epoch": 0.12044398356185429,
"grad_norm": 0.3120412528514862,
"learning_rate": 2.235134873129213e-05,
"loss": 0.0655,
"step": 784
},
{
"epoch": 0.12059761109190767,
"grad_norm": 0.2077431082725525,
"learning_rate": 2.2153769843297667e-05,
"loss": 0.039,
"step": 785
},
{
"epoch": 0.12075123862196105,
"grad_norm": 0.24149702489376068,
"learning_rate": 2.195695926616702e-05,
"loss": 0.0406,
"step": 786
},
{
"epoch": 0.12090486615201444,
"grad_norm": 0.5412996411323547,
"learning_rate": 2.1760918942341192e-05,
"loss": 0.0538,
"step": 787
},
{
"epoch": 0.12105849368206782,
"grad_norm": 0.30975329875946045,
"learning_rate": 2.1565650806658975e-05,
"loss": 0.0412,
"step": 788
},
{
"epoch": 0.12121212121212122,
"grad_norm": 0.709606409072876,
"learning_rate": 2.137115678633811e-05,
"loss": 0.0328,
"step": 789
},
{
"epoch": 0.1213657487421746,
"grad_norm": 0.4397044777870178,
"learning_rate": 2.1177438800956007e-05,
"loss": 0.0828,
"step": 790
},
{
"epoch": 0.12151937627222799,
"grad_norm": 0.09754655510187149,
"learning_rate": 2.098449876243096e-05,
"loss": 0.0308,
"step": 791
},
{
"epoch": 0.12167300380228137,
"grad_norm": 0.21479186415672302,
"learning_rate": 2.07923385750033e-05,
"loss": 0.0363,
"step": 792
},
{
"epoch": 0.12182663133233475,
"grad_norm": 0.3449937105178833,
"learning_rate": 2.0600960135216462e-05,
"loss": 0.0446,
"step": 793
},
{
"epoch": 0.12198025886238814,
"grad_norm": 0.41584691405296326,
"learning_rate": 2.0410365331898416e-05,
"loss": 0.0884,
"step": 794
},
{
"epoch": 0.12213388639244152,
"grad_norm": 0.32898953557014465,
"learning_rate": 2.0220556046142893e-05,
"loss": 0.0276,
"step": 795
},
{
"epoch": 0.12228751392249491,
"grad_norm": 0.3684637248516083,
"learning_rate": 2.0031534151290943e-05,
"loss": 0.0675,
"step": 796
},
{
"epoch": 0.1224411414525483,
"grad_norm": 0.29086053371429443,
"learning_rate": 1.9843301512912327e-05,
"loss": 0.0652,
"step": 797
},
{
"epoch": 0.12259476898260169,
"grad_norm": 0.36780989170074463,
"learning_rate": 1.965585998878724e-05,
"loss": 0.0492,
"step": 798
},
{
"epoch": 0.12274839651265507,
"grad_norm": 0.07338400930166245,
"learning_rate": 1.946921142888781e-05,
"loss": 0.0065,
"step": 799
},
{
"epoch": 0.12290202404270845,
"grad_norm": 0.19284406304359436,
"learning_rate": 1.928335767535997e-05,
"loss": 0.033,
"step": 800
},
{
"epoch": 0.12305565157276184,
"grad_norm": 0.3301224112510681,
"learning_rate": 1.9098300562505266e-05,
"loss": 0.0368,
"step": 801
},
{
"epoch": 0.12320927910281522,
"grad_norm": 0.16345694661140442,
"learning_rate": 1.891404191676265e-05,
"loss": 0.0225,
"step": 802
},
{
"epoch": 0.12336290663286861,
"grad_norm": 0.7950462102890015,
"learning_rate": 1.8730583556690605e-05,
"loss": 0.0527,
"step": 803
},
{
"epoch": 0.12351653416292199,
"grad_norm": 0.2953856885433197,
"learning_rate": 1.854792729294905e-05,
"loss": 0.0773,
"step": 804
},
{
"epoch": 0.12367016169297539,
"grad_norm": 0.9576331973075867,
"learning_rate": 1.8366074928281607e-05,
"loss": 0.052,
"step": 805
},
{
"epoch": 0.12382378922302877,
"grad_norm": 0.15577109158039093,
"learning_rate": 1.818502825749764e-05,
"loss": 0.0225,
"step": 806
},
{
"epoch": 0.12397741675308216,
"grad_norm": 0.24769802391529083,
"learning_rate": 1.8004789067454764e-05,
"loss": 0.048,
"step": 807
},
{
"epoch": 0.12413104428313554,
"grad_norm": 0.14375796914100647,
"learning_rate": 1.7825359137040988e-05,
"loss": 0.0245,
"step": 808
},
{
"epoch": 0.12428467181318892,
"grad_norm": 0.47710293531417847,
"learning_rate": 1.7646740237157256e-05,
"loss": 0.0459,
"step": 809
},
{
"epoch": 0.12443829934324231,
"grad_norm": 0.4463082551956177,
"learning_rate": 1.7468934130700044e-05,
"loss": 0.0562,
"step": 810
},
{
"epoch": 0.12459192687329569,
"grad_norm": 0.24746567010879517,
"learning_rate": 1.7291942572543807e-05,
"loss": 0.0408,
"step": 811
},
{
"epoch": 0.12474555440334908,
"grad_norm": 0.9491742253303528,
"learning_rate": 1.7115767309523812e-05,
"loss": 0.0572,
"step": 812
},
{
"epoch": 0.12489918193340246,
"grad_norm": 0.42646944522857666,
"learning_rate": 1.6940410080418723e-05,
"loss": 0.1103,
"step": 813
},
{
"epoch": 0.12505280946345584,
"grad_norm": 0.37025365233421326,
"learning_rate": 1.6765872615933677e-05,
"loss": 0.047,
"step": 814
},
{
"epoch": 0.12520643699350922,
"grad_norm": 0.5282210111618042,
"learning_rate": 1.6592156638682886e-05,
"loss": 0.034,
"step": 815
},
{
"epoch": 0.12536006452356263,
"grad_norm": 0.3117554485797882,
"learning_rate": 1.6419263863172997e-05,
"loss": 0.0229,
"step": 816
},
{
"epoch": 0.125513692053616,
"grad_norm": 0.3928287625312805,
"learning_rate": 1.6247195995785837e-05,
"loss": 0.0224,
"step": 817
},
{
"epoch": 0.1256673195836694,
"grad_norm": 0.4059083163738251,
"learning_rate": 1.6075954734761845e-05,
"loss": 0.0528,
"step": 818
},
{
"epoch": 0.12582094711372277,
"grad_norm": 0.28537142276763916,
"learning_rate": 1.5905541770183096e-05,
"loss": 0.0499,
"step": 819
},
{
"epoch": 0.12597457464377618,
"grad_norm": 0.24158331751823425,
"learning_rate": 1.5735958783956794e-05,
"loss": 0.0348,
"step": 820
},
{
"epoch": 0.12612820217382956,
"grad_norm": 0.21823833882808685,
"learning_rate": 1.5567207449798515e-05,
"loss": 0.0379,
"step": 821
},
{
"epoch": 0.12628182970388294,
"grad_norm": 0.3099862337112427,
"learning_rate": 1.539928943321579e-05,
"loss": 0.0584,
"step": 822
},
{
"epoch": 0.12643545723393632,
"grad_norm": 0.33067381381988525,
"learning_rate": 1.5232206391491699e-05,
"loss": 0.0764,
"step": 823
},
{
"epoch": 0.1265890847639897,
"grad_norm": 0.73545902967453,
"learning_rate": 1.5065959973668353e-05,
"loss": 0.0824,
"step": 824
},
{
"epoch": 0.1267427122940431,
"grad_norm": 0.5218185186386108,
"learning_rate": 1.4900551820530828e-05,
"loss": 0.0968,
"step": 825
},
{
"epoch": 0.12689633982409648,
"grad_norm": 0.3948110342025757,
"learning_rate": 1.4735983564590783e-05,
"loss": 0.0389,
"step": 826
},
{
"epoch": 0.12704996735414986,
"grad_norm": 0.036857958883047104,
"learning_rate": 1.4572256830070497e-05,
"loss": 0.0029,
"step": 827
},
{
"epoch": 0.12720359488420324,
"grad_norm": 0.23741693794727325,
"learning_rate": 1.4409373232886702e-05,
"loss": 0.0239,
"step": 828
},
{
"epoch": 0.12735722241425662,
"grad_norm": 0.2457609474658966,
"learning_rate": 1.4247334380634792e-05,
"loss": 0.0571,
"step": 829
},
{
"epoch": 0.12751084994431003,
"grad_norm": 0.4082302451133728,
"learning_rate": 1.4086141872572789e-05,
"loss": 0.0488,
"step": 830
},
{
"epoch": 0.1276644774743634,
"grad_norm": 0.1847798377275467,
"learning_rate": 1.3925797299605647e-05,
"loss": 0.0223,
"step": 831
},
{
"epoch": 0.1278181050044168,
"grad_norm": 0.18577976524829865,
"learning_rate": 1.3766302244269624e-05,
"loss": 0.0344,
"step": 832
},
{
"epoch": 0.12797173253447017,
"grad_norm": 0.22289955615997314,
"learning_rate": 1.3607658280716473e-05,
"loss": 0.021,
"step": 833
},
{
"epoch": 0.12812536006452357,
"grad_norm": 0.5953392386436462,
"learning_rate": 1.3449866974698122e-05,
"loss": 0.0277,
"step": 834
},
{
"epoch": 0.12827898759457695,
"grad_norm": 0.5892924070358276,
"learning_rate": 1.3292929883550998e-05,
"loss": 0.0964,
"step": 835
},
{
"epoch": 0.12843261512463033,
"grad_norm": 0.4014976918697357,
"learning_rate": 1.3136848556180892e-05,
"loss": 0.0973,
"step": 836
},
{
"epoch": 0.1285862426546837,
"grad_norm": 0.2599301338195801,
"learning_rate": 1.2981624533047432e-05,
"loss": 0.041,
"step": 837
},
{
"epoch": 0.1287398701847371,
"grad_norm": 0.3025781214237213,
"learning_rate": 1.2827259346149122e-05,
"loss": 0.0557,
"step": 838
},
{
"epoch": 0.1288934977147905,
"grad_norm": 0.33187365531921387,
"learning_rate": 1.2673754519008008e-05,
"loss": 0.0457,
"step": 839
},
{
"epoch": 0.12904712524484388,
"grad_norm": 0.2707712948322296,
"learning_rate": 1.2521111566654731e-05,
"loss": 0.0683,
"step": 840
},
{
"epoch": 0.12920075277489726,
"grad_norm": 0.37803420424461365,
"learning_rate": 1.2369331995613665e-05,
"loss": 0.0397,
"step": 841
},
{
"epoch": 0.12935438030495064,
"grad_norm": 0.2581973075866699,
"learning_rate": 1.2218417303887842e-05,
"loss": 0.0786,
"step": 842
},
{
"epoch": 0.12950800783500405,
"grad_norm": 0.3860432505607605,
"learning_rate": 1.206836898094439e-05,
"loss": 0.0913,
"step": 843
},
{
"epoch": 0.12966163536505743,
"grad_norm": 0.31896284222602844,
"learning_rate": 1.191918850769964e-05,
"loss": 0.0282,
"step": 844
},
{
"epoch": 0.1298152628951108,
"grad_norm": 0.2464367151260376,
"learning_rate": 1.1770877356504683e-05,
"loss": 0.0381,
"step": 845
},
{
"epoch": 0.12996889042516419,
"grad_norm": 0.36452198028564453,
"learning_rate": 1.1623436991130654e-05,
"loss": 0.0264,
"step": 846
},
{
"epoch": 0.13012251795521756,
"grad_norm": 0.42515426874160767,
"learning_rate": 1.1476868866754486e-05,
"loss": 0.0693,
"step": 847
},
{
"epoch": 0.13027614548527097,
"grad_norm": 0.26798519492149353,
"learning_rate": 1.1331174429944347e-05,
"loss": 0.043,
"step": 848
},
{
"epoch": 0.13042977301532435,
"grad_norm": 0.27414458990097046,
"learning_rate": 1.1186355118645554e-05,
"loss": 0.037,
"step": 849
},
{
"epoch": 0.13058340054537773,
"grad_norm": 0.10538000613451004,
"learning_rate": 1.1042412362166222e-05,
"loss": 0.0158,
"step": 850
},
{
"epoch": 0.1307370280754311,
"grad_norm": 0.3702573776245117,
"learning_rate": 1.0899347581163221e-05,
"loss": 0.051,
"step": 851
},
{
"epoch": 0.1308906556054845,
"grad_norm": 0.5375767350196838,
"learning_rate": 1.0757162187628222e-05,
"loss": 0.1216,
"step": 852
},
{
"epoch": 0.1310442831355379,
"grad_norm": 0.33878618478775024,
"learning_rate": 1.0615857584873623e-05,
"loss": 0.0896,
"step": 853
},
{
"epoch": 0.13119791066559128,
"grad_norm": 0.3007965087890625,
"learning_rate": 1.0475435167518843e-05,
"loss": 0.0347,
"step": 854
},
{
"epoch": 0.13135153819564466,
"grad_norm": 0.2966013550758362,
"learning_rate": 1.0335896321476413e-05,
"loss": 0.0592,
"step": 855
},
{
"epoch": 0.13150516572569804,
"grad_norm": 0.18396848440170288,
"learning_rate": 1.0197242423938446e-05,
"loss": 0.0267,
"step": 856
},
{
"epoch": 0.13165879325575144,
"grad_norm": 0.2198018580675125,
"learning_rate": 1.0059474843362892e-05,
"loss": 0.0375,
"step": 857
},
{
"epoch": 0.13181242078580482,
"grad_norm": 0.4812876582145691,
"learning_rate": 9.922594939460194e-06,
"loss": 0.0347,
"step": 858
},
{
"epoch": 0.1319660483158582,
"grad_norm": 0.3861396312713623,
"learning_rate": 9.786604063179728e-06,
"loss": 0.0703,
"step": 859
},
{
"epoch": 0.13211967584591158,
"grad_norm": 0.20481643080711365,
"learning_rate": 9.651503556696516e-06,
"loss": 0.013,
"step": 860
},
{
"epoch": 0.13227330337596496,
"grad_norm": 0.3819121718406677,
"learning_rate": 9.517294753398064e-06,
"loss": 0.0971,
"step": 861
},
{
"epoch": 0.13242693090601837,
"grad_norm": 0.20617301762104034,
"learning_rate": 9.383978977871021e-06,
"loss": 0.0279,
"step": 862
},
{
"epoch": 0.13258055843607175,
"grad_norm": 0.2655280828475952,
"learning_rate": 9.251557545888312e-06,
"loss": 0.0265,
"step": 863
},
{
"epoch": 0.13273418596612513,
"grad_norm": 0.32171371579170227,
"learning_rate": 9.120031764395987e-06,
"loss": 0.0656,
"step": 864
},
{
"epoch": 0.1328878134961785,
"grad_norm": 0.12199762463569641,
"learning_rate": 8.989402931500434e-06,
"loss": 0.006,
"step": 865
},
{
"epoch": 0.1330414410262319,
"grad_norm": 0.5248639583587646,
"learning_rate": 8.85967233645547e-06,
"loss": 0.0506,
"step": 866
},
{
"epoch": 0.1331950685562853,
"grad_norm": 0.28909486532211304,
"learning_rate": 8.730841259649725e-06,
"loss": 0.0485,
"step": 867
},
{
"epoch": 0.13334869608633867,
"grad_norm": 0.2370561957359314,
"learning_rate": 8.602910972593892e-06,
"loss": 0.0426,
"step": 868
},
{
"epoch": 0.13350232361639205,
"grad_norm": 0.2912999391555786,
"learning_rate": 8.475882737908248e-06,
"loss": 0.0121,
"step": 869
},
{
"epoch": 0.13365595114644543,
"grad_norm": 0.45137980580329895,
"learning_rate": 8.34975780931021e-06,
"loss": 0.0705,
"step": 870
},
{
"epoch": 0.13380957867649884,
"grad_norm": 0.35219690203666687,
"learning_rate": 8.224537431601886e-06,
"loss": 0.0545,
"step": 871
},
{
"epoch": 0.13396320620655222,
"grad_norm": 0.37813490629196167,
"learning_rate": 8.100222840657878e-06,
"loss": 0.0404,
"step": 872
},
{
"epoch": 0.1341168337366056,
"grad_norm": 0.3680269122123718,
"learning_rate": 7.976815263412963e-06,
"loss": 0.0666,
"step": 873
},
{
"epoch": 0.13427046126665898,
"grad_norm": 0.2527633011341095,
"learning_rate": 7.854315917850163e-06,
"loss": 0.0867,
"step": 874
},
{
"epoch": 0.13442408879671236,
"grad_norm": 0.3979645371437073,
"learning_rate": 7.73272601298851e-06,
"loss": 0.072,
"step": 875
},
{
"epoch": 0.13457771632676577,
"grad_norm": 0.3535909652709961,
"learning_rate": 7.612046748871327e-06,
"loss": 0.0528,
"step": 876
},
{
"epoch": 0.13473134385681915,
"grad_norm": 0.34823527932167053,
"learning_rate": 7.492279316554207e-06,
"loss": 0.0427,
"step": 877
},
{
"epoch": 0.13488497138687253,
"grad_norm": 0.33774158358573914,
"learning_rate": 7.3734248980933395e-06,
"loss": 0.0406,
"step": 878
},
{
"epoch": 0.1350385989169259,
"grad_norm": 0.1883544921875,
"learning_rate": 7.255484666533874e-06,
"loss": 0.0232,
"step": 879
},
{
"epoch": 0.13519222644697929,
"grad_norm": 0.22137466073036194,
"learning_rate": 7.138459785898266e-06,
"loss": 0.0322,
"step": 880
},
{
"epoch": 0.1353458539770327,
"grad_norm": 0.2191513031721115,
"learning_rate": 7.022351411174866e-06,
"loss": 0.0177,
"step": 881
},
{
"epoch": 0.13549948150708607,
"grad_norm": 0.17891040444374084,
"learning_rate": 6.907160688306425e-06,
"loss": 0.0094,
"step": 882
},
{
"epoch": 0.13565310903713945,
"grad_norm": 0.12853822112083435,
"learning_rate": 6.7928887541789055e-06,
"loss": 0.0151,
"step": 883
},
{
"epoch": 0.13580673656719283,
"grad_norm": 0.3677392899990082,
"learning_rate": 6.679536736610137e-06,
"loss": 0.0551,
"step": 884
},
{
"epoch": 0.13596036409724624,
"grad_norm": 0.08899609744548798,
"learning_rate": 6.5671057543387985e-06,
"loss": 0.0045,
"step": 885
},
{
"epoch": 0.13611399162729962,
"grad_norm": 0.44172629714012146,
"learning_rate": 6.455596917013273e-06,
"loss": 0.0472,
"step": 886
},
{
"epoch": 0.136267619157353,
"grad_norm": 0.15009059011936188,
"learning_rate": 6.345011325180772e-06,
"loss": 0.031,
"step": 887
},
{
"epoch": 0.13642124668740638,
"grad_norm": 0.22758415341377258,
"learning_rate": 6.235350070276447e-06,
"loss": 0.0328,
"step": 888
},
{
"epoch": 0.13657487421745976,
"grad_norm": 0.19135577976703644,
"learning_rate": 6.126614234612593e-06,
"loss": 0.0304,
"step": 889
},
{
"epoch": 0.13672850174751316,
"grad_norm": 0.3660348355770111,
"learning_rate": 6.018804891368035e-06,
"loss": 0.04,
"step": 890
},
{
"epoch": 0.13688212927756654,
"grad_norm": 0.30344316363334656,
"learning_rate": 5.911923104577455e-06,
"loss": 0.0468,
"step": 891
},
{
"epoch": 0.13703575680761992,
"grad_norm": 0.31564733386039734,
"learning_rate": 5.805969929120947e-06,
"loss": 0.0308,
"step": 892
},
{
"epoch": 0.1371893843376733,
"grad_norm": 0.35533609986305237,
"learning_rate": 5.700946410713548e-06,
"loss": 0.0595,
"step": 893
},
{
"epoch": 0.13734301186772668,
"grad_norm": 0.16614094376564026,
"learning_rate": 5.5968535858950345e-06,
"loss": 0.0416,
"step": 894
},
{
"epoch": 0.1374966393977801,
"grad_norm": 0.2255018651485443,
"learning_rate": 5.49369248201953e-06,
"loss": 0.0457,
"step": 895
},
{
"epoch": 0.13765026692783347,
"grad_norm": 0.6730954647064209,
"learning_rate": 5.39146411724547e-06,
"loss": 0.0714,
"step": 896
},
{
"epoch": 0.13780389445788685,
"grad_norm": 0.38751596212387085,
"learning_rate": 5.290169500525577e-06,
"loss": 0.0613,
"step": 897
},
{
"epoch": 0.13795752198794023,
"grad_norm": 0.34039607644081116,
"learning_rate": 5.189809631596798e-06,
"loss": 0.0246,
"step": 898
},
{
"epoch": 0.13811114951799364,
"grad_norm": 0.17095424234867096,
"learning_rate": 5.0903855009705514e-06,
"loss": 0.0131,
"step": 899
},
{
"epoch": 0.13826477704804702,
"grad_norm": 0.28924375772476196,
"learning_rate": 4.991898089922819e-06,
"loss": 0.0391,
"step": 900
},
{
"epoch": 0.1384184045781004,
"grad_norm": 0.2250155508518219,
"learning_rate": 4.8943483704846475e-06,
"loss": 0.0235,
"step": 901
},
{
"epoch": 0.13857203210815378,
"grad_norm": 0.3394258916378021,
"learning_rate": 4.797737305432337e-06,
"loss": 0.0721,
"step": 902
},
{
"epoch": 0.13872565963820715,
"grad_norm": 0.21985206007957458,
"learning_rate": 4.702065848278126e-06,
"loss": 0.0252,
"step": 903
},
{
"epoch": 0.13887928716826056,
"grad_norm": 0.2618788182735443,
"learning_rate": 4.607334943260655e-06,
"loss": 0.0211,
"step": 904
},
{
"epoch": 0.13903291469831394,
"grad_norm": 0.35371533036231995,
"learning_rate": 4.513545525335705e-06,
"loss": 0.0571,
"step": 905
},
{
"epoch": 0.13918654222836732,
"grad_norm": 0.4113069772720337,
"learning_rate": 4.420698520166988e-06,
"loss": 0.0271,
"step": 906
},
{
"epoch": 0.1393401697584207,
"grad_norm": 0.22423537075519562,
"learning_rate": 4.328794844116946e-06,
"loss": 0.0485,
"step": 907
},
{
"epoch": 0.1394937972884741,
"grad_norm": 0.34244272112846375,
"learning_rate": 4.237835404237778e-06,
"loss": 0.0554,
"step": 908
},
{
"epoch": 0.1396474248185275,
"grad_norm": 0.27201080322265625,
"learning_rate": 4.147821098262405e-06,
"loss": 0.0441,
"step": 909
},
{
"epoch": 0.13980105234858087,
"grad_norm": 0.2116294950246811,
"learning_rate": 4.0587528145957235e-06,
"loss": 0.0265,
"step": 910
},
{
"epoch": 0.13995467987863425,
"grad_norm": 0.4444669783115387,
"learning_rate": 3.970631432305694e-06,
"loss": 0.0243,
"step": 911
},
{
"epoch": 0.14010830740868763,
"grad_norm": 0.28802183270454407,
"learning_rate": 3.883457821114811e-06,
"loss": 0.0158,
"step": 912
},
{
"epoch": 0.14026193493874103,
"grad_norm": 0.5275750160217285,
"learning_rate": 3.797232841391407e-06,
"loss": 0.0466,
"step": 913
},
{
"epoch": 0.1404155624687944,
"grad_norm": 0.2683006525039673,
"learning_rate": 3.711957344141237e-06,
"loss": 0.0519,
"step": 914
},
{
"epoch": 0.1405691899988478,
"grad_norm": 0.2940289080142975,
"learning_rate": 3.627632170999029e-06,
"loss": 0.0097,
"step": 915
},
{
"epoch": 0.14072281752890117,
"grad_norm": 0.325249046087265,
"learning_rate": 3.5442581542201923e-06,
"loss": 0.03,
"step": 916
},
{
"epoch": 0.14087644505895455,
"grad_norm": 0.2177402377128601,
"learning_rate": 3.461836116672612e-06,
"loss": 0.043,
"step": 917
},
{
"epoch": 0.14103007258900796,
"grad_norm": 0.12443643808364868,
"learning_rate": 3.380366871828522e-06,
"loss": 0.0214,
"step": 918
},
{
"epoch": 0.14118370011906134,
"grad_norm": 0.2948092520236969,
"learning_rate": 3.2998512237565005e-06,
"loss": 0.0595,
"step": 919
},
{
"epoch": 0.14133732764911472,
"grad_norm": 0.22343342006206512,
"learning_rate": 3.2202899671134546e-06,
"loss": 0.0599,
"step": 920
},
{
"epoch": 0.1414909551791681,
"grad_norm": 0.21625959873199463,
"learning_rate": 3.1416838871368924e-06,
"loss": 0.036,
"step": 921
},
{
"epoch": 0.1416445827092215,
"grad_norm": 0.2470436990261078,
"learning_rate": 3.064033759637064e-06,
"loss": 0.0246,
"step": 922
},
{
"epoch": 0.14179821023927489,
"grad_norm": 0.11411121487617493,
"learning_rate": 2.9873403509894203e-06,
"loss": 0.0123,
"step": 923
},
{
"epoch": 0.14195183776932827,
"grad_norm": 0.2494584619998932,
"learning_rate": 2.9116044181269007e-06,
"loss": 0.045,
"step": 924
},
{
"epoch": 0.14210546529938164,
"grad_norm": 0.3116079270839691,
"learning_rate": 2.836826708532603e-06,
"loss": 0.0404,
"step": 925
},
{
"epoch": 0.14225909282943502,
"grad_norm": 0.15428714454174042,
"learning_rate": 2.7630079602323442e-06,
"loss": 0.0123,
"step": 926
},
{
"epoch": 0.14241272035948843,
"grad_norm": 0.35774946212768555,
"learning_rate": 2.690148901787337e-06,
"loss": 0.0717,
"step": 927
},
{
"epoch": 0.1425663478895418,
"grad_norm": 0.14527635276317596,
"learning_rate": 2.618250252287113e-06,
"loss": 0.0087,
"step": 928
},
{
"epoch": 0.1427199754195952,
"grad_norm": 0.2148403823375702,
"learning_rate": 2.5473127213422763e-06,
"loss": 0.0562,
"step": 929
},
{
"epoch": 0.14287360294964857,
"grad_norm": 0.17063549160957336,
"learning_rate": 2.4773370090776626e-06,
"loss": 0.0317,
"step": 930
},
{
"epoch": 0.14302723047970195,
"grad_norm": 0.30612626671791077,
"learning_rate": 2.4083238061252567e-06,
"loss": 0.0479,
"step": 931
},
{
"epoch": 0.14318085800975536,
"grad_norm": 0.21359722316265106,
"learning_rate": 2.3402737936175425e-06,
"loss": 0.0126,
"step": 932
},
{
"epoch": 0.14333448553980874,
"grad_norm": 0.26253458857536316,
"learning_rate": 2.273187643180652e-06,
"loss": 0.043,
"step": 933
},
{
"epoch": 0.14348811306986212,
"grad_norm": 0.41115203499794006,
"learning_rate": 2.2070660169278166e-06,
"loss": 0.0355,
"step": 934
},
{
"epoch": 0.1436417405999155,
"grad_norm": 0.25147250294685364,
"learning_rate": 2.141909567452793e-06,
"loss": 0.0129,
"step": 935
},
{
"epoch": 0.1437953681299689,
"grad_norm": 0.3034101128578186,
"learning_rate": 2.0777189378234143e-06,
"loss": 0.0327,
"step": 936
},
{
"epoch": 0.14394899566002228,
"grad_norm": 0.21612095832824707,
"learning_rate": 2.014494761575314e-06,
"loss": 0.0223,
"step": 937
},
{
"epoch": 0.14410262319007566,
"grad_norm": 0.2813854515552521,
"learning_rate": 1.9522376627055583e-06,
"loss": 0.0326,
"step": 938
},
{
"epoch": 0.14425625072012904,
"grad_norm": 0.23056817054748535,
"learning_rate": 1.8909482556666024e-06,
"loss": 0.0222,
"step": 939
},
{
"epoch": 0.14440987825018242,
"grad_norm": 0.2757590413093567,
"learning_rate": 1.8306271453601199e-06,
"loss": 0.0357,
"step": 940
},
{
"epoch": 0.14456350578023583,
"grad_norm": 0.23835089802742004,
"learning_rate": 1.771274927131139e-06,
"loss": 0.0314,
"step": 941
},
{
"epoch": 0.1447171333102892,
"grad_norm": 0.3432852327823639,
"learning_rate": 1.712892186762083e-06,
"loss": 0.0565,
"step": 942
},
{
"epoch": 0.1448707608403426,
"grad_norm": 1.077993392944336,
"learning_rate": 1.6554795004670388e-06,
"loss": 0.2125,
"step": 943
},
{
"epoch": 0.14502438837039597,
"grad_norm": 0.3463801145553589,
"learning_rate": 1.5990374348860305e-06,
"loss": 0.0521,
"step": 944
},
{
"epoch": 0.14517801590044935,
"grad_norm": 0.5247858762741089,
"learning_rate": 1.543566547079467e-06,
"loss": 0.0177,
"step": 945
},
{
"epoch": 0.14533164343050275,
"grad_norm": 0.3216777741909027,
"learning_rate": 1.4890673845226133e-06,
"loss": 0.0179,
"step": 946
},
{
"epoch": 0.14548527096055613,
"grad_norm": 0.5181776881217957,
"learning_rate": 1.4355404851001952e-06,
"loss": 0.0603,
"step": 947
},
{
"epoch": 0.14563889849060951,
"grad_norm": 0.3061254024505615,
"learning_rate": 1.3829863771011253e-06,
"loss": 0.0435,
"step": 948
},
{
"epoch": 0.1457925260206629,
"grad_norm": 0.28748029470443726,
"learning_rate": 1.3314055792131964e-06,
"loss": 0.0421,
"step": 949
},
{
"epoch": 0.1459461535507163,
"grad_norm": 0.2519327998161316,
"learning_rate": 1.280798600518085e-06,
"loss": 0.0395,
"step": 950
},
{
"epoch": 0.14609978108076968,
"grad_norm": 0.449189156293869,
"learning_rate": 1.231165940486234e-06,
"loss": 0.067,
"step": 951
},
{
"epoch": 0.14625340861082306,
"grad_norm": 0.23478630185127258,
"learning_rate": 1.1825080889719563e-06,
"loss": 0.0243,
"step": 952
},
{
"epoch": 0.14640703614087644,
"grad_norm": 0.331528902053833,
"learning_rate": 1.134825526208605e-06,
"loss": 0.0597,
"step": 953
},
{
"epoch": 0.14656066367092982,
"grad_norm": 0.2607584595680237,
"learning_rate": 1.0881187228038215e-06,
"loss": 0.0197,
"step": 954
},
{
"epoch": 0.14671429120098323,
"grad_norm": 0.13642053306102753,
"learning_rate": 1.0423881397349068e-06,
"loss": 0.016,
"step": 955
},
{
"epoch": 0.1468679187310366,
"grad_norm": 0.223410502076149,
"learning_rate": 9.976342283442463e-07,
"loss": 0.0146,
"step": 956
},
{
"epoch": 0.14702154626108999,
"grad_norm": 0.09570923447608948,
"learning_rate": 9.538574303348813e-07,
"loss": 0.0099,
"step": 957
},
{
"epoch": 0.14717517379114337,
"grad_norm": 0.6243155002593994,
"learning_rate": 9.110581777661331e-07,
"loss": 0.0963,
"step": 958
},
{
"epoch": 0.14732880132119675,
"grad_norm": 0.21072612702846527,
"learning_rate": 8.692368930493521e-07,
"loss": 0.0223,
"step": 959
},
{
"epoch": 0.14748242885125015,
"grad_norm": 0.1275264322757721,
"learning_rate": 8.283939889437209e-07,
"loss": 0.0125,
"step": 960
},
{
"epoch": 0.14763605638130353,
"grad_norm": 0.10450811684131622,
"learning_rate": 7.885298685522235e-07,
"loss": 0.0082,
"step": 961
},
{
"epoch": 0.1477896839113569,
"grad_norm": 0.18714646995067596,
"learning_rate": 7.496449253176274e-07,
"loss": 0.0233,
"step": 962
},
{
"epoch": 0.1479433114414103,
"grad_norm": 0.5050369501113892,
"learning_rate": 7.117395430186414e-07,
"loss": 0.0461,
"step": 963
},
{
"epoch": 0.1480969389714637,
"grad_norm": 0.38498908281326294,
"learning_rate": 6.748140957660631e-07,
"loss": 0.0369,
"step": 964
},
{
"epoch": 0.14825056650151708,
"grad_norm": 0.37190935015678406,
"learning_rate": 6.388689479991605e-07,
"loss": 0.0295,
"step": 965
},
{
"epoch": 0.14840419403157046,
"grad_norm": 0.653339684009552,
"learning_rate": 6.039044544820404e-07,
"loss": 0.0885,
"step": 966
},
{
"epoch": 0.14855782156162384,
"grad_norm": 0.3272092044353485,
"learning_rate": 5.699209603001076e-07,
"loss": 0.0855,
"step": 967
},
{
"epoch": 0.14871144909167722,
"grad_norm": 0.47128379344940186,
"learning_rate": 5.369188008567672e-07,
"loss": 0.045,
"step": 968
},
{
"epoch": 0.14886507662173062,
"grad_norm": 0.2561770975589752,
"learning_rate": 5.048983018699827e-07,
"loss": 0.0267,
"step": 969
},
{
"epoch": 0.149018704151784,
"grad_norm": 0.17779581248760223,
"learning_rate": 4.738597793691679e-07,
"loss": 0.0146,
"step": 970
},
{
"epoch": 0.14917233168183738,
"grad_norm": 0.27892354130744934,
"learning_rate": 4.438035396920004e-07,
"loss": 0.0213,
"step": 971
},
{
"epoch": 0.14932595921189076,
"grad_norm": 0.399075984954834,
"learning_rate": 4.1472987948143473e-07,
"loss": 0.0932,
"step": 972
},
{
"epoch": 0.14947958674194414,
"grad_norm": 0.29902878403663635,
"learning_rate": 3.866390856827495e-07,
"loss": 0.031,
"step": 973
},
{
"epoch": 0.14963321427199755,
"grad_norm": 0.31201279163360596,
"learning_rate": 3.595314355407609e-07,
"loss": 0.0439,
"step": 974
},
{
"epoch": 0.14978684180205093,
"grad_norm": 0.5588178634643555,
"learning_rate": 3.3340719659701313e-07,
"loss": 0.0672,
"step": 975
},
{
"epoch": 0.1499404693321043,
"grad_norm": 0.2838308811187744,
"learning_rate": 3.0826662668720364e-07,
"loss": 0.0367,
"step": 976
},
{
"epoch": 0.1500940968621577,
"grad_norm": 0.4529828429222107,
"learning_rate": 2.841099739386066e-07,
"loss": 0.0331,
"step": 977
},
{
"epoch": 0.1502477243922111,
"grad_norm": 0.45134326815605164,
"learning_rate": 2.609374767676309e-07,
"loss": 0.0903,
"step": 978
},
{
"epoch": 0.15040135192226448,
"grad_norm": 0.24295443296432495,
"learning_rate": 2.387493638774774e-07,
"loss": 0.0219,
"step": 979
},
{
"epoch": 0.15055497945231786,
"grad_norm": 0.3305959105491638,
"learning_rate": 2.175458542558517e-07,
"loss": 0.0327,
"step": 980
},
{
"epoch": 0.15070860698237123,
"grad_norm": 0.33900904655456543,
"learning_rate": 1.973271571728441e-07,
"loss": 0.0322,
"step": 981
},
{
"epoch": 0.15086223451242461,
"grad_norm": 0.30225127935409546,
"learning_rate": 1.7809347217881966e-07,
"loss": 0.0444,
"step": 982
},
{
"epoch": 0.15101586204247802,
"grad_norm": 0.11830917745828629,
"learning_rate": 1.598449891024978e-07,
"loss": 0.0137,
"step": 983
},
{
"epoch": 0.1511694895725314,
"grad_norm": 0.2698158323764801,
"learning_rate": 1.425818880490315e-07,
"loss": 0.0741,
"step": 984
},
{
"epoch": 0.15132311710258478,
"grad_norm": 0.345398485660553,
"learning_rate": 1.2630433939825327e-07,
"loss": 0.0548,
"step": 985
},
{
"epoch": 0.15147674463263816,
"grad_norm": 0.4103194773197174,
"learning_rate": 1.1101250380300965e-07,
"loss": 0.0454,
"step": 986
},
{
"epoch": 0.15163037216269157,
"grad_norm": 0.368034690618515,
"learning_rate": 9.670653218752934e-08,
"loss": 0.0508,
"step": 987
},
{
"epoch": 0.15178399969274495,
"grad_norm": 0.3027742803096771,
"learning_rate": 8.33865657459909e-08,
"loss": 0.0386,
"step": 988
},
{
"epoch": 0.15193762722279833,
"grad_norm": 0.23281732201576233,
"learning_rate": 7.105273594107953e-08,
"loss": 0.0189,
"step": 989
},
{
"epoch": 0.1520912547528517,
"grad_norm": 0.33537131547927856,
"learning_rate": 5.970516450271025e-08,
"loss": 0.0217,
"step": 990
},
{
"epoch": 0.1522448822829051,
"grad_norm": 0.24615319073200226,
"learning_rate": 4.934396342684e-08,
"loss": 0.0421,
"step": 991
},
{
"epoch": 0.1523985098129585,
"grad_norm": 0.5010077953338623,
"learning_rate": 3.996923497434635e-08,
"loss": 0.0683,
"step": 992
},
{
"epoch": 0.15255213734301187,
"grad_norm": 0.49042055010795593,
"learning_rate": 3.1581071670006015e-08,
"loss": 0.072,
"step": 993
},
{
"epoch": 0.15270576487306525,
"grad_norm": 0.399473637342453,
"learning_rate": 2.417955630159563e-08,
"loss": 0.0491,
"step": 994
},
{
"epoch": 0.15285939240311863,
"grad_norm": 0.6103872060775757,
"learning_rate": 1.7764761919103477e-08,
"loss": 0.0822,
"step": 995
},
{
"epoch": 0.153013019933172,
"grad_norm": 0.42136210203170776,
"learning_rate": 1.2336751833941229e-08,
"loss": 0.0439,
"step": 996
},
{
"epoch": 0.15316664746322542,
"grad_norm": 0.16099852323532104,
"learning_rate": 7.895579618388827e-09,
"loss": 0.0185,
"step": 997
},
{
"epoch": 0.1533202749932788,
"grad_norm": 0.34373927116394043,
"learning_rate": 4.4412891050171765e-09,
"loss": 0.024,
"step": 998
},
{
"epoch": 0.15347390252333218,
"grad_norm": 0.17840823531150818,
"learning_rate": 1.973914386288467e-09,
"loss": 0.0092,
"step": 999
},
{
"epoch": 0.15362753005338556,
"grad_norm": 0.38151857256889343,
"learning_rate": 4.934798141786879e-10,
"loss": 0.0468,
"step": 1000
},
{
"epoch": 0.15362753005338556,
"eval_loss": 0.010187927633523941,
"eval_runtime": 219.3998,
"eval_samples_per_second": 24.986,
"eval_steps_per_second": 6.249,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.6282308886113812e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}